diff --git a/.circleci/config.yml b/.circleci/config.yml index a1eafd47fed..6138c6f72c7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -53,28 +53,16 @@ parameters: heavy_fuzz_dispatch: type: boolean default: false - acceptance_tests_dispatch: - type: boolean - default: false sync_test_op_node_dispatch: type: boolean default: false ai_contracts_test_dispatch: type: boolean default: false - kona_dispatch: - type: boolean - default: false - op_alloy_dispatch: - type: boolean - default: false - alloy_op_hardforks_dispatch: + rust_ci_dispatch: type: boolean default: false - alloy_op_evm_dispatch: - type: boolean - default: false - op_reth_dispatch: + rust_e2e_dispatch: type: boolean default: false github-event-type: @@ -115,55 +103,38 @@ workflows: base-revision: develop config-path: .circleci/continue/main.yml mapping: | - .* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/main.yml - .* base_image << pipeline.parameters.base_image >> .circleci/continue/main.yml - .* main_dispatch << pipeline.parameters.main_dispatch >> .circleci/continue/main.yml - .* fault_proofs_dispatch << pipeline.parameters.fault_proofs_dispatch >> .circleci/continue/main.yml - .* reproducibility_dispatch << pipeline.parameters.reproducibility_dispatch >> .circleci/continue/main.yml - .* kontrol_dispatch << pipeline.parameters.kontrol_dispatch >> .circleci/continue/main.yml - .* cannon_full_test_dispatch << pipeline.parameters.cannon_full_test_dispatch >> .circleci/continue/main.yml - .* sdk_dispatch << pipeline.parameters.sdk_dispatch >> .circleci/continue/main.yml - .* docker_publish_dispatch << pipeline.parameters.docker_publish_dispatch >> .circleci/continue/main.yml - .* publish_contract_artifacts_dispatch << pipeline.parameters.publish_contract_artifacts_dispatch >> .circleci/continue/main.yml - .* stale_check_dispatch << pipeline.parameters.stale_check_dispatch >> .circleci/continue/main.yml - .* contracts_coverage_dispatch << 
pipeline.parameters.contracts_coverage_dispatch >> .circleci/continue/main.yml - .* heavy_fuzz_dispatch << pipeline.parameters.heavy_fuzz_dispatch >> .circleci/continue/main.yml - .* acceptance_tests_dispatch << pipeline.parameters.acceptance_tests_dispatch >> .circleci/continue/main.yml - .* sync_test_op_node_dispatch << pipeline.parameters.sync_test_op_node_dispatch >> .circleci/continue/main.yml - .* ai_contracts_test_dispatch << pipeline.parameters.ai_contracts_test_dispatch >> .circleci/continue/main.yml - .* github-event-type << pipeline.parameters.github-event-type >> .circleci/continue/main.yml - .* github-event-action << pipeline.parameters.github-event-action >> .circleci/continue/main.yml - .* github-event-base64 << pipeline.parameters.github-event-base64 >> .circleci/continue/main.yml - .* devnet-metrics-collect << pipeline.parameters.devnet-metrics-collect >> .circleci/continue/main.yml - .* flake-shake-dispatch << pipeline.parameters.flake-shake-dispatch >> .circleci/continue/main.yml - .* flake-shake-iterations << pipeline.parameters.flake-shake-iterations >> .circleci/continue/main.yml - .* flake-shake-workers << pipeline.parameters.flake-shake-workers >> .circleci/continue/main.yml - .* go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/main.yml - - kona/.* kona_dispatch << pipeline.parameters.kona_dispatch >> .circleci/continue/kona.yml - kona/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/kona.yml - kona/.* base_image << pipeline.parameters.base_image >> .circleci/continue/kona.yml - kona/.* go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/kona.yml - kona/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml - - op-alloy/.* op_alloy_dispatch << pipeline.parameters.op_alloy_dispatch >> .circleci/continue/op-alloy.yml - op-alloy/.* default_docker_image << pipeline.parameters.default_docker_image >> 
.circleci/continue/op-alloy.yml - op-alloy/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml - - alloy-op-hardforks/.* alloy_op_hardforks_dispatch << pipeline.parameters.alloy_op_hardforks_dispatch >> .circleci/continue/alloy-op-hardforks.yml - alloy-op-hardforks/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/alloy-op-hardforks.yml - alloy-op-hardforks/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml + .* c-default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/main.yml + .* c-base_image << pipeline.parameters.base_image >> .circleci/continue/main.yml + .* c-main_dispatch << pipeline.parameters.main_dispatch >> .circleci/continue/main.yml + .* c-fault_proofs_dispatch << pipeline.parameters.fault_proofs_dispatch >> .circleci/continue/main.yml + .* c-reproducibility_dispatch << pipeline.parameters.reproducibility_dispatch >> .circleci/continue/main.yml + .* c-kontrol_dispatch << pipeline.parameters.kontrol_dispatch >> .circleci/continue/main.yml + .* c-cannon_full_test_dispatch << pipeline.parameters.cannon_full_test_dispatch >> .circleci/continue/main.yml + .* c-sdk_dispatch << pipeline.parameters.sdk_dispatch >> .circleci/continue/main.yml + .* c-docker_publish_dispatch << pipeline.parameters.docker_publish_dispatch >> .circleci/continue/main.yml + .* c-publish_contract_artifacts_dispatch << pipeline.parameters.publish_contract_artifacts_dispatch >> .circleci/continue/main.yml + .* c-stale_check_dispatch << pipeline.parameters.stale_check_dispatch >> .circleci/continue/main.yml + .* c-contracts_coverage_dispatch << pipeline.parameters.contracts_coverage_dispatch >> .circleci/continue/main.yml + .* c-heavy_fuzz_dispatch << pipeline.parameters.heavy_fuzz_dispatch >> .circleci/continue/main.yml + .* c-sync_test_op_node_dispatch << pipeline.parameters.sync_test_op_node_dispatch >> 
.circleci/continue/main.yml + .* c-ai_contracts_test_dispatch << pipeline.parameters.ai_contracts_test_dispatch >> .circleci/continue/main.yml + .* c-github-event-type << pipeline.parameters.github-event-type >> .circleci/continue/main.yml + .* c-github-event-action << pipeline.parameters.github-event-action >> .circleci/continue/main.yml + .* c-github-event-base64 << pipeline.parameters.github-event-base64 >> .circleci/continue/main.yml + .* c-devnet-metrics-collect << pipeline.parameters.devnet-metrics-collect >> .circleci/continue/main.yml + .* c-flake-shake-dispatch << pipeline.parameters.flake-shake-dispatch >> .circleci/continue/main.yml + .* c-flake-shake-iterations << pipeline.parameters.flake-shake-iterations >> .circleci/continue/main.yml + .* c-flake-shake-workers << pipeline.parameters.flake-shake-workers >> .circleci/continue/main.yml + .* c-go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/main.yml - alloy-op-evm/.* alloy_op_evm_dispatch << pipeline.parameters.alloy_op_evm_dispatch >> .circleci/continue/alloy-op-evm.yml - alloy-op-evm/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/alloy-op-evm.yml - alloy-op-evm/.* base_image << pipeline.parameters.base_image >> .circleci/continue/alloy-op-evm.yml - alloy-op-evm/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml + rust/.* c-rust_ci_dispatch << pipeline.parameters.rust_ci_dispatch >> .circleci/continue/rust-ci.yml + rust/.* c-default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml + rust/.* c-base_image << pipeline.parameters.base_image >> .circleci/continue/rust-ci.yml + rust/.* c-go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/rust-ci.yml - op-reth/.* op_reth_dispatch << pipeline.parameters.op_reth_dispatch >> .circleci/continue/op-reth.yml - op-reth/.* default_docker_image << 
pipeline.parameters.default_docker_image >> .circleci/continue/op-reth.yml - op-reth/.* base_image << pipeline.parameters.base_image >> .circleci/continue/op-reth.yml - op-reth/.* go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/op-reth.yml - op-reth/.* default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-ci.yml + rust/.* c-rust_e2e_dispatch << pipeline.parameters.rust_e2e_dispatch >> .circleci/continue/rust-e2e.yml + rust/.* c-default_docker_image << pipeline.parameters.default_docker_image >> .circleci/continue/rust-e2e.yml + rust/.* c-go-cache-version << pipeline.parameters.go-cache-version >> .circleci/continue/rust-e2e.yml setup-tag: when: diff --git a/.circleci/continue/alloy-op-evm.yml b/.circleci/continue/alloy-op-evm.yml deleted file mode 100644 index 131d7a0c517..00000000000 --- a/.circleci/continue/alloy-op-evm.yml +++ /dev/null @@ -1,118 +0,0 @@ -version: 2.1 - -# Alloy-op-evm CI Continuation Configuration -# This file contains ONLY alloy-op-evm-specific jobs and workflows. -# It is merged with main.yml and rust-ci.yml when alloy-op-evm/** changes are detected. -# Shared orbs, commands, and jobs come from main.yml and rust-ci.yml during merge. 
- - -parameters: - # Required parameters (also in main.yml, merged during continuation) - alloy_op_evm_dispatch: - type: boolean - default: false - -# ============================================================================ -# ALLOY-OP-EVM WORKFLOWS -# ============================================================================ -workflows: - # Alloy-op-evm main CI workflow (runs on push to main, pull_request) - # Only runs when changes are detected in the alloy-op-evm folder - alloy-op-evm-ci: - when: - or: - - equal: ["webhook", << pipeline.trigger_source >>] - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - # Build (shared job from main.yml) - - rust-build-binary: - name: alloy-op-evm-build - directory: alloy-op-evm - context: &alloy-op-evm-job-context - - circleci-repo-readonly-authenticated-github-token - - # Test jobs (shared) - - rust-ci-cargo-tests: - context: *alloy-op-evm-job-context - name: alloy-op-evm-test-no-default-features - directory: alloy-op-evm - flags: "--no-default-features" - - rust-ci-cargo-tests: - context: *alloy-op-evm-job-context - name: alloy-op-evm-test-default - directory: alloy-op-evm - - rust-ci-cargo-tests: - context: *alloy-op-evm-job-context - name: alloy-op-evm-test-all-features - directory: alloy-op-evm - flags: "--all-features" - - # Doc tests (shared job) - - rust-ci-doctest: - name: alloy-op-evm-doctest - directory: alloy-op-evm - command: | - cargo test --workspace --doc - cargo test --all-features --workspace --doc - context: *alloy-op-evm-job-context - - # WASM builds (shared job) - - rust-ci-cargo-hack-build: - name: alloy-op-evm-wasm-unknown - directory: alloy-op-evm - target: wasm32-unknown-unknown - flags: "--workspace --ignore-unknown-features --features ws --no-default-features" - context: *alloy-op-evm-job-context - - rust-ci-cargo-hack-build: - name: alloy-op-evm-wasm-wasi - directory: alloy-op-evm - target: wasm32-wasip1 - context: *alloy-op-evm-job-context - - # 
Feature checks (shared job) - - rust-ci-cargo-hack: - name: alloy-op-evm-feature-checks - directory: alloy-op-evm - flags: "--feature-powerset --depth 1" - context: *alloy-op-evm-job-context - - # no_std check (shared job) - - rust-ci-check-no-std: - name: alloy-op-evm-check-no-std - directory: alloy-op-evm - context: *alloy-op-evm-job-context - - # Linting (shared job) - - rust-ci-clippy: - name: alloy-op-evm-clippy - directory: alloy-op-evm - toolchain: nightly - command: "cargo +nightly clippy --workspace --all-targets --all-features" - context: *alloy-op-evm-job-context - - # Documentation (shared job) - - rust-ci-docs: - name: alloy-op-evm-docs - directory: alloy-op-evm - context: *alloy-op-evm-job-context - - # Formatting (shared job) - - rust-ci-fmt: - name: alloy-op-evm-fmt - directory: alloy-op-evm - command: "cargo fmt --all --check" - context: *alloy-op-evm-job-context - - # Dependencies check (shared job) - - rust-ci-deny: - name: alloy-op-evm-deny - directory: alloy-op-evm - context: *alloy-op-evm-job-context - - # Feature propagation check (shared job) - - rust-ci-zepter: - name: alloy-op-evm-zepter - directory: alloy-op-evm - context: *alloy-op-evm-job-context diff --git a/.circleci/continue/alloy-op-hardforks.yml b/.circleci/continue/alloy-op-hardforks.yml deleted file mode 100644 index aa6b93cc3c4..00000000000 --- a/.circleci/continue/alloy-op-hardforks.yml +++ /dev/null @@ -1,78 +0,0 @@ -version: 2.1 - -# alloy-op-hardforks CI Continuation Configuration -# This file contains ONLY alloy-op-hardforks-specific jobs and workflows. -# It is merged with main.yml and rust-ci.yml when alloy-op-hardforks/** changes are detected. -# Shared orbs, commands, and jobs come from main.yml and rust-ci.yml during merge. 
- -parameters: - # Required parameters (also in main.yml, merged during continuation) - alloy_op_hardforks_dispatch: - type: boolean - default: false - -workflows: - # ============================================================================ - # alloy-op-hardforks Workflows (migrated from alloy-op-hardforks/.github/workflows/ci.yml) - # ============================================================================ - - # alloy-op-hardforks main CI workflow (runs on webhooks and API triggers) - # Only runs when changes are detected in the alloy-op-hardforks folder - alloy-op-hardforks-ci: - when: - or: - - equal: ["webhook", << pipeline.trigger_source >>] - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - # Build (shared job from main.yml) - - rust-build-binary: - name: alloy-op-hardforks-build - directory: alloy-op-hardforks - context: &alloy-op-hardforks-context - - circleci-repo-readonly-authenticated-github-token - - # Test matrix: Rust versions x feature flags (shared job) - - rust-ci-cargo-tests: - context: *alloy-op-hardforks-context - name: alloy-op-hardforks-test-<>-<> - directory: alloy-op-hardforks - matrix: - parameters: - rust_version: ["stable", "beta", "nightly", "1.85"] - flags: ["--no-default-features", "", "--all-features"] - # Check no_std compatibility (shared job) - - rust-ci-check-no-std: - name: alloy-op-hardforks-check-no-std - directory: alloy-op-hardforks - context: *alloy-op-hardforks-context - # Feature powerset checks with cargo-hack (shared job) - - rust-ci-cargo-hack: - name: alloy-op-hardforks-cargo-hack - directory: alloy-op-hardforks - flags: "--feature-powerset --depth 2" - context: *alloy-op-hardforks-context - # Clippy lints (shared job) - - rust-ci-clippy: - name: alloy-op-hardforks-clippy - directory: alloy-op-hardforks - context: *alloy-op-hardforks-context - # Documentation build (shared job) - - rust-ci-docs: - name: alloy-op-hardforks-docs - directory: alloy-op-hardforks - command: 
"cargo doc --workspace --all-features --no-deps --document-private-items" - rustdocflags: "--cfg docsrs -D warnings -Zunstable-options --show-type-layout --generate-link-to-definition" - context: *alloy-op-hardforks-context - # Format check (shared job) - - rust-ci-fmt: - name: alloy-op-hardforks-fmt - directory: alloy-op-hardforks - command: "cargo fmt --all --check" - context: *alloy-op-hardforks-context - # Cargo deny (shared job) - - rust-ci-deny: - name: alloy-op-hardforks-cargo-deny - directory: alloy-op-hardforks - context: *alloy-op-hardforks-context diff --git a/.circleci/continue/kona.yml b/.circleci/continue/kona.yml deleted file mode 100644 index 566630cef20..00000000000 --- a/.circleci/continue/kona.yml +++ /dev/null @@ -1,759 +0,0 @@ -version: 2.1 - -# Kona CI Continuation Configuration -# This file contains ONLY Kona-specific jobs and workflows. -# It is merged with main.yml when kona/** changes are detected. -# Shared orbs, commands, and jobs come from main.yml during merge. - -parameters: - # Required parameters (also in main.yml, merged during continuation) - default_docker_image: - type: string - default: cimg/base:2024.01 - base_image: - type: string - default: default - kona_dispatch: - type: boolean - default: false - go-cache-version: - type: string - default: "v0.0" - -# Commands used only by Kona-specific jobs (shared Rust commands are in rust-ci.yml) -commands: - install-zstd: - description: "Install zstd compression utility" - steps: - - run: - name: Install zstd - command: sudo apt-get update && sudo apt-get install -y zstd=1.4.8* - - go-restore-cache: - parameters: - module: - type: string - default: . 
- namespace: - type: string - version: - type: string - default: <> - steps: - - restore_cache: - name: Restore go cache for <> (<>/go.mod) - keys: - - go-<>-<>-<>-{{ checksum "<>/go.mod" }}-{{ checksum "<>/go.sum" }} - - go-<>-<>-<>-{{ checksum "<>/go.mod" }}- - - go-<>-<>-<>- - - go-save-cache: - parameters: - module: - type: string - default: . - namespace: - type: string - version: - type: string - default: <> - steps: - - save_cache: - name: Save go cache for <> (<>/go.mod) - paths: - - ~/.cache/go-build - - ~/go/pkg/mod - key: go-<>-<>-<>-{{ checksum "<>/go.mod" }}-{{ checksum "<>/go.sum" }} - - gcp-oidc-authenticate: - description: "Authenticate with GCP using a CircleCI OIDC token." - parameters: - project_id: - type: env_var_name - default: GCP_PROJECT_ID - workload_identity_pool_id: - type: env_var_name - default: GCP_WIP_ID - workload_identity_pool_provider_id: - type: env_var_name - default: GCP_WIP_PROVIDER_ID - service_account_email: - type: env_var_name - default: GCP_SERVICE_ACCOUNT_EMAIL - gcp_cred_config_file_path: - type: string - default: /home/circleci/gcp_cred_config.json - oidc_token_file_path: - type: string - default: /home/circleci/oidc_token.json - steps: - - run: - name: "Create OIDC credential configuration" - command: | - echo $CIRCLE_OIDC_TOKEN > << parameters.oidc_token_file_path >> - gcloud iam workload-identity-pools create-cred-config \ - "projects/${<< parameters.project_id >>}/locations/global/workloadIdentityPools/${<< parameters.workload_identity_pool_id >>}/providers/${<< parameters.workload_identity_pool_provider_id >>}"\ - --output-file="<< parameters.gcp_cred_config_file_path >>" \ - --service-account="${<< parameters.service_account_email >>}" \ - --credential-source-file=<< parameters.oidc_token_file_path >> - - run: - name: "Authenticate with GCP using OIDC" - command: | - gcloud auth login --brief --cred-file "<< parameters.gcp_cred_config_file_path >>" - echo "export GOOGLE_APPLICATION_CREDENTIALS='<< 
parameters.gcp_cred_config_file_path >>'" | tee -a "$BASH_ENV" - -# ============================================================================ -# KONA JOBS -# ============================================================================ -jobs: - # Kona Node E2E Sysgo Tests (from node_e2e_sysgo_tests.yaml) - kona-node-e2e-sysgo-tests: - parameters: - devnet_config: - description: The devnet configuration to test - type: string - reorg_tests: - description: Whether to run reorg tests - type: boolean - default: false - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - attach_workspace: - at: . - - go-restore-cache: - namespace: kona-ci - - rust-build: &kona-rust-build-release - directory: kona - profile: release - binary: "kona-node" - - run: - name: Run common tests for node with sysgo orchestrator - no_output_timeout: 60m - command: | - WD=$(pwd) - echo "Running tests..." - export OP_RETH_EXEC_PATH="$WD/.circleci-cache/rust-binaries/op-reth" - export RUST_BINARY_PATH_KONA_NODE="$WD/kona/target/release/kona-node" - cd kona && just test-e2e-sysgo-run node node/common "<>" - - when: - condition: - equal: [true, <>] - steps: - - run: - name: Run reorg tests for node with sysgo orchestrator - no_output_timeout: 60m - command: | - WD=$(pwd) - echo "Running tests..." - export OP_RETH_EXEC_PATH="$WD/.circleci-cache/rust-binaries/op-reth" - export RUST_BINARY_PATH_KONA_NODE="$WD/kona/target/release/kona-node" - cd kona && just test-e2e-sysgo-run node node/reorgs "<>" - - go-save-cache: - namespace: kona-ci - - # Kona Node Restart Tests (from node_e2e_sysgo_tests.yaml) - kona-node-restart-sysgo-tests: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - attach_workspace: - at: . 
- - go-restore-cache: - namespace: kona-ci - - rust-build: *kona-rust-build-release - - run: - name: Run restart tests for node with sysgo orchestrator - no_output_timeout: 60m - command: | - echo "Running tests..." - WD=$(pwd) - export RUST_BINARY_PATH_KONA_NODE="$WD/kona/target/release/kona-node" - cd kona && just test-e2e-sysgo node node/restart - - go-save-cache: - namespace: kona-ci - - # Kona Proof Action Tests (from proof.yaml) - kona-proof-action-tests: - parameters: - kind: - description: The kind of action test (single or interop) - type: string - docker: - - image: <> - resource_class: xlarge - parallelism: 4 - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - attach_workspace: - at: . - - go-restore-cache: - namespace: kona-ci - - rust-build: - <<: *kona-rust-build-release - binary: "kona-host" - - run: - name: Build kona and run action tests - working_directory: kona - no_output_timeout: 90m - command: | - echo "Running action tests" - export KONA_HOST_PATH=$(pwd)/target/release/kona-host - just action-tests-<>-run - - go-save-cache: - namespace: kona-ci - - # Kona Host Client Offline Runs (from proof.yaml) - kona-host-client-offline: - parameters: - target: - description: The target platform (native, asterisc, cannon) - type: string - machine: - image: <> - docker_layer_caching: true - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - install-zstd - - rust-prepare - - install-cargo-binstall - - run: - name: Install cargo-llvm-cov - command: | - command -v cargo-llvm-cov >/dev/null || cargo binstall --no-confirm cargo-llvm-cov - - when: - condition: - equal: [asterisc, <>] - steps: - - run: - name: Clone and build asterisc - command: | - git clone --no-checkout --depth 1 https://github.com/ethereum-optimism/asterisc.git - cd asterisc && git fetch --depth 1 origin v1.3.0 && git checkout v1.3.0 - make build-rvgo - sudo mv ./rvgo/bin/asterisc /usr/local/bin/ - - when: - condition: - equal: 
[cannon, <>] - steps: - - run: - name: Build cannon - command: | - cd cannon && make - sudo mv ./bin/cannon /usr/local/bin/ - - run: - name: Set run environment - command: | - echo 'export BLOCK_NUMBER=26215604' >> $BASH_ENV - echo 'export L2_CLAIM=0x7415d942f80a34f77d344e4bccb7050f14e593f5ea33669d27ea01dce273d72d' >> $BASH_ENV - echo 'export L2_OUTPUT_ROOT=0xaa34b62993bd888d7a2ad8541935374e39948576fce12aa8179a0aa5b5bc787b' >> $BASH_ENV - echo 'export L2_HEAD=0xf4adf5790bad1ffc9eee315dc163df9102473c5726a2743da27a8a10dc16b473' >> $BASH_ENV - echo 'export L1_HEAD=0x010cfdb22eaa13e8cdfbf66403f8de2a026475e96a6635d53c31f853a0e3ae25' >> $BASH_ENV - echo 'export L2_CHAIN_ID=11155420' >> $BASH_ENV - echo 'export WITNESS_TAR_NAME=holocene-op-sepolia-26215604-witness.tar.zst' >> $BASH_ENV - - run: - name: Decompress witness data - working_directory: kona - command: | - tar --zstd -xvf ./bin/client/testdata/$WITNESS_TAR_NAME -C . - - run: - name: Run host + client offline - working_directory: kona/bin/client - no_output_timeout: 40m - command: | - source <(cargo llvm-cov show-env --export-prefix) - mkdir -p ../../target - just run-client-<>-offline \ - $BLOCK_NUMBER \ - $L2_CLAIM \ - $L2_OUTPUT_ROOT \ - $L2_HEAD \ - $L1_HEAD \ - $L2_CHAIN_ID - cargo llvm-cov report --lcov --output-path client_host_cov.lcov - - # Kona Rust CI - Lint (from rust_ci.yaml) - kona-cargo-lint: - parameters: - target: - description: The lint target (native, cannon, asterisc) - type: string - machine: - image: <> - docker_layer_caching: true - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: - directory: kona - features: "all" - - rust-install-toolchain: - components: rustfmt - - run: - name: Run fmt + lint for <> - working_directory: kona - no_output_timeout: 40m - command: | - just lint-<> - - kona-build-fpvm: - parameters: - target: - description: The build target (cannon-client, asterisc-client) - type: string - machine: 
- image: <> - docker_layer_caching: true - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare - - run: - name: Build <> - working_directory: kona - no_output_timeout: 40m - command: | - just build-<> - - # Kona Rust CI - Build Benchmarks (from rust_ci.yaml) - kona-cargo-build-benches: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: &kona-benches-cache - directory: kona - prefix: kona-benches - - run: - name: Build benchmarks - working_directory: kona - no_output_timeout: 40m - command: | - just benches - - rust-save-build-cache: *kona-benches-cache - - # Kona Rust CI - Coverage (from rust_ci.yaml) - kona-coverage: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: &kona-coverage-cache - directory: kona - prefix: kona-coverage - version: "1" - - rust-install-toolchain: - components: llvm-tools-preview - - install-cargo-binstall - - run: - name: Install cargo-llvm-cov and nextest - command: | - command -v cargo-llvm-cov >/dev/null || cargo binstall --no-confirm cargo-llvm-cov - command -v cargo-nextest >/dev/null || cargo binstall --no-confirm cargo-nextest - - run: - name: Generate lockfile if needed - working_directory: kona - command: | - [ -f Cargo.lock ] || cargo generate-lockfile - - run: - name: Run coverage - working_directory: kona - no_output_timeout: 40m - command: | - just llvm-cov-tests && mv ./target/nextest/ci/junit.xml ./junit.xml - - codecov/upload: - disable_search: true - files: kona/lcov.info - flags: unit - - store_test_results: - path: kona/junit.xml - - rust-save-build-cache: *kona-coverage-cache - - # Kona Docs Build (from docs.yaml) - kona-docs-build: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare - - 
run: - name: Install Node.js and Bun - command: | - curl -fsSL https://deb.nodesource.com/setup_24.x | sudo -E bash - - sudo apt-get install -y nodejs - curl -fsSL https://bun.sh/install | bash - echo 'export BUN_INSTALL="$HOME/.bun"' >> $BASH_ENV - echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> $BASH_ENV - - run: - name: Install dependencies and Playwright browsers - working_directory: kona/docs - command: | - bun i - npx playwright install --with-deps chromium - - run: - name: Build Vocs documentation - working_directory: kona/docs - no_output_timeout: 60m - command: | - bun run build - echo "Vocs Build Complete" - - store_artifacts: - path: kona/docs/docs/dist - destination: kona-docs - - # Kona Link Checker (from lychee.yaml) - kona-link-checker: - docker: - - image: <> - resource_class: medium - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - run: - name: Install lychee - command: | - curl -sSfL https://github.com/lycheeverse/lychee/releases/download/v0.15.1/lychee-v0.15.1-x86_64-unknown-linux-gnu.tar.gz | tar xz - sudo mv lychee /usr/local/bin/ - - run: - name: Check links - working_directory: kona - command: | - lychee --config ./lychee.toml --cache-exclude-status 429 '**/README.md' './docs/**/*.md' './docs/**/*.mdx' './docs/**/*.html' './docs/**/*.json' || true - - # Kona Supervisor E2E Tests (from supervisor_e2e_sysgo.yaml) - kona-supervisor-e2e-tests: - parameters: - test_pkg: - description: The test package to run - type: string - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-build: - <<: *kona-rust-build-release - binary: "kona-supervisor" - - run: - name: Run supervisor tests for <> - working_directory: kona - no_output_timeout: 40m - command: | - just test-e2e-sysgo supervisor "/supervisor/<>" - - # Kona Publish Prestate Artifacts (from publish_artifacts.yaml) - kona-publish-prestate-artifacts: - parameters: - kind: - description: The kind of prestate 
(cannon) - type: string - default: "cannon" - version: - description: The version to build (kona-client, kona-client-int) - type: string - machine: - image: <> - docker_layer_caching: true # we rely on this for faster builds, and actively warm it up for builds with common stages - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-install-toolchain: - components: llvm-tools-preview - - rust-prepare-and-restore-cache: - directory: kona - needs_clang: true - profile: release - - gcp-cli/install - - gcp-oidc-authenticate: - gcp_cred_config_file_path: /tmp/gcp_cred_config.json - oidc_token_file_path: /tmp/oidc_token.json - - run: - name: Generate prestate artifacts - working_directory: kona - no_output_timeout: 60m - command: | - CANNON_TAG="$(cat .config/cannon_tag)" - cd docker/fpvm-prestates - just "<>" "<>" "$CANNON_TAG" - - run: - name: Upload prestates to GCS - command: | - PRESTATE_HASH=$(jq -r .pre ./prestate-artifacts-<>/prestate-proof.json) - BRANCH_NAME=$(echo "<< pipeline.git.branch >>" | tr '/' '-') - echo "Publishing ${PRESTATE_HASH} as ${BRANCH_NAME}" - if [ -n "<< pipeline.git.branch >>" ]; then - echo "Publishing commit hash data" - INFO_FILE=$(mktemp) - (echo "Commit=<< pipeline.git.revision >>" && echo "Prestate: ${PRESTATE_HASH}") > "${INFO_FILE}" - gsutil cp "${INFO_FILE}" "gs://oplabs-network-data/proofs/kona/<>/${BRANCH_NAME}-<>-prestate.bin.gz.txt" - rm "${INFO_FILE}" - PRESTATE_HASH="${BRANCH_NAME}-<>" - fi - gsutil cp ./prestate-artifacts-<>/prestate.bin.gz "gs://oplabs-network-data/proofs/kona/<>/${PRESTATE_HASH}.bin.gz" - echo "Successfully published prestates artifacts to GCS" - -# ============================================================================ -# KONA WORKFLOWS -# ============================================================================ -workflows: - # Kona main CI workflow (runs on push to main, merge_group, pull_request) - kona-ci: - when: - or: - - equal: ["webhook", << 
pipeline.trigger_source >>] - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - # Dependencies from main config (contracts-bedrock-build, cannon-prestate-quick) - # These jobs exist in main.yml and are available after merge - - contracts-bedrock-build: - build_args: --skip test - context: - - circleci-repo-readonly-authenticated-github-token - - cannon-prestate-quick: &kona-job-base - context: - - circleci-repo-readonly-authenticated-github-token - - cannon-kona-prestate: - <<: *kona-job-base - - rust-build-binary: &cannon-kona-host - name: cannon-kona-host - directory: kona - profile: release - binary: "kona-host" - context: - - circleci-repo-readonly-authenticated-github-token - # Rust CI jobs (from rust_ci.yaml) - - rust-build-binary: &kona-build-release - name: kona-build-release - directory: kona - profile: release - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-binary: - <<: *kona-job-base - name: kona-build-debug - directory: kona - profile: "debug" - features: "default" - save_cache: true - - rust-build-binary: - <<: *kona-job-base - name: kona-build-all-features - directory: kona - profile: "debug" - features: "all" - save_cache: true - - kona-build-fpvm: - <<: *kona-job-base - name: kona-build-fpvm-<> - matrix: - parameters: - target: ["cannon-client", "asterisc-client"] - - rust-ci-cargo-tests: - <<: *kona-job-base - name: kona-cargo-tests - directory: kona - - kona-cargo-lint: - <<: *kona-job-base - name: kona-lint-<> - matrix: - parameters: - target: ["cannon", "asterisc"] - - rust-ci-fmt: - <<: *kona-job-base - name: kona-fmt - directory: kona - - rust-ci-clippy: - <<: *kona-job-base - name: kona-clippy - directory: kona - - kona-cargo-build-benches: - <<: *kona-job-base - - rust-ci-udeps: - <<: *kona-job-base - name: kona-cargo-udeps - directory: kona - - rust-ci-docs: - <<: *kona-job-base - name: kona-cargo-doc-lint - directory: kona - command: "just lint-docs" - rustdocflags: 
"-D warnings" - - rust-ci-doctest: - <<: *kona-job-base - name: kona-cargo-doc-test - directory: kona - command: "just test-docs" - - rust-ci-typos: - <<: *kona-job-base - name: kona-typos - directory: kona - - rust-ci-deny: - <<: *kona-job-base - name: kona-cargo-deny - directory: kona - - rust-ci-zepter: - <<: *kona-job-base - name: kona-zepter - directory: kona - command: | - zepter format features - zepter - - rust-ci-check-no-std: - <<: *kona-job-base - name: kona-check-no-std - directory: kona - toolchain: "1.88.0" - - kona-coverage: - context: - - circleci-repo-readonly-authenticated-github-token - requires: - - kona-cargo-tests - - rust-ci-cargo-hack: - <<: *kona-job-base - name: kona-cargo-hack - directory: kona - flags: "--feature-powerset --no-dev-deps --workspace" - parallelism: 4 - requires: - - kona-build-release - - kona-build-debug - - kona-cargo-udeps - - kona-build-all-features - - - rust-build-submodule: - <<: *kona-job-base - name: op-reth-build - directory: reth - binaries: op-reth - build_command: cd crates/optimism/bin && cargo build --release --bin op-reth - needs_clang: true - - # Node E2E tests (from node_e2e_sysgo_tests.yaml) - - kona-node-e2e-sysgo-tests: - name: kona-node-e2e-<> - matrix: - parameters: - devnet_config: ["simple-kona", "simple-kona-geth", "simple-kona-sequencer", "large-kona-sequencer"] - context: - - circleci-repo-readonly-authenticated-github-token - requires: - - contracts-bedrock-build - - cannon-prestate-quick - - cannon-kona-prestate - - cannon-kona-host - - kona-build-release - - op-reth-build - - kona-node-restart-sysgo-tests: - name: kona-node-e2e-restart - context: - - circleci-repo-readonly-authenticated-github-token - requires: - - contracts-bedrock-build - - cannon-prestate-quick - - cannon-kona-prestate - - cannon-kona-host - - kona-build-release - - # Proof tests (from proof.yaml) - single kind only, interop excluded per original config - - kona-proof-action-tests: - name: kona-proof-action-single - kind: 
single - requires: - - kona-build-release - - contracts-bedrock-build - context: - - circleci-repo-readonly-authenticated-github-token - - kona-host-client-offline: - name: kona-host-client-native - target: native - requires: - - kona-build-release - context: - - circleci-repo-readonly-authenticated-github-token - - kona-host-client-offline: - name: kona-host-client-asterisc - target: asterisc - requires: - - kona-build-release - context: - - circleci-repo-readonly-authenticated-github-token - - kona-host-client-offline: - name: kona-host-client-cannon - target: cannon - requires: - - kona-build-release - context: - - circleci-repo-readonly-authenticated-github-token - - # Docs build (from docs.yaml) - - kona-docs-build: - context: - - circleci-repo-readonly-authenticated-github-token - - # Kona scheduled workflows - scheduled-kona-link-checker: - when: - equal: [build_weekly, <>] - jobs: - - kona-link-checker: - context: - - circleci-repo-readonly-authenticated-github-token - - scheduled-kona-sync: - when: - equal: [build_weekly, <>] - jobs: - - kona-update-monorepo: - context: - - circleci-repo-readonly-authenticated-github-token - - # Kona supervisor E2E tests (from supervisor_e2e_sysgo.yaml) - manual dispatch only - kona-supervisor-e2e: - when: - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - - kona-supervisor-e2e-tests: - name: kona-supervisor-<> - matrix: - parameters: - test_pkg: ["pre_interop", "l1reorg/sysgo"] - context: - - circleci-repo-readonly-authenticated-github-token - - # Kona publish prestate artifacts (from publish_artifacts.yaml) - on push to main or tags - kona-publish-prestates: - when: - or: - - equal: ["develop", <>] - jobs: - - kona-publish-prestate-artifacts: - name: kona-publish-<> - matrix: - parameters: - version: ["kona-client", "kona-client-int"] - context: - - circleci-repo-readonly-authenticated-github-token - - oplabs-gcr diff --git a/.circleci/continue/main.yml b/.circleci/continue/main.yml 
index 93b58e75168..4dee69e8e6a 100644 --- a/.circleci/continue/main.yml +++ b/.circleci/continue/main.yml @@ -1,10 +1,10 @@ version: 2.1 parameters: - default_docker_image: + c-default_docker_image: type: string default: cimg/base:2024.01 - base_image: + c-base_image: type: string default: default # The dispatch parameters are used to manually dispatch pipelines that normally only run post-merge on develop @@ -12,73 +12,70 @@ parameters: # when: # or: # - equal: [ "develop", <> ] - # - equal: [ true, <> ] + # - equal: [ true, <> ] # Add a new `*_dispatch` parameter for any pipeline you want manual dispatch for. - main_dispatch: + c-main_dispatch: type: boolean default: true # default to running main in case the manual run cancelled an automatic run - fault_proofs_dispatch: + c-fault_proofs_dispatch: type: boolean default: false - reproducibility_dispatch: + c-reproducibility_dispatch: type: boolean default: false - kontrol_dispatch: + c-kontrol_dispatch: type: boolean default: false - cannon_full_test_dispatch: + c-cannon_full_test_dispatch: type: boolean default: false - sdk_dispatch: + c-sdk_dispatch: type: boolean default: false - docker_publish_dispatch: + c-docker_publish_dispatch: type: boolean default: false - publish_contract_artifacts_dispatch: + c-publish_contract_artifacts_dispatch: type: boolean default: false - stale_check_dispatch: + c-stale_check_dispatch: type: boolean default: false - contracts_coverage_dispatch: + c-contracts_coverage_dispatch: type: boolean default: false - heavy_fuzz_dispatch: + c-heavy_fuzz_dispatch: type: boolean default: false - acceptance_tests_dispatch: + c-sync_test_op_node_dispatch: type: boolean default: false - sync_test_op_node_dispatch: + c-ai_contracts_test_dispatch: type: boolean default: false - ai_contracts_test_dispatch: - type: boolean - default: false - github-event-type: + c-github-event-type: type: string default: "__not_set__" - github-event-action: + c-github-event-action: type: string default: 
"__not_set__" - github-event-base64: + c-github-event-base64: type: string default: "__not_set__" - devnet-metrics-collect: + c-devnet-metrics-collect: type: boolean default: false - flake-shake-dispatch: + c-flake-shake-dispatch: type: boolean default: false - flake-shake-iterations: + c-flake-shake-iterations: type: integer default: 300 - flake-shake-workers: + c-flake-shake-workers: type: integer default: 50 # go-cache-version can be used as a cache buster when making breaking changes to caching strategy - go-cache-version: + c-go-cache-version: type: string default: "v0.0" @@ -88,7 +85,7 @@ orbs: slack: circleci/slack@6.0.0 shellcheck: circleci/shellcheck@3.2.0 codecov: codecov/codecov@5.0.3 - utils: ethereum-optimism/circleci-utils@1.0.23 + utils: ethereum-optimism/circleci-utils@1.0.24 docker: circleci/docker@2.8.2 github-cli: circleci/github-cli@2.7.0 @@ -337,7 +334,7 @@ commands: # Version can be used as a cache buster when making breaking changes to caching strategy version: type: string - default: <> + default: <> steps: - restore_cache: name: Restore go cache for <> (<>/go.mod) @@ -358,7 +355,7 @@ commands: type: string version: type: string - default: <> + default: <> steps: - save_cache: name: Save go cache for <> (<>/go.mod) @@ -422,8 +419,8 @@ commands: command: | ROOT_DIR="$(pwd)" BIN_DIR="$ROOT_DIR/.circleci-cache/rust-binaries" - echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/kona/target/release/kona-node" >> "$BASH_ENV" - echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/kona/target/release/kona-supervisor" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/rust/target/release/kona-node" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/rust/target/release/kona-supervisor" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_OP_RBUILDER=$BIN_DIR/op-rbuilder" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_ROLLUP_BOOST=$BIN_DIR/rollup-boost" >> "$BASH_ENV" @@ -623,7 +620,7 @@ commands: export PACKAGE="--package 
<< parameters.package >>" fi - export BINARY="--all-targets" + export BINARY="" if [ -n "<< parameters.binary >>" ]; then export BINARY="--bin << parameters.binary >>" fi @@ -633,7 +630,7 @@ commands: export FEATURES="--all-features" fi - cd << parameters.directory >> && cargo build $PROFILE $TARGET $PACKAGE $FEATURES + cd << parameters.directory >> && cargo build $PROFILE $TARGET $PACKAGE $FEATURES $BINARY no_output_timeout: 30m - when: condition: << parameters.save_cache >> @@ -649,7 +646,7 @@ jobs: rust-build-binary: description: "Build a Rust workspace with target directory caching" docker: - - image: <> + - image: <> resource_class: xlarge parameters: directory: @@ -702,7 +699,7 @@ jobs: # Build a single Rust binary from a submodule. rust-build-submodule: docker: - - image: <> + - image: <> resource_class: xlarge parameters: directory: @@ -812,7 +809,7 @@ jobs: type: string default: 30m docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -914,7 +911,7 @@ jobs: path: ./op-acceptance-tests/logs initialize: docker: - - image: <> + - image: <> resource_class: large steps: - run: @@ -922,7 +919,7 @@ jobs: cannon-go-lint-and-test: docker: - - image: <> + - image: <> resource_class: xlarge parameters: skip_slow_tests: @@ -987,7 +984,7 @@ jobs: contracts-bedrock-build: docker: - - image: <> + - image: <> resource_class: 2xlarge parameters: build_args: @@ -1033,7 +1030,7 @@ jobs: check-kontrol-build: docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -1095,7 +1092,7 @@ jobs: type: string default: medium machine: - image: <> + image: <> resource_class: "<>" docker_layer_caching: true # we rely on this for faster builds, and actively warm it up for builds with common stages steps: @@ -1252,7 +1249,7 @@ jobs: # Verify newly published images (built on AMD machine) will run on ARM check-cross-platform: docker: - - image: <> + - image: <> resource_class: arm.medium parameters: 
registry: @@ -1297,7 +1294,7 @@ jobs: contracts-bedrock-tests: circleci_ip_ranges: true docker: - - image: <> + - image: <> resource_class: 2xlarge parameters: test_list: @@ -1389,7 +1386,7 @@ jobs: contracts-bedrock-heavy-fuzz-nightly: circleci_ip_ranges: true docker: - - image: <> + - image: <> resource_class: 2xlarge steps: - utils/checkout-with-mise: @@ -1447,7 +1444,7 @@ jobs: ai-contracts-test: circleci_ip_ranges: true docker: - - image: <> + - image: <> resource_class: medium steps: - utils/checkout-with-mise: @@ -1478,7 +1475,7 @@ jobs: contracts-bedrock-coverage: circleci_ip_ranges: true docker: - - image: <> + - image: <> resource_class: 2xlarge parameters: test_timeout: @@ -1589,7 +1586,7 @@ jobs: type: string default: "" docker: - - image: <> + - image: <> resource_class: 2xlarge steps: - utils/checkout-with-mise: @@ -1691,7 +1688,7 @@ jobs: contracts-bedrock-checks: docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -1738,7 +1735,7 @@ jobs: contracts-bedrock-checks-fast: docker: - - image: <> + - image: <> resource_class: 2xlarge steps: - utils/checkout-with-mise: @@ -1766,7 +1763,7 @@ jobs: type: boolean default: true machine: - image: <> + image: <> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -1792,7 +1789,7 @@ jobs: type: boolean default: false docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -1825,7 +1822,7 @@ jobs: go-lint: docker: - - image: <> + - image: <> resource_class: large steps: - utils/checkout-with-mise: @@ -1842,9 +1839,31 @@ jobs: paths: - "/home/circleci/.cache/golangci-lint" + go-binaries-for-sysgo: + docker: + - image: <> + resource_class: large + steps: + - utils/checkout-with-mise: + checkout-method: blobless + enable-mise-cache: true + - go-restore-cache: + namespace: sysgo-go-binaries + - run: + name: Build Go binaries for sysgo + command: make cannon op-program + - go-save-cache: + namespace: sysgo-go-binaries + - 
persist_to_workspace: + root: . + paths: + - "cannon/bin" + - "op-program/bin/op-program" + - "op-program/bin/op-program-client" + check-op-geth-version: docker: - - image: <> + - image: <> resource_class: small steps: - utils/checkout-with-mise: @@ -1886,7 +1905,7 @@ jobs: type: integer default: 1 docker: - - image: <> + - image: <> resource_class: 2xlarge circleci_ip_ranges: true parallelism: <> @@ -2029,7 +2048,7 @@ jobs: default: "" resource_class: xlarge docker: - - image: <> + - image: <> circleci_ip_ranges: true steps: - utils/checkout-with-mise: @@ -2055,7 +2074,7 @@ jobs: name: Persist schedule name into env var command: | echo 'export CIRCLECI_PIPELINE_SCHEDULE_NAME="<< pipeline.schedule.name >>"' >> $BASH_ENV - echo 'export CIRCLECI_PARAMETERS_SYNC_TEST_OP_NODE_DISPATCH="<< pipeline.parameters.sync_test_op_node_dispatch >>"' >> $BASH_ENV + echo 'export CIRCLECI_PARAMETERS_SYNC_TEST_OP_NODE_DISPATCH="<< pipeline.parameters.c-sync_test_op_node_dispatch >>"' >> $BASH_ENV # Run the acceptance tests - run: name: Run acceptance tests (gate=<>) @@ -2127,7 +2146,7 @@ jobs: type: string default: 30m docker: - - image: <> + - image: <> circleci_ip_ranges: true resource_class: 2xlarge+ steps: @@ -2138,15 +2157,15 @@ jobs: at: . # Build kona-node for the acceptance tests. This automatically gets kona from the cache. 
- rust-build: - directory: kona + directory: rust profile: "release" - run: name: Configure Rust binary paths (sysgo) command: | ROOT_DIR="$(pwd)" BIN_DIR="$ROOT_DIR/.circleci-cache/rust-binaries" - echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/kona/target/release/kona-node" >> "$BASH_ENV" - echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/kona/target/release/kona-supervisor" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/rust/target/release/kona-node" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/rust/target/release/kona-supervisor" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_OP_RBUILDER=$BIN_DIR/op-rbuilder" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_ROLLUP_BOOST=$BIN_DIR/rollup-boost" >> "$BASH_ENV" # Restore cached Go modules @@ -2237,7 +2256,7 @@ jobs: machine: image: ubuntu-2404:current resource_class: xlarge - parallelism: << pipeline.parameters.flake-shake-workers >> + parallelism: << pipeline.parameters.c-flake-shake-workers >> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -2269,7 +2288,7 @@ jobs: - run: name: Calculate iterations for worker command: | - bash ./op-acceptance-tests/scripts/ci_flake_shake_calc_iterations.sh << pipeline.parameters.flake-shake-iterations >> + bash ./op-acceptance-tests/scripts/ci_flake_shake_calc_iterations.sh << pipeline.parameters.c-flake-shake-iterations >> - run: name: Run flake-shake iterations no_output_timeout: 3h @@ -2283,6 +2302,7 @@ jobs: --testdir tests \ --flake-shake \ --flake-shake-iterations "$FLAKE_SHAKE_ITERATIONS" \ + --log.level debug \ --orchestrator sysgo \ --logdir "./$OUTPUT_DIR" - persist_to_workspace: @@ -2429,7 +2449,7 @@ jobs: sanitize-op-program: docker: - - image: <> + - image: <> resource_class: large steps: - utils/checkout-with-mise: @@ -2451,82 +2471,28 @@ jobs: command: make sanitize-program GUEST_PROGRAM=../op-program/bin/op-program-client64.elf working_directory: cannon - cannon-prestate-quick: - docker: - - 
image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - enable-mise-cache: true - - restore_cache: - name: Restore cannon prestate cache - key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} - - run: - name: Build prestates - command: make cannon-prestates - - save_cache: - key: cannon-prestate-{{ checksum "./cannon/bin/cannon" }}-{{ checksum "op-program/bin/op-program-client.elf" }} - name: Save Cannon prestate to cache - paths: - - "op-program/bin/prestate*.bin.gz" - - "op-program/bin/meta*.json" - - "op-program/bin/prestate-proof*.json" - - persist_to_workspace: - root: . - paths: - - "op-program/bin/prestate*" - - "op-program/bin/meta*" - - "op-program/bin/op-program" - - "op-program/bin/op-program-client" - - "cannon/bin" - cannon-prestate: - docker: - - image: <> + machine: + docker_layer_caching: true # we rely on this for faster builds, and actively warm it up for builds with common stages steps: - utils/checkout-with-mise: checkout-method: blobless enable-mise-cache: true - - setup_remote_docker - run: name: Build prestates - command: make reproducible-prestate + command: make -j reproducible-prestate - persist_to_workspace: root: . 
paths: - "op-program/bin/prestate*" - "op-program/bin/meta*" - - cannon-kona-prestate: - machine: - docker_layer_caching: true # we rely on this for faster builds, and actively warm it up for builds with common stages - steps: - - utils/checkout-with-mise: - checkout-method: blobless - enable-mise-cache: true - - restore_cache: - name: Restore kona cache - key: kona-prestate-{{ checksum "./kona/justfile" }}-{{ checksum "./kona/.config/cannon_tag" }} - - run: - name: Build kona prestates - command: just build-prestates - working_directory: kona - - save_cache: - key: kona-prestate-{{ checksum "./kona/justfile" }}-{{ checksum "./kona/.config/cannon_tag" }} - name: Save Kona to cache - paths: - - "kona/prestate-artifacts-*/" - - persist_to_workspace: - root: . - paths: - - "kona/prestate-artifacts-*/*" + - "rust/kona/prestate-artifacts-*/" # Aggregator job - allows downstream jobs to depend on a single job instead of listing all build jobs. rust-binaries-for-sysgo: docker: - - image: <> + - image: <> resource_class: small steps: - run: @@ -2539,7 +2505,7 @@ jobs: publish-cannon-prestates: resource_class: medium docker: - - image: <> + - image: <> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -2617,7 +2583,7 @@ jobs: cannon-stf-verify: docker: - - image: <> + - image: <> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -2678,14 +2644,14 @@ jobs: bedrock-go-tests: # just a helper, that depends on all the actual test jobs docker: - - image: <> + - image: <> resource_class: medium steps: - run: echo Done analyze-op-program-client: docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -2700,7 +2666,7 @@ jobs: op-program-compat: docker: - - image: <> + - image: <> resource_class: large steps: - utils/checkout-with-mise: @@ -2714,7 +2680,7 @@ jobs: check-generated-mocks-op-node: docker: - - image: <> + - image: <> resource_class: large steps: - utils/checkout-with-mise: @@ -2728,7 +2694,7 @@ jobs: 
check-generated-mocks-op-service: docker: - - image: <> + - image: <> resource_class: large steps: - utils/checkout-with-mise: @@ -2742,7 +2708,7 @@ jobs: op-deployer-forge-version: docker: - - image: <> + - image: <> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -2753,7 +2719,7 @@ jobs: kontrol-tests: docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -2787,7 +2753,7 @@ jobs: publish-contract-artifacts: docker: - - image: <> + - image: <> resource_class: 2xlarge steps: - gcp-cli/install @@ -2827,7 +2793,7 @@ jobs: default: .goreleaser.yaml type: string docker: - - image: <> + - image: <> resource_class: large steps: - setup_remote_docker: @@ -2850,7 +2816,7 @@ jobs: diff-fetcher-forge-artifacts: docker: - - image: <> + - image: <> resource_class: medium steps: - utils/checkout-with-mise: @@ -2901,7 +2867,7 @@ jobs: steps: - github-cli/install - utils/github-event-handler-setup: - github_event_base64: << pipeline.parameters.github-event-base64 >> + github_event_base64: << pipeline.parameters.c-github-event-base64 >> env_prefix: "github_" - run: name: Close issue if label is added @@ -2917,7 +2883,7 @@ jobs: devnet-metrics-collect-authorship: docker: - - image: <> + - image: <> steps: - utils/checkout-with-mise: checkout-method: blobless @@ -2977,10 +2943,10 @@ workflows: - or: - equal: ["webhook", << pipeline.trigger_source >>] - and: - - equal: [true, <>] + - equal: [true, <>] - equal: ["api", << pipeline.trigger_source >>] - equal: [ - << pipeline.parameters.github-event-type >>, + << pipeline.parameters.c-github-event-type >>, "__not_set__", ] #this is to prevent triggering this workflow as the default value is always set for main_dispatch jobs: @@ -3132,7 +3098,7 @@ workflows: - circleci-repo-readonly-authenticated-github-token requires: - contracts-bedrock-build - - cannon-prestate-quick + - cannon-prestate - go-tests: name: go-tests-short parallelism: 12 @@ -3140,7 +3106,8 @@ workflows: 
test_timeout: 20m requires: - contracts-bedrock-build - - cannon-prestate-quick + - cannon-prestate + - go-binaries-for-sysgo context: - circleci-repo-readonly-authenticated-github-token filters: @@ -3158,7 +3125,7 @@ workflows: only: develop # Only runs on develop branch (post-merge) requires: - contracts-bedrock-build - - cannon-prestate-quick + - cannon-prestate context: - circleci-repo-readonly-authenticated-github-token - slack @@ -3191,12 +3158,12 @@ workflows: - circleci-repo-readonly-authenticated-github-token requires: - contracts-bedrock-build - - cannon-prestate-quick: + - cannon-prestate: context: - circleci-repo-readonly-authenticated-github-token - sanitize-op-program: requires: - - cannon-prestate-quick + - cannon-prestate context: - circleci-repo-readonly-authenticated-github-token - check-generated-mocks-op-node: @@ -3226,6 +3193,65 @@ workflows: ignore-dirs: ./packages/contracts-bedrock/lib context: - circleci-repo-readonly-authenticated-github-token + # Acceptance test jobs (formerly in separate acceptance-tests workflow) + - rust-build-binary: &cannon-kona-host + name: cannon-kona-host + directory: rust + profile: "release" + binary: "kona-host" + save_cache: true + context: + - circleci-repo-readonly-authenticated-github-token + - rust-build-binary: &kona-build-release + name: kona-build-release + directory: rust + profile: "release" + features: "default" + save_cache: true + context: + - circleci-repo-readonly-authenticated-github-token + - rust-build-submodule: &rust-build-op-rbuilder + name: rust-build-op-rbuilder + directory: op-rbuilder + binaries: "op-rbuilder" + build_command: cargo build --release -p op-rbuilder --bin op-rbuilder + needs_clang: true + context: + - circleci-repo-readonly-authenticated-github-token + - rust-build-submodule: &rust-build-rollup-boost + name: rust-build-rollup-boost + directory: rollup-boost + binaries: "rollup-boost" + build_command: cargo build --release -p rollup-boost --bin rollup-boost + context: + - 
circleci-repo-readonly-authenticated-github-token + - rust-binaries-for-sysgo: + requires: + - kona-build-release + - rust-build-op-rbuilder + - rust-build-rollup-boost + - go-binaries-for-sysgo + # IN-MEMORY (all) + - op-acceptance-tests: + name: memory-all + gate: "" # Empty gate = gateless mode + no_output_timeout: 120m # Allow longer runs for memory-all gate + context: + - circleci-repo-readonly-authenticated-github-token + - slack + - discord + requires: + - contracts-bedrock-build + - cannon-prestate + - cannon-kona-host + - rust-binaries-for-sysgo + - go-binaries-for-sysgo + # Generate flaky test report + - generate-flaky-report: + name: generate-flaky-tests-report + context: + - circleci-repo-readonly-authenticated-github-token + - circleci-api-token go-release-op-deployer: jobs: @@ -3373,7 +3399,7 @@ workflows: - equal: [ true, - <>, + <>, ] - equal: ["api", << pipeline.trigger_source >>] jobs: @@ -3388,7 +3414,7 @@ workflows: - equal: ["develop", <>] - equal: ["webhook", << pipeline.trigger_source >>] - and: - - equal: [true, <>] + - equal: [true, <>] - equal: ["api", << pipeline.trigger_source >>] jobs: - cannon-prestate: @@ -3432,7 +3458,7 @@ workflows: - equal: ["develop", <>] - equal: ["webhook", << pipeline.trigger_source >>] - and: - - equal: [true, <>] + - equal: [true, <>] - equal: ["api", << pipeline.trigger_source >>] jobs: - kontrol-tests: @@ -3445,7 +3471,7 @@ workflows: when: or: - equal: [build_four_hours, <>] - - equal: [true, << pipeline.parameters.cannon_full_test_dispatch >>] + - equal: [true, << pipeline.parameters.c-cannon_full_test_dispatch >>] jobs: - contracts-bedrock-build: build_args: --deny-warnings --skip test @@ -3466,7 +3492,7 @@ workflows: or: - equal: [build_daily, <>] # Trigger on manual triggers if explicitly requested - - equal: [true, << pipeline.parameters.docker_publish_dispatch >>] + - equal: [true, << pipeline.parameters.c-docker_publish_dispatch >>] jobs: - contracts-bedrock-build: context: @@ -3502,38 +3528,25 @@ 
workflows: or: - equal: [build_daily, << pipeline.schedule.name >>] - and: - - equal: [true, << pipeline.parameters.flake-shake-dispatch >>] + - equal: [true, << pipeline.parameters.c-flake-shake-dispatch >>] - equal: ["api", << pipeline.trigger_source >>] jobs: - contracts-bedrock-build: build_args: --skip test context: - circleci-repo-readonly-authenticated-github-token - - cannon-prestate-quick: + - cannon-prestate: context: - circleci-repo-readonly-authenticated-github-token - rust-build-binary: name: kona-build-release - directory: kona + directory: rust needs_clang: true profile: "release" context: - circleci-repo-readonly-authenticated-github-token - - rust-build-submodule: &rust-build-op-rbuilder - name: rust-build-op-rbuilder - directory: op-rbuilder - binaries: "op-rbuilder" - build_command: cargo build --release -p op-rbuilder --bin op-rbuilder - needs_clang: true - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-submodule: &rust-build-rollup-boost - name: rust-build-rollup-boost - directory: rollup-boost - binaries: "rollup-boost" - build_command: cargo build --release -p rollup-boost --bin rollup-boost - context: - - circleci-repo-readonly-authenticated-github-token + - rust-build-submodule: *rust-build-op-rbuilder + - rust-build-submodule: *rust-build-rollup-boost - rust-binaries-for-sysgo: requires: - kona-build-release @@ -3544,7 +3557,7 @@ workflows: - circleci-repo-readonly-authenticated-github-token requires: - contracts-bedrock-build - - cannon-prestate-quick + - cannon-prestate - rust-binaries-for-sysgo - op-acceptance-tests-flake-shake-report: requires: @@ -3563,7 +3576,7 @@ workflows: or: - equal: [build_daily, <>] # Trigger on manual triggers if explicitly requested - - equal: [true, << pipeline.parameters.reproducibility_dispatch >>] + - equal: [true, << pipeline.parameters.c-reproducibility_dispatch >>] jobs: - preimage-reproducibility: context: @@ -3575,7 +3588,7 @@ workflows: or: - equal: [build_daily, <>] # 
Trigger on manual triggers if explicitly requested - - equal: [true, << pipeline.parameters.stale_check_dispatch >>] + - equal: [true, << pipeline.parameters.c-stale_check_dispatch >>] jobs: - stale-check: context: @@ -3586,13 +3599,13 @@ workflows: or: - equal: [build_daily, <>] # Trigger on manual triggers if explicitly requested - - equal: [true, << pipeline.parameters.sync_test_op_node_dispatch >>] + - equal: [true, << pipeline.parameters.c-sync_test_op_node_dispatch >>] jobs: - contracts-bedrock-build: # needed for sysgo tests build_args: --skip test context: - circleci-repo-readonly-authenticated-github-token - - cannon-prestate-quick: # needed for sysgo tests + - cannon-prestate: # needed for sysgo tests context: - circleci-repo-readonly-authenticated-github-token - op-acceptance-sync-tests-docker: @@ -3604,7 +3617,7 @@ workflows: - slack requires: - contracts-bedrock-build - - cannon-prestate-quick + - cannon-prestate matrix: parameters: network_preset: @@ -3621,84 +3634,18 @@ workflows: when: or: - equal: [build_daily, <>] - - equal: [true, << pipeline.parameters.heavy_fuzz_dispatch >>] + - equal: [true, << pipeline.parameters.c-heavy_fuzz_dispatch >>] jobs: - contracts-bedrock-heavy-fuzz-nightly: context: - slack - circleci-repo-readonly-authenticated-github-token - - # Acceptance tests - acceptance-tests: - when: - or: - - equal: ["webhook", << pipeline.trigger_source >>] - # Manual dispatch - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - - contracts-bedrock-build: # needed for sysgo tests - build_args: --skip test - context: - - circleci-repo-readonly-authenticated-github-token - - cannon-prestate-quick: # needed for sysgo tests - context: - - circleci-repo-readonly-authenticated-github-token - - cannon-kona-prestate: # needed for sysgo tests (if any package is in-memory) - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-binary: &cannon-kona-host - name: cannon-kona-host - 
directory: kona - profile: "release" - binary: "kona-host" - save_cache: true - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-binary: &kona-build-release - name: kona-build-release - directory: kona - profile: "release" - features: "default" - save_cache: true - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-submodule: *rust-build-op-rbuilder - - rust-build-submodule: *rust-build-rollup-boost - - rust-binaries-for-sysgo: - requires: - - kona-build-release - - rust-build-op-rbuilder - - rust-build-rollup-boost - # IN-MEMORY (all) - - op-acceptance-tests: - name: memory-all - gate: "" # Empty gate = gateless mode - no_output_timeout: 120m # Allow longer runs for memory-all gate - context: - - circleci-repo-readonly-authenticated-github-token - - slack - - discord - requires: - - contracts-bedrock-build - - cannon-prestate-quick - - cannon-kona-prestate - - cannon-kona-host - - rust-binaries-for-sysgo - # Generate flaky test report - - generate-flaky-report: - name: generate-flaky-tests-report - context: - - circleci-repo-readonly-authenticated-github-token - - circleci-api-token - close-issue-workflow: when: and: - equal: [<< pipeline.trigger_source >>, "api"] - - equal: [<< pipeline.parameters.github-event-type >>, "pull_request"] - - equal: [<< pipeline.parameters.github-event-action >>, "labeled"] + - equal: [<< pipeline.parameters.c-github-event-type >>, "pull_request"] + - equal: [<< pipeline.parameters.c-github-event-action >>, "labeled"] jobs: - close-issue: label_name: "auto-close-trivial-contribution" @@ -3714,7 +3661,7 @@ workflows: or: - equal: [<< pipeline.trigger_source >>, "webhook"] - and: - - equal: [true, << pipeline.parameters.devnet-metrics-collect >>] + - equal: [true, << pipeline.parameters.c-devnet-metrics-collect >>] - equal: [<< pipeline.trigger_source >>, "api"] jobs: - devnet-metrics-collect-authorship: @@ -3726,7 +3673,7 @@ workflows: when: or: - equal: [build_mon_thu, <>] - - 
equal: [true, << pipeline.parameters.ai_contracts_test_dispatch >>] + - equal: [true, << pipeline.parameters.c-ai_contracts_test_dispatch >>] jobs: - ai-contracts-test: context: diff --git a/.circleci/continue/op-alloy.yml b/.circleci/continue/op-alloy.yml deleted file mode 100644 index 7ec0b5077e4..00000000000 --- a/.circleci/continue/op-alloy.yml +++ /dev/null @@ -1,233 +0,0 @@ -version: 2.1 - -# Op-Alloy CI Continuation Configuration -# This file contains ONLY Op-Alloy-specific jobs and workflows. -# It is merged with main.yml and rust-ci.yml when op-alloy/** changes are detected. -# Shared orbs, commands, and jobs come from main.yml and rust-ci.yml during merge. - -parameters: - default_docker_image: - type: string - default: cimg/base:2024.01 - op_alloy_dispatch: - type: boolean - default: false - -# ============================================================================ -# Op-Alloy Jobs (project-specific only; shared jobs are in rust-ci.yml) -# ============================================================================ -jobs: - # Op-Alloy Rust CI - Examples (from ci.yml) - op-alloy-examples: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: &op-alloy-examples-cache - directory: op-alloy - prefix: op-alloy-examples - - rust-install-toolchain: - channel: nightly - toolchain_version: nightly - - run: - name: Build and run examples - working_directory: op-alloy - no_output_timeout: 40m - command: | - just examples - - rust-save-build-cache: *op-alloy-examples-cache - - # Op-Alloy Rust CI - Cfg Check (from ci.yml) - op-alloy-cfg-check: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: &op-alloy-cfg-check-cache - directory: op-alloy - prefix: op-alloy-cfg-check - - rust-install-toolchain: - channel: nightly - toolchain_version: nightly - - run: - name: Run 
cfg check - working_directory: op-alloy - no_output_timeout: 40m - command: | - just check - - rust-save-build-cache: *op-alloy-cfg-check-cache - -### END OP-ALLOY JOBS ### - -# ============================================================================ -# Op-Alloy Workflow (migrated from op-alloy/.github/workflows) -# ============================================================================ -workflows: - op-alloy: - when: - or: - - equal: ["webhook", << pipeline.trigger_source >>] - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - # Build (shared job from main.yml) - - rust-build-binary: - name: op-alloy-build - directory: op-alloy - context: - - circleci-repo-readonly-authenticated-github-token - - # Rust CI jobs (shared job) - - rust-ci-cargo-tests: - name: op-alloy-test-stable - directory: op-alloy - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-test-stable-no-default - directory: op-alloy - flags: "--no-default-features" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-test-stable-all-features - directory: op-alloy - flags: "--all-features" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-test-nightly - directory: op-alloy - rust_version: "nightly" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-test-nightly-no-default - directory: op-alloy - rust_version: "nightly" - flags: "--no-default-features" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-test-nightly-all-features - directory: op-alloy - rust_version: "nightly" - flags: "--all-features" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-build-msrv - directory: op-alloy - rust_version: "1.88" - context: - - 
circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-alloy-build-msrv-no-default - directory: op-alloy - rust_version: "1.88" - flags: "--no-default-features" - context: - - circleci-repo-readonly-authenticated-github-token - - # Doc tests (shared job) - - rust-ci-doctest: - name: op-alloy-cargo-doctest - directory: op-alloy - command: "just test-docs" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-hack-build: - name: op-alloy-wasm-unknown - directory: op-alloy - target: wasm32-unknown-unknown - flags: "--workspace --exclude op-alloy-network --exclude op-alloy-rpc-jsonrpsee --exclude op-alloy-provider" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-hack-build: - name: op-alloy-wasm-wasi - directory: op-alloy - target: wasm32-wasip1 - flags: "--workspace --exclude op-alloy-network --exclude op-alloy-rpc-types --exclude op-alloy-provider --exclude op-alloy-rpc-jsonrpsee" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-check-no-std: - name: op-alloy-check-no-std - directory: op-alloy - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-hack: - name: op-alloy-feature-checks - directory: op-alloy - flags: "--feature-powerset --depth 1" - context: - - circleci-repo-readonly-authenticated-github-token - - # Clippy (shared job) - - rust-ci-clippy: - name: op-alloy-clippy - directory: op-alloy - command: "just clippy" - context: - - circleci-repo-readonly-authenticated-github-token - - # Documentation (shared job) - - rust-ci-docs: - name: op-alloy-docs - directory: op-alloy - command: "cargo doc --workspace --all-features --no-deps --document-private-items" - rustdocflags: "--cfg docsrs -D warnings --show-type-layout --generate-link-to-definition --enable-index-page -Zunstable-options" - context: - - circleci-repo-readonly-authenticated-github-token - - # Formatting (shared job) - - rust-ci-fmt: - name: 
op-alloy-fmt - directory: op-alloy - command: "just fmt-check" - context: - - circleci-repo-readonly-authenticated-github-token - - - op-alloy-examples: - context: - - circleci-repo-readonly-authenticated-github-token - - op-alloy-cfg-check: - context: - - circleci-repo-readonly-authenticated-github-token - - # Lint jobs (shared) - - rust-ci-typos: - name: op-alloy-typos - directory: op-alloy - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-zepter: - name: op-alloy-zepter - directory: op-alloy - command: "time zepter run check" - context: - - circleci-repo-readonly-authenticated-github-token - - # Deps jobs (shared) - - rust-ci-deny: - name: op-alloy-cargo-deny - directory: op-alloy - context: - - circleci-repo-readonly-authenticated-github-token - - # Feature powerset (from ci.yml) - runs after initial checks - - rust-ci-cargo-hack: - name: op-alloy-cargo-hack - directory: op-alloy - flags: "--feature-powerset --no-dev-deps --workspace" - parallelism: 4 - context: - - circleci-repo-readonly-authenticated-github-token - requires: - - op-alloy-test-stable - - op-alloy-clippy - - op-alloy-fmt diff --git a/.circleci/continue/op-reth.yml b/.circleci/continue/op-reth.yml deleted file mode 100644 index 82bab694aca..00000000000 --- a/.circleci/continue/op-reth.yml +++ /dev/null @@ -1,204 +0,0 @@ -version: 2.1 - -# OP-Reth CI Continuation Configuration -# This file contains ONLY op-reth-specific jobs and workflows. -# It is merged with main.yml and rust-ci.yml when op-reth/** changes are detected. -# Shared orbs, commands, and jobs come from main.yml and rust-ci.yml during merge. 
- -parameters: - # Required parameters (also in main.yml, merged during continuation) - default_docker_image: - type: string - default: cimg/base:2024.01 - base_image: - type: string - default: default - op_reth_dispatch: - type: boolean - default: false - go-cache-version: - type: string - default: "v0.0" - -# ============================================================================ -# OP-RETH JOBS (project-specific only; shared jobs are in rust-ci.yml) -# ============================================================================ -# Note: Commands like rust-prepare, rust-restore-build-cache, rust-save-build-cache, -# rust-prepare-and-restore-cache, rust-install-toolchain, install-cargo-binstall, -# and notify-failures-on-develop are imported from main.yml -jobs: - # OP-Reth compact codec backwards compatibility - op-reth-compact-codec: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - rust-prepare-and-restore-cache: &op-reth-compact-cache - directory: op-reth - prefix: op-reth-compact - profile: debug - - run: - name: Generate compact vectors from base - working_directory: op-reth - command: | - # Use develop as the base branch - BASE_BRANCH="develop" - - # Save current state - git stash || true - git fetch origin $BASE_BRANCH - git checkout origin/$BASE_BRANCH - - # Generate vectors on base - cargo run --bin op-reth --features "dev" --manifest-path bin/Cargo.toml -- test-vectors compact --write - - # Return to PR branch - git checkout - - git stash pop || true - - run: - name: Read compact vectors on PR branch - working_directory: op-reth - command: | - cargo run --bin op-reth --features "dev" --manifest-path bin/Cargo.toml -- test-vectors compact --read - - rust-save-build-cache: *op-reth-compact-cache - - # OP-Reth Windows cross-compile check - op-reth-windows-check: - docker: - - image: <> - resource_class: xlarge - steps: - - utils/checkout-with-mise: - checkout-method: blobless - - 
rust-prepare-and-restore-cache: &op-reth-windows-cache - directory: op-reth - prefix: op-reth-windows - profile: debug - - rust-install-toolchain: - target: x86_64-pc-windows-gnu - - run: - name: Install mingw-w64 - command: sudo apt-get update && sudo apt-get install -y mingw-w64 - - run: - name: Check OP-Reth Windows build - working_directory: op-reth - no_output_timeout: 40m - command: cargo check -p op-reth --target x86_64-pc-windows-gnu - - rust-save-build-cache: *op-reth-windows-cache - -# ============================================================================ -# OP-RETH WORKFLOWS -# ============================================================================ -workflows: - # OP-Reth main CI workflow (runs on push/PR when op-reth/** changes) - op-reth-ci: - when: - or: - - equal: ["webhook", << pipeline.trigger_source >>] - - and: - - equal: [true, <>] - - equal: ["api", << pipeline.trigger_source >>] - jobs: - # Lint jobs (shared) - - rust-ci-clippy: - name: op-reth-clippy - directory: op-reth - command: "cargo clippy --workspace --lib --examples --tests --benches --all-features --locked" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-fmt: - name: op-reth-fmt - directory: op-reth - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-typos: - name: op-reth-typos - directory: op-reth - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-deny: - name: op-reth-deny - directory: op-reth - command: "cargo deny --all-features check all" - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-zepter: - name: op-reth-feature-propagation - directory: op-reth - command: | - cargo metadata --format-version=1 --locked > /dev/null - zepter run check - context: - - circleci-repo-readonly-authenticated-github-token - - # Crate checks (parallelized, shared job) - - rust-ci-cargo-hack: - name: op-reth-crate-checks - directory: op-reth - flags: "--workspace" - parallelism: 3 - 
context: - - circleci-repo-readonly-authenticated-github-token - - # Build checks - - rust-build-binary: - name: op-reth-msrv - directory: op-reth - binary: op-reth - toolchain: "1.88.0" - context: - - circleci-repo-readonly-authenticated-github-token - - op-reth-windows-check: - context: - - circleci-repo-readonly-authenticated-github-token - - rust-build-binary: - name: op-reth-build-release - directory: op-reth - profile: release - binary: op-reth - context: - - circleci-repo-readonly-authenticated-github-token - - # Test jobs (shared) - stable and edge storage variants - - rust-ci-cargo-tests: - name: op-reth-unit-tests-stable - directory: op-reth - cache_profile: debug - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-reth-unit-tests-edge - directory: op-reth - flags: "edge" - cache_profile: debug - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-cargo-tests: - name: op-reth-integration-tests - directory: op-reth - command: test-integration - cache_profile: debug - context: - - circleci-repo-readonly-authenticated-github-token - - rust-ci-doctest: - name: op-reth-doc-test - directory: op-reth - command: "cargo test --doc --workspace --all-features" - context: - - circleci-repo-readonly-authenticated-github-token - - # Codec compatibility - - op-reth-compact-codec: - context: - - circleci-repo-readonly-authenticated-github-token - - # Udeps (slower, depends on other jobs) - - rust-ci-udeps: - name: op-reth-udeps - directory: op-reth - context: - - circleci-repo-readonly-authenticated-github-token - requires: - - op-reth-clippy diff --git a/.circleci/continue/rust-ci.yml b/.circleci/continue/rust-ci.yml index f47b94d1612..bdd3b57ec56 100644 --- a/.circleci/continue/rust-ci.yml +++ b/.circleci/continue/rust-ci.yml @@ -1,18 +1,30 @@ version: 2.1 -# Shared Rust CI Commands and Jobs -# This file contains common commands and parameterized jobs used by multiple Rust subproject configs. 
-# Each subproject config references these jobs from its workflows section. -# No workflows are defined here - workflows remain in each project's config. +# Unified Rust CI Configuration +# This file contains all Rust CI commands, parameterized jobs, crate-specific jobs, and workflows. orbs: - utils: ethereum-optimism/circleci-utils@1.0.23 + utils: ethereum-optimism/circleci-utils@1.0.24 + gcp-cli: circleci/gcp-cli@3.0.1 + codecov: codecov/codecov@5.0.3 parameters: - default_docker_image: + c-default_docker_image: type: string default: cimg/base:2024.01 + c-base_image: + type: string + default: default + c-rust_ci_dispatch: + type: boolean + default: false + c-go-cache-version: + type: string + default: "v0.0" +# ============================================================================ +# COMMANDS +# ============================================================================ commands: install-cargo-binstall: description: "Install cargo-binstall for fast binary installations" @@ -72,8 +84,8 @@ commands: command: | ROOT_DIR="$(pwd)" BIN_DIR="$ROOT_DIR/.circleci-cache/rust-binaries" - echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/kona/target/release/kona-node" >> "$BASH_ENV" - echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/kona/target/release/kona-supervisor" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_NODE=$ROOT_DIR/rust/target/release/kona-node" >> "$BASH_ENV" + echo "export RUST_BINARY_PATH_KONA_SUPERVISOR=$ROOT_DIR/rust/target/release/kona-supervisor" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_OP_RBUILDER=$BIN_DIR/op-rbuilder" >> "$BASH_ENV" echo "export RUST_BINARY_PATH_ROLLUP_BOOST=$BIN_DIR/rollup-boost" >> "$BASH_ENV" @@ -250,7 +262,59 @@ commands: profile: << parameters.profile >> features: << parameters.features >> + # Kona-specific commands + install-zstd: + description: "Install zstd compression utility" + steps: + - run: + name: Install zstd + command: sudo apt-get update && sudo apt-get install -y zstd=1.4.8* + + 
gcp-oidc-authenticate: + description: "Authenticate with GCP using a CircleCI OIDC token." + parameters: + project_id: + type: env_var_name + default: GCP_PROJECT_ID + workload_identity_pool_id: + type: env_var_name + default: GCP_WIP_ID + workload_identity_pool_provider_id: + type: env_var_name + default: GCP_WIP_PROVIDER_ID + service_account_email: + type: env_var_name + default: GCP_SERVICE_ACCOUNT_EMAIL + gcp_cred_config_file_path: + type: string + default: /home/circleci/gcp_cred_config.json + oidc_token_file_path: + type: string + default: /home/circleci/oidc_token.json + steps: + - run: + name: "Create OIDC credential configuration" + command: | + echo $CIRCLE_OIDC_TOKEN > << parameters.oidc_token_file_path >> + gcloud iam workload-identity-pools create-cred-config \ + "projects/${<< parameters.project_id >>}/locations/global/workloadIdentityPools/${<< parameters.workload_identity_pool_id >>}/providers/${<< parameters.workload_identity_pool_provider_id >>}"\ + --output-file="<< parameters.gcp_cred_config_file_path >>" \ + --service-account="${<< parameters.service_account_email >>}" \ + --credential-source-file=<< parameters.oidc_token_file_path >> + - run: + name: "Authenticate with GCP using OIDC" + command: | + gcloud auth login --brief --cred-file "<< parameters.gcp_cred_config_file_path >>" + echo "export GOOGLE_APPLICATION_CREDENTIALS='<< parameters.gcp_cred_config_file_path >>'" | tee -a "$BASH_ENV" + +# ============================================================================ +# JOBS +# ============================================================================ jobs: + # -------------------------------------------------------------------------- + # Shared parameterized jobs (reusable templates) + # -------------------------------------------------------------------------- + # Shared format check job rust-ci-fmt: parameters: @@ -262,7 +326,7 @@ jobs: type: string default: "cargo +nightly fmt --all --check" docker: - - image: <> + - image: <> 
resource_class: medium steps: - utils/checkout-with-mise: @@ -295,7 +359,7 @@ jobs: type: string default: "stable" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -328,7 +392,7 @@ jobs: type: string default: "cargo deny check" docker: - - image: <> + - image: <> resource_class: medium steps: - utils/checkout-with-mise: @@ -358,7 +422,7 @@ jobs: type: string default: "zepter run check" docker: - - image: <> + - image: <> resource_class: medium steps: - utils/checkout-with-mise: @@ -385,7 +449,7 @@ jobs: description: "Directory containing the Cargo workspace" type: string docker: - - image: <> + - image: <> resource_class: medium steps: - utils/checkout-with-mise: @@ -418,8 +482,12 @@ jobs: description: "Rust toolchain version to use" type: string default: "stable" + command: + description: "Command to run for no_std check" + type: string + default: "just check-no-std" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -434,7 +502,7 @@ jobs: name: Check no_std compatibility working_directory: <> no_output_timeout: 30m - command: just check-no-std + command: <> - rust-save-build-cache: *no-std-cache-args # Shared documentation build job @@ -452,7 +520,7 @@ jobs: type: string default: "--cfg docsrs -D warnings --show-type-layout --generate-link-to-definition -Zunstable-options" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -484,7 +552,7 @@ jobs: type: string default: "cargo test --workspace --doc" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -522,7 +590,7 @@ jobs: type: string default: "release" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -551,8 +619,12 @@ jobs: directory: description: "Directory containing the Cargo workspace" type: string + command: + description: "Command to run for unused dependency check" + type: 
string + default: "just check-udeps" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -573,7 +645,7 @@ jobs: name: Check unused dependencies working_directory: <> no_output_timeout: 40m - command: just check-udeps + command: <> - rust-save-build-cache: *udeps-cache-args # Shared cargo hack build job (cross-compile targets like WASM) @@ -590,7 +662,7 @@ jobs: type: string default: "--workspace" docker: - - image: <> + - image: <> resource_class: xlarge steps: - utils/checkout-with-mise: @@ -598,8 +670,6 @@ jobs: - rust-prepare-and-restore-cache: &hack-build-cache-args directory: <> prefix: <>-hack-build - - rust-install-toolchain: - target: <> - install-cargo-binstall - run: name: Install cargo-hack @@ -609,33 +679,26 @@ jobs: name: Build for <> working_directory: <> no_output_timeout: 40m - command: cargo hack build --target <> <> + command: rustup target add <> && cargo hack build --target <> <> - rust-save-build-cache: *hack-build-cache-args # Shared cargo hack job rust-ci-cargo-hack: parameters: - directory: - description: "Directory containing the Cargo workspace" - type: string - flags: - description: "Flags appended to cargo hack check (e.g. 
--feature-powerset --depth 2)" - type: string - default: "--feature-powerset --no-dev-deps --workspace" parallelism: description: "Number of parallel nodes (adds --partition automatically when > 1)" type: integer default: 1 docker: - - image: <> + - image: <> resource_class: xlarge parallelism: <> steps: - utils/checkout-with-mise: checkout-method: blobless - rust-prepare-and-restore-cache: &hack-cache-args - directory: <> - prefix: <>-hack + directory: rust + prefix: rust-hack features: "all" - install-cargo-binstall - run: @@ -644,12 +707,602 @@ jobs: command -v cargo-hack >/dev/null || cargo binstall --no-confirm cargo-hack - run: name: Run cargo hack - working_directory: <> + working_directory: rust no_output_timeout: 60m command: | PARTITION_FLAG="" if [ "$CIRCLE_NODE_TOTAL" -gt 1 ]; then - PARTITION_FLAG="--partition $((CIRCLE_NODE_INDEX + 1))/$CIRCLE_NODE_TOTAL" + PARTITION_FLAG="$((CIRCLE_NODE_INDEX + 1))/$CIRCLE_NODE_TOTAL" fi - cargo hack check <> $PARTITION_FLAG + just hack $PARTITION_FLAG - rust-save-build-cache: *hack-cache-args + + # -------------------------------------------------------------------------- + # OP-Reth crate-specific jobs + # -------------------------------------------------------------------------- + + # OP-Reth compact codec backwards compatibility + op-reth-compact-codec: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare-and-restore-cache: &op-reth-compact-cache + directory: rust + prefix: op-reth-compact + profile: debug + - run: + name: Generate compact vectors from base + command: | + # Use develop as the base branch + BASE_BRANCH="develop" + + # Save current state + git stash || true + git fetch origin $BASE_BRANCH + git checkout origin/$BASE_BRANCH + + # Generate vectors on base + cargo run --bin op-reth --features "dev" --manifest-path rust/op-reth/bin/Cargo.toml -- test-vectors compact --write + + # Return to PR branch + git checkout - + git 
stash pop || true + - run: + name: Read compact vectors on PR branch + command: | + cargo run --bin op-reth --features "dev" --manifest-path rust/op-reth/bin/Cargo.toml -- test-vectors compact --read + - rust-save-build-cache: *op-reth-compact-cache + + # OP-Reth Windows cross-compile check + op-reth-windows-check: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare-and-restore-cache: &op-reth-windows-cache + directory: rust + prefix: op-reth-windows + profile: debug + - run: + name: Install mingw-w64 + command: sudo apt-get update && sudo apt-get install -y mingw-w64 + - run: + name: Check OP-Reth Windows build + working_directory: rust + no_output_timeout: 40m + command: just --justfile op-reth/justfile check-windows + - rust-save-build-cache: *op-reth-windows-cache + + # -------------------------------------------------------------------------- + # Op-Alloy crate-specific jobs + # -------------------------------------------------------------------------- + # Op-Alloy cfg check + op-alloy-cfg-check: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare-and-restore-cache: &op-alloy-cfg-check-cache + directory: rust + prefix: op-alloy-cfg-check + - rust-install-toolchain: + channel: nightly + toolchain_version: nightly + - run: + name: Run cfg check + working_directory: rust + no_output_timeout: 40m + command: | + just --justfile op-alloy/Justfile check + - rust-save-build-cache: *op-alloy-cfg-check-cache + + # -------------------------------------------------------------------------- + # Kona crate-specific jobs + # -------------------------------------------------------------------------- + + # Kona Host Client Offline Runs + kona-host-client-offline: + parameters: + machine: + image: <> + docker_layer_caching: true + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless 
+ - install-zstd + - rust-prepare + - install-cargo-binstall + - run: + name: Install cargo-llvm-cov + command: | + command -v cargo-llvm-cov >/dev/null || cargo binstall --no-confirm cargo-llvm-cov + - run: + name: Build cannon + command: | + cd cannon && make + sudo mv ./bin/cannon /usr/local/bin/ + - run: + name: Set run environment + command: | + echo 'export BLOCK_NUMBER=26215604' >> $BASH_ENV + echo 'export L2_CLAIM=0x7415d942f80a34f77d344e4bccb7050f14e593f5ea33669d27ea01dce273d72d' >> $BASH_ENV + echo 'export L2_OUTPUT_ROOT=0xaa34b62993bd888d7a2ad8541935374e39948576fce12aa8179a0aa5b5bc787b' >> $BASH_ENV + echo 'export L2_HEAD=0xf4adf5790bad1ffc9eee315dc163df9102473c5726a2743da27a8a10dc16b473' >> $BASH_ENV + echo 'export L1_HEAD=0x010cfdb22eaa13e8cdfbf66403f8de2a026475e96a6635d53c31f853a0e3ae25' >> $BASH_ENV + echo 'export L2_CHAIN_ID=11155420' >> $BASH_ENV + echo 'export WITNESS_TAR_NAME=holocene-op-sepolia-26215604-witness.tar.zst' >> $BASH_ENV + - run: + name: Decompress witness data + working_directory: rust/kona + command: | + tar --zstd -xvf ./bin/client/testdata/$WITNESS_TAR_NAME -C . 
+ - run: + name: Run host + client offline + working_directory: rust/kona/bin/client + no_output_timeout: 40m + command: | + source <(cargo llvm-cov show-env --export-prefix) + mkdir -p ../../../target + just run-client-cannon-offline \ + $BLOCK_NUMBER \ + $L2_CLAIM \ + $L2_OUTPUT_ROOT \ + $L2_HEAD \ + $L1_HEAD \ + $L2_CHAIN_ID + cargo llvm-cov report --lcov --output-path client_host_cov.lcov + + # Kona Rust CI - Lint (cannon/asterisc targets) + kona-cargo-lint: + parameters: + target: + description: The lint target (native, cannon, asterisc) + type: string + machine: + image: <> + docker_layer_caching: true + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare-and-restore-cache: + directory: rust + features: "all" + - rust-install-toolchain: + components: rustfmt + - run: + name: Run fmt + lint for <> + working_directory: rust/kona + no_output_timeout: 40m + command: | + just lint-<> + + # Kona Build FPVM targets + kona-build-fpvm: + parameters: + target: + description: The build target (cannon-client, asterisc-client) + type: string + machine: + image: <> + docker_layer_caching: true + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare + - run: + name: Build <> + working_directory: rust/kona + no_output_timeout: 40m + command: | + just build-<> + + # Kona Build Benchmarks + kona-cargo-build-benches: + docker: + - image: <> + resource_class: 2xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-prepare-and-restore-cache: &kona-benches-cache + directory: rust + prefix: kona-benches + - run: + name: Build benchmarks + working_directory: rust/kona + no_output_timeout: 40m + command: | + just benches + - rust-save-build-cache: *kona-benches-cache + + # Kona Coverage + kona-coverage: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - 
rust-prepare-and-restore-cache: &kona-coverage-cache + directory: rust + prefix: kona-coverage + version: "1" + - rust-install-toolchain: + components: llvm-tools-preview + - install-cargo-binstall + - run: + name: Install cargo-llvm-cov and nextest + command: | + command -v cargo-llvm-cov >/dev/null || cargo binstall --no-confirm cargo-llvm-cov + command -v cargo-nextest >/dev/null || cargo binstall --locked --no-confirm cargo-nextest + - run: + name: Generate lockfile if needed + working_directory: rust + command: | + [ -f Cargo.lock ] || cargo generate-lockfile + - run: + name: Run coverage + working_directory: rust/kona + no_output_timeout: 40m + command: | + just llvm-cov-tests + - codecov/upload: + disable_search: true + files: rust/kona/lcov.info + flags: unit + - rust-save-build-cache: *kona-coverage-cache + + # Unified Rust Docs Build + rust-docs-build: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - run: + name: Install Node.js and Bun + command: | + curl -fsSL https://deb.nodesource.com/setup_24.x | sudo -E bash - + sudo apt-get install -y nodejs + curl -fsSL https://bun.sh/install | bash + echo 'export BUN_INSTALL="$HOME/.bun"' >> $BASH_ENV + echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> $BASH_ENV + - run: + name: Install dependencies and Playwright browsers + working_directory: rust/docs + command: | + bun i + npx playwright install --with-deps chromium + - run: + name: Build Vocs documentation + working_directory: rust/docs + no_output_timeout: 60m + command: | + bun run build + echo "Vocs Build Complete" + - store_artifacts: + path: rust/docs/docs/docs/dist + destination: rust-docs + + # OP-Reth docs build + op-reth-docs-build: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - run: + name: Install Node.js and Bun + command: | + curl -fsSL https://deb.nodesource.com/setup_24.x | sudo -E bash - + sudo apt-get 
install -y nodejs + curl -fsSL https://bun.sh/install | bash + echo 'export BUN_INSTALL="$HOME/.bun"' >> $BASH_ENV + echo 'export PATH="$BUN_INSTALL/bin:$PATH"' >> $BASH_ENV + - run: + name: Build op-reth documentation + working_directory: rust/op-reth + no_output_timeout: 30m + command: | + just docs-build + - store_artifacts: + path: op-reth/docs/vocs/docs/dist + destination: op-reth-docs + + + # Kona Link Checker + kona-link-checker: + docker: + - image: <> + resource_class: medium + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - run: + name: Install lychee + command: | + curl -sSfL https://github.com/lycheeverse/lychee/releases/download/v0.15.1/lychee-v0.15.1-x86_64-unknown-linux-gnu.tar.gz | tar xz + sudo mv lychee /usr/local/bin/ + - run: + name: Check links + working_directory: rust/kona + command: | + lychee --config ./lychee.toml --cache-exclude-status 429 '**/README.md' || true + + # Kona Publish Prestate Artifacts + kona-publish-prestate-artifacts: + parameters: + kind: + description: The kind of prestate (cannon) + type: string + default: "cannon" + version: + description: The version to build (kona-client, kona-client-int) + type: string + machine: + image: <> + docker_layer_caching: true + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-install-toolchain: + components: llvm-tools-preview + - rust-prepare-and-restore-cache: &kona-publish-prestate-cache + directory: rust + profile: release + - gcp-cli/install + - gcp-oidc-authenticate: + gcp_cred_config_file_path: /tmp/gcp_cred_config.json + oidc_token_file_path: /tmp/oidc_token.json + - run: + name: Generate prestate artifacts + working_directory: rust/kona + no_output_timeout: 60m + command: | + cd docker/fpvm-prestates + just "<>" "<>" "../.." 
+ - run: + name: Upload prestates to GCS + working_directory: rust/kona + command: | + PRESTATE_HASH=$(jq -r .pre ./prestate-artifacts-<>/prestate-proof.json) + BRANCH_NAME=$(echo "<< pipeline.git.branch >>" | tr '/' '-') + echo "Publishing ${PRESTATE_HASH} as ${BRANCH_NAME}" + if [ -n "<< pipeline.git.branch >>" ]; then + echo "Publishing commit hash data" + INFO_FILE=$(mktemp) + (echo "Commit=<< pipeline.git.revision >>" && echo "Prestate: ${PRESTATE_HASH}") > "${INFO_FILE}" + gsutil cp "${INFO_FILE}" "gs://oplabs-network-data/proofs/kona/<>/${BRANCH_NAME}-<>-prestate.bin.gz.txt" + rm "${INFO_FILE}" + PRESTATE_HASH="${BRANCH_NAME}-<>" + fi + gsutil cp ./prestate-artifacts-<>/prestate.bin.gz "gs://oplabs-network-data/proofs/kona/<>/${PRESTATE_HASH}.bin.gz" + echo "Successfully published prestates artifacts to GCS" + - rust-save-build-cache: *kona-publish-prestate-cache + +# ============================================================================ +# WORKFLOWS +# ============================================================================ +workflows: + # ========================================================================== + # Unified Rust CI workflow + # Runs on any rust/.* change or manual dispatch with rust_ci_dispatch=true + # ========================================================================== + rust-ci: + when: + or: + - equal: ["webhook", << pipeline.trigger_source >>] + - and: + - equal: [true, <>] + - equal: ["api", << pipeline.trigger_source >>] + jobs: + # ----------------------------------------------------------------------- + # Workspace-level jobs (replacing per-crate duplicates) + # ----------------------------------------------------------------------- + - rust-ci-fmt: + name: rust-fmt + directory: rust + context: &rust-ci-context + - circleci-repo-readonly-authenticated-github-token + + - rust-ci-clippy: + name: rust-clippy + directory: rust + command: "cargo clippy --workspace --all-targets --all-features --locked" + context: 
*rust-ci-context + + - rust-ci-deny: + name: rust-deny + directory: rust + command: "cargo deny --all-features check all" + context: *rust-ci-context + + - rust-ci-typos: + name: rust-typos + directory: rust + context: *rust-ci-context + + - rust-ci-zepter: + name: rust-zepter + directory: rust + context: *rust-ci-context + + - rust-ci-cargo-tests: + name: rust-tests + directory: rust + context: *rust-ci-context + + - rust-ci-doctest: + name: rust-doctest + directory: rust + command: "cargo test --doc --workspace --all-features" + context: *rust-ci-context + + - rust-ci-docs: + name: rust-docs + directory: rust + context: *rust-ci-context + + # Docs build + - rust-docs-build: + context: + - circleci-repo-readonly-authenticated-github-token + + - rust-ci-udeps: + name: rust-udeps + directory: rust + context: *rust-ci-context + + - rust-ci-cargo-hack: + name: rust-cargo-hack + parallelism: 6 + context: *rust-ci-context + + - rust-build-binary: + name: rust-build + directory: rust + context: *rust-ci-context + + - rust-build-binary: + name: rust-build-release + directory: rust + profile: release + context: *rust-ci-context + + - rust-build-binary: + name: rust-msrv + directory: rust + toolchain: "1.88.0" + context: *rust-ci-context + + # ----------------------------------------------------------------------- + # Unified cross-compilation jobs + # ----------------------------------------------------------------------- + - rust-ci-check-no-std: + name: rust-check-no-std + directory: rust + command: | + just check-no-std + toolchain: "1.88.0" + context: *rust-ci-context + + - rust-ci-cargo-hack-build: + name: rust-wasm-unknown + directory: rust + target: wasm32-unknown-unknown + flags: "-p op-alloy-consensus -p op-alloy-rpc-types -p op-alloy-rpc-types-engine -p alloy-op-evm --no-default-features" + context: *rust-ci-context + + - rust-ci-cargo-hack-build: + name: rust-wasm-wasi + directory: rust + target: wasm32-wasip1 + flags: "-p op-alloy-consensus -p 
op-alloy-rpc-types-engine -p alloy-op-evm" + context: *rust-ci-context + + # ----------------------------------------------------------------------- + # OP-Reth crate-specific jobs + # ----------------------------------------------------------------------- + - op-reth-compact-codec: + context: *rust-ci-context + + - op-reth-windows-check: + context: *rust-ci-context + + - rust-ci-cargo-tests: + name: op-reth-integration-tests + directory: rust + command: "--justfile op-reth/justfile test-integration" + cache_profile: debug + context: *rust-ci-context + + - rust-ci-cargo-tests: + name: op-reth-tests-edge + directory: rust + command: "--justfile op-reth/justfile test" + flags: "edge" + cache_profile: debug + context: *rust-ci-context + + # ----------------------------------------------------------------------- + # Op-Alloy crate-specific jobs + # ----------------------------------------------------------------------- + - op-alloy-cfg-check: + context: *rust-ci-context + + # ----------------------------------------------------------------------- + # Kona crate-specific jobs (lint, FPVM builds, benches, coverage) + # ----------------------------------------------------------------------- + - kona-cargo-lint: + name: kona-lint-<> + matrix: + parameters: + target: ["cannon", "asterisc"] + context: *rust-ci-context + + - kona-build-fpvm: + name: kona-build-fpvm-<> + matrix: + parameters: + target: ["cannon-client", "asterisc-client"] + context: *rust-ci-context + + - kona-cargo-build-benches: + context: *rust-ci-context + + - kona-coverage: + context: *rust-ci-context + requires: + - rust-tests + + - kona-host-client-offline: + name: kona-host-client-offline-cannon + context: *rust-ci-context + + + # ========================================================================== + # Kona scheduled workflows + # ========================================================================== + scheduled-kona-link-checker: + when: + equal: [build_weekly, <>] + jobs: + - 
kona-link-checker: + context: + - circleci-repo-readonly-authenticated-github-token + + scheduled-kona-sync: + when: + equal: [build_weekly, <>] + jobs: + - kona-update-monorepo: + context: + - circleci-repo-readonly-authenticated-github-token + + # Kona publish prestate artifacts - on push to develop + kona-publish-prestates: + when: + or: + - equal: ["develop", <>] + jobs: + - kona-publish-prestate-artifacts: + name: kona-publish-<> + matrix: + parameters: + version: ["kona-client", "kona-client-int"] + context: + - circleci-repo-readonly-authenticated-github-token + - oplabs-gcr diff --git a/.circleci/continue/rust-e2e.yml b/.circleci/continue/rust-e2e.yml new file mode 100644 index 00000000000..8837d835f9c --- /dev/null +++ b/.circleci/continue/rust-e2e.yml @@ -0,0 +1,276 @@ +version: 2.1 + +# Rust E2E CI Continuation Configuration +# This file contains E2E tests that depend on both kona and op-reth. +# It is merged with main.yml and rust-ci.yml when rust/** changes are detected. +# Shared orbs, commands, and jobs come from main.yml and rust-ci.yml during merge. + +parameters: + # Required parameters (also in main.yml, merged during continuation) + c-default_docker_image: + type: string + default: cimg/base:2024.01 + c-rust_e2e_dispatch: + type: boolean + default: false + c-go-cache-version: + type: string + default: "v0.0" + +# Commands used by rust-e2e jobs +commands: + go-restore-cache: + parameters: + module: + type: string + default: . + namespace: + type: string + version: + type: string + default: <> + steps: + - restore_cache: + name: Restore go cache for <> (<>/go.mod) + keys: + - go-<>-<>-<>-{{ checksum "<>/go.mod" }}-{{ checksum "<>/go.sum" }} + - go-<>-<>-<>-{{ checksum "<>/go.mod" }}- + - go-<>-<>-<>- + + go-save-cache: + parameters: + module: + type: string + default: . 
+ namespace: + type: string + version: + type: string + default: <> + steps: + - save_cache: + name: Save go cache for <> (<>/go.mod) + paths: + - ~/.cache/go-build + - ~/go/pkg/mod + key: go-<>-<>-<>-{{ checksum "<>/go.mod" }}-{{ checksum "<>/go.sum" }} + +# ============================================================================ +# RUST E2E JOBS +# ============================================================================ +jobs: + # Kona Node E2E Sysgo Tests (requires both kona and op-reth) + rust-e2e-sysgo-tests: + parameters: + devnet_config: + description: The devnet configuration to test + type: string + reorg_tests: + description: Whether to run reorg tests + type: boolean + default: false + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - attach_workspace: + at: . + - go-restore-cache: + namespace: kona-ci + - rust-build: &kona-rust-build-release + directory: rust + profile: release + binary: "kona-node" + - run: + name: Run common tests for node with sysgo orchestrator + no_output_timeout: 60m + command: | + WD=$(pwd) + echo "Running tests..." + export OP_RETH_EXEC_PATH="$WD/rust/target/release/op-reth" + export RUST_BINARY_PATH_KONA_NODE="$WD/rust/target/release/kona-node" + cd rust/kona && just test-e2e-sysgo-run node node/common "<>" + - when: + condition: + equal: [true, <>] + steps: + - run: + name: Run reorg tests for node with sysgo orchestrator + no_output_timeout: 60m + command: | + WD=$(pwd) + echo "Running tests..." 
+ export OP_RETH_EXEC_PATH="$WD/rust/target/release/op-reth" + export RUST_BINARY_PATH_KONA_NODE="$WD/rust/target/release/kona-node" + cd rust/kona && just test-e2e-sysgo-run node node/reorgs "<>" + - go-save-cache: + namespace: kona-ci + + # Kona Node Restart Tests (from node_e2e_sysgo_tests.yaml) + rust-restart-sysgo-tests: + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - attach_workspace: + at: . + - go-restore-cache: + namespace: kona-ci + - rust-build: + <<: *kona-rust-build-release + - run: + name: Run restart tests for node with sysgo orchestrator + no_output_timeout: 60m + command: | + echo "Running tests..." + WD=$(pwd) + export RUST_BINARY_PATH_KONA_NODE="$WD/rust/target/release/kona-node" + cd rust/kona && just test-e2e-sysgo node node/restart + - go-save-cache: + namespace: kona-ci + + # Kona Supervisor E2E Tests + kona-supervisor-e2e-tests: + parameters: + test_pkg: + description: The test package to run + type: string + docker: + - image: <> + resource_class: xlarge + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - rust-build: + <<: *kona-rust-build-release + binary: "kona-supervisor" + - run: + name: Run supervisor tests for <> + working_directory: rust/kona + no_output_timeout: 40m + command: | + just test-e2e-sysgo supervisor "/supervisor/<>" + + # Kona Proof Action Tests (from proof.yaml) + kona-proof-action-tests: + parameters: + kind: + description: The kind of action test (single or interop) + type: string + docker: + - image: <> + resource_class: xlarge + parallelism: 4 + steps: + - utils/checkout-with-mise: + checkout-method: blobless + - attach_workspace: + at: . 
+ - go-restore-cache: + namespace: kona-ci + - rust-build: + <<: *kona-rust-build-release + binary: "kona-host" + - run: + name: Build kona and run action tests + working_directory: rust/kona + no_output_timeout: 90m + command: | + echo "Running action tests" + export KONA_HOST_PATH=$(pwd)/../target/release/kona-host + just action-tests-<>-run + - go-save-cache: + namespace: kona-ci + +# ============================================================================ +# RUST E2E WORKFLOWS +# ============================================================================ +workflows: + rust-e2e-ci: + when: + or: + - equal: ["webhook", << pipeline.trigger_source >>] + - and: + - equal: [true, <>] + - equal: ["api", << pipeline.trigger_source >>] + jobs: + - contracts-bedrock-build: + build_args: --skip test + context: + - circleci-repo-readonly-authenticated-github-token + - cannon-prestate-quick: &rust-e2e-job-base + context: + - circleci-repo-readonly-authenticated-github-token + - cannon-kona-prestate: + <<: *rust-e2e-job-base + - rust-build-binary: &cannon-kona-host + name: cannon-kona-host + directory: rust + profile: release + binary: "kona-host" + context: + - circleci-repo-readonly-authenticated-github-token + - rust-build-binary: &kona-build-release + name: kona-build-release + directory: rust + profile: release + context: + - circleci-repo-readonly-authenticated-github-token + - rust-build-binary: + name: op-reth-build + directory: rust + profile: release + binary: "op-reth" + context: + - circleci-repo-readonly-authenticated-github-token + - rust-e2e-sysgo-tests: + name: rust-e2e-<> + matrix: + parameters: + devnet_config: ["simple-kona", "simple-kona-geth", "simple-kona-sequencer", "large-kona-sequencer"] + context: + - circleci-repo-readonly-authenticated-github-token + requires: + - contracts-bedrock-build + - cannon-prestate-quick + - cannon-kona-prestate + - cannon-kona-host + - kona-build-release + - op-reth-build + - rust-restart-sysgo-tests: + name: 
rust-e2e-restart + <<: *rust-e2e-job-base + requires: + - contracts-bedrock-build + - cannon-prestate-quick + - cannon-kona-prestate + - cannon-kona-host + - kona-build-release + # Proof tests - single kind only, interop excluded per original config + - kona-proof-action-tests: + name: kona-proof-action-single + kind: single + requires: + - kona-build-release + - contracts-bedrock-build + context: + - circleci-repo-readonly-authenticated-github-token + + # Kona supervisor E2E tests - manual dispatch only + kona-supervisor-e2e: + when: + and: + - equal: [true, <>] + - equal: ["api", << pipeline.trigger_source >>] + jobs: + - kona-supervisor-e2e-tests: + name: kona-supervisor-<> + matrix: + parameters: + test_pkg: ["pre_interop", "l1reorg/sysgo"] + context: + - circleci-repo-readonly-authenticated-github-token diff --git a/.claude/skills/fix-todo/SKILL.md b/.claude/skills/fix-todo/SKILL.md new file mode 100644 index 00000000000..ffecb489d81 --- /dev/null +++ b/.claude/skills/fix-todo/SKILL.md @@ -0,0 +1,200 @@ +# fix-todo + +Resolve TODO checker CI failures by reopening GitHub issues that still have active TODOs in the codebase. + +## When to Use + +Use this skill when the scheduled TODO checker CI job fails. The TODO checker validates that TODO comments in the codebase don't reference closed GitHub issues. + +### Trigger Phrases + +- "Fix the latest TODO checker failure" +- "Resolve the TODO checker CI failure" +- "Handle the TODO checker issue" +- "Reopen issues from TODO checker" + +## Background + +The repository runs a scheduled CircleCI workflow (`scheduled-todo-issues`) every 4 hours that validates TODO comments. TODO comments can reference issues in formats like: +- `TODO(#1234)` - references ethereum-optimism/optimism +- `TODO(repo#1234)` - references ethereum-optimism/repo +- `TODO(org/repo#1234)` - full reference + +When an issue is closed but TODOs still reference it, the job fails and issues need to be reopened to track the remaining work. 
+ +## Prerequisites + +- `gh` CLI authenticated with GitHub +- Note: CircleCI API is publicly accessible for this repository, no token required + +## Workflow + +### Step 1: Find the latest scheduled TODO checker job + +```bash +LATEST_PIPELINE=$(curl -s "https://circleci.com/api/v2/project/gh/ethereum-optimism/optimism/pipeline?branch=develop" | \ + jq -r '.items[] | select(.trigger.type == "scheduled_pipeline") | {id, number, created_at} | @json' | head -1) + +PIPELINE_ID=$(echo "$LATEST_PIPELINE" | jq -r '.id') +PIPELINE_NUMBER=$(echo "$LATEST_PIPELINE" | jq -r '.number') +``` + +### Step 2: Get the workflow and job details + +Note: The latest scheduled pipeline may only contain a "setup" workflow. Search through recent scheduled pipelines to find one with the "scheduled-todo-issues" workflow. + +```bash +# Find a pipeline with the TODO workflow +PIPELINE_WITH_TODO=$(curl -s "https://circleci.com/api/v2/project/gh/ethereum-optimism/optimism/pipeline?branch=develop" | \ + jq -r '.items[] | select(.trigger.type == "scheduled_pipeline") | .id' | while read pid; do + workflows=$(curl -s "https://circleci.com/api/v2/pipeline/$pid/workflow" | jq -r '.items[] | .name') + if echo "$workflows" | grep -q "scheduled-todo-issues"; then + echo "$pid" + break + fi + done) + +PIPELINE_ID="$PIPELINE_WITH_TODO" +PIPELINE_NUMBER=$(curl -s "https://circleci.com/api/v2/project/gh/ethereum-optimism/optimism/pipeline?branch=develop" | \ + jq -r ".items[] | select(.id == \"$PIPELINE_ID\") | .number") + +WORKFLOW_DATA=$(curl -s "https://circleci.com/api/v2/pipeline/$PIPELINE_ID/workflow" | \ + jq '.items[] | select(.name == "scheduled-todo-issues")') +WORKFLOW_ID=$(echo "$WORKFLOW_DATA" | jq -r '.id') +WORKFLOW_STATUS=$(echo "$WORKFLOW_DATA" | jq -r '.status') + +JOB_NUMBER=$(curl -s "https://circleci.com/api/v2/workflow/$WORKFLOW_ID/job" | \ + jq -r '.items[] | .job_number') +``` + +Check if the workflow status is "failed". 
If it's "success" or "running", inform the user there's no failure to fix or to wait for completion. + +### Step 3: Fetch the job output to find closed issues + +```bash +OUTPUT_URL=$(curl -s "https://circleci.com/api/v1.1/project/gh/ethereum-optimism/optimism/$JOB_NUMBER" | \ + jq -r '.steps[] | select(.name | contains("TODO")) | .actions[0].output_url') + +curl -s "$OUTPUT_URL" | jq -r '.[].message' +``` + +The output will show a table of closed issues. Look for the `[Error] Closed issue details:` section at the end which shows: +- Repository & Issue (e.g., "ethereum-optimism/optimism #18616") +- Issue Title +- Location (e.g., "op-acceptance-tests/tests/isthmus/preinterop/interop_readiness_test.go:106") + +### Step 4: Parse the closed issue information + +Extract from the "Closed issue details" table: +- Issue number (e.g., #18616) +- File path and line number (e.g., `op-acceptance-tests/tests/isthmus/preinterop/interop_readiness_test.go:106`) +- Issue title + +### Step 5: Find who closed the issue + +Issues can be closed via PR or directly by a user. Check the timeline to find the most recent person who closed it: + +```bash +ISSUE_NUM="" + +# Use GraphQL to get the timeline and find the most recent close event +CLOSER=$(gh api graphql -f query=" +query { + repository(owner: \"ethereum-optimism\", name: \"optimism\") { + issue(number: $ISSUE_NUM) { + timelineItems(last: 20, itemTypes: [CLOSED_EVENT, REOPENED_EVENT]) { + nodes { + ... on ClosedEvent { + __typename + createdAt + actor { + login + } + closer { + __typename + } + } + ... 
on ReopenedEvent { + __typename + createdAt + actor { + login + } + } + } + } + } + } +}" --jq '.data.repository.issue.timelineItems.nodes | reverse | .[] | select(.__typename == "ClosedEvent") | .actor.login' | head -1) + +echo "Issue closed by: @$CLOSER" +``` + +This finds the most recent ClosedEvent in the timeline, which correctly handles cases where an issue was: +- Closed via PR, then reopened, then closed directly by a user +- Closed multiple times by different people + +Always tag the person from the most recent close event. + +### Step 6: Read the actual TODO line from the file + +Read the file at the location specified in the error to get the exact TODO comment text. + +### Step 7: Reopen the issue with proper attribution + +Format the reopening comment following this template: + +```bash +gh issue reopen $ISSUE_NUM --comment "@${CLOSER} Reopening because this issue was closed but there's still a TODO/skip referencing it in the codebase. + +[Brief context about what was completed vs what remains] + +The [TestName] at \`:\` is still skipped with: + +\`\`\` + +\`\`\` + +Discovered by the TODO check in CI: https://app.circleci.com/pipelines/github/ethereum-optimism/optimism/${PIPELINE_NUMBER}/workflows/${WORKFLOW_ID}/jobs/${JOB_NUMBER}" +``` + +## Requirements + +- **Always tag the person who closed the issue** using their GitHub handle (found via the most recent close event in the timeline) +- **Include the exact file location** where the TODO exists +- **Include the CircleCI job URL** for traceability +- **Read and include the actual TODO line** from the code +- **Provide context** about what was completed vs what remains (if determinable from the issue) + +## Output Format + +After successfully reopening, report: + +``` +✓ TODO checker failure resolved + +Issue: # - +Status: Reopened +Tagged: @<username> +Location: <file>:<line> + +View issue: https://github.com/ethereum-optimism/optimism/issues/<number> +CircleCI job: 
https://app.circleci.com/pipelines/github/ethereum-optimism/optimism/<pipeline>/workflows/<workflow>/jobs/<job> +``` + +## TODO Comment Formats + +The TODO checker validates these formats: +- `TODO(#<number>)` - references ethereum-optimism/optimism +- `TODO(<repo>#<number>)` - references ethereum-optimism/<repo> +- `TODO(<org>/<repo>#<number>)` - full reference + +## Error Handling + +**Multiple closed issues**: Process each one sequentially, asking for confirmation before reopening each. + +**Issue already reopened**: Check if there's already a comment about the TODO. If not, add a comment with the location. + +## About the TODO Checker + +The TODO checker runs via `.circleci/continue/main.yml` as a scheduled workflow named `scheduled-todo-issues`. It executes `ops/scripts/todo-checker.sh --verbose --strict --check-closed`. diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index a955dcc5bc1..39c66c4853f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,58 +1,25 @@ -# Monorepo - default to go-reviewers -* @ethereum-optimism/go-reviewers - -# OP Stack general -/op-alt-da @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-batcher @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-chain-ops @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-e2e @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-node @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-proposer @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-wheel @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/ops-bedrock @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers -/op-devstack @ethereum-optimism/op-stack @ethereum-optimism/go-reviewers +# Monorepo - default to monorepo-reviewers +* @ethereum-optimism/monorepo-reviewers # Expert areas -/op-deployer @ethereum-optimism/platforms-team -/op-validator @ethereum-optimism/platforms-team - -/op-node/rollup @ethereum-optimism/consensus 
@ethereum-optimism/go-reviewers - -/op-supervisor @ethereum-optimism/interop @ethereum-optimism/go-reviewers - -/op-conductor @ethereum-optimism/op-conductor @ethereum-optimism/go-reviewers - -/cannon @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-challenger @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-dispute-mon @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-preimage @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-program @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-e2e/actions/proofs @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -/op-e2e/faultproofs @ethereum-optimism/proofs @ethereum-optimism/go-reviewers -# Kona -/kona @ethereum-optimism/kona-reviewers +/op-node/rollup/derive @ethereum-optimism/consensus # exclusive +/rust/kona/crates/protocol @ethereum-optimism/consensus # exclusive -# op-reth -/op-reth @ethereum-optimism/kona-reviewers +/op-deployer @ethereum-optimism/platforms-team @ethereum-optimism/monorepo-reviewers +/op-validator @ethereum-optimism/platforms-team @ethereum-optimism/monorepo-reviewers -# op-alloy -/op-alloy @ethereum-optimism/kona-reviewers - -# Alloy OP Hardforks -/alloy-op-hardforks @ethereum-optimism/kona-reviewers - -# Alloy -/alloy-op-evm @ethereum-optimism/kona-reviewers - -# Ops -/.cursor/rules/solidity-styles.mdc @ethereum-optimism/contract-reviewers +/op-conductor @ethereum-optimism/op-conductor @ethereum-optimism/monorepo-reviewers # Contracts # We require a minimum of 2 reviewers for all changes to the contracts-bedrock # directory unless only markdown files are being changed. 
/packages/contracts-bedrock/** @ethereum-optimism/contract-reviewers #[min:2] /packages/contracts-bedrock/**/*.md @ethereum-optimism/contract-reviewers +/.cursor/rules/solidity-styles.mdc @ethereum-optimism/contract-reviewers # Security docs -/docs @ethereum-optimism/evm-safety +/docs/security-reviews @ethereum-optimism/evm-safety + +# Must come last to avoid being overridden +/.github/CODEOWNERS @ethereum-optimism/cloud-security diff --git a/.gitmodules b/.gitmodules index 2f49f007885..1856b3a961a 100644 --- a/.gitmodules +++ b/.gitmodules @@ -32,12 +32,9 @@ [submodule "packages/contracts-bedrock/lib/superchain-registry"] path = packages/contracts-bedrock/lib/superchain-registry url = https://github.com/ethereum-optimism/superchain-registry -[submodule "kona/crates/protocol/registry/superchain-registry"] - path = kona/crates/protocol/registry/superchain-registry - url = https://github.com/ethereum-optimism/superchain-registry.git -[submodule "reth"] - path = reth - url = https://github.com/paradigmxyz/reth +[submodule "rust/kona/crates/protocol/registry/superchain-registry"] + path = rust/kona/crates/protocol/registry/superchain-registry + url = https://github.com/ethereum-optimism/superchain-registry [submodule "op-rbuilder"] path = op-rbuilder url = https://github.com/flashbots/op-rbuilder diff --git a/.semgrepignore b/.semgrepignore index 350d32f2b17..db09993ebfe 100644 --- a/.semgrepignore +++ b/.semgrepignore @@ -18,4 +18,4 @@ vendor/ op-chain-ops/script/testdata/scripts/ # Op-alloy book theme (third-party mdbook assets) -op-alloy/book/ \ No newline at end of file +rust/op-alloy/book/ diff --git a/AGENTS.md b/AGENTS.md index 17f969ae964..867e8ea1a70 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -57,6 +57,7 @@ The OP Stack includes significant Rust implementations: More detailed guidance for AI agents can be found in: +- [docs/ai/ci-ops.md](docs/ai/ci-ops.md) - CI/CD operations - [docs/ai/contract-dev.md](docs/ai/contract-dev.md) - Smart contract development - 
[docs/ai/go-dev.md](docs/ai/go-dev.md) - Go service development - [docs/ai/rust-dev.md](docs/ai/rust-dev.md) - Rust development (kona, op-reth, alloy crates) diff --git a/Makefile b/Makefile index c263eee7227..bfe18718604 100644 --- a/Makefile +++ b/Makefile @@ -147,11 +147,18 @@ cannon: ## Builds cannon binary make -C ./cannon cannon .PHONY: cannon -reproducible-prestate: ## Builds reproducible prestates for op-program and kona +reproducible-prestate-op-program: make -C ./op-program build-reproducible-prestate - cd kona && just build-reproducible-prestate +.PHONY: reproducible-prestate-op-program + +reproducible-prestate-kona: + cd rust && just build-kona-reproducible-prestate +.PHONY: reproducible-prestate-kona + +reproducible-prestate: reproducible-prestate-op-program reproducible-prestate-kona ## Builds reproducible prestates for op-program and kona + # Output the prestate hashes after all the builds complete so they are easy to find at the end of the build logs. make -C ./op-program output-prestate-hash - cd kona && just output-prestate-hash + cd rust && just output-kona-prestate-hash .PHONY: reproducible-prestate cannon-prestates: cannon op-program diff --git a/alloy-op-evm/Cargo.lock b/alloy-op-evm/Cargo.lock deleted file mode 100644 index 1aa80666ece..00000000000 --- a/alloy-op-evm/Cargo.lock +++ /dev/null @@ -1,4299 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "ahash" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "alloy-chains" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3842d8c52fcd3378039f4703dba392dca8b546b1c8ed6183048f8dab95b2be78" -dependencies = [ - "alloy-primitives", - "num_enum", - "strum", -] - -[[package]] -name = "alloy-consensus" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1958f0294ecc05ebe7b3c9a8662a3e221c2523b7f2bcd94c7a651efbd510bf" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-trie", - "alloy-tx-macros", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "k256", - "once_cell", - "rand 0.8.5", - "secp256k1 0.30.0", - "serde", - "serde_json", - "serde_with", - "thiserror", -] - -[[package]] -name = "alloy-consensus-any" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f752e99497ddc39e22d547d7dfe516af10c979405a034ed90e69b914b7dddeae" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-eip2124" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "crc", - "serde", - "thiserror", -] - -[[package]] -name = "alloy-eip2930" -version = "0.2.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "borsh", - "serde", -] - -[[package]] -name = "alloy-eip7702" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "borsh", - "k256", - "serde", - "thiserror", -] - -[[package]] -name = "alloy-eip7928" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6adac476434bf024279164dcdca299309f0c7d1e3557024eb7a83f8d9d01c6b5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "borsh", - "serde", -] - -[[package]] -name = "alloy-eips" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "813a67f87e56b38554d18b182616ee5006e8e2bf9df96a0df8bf29dff1d52e3f" -dependencies = [ - "alloy-eip2124", - "alloy-eip2930", - "alloy-eip7702", - "alloy-eip7928", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "ethereum_ssz", - "ethereum_ssz_derive", - "serde", - "serde_with", - "sha2", - "thiserror", -] - -[[package]] -name = "alloy-evm" -version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1582933a9fc27c0953220eb4f18f6492ff577822e9a8d848890ff59f6b4f5beb" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-sol-types", - "auto_impl", - "derive_more", - "op-alloy", - "op-revm", - "revm", - "thiserror", -] - -[[package]] -name = "alloy-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"83ba208044232d14d4adbfa77e57d6329f51bc1acc21f5667bb7db72d88a0831" -dependencies = [ - "alloy-chains", - "alloy-eip2124", - "alloy-primitives", - "auto_impl", - "dyn-clone", -] - -[[package]] -name = "alloy-json-abi" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e3cf01219c966f95a460c95f1d4c30e12f6c18150c21a30b768af2a2a29142" -dependencies = [ - "alloy-primitives", - "alloy-sol-type-parser", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-json-rpc" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b626409c98ba43aaaa558361bca21440c88fd30df7542c7484b9c7a1489cdb" -dependencies = [ - "alloy-primitives", - "alloy-sol-types", - "http", - "serde", - "serde_json", - "thiserror", - "tracing", -] - -[[package]] -name = "alloy-network" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89924fdcfeee0e0fa42b1f10af42f92802b5d16be614a70897382565663bf7cf" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-json-rpc", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-any", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-signer", - "alloy-sol-types", - "async-trait", - "auto_impl", - "derive_more", - "futures-utils-wasm", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "alloy-network-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833037c04917bc2031541a60e8249e4ab5500e24c637c1c62e95e963a655d66f" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-op-evm" -version = "0.27.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "auto_impl", - "op-alloy", - "op-revm", - "revm", - "test-case", - "thiserror", -] - 
-[[package]] -name = "alloy-op-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6472c610150c4c4c15be9e1b964c9b78068f933bda25fb9cdf09b9ac2bb66f36" -dependencies = [ - "alloy-chains", - "alloy-hardforks", - "alloy-primitives", - "auto_impl", -] - -[[package]] -name = "alloy-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6a0fb18dd5fb43ec5f0f6a20be1ce0287c79825827de5744afaa6c957737c33" -dependencies = [ - "alloy-rlp", - "bytes", - "cfg-if", - "const-hex", - "derive_more", - "foldhash", - "hashbrown 0.16.1", - "indexmap 2.13.0", - "itoa", - "k256", - "keccak-asm", - "paste", - "proptest", - "rand 0.9.2", - "rapidhash", - "ruint", - "rustc-hash", - "serde", - "sha3", - "tiny-keccak", -] - -[[package]] -name = "alloy-provider" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b56f7a77513308a21a2ba0e9d57785a9d9d2d609e77f4e71a78a1192b83ff2d" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-eth", - "alloy-signer", - "alloy-sol-types", - "alloy-transport", - "async-stream", - "async-trait", - "auto_impl", - "dashmap", - "either", - "futures", - "futures-utils-wasm", - "lru", - "parking_lot", - "pin-project", - "serde", - "serde_json", - "thiserror", - "tokio", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rlp" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" -dependencies = [ - "alloy-rlp-derive", - "arrayvec", - "bytes", -] - -[[package]] -name = "alloy-rlp-derive" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "alloy-rpc-client" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff01723afc25ec4c5b04de399155bef7b6a96dfde2475492b1b7b4e7a4f46445" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-transport", - "futures", - "pin-project", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rpc-types-any" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212ca1c1dab27f531d3858f8b1a2d6bfb2da664be0c1083971078eb7b71abe4b" -dependencies = [ - "alloy-consensus-any", - "alloy-rpc-types-eth", - "alloy-serde", -] - -[[package]] -name = "alloy-rpc-types-engine" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "336ef381c7409f23c69f6e79bddc1917b6e832cff23e7a5cf84b9381d53582e6" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "rand 0.8.5", - "serde", - "strum", -] - -[[package]] -name = "alloy-rpc-types-eth" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28e97603095020543a019ab133e0e3dc38cd0819f19f19bdd70c642404a54751" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-sol-types", - "itertools 0.14.0", - "serde", - "serde_json", - "serde_with", - "thiserror", -] - -[[package]] -name = "alloy-serde" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "946a0d413dbb5cd9adba0de5f8a1a34d5b77deda9b69c1d7feed8fc875a1aa26" -dependencies = [ - "alloy-primitives", - "serde", - 
"serde_json", -] - -[[package]] -name = "alloy-signer" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb10ccd49d0248df51063fce6b716f68a315dd912d55b32178c883fd48b4021d" -dependencies = [ - "alloy-primitives", - "async-trait", - "auto_impl", - "either", - "elliptic-curve", - "k256", - "thiserror", -] - -[[package]] -name = "alloy-sol-macro" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09eb18ce0df92b4277291bbaa0ed70545d78b02948df756bbd3d6214bf39a218" -dependencies = [ - "alloy-sol-macro-expander", - "alloy-sol-macro-input", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "alloy-sol-macro-expander" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95d9fa2daf21f59aa546d549943f10b5cce1ae59986774019fbedae834ffe01b" -dependencies = [ - "alloy-sol-macro-input", - "const-hex", - "heck", - "indexmap 2.13.0", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", - "syn-solidity", - "tiny-keccak", -] - -[[package]] -name = "alloy-sol-macro-input" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9396007fe69c26ee118a19f4dee1f5d1d6be186ea75b3881adf16d87f8444686" -dependencies = [ - "const-hex", - "dunce", - "heck", - "macro-string", - "proc-macro2", - "quote", - "syn 2.0.114", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-type-parser" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af67a0b0dcebe14244fc92002cd8d96ecbf65db4639d479f5fcd5805755a4c27" -dependencies = [ - "serde", - "winnow", -] - -[[package]] -name = "alloy-sol-types" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09aeea64f09a7483bdcd4193634c7e5cf9fd7775ee767585270cd8ce2d69dc95" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - 
"alloy-sol-macro", - "serde", -] - -[[package]] -name = "alloy-transport" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f50a9516736d22dd834cc2240e5bf264f338667cc1d9e514b55ec5a78b987ca" -dependencies = [ - "alloy-json-rpc", - "auto_impl", - "base64", - "derive_more", - "futures", - "futures-utils-wasm", - "parking_lot", - "serde", - "serde_json", - "thiserror", - "tokio", - "tower", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-trie" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arrayvec", - "derive_more", - "nybbles", - "serde", - "smallvec", - "tracing", -] - -[[package]] -name = "alloy-tx-macros" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ceac797eb8a56bdf5ab1fab353072c17d472eab87645ca847afe720db3246d" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "ark-bls12-381" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3df4dcc01ff89867cd86b0da835f23c3f02738353aaee7dde7495af71363b8d5" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-bn254" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d69eab57e8d2663efa5c63135b2af4f396d66424f88954c21104125ab6b3e6bc" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-r1cs-std", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-ec" -version = "0.5.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce" -dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-poly", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", - "itertools 0.13.0", - "num-bigint", - "num-integer", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm 0.4.2", - "ark-ff-macros 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.4.1", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" -dependencies = [ - "ark-ff-asm 0.5.0", - "ark-ff-macros 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "educe", - "itertools 0.13.0", - "num-bigint", - "num-traits", - "paste", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" -dependencies = [ - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-poly" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27" -dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", -] - -[[package]] -name = "ark-r1cs-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941551ef1df4c7a401de7068758db6503598e6f01850bdb2cfdb614a1f9dbea1" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-relations", - "ark-std 0.5.0", - "educe", - "num-bigint", - "num-integer", - 
"num-traits", - "tracing", -] - -[[package]] -name = "ark-relations" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec46ddc93e7af44bcab5230937635b06fb5744464dd6a7e7b083e80ebd274384" -dependencies = [ - "ark-ff 0.5.0", - "ark-std 0.5.0", - "tracing", - "tracing-subscriber", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-std 0.3.0", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-std 0.4.0", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" -dependencies = [ - "ark-serialize-derive", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" 
-version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "aurora-engine-modexp" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518bc5745a6264b5fd7b09dffb9667e400ee9e2bbe18555fac75e1fe9afa0df9" -dependencies = [ - "hex", - "num", -] - -[[package]] -name = "auto_impl" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - 
-[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" - -[[package]] -name = "bit-set" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" - -[[package]] -name = "bitcoin-io" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" - -[[package]] -name = "bitcoin_hashes" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" -dependencies = [ - "bitcoin-io", - "hex-conservative", -] - -[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blst" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45" -dependencies = [ - "cc", - "glob", - "threadpool", - "zeroize", -] - -[[package]] -name = "borsh" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" -dependencies = [ - "borsh-derive", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" -dependencies = [ - "once_cell", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "bumpalo" -version = "3.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" - -[[package]] -name = "byte-slice-cast" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" -dependencies = [ - "serde", -] - -[[package]] -name = "c-kzg" -version = "2.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" -dependencies = [ - "blst", - "cc", - "glob", - "hex", - "libc", - "once_cell", - "serde", -] - -[[package]] -name = "cc" -version = "1.2.53" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" -dependencies = [ - "find-msvc-tools", - "shlex", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "chrono" -version = "0.4.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" -dependencies = [ - "iana-time-zone", - "num-traits", - "serde", - "windows-link", -] - -[[package]] -name = "const-hex" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" -dependencies = [ - "cfg-if", - "cpufeatures", - "proptest", - "serde_core", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "const_format" -version = "0.2.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" 
-dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "darling" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" -dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling_core" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "serde", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" -dependencies = [ - "darling_core 0.20.11", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", - "serde_core", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive-where" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_more" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case", - "proc-macro2", - 
"quote", - "rustc_version 0.4.1", - "syn 2.0.114", - "unicode-xid", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest 0.10.7", - "elliptic-curve", - "rfc6979", - "serdect", - "signature", - "spki", -] - -[[package]] -name = "educe" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -dependencies = [ - "serde", -] - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest 0.10.7", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "enum-ordinalize" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" -dependencies = [ - "enum-ordinalize-derive", -] - -[[package]] -name = "enum-ordinalize-derive" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "ethereum_serde_utils" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" -dependencies = [ - "alloy-primitives", - "hex", - "serde", - "serde_derive", - "serde_json", -] - -[[package]] -name = "ethereum_ssz" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" 
-dependencies = [ - "alloy-primitives", - "ethereum_serde_utils", - "itertools 0.13.0", - "serde", - "serde_derive", - "smallvec", - "typenum", -] - -[[package]] -name = "ethereum_ssz_derive" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fastrlp" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "fastrlp" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "ff" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "futures-utils-wasm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" - -[[package]] -name = "generic-array" -version = "0.14.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2" -dependencies = [ - "typenum", - "version_check", - "zeroize", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "libc", - "r-efi", - "wasip2", -] - -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "gmp-mpfr-sys" -version = "1.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60f8970a75c006bb2f8ae79c6768a116dd215fa8346a87aed99bf9d82ca43394" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" - -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "allocator-api2", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash", - "serde", - "serde_core", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - 
-[[package]] -name = "hermit-abi" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hex-conservative" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" -dependencies = [ - "arrayvec", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "icu_collections" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - 
"zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "1.1.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - 
"either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "js-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "k256" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "once_cell", - "serdect", - "sha2", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "keccak-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" -dependencies = [ - "digest 0.10.7", - "sha3-asm", -] - -[[package]] -name = "libc" -version = "0.2.180" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" - -[[package]] -name = "libm" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "lru" -version = "0.16.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" -dependencies = [ - "hashbrown 0.16.1", -] - -[[package]] -name = "macro-string" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "memchr" -version = "2.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" - -[[package]] -name = "num" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = 
"num-complex" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_enum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "nybbles" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5676b5c379cf5b03da1df2b3061c4a4e2aa691086a56ac923e08c143f53f59" -dependencies = [ - "alloy-rlp", - "cfg-if", - "proptest", - "ruint", - "serde", - "smallvec", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" - -[[package]] -name = "op-alloy" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9b8fee21003dd4f076563de9b9d26f8c97840157ef78593cd7f262c5ca99848" -dependencies = [ - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-consensus" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736381a95471d23e267263cfcee9e1d96d30b9754a94a2819148f83379de8a86" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "derive_more", - "serde", - "thiserror", -] - -[[package]] -name = "op-alloy-network" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4034183dca6bff6632e7c24c92e75ff5f0eabb58144edb4d8241814851334d47" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-eth", - "alloy-signer", - "op-alloy-consensus", - "op-alloy-rpc-types", -] - -[[package]] -name = "op-alloy-provider" -version = "0.23.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6753d90efbaa8ea8bcb89c1737408ca85fa60d7adb875049d3f382c063666f86" -dependencies = [ - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-transport", - "async-trait", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-rpc-types" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd87c6b9e5b6eee8d6b76f41b04368dca0e9f38d83338e5b00e730c282098a4" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "derive_more", - "op-alloy-consensus", - "serde", - "serde_json", - "thiserror", -] - -[[package]] -name = "op-alloy-rpc-types-engine" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77727699310a18cdeed32da3928c709e2704043b6584ed416397d5da65694efc" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-serde", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "op-alloy-consensus", - "serde", - "sha2", - "snap", - "thiserror", -] - -[[package]] -name = "op-revm" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79c92b75162c2ed1661849fa51683b11254a5b661798360a2c24be918edafd40" -dependencies = [ - "auto_impl", - "revm", - "serde", -] - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "parity-scale-codec" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" -dependencies = [ - 
"arrayvec", - "bitvec", - "byte-slice-cast", - "const_format", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "rustversion", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "parking_lot" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pest" -version = "2.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" -dependencies = [ - "memchr", - "ucd-trie", -] - -[[package]] -name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros", - "phf_shared", - "serde", -] - -[[package]] -name = "phf_generator" -version = "0.13.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "potential_utf" -version = "0.1.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve", -] - -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "proc-macro2" -version = "1.0.105" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proptest" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" -dependencies = [ - "bit-set", - "bit-vec", - "bitflags", - "num-traits", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", - "unarray", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quote" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "serde", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.5", - "serde", -] - -[[package]] -name = "rand_chacha" -version = 
"0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.17", -] - -[[package]] -name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom 0.3.4", - "serde", -] - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core 0.9.5", -] - -[[package]] -name = "rapidhash" -version = "4.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" -dependencies = [ - "rustversion", -] - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "regex-syntax" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" - -[[package]] -name = "revm" -version = "34.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2aabdebaa535b3575231a88d72b642897ae8106cf6b0d12eafc6bfdf50abfc7" -dependencies = [ - "revm-bytecode", - "revm-context", - "revm-context-interface", - "revm-database", - "revm-database-interface", - "revm-handler", - "revm-inspector", - "revm-interpreter", - "revm-precompile", - "revm-primitives", - "revm-state", -] - -[[package]] -name = "revm-bytecode" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d1e5c1eaa44d39d537f668bc5c3409dc01e5c8be954da6c83370bbdf006457" -dependencies = [ - "bitvec", - "phf", - "revm-primitives", - "serde", -] - -[[package]] -name = "revm-context" -version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "892ff3e6a566cf8d72ffb627fdced3becebbd9ba64089c25975b9b028af326a5" -dependencies = [ - "bitvec", - "cfg-if", - "derive-where", - "revm-bytecode", - "revm-context-interface", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-context-interface" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57f61cc6d23678c4840af895b19f8acfbbd546142ec8028b6526c53cc1c16c98" -dependencies = [ - "alloy-eip2930", - "alloy-eip7702", - "auto_impl", - "either", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-database" -version = "10.0.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "529528d0b05fe646be86223032c3e77aa8b05caa2a35447d538c55965956a511" -dependencies = [ - "alloy-eips", - "revm-bytecode", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-database-interface" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7bf93ac5b91347c057610c0d96e923db8c62807e03f036762d03e981feddc1d" -dependencies = [ - "auto_impl", - "either", - "revm-primitives", - "revm-state", - "serde", - "thiserror", -] - -[[package]] -name = "revm-handler" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cd0e43e815a85eded249df886c4badec869195e70cdd808a13cfca2794622d2" -dependencies = [ - "auto_impl", - "derive-where", - "revm-bytecode", - "revm-context", - "revm-context-interface", - "revm-database-interface", - "revm-interpreter", - "revm-precompile", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-inspector" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3ccad59db91ef93696536a0dbaf2f6f17cfe20d4d8843ae118edb7e97947ef" -dependencies = [ - "auto_impl", - "either", - "revm-context", - "revm-database-interface", - "revm-handler", - "revm-interpreter", - "revm-primitives", - "revm-state", - "serde", - "serde_json", -] - -[[package]] -name = "revm-interpreter" -version = "32.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11406408597bc249392d39295831c4b641b3a6f5c471a7c41104a7a1e3564c07" -dependencies = [ - "revm-bytecode", - "revm-context-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-precompile" -version = "32.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c1285c848d240678bf69cb0f6179ff5a4aee6fc8e921d89708087197a0aff3" -dependencies = [ - 
"ark-bls12-381", - "ark-bn254", - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "arrayref", - "aurora-engine-modexp", - "c-kzg", - "cfg-if", - "gmp-mpfr-sys", - "k256", - "p256", - "revm-primitives", - "ripemd", - "secp256k1 0.31.1", - "sha2", -] - -[[package]] -name = "revm-primitives" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba580c56a8ec824a64f8a1683577876c2e1dbe5247044199e9b881421ad5dcf9" -dependencies = [ - "alloy-primitives", - "num_enum", - "once_cell", - "serde", -] - -[[package]] -name = "revm-state" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "311720d4f0f239b041375e7ddafdbd20032a33b7bae718562ea188e188ed9fd3" -dependencies = [ - "alloy-eip7928", - "bitflags", - "revm-bytecode", - "revm-primitives", - "serde", -] - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "ruint" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" -dependencies = [ - "alloy-rlp", - "ark-ff 0.3.0", - "ark-ff 0.4.2", - "ark-ff 0.5.0", - "bytes", - "fastrlp 0.3.1", - "fastrlp 0.4.0", - "num-bigint", - "num-integer", - "num-traits", - "parity-scale-codec", - "primitive-types", - 
"proptest", - "rand 0.8.5", - "rand 0.9.2", - "rlp", - "ruint-macro", - "serde_core", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver 1.0.27", -] - -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.61.2", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "rusty-fork" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" -dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", -] - -[[package]] -name 
= "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "secp256k1" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" -dependencies = [ - "bitcoin_hashes", - "rand 0.8.5", - "secp256k1-sys 0.10.1", - "serde", -] - -[[package]] -name = "secp256k1" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3c81b43dc2d8877c216a3fccf76677ee1ebccd429566d3e67447290d0c42b2" -dependencies = [ - "bitcoin_hashes", - "rand 0.9.2", - "secp256k1-sys 0.11.0", -] - -[[package]] -name = "secp256k1-sys" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" -dependencies = [ - "cc", -] - -[[package]] -name = "secp256k1-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dcb913707158fadaf0d8702c2db0e857de66eb003ccfdda5924b5f5ac98efb38" -dependencies = [ - "cc", -] - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "semver-parser" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" -dependencies = [ - "pest", -] - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "indexmap 2.13.0", - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "serde_with" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" -dependencies = [ - "base64", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.13.0", - "schemars 0.9.0", - "schemars 1.2.0", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serdect" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" -dependencies = [ - "base16ct", - "serde", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "sha3-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" -dependencies = [ - "cc", - "cfg-if", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest 0.10.7", - "rand_core 
0.6.4", -] - -[[package]] -name = "siphasher" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" - -[[package]] -name = "slab" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -dependencies = [ - "serde", -] - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn-solidity" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f92d01b5de07eaf324f7fca61cc6bd3d82bbc1de5b6c963e6fe79e86f36580d" -dependencies = [ - "paste", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tempfile" -version = "3.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" -dependencies = [ - "fastrand", - "getrandom 0.3.4", - "once_cell", - "rustix", - "windows-sys 0.61.2", -] - -[[package]] -name = "test-case" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb2550dd13afcd286853192af8601920d959b14c401fcece38071d53bf0768a8" -dependencies = [ - "test-case-macros", -] - -[[package]] -name = "test-case-core" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcb7fd841cd518e279be3d5a3eb0636409487998a4aff22f3de87b81e88384f" -dependencies = [ - "cfg-if", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "test-case-macros" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "test-case-core", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "threadpool" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" -dependencies = [ - "num_cpus", -] - -[[package]] -name = "time" -version = "0.3.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" -dependencies = [ - "deranged", - "itoa", - "num-conv", 
- "powerfmt", - "serde_core", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" - -[[package]] -name = "time-macros" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tokio" -version = "1.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" -dependencies = [ - "pin-project-lite", - "tokio-macros", -] - -[[package]] -name = "tokio-macros" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tokio-stream" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "tokio-util" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" -dependencies = [ - "bytes", - "futures-core", - "futures-sink", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" -dependencies = [ - "serde_core", -] - -[[package]] -name = "toml_edit" -version = "0.23.10+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" -dependencies = [ - "indexmap 2.13.0", - "toml_datetime", - "toml_parser", - "winnow", -] - -[[package]] -name = "toml_parser" -version = "1.0.6+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" -dependencies = [ - "winnow", -] - -[[package]] -name = "tower" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] 
-name = "tracing-attributes" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "tracing-core", -] - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "ucd-trie" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "valuable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "wait-timeout" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" -dependencies = [ - "libc", -] - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn 2.0.114", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasmtimer" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" -dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link", - "windows-result", - "windows-strings", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_gnullvm", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" -dependencies = [ - "memchr", -] - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - -[[package]] -name = 
"writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - 
-[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zmij" -version = "1.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65" diff --git a/alloy-op-evm/Cargo.toml b/alloy-op-evm/Cargo.toml deleted file mode 100644 index 2f97b2a96ed..00000000000 --- a/alloy-op-evm/Cargo.toml +++ /dev/null @@ -1,65 +0,0 @@ -[package] -name = "alloy-op-evm" -description = "OP EVM implementation" - -version = "0.27.0" -edition = "2021" -rust-version = "1.88" -authors = ["Alloy Contributors", "OpLabsPBC"] -license = "MIT OR Apache-2.0" -homepage = "https://github.com/ethereum-optimism/optimism" -repository = 
"https://github.com/ethereum-optimism/optimism" - -[lints.rustdoc] -all = "warn" - -[lints.rust] -missing-debug-implementations = "warn" -missing-docs = "warn" -unreachable-pub = "warn" -unused-must-use = "deny" -rust-2018-idioms = "deny" -unnameable-types = "warn" - -[lints.clippy] -all = { level = "warn", priority = -1 } -missing-const-for-fn = "warn" -use-self = "warn" -option-if-let-else = "allow" -redundant-clone = "warn" - -[dependencies] -alloy-evm = { version = "0.27.0", default-features = false, features = ["op"] } - -alloy-eips = { version = "1.5.2", default-features = false } -alloy-consensus = { version = "1.5.2", default-features = false } -alloy-primitives = { version = "1.0.0", default-features = false } - -alloy-op-hardforks = { version = "0.4.7" } -op-alloy = { version = "0.23", default-features = false, features = ["consensus"] } - -revm = { version = "34.0.0", default-features = false } -op-revm = { version = "15.0.0", default-features = false } - -thiserror = { version = "2.0.0", default-features = false } - -auto_impl = "1" - -[dev-dependencies] -alloy-hardforks = { version = "0.4.7" } -test-case = "3" - -[features] -default = ["std"] -std = [ - "alloy-primitives/std", - "revm/std", - "alloy-evm/std", - "op-revm/std", - "alloy-consensus/std", - "alloy-eips/std", - "op-alloy/std", - "thiserror/std" -] -gmp = ["alloy-evm/gmp"] -asm-keccak = ["alloy-evm/asm-keccak", "alloy-primitives/asm-keccak", "revm/asm-keccak"] diff --git a/alloy-op-evm/deny.toml b/alloy-op-evm/deny.toml deleted file mode 100644 index e32a2135e00..00000000000 --- a/alloy-op-evm/deny.toml +++ /dev/null @@ -1,54 +0,0 @@ -[advisories] -version = 2 -yanked = "warn" -ignore = [ - # https://rustsec.org/advisories/RUSTSEC-2024-0437, trezor-client dependency, no fix available yet - "RUSTSEC-2024-0437", - # https://rustsec.org/advisories/RUSTSEC-2024-0436 - "RUSTSEC-2024-0436", -] - -[bans] -multiple-versions = "warn" -wildcards = "deny" -highlight = "all" - -[licenses] -version = 2 
-confidence-threshold = 0.8 - -allow = [ - "MIT", - "Apache-2.0", - "Apache-2.0 WITH LLVM-exception", - "BSD-2-Clause", - "BSD-3-Clause", - "Unicode-3.0", - "Unlicense", - "Zlib", - "CC0-1.0", -] - -exceptions = [ - # gmp feature (optional, LGPL-licensed) - { allow = ["LGPL-3.0-or-later"], crate = "rug" }, - { allow = ["LGPL-3.0-or-later"], crate = "gmp-mpfr-sys" }, -] - -[[licenses.clarify]] -name = "ring" -expression = "LicenseRef-ring" -license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] - -[[licenses.clarify]] -name = "webpki" -expression = "LicenseRef-webpki" -license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] - -[sources] -unknown-registry = "deny" -unknown-git = "deny" -allow-git = [ - "https://github.com/bluealloy/revm", - #"https://github.com/alloy-rs/hardforks", -] diff --git a/alloy-op-evm/justfile b/alloy-op-evm/justfile deleted file mode 100644 index aacdc2ad5d8..00000000000 --- a/alloy-op-evm/justfile +++ /dev/null @@ -1,12 +0,0 @@ -# default recipe to display help information -default: - @just --list - -# Run cargo tests -test *args='': - cargo nextest run --workspace {{args}} - -# Check no_std compatibility -check-no-std: - rustup target add riscv32imac-unknown-none-elf - cargo check -p alloy-op-evm --target riscv32imac-unknown-none-elf --no-default-features diff --git a/alloy-op-evm/rustfmt.toml b/alloy-op-evm/rustfmt.toml deleted file mode 100644 index 3063df707a6..00000000000 --- a/alloy-op-evm/rustfmt.toml +++ /dev/null @@ -1,12 +0,0 @@ -reorder_imports = true -use_field_init_shorthand = true -use_small_heuristics = "Max" - -# Nightly -max_width = 100 -comment_width = 100 -imports_granularity = "Crate" -wrap_comments = true -format_code_in_doc_comments = true -doc_comment_code_block_width = 100 -format_macro_matchers = true diff --git a/alloy-op-evm/src/block/canyon.rs b/alloy-op-evm/src/block/canyon.rs deleted file mode 100644 index 7e2adbbcda2..00000000000 --- a/alloy-op-evm/src/block/canyon.rs +++ /dev/null @@ -1,50 +0,0 @@ -use 
alloy_evm::Database; -use alloy_op_hardforks::OpHardforks; -use alloy_primitives::{address, b256, hex, Address, Bytes, B256}; -use revm::{primitives::HashMap, state::Bytecode, DatabaseCommit}; - -/// The address of the create2 deployer -const CREATE_2_DEPLOYER_ADDR: Address = address!("0x13b0D85CcB8bf860b6b79AF3029fCA081AE9beF2"); - -/// The codehash of the create2 deployer contract. -const CREATE_2_DEPLOYER_CODEHASH: B256 = - b256!("0xb0550b5b431e30d38000efb7107aaa0ade03d48a7198a140edda9d27134468b2"); - -/// The raw bytecode of the create2 deployer contract. -const CREATE_2_DEPLOYER_BYTECODE: [u8; 1584] = hex!("6080604052600436106100435760003560e01c8063076c37b21461004f578063481286e61461007157806356299481146100ba57806366cfa057146100da57600080fd5b3661004a57005b600080fd5b34801561005b57600080fd5b5061006f61006a366004610327565b6100fa565b005b34801561007d57600080fd5b5061009161008c366004610327565b61014a565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b3480156100c657600080fd5b506100916100d5366004610349565b61015d565b3480156100e657600080fd5b5061006f6100f53660046103ca565b610172565b61014582826040518060200161010f9061031a565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe082820381018352601f90910116604052610183565b505050565b600061015683836102e7565b9392505050565b600061016a8484846102f0565b949350505050565b61017d838383610183565b50505050565b6000834710156101f4576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f437265617465323a20696e73756666696369656e742062616c616e636500000060448201526064015b60405180910390fd5b815160000361025f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f437265617465323a2062797465636f6465206c656e677468206973207a65726f60448201526064016101eb565b8282516020840186f5905073ffffffffffffffffffffffffffffffffffffffff8116610156576040517f08c379a0000000000000000000000000000000000000000000000000000000008152602060048201
52601960248201527f437265617465323a204661696c6564206f6e206465706c6f790000000000000060448201526064016101eb565b60006101568383305b6000604051836040820152846020820152828152600b8101905060ff815360559020949350505050565b61014e806104ad83390190565b6000806040838503121561033a57600080fd5b50508035926020909101359150565b60008060006060848603121561035e57600080fd5b8335925060208401359150604084013573ffffffffffffffffffffffffffffffffffffffff8116811461039057600080fd5b809150509250925092565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6000806000606084860312156103df57600080fd5b8335925060208401359150604084013567ffffffffffffffff8082111561040557600080fd5b818601915086601f83011261041957600080fd5b81358181111561042b5761042b61039b565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f011681019083821181831017156104715761047161039b565b8160405282815289602084870101111561048a57600080fd5b826020860160208301376000602084830101528095505050505050925092509256fe608060405234801561001057600080fd5b5061012e806100206000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c8063249cb3fa14602d575b600080fd5b603c603836600460b1565b604e565b60405190815260200160405180910390f35b60008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516845290915281205460ff16608857600060aa565b7fa2ef4600d742022d532d4747cb3547474667d6f13804902513b2ec01c848f4b45b9392505050565b6000806040838503121560c357600080fd5b82359150602083013573ffffffffffffffffffffffffffffffffffffffff8116811460ed57600080fd5b80915050925092905056fea26469706673582212205ffd4e6cede7d06a5daf93d48d0541fc68189eeb16608c1999a82063b666eb1164736f6c63430008130033a2646970667358221220fdc4a0fe96e3b21c108ca155438d37c9143fb01278a3c1d274948bad89c564ba64736f6c63430008130033"); - -/// The Canyon hardfork issues an irregular state transition that force-deploys the create2 -/// deployer contract. 
This is done by directly setting the code of the create2 deployer account -/// prior to executing any transactions on the timestamp activation of the fork. -pub(crate) fn ensure_create2_deployer<DB>( - chain_spec: impl OpHardforks, - timestamp: u64, - db: &mut DB, -) -> Result<(), DB::Error> -where - DB: Database + DatabaseCommit, -{ - // If the canyon hardfork is active at the current timestamp, and it was not active at the - // previous block timestamp (heuristically, block time is not perfectly constant at 2s), and the - // chain is an optimism chain, then we need to force-deploy the create2 deployer contract. - if chain_spec.is_canyon_active_at_timestamp(timestamp) - && !chain_spec.is_canyon_active_at_timestamp(timestamp.saturating_sub(2)) - { - // Load the create2 deployer account from the cache. - let mut acc_info = db.basic(CREATE_2_DEPLOYER_ADDR)?.unwrap_or_default(); - - // Update the account info with the create2 deployer codehash and bytecode. - acc_info.code_hash = CREATE_2_DEPLOYER_CODEHASH; - acc_info.code = Some(Bytecode::new_raw(Bytes::from_static(&CREATE_2_DEPLOYER_BYTECODE))); - - // Convert the cache account back into a revm account and mark it as touched. - let mut revm_acc: revm::state::Account = acc_info.into(); - revm_acc.mark_touch(); - - // Commit the create2 deployer account to the database. - db.commit(HashMap::from_iter([(CREATE_2_DEPLOYER_ADDR, revm_acc)])); - return Ok(()); - } - - Ok(()) -} diff --git a/alloy-op-evm/src/block/mod.rs b/alloy-op-evm/src/block/mod.rs deleted file mode 100644 index f282bcbe1c6..00000000000 --- a/alloy-op-evm/src/block/mod.rs +++ /dev/null @@ -1,755 +0,0 @@ -//! Block executor for Optimism. 
- -use crate::OpEvmFactory; -use alloc::{borrow::Cow, boxed::Box, vec::Vec}; -use alloy_consensus::{Eip658Value, Header, Transaction, TransactionEnvelope, TxReceipt}; -use alloy_eips::{Encodable2718, Typed2718}; -use alloy_evm::{ - block::{ - state_changes::{balance_increment_state, post_block_balance_increments}, - BlockExecutionError, BlockExecutionResult, BlockExecutor, BlockExecutorFactory, - BlockExecutorFor, BlockValidationError, ExecutableTx, OnStateHook, - StateChangePostBlockSource, StateChangeSource, StateDB, SystemCaller, TxResult, - }, - eth::{receipt_builder::ReceiptBuilderCtx, EthTxResult}, - Database, Evm, EvmFactory, FromRecoveredTx, FromTxWithEncoded, RecoveredTx, -}; -use alloy_op_hardforks::{OpChainHardforks, OpHardforks}; -use alloy_primitives::{Address, Bytes, B256}; -use canyon::ensure_create2_deployer; -use op_alloy::consensus::OpDepositReceipt; -use op_revm::{ - constants::L1_BLOCK_CONTRACT, estimate_tx_compressed_size, - transaction::deposit::DEPOSIT_TRANSACTION_TYPE, L1BlockInfo, OpTransaction, -}; -pub use receipt_builder::OpAlloyReceiptBuilder; -use receipt_builder::OpReceiptBuilder; -use revm::{ - context::{result::ResultAndState, Block}, - database::{DatabaseCommitExt, State}, - Database as _, DatabaseCommit, Inspector, -}; - -mod canyon; -pub mod receipt_builder; - -/// Trait for OP transaction environments. Allows to recover the transaction encoded bytes if -/// they're available. -pub trait OpTxEnv { - /// Returns the encoded bytes of the transaction. - fn encoded_bytes(&self) -> Option<&Bytes>; -} - -impl<T: revm::context::Transaction> OpTxEnv for OpTransaction<T> { - fn encoded_bytes(&self) -> Option<&Bytes> { - self.enveloped_tx.as_ref() - } -} - -/// Context for OP block execution. -#[derive(Debug, Default, Clone)] -pub struct OpBlockExecutionCtx { - /// Parent block hash. - pub parent_hash: B256, - /// Parent beacon block root. - pub parent_beacon_block_root: Option<B256>, - /// The block's extra data. 
- pub extra_data: Bytes, -} - -/// The result of executing an OP transaction. -#[derive(Debug)] -pub struct OpTxResult<H, T> { - /// The inner result of the transaction execution. - pub inner: EthTxResult<H, T>, - /// Whether the transaction is a deposit transaction. - pub is_deposit: bool, - /// The sender of the transaction. - pub sender: Address, -} - -impl<H, T> TxResult for OpTxResult<H, T> { - type HaltReason = H; - - fn result(&self) -> &ResultAndState<Self::HaltReason> { - &self.inner.result - } -} - -/// Block executor for Optimism. -#[derive(Debug)] -pub struct OpBlockExecutor<Evm, R: OpReceiptBuilder, Spec> { - /// Spec. - pub spec: Spec, - /// Receipt builder. - pub receipt_builder: R, - /// Context for block execution. - pub ctx: OpBlockExecutionCtx, - /// The EVM used by executor. - pub evm: Evm, - /// Receipts of executed transactions. - pub receipts: Vec<R::Receipt>, - /// Total gas used by executed transactions. - pub gas_used: u64, - /// Da footprint. - /// - /// This is only set for blocks post-Jovian activation. - /// See [DA footprint block limit spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/exec-engine.md#da-footprint-block-limit) - pub da_footprint_used: u64, - /// Whether Regolith hardfork is active. - pub is_regolith: bool, - /// Utility to call system smart contracts. - pub system_caller: SystemCaller<Spec>, -} - -impl<E, R, Spec> OpBlockExecutor<E, R, Spec> -where - E: Evm, - R: OpReceiptBuilder, - Spec: OpHardforks + Clone, -{ - /// Creates a new [`OpBlockExecutor`]. - pub fn new(evm: E, ctx: OpBlockExecutionCtx, spec: Spec, receipt_builder: R) -> Self { - Self { - is_regolith: spec - .is_regolith_active_at_timestamp(evm.block().timestamp().saturating_to()), - evm, - system_caller: SystemCaller::new(spec.clone()), - spec, - receipt_builder, - receipts: Vec::new(), - gas_used: 0, - da_footprint_used: 0, - ctx, - } - } -} - -/// Custom errors that can occur during OP block execution. 
-#[derive(Debug, thiserror::Error)] -pub enum OpBlockExecutionError { - /// Failed to load cache account. - #[error("failed to load cache account")] - LoadCacheAccount, - - /// Failed to get Jovian da footprint gas scalar from database. - #[error("failed to get da footprint gas scalar from database: {_0}")] - GetJovianDaFootprintScalar(Box<dyn core::error::Error + Send + Sync + 'static>), - - /// Transaction DA footprint exceeds available block DA footprint. - #[error("transaction DA footprint exceeds available block DA footprint. transaction_da_footprint: {transaction_da_footprint}, available_block_da_footprint: {available_block_da_footprint}")] - TransactionDaFootprintAboveGasLimit { - /// The DA footprint of the transaction to execute. - transaction_da_footprint: u64, - /// The available block DA footprint. - available_block_da_footprint: u64, - }, -} - -impl<E, R, Spec> OpBlockExecutor<E, R, Spec> -where - E: Evm< - DB: Database + DatabaseCommit + StateDB, - Tx: FromRecoveredTx<R::Transaction> + FromTxWithEncoded<R::Transaction> + OpTxEnv, - >, - R: OpReceiptBuilder<Transaction: Transaction + Encodable2718, Receipt: TxReceipt>, - Spec: OpHardforks, -{ - fn jovian_da_footprint_estimation( - &mut self, - tx_env: &E::Tx, - tx: impl RecoveredTx<R::Transaction>, - ) -> Result<u64, BlockExecutionError> { - // Try to use the enveloped tx if it exists, otherwise use the encoded 2718 bytes - let encoded = match tx_env.encoded_bytes() { - Some(encoded) => estimate_tx_compressed_size(encoded), - None => estimate_tx_compressed_size(tx.tx().encoded_2718().as_ref()), - } - .saturating_div(1_000_000); - - // Load the L1 block contract into the cache. If the L1 block contract is not pre-loaded the - // database will panic when trying to fetch the DA footprint gas scalar. 
- self.evm.db_mut().basic(L1_BLOCK_CONTRACT).map_err(BlockExecutionError::other)?; - - let da_footprint_gas_scalar = L1BlockInfo::fetch_da_footprint_gas_scalar(self.evm.db_mut()) - .map_err(BlockExecutionError::other)? - .into(); - - Ok(encoded.saturating_mul(da_footprint_gas_scalar)) - } -} - -impl<E, R, Spec> BlockExecutor for OpBlockExecutor<E, R, Spec> -where - E: Evm< - DB: Database + DatabaseCommit + StateDB, - Tx: FromRecoveredTx<R::Transaction> + FromTxWithEncoded<R::Transaction> + OpTxEnv, - >, - R: OpReceiptBuilder<Transaction: Transaction + Encodable2718, Receipt: TxReceipt>, - Spec: OpHardforks, -{ - type Transaction = R::Transaction; - type Receipt = R::Receipt; - type Evm = E; - type Result = OpTxResult<E::HaltReason, <R::Transaction as TransactionEnvelope>::TxType>; - - fn apply_pre_execution_changes(&mut self) -> Result<(), BlockExecutionError> { - // Set state clear flag if the block is after the Spurious Dragon hardfork. - let state_clear_flag = - self.spec.is_spurious_dragon_active_at_block(self.evm.block().number().saturating_to()); - self.evm.db_mut().set_state_clear_flag(state_clear_flag); - - self.system_caller.apply_blockhashes_contract_call(self.ctx.parent_hash, &mut self.evm)?; - self.system_caller - .apply_beacon_root_contract_call(self.ctx.parent_beacon_block_root, &mut self.evm)?; - - // Ensure that the create2deployer is force-deployed at the canyon transition. Optimism - // blocks will always have at least a single transaction in them (the L1 info transaction), - // so we can safely assume that this will always be triggered upon the transition and that - // the above check for empty blocks will never be hit on OP chains. 
- ensure_create2_deployer( - &self.spec, - self.evm.block().timestamp().saturating_to(), - self.evm.db_mut(), - ) - .map_err(BlockExecutionError::other)?; - - Ok(()) - } - - fn execute_transaction_without_commit( - &mut self, - tx: impl ExecutableTx<Self>, - ) -> Result<Self::Result, BlockExecutionError> { - let (tx_env, tx) = tx.into_parts(); - let is_deposit = tx.tx().ty() == DEPOSIT_TRANSACTION_TYPE; - - // The sum of the transaction's gas limit, Tg, and the gas utilized in this block prior, - // must be no greater than the block's gasLimit. - let block_available_gas = self.evm.block().gas_limit() - self.gas_used; - if tx.tx().gas_limit() > block_available_gas && (self.is_regolith || !is_deposit) { - return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { - transaction_gas_limit: tx.tx().gas_limit(), - block_available_gas, - } - .into()); - } - - let da_footprint_used = if self - .spec - .is_jovian_active_at_timestamp(self.evm.block().timestamp().saturating_to()) - && !is_deposit - { - let da_footprint_available = self.evm.block().gas_limit() - self.da_footprint_used; - - let tx_da_footprint = self.jovian_da_footprint_estimation(&tx_env, &tx)?; - - if tx_da_footprint > da_footprint_available { - return Err(BlockExecutionError::Validation(BlockValidationError::Other( - Box::new(OpBlockExecutionError::TransactionDaFootprintAboveGasLimit { - transaction_da_footprint: tx_da_footprint, - available_block_da_footprint: da_footprint_available, - }), - ))); - } - - tx_da_footprint - } else { - 0 - }; - - // Execute transaction and return the result - let result = self.evm.transact(tx_env).map_err(|err| { - let hash = tx.tx().trie_hash(); - BlockExecutionError::evm(err, hash) - })?; - - Ok(OpTxResult { - inner: EthTxResult { - result, - blob_gas_used: da_footprint_used, - tx_type: tx.tx().tx_type(), - }, - is_deposit, - sender: *tx.signer(), - }) - } - - fn commit_transaction(&mut self, output: Self::Result) -> Result<u64, BlockExecutionError> { - 
let OpTxResult { - inner: EthTxResult { result: ResultAndState { result, state }, blob_gas_used, tx_type }, - is_deposit, - sender, - } = output; - - // Fetch the depositor account from the database for the deposit nonce. - // Note that this *only* needs to be done post-regolith hardfork, as deposit nonces - // were not introduced in Bedrock. In addition, regular transactions don't have deposit - // nonces, so we don't need to touch the DB for those. - let depositor = (self.is_regolith && is_deposit) - .then(|| self.evm.db_mut().basic(sender).map(|acc| acc.unwrap_or_default())) - .transpose() - .map_err(BlockExecutionError::other)?; - - self.system_caller.on_state(StateChangeSource::Transaction(self.receipts.len()), &state); - - let gas_used = result.gas_used(); - - // append gas used - self.gas_used += gas_used; - - // Update DA footprint if Jovian is active - if self.spec.is_jovian_active_at_timestamp(self.evm.block().timestamp().saturating_to()) - && !is_deposit - { - // Add to DA footprint used - self.da_footprint_used = self.da_footprint_used.saturating_add(blob_gas_used); - } - - self.receipts.push( - match self.receipt_builder.build_receipt(ReceiptBuilderCtx { - tx_type, - result, - cumulative_gas_used: self.gas_used, - evm: &self.evm, - state: &state, - }) { - Ok(receipt) => receipt, - Err(ctx) => { - let receipt = alloy_consensus::Receipt { - // Success flag was added in `EIP-658: Embedding transaction status code - // in receipts`. - status: Eip658Value::Eip658(ctx.result.is_success()), - cumulative_gas_used: self.gas_used, - logs: ctx.result.into_logs(), - }; - - self.receipt_builder.build_deposit_receipt(OpDepositReceipt { - inner: receipt, - deposit_nonce: depositor.map(|account| account.nonce), - // The deposit receipt version was introduced in Canyon to indicate an - // update to how receipt hashes should be computed - // when set. The state transition process ensures - // this is only set for post-Canyon deposit - // transactions. 
- deposit_receipt_version: (is_deposit - && self.spec.is_canyon_active_at_timestamp( - self.evm.block().timestamp().saturating_to(), - )) - .then_some(1), - }) - } - }, - ); - - self.evm.db_mut().commit(state); - - Ok(gas_used) - } - - fn finish( - mut self, - ) -> Result<(Self::Evm, BlockExecutionResult<R::Receipt>), BlockExecutionError> { - let balance_increments = - post_block_balance_increments::<Header>(&self.spec, self.evm.block(), &[], None); - // increment balances - self.evm - .db_mut() - .increment_balances(balance_increments.clone()) - .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; - // call state hook with changes due to balance increments. - self.system_caller.try_on_state_with(|| { - balance_increment_state(&balance_increments, self.evm.db_mut()).map(|state| { - ( - StateChangeSource::PostBlock(StateChangePostBlockSource::BalanceIncrements), - Cow::Owned(state), - ) - }) - })?; - - let legacy_gas_used = - self.receipts.last().map(|r| r.cumulative_gas_used()).unwrap_or_default(); - - Ok(( - self.evm, - BlockExecutionResult { - receipts: self.receipts, - requests: Default::default(), - gas_used: legacy_gas_used, - blob_gas_used: self.da_footprint_used, - }, - )) - } - - fn set_state_hook(&mut self, hook: Option<Box<dyn OnStateHook>>) { - self.system_caller.with_state_hook(hook); - } - - fn evm_mut(&mut self) -> &mut Self::Evm { - &mut self.evm - } - - fn evm(&self) -> &Self::Evm { - &self.evm - } - - fn receipts(&self) -> &[Self::Receipt] { - &self.receipts - } -} - -/// Ethereum block executor factory. -#[derive(Debug, Clone, Default, Copy)] -pub struct OpBlockExecutorFactory< - R = OpAlloyReceiptBuilder, - Spec = OpChainHardforks, - EvmFactory = OpEvmFactory, -> { - /// Receipt builder. - receipt_builder: R, - /// Chain specification. - spec: Spec, - /// EVM factory. 
- evm_factory: EvmFactory, -} - -impl<R, Spec, EvmFactory> OpBlockExecutorFactory<R, Spec, EvmFactory> { - /// Creates a new [`OpBlockExecutorFactory`] with the given spec, [`EvmFactory`], and - /// [`OpReceiptBuilder`]. - pub const fn new(receipt_builder: R, spec: Spec, evm_factory: EvmFactory) -> Self { - Self { receipt_builder, spec, evm_factory } - } - - /// Exposes the receipt builder. - pub const fn receipt_builder(&self) -> &R { - &self.receipt_builder - } - - /// Exposes the chain specification. - pub const fn spec(&self) -> &Spec { - &self.spec - } - - /// Exposes the EVM factory. - pub const fn evm_factory(&self) -> &EvmFactory { - &self.evm_factory - } -} - -impl<R, Spec, EvmF> BlockExecutorFactory for OpBlockExecutorFactory<R, Spec, EvmF> -where - R: OpReceiptBuilder<Transaction: Transaction + Encodable2718, Receipt: TxReceipt>, - Spec: OpHardforks, - EvmF: EvmFactory< - Tx: FromRecoveredTx<R::Transaction> + FromTxWithEncoded<R::Transaction> + OpTxEnv, - >, - Self: 'static, -{ - type EvmFactory = EvmF; - type ExecutionCtx<'a> = OpBlockExecutionCtx; - type Transaction = R::Transaction; - type Receipt = R::Receipt; - - fn evm_factory(&self) -> &Self::EvmFactory { - &self.evm_factory - } - - fn create_executor<'a, DB, I>( - &'a self, - evm: EvmF::Evm<&'a mut State<DB>, I>, - ctx: Self::ExecutionCtx<'a>, - ) -> impl BlockExecutorFor<'a, Self, DB, I> - where - DB: Database + 'a, - I: Inspector<EvmF::Context<&'a mut State<DB>>> + 'a, - { - OpBlockExecutor::new(evm, ctx, &self.spec, &self.receipt_builder) - } -} - -#[cfg(test)] -mod tests { - use alloc::{string::ToString, vec}; - use alloy_consensus::{transaction::Recovered, SignableTransaction, TxLegacy}; - use alloy_eips::eip2718::WithEncoded; - use alloy_evm::{EvmEnv, ToTxEnv}; - use alloy_hardforks::ForkCondition; - use alloy_op_hardforks::OpHardfork; - use alloy_primitives::{uint, Address, Signature, U256}; - use op_alloy::consensus::OpTxEnvelope; - use op_revm::{ - constants::{ - BASE_FEE_SCALAR_OFFSET, 
ECOTONE_L1_BLOB_BASE_FEE_SLOT, ECOTONE_L1_FEE_SCALARS_SLOT, - L1_BASE_FEE_SLOT, L1_BLOCK_CONTRACT, OPERATOR_FEE_SCALARS_SLOT, - }, - DefaultOp, L1BlockInfo, OpBuilder, OpSpecId, - }; - use revm::{ - context::BlockEnv, - database::{CacheDB, EmptyDB, InMemoryDB}, - inspector::NoOpInspector, - primitives::HashMap, - state::AccountInfo, - Context, - }; - - use crate::OpEvm; - - use super::*; - - #[test] - fn test_with_encoded() { - let executor_factory = OpBlockExecutorFactory::new( - OpAlloyReceiptBuilder::default(), - OpChainHardforks::op_mainnet(), - OpEvmFactory::default(), - ); - let mut db = State::builder().with_database(CacheDB::<EmptyDB>::default()).build(); - let evm = executor_factory.evm_factory.create_evm(&mut db, EvmEnv::default()); - let mut executor = executor_factory.create_executor(evm, OpBlockExecutionCtx::default()); - let tx = Recovered::new_unchecked( - OpTxEnvelope::Legacy(TxLegacy::default().into_signed(Signature::new( - Default::default(), - Default::default(), - Default::default(), - ))), - Address::ZERO, - ); - let tx_with_encoded = WithEncoded::new(tx.encoded_2718().into(), tx.clone()); - - // make sure we can use both `WithEncoded` and transaction itself as inputs. 
- let _ = executor.execute_transaction(&tx); - let _ = executor.execute_transaction(&tx_with_encoded); - } - - fn prepare_jovian_db(da_footprint_gas_scalar: u16) -> State<InMemoryDB> { - const L1_BASE_FEE: U256 = uint!(1_U256); - const L1_BLOB_BASE_FEE: U256 = uint!(2_U256); - const L1_BASE_FEE_SCALAR: u64 = 3; - const L1_BLOB_BASE_FEE_SCALAR: u64 = 4; - const L1_FEE_SCALARS: U256 = U256::from_limbs([ - 0, - (L1_BASE_FEE_SCALAR << (64 - BASE_FEE_SCALAR_OFFSET * 2)) | L1_BLOB_BASE_FEE_SCALAR, - 0, - 0, - ]); - const OPERATOR_FEE_SCALAR: u8 = 5; - const OPERATOR_FEE_CONST: u8 = 6; - let da_footprint_gas_scalar_bytes = da_footprint_gas_scalar.to_be_bytes(); - let mut operator_fee_and_da_footprint = [0u8; 32]; - operator_fee_and_da_footprint[31] = OPERATOR_FEE_CONST; - operator_fee_and_da_footprint[23] = OPERATOR_FEE_SCALAR; - operator_fee_and_da_footprint[19] = da_footprint_gas_scalar_bytes[1]; - operator_fee_and_da_footprint[18] = da_footprint_gas_scalar_bytes[0]; - let operator_fee_and_da_footprint_u256 = U256::from_be_bytes(operator_fee_and_da_footprint); - - let mut db = State::builder().with_database(InMemoryDB::default()).build(); - - db.insert_account_with_storage( - L1_BLOCK_CONTRACT, - AccountInfo { ..Default::default() }, - HashMap::from_iter([ - (L1_BASE_FEE_SLOT, L1_BASE_FEE), - (ECOTONE_L1_FEE_SCALARS_SLOT, L1_FEE_SCALARS), - (ECOTONE_L1_BLOB_BASE_FEE_SLOT, L1_BLOB_BASE_FEE), - (OPERATOR_FEE_SCALARS_SLOT, operator_fee_and_da_footprint_u256), - ]), - ); - - db.insert_account( - Address::ZERO, - AccountInfo { balance: U256::from(400_000_000), ..Default::default() }, - ); - - db - } - - fn build_executor<'a>( - db: &'a mut State<InMemoryDB>, - receipt_builder: &'a OpAlloyReceiptBuilder, - op_chain_hardforks: &'a OpChainHardforks, - gas_limit: u64, - jovian_timestamp: u64, - ) -> OpBlockExecutor< - OpEvm<&'a mut State<InMemoryDB>, NoOpInspector>, - &'a OpAlloyReceiptBuilder, - &'a OpChainHardforks, - > { - let ctx = Context::op() - .with_db(db) - 
.with_chain(L1BlockInfo { - operator_fee_scalar: Some(U256::from(2)), - operator_fee_constant: Some(U256::from(50)), - ..Default::default() - }) - .with_block(BlockEnv { - timestamp: U256::from(jovian_timestamp), - gas_limit, - ..Default::default() - }) - .modify_cfg_chained(|cfg| cfg.spec = OpSpecId::JOVIAN); - - let evm = OpEvm::new(ctx.build_op_with_inspector(NoOpInspector {}), true); - - OpBlockExecutor::new( - evm, - OpBlockExecutionCtx::default(), - op_chain_hardforks, - receipt_builder, - ) - } - - #[test] - fn test_jovian_da_footprint_estimation() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 7; - const GAS_LIMIT: u64 = 100_000; - const JOVIAN_TIMESTAMP: u64 = 1746806402; - - let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); - let op_chain_hardforks = OpChainHardforks::new( - OpHardfork::op_mainnet() - .into_iter() - .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), - ); - - let receipt_builder = OpAlloyReceiptBuilder::default(); - let mut executor = build_executor( - &mut db, - &receipt_builder, - &op_chain_hardforks, - GAS_LIMIT, - JOVIAN_TIMESTAMP, - ); - - let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; - - let tx = Recovered::new_unchecked( - OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( - Default::default(), - Default::default(), - Default::default(), - ))), - Address::ZERO, - ); - let tx_env = tx.to_tx_env(); - - assert!(executor.da_footprint_used == 0); - - let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); - - // make sure we can use both `WithEncoded` and transaction itself as inputs. 
- let res = executor.execute_transaction(&tx); - assert!(res.is_ok()); - - assert!(executor.da_footprint_used == expected_da_footprint); - } - - #[test] - fn test_jovian_da_footprint_estimation_out_of_gas() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 7; - const JOVIAN_TIMESTAMP: u64 = 1746806402; - const GAS_LIMIT: u64 = 100; - - let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); - let op_chain_hardforks = OpChainHardforks::new( - OpHardfork::op_mainnet() - .into_iter() - .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), - ); - - let receipt_builder = OpAlloyReceiptBuilder::default(); - let mut executor = build_executor( - &mut db, - &receipt_builder, - &op_chain_hardforks, - GAS_LIMIT, - JOVIAN_TIMESTAMP, - ); - - let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; - - let tx = Recovered::new_unchecked( - OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( - Default::default(), - Default::default(), - Default::default(), - ))), - Address::ZERO, - ); - let tx_env = tx.to_tx_env(); - - assert!(executor.da_footprint_used == 0); - - let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); - - // make sure we can use both `WithEncoded` and transaction itself as inputs. 
- let res = executor.execute_transaction(&tx); - assert!(res.is_err()); - let err = res.unwrap_err(); - match err { - BlockExecutionError::Validation(BlockValidationError::Other(err)) => { - assert_eq!( - err.to_string(), - OpBlockExecutionError::TransactionDaFootprintAboveGasLimit { - transaction_da_footprint: expected_da_footprint, - available_block_da_footprint: GAS_LIMIT, - } - .to_string(), - ); - } - _ => panic!("expected TransactionDaFootprintAboveGasLimit error"), - } - } - - #[test] - fn test_jovian_da_footprint_estimation_maxed_out_da_footprint() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 2000; - const JOVIAN_TIMESTAMP: u64 = 1746806402; - const GAS_LIMIT: u64 = 200_000; - - let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); - let op_chain_hardforks = OpChainHardforks::new( - OpHardfork::op_mainnet() - .into_iter() - .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), - ); - - let receipt_builder = OpAlloyReceiptBuilder::default(); - let mut executor = build_executor( - &mut db, - &receipt_builder, - &op_chain_hardforks, - GAS_LIMIT, - JOVIAN_TIMESTAMP, - ); - - let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; - - let tx = Recovered::new_unchecked( - OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( - Default::default(), - Default::default(), - Default::default(), - ))), - Address::ZERO, - ); - let tx_env = tx.to_tx_env(); - - assert!(executor.da_footprint_used == 0); - - let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); - - // make sure we can use both `WithEncoded` and transaction itself as inputs. - let gas_used_tx = executor.execute_transaction(&tx).expect("failed to execute transaction"); - - // The gas used when executing the transaction should be the legacy value... 
- assert!(gas_used_tx < expected_da_footprint); - - // The gas used when finishing the executor should be the DA footprint since this is higher - // than the legacy gas used and jovian is active... - let (_, result) = executor.finish().expect("failed to finish executor"); - assert_eq!(result.blob_gas_used, expected_da_footprint); - assert_eq!(result.gas_used, gas_used_tx); - assert!(result.blob_gas_used > result.gas_used); - } -} diff --git a/alloy-op-evm/src/lib.rs b/alloy-op-evm/src/lib.rs deleted file mode 100644 index 84770741798..00000000000 --- a/alloy-op-evm/src/lib.rs +++ /dev/null @@ -1,360 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/alloy.jpg", - html_favicon_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/favicon.ico" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -pub use alloy_evm::op::{spec, spec_by_timestamp_after_bedrock}; - -use alloy_evm::{precompiles::PrecompilesMap, Database, Evm, EvmEnv, EvmFactory}; -use alloy_primitives::{Address, Bytes}; -use core::{ - fmt::Debug, - ops::{Deref, DerefMut}, -}; -use op_revm::{ - precompiles::OpPrecompiles, DefaultOp, OpBuilder, OpContext, OpHaltReason, OpSpecId, - OpTransaction, OpTransactionError, -}; -use revm::{ - context::{BlockEnv, TxEnv}, - context_interface::result::{EVMError, ResultAndState}, - handler::{instructions::EthInstructions, PrecompileProvider}, - inspector::NoOpInspector, - interpreter::{interpreter::EthInterpreter, InterpreterResult}, - Context, ExecuteEvm, InspectEvm, Inspector, SystemCallEvm, -}; - -pub mod block; -pub use block::{OpBlockExecutionCtx, OpBlockExecutor, OpBlockExecutorFactory}; - -/// OP EVM implementation. -/// -/// This is a wrapper type around the `revm` evm with optional [`Inspector`] (tracing) -/// support. 
[`Inspector`] support is configurable at runtime because it's part of the underlying -/// [`OpEvm`](op_revm::OpEvm) type. -#[allow(missing_debug_implementations)] // missing revm::OpContext Debug impl -pub struct OpEvm<DB: Database, I, P = OpPrecompiles> { - inner: op_revm::OpEvm<OpContext<DB>, I, EthInstructions<EthInterpreter, OpContext<DB>>, P>, - inspect: bool, -} - -impl<DB: Database, I, P> OpEvm<DB, I, P> { - /// Provides a reference to the EVM context. - pub const fn ctx(&self) -> &OpContext<DB> { - &self.inner.0.ctx - } - - /// Provides a mutable reference to the EVM context. - pub const fn ctx_mut(&mut self) -> &mut OpContext<DB> { - &mut self.inner.0.ctx - } -} - -impl<DB: Database, I, P> OpEvm<DB, I, P> { - /// Creates a new OP EVM instance. - /// - /// The `inspect` argument determines whether the configured [`Inspector`] of the given - /// [`OpEvm`](op_revm::OpEvm) should be invoked on [`Evm::transact`]. - pub const fn new( - evm: op_revm::OpEvm<OpContext<DB>, I, EthInstructions<EthInterpreter, OpContext<DB>>, P>, - inspect: bool, - ) -> Self { - Self { inner: evm, inspect } - } -} - -impl<DB: Database, I, P> Deref for OpEvm<DB, I, P> { - type Target = OpContext<DB>; - - #[inline] - fn deref(&self) -> &Self::Target { - self.ctx() - } -} - -impl<DB: Database, I, P> DerefMut for OpEvm<DB, I, P> { - #[inline] - fn deref_mut(&mut self) -> &mut Self::Target { - self.ctx_mut() - } -} - -impl<DB, I, P> Evm for OpEvm<DB, I, P> -where - DB: Database, - I: Inspector<OpContext<DB>>, - P: PrecompileProvider<OpContext<DB>, Output = InterpreterResult>, -{ - type DB = DB; - type Tx = OpTransaction<TxEnv>; - type Error = EVMError<DB::Error, OpTransactionError>; - type HaltReason = OpHaltReason; - type Spec = OpSpecId; - type BlockEnv = BlockEnv; - type Precompiles = P; - type Inspector = I; - - fn block(&self) -> &BlockEnv { - &self.block - } - - fn chain_id(&self) -> u64 { - self.cfg.chain_id - } - - fn transact_raw( - &mut self, - tx: Self::Tx, - ) -> 
Result<ResultAndState<Self::HaltReason>, Self::Error> { - if self.inspect { - self.inner.inspect_tx(tx) - } else { - self.inner.transact(tx) - } - } - - fn transact_system_call( - &mut self, - caller: Address, - contract: Address, - data: Bytes, - ) -> Result<ResultAndState<Self::HaltReason>, Self::Error> { - self.inner.system_call_with_caller(caller, contract, data) - } - - fn finish(self) -> (Self::DB, EvmEnv<Self::Spec>) { - let Context { block: block_env, cfg: cfg_env, journaled_state, .. } = self.inner.0.ctx; - - (journaled_state.database, EvmEnv { block_env, cfg_env }) - } - - fn set_inspector_enabled(&mut self, enabled: bool) { - self.inspect = enabled; - } - - fn components(&self) -> (&Self::DB, &Self::Inspector, &Self::Precompiles) { - ( - &self.inner.0.ctx.journaled_state.database, - &self.inner.0.inspector, - &self.inner.0.precompiles, - ) - } - - fn components_mut(&mut self) -> (&mut Self::DB, &mut Self::Inspector, &mut Self::Precompiles) { - ( - &mut self.inner.0.ctx.journaled_state.database, - &mut self.inner.0.inspector, - &mut self.inner.0.precompiles, - ) - } -} - -/// Factory producing [`OpEvm`]s. 
-#[derive(Debug, Default, Clone, Copy)] -#[non_exhaustive] -pub struct OpEvmFactory; - -impl EvmFactory for OpEvmFactory { - type Evm<DB: Database, I: Inspector<OpContext<DB>>> = OpEvm<DB, I, Self::Precompiles>; - type Context<DB: Database> = OpContext<DB>; - type Tx = OpTransaction<TxEnv>; - type Error<DBError: core::error::Error + Send + Sync + 'static> = - EVMError<DBError, OpTransactionError>; - type HaltReason = OpHaltReason; - type Spec = OpSpecId; - type BlockEnv = BlockEnv; - type Precompiles = PrecompilesMap; - - fn create_evm<DB: Database>( - &self, - db: DB, - input: EvmEnv<OpSpecId>, - ) -> Self::Evm<DB, NoOpInspector> { - let spec_id = input.cfg_env.spec; - OpEvm { - inner: Context::op() - .with_db(db) - .with_block(input.block_env) - .with_cfg(input.cfg_env) - .build_op_with_inspector(NoOpInspector {}) - .with_precompiles(PrecompilesMap::from_static( - OpPrecompiles::new_with_spec(spec_id).precompiles(), - )), - inspect: false, - } - } - - fn create_evm_with_inspector<DB: Database, I: Inspector<Self::Context<DB>>>( - &self, - db: DB, - input: EvmEnv<OpSpecId>, - inspector: I, - ) -> Self::Evm<DB, I> { - let spec_id = input.cfg_env.spec; - OpEvm { - inner: Context::op() - .with_db(db) - .with_block(input.block_env) - .with_cfg(input.cfg_env) - .build_op_with_inspector(inspector) - .with_precompiles(PrecompilesMap::from_static( - OpPrecompiles::new_with_spec(spec_id).precompiles(), - )), - inspect: true, - } - } -} - -#[cfg(test)] -mod tests { - use alloc::{string::ToString, vec}; - use alloy_evm::{ - precompiles::{Precompile, PrecompileInput}, - EvmInternals, - }; - use alloy_primitives::U256; - use op_revm::precompiles::{bls12_381, bn254_pair}; - use revm::{context::CfgEnv, database::EmptyDB, precompile::PrecompileError}; - - use super::*; - - #[test] - fn test_precompiles_jovian_fail() { - let mut evm = OpEvmFactory::default().create_evm( - EmptyDB::default(), - EvmEnv::new(CfgEnv::new_with_spec(OpSpecId::JOVIAN), BlockEnv::default()), - ); - - let 
(precompiles, ctx) = (&mut evm.inner.0.precompiles, &mut evm.inner.0.ctx); - - let jovian_precompile = precompiles.get(bn254_pair::JOVIAN.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bn254_pair::JOVIAN_MAX_INPUT_SIZE + 1], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_err()); - assert!(matches!(result.unwrap_err(), PrecompileError::Bn254PairLength)); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G1_MSM.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_G1_MSM_MAX_INPUT_SIZE + 1], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("G1MSM input length too long")); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G2_MSM.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_G2_MSM_MAX_INPUT_SIZE + 1], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("G2MSM input length too long")); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_PAIRING.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_PAIRING_MAX_INPUT_SIZE + 1], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - 
internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_err()); - assert!(result.unwrap_err().to_string().contains("Pairing input length too long")); - } - - #[test] - fn test_precompiles_jovian() { - let mut evm = OpEvmFactory::default().create_evm( - EmptyDB::default(), - EvmEnv::new(CfgEnv::new_with_spec(OpSpecId::JOVIAN), BlockEnv::default()), - ); - let (precompiles, ctx) = (&mut evm.inner.0.precompiles, &mut evm.inner.0.ctx); - let jovian_precompile = precompiles.get(bn254_pair::JOVIAN.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bn254_pair::JOVIAN_MAX_INPUT_SIZE], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_ok()); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G1_MSM.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_G1_MSM_MAX_INPUT_SIZE], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_ok()); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G2_MSM.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_G2_MSM_MAX_INPUT_SIZE], - gas: u64::MAX, - caller: Address::ZERO, - value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_ok()); - - let jovian_precompile = precompiles.get(bls12_381::JOVIAN_PAIRING.address()).unwrap(); - let result = jovian_precompile.call(PrecompileInput { - data: &vec![0; bls12_381::JOVIAN_PAIRING_MAX_INPUT_SIZE], - gas: u64::MAX, - caller: Address::ZERO, - 
value: U256::ZERO, - is_static: false, - target_address: Address::ZERO, - bytecode_address: Address::ZERO, - internals: EvmInternals::from_context(ctx), - }); - - assert!(result.is_ok()); - } -} diff --git a/alloy-op-hardforks/Cargo.lock b/alloy-op-hardforks/Cargo.lock deleted file mode 100644 index ca8039a64e6..00000000000 --- a/alloy-op-hardforks/Cargo.lock +++ /dev/null @@ -1,632 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "alloy-chains" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3842d8c52fcd3378039f4703dba392dca8b546b1c8ed6183048f8dab95b2be78" -dependencies = [ - "alloy-primitives", - "num_enum", - "strum", -] - -[[package]] -name = "alloy-eip2124" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "crc", - "thiserror", -] - -[[package]] -name = "alloy-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83ba208044232d14d4adbfa77e57d6329f51bc1acc21f5667bb7db72d88a0831" -dependencies = [ - "alloy-chains", - "alloy-eip2124", - "alloy-primitives", - "auto_impl", - "dyn-clone", - "serde", -] - -[[package]] -name = "alloy-op-hardforks" -version = "0.4.7" -dependencies = [ - "alloy-chains", - "alloy-hardforks", - "alloy-primitives", - "auto_impl", - "serde", -] - -[[package]] -name = "alloy-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6a0fb18dd5fb43ec5f0f6a20be1ce0287c79825827de5744afaa6c957737c33" -dependencies = [ - "alloy-rlp", - "bytes", - "cfg-if", - "const-hex", - "derive_more", - "hashbrown", - "indexmap", - "itoa", - "paste", - "rand 0.9.2", - "ruint", - "rustc-hash", - "serde", - "tiny-keccak", -] - -[[package]] -name = 
"alloy-rlp" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" -dependencies = [ - "alloy-rlp-derive", - "bytes", -] - -[[package]] -name = "alloy-rlp-derive" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "auto_impl" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" - -[[package]] -name = "bytes" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" -dependencies = [ - "serde", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "const-hex" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" -dependencies = [ - "cfg-if", - "cpufeatures", - "proptest", - "serde_core", -] - -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "derive_more" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case", - "proc-macro2", - "quote", - "rustc_version", - "syn", - "unicode-xid", -] - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "foldhash" -version = "0.2.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "libc", - "r-efi", - "wasip2", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "foldhash", - "serde", - "serde_core", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "equivalent", - "hashbrown", - "serde", - "serde_core", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "libc" -version = "0.2.180" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" - -[[package]] -name = "libm" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = 
"num_enum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "proc-macro2" -version = "1.0.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proptest" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" -dependencies = [ - "bitflags", - "num-traits", - "rand 0.9.2", - "rand_chacha", - "rand_xorshift", - "unarray", -] - -[[package]] -name = "quote" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "rand" -version = "0.8.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "rand_core 0.6.4", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_core 0.9.5", - "serde", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" - -[[package]] -name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom", - "serde", -] - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core 0.9.5", -] - -[[package]] -name = "ruint" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" -dependencies = [ - "alloy-rlp", - "proptest", - "rand 0.8.5", - "rand 0.9.2", - "ruint-macro", - "serde_core", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - 
"proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "syn" -version = "2.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "valuable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" - -[[package]] -name = "wasip2" -version = "1.0.1+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wit-bindgen" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" - -[[package]] -name = "zerocopy" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" diff --git a/alloy-op-hardforks/Cargo.toml b/alloy-op-hardforks/Cargo.toml deleted file mode 100644 index 5bf52f5520f..00000000000 --- a/alloy-op-hardforks/Cargo.toml +++ /dev/null @@ -1,40 +0,0 @@ -[package] -name = "alloy-op-hardforks" -description = "Bindings for named OP hardforks" - -version = "0.4.7" -edition = "2024" -rust-version = "1.85" -authors = ["Alloy Contributors", "OpLabsPBC"] -license = "MIT OR Apache-2.0" -homepage = "https://github.com/ethereum-optimism/optimism" -repository = "https://github.com/ethereum-optimism/optimism" - -[package.lints.clippy] -all = { level = "warn", priority = -1 } -missing-const-for-fn = "warn" -use-self = "warn" -option-if-let-else = "warn" -redundant-clone = "warn" - -[package.metadata.docs.rs] -all-features = true 
-rustdoc-args = ["--cfg", "docsrs"] - -[dependencies] -alloy-hardforks = { version = "0.4.7", default-features = false } - -# ethereum -alloy-chains = { version = "0.2.0", default-features = false } -alloy-primitives = { version = "1.0.0", default-features = false } - -# misc -auto_impl = "1" -serde = { version = "1.0", features = ["derive", "alloc"], default-features = false, optional = true } - -[features] -default = [] -serde = [ - "dep:serde", - "alloy-hardforks/serde" -] diff --git a/alloy-op-hardforks/clippy.toml b/alloy-op-hardforks/clippy.toml deleted file mode 100644 index b339f7c33d7..00000000000 --- a/alloy-op-hardforks/clippy.toml +++ /dev/null @@ -1 +0,0 @@ -msrv = "1.85" diff --git a/alloy-op-hardforks/deny.toml b/alloy-op-hardforks/deny.toml deleted file mode 100644 index 6efd16e5583..00000000000 --- a/alloy-op-hardforks/deny.toml +++ /dev/null @@ -1,39 +0,0 @@ -[advisories] -version = 2 -yanked = "warn" -ignore = [ - # https://rustsec.org/advisories/RUSTSEC-2024-0436 - "RUSTSEC-2024-0436", -] - -[bans] -multiple-versions = "warn" -wildcards = "deny" -highlight = "all" - -[licenses] -version = 2 -confidence-threshold = 0.8 - -allow = [ - "MIT", - "Apache-2.0", - "BSD-3-Clause", - "Unicode-3.0", - "CC0-1.0", -] - -[[licenses.clarify]] -name = "ring" -expression = "LicenseRef-ring" -license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] - -[[licenses.clarify]] -name = "webpki" -expression = "LicenseRef-webpki" -license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] - -[sources] -unknown-registry = "deny" -unknown-git = "deny" -allow-git = [] diff --git a/alloy-op-hardforks/justfile b/alloy-op-hardforks/justfile deleted file mode 100644 index 8fc70442129..00000000000 --- a/alloy-op-hardforks/justfile +++ /dev/null @@ -1,12 +0,0 @@ -# default recipe to display help information -default: - @just --list - -# Run cargo tests -test *args='': - cargo test --workspace {{args}} - -# Check no_std compatibility -check-no-std: - rustup target add 
riscv32imac-unknown-none-elf - cargo check --target riscv32imac-unknown-none-elf --no-default-features diff --git a/alloy-op-hardforks/rustfmt.toml b/alloy-op-hardforks/rustfmt.toml deleted file mode 100644 index 3063df707a6..00000000000 --- a/alloy-op-hardforks/rustfmt.toml +++ /dev/null @@ -1,12 +0,0 @@ -reorder_imports = true -use_field_init_shorthand = true -use_small_heuristics = "Max" - -# Nightly -max_width = 100 -comment_width = 100 -imports_granularity = "Crate" -wrap_comments = true -format_code_in_doc_comments = true -doc_comment_code_block_width = 100 -format_macro_matchers = true diff --git a/alloy-op-hardforks/src/base/mainnet.rs b/alloy-op-hardforks/src/base/mainnet.rs deleted file mode 100644 index ff40a4ebf70..00000000000 --- a/alloy-op-hardforks/src/base/mainnet.rs +++ /dev/null @@ -1,22 +0,0 @@ -//! Base Mainnet hardfork starting points - -use crate::optimism::mainnet::*; - -/// Bedrock base hardfork activation block is 0. -pub const BASE_MAINNET_BEDROCK_BLOCK: u64 = 0; -/// Regolith base hardfork activation timestamp is 0. -pub const BASE_MAINNET_REGOLITH_TIMESTAMP: u64 = OP_MAINNET_REGOLITH_TIMESTAMP; -/// Canyon base hardfork activation timestamp is 1704992401. -pub const BASE_MAINNET_CANYON_TIMESTAMP: u64 = OP_MAINNET_CANYON_TIMESTAMP; -/// Ecotone base hardfork activation timestamp is 1710374401. -pub const BASE_MAINNET_ECOTONE_TIMESTAMP: u64 = OP_MAINNET_ECOTONE_TIMESTAMP; -/// Fjord base hardfork activation timestamp is 1720627201. -pub const BASE_MAINNET_FJORD_TIMESTAMP: u64 = OP_MAINNET_FJORD_TIMESTAMP; -/// Granite base hardfork activation timestamp is 1726070401. -pub const BASE_MAINNET_GRANITE_TIMESTAMP: u64 = OP_MAINNET_GRANITE_TIMESTAMP; -/// Holocene base hardfork activation timestamp is 1736445601. -pub const BASE_MAINNET_HOLOCENE_TIMESTAMP: u64 = OP_MAINNET_HOLOCENE_TIMESTAMP; -/// Isthmus base hardfork activation timestamp is 1746806401. 
-pub const BASE_MAINNET_ISTHMUS_TIMESTAMP: u64 = OP_MAINNET_ISTHMUS_TIMESTAMP; -/// Jovian base hardfork activation timestamp is 1_763_481_601. -pub const BASE_MAINNET_JOVIAN_TIMESTAMP: u64 = OP_MAINNET_JOVIAN_TIMESTAMP; diff --git a/alloy-op-hardforks/src/base/sepolia.rs b/alloy-op-hardforks/src/base/sepolia.rs deleted file mode 100644 index 5fb6a2472a7..00000000000 --- a/alloy-op-hardforks/src/base/sepolia.rs +++ /dev/null @@ -1,22 +0,0 @@ -//! Base Sepolia hardfork starting points - -use crate::optimism::sepolia::*; - -/// Bedrock base sepolia hardfork activation block is 0. -pub const BASE_SEPOLIA_BEDROCK_BLOCK: u64 = OP_SEPOLIA_BEDROCK_BLOCK; -/// Regolith base sepolia hardfork activation timestamp is 0. -pub const BASE_SEPOLIA_REGOLITH_TIMESTAMP: u64 = OP_SEPOLIA_REGOLITH_TIMESTAMP; -/// Canyon base sepolia hardfork activation timestamp is 1699981200. -pub const BASE_SEPOLIA_CANYON_TIMESTAMP: u64 = OP_SEPOLIA_CANYON_TIMESTAMP; -/// Ecotone base sepolia hardfork activation timestamp is 1708534800. -pub const BASE_SEPOLIA_ECOTONE_TIMESTAMP: u64 = OP_SEPOLIA_ECOTONE_TIMESTAMP; -/// Fjord base sepolia hardfork activation timestamp is 1716998400. -pub const BASE_SEPOLIA_FJORD_TIMESTAMP: u64 = OP_SEPOLIA_FJORD_TIMESTAMP; -/// Granite base sepolia hardfork activation timestamp is 1723478400. -pub const BASE_SEPOLIA_GRANITE_TIMESTAMP: u64 = OP_SEPOLIA_GRANITE_TIMESTAMP; -/// Holocene base sepolia hardfork activation timestamp is 1732633200. -pub const BASE_SEPOLIA_HOLOCENE_TIMESTAMP: u64 = OP_SEPOLIA_HOLOCENE_TIMESTAMP; -/// Isthmus base sepolia hardfork activation timestamp is 1744905600. -pub const BASE_SEPOLIA_ISTHMUS_TIMESTAMP: u64 = OP_SEPOLIA_ISTHMUS_TIMESTAMP; -/// Jovian base sepolia hardfork activation timestamp is 1_762_963_201. 
-pub const BASE_SEPOLIA_JOVIAN_TIMESTAMP: u64 = OP_SEPOLIA_JOVIAN_TIMESTAMP; diff --git a/alloy-op-hardforks/src/lib.rs b/alloy-op-hardforks/src/lib.rs deleted file mode 100644 index eddcfd4bcc7..00000000000 --- a/alloy-op-hardforks/src/lib.rs +++ /dev/null @@ -1,671 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/alloy.jpg", - html_favicon_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/favicon.ico" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![no_std] - -extern crate alloc; -use alloc::vec::Vec; -use alloy_chains::{Chain, NamedChain}; -use alloy_hardforks::{EthereumHardfork, hardfork}; -pub use alloy_hardforks::{EthereumHardforks, ForkCondition}; -use alloy_primitives::U256; -use core::ops::Index; - -pub mod optimism; -pub use optimism::{mainnet as op_mainnet, mainnet::*, sepolia as op_sepolia, sepolia::*}; - -pub mod base; -pub use base::{mainnet as base_mainnet, mainnet::*, sepolia as base_sepolia, sepolia::*}; - -hardfork!( - /// The name of an optimism hardfork. - /// - /// When building a list of hardforks for a chain, it's still expected to zip with - /// [`EthereumHardfork`]. - #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] - #[derive(Default)] - OpHardfork { - /// Bedrock: <https://blog.oplabs.co/introducing-optimism-bedrock>. - Bedrock, - /// Regolith: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#regolith>. - Regolith, - /// <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#canyon>. - Canyon, - /// Ecotone: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#ecotone>. 
- Ecotone, - /// Fjord: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#fjord> - Fjord, - /// Granite: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#granite> - Granite, - /// Holocene: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/superchain-upgrades.md#holocene> - Holocene, - /// Isthmus: <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/isthmus/overview.md> - #[default] - Isthmus, - /// Jovian: <https://github.com/ethereum-optimism/specs/tree/main/specs/protocol/jovian> - Jovian, - /// TODO: add interop hardfork overview when available - Interop, - } -); - -impl OpHardfork { - /// Reverse lookup to find the hardfork given a chain ID and block timestamp. - /// Returns the active hardfork at the given timestamp for the specified OP chain. - pub fn from_chain_and_timestamp(chain: Chain, timestamp: u64) -> Option<Self> { - let named = chain.named()?; - - match named { - NamedChain::Optimism => Some(match timestamp { - _i if timestamp < OP_MAINNET_CANYON_TIMESTAMP => Self::Regolith, - _i if timestamp < OP_MAINNET_ECOTONE_TIMESTAMP => Self::Canyon, - _i if timestamp < OP_MAINNET_FJORD_TIMESTAMP => Self::Ecotone, - _i if timestamp < OP_MAINNET_GRANITE_TIMESTAMP => Self::Fjord, - _i if timestamp < OP_MAINNET_HOLOCENE_TIMESTAMP => Self::Granite, - _i if timestamp < OP_MAINNET_ISTHMUS_TIMESTAMP => Self::Holocene, - _i if timestamp < OP_MAINNET_JOVIAN_TIMESTAMP => Self::Isthmus, - _ => Self::Jovian, - }), - NamedChain::OptimismSepolia => Some(match timestamp { - _i if timestamp < OP_SEPOLIA_CANYON_TIMESTAMP => Self::Regolith, - _i if timestamp < OP_SEPOLIA_ECOTONE_TIMESTAMP => Self::Canyon, - _i if timestamp < OP_SEPOLIA_FJORD_TIMESTAMP => Self::Ecotone, - _i if timestamp < OP_SEPOLIA_GRANITE_TIMESTAMP => Self::Fjord, - _i if timestamp < OP_SEPOLIA_HOLOCENE_TIMESTAMP => Self::Granite, - _i if timestamp < OP_SEPOLIA_ISTHMUS_TIMESTAMP => 
Self::Holocene, - _i if timestamp < OP_SEPOLIA_JOVIAN_TIMESTAMP => Self::Isthmus, - _ => Self::Jovian, - }), - NamedChain::Base => Some(match timestamp { - _i if timestamp < BASE_MAINNET_CANYON_TIMESTAMP => Self::Regolith, - _i if timestamp < BASE_MAINNET_ECOTONE_TIMESTAMP => Self::Canyon, - _i if timestamp < BASE_MAINNET_FJORD_TIMESTAMP => Self::Ecotone, - _i if timestamp < BASE_MAINNET_GRANITE_TIMESTAMP => Self::Fjord, - _i if timestamp < BASE_MAINNET_HOLOCENE_TIMESTAMP => Self::Granite, - _i if timestamp < BASE_MAINNET_ISTHMUS_TIMESTAMP => Self::Holocene, - _i if timestamp < BASE_MAINNET_JOVIAN_TIMESTAMP => Self::Isthmus, - _ => Self::Jovian, - }), - NamedChain::BaseSepolia => Some(match timestamp { - _i if timestamp < BASE_SEPOLIA_CANYON_TIMESTAMP => Self::Regolith, - _i if timestamp < BASE_SEPOLIA_ECOTONE_TIMESTAMP => Self::Canyon, - _i if timestamp < BASE_SEPOLIA_FJORD_TIMESTAMP => Self::Ecotone, - _i if timestamp < BASE_SEPOLIA_GRANITE_TIMESTAMP => Self::Fjord, - _i if timestamp < BASE_SEPOLIA_HOLOCENE_TIMESTAMP => Self::Granite, - _i if timestamp < BASE_SEPOLIA_ISTHMUS_TIMESTAMP => Self::Holocene, - _i if timestamp < BASE_SEPOLIA_JOVIAN_TIMESTAMP => Self::Isthmus, - _ => Self::Jovian, - }), - _ => None, - } - } - - /// Optimism mainnet list of hardforks. 
- pub const fn op_mainnet() -> [(Self, ForkCondition); 9] { - [ - (Self::Bedrock, ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK)), - (Self::Regolith, ForkCondition::Timestamp(OP_MAINNET_REGOLITH_TIMESTAMP)), - (Self::Canyon, ForkCondition::Timestamp(OP_MAINNET_CANYON_TIMESTAMP)), - (Self::Ecotone, ForkCondition::Timestamp(OP_MAINNET_ECOTONE_TIMESTAMP)), - (Self::Fjord, ForkCondition::Timestamp(OP_MAINNET_FJORD_TIMESTAMP)), - (Self::Granite, ForkCondition::Timestamp(OP_MAINNET_GRANITE_TIMESTAMP)), - (Self::Holocene, ForkCondition::Timestamp(OP_MAINNET_HOLOCENE_TIMESTAMP)), - (Self::Isthmus, ForkCondition::Timestamp(OP_MAINNET_ISTHMUS_TIMESTAMP)), - (Self::Jovian, ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)), - ] - } - - /// Optimism Sepolia list of hardforks. - pub const fn op_sepolia() -> [(Self, ForkCondition); 9] { - [ - (Self::Bedrock, ForkCondition::Block(OP_SEPOLIA_BEDROCK_BLOCK)), - (Self::Regolith, ForkCondition::Timestamp(OP_SEPOLIA_REGOLITH_TIMESTAMP)), - (Self::Canyon, ForkCondition::Timestamp(OP_SEPOLIA_CANYON_TIMESTAMP)), - (Self::Ecotone, ForkCondition::Timestamp(OP_SEPOLIA_ECOTONE_TIMESTAMP)), - (Self::Fjord, ForkCondition::Timestamp(OP_SEPOLIA_FJORD_TIMESTAMP)), - (Self::Granite, ForkCondition::Timestamp(OP_SEPOLIA_GRANITE_TIMESTAMP)), - (Self::Holocene, ForkCondition::Timestamp(OP_SEPOLIA_HOLOCENE_TIMESTAMP)), - (Self::Isthmus, ForkCondition::Timestamp(OP_SEPOLIA_ISTHMUS_TIMESTAMP)), - (Self::Jovian, ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)), - ] - } - - /// Base mainnet list of hardforks. 
- pub const fn base_mainnet() -> [(Self, ForkCondition); 9] { - [ - (Self::Bedrock, ForkCondition::Block(BASE_MAINNET_BEDROCK_BLOCK)), - (Self::Regolith, ForkCondition::Timestamp(BASE_MAINNET_REGOLITH_TIMESTAMP)), - (Self::Canyon, ForkCondition::Timestamp(BASE_MAINNET_CANYON_TIMESTAMP)), - (Self::Ecotone, ForkCondition::Timestamp(BASE_MAINNET_ECOTONE_TIMESTAMP)), - (Self::Fjord, ForkCondition::Timestamp(BASE_MAINNET_FJORD_TIMESTAMP)), - (Self::Granite, ForkCondition::Timestamp(BASE_MAINNET_GRANITE_TIMESTAMP)), - (Self::Holocene, ForkCondition::Timestamp(BASE_MAINNET_HOLOCENE_TIMESTAMP)), - (Self::Isthmus, ForkCondition::Timestamp(BASE_MAINNET_ISTHMUS_TIMESTAMP)), - (Self::Jovian, ForkCondition::Timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP)), - ] - } - - /// Base Sepolia list of hardforks. - pub const fn base_sepolia() -> [(Self, ForkCondition); 9] { - [ - (Self::Bedrock, ForkCondition::Block(BASE_SEPOLIA_BEDROCK_BLOCK)), - (Self::Regolith, ForkCondition::Timestamp(BASE_SEPOLIA_REGOLITH_TIMESTAMP)), - (Self::Canyon, ForkCondition::Timestamp(BASE_SEPOLIA_CANYON_TIMESTAMP)), - (Self::Ecotone, ForkCondition::Timestamp(BASE_SEPOLIA_ECOTONE_TIMESTAMP)), - (Self::Fjord, ForkCondition::Timestamp(BASE_SEPOLIA_FJORD_TIMESTAMP)), - (Self::Granite, ForkCondition::Timestamp(BASE_SEPOLIA_GRANITE_TIMESTAMP)), - (Self::Holocene, ForkCondition::Timestamp(BASE_SEPOLIA_HOLOCENE_TIMESTAMP)), - (Self::Isthmus, ForkCondition::Timestamp(BASE_SEPOLIA_ISTHMUS_TIMESTAMP)), - (Self::Jovian, ForkCondition::Timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP)), - ] - } - - /// Devnet list of hardforks. 
- pub const fn devnet() -> [(Self, ForkCondition); 9] { - [ - (Self::Bedrock, ForkCondition::ZERO_BLOCK), - (Self::Regolith, ForkCondition::ZERO_TIMESTAMP), - (Self::Canyon, ForkCondition::ZERO_TIMESTAMP), - (Self::Ecotone, ForkCondition::ZERO_TIMESTAMP), - (Self::Fjord, ForkCondition::ZERO_TIMESTAMP), - (Self::Granite, ForkCondition::ZERO_TIMESTAMP), - (Self::Holocene, ForkCondition::ZERO_TIMESTAMP), - (Self::Isthmus, ForkCondition::ZERO_TIMESTAMP), - (Self::Jovian, ForkCondition::Timestamp(1762185600)), - ] - } - - /// Returns index of `self` in sorted canonical array. - pub const fn idx(&self) -> usize { - *self as usize - } -} - -/// Extends [`EthereumHardforks`] with optimism helper methods. -#[auto_impl::auto_impl(&, Arc)] -pub trait OpHardforks: EthereumHardforks { - /// Retrieves [`ForkCondition`] by an [`OpHardfork`]. If `fork` is not present, returns - /// [`ForkCondition::Never`]. - fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition; - - /// Convenience method to check if [`OpHardfork::Bedrock`] is active at a given block - /// number. - fn is_bedrock_active_at_block(&self, block_number: u64) -> bool { - self.op_fork_activation(OpHardfork::Bedrock).active_at_block(block_number) - } - - /// Returns `true` if [`Regolith`](OpHardfork::Regolith) is active at given block - /// timestamp. - fn is_regolith_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Regolith).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Canyon`](OpHardfork::Canyon) is active at given block timestamp. - fn is_canyon_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Canyon).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Ecotone`](OpHardfork::Ecotone) is active at given block timestamp. 
- fn is_ecotone_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Ecotone).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Fjord`](OpHardfork::Fjord) is active at given block timestamp. - fn is_fjord_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Fjord).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Granite`](OpHardfork::Granite) is active at given block timestamp. - fn is_granite_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Granite).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Holocene`](OpHardfork::Holocene) is active at given block - /// timestamp. - fn is_holocene_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Holocene).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Isthmus`](OpHardfork::Isthmus) is active at given block - /// timestamp. - fn is_isthmus_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Isthmus).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Jovian`](OpHardfork::Jovian) is active at given block - /// timestamp. - fn is_jovian_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Jovian).active_at_timestamp(timestamp) - } - - /// Returns `true` if [`Interop`](OpHardfork::Interop) is active at given block - /// timestamp. - fn is_interop_active_at_timestamp(&self, timestamp: u64) -> bool { - self.op_fork_activation(OpHardfork::Interop).active_at_timestamp(timestamp) - } -} - -/// A type allowing to configure activation [`ForkCondition`]s for a given list of -/// [`OpHardfork`]s. -/// -/// Zips together [`EthereumHardfork`]s and [`OpHardfork`]s. Optimism hard forks, at least, -/// whenever Ethereum hard forks. 
When Ethereum hard forks, a new [`OpHardfork`] piggybacks on top -/// of the new [`EthereumHardfork`] to include (or to noop) the L1 changes on L2. -/// -/// Optimism can also hard fork independently of Ethereum. The relation between Ethereum and -/// Optimism hard forks is described by predicate [`EthereumHardfork`] `=>` [`OpHardfork`], since -/// an OP chain can undergo an [`OpHardfork`] without an [`EthereumHardfork`], but not the other -/// way around. -#[derive(Debug, Clone)] -pub struct OpChainHardforks { - /// Ordered list of OP hardfork activations. - forks: Vec<(OpHardfork, ForkCondition)>, -} - -impl OpChainHardforks { - /// Creates a new [`OpChainHardforks`] with the given list of forks. The input list is sorted - /// w.r.t. the hardcoded canonicity of [`OpHardfork`]s. - pub fn new(forks: impl IntoIterator<Item = (OpHardfork, ForkCondition)>) -> Self { - let mut forks = forks.into_iter().collect::<Vec<_>>(); - forks.sort(); - Self { forks } - } - - /// Creates a new [`OpChainHardforks`] with OP mainnet configuration. - pub fn op_mainnet() -> Self { - Self::new(OpHardfork::op_mainnet()) - } - - /// Creates a new [`OpChainHardforks`] with OP Sepolia configuration. - pub fn op_sepolia() -> Self { - Self::new(OpHardfork::op_sepolia()) - } - - /// Creates a new [`OpChainHardforks`] with Base mainnet configuration. - pub fn base_mainnet() -> Self { - Self::new(OpHardfork::base_mainnet()) - } - - /// Creates a new [`OpChainHardforks`] with Base Sepolia configuration. - pub fn base_sepolia() -> Self { - Self::new(OpHardfork::base_sepolia()) - } - - /// Creates a new [`OpChainHardforks`] with devnet configuration. - pub fn devnet() -> Self { - Self::new(OpHardfork::devnet()) - } - - /// Returns `true` if this is an OP mainnet instance. 
- pub fn is_op_mainnet(&self) -> bool { - self[OpHardfork::Bedrock] == ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK) - } -} - -impl EthereumHardforks for OpChainHardforks { - fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { - use EthereumHardfork::{Cancun, Prague, Shanghai}; - use OpHardfork::{Canyon, Ecotone, Isthmus}; - - if self.forks.is_empty() { - return ForkCondition::Never; - } - - let forks_len = self.forks.len(); - // check index out of bounds - match fork { - Shanghai if forks_len <= Canyon.idx() => ForkCondition::Never, - Cancun if forks_len <= Ecotone.idx() => ForkCondition::Never, - Prague if forks_len <= Isthmus.idx() => ForkCondition::Never, - _ => self[fork], - } - } -} - -impl OpHardforks for OpChainHardforks { - fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { - // check index out of bounds - if self.forks.len() <= fork.idx() { - return ForkCondition::Never; - } - self[fork] - } -} - -impl Index<OpHardfork> for OpChainHardforks { - type Output = ForkCondition; - - fn index(&self, hf: OpHardfork) -> &Self::Output { - use OpHardfork::*; - - match hf { - Bedrock => &self.forks[Bedrock.idx()].1, - Regolith => &self.forks[Regolith.idx()].1, - Canyon => &self.forks[Canyon.idx()].1, - Ecotone => &self.forks[Ecotone.idx()].1, - Fjord => &self.forks[Fjord.idx()].1, - Granite => &self.forks[Granite.idx()].1, - Holocene => &self.forks[Holocene.idx()].1, - Isthmus => &self.forks[Isthmus.idx()].1, - Jovian => &self.forks[Jovian.idx()].1, - Interop => &self.forks[Interop.idx()].1, - } - } -} - -impl Index<EthereumHardfork> for OpChainHardforks { - type Output = ForkCondition; - - fn index(&self, hf: EthereumHardfork) -> &Self::Output { - use EthereumHardfork::*; - use OpHardfork::*; - - match hf { - Frontier | Homestead | Tangerine | SpuriousDragon | Byzantium | Constantinople - | Petersburg | Istanbul | MuirGlacier => &ForkCondition::ZERO_BLOCK, - // Dao Hardfork is not needed for OpChainHardforks - Dao => 
&ForkCondition::Never, - Berlin if self.is_op_mainnet() => &ForkCondition::Block(OP_MAINNET_BERLIN_BLOCK), - Berlin => &ForkCondition::ZERO_BLOCK, - London | ArrowGlacier | GrayGlacier => &self[Bedrock], - Paris if self.is_op_mainnet() => &ForkCondition::TTD { - activation_block_number: OP_MAINNET_BEDROCK_BLOCK, - fork_block: Some(OP_MAINNET_BEDROCK_BLOCK), - total_difficulty: U256::ZERO, - }, - Paris => &ForkCondition::TTD { - activation_block_number: 0, - fork_block: Some(0), - total_difficulty: U256::ZERO, - }, - Shanghai => &self[Canyon], - Cancun => &self[Ecotone], - Prague => &self[Isthmus], - // Not activated for now - Osaka | Bpo1 | Bpo2 | Bpo3 | Bpo4 | Bpo5 | Amsterdam => &ForkCondition::Never, - _ => unreachable!(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use core::str::FromStr; - - extern crate alloc; - - #[test] - fn check_op_hardfork_from_str() { - let hardfork_str = [ - "beDrOck", "rEgOlITH", "cAnYoN", "eCoToNe", "FJorD", "GRaNiTe", "hOlOcEnE", "isthMUS", - "jOvIaN", "inTerOP", - ]; - let expected_hardforks = [ - OpHardfork::Bedrock, - OpHardfork::Regolith, - OpHardfork::Canyon, - OpHardfork::Ecotone, - OpHardfork::Fjord, - OpHardfork::Granite, - OpHardfork::Holocene, - OpHardfork::Isthmus, - OpHardfork::Jovian, - OpHardfork::Interop, - ]; - - let hardforks: alloc::vec::Vec<OpHardfork> = - hardfork_str.iter().map(|h| OpHardfork::from_str(h).unwrap()).collect(); - - assert_eq!(hardforks, expected_hardforks); - } - - #[test] - fn check_nonexistent_hardfork_from_str() { - assert!(OpHardfork::from_str("not a hardfork").is_err()); - } - - #[test] - fn op_mainnet_fork_conditions() { - use OpHardfork::*; - - let op_mainnet_forks = OpChainHardforks::op_mainnet(); - assert_eq!(op_mainnet_forks[Bedrock], ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK)); - assert_eq!( - op_mainnet_forks[Regolith], - ForkCondition::Timestamp(OP_MAINNET_REGOLITH_TIMESTAMP) - ); - assert_eq!(op_mainnet_forks[Canyon], 
ForkCondition::Timestamp(OP_MAINNET_CANYON_TIMESTAMP)); - assert_eq!( - op_mainnet_forks[Ecotone], - ForkCondition::Timestamp(OP_MAINNET_ECOTONE_TIMESTAMP) - ); - assert_eq!(op_mainnet_forks[Fjord], ForkCondition::Timestamp(OP_MAINNET_FJORD_TIMESTAMP)); - assert_eq!( - op_mainnet_forks[Granite], - ForkCondition::Timestamp(OP_MAINNET_GRANITE_TIMESTAMP) - ); - assert_eq!( - op_mainnet_forks[Holocene], - ForkCondition::Timestamp(OP_MAINNET_HOLOCENE_TIMESTAMP) - ); - assert_eq!( - op_mainnet_forks[Isthmus], - ForkCondition::Timestamp(OP_MAINNET_ISTHMUS_TIMESTAMP) - ); - assert_eq!(op_mainnet_forks[Jovian], ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)); - assert_eq!(op_mainnet_forks.op_fork_activation(Interop), ForkCondition::Never); - } - - #[test] - fn op_sepolia_fork_conditions() { - use OpHardfork::*; - - let op_sepolia_forks = OpChainHardforks::op_sepolia(); - assert_eq!(op_sepolia_forks[Bedrock], ForkCondition::Block(OP_SEPOLIA_BEDROCK_BLOCK)); - assert_eq!( - op_sepolia_forks[Regolith], - ForkCondition::Timestamp(OP_SEPOLIA_REGOLITH_TIMESTAMP) - ); - assert_eq!(op_sepolia_forks[Canyon], ForkCondition::Timestamp(OP_SEPOLIA_CANYON_TIMESTAMP)); - assert_eq!( - op_sepolia_forks[Ecotone], - ForkCondition::Timestamp(OP_SEPOLIA_ECOTONE_TIMESTAMP) - ); - assert_eq!(op_sepolia_forks[Fjord], ForkCondition::Timestamp(OP_SEPOLIA_FJORD_TIMESTAMP)); - assert_eq!( - op_sepolia_forks[Granite], - ForkCondition::Timestamp(OP_SEPOLIA_GRANITE_TIMESTAMP) - ); - assert_eq!( - op_sepolia_forks[Holocene], - ForkCondition::Timestamp(OP_SEPOLIA_HOLOCENE_TIMESTAMP) - ); - assert_eq!( - op_sepolia_forks[Isthmus], - ForkCondition::Timestamp(OP_SEPOLIA_ISTHMUS_TIMESTAMP) - ); - assert_eq!(op_sepolia_forks[Jovian], ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)); - assert_eq!(op_sepolia_forks.op_fork_activation(Interop), ForkCondition::Never); - } - - #[test] - fn base_mainnet_fork_conditions() { - use OpHardfork::*; - - let base_mainnet_forks = 
OpChainHardforks::base_mainnet(); - assert_eq!(base_mainnet_forks[Bedrock], ForkCondition::Block(BASE_MAINNET_BEDROCK_BLOCK)); - assert_eq!( - base_mainnet_forks[Regolith], - ForkCondition::Timestamp(BASE_MAINNET_REGOLITH_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Canyon], - ForkCondition::Timestamp(BASE_MAINNET_CANYON_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Ecotone], - ForkCondition::Timestamp(BASE_MAINNET_ECOTONE_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Fjord], - ForkCondition::Timestamp(BASE_MAINNET_FJORD_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Granite], - ForkCondition::Timestamp(BASE_MAINNET_GRANITE_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Holocene], - ForkCondition::Timestamp(BASE_MAINNET_HOLOCENE_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Isthmus], - ForkCondition::Timestamp(BASE_MAINNET_ISTHMUS_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Jovian], - ForkCondition::Timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP) - ); - assert_eq!( - base_mainnet_forks[Jovian], - ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP) - ); - assert_eq!(base_mainnet_forks.op_fork_activation(Interop), ForkCondition::Never); - } - - #[test] - fn base_sepolia_fork_conditions() { - use OpHardfork::*; - - let base_sepolia_forks = OpChainHardforks::base_sepolia(); - assert_eq!(base_sepolia_forks[Bedrock], ForkCondition::Block(BASE_SEPOLIA_BEDROCK_BLOCK)); - assert_eq!( - base_sepolia_forks[Regolith], - ForkCondition::Timestamp(BASE_SEPOLIA_REGOLITH_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Canyon], - ForkCondition::Timestamp(BASE_SEPOLIA_CANYON_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Ecotone], - ForkCondition::Timestamp(BASE_SEPOLIA_ECOTONE_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Fjord], - ForkCondition::Timestamp(BASE_SEPOLIA_FJORD_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Granite], - ForkCondition::Timestamp(BASE_SEPOLIA_GRANITE_TIMESTAMP) - ); - assert_eq!( - 
base_sepolia_forks[Holocene], - ForkCondition::Timestamp(BASE_SEPOLIA_HOLOCENE_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Isthmus], - ForkCondition::Timestamp(BASE_SEPOLIA_ISTHMUS_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks.op_fork_activation(Jovian), - ForkCondition::Timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP) - ); - assert_eq!( - base_sepolia_forks[Jovian], - ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP) - ); - assert_eq!(base_sepolia_forks.op_fork_activation(Interop), ForkCondition::Never); - } - - #[test] - fn is_jovian_active_at_timestamp() { - let op_mainnet_forks = OpChainHardforks::op_mainnet(); - assert!(op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)); - assert!(!op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP - 1)); - assert!(op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP + 1000)); - - let op_sepolia_forks = OpChainHardforks::op_sepolia(); - assert!(op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)); - assert!(!op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP - 1)); - assert!(op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP + 1000)); - - let base_mainnet_forks = OpChainHardforks::base_mainnet(); - assert!(base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP)); - assert!( - !base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP - 1) - ); - assert!( - base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP + 1000) - ); - - let base_sepolia_forks = OpChainHardforks::base_sepolia(); - assert!(base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP)); - assert!( - !base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP - 1) - ); - assert!( - base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP + 1000) - ); - } - - #[test] - fn 
test_reverse_lookup_op_chains() { - // Test key hardforks across all OP stack chains - let test_cases = [ - // (chain_id, timestamp, expected) - focusing on major transitions - // OP Mainnet - (Chain::optimism_mainnet(), OP_MAINNET_CANYON_TIMESTAMP, OpHardfork::Canyon), - (Chain::optimism_mainnet(), OP_MAINNET_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), - (Chain::optimism_mainnet(), OP_MAINNET_GRANITE_TIMESTAMP, OpHardfork::Granite), - (Chain::optimism_mainnet(), OP_MAINNET_CANYON_TIMESTAMP - 1, OpHardfork::Regolith), - (Chain::optimism_mainnet(), OP_MAINNET_ISTHMUS_TIMESTAMP + 1000, OpHardfork::Isthmus), - (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP, OpHardfork::Jovian), - (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP - 1, OpHardfork::Isthmus), - (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP + 1000, OpHardfork::Jovian), - // OP Sepolia - (Chain::optimism_sepolia(), OP_SEPOLIA_CANYON_TIMESTAMP, OpHardfork::Canyon), - (Chain::optimism_sepolia(), OP_SEPOLIA_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), - (Chain::optimism_sepolia(), OP_SEPOLIA_CANYON_TIMESTAMP - 1, OpHardfork::Regolith), - (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP, OpHardfork::Jovian), - (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP - 1, OpHardfork::Isthmus), - (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP + 1000, OpHardfork::Jovian), - // Base Mainnet - (Chain::base_mainnet(), BASE_MAINNET_CANYON_TIMESTAMP, OpHardfork::Canyon), - (Chain::base_mainnet(), BASE_MAINNET_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), - (Chain::base_mainnet(), BASE_MAINNET_JOVIAN_TIMESTAMP, OpHardfork::Jovian), - // Base Sepolia - (Chain::base_sepolia(), BASE_SEPOLIA_CANYON_TIMESTAMP, OpHardfork::Canyon), - (Chain::base_sepolia(), BASE_SEPOLIA_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), - (Chain::base_sepolia(), BASE_SEPOLIA_JOVIAN_TIMESTAMP, OpHardfork::Jovian), - ]; - - for (chain_id, timestamp, expected) in test_cases { - assert_eq!( - 
OpHardfork::from_chain_and_timestamp(chain_id, timestamp), - Some(expected), - "chain {chain_id} at timestamp {timestamp}" - ); - } - - // Edge cases - assert_eq!(OpHardfork::from_chain_and_timestamp(Chain::from_id(999999), 1000000), None); - } - - // https://github.com/alloy-rs/hardforks/issues/63 - #[test] - fn test_ethereum_fork_activation_consistency() { - let op_mainnet_forks = OpChainHardforks::op_mainnet(); - for ethereum_hardfork in EthereumHardfork::VARIANTS { - let _ = op_mainnet_forks.ethereum_fork_activation(*ethereum_hardfork); - } - for op_hardfork in OpHardfork::VARIANTS { - let _ = op_mainnet_forks.op_fork_activation(*op_hardfork); - } - } -} diff --git a/alloy-op-hardforks/src/optimism/mainnet.rs b/alloy-op-hardforks/src/optimism/mainnet.rs deleted file mode 100644 index 7f9bbe5e089..00000000000 --- a/alloy-op-hardforks/src/optimism/mainnet.rs +++ /dev/null @@ -1,24 +0,0 @@ -//! Optimism Mainnet hardfork starting points - -//------------------------ OVM chain ------------------------// -/// Berlin hardfork activation block is 3950000. -pub const OP_MAINNET_BERLIN_BLOCK: u64 = 3_950_000; -//------------------------ EVM chain ------------------------// -/// Bedrock hardfork activation block is 105235063. -pub const OP_MAINNET_BEDROCK_BLOCK: u64 = 105_235_063; -/// Regolith hardfork activation timestamp is 0. -pub const OP_MAINNET_REGOLITH_TIMESTAMP: u64 = 0; -/// Canyon hardfork activation timestamp is 1704992401. -pub const OP_MAINNET_CANYON_TIMESTAMP: u64 = 1_704_992_401; -/// Ecotone hardfork activation timestamp is 1710374401. -pub const OP_MAINNET_ECOTONE_TIMESTAMP: u64 = 1_710_374_401; -/// Fjord hardfork activation timestamp is 1720627201. -pub const OP_MAINNET_FJORD_TIMESTAMP: u64 = 1_720_627_201; -/// Granite hardfork activation timestamp is 1726070401. -pub const OP_MAINNET_GRANITE_TIMESTAMP: u64 = 1_726_070_401; -/// Holocene hardfork activation timestamp is 1736445601. 
-pub const OP_MAINNET_HOLOCENE_TIMESTAMP: u64 = 1_736_445_601; -/// Isthmus hardfork activation timestamp is 1746806401. -pub const OP_MAINNET_ISTHMUS_TIMESTAMP: u64 = 1_746_806_401; -/// Jovian hardfork activation timestamp is 1_764_691_201 # Tue 2 Dec 2025 16:00:01 UTC -pub const OP_MAINNET_JOVIAN_TIMESTAMP: u64 = 1_764_691_201; diff --git a/alloy-op-hardforks/src/optimism/sepolia.rs b/alloy-op-hardforks/src/optimism/sepolia.rs deleted file mode 100644 index 12739e15fc5..00000000000 --- a/alloy-op-hardforks/src/optimism/sepolia.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! Optimism Sepolia hardfork starting points - -/// Bedrock sepolia hardfork activation block is 0. -pub const OP_SEPOLIA_BEDROCK_BLOCK: u64 = 0; -/// Regolith sepolia hardfork activation timestamp is 0. -pub const OP_SEPOLIA_REGOLITH_TIMESTAMP: u64 = 0; -/// Canyon sepolia hardfork activation timestamp is 1699981200. -pub const OP_SEPOLIA_CANYON_TIMESTAMP: u64 = 1_699_981_200; -/// Ecotone sepolia hardfork activation timestamp is 1708534800. -pub const OP_SEPOLIA_ECOTONE_TIMESTAMP: u64 = 1_708_534_800; -/// Fjord sepolia hardfork activation timestamp is 1716998400. -pub const OP_SEPOLIA_FJORD_TIMESTAMP: u64 = 1_716_998_400; -/// Granite sepolia hardfork activation timestamp is 1723478400. -pub const OP_SEPOLIA_GRANITE_TIMESTAMP: u64 = 1_723_478_400; -/// Holocene sepolia hardfork activation timestamp is 1732633200. -pub const OP_SEPOLIA_HOLOCENE_TIMESTAMP: u64 = 1_732_633_200; -/// Isthmus sepolia hardfork activation timestamp is 1744905600. -pub const OP_SEPOLIA_ISTHMUS_TIMESTAMP: u64 = 1_744_905_600; -/// Jovian sepolia hardfork activation timestamp is 1_763_568_001 # Wed 19 Nov 2025 16:00:01 UTC. 
-pub const OP_SEPOLIA_JOVIAN_TIMESTAMP: u64 = 1_763_568_001; diff --git a/docker-bake.hcl b/docker-bake.hcl index 7a9454528b4..a1165148c23 100644 --- a/docker-bake.hcl +++ b/docker-bake.hcl @@ -344,8 +344,8 @@ target "op-rbuilder" { } target "kona-node" { - dockerfile = "docker/apps/kona_app_generic.dockerfile" - context = "kona" + dockerfile = "kona/docker/apps/kona_app_generic.dockerfile" + context = "rust" args = { REPO_LOCATION = "local" BIN_TARGET = "kona-node" @@ -354,3 +354,38 @@ target "kona-node" { platforms = split(",", PLATFORMS) tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/kona-node:${tag}"] } + +target "kona-host" { + dockerfile = "kona/docker/apps/kona_app_generic.dockerfile" + context = "rust" + args = { + REPO_LOCATION = "local" + BIN_TARGET = "kona-host" + BUILD_PROFILE = "release" + } + platforms = split(",", PLATFORMS) + tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/kona-host:${tag}"] +} + +target "kona-client" { + dockerfile = "kona/docker/apps/kona_app_generic.dockerfile" + context = "rust" + args = { + REPO_LOCATION = "local" + BIN_TARGET = "kona-client" + BUILD_PROFILE = "release" + } + platforms = split(",", PLATFORMS) + tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/kona-client:${tag}"] +} + +target "op-reth" { + dockerfile = "op-reth/DockerfileOp" + context = "rust" + args = { + BUILD_PROFILE = "maxperf" + FEATURES = "" + } + platforms = split(",", PLATFORMS) + tags = [for tag in split(",", IMAGE_TAGS) : "${REGISTRY}/${REPOSITORY}/op-reth:${tag}"] +} diff --git a/docs/README.md b/docs/README.md index cabb2506b08..94936558bbc 100644 --- a/docs/README.md +++ b/docs/README.md @@ -4,5 +4,7 @@ The `docs/` directory contains Optimism documentation closely tied to the implem The directory layout is divided into the following sub-directories. +- [`ai/`](./ai/): AI agent guidance for development in the monorepo. 
+- [`handbook/`](./handbook/): Development handbook and guidelines. - [`postmortems/`](./postmortems/): Timestamped post-mortem documents. -- [`security-reviews`](./security-reviews/): Audit summaries and other security review documents. +- [`security-reviews/`](./security-reviews/): Audit summaries and other security review documents. diff --git a/docs/ai/ci-ops.md b/docs/ai/ci-ops.md new file mode 100644 index 00000000000..b5fd1880f6d --- /dev/null +++ b/docs/ai/ci-ops.md @@ -0,0 +1,29 @@ +# CI/CD Operations + +This document provides guidance for AI agents working with CI/CD operational tasks in the Optimism monorepo. + +## TODO Checker Failures + +The repo runs a scheduled CircleCI job every 4 hours that validates TODO comments don't reference closed GitHub issues. When this job fails, issues need to be reopened. + +### Quick Instructions + +1. Find the failed TODO checker job in CircleCI (scheduled workflow named `scheduled-todo-issues`) +2. Identify which issues were closed but still have active TODOs in the codebase +3. For each issue: + - Determine who closed it (using GitHub timeline API) + - Read the actual TODO comment from the code + - Reopen with proper attribution and context + - Include file location and CircleCI job link + +### Detailed Workflow + +For complete step-by-step instructions with all commands and error handling, see: +**[.claude/skills/fix-todo/SKILL.md](../../.claude/skills/fix-todo/SKILL.md)** + +The skill includes: +- Detailed commands for querying CircleCI and GitHub APIs +- How to find who closed an issue +- Comment template for reopening +- Error handling for edge cases +- Output format and requirements diff --git a/justfile b/justfile index d0bc6b10bf1..12e2674c06c 100644 --- a/justfile +++ b/justfile @@ -1,6 +1,6 @@ # Build all Rust binaries (release) for sysgo tests. 
build-rust-release: - cd kona && cargo build --release --bin kona-node --bin kona-supervisor + cd rust && cargo build --release --bin kona-node --bin kona-supervisor cd op-rbuilder && cargo build --release -p op-rbuilder --bin op-rbuilder cd rollup-boost && cargo build --release -p rollup-boost --bin rollup-boost diff --git a/kona/.config/cannon_tag b/kona/.config/cannon_tag deleted file mode 100644 index d59e58dba3c..00000000000 --- a/kona/.config/cannon_tag +++ /dev/null @@ -1 +0,0 @@ -b9b1429b3c342a5a8c81cf4dc1d420b767474649 \ No newline at end of file diff --git a/kona/CONTRIBUTING.md b/kona/CONTRIBUTING.md deleted file mode 100644 index 4d8ccfa04c0..00000000000 --- a/kona/CONTRIBUTING.md +++ /dev/null @@ -1,31 +0,0 @@ -# Contributing - -Thank you for wanting to contribute! Before contributing to this repository, -please read through this document and discuss the change you wish to make via issue. - -## Dependencies - -Before working with this repository locally, you'll need to install the following dependencies. - -- [just][just] for our command-runner scripts. -- The [Rust toolchain][rust] - -## Pull Request Process - -1. Before anything, [create an issue][create-an-issue] to discuss the change you're - wanting to make, if it is significant or changes functionality. Feel free to skip this step for trivial changes. -1. Once your change is implemented, ensure that all checks are passing before creating a PR. The full CI pipeline can - be run locally via the `justfile`s in the repository. -1. Make sure to update any documentation that has gone stale as a result of the change, in the `README` files, the [book][book], - and in rustdoc comments. -1. Once you have sign-off from a maintainer, you may merge your pull request yourself if you have permissions to do so. - If not, the maintainer who approves your pull request will add it to the merge queue. 
- -<!-- Links --> - -[just]: https://github.com/casey/just -[rust]: https://rustup.rs/ - -[book]: https://rollup.yoga - -[create-an-issue]: https://github.com/op-rs/kona/issues/new diff --git a/kona/Cargo.lock b/kona/Cargo.lock deleted file mode 100644 index cb8e90dc42a..00000000000 --- a/kona/Cargo.lock +++ /dev/null @@ -1,11940 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 4 - -[[package]] -name = "addr2line" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" - -[[package]] -name = "aead" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" -dependencies = [ - "crypto-common", - "generic-array", -] - -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - -[[package]] -name = "aes-gcm" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" -dependencies = [ - "aead", - "aes", - "cipher", - "ctr", - "ghash", - "subtle", -] - -[[package]] -name = "ahash" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "getrandom 0.3.4", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "aho-corasick" 
-version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" -dependencies = [ - "memchr", -] - -[[package]] -name = "aligned-vec" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" -dependencies = [ - "equator", -] - -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "alloy-chains" -version = "0.2.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25db5bcdd086f0b1b9610140a12c59b757397be90bd130d8d836fc8da0815a34" -dependencies = [ - "alloy-primitives", - "arbitrary", - "num_enum", - "proptest", - "serde", - "strum", -] - -[[package]] -name = "alloy-consensus" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c3a590d13de3944675987394715f37537b50b856e3b23a0e66e97d963edbf38" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-trie", - "alloy-tx-macros", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "k256", - "once_cell", - "rand 0.8.5", - "secp256k1 0.30.0", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-consensus-any" -version = 
"1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f28f769d5ea999f0d8a105e434f483456a15b4e1fcb08edbbbe1650a497ff6d" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "serde", -] - -[[package]] -name = "alloy-eip2124" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "crc", - "rand 0.8.5", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-eip2930" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "rand 0.8.5", - "serde", -] - -[[package]] -name = "alloy-eip7702" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "k256", - "rand 0.8.5", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-eips" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09535cbc646b0e0c6fcc12b7597eaed12cf86dff4c4fba9507a61e71b94f30eb" -dependencies = [ - "alloy-eip2124", - "alloy-eip2930", - "alloy-eip7702", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "ethereum_ssz", - "ethereum_ssz_derive", - "serde", - "serde_with", - "sha2", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-evm" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6ccc4c702c840148af1ce784cc5c6ed9274a020ef32417c5b1dbeab8c317673" 
-dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-sol-types", - "auto_impl", - "derive_more", - "op-alloy", - "op-revm", - "revm", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-genesis" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1005520ccf89fa3d755e46c1d992a9e795466c2e7921be2145ef1f749c5727de" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "alloy-trie", - "borsh", - "serde", - "serde_with", -] - -[[package]] -name = "alloy-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83ba208044232d14d4adbfa77e57d6329f51bc1acc21f5667bb7db72d88a0831" -dependencies = [ - "alloy-chains", - "alloy-eip2124", - "alloy-primitives", - "auto_impl", - "dyn-clone", - "serde", -] - -[[package]] -name = "alloy-json-abi" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e3cf01219c966f95a460c95f1d4c30e12f6c18150c21a30b768af2a2a29142" -dependencies = [ - "alloy-primitives", - "alloy-sol-type-parser", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-json-rpc" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b626409c98ba43aaaa558361bca21440c88fd30df7542c7484b9c7a1489cdb" -dependencies = [ - "alloy-primitives", - "alloy-sol-types", - "http", - "serde", - "serde_json", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "alloy-network" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89924fdcfeee0e0fa42b1f10af42f92802b5d16be614a70897382565663bf7cf" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-json-rpc", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-any", - "alloy-rpc-types-eth", - 
"alloy-serde", - "alloy-signer", - "alloy-sol-types", - "async-trait", - "auto_impl", - "derive_more", - "futures-utils-wasm", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-network-primitives" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0dbe56ff50065713ff8635d8712a0895db3ad7f209db9793ad8fcb6b1734aa" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-op-evm" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f640da852f93ddaa3b9a602b7ca41d80e0023f77a67b68aaaf511c32f1fe0ce" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-hardforks", - "alloy-primitives", - "auto_impl", - "op-alloy", - "op-revm", - "revm", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-op-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6472c610150c4c4c15be9e1b964c9b78068f933bda25fb9cdf09b9ac2bb66f36" -dependencies = [ - "alloy-chains", - "alloy-hardforks", - "alloy-primitives", - "auto_impl", - "serde", -] - -[[package]] -name = "alloy-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6a0fb18dd5fb43ec5f0f6a20be1ce0287c79825827de5744afaa6c957737c33" -dependencies = [ - "alloy-rlp", - "arbitrary", - "bytes", - "cfg-if", - "const-hex", - "derive_more", - "foldhash 0.2.0", - "getrandom 0.3.4", - "hashbrown 0.16.1", - "indexmap 2.12.1", - "itoa", - "k256", - "keccak-asm", - "paste", - "proptest", - "proptest-derive 0.6.0", - "rand 0.9.2", - "rapidhash", - "ruint", - "rustc-hash 2.1.1", - "serde", - "sha3", - "tiny-keccak", -] - -[[package]] -name = "alloy-provider" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8b56f7a77513308a21a2ba0e9d57785a9d9d2d609e77f4e71a78a1192b83ff2d" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-network-primitives", - "alloy-primitives", - "alloy-pubsub", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-signer", - "alloy-sol-types", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", - "async-stream", - "async-trait", - "auto_impl", - "dashmap", - "either", - "futures", - "futures-utils-wasm", - "lru 0.16.3", - "parking_lot", - "pin-project", - "reqwest", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-pubsub" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94813abbd7baa30c700ea02e7f92319dbcb03bff77aeea92a3a9af7ba19c5c70" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-transport", - "auto_impl", - "bimap", - "futures", - "parking_lot", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower 0.5.3", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rlp" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" -dependencies = [ - "alloy-rlp-derive", - "arrayvec", - "bytes", -] - -[[package]] -name = "alloy-rlp-derive" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "alloy-rpc-client" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff01723afc25ec4c5b04de399155bef7b6a96dfde2475492b1b7b4e7a4f46445" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - 
"alloy-pubsub", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", - "futures", - "pin-project", - "reqwest", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower 0.5.3", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-rpc-types" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91bf006bb06b7d812591b6ac33395cb92f46c6a65cda11ee30b348338214f0f" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-debug", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-rpc-types-any" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212ca1c1dab27f531d3858f8b1a2d6bfb2da664be0c1083971078eb7b71abe4b" -dependencies = [ - "alloy-consensus-any", - "alloy-rpc-types-eth", - "alloy-serde", -] - -[[package]] -name = "alloy-rpc-types-beacon" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d92a9b4b268fac505ef7fb1dac9bb129d4fd7de7753f22a5b6e9f666f7f7de6" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "derive_more", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-rpc-types-debug" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab1ebed118b701c497e6541d2d11dfa6f3c6ae31a3c52999daa802fcdcc16b7" -dependencies = [ - "alloy-primitives", - "derive_more", - "serde", - "serde_with", -] - -[[package]] -name = "alloy-rpc-types-engine" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232f00fcbcd3ee3b9399b96223a8fc884d17742a70a44f9d7cef275f93e6e872" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "jsonwebtoken", - "rand 
0.8.5", - "serde", - "strum", -] - -[[package]] -name = "alloy-rpc-types-eth" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5715d0bf7efbd360873518bd9f6595762136b5327a9b759a8c42ccd9b5e44945" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-sol-types", - "arbitrary", - "itertools 0.14.0", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-serde" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ed8531cae8d21ee1c6571d0995f8c9f0652a6ef6452fde369283edea6ab7138" -dependencies = [ - "alloy-primitives", - "arbitrary", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-signer" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb10ccd49d0248df51063fce6b716f68a315dd912d55b32178c883fd48b4021d" -dependencies = [ - "alloy-primitives", - "async-trait", - "auto_impl", - "either", - "elliptic-curve", - "k256", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-signer-local" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4d992d44e6c414ece580294abbadb50e74cfd4eaa69787350a4dfd4b20eaa1b" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-signer", - "async-trait", - "k256", - "rand 0.8.5", - "thiserror 2.0.17", -] - -[[package]] -name = "alloy-sol-macro" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09eb18ce0df92b4277291bbaa0ed70545d78b02948df756bbd3d6214bf39a218" -dependencies = [ - "alloy-sol-macro-expander", - "alloy-sol-macro-input", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "alloy-sol-macro-expander" -version = "1.5.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "95d9fa2daf21f59aa546d549943f10b5cce1ae59986774019fbedae834ffe01b" -dependencies = [ - "alloy-sol-macro-input", - "const-hex", - "heck", - "indexmap 2.12.1", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.111", - "syn-solidity", - "tiny-keccak", -] - -[[package]] -name = "alloy-sol-macro-input" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9396007fe69c26ee118a19f4dee1f5d1d6be186ea75b3881adf16d87f8444686" -dependencies = [ - "const-hex", - "dunce", - "heck", - "macro-string", - "proc-macro2", - "quote", - "syn 2.0.111", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-type-parser" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af67a0b0dcebe14244fc92002cd8d96ecbf65db4639d479f5fcd5805755a4c27" -dependencies = [ - "serde", - "winnow", -] - -[[package]] -name = "alloy-sol-types" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09aeea64f09a7483bdcd4193634c7e5cf9fd7775ee767585270cd8ce2d69dc95" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "alloy-sol-macro", - "serde", -] - -[[package]] -name = "alloy-transport" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f50a9516736d22dd834cc2240e5bf264f338667cc1d9e514b55ec5a78b987ca" -dependencies = [ - "alloy-json-rpc", - "auto_impl", - "base64", - "derive_more", - "futures", - "futures-utils-wasm", - "parking_lot", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tower 0.5.3", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-transport-http" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a18b541a6197cf9a084481498a766fdf32fefda0c35ea6096df7d511025e9f1" -dependencies = [ - "alloy-json-rpc", - "alloy-rpc-types-engine", - "alloy-transport", - 
"http-body-util", - "hyper", - "hyper-tls", - "hyper-util", - "jsonwebtoken", - "reqwest", - "serde_json", - "tower 0.5.3", - "tracing", - "url", -] - -[[package]] -name = "alloy-transport-ipc" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8075911680ebc537578cacf9453464fd394822a0f68614884a9c63f9fbaf5e89" -dependencies = [ - "alloy-json-rpc", - "alloy-pubsub", - "alloy-transport", - "bytes", - "futures", - "interprocess", - "pin-project", - "serde", - "serde_json", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "alloy-transport-ws" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "921d37a57e2975e5215f7dd0f28873ed5407c7af630d4831a4b5c737de4b0b8b" -dependencies = [ - "alloy-pubsub", - "alloy-transport", - "futures", - "http", - "serde_json", - "tokio", - "tokio-tungstenite", - "tracing", - "ws_stream_wasm", -] - -[[package]] -name = "alloy-trie" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "arrayvec", - "derive_arbitrary", - "derive_more", - "nybbles", - "proptest", - "proptest-derive 0.5.1", - "serde", - "smallvec", - "tracing", -] - -[[package]] -name = "alloy-tx-macros" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2289a842d02fe63f8c466db964168bb2c7a9fdfb7b24816dbb17d45520575fb" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "ambassador" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e68de4cdc6006162265d0957edb4a860fe4e711b1dc17a5746fd95f952f08285" -dependencies = [ - "itertools 0.10.5", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "android_system_properties" -version = 
"0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - -[[package]] -name = "anstream" -version = "0.6.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" -dependencies = [ - "anstyle", - "anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" - -[[package]] -name = "anstyle-parse" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" -dependencies = [ - "anstyle", - "once_cell_polyfill", - "windows-sys 0.61.2", -] - -[[package]] -name = "anyhow" -version = "1.0.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" - -[[package]] -name = "arbitrary" -version = "1.4.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" -dependencies = [ - "derive_arbitrary", -] - -[[package]] -name = "arbtest" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a3be567977128c0f71ad1462d9624ccda712193d124e944252f0c5789a06d46" -dependencies = [ - "arbitrary", -] - -[[package]] -name = "ark-bls12-381" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3df4dcc01ff89867cd86b0da835f23c3f02738353aaee7dde7495af71363b8d5" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-bn254" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d69eab57e8d2663efa5c63135b2af4f396d66424f88954c21104125ab6b3e6bc" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-r1cs-std", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-ec" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce" -dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-poly", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", - "itertools 0.13.0", - "num-bigint", - "num-integer", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm 0.4.2", - "ark-ff-macros 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.4.1", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" -dependencies = [ - "ark-ff-asm 0.5.0", - "ark-ff-macros 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "educe", - "itertools 0.13.0", - "num-bigint", - "num-traits", - "paste", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" -dependencies = [ - "quote", - "syn 2.0.111", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ 
- "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "ark-poly" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27" -dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", -] - -[[package]] -name = "ark-r1cs-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941551ef1df4c7a401de7068758db6503598e6f01850bdb2cfdb614a1f9dbea1" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-relations", - "ark-std 0.5.0", - "educe", - "num-bigint", - "num-integer", - "num-traits", - "tracing", -] - -[[package]] -name = "ark-relations" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec46ddc93e7af44bcab5230937635b06fb5744464dd6a7e7b083e80ebd274384" -dependencies = [ - "ark-ff 0.5.0", - "ark-std 0.5.0", - "tracing", - "tracing-subscriber 0.2.25", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-std 0.3.0", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-std 0.4.0", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize" -version = "0.5.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" -dependencies = [ - "ark-serialize-derive", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] - -[[package]] -name = "asn1-rs" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" -dependencies = [ - "asn1-rs-derive", - "asn1-rs-impl", - "displaydoc", - "nom", - "num-traits", - 
"rusticata-macros", - "thiserror 2.0.17", - "time", -] - -[[package]] -name = "asn1-rs-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", - "synstructure", -] - -[[package]] -name = "asn1-rs-impl" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "asn1_der" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "155a5a185e42c6b77ac7b88a15143d930a9e9727a5b7b77eed417404ab15c247" - -[[package]] -name = "assert-json-diff" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" -dependencies = [ - "serde", - "serde_json", -] - -[[package]] -name = "async-channel" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" -dependencies = [ - "concurrent-queue", - "event-listener-strategy", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-compression" -version = "0.4.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07a926debf178f2d355197f9caddb08e54a9329d44748034bba349c5848cb519" -dependencies = [ - "compression-codecs", - "compression-core", - "futures-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "async-io" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" -dependencies = [ - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-io", - 
"futures-lite", - "parking", - "polling", - "rustix 1.1.3", - "slab", - "windows-sys 0.61.2", -] - -[[package]] -name = "async-lock" -version = "3.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" -dependencies = [ - "event-listener", - "event-listener-strategy", - "pin-project-lite", -] - -[[package]] -name = "async-object-pool" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1ac0219111eb7bb7cb76d4cf2cb50c598e7ae549091d3616f9e95442c18486f" -dependencies = [ - "async-lock", - "event-listener", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "async_io_stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" -dependencies = [ - "futures", - "pharos", - "rustc_version 0.4.1", -] - -[[package]] -name = "asynchronous-codec" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" -dependencies = [ - "bytes", - 
"futures-sink", - "futures-util", - "memchr", - "pin-project-lite", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "attohttpc" -version = "0.30.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e2cdb6d5ed835199484bb92bb8b3edd526effe995c61732580439c1a67e2e9" -dependencies = [ - "base64", - "http", - "log", - "url", -] - -[[package]] -name = "aurora-engine-modexp" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518bc5745a6264b5fd7b09dffb9667e400ee9e2bbe18555fac75e1fe9afa0df9" -dependencies = [ - "hex", - "num", -] - -[[package]] -name = "auto_impl" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "aws-lc-rs" -version = "1.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c953fe1ba023e6b7730c0d4b031d06f267f23a46167dcbd40316644b10a17ba" -dependencies = [ - "aws-lc-sys", - "zeroize", -] - -[[package]] -name = "aws-lc-sys" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbfd150b5dbdb988bcc8fb1fe787eb6b7ee6180ca24da683b61ea5405f3d43ff" -dependencies = [ - "bindgen 0.69.5", - "cc", - "cmake", - "dunce", - "fs_extra", -] - -[[package]] -name = "axum" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" -dependencies = [ - 
"async-trait", - "axum-core", - "bytes", - "futures-util", - "http", - "http-body", - "http-body-util", - "itoa", - "matchit", - "memchr", - "mime", - "percent-encoding", - "pin-project-lite", - "rustversion", - "serde", - "sync_wrapper", - "tower 0.5.3", - "tower-layer", - "tower-service", -] - -[[package]] -name = "axum-core" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" -dependencies = [ - "async-trait", - "bytes", - "futures-util", - "http", - "http-body", - "http-body-util", - "mime", - "pin-project-lite", - "rustversion", - "sync_wrapper", - "tower-layer", - "tower-service", -] - -[[package]] -name = "az" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b7e4c2464d97fe331d41de9d5db0def0a96f4d823b8b32a2efd503578988973" - -[[package]] -name = "backoff" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" -dependencies = [ - "getrandom 0.2.16", - "instant", - "rand 0.8.5", -] - -[[package]] -name = "backon" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" -dependencies = [ - "fastrand", - "tokio", -] - -[[package]] -name = "backtrace" -version = "0.3.76" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide 0.8.9", - "object", - "rustc-demangle", - "windows-link", -] - -[[package]] -name = "base-x" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" - -[[package]] -name = "base16ct" -version = "0.2.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base256emoji" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" -dependencies = [ - "const-str", - "match-lookup", -] - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" - -[[package]] -name = "bimap" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" - -[[package]] -name = "bincode" -version = "1.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "bincode_derive", - "serde", - "unty", -] - -[[package]] -name = "bincode_derive" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" -dependencies = [ - "virtue", -] - -[[package]] -name = "bindgen" -version = "0.69.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" -dependencies = [ - "bitflags 2.10.0", - "cexpr", - "clang-sys", - "itertools 
0.12.1", - "lazy_static", - "lazycell", - "log", - "prettyplease", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.111", - "which", -] - -[[package]] -name = "bindgen" -version = "0.70.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f49d8fed880d473ea71efb9bf597651e77201bdd4893efe54c9e5d65ae04ce6f" -dependencies = [ - "bitflags 2.10.0", - "cexpr", - "clang-sys", - "itertools 0.13.0", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "syn 2.0.111", -] - -[[package]] -name = "bindgen" -version = "0.72.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" -dependencies = [ - "bitflags 2.10.0", - "cexpr", - "clang-sys", - "itertools 0.13.0", - "proc-macro2", - "quote", - "regex", - "rustc-hash 2.1.1", - "shlex", - "syn 2.0.111", -] - -[[package]] -name = "bit-set" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" - -[[package]] -name = "bitcoin-io" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" - -[[package]] -name = "bitcoin_hashes" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" -dependencies = [ - "bitcoin-io", - "hex-conservative", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" -dependencies = [ - "serde_core", -] - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "serde", - "tap", - "wyz", -] - -[[package]] -name = "blake2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "blake3" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2468ef7d57b3fb7e16b576e8377cdbde2320c60e1491e961d11da40fc4f02a2d" -dependencies = [ - "arrayref", - "arrayvec", - "cc", - "cfg-if", - "constant_time_eq", - "cpufeatures", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blst" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45" -dependencies = [ - "cc", - "glob", - "threadpool", - "zeroize", -] - -[[package]] -name = "borsh" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" -dependencies = [ - "borsh-derive", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" -dependencies = [ - "once_cell", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "brotli" -version = "8.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "5.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "tinyvec", -] - -[[package]] -name = "buddy_system_allocator" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0108968a3a2dab95b089c0fc3f1afa7759aa5ebe6f1d86d206d6f7ba726eb" -dependencies = [ - "spin 0.9.8", -] - -[[package]] -name = "bumpalo" -version = "3.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" - -[[package]] -name = "byte-slice-cast" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" - -[[package]] -name = "bytecheck" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0caa33a2c0edca0419d15ac723dff03f1956f7978329b1e3b5fdaaaed9d3ca8b" -dependencies = [ - "bytecheck_derive", - "ptr_meta", - "rancor", - "simdutf8", -] - -[[package]] -name = "bytecheck_derive" -version = "0.8.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "89385e82b5d1821d2219e0b095efa2cc1f246cbf99080f3be46a1a85c0d392d9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "bytecount" -version = "0.6.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" - -[[package]] -name = "bytemuck" -version = "1.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" -dependencies = [ - "serde", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.13+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" -dependencies = [ - "cc", - "pkg-config", -] - -[[package]] -name = "c-kzg" -version = "2.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" -dependencies = [ - "arbitrary", - "blst", - "cc", - "glob", - "hex", - "libc", - "once_cell", - "serde", -] - -[[package]] -name = "camino" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609" -dependencies = [ - "serde_core", -] - -[[package]] -name = "cargo-platform" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e35af189006b9c0f00a064685c727031e3ed2d8020f7ba284d78cc2671bd36ea" -dependencies = [ - "serde", -] - -[[package]] -name = "cargo-platform" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" -dependencies = [ - "serde", - "serde_core", -] - -[[package]] -name = "cargo_metadata" -version = "0.19.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" -dependencies = [ - "camino", - "cargo-platform 0.1.9", - "semver 1.0.27", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "cargo_metadata" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9" -dependencies = [ - "camino", - "cargo-platform 0.3.2", - "semver 1.0.27", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" - -[[package]] -name = "cc" -version = "1.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" -dependencies = [ - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cesu8" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" - -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "chacha20" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - -[[package]] -name = "chacha20poly1305" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" -dependencies = [ - "aead", - "chacha20", - "cipher", - "poly1305", - "zeroize", -] - -[[package]] -name = "chrono" -version = "0.4.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" -dependencies = [ - "iana-time-zone", - "js-sys", - "num-traits", - "serde", - "wasm-bindgen", - "windows-link", -] - -[[package]] -name = "ciborium" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" -dependencies = [ - "ciborium-io", - "ciborium-ll", - "serde", -] - -[[package]] -name = "ciborium-io" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" - -[[package]] -name = "ciborium-ll" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" -dependencies = [ - "ciborium-io", - "half", -] - -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", - "zeroize", -] - -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - -[[package]] -name = "clap" -version = "4.5.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.49" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "clap_lex" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d" - -[[package]] -name = "cmake" -version = "0.1.54" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" -dependencies = [ - "cc", -] - -[[package]] -name = "colorchoice" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" - -[[package]] -name = "combine" -version = "4.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "compression-codecs" -version = "0.4.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34a3cbbb8b6eca96f3a5c4bf6938d5b27ced3675d69f95bb51948722870bc323" -dependencies = [ - "brotli", - "compression-core", - "flate2", - "memchr", - "zstd", - "zstd-safe", -] - -[[package]] -name = "compression-core" -version = "0.4.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" - -[[package]] -name = "concurrent-queue" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "const-hex" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" -dependencies = [ - "cfg-if", - "cpufeatures", - "proptest", - "serde_core", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "const-str" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" - -[[package]] -name = "const_format" -version = "0.2.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "constant_time_eq" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" - -[[package]] -name = "convert_case" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "core2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" -dependencies = [ - "memchr", -] - -[[package]] -name = "cpp_demangle" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2bb79cb74d735044c972aae58ed0aaa9a837e85b01106a54c39e42e97f62253" 
-dependencies = [ - "cfg-if", -] - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crc32fast" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "criterion" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" -dependencies = [ - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "is-terminal", - "itertools 0.10.5", - "num-traits", - "once_cell", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-plot" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" -dependencies = [ - "cast", - "itertools 0.10.5", -] - -[[package]] -name = "critical-section" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" - -[[package]] -name = "crossbeam-channel" -version = "0.5.15" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "typenum", -] - -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - -[[package]] -name = "curve25519-dalek" -version = "4.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest 0.10.7", - "fiat-crypto", - "rustc_version 0.4.1", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "darling" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" -dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling_core" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.111", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "serde", - "strsim", - "syn 2.0.111", -] - -[[package]] -name = "darling_macro" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" -dependencies = [ - "darling_core 0.20.11", - 
"quote", - "syn 2.0.111", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - -[[package]] -name = "data-encoding" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" - -[[package]] -name = "data-encoding-macro" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47ce6c96ea0102f01122a185683611bd5ac8d99e62bc59dd12e6bda344ee673d" -dependencies = [ - "data-encoding", - "data-encoding-macro-internal", -] - -[[package]] -name = "data-encoding-macro-internal" -version = "0.1.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" -dependencies = [ - "data-encoding", - "syn 2.0.111", -] - -[[package]] -name = "debugid" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef552e6f588e446098f6ba40d89ac146c8c7b64aade83c051ee00bb5d2bc18d" -dependencies = [ - "uuid", -] - -[[package]] -name = "delay_map" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" -dependencies = [ - "futures", - "tokio", - "tokio-util", -] - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "der-parser" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" -dependencies = [ - "asn1-rs", - "displaydoc", - "nom", - "num-bigint", - "num-traits", - "rusticata-macros", -] - -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", - "serde_core", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive-where" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "derive_arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "derive_builder" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" -dependencies = [ - "derive_builder_macro", -] - -[[package]] -name = "derive_builder_core" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" -dependencies = [ - "darling 0.20.11", - 
"proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "derive_builder_macro" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" -dependencies = [ - "derive_builder_core", - "syn 2.0.111", -] - -[[package]] -name = "derive_more" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case 0.10.0", - "proc-macro2", - "quote", - "rustc_version 0.4.1", - "syn 2.0.111", - "unicode-xid", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "dirs" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users", - "windows-sys 0.61.2", -] - -[[package]] -name = "discv5" -version = "0.10.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f170f4f6ed0e1df52bf43b403899f0081917ecf1500bfe312505cc3b515a8899" -dependencies = [ - "aes", - "aes-gcm", - "alloy-rlp", - "arrayvec", - "ctr", - "delay_map", - "enr", - "fnv", - "futures", - "hashlink", - "hex", - "hkdf", - "lazy_static", - "libp2p-identity", - "lru 0.12.5", - "more-asserts", - "multiaddr", - "parking_lot", - "rand 0.8.5", - "smallvec", - "socket2 0.5.10", - "tokio", - "tracing", - "uint 0.10.0", - "zeroize", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "doctest-file" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" - -[[package]] -name = "dotenvy" -version = "0.15.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" - -[[package]] -name = "downcast" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" - -[[package]] -name = "dtoa" -version = "1.0.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04" - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "ecdsa" -version = 
"0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest 0.10.7", - "elliptic-curve", - "rfc6979", - "serdect", - "signature", - "spki", -] - -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "pkcs8", - "serde", - "signature", -] - -[[package]] -name = "ed25519-dalek" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" -dependencies = [ - "curve25519-dalek", - "ed25519", - "rand_core 0.6.4", - "serde", - "sha2", - "subtle", - "zeroize", -] - -[[package]] -name = "educe" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -dependencies = [ - "serde", -] - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest 0.10.7", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - 
-[[package]] -name = "endian-type" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" - -[[package]] -name = "enr" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "851bd664a3d3a3c175cff92b2f0df02df3c541b4895d0ae307611827aae46152" -dependencies = [ - "alloy-rlp", - "base64", - "bytes", - "ed25519-dalek", - "hex", - "k256", - "log", - "rand 0.8.5", - "serde", - "sha3", - "zeroize", -] - -[[package]] -name = "enum-as-inner" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "enum-ordinalize" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" -dependencies = [ - "enum-ordinalize-derive", -] - -[[package]] -name = "enum-ordinalize-derive" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "equator" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" -dependencies = [ - "equator-macro", -] - -[[package]] -name = "equator-macro" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "ethereum_serde_utils" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" -dependencies = [ - "alloy-primitives", - "hex", - "serde", - "serde_derive", - "serde_json", -] - -[[package]] -name = "ethereum_ssz" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" -dependencies = [ - "alloy-primitives", - "ethereum_serde_utils", - "itertools 0.13.0", - "serde", - "serde_derive", - "smallvec", - "typenum", -] - -[[package]] -name = "ethereum_ssz_derive" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "event-listener" -version = "5.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - -[[package]] -name = "event-listener-strategy" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" -dependencies = [ - "event-listener", - "pin-project-lite", -] - -[[package]] -name = "example-discovery" -version = "0.0.0" -dependencies = [ - "anyhow", - "clap", - "discv5", - "kona-cli", - "kona-disc", - "tokio", - "tracing", - 
"tracing-subscriber 0.3.22", -] - -[[package]] -name = "example-gossip" -version = "0.0.0" -dependencies = [ - "anyhow", - "async-trait", - "clap", - "discv5", - "kona-cli", - "kona-disc", - "kona-node-service", - "kona-registry", - "libp2p", - "op-alloy-rpc-types-engine", - "tokio", - "tokio-util", - "tracing", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "execution-fixture" -version = "0.0.0" -dependencies = [ - "anyhow", - "clap", - "kona-cli", - "kona-executor", - "tokio", - "tracing", - "tracing-subscriber 0.3.22", - "url", -] - -[[package]] -name = "eyre" -version = "0.6.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fastrlp" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "fastrlp" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "ff" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "fiat-crypto" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" - -[[package]] -name = "findshlibs" 
-version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40b9e59cd0f7e0806cca4be089683ecb6434e602038df21fe6bf6711b2f07f64" -dependencies = [ - "cc", - "lazy_static", - "libc", - "winapi", -] - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "flate2" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb" -dependencies = [ - "crc32fast", - "miniz_oxide 0.8.9", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fragile" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" - -[[package]] -name = "fs_extra" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" - -[[package]] -name = "fsevent-sys" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" -dependencies = [ - "libc", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-bounded" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91f328e7fb845fc832912fb6a34f40cf6d1888c92f974d1893a54e97b5ff542e" -dependencies = [ - "futures-timer", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", - "num_cpus", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-lite" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" -dependencies = [ - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "futures-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" -dependencies = [ - "futures-io", - "rustls", - "rustls-pki-types", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-timer" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" -dependencies = [ 
- "gloo-timers", - "send_wrapper 0.4.0", -] - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "futures-utils-wasm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", - "zeroize", -] - -[[package]] -name = "getrandom" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi", - "wasip2", - "wasm-bindgen", -] - -[[package]] -name = "ghash" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" -dependencies = [ - "opaque-debug", - "polyval", -] - -[[package]] -name = "gimli" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" - -[[package]] -name = "git2" -version = "0.20.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2deb07a133b1520dc1a5690e9bd08950108873d7ed5de38dcc74d3b5ebffa110" -dependencies = [ - "bitflags 2.10.0", - "libc", - "libgit2-sys", - "log", - "url", -] - -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "gloo-net" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" -dependencies = [ - "futures-channel", - "futures-core", - "futures-sink", - "gloo-utils", - "http", - "js-sys", - "pin-project", - "serde", - "serde_json", - "thiserror 1.0.69", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "gloo-utils" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "gmp-mpfr-sys" -version = "1.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60f8970a75c006bb2f8ae79c6768a116dd215fa8346a87aed99bf9d82ca43394" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "group" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "h2" -version = "0.4.12" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http", - "indexmap 2.12.1", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "half" -version = "2.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" -dependencies = [ - "cfg-if", - "crunchy", - "zerocopy", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", -] - -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash 0.1.5", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash 0.2.0", - "serde", - "serde_core", -] - -[[package]] -name = "hashlink" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" -dependencies = [ - "hashbrown 0.14.5", -] - -[[package]] -name = "headers" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" -dependencies = [ - "base64", - "bytes", - "headers-core", - "http", - "httpdate", - "mime", - "sha1", -] - -[[package]] -name = "headers-core" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" -dependencies = [ - "http", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hex-conservative" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" -dependencies = [ - "arrayvec", -] - -[[package]] -name = "hex_fmt" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" - -[[package]] -name = "hickory-proto" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" -dependencies = [ - "async-trait", - "cfg-if", - "data-encoding", - "enum-as-inner", - "futures-channel", - "futures-io", - "futures-util", - "idna", - "ipnet", - "once_cell", - "rand 0.9.2", - "ring", - "socket2 0.5.10", - "thiserror 2.0.17", - "tinyvec", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "hickory-resolver" -version = "0.25.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a" -dependencies = [ - "cfg-if", - "futures-util", - "hickory-proto", - "ipconfig", - "moka", - "once_cell", - "parking_lot", - "rand 0.9.2", - "resolv-conf", - "smallvec", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "hkdf" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" -dependencies = [ - "hmac", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "home" -version = "0.5.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http", - "http-body", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "httpmock" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "511f510e9b1888d67f10bab4397f8b019d2a9b249a2c10acbce2d705b1b32e26" -dependencies = [ - "assert-json-diff", - "async-object-pool", - "async-trait", - "base64", - "bytes", - "crossbeam-utils", - "form_urlencoded", - "futures-timer", - "futures-util", - "headers", - "http", - "http-body-util", - "hyper", - "hyper-util", - "path-tree", - "regex", - "serde", - "serde_json", - "serde_regex", - "similar", - "stringmetrics", - "tabwriter", - "thiserror 2.0.17", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "hyper" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" -dependencies = [ - "atomic-waker", - "bytes", - "futures-channel", - "futures-core", - "h2", - "http", - "http-body", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "pin-utils", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http", - "hyper", - "hyper-util", - "log", - "rustls", - "rustls-native-certs", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", - "webpki-roots 1.0.4", -] - -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", 
- "tower-service", -] - -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes", - "http-body-util", - "hyper", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" -dependencies = [ - "base64", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "hyper", - "ipnet", - "libc", - "percent-encoding", - "pin-project-lite", - "socket2 0.6.1", - "system-configuration", - "tokio", - "tower-layer", - "tower-service", - "tracing", - "windows-registry", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core 0.62.2", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "icu_collections" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - 
"idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "if-addrs" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cabb0019d51a643781ff15c9c8a3e5dedc365c47211270f4e8f82812fedd8f0a" -dependencies = [ - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "if-watch" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf9d64cfcf380606e64f9a0bcf493616b65331199f984151a6fa11a7b3cde38" -dependencies = [ - "async-io", - "core-foundation 0.9.4", - "fnv", - "futures", - "if-addrs", - "ipnet", - "log", - "netlink-packet-core", - "netlink-packet-route", - "netlink-proto", - "netlink-sys", - "rtnetlink", - "system-configuration", - "tokio", - "windows 0.53.0", -] - -[[package]] -name = "igd-next" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516893339c97f6011282d5825ac94fc1c7aad5cad26bdc2d0cee068c0bf97f97" -dependencies = [ - "async-trait", - "attohttpc", - "bytes", - "futures", - "http", - "http-body-util", - "hyper", - "hyper-util", - "log", - "rand 0.9.2", - "tokio", - "url", - "xmltree", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "indenter" -version = 
"0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" -dependencies = [ - "arbitrary", - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "inferno" -version = "0.11.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232929e1d75fe899576a3d5c7416ad0d88dbfbb3c3d6aa00873a7408a50ddb88" -dependencies = [ - "ahash", - "indexmap 2.12.1", - "is-terminal", - "itoa", - "log", - "num-format", - "once_cell", - "quick-xml", - "rgb", - "str_stack", -] - -[[package]] -name = "inotify" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" -dependencies = [ - "bitflags 2.10.0", - "inotify-sys", - "libc", -] - -[[package]] -name = "inotify-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" -dependencies = [ - "libc", -] - -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "generic-array", -] - -[[package]] -name = "instant" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "interprocess" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" -dependencies = [ - "doctest-file", - "futures-core", - "libc", - "recvmsg", - "tokio", - "widestring", - "windows-sys 0.52.0", -] - -[[package]] -name = "ipconfig" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" -dependencies = [ - "socket2 0.5.10", - "widestring", - "windows-sys 0.48.0", - "winreg", -] - -[[package]] -name = "ipnet" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" -dependencies = [ - "serde", -] - -[[package]] -name = "iri-string" -version = "0.7.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f867b9d1d896b67beb18518eda36fdb77a32ea590de864f1325b294a6d14397" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "is-terminal" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.12.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" - -[[package]] -name = "jni" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" -dependencies = [ - "cesu8", - "cfg-if", - "combine", - "jni-sys", - "log", - "thiserror 1.0.69", - "walkdir", - "windows-sys 0.45.0", -] - -[[package]] -name = "jni-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" - -[[package]] -name = "jobserver" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" -dependencies = [ - "getrandom 0.3.4", - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "jsonrpsee" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-http-client", - "jsonrpsee-proc-macros", - "jsonrpsee-server", - "jsonrpsee-types", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", - "tokio", - "tracing", -] - -[[package]] -name = "jsonrpsee-client-transport" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" -dependencies = [ - "base64", - "futures-channel", - "futures-util", - "gloo-net", - "http", - "jsonrpsee-core", - "pin-project", - "rustls", - "rustls-pki-types", - "rustls-platform-verifier", - "soketto", - "thiserror 2.0.17", - "tokio", - "tokio-rustls", - "tokio-util", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-core" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" -dependencies = [ - "async-trait", - "bytes", - "futures-timer", - "futures-util", - "http", - "http-body", - "http-body-util", - "jsonrpsee-types", - "parking_lot", - "pin-project", - "rand 0.9.2", - "rustc-hash 2.1.1", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tokio-stream", - "tower 0.5.3", - "tracing", - "wasm-bindgen-futures", -] - -[[package]] -name = "jsonrpsee-http-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" -dependencies = [ - "base64", - "http-body", - "hyper", - "hyper-rustls", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "rustls", - "rustls-platform-verifier", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tower 0.5.3", - "url", -] - -[[package]] -name = "jsonrpsee-proc-macros" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "2da3f8ab5ce1bb124b6d082e62dffe997578ceaf0aeb9f3174a214589dc00f07" -dependencies = [ - "heck", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "jsonrpsee-server" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" -dependencies = [ - "futures-util", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "pin-project", - "route-recognizer", - "serde", - "serde_json", - "soketto", - "thiserror 2.0.17", - "tokio", - "tokio-stream", - "tokio-util", - "tower 0.5.3", - "tracing", -] - -[[package]] -name = "jsonrpsee-types" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" -dependencies = [ - "http", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "jsonrpsee-wasm-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower 0.5.3", -] - -[[package]] -name = "jsonrpsee-ws-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" -dependencies = [ - "http", - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower 0.5.3", - "url", -] - -[[package]] -name = "jsonwebtoken" -version = "9.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" -dependencies = [ - "base64", - "js-sys", - "pem", - "ring", - "serde", - "serde_json", - "simple_asn1", -] - -[[package]] -name = 
"k256" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "once_cell", - "serdect", - "sha2", - "signature", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "keccak-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" -dependencies = [ - "digest 0.10.7", - "sha3-asm", -] - -[[package]] -name = "kona-cli" -version = "0.3.2" -dependencies = [ - "alloy-chains", - "alloy-primitives", - "clap", - "kona-genesis", - "kona-registry", - "libc", - "libp2p", - "metrics-exporter-prometheus 0.18.1", - "metrics-process", - "rstest", - "serde", - "thiserror 2.0.17", - "tracing", - "tracing-appender", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "kona-client" -version = "1.0.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-evm", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "async-trait", - "cfg-if", - "kona-derive", - "kona-driver", - "kona-executor", - "kona-genesis", - "kona-interop", - "kona-mpt", - "kona-preimage", - "kona-proof", - "kona-proof-interop", - "kona-protocol", - "kona-registry", - "kona-std-fpvm", - "kona-std-fpvm-proc", - "lru 0.16.3", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "op-revm", - "revm", - "serde", - "serde_json", - "sha2", - "spin 0.10.0", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-comp" -version = "0.4.5" -dependencies = [ - "alloc-no-stdlib", - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - 
"alloy-rpc-types-engine", - "alloy-serde", - "alloy-sol-types", - "arbitrary", - "async-trait", - "brotli", - "kona-genesis", - "kona-protocol", - "miniz_oxide 0.9.0", - "op-alloy-consensus 0.23.1", - "proptest", - "rand 0.9.2", - "serde", - "serde_json", - "spin 0.10.0", - "thiserror 2.0.17", - "tracing", - "tracing-subscriber 0.3.22", - "unsigned-varint 0.8.0", -] - -[[package]] -name = "kona-derive" -version = "0.4.5" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "async-trait", - "kona-genesis", - "kona-hardforks", - "kona-macros", - "kona-protocol", - "kona-registry", - "metrics", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "proptest", - "serde", - "serde_json", - "spin 0.10.0", - "thiserror 2.0.17", - "tokio", - "tracing", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "kona-disc" -version = "0.1.2" -dependencies = [ - "alloy-rlp", - "backon", - "derive_more", - "discv5", - "kona-cli", - "kona-genesis", - "kona-macros", - "kona-peers", - "libp2p", - "metrics", - "rand 0.9.2", - "serde_json", - "tempfile", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-driver" -version = "0.4.0" -dependencies = [ - "alloy-consensus", - "alloy-evm", - "alloy-primitives", - "alloy-rlp", - "async-trait", - "kona-derive", - "kona-executor", - "kona-genesis", - "kona-protocol", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "spin 0.10.0", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "kona-engine" -version = "0.1.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-transport", - "alloy-transport-http", - "arbitrary", - "async-trait", - "derive_more", - "http", - "http-body-util", - "jsonrpsee-types", - "kona-genesis", - "kona-macros", - 
"kona-protocol", - "kona-registry", - "metrics", - "metrics-exporter-prometheus 0.18.1", - "op-alloy-consensus 0.23.1", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", - "parking_lot", - "rand 0.9.2", - "rollup-boost", - "rollup-boost-types", - "rstest", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tower 0.5.3", - "tracing", - "url", -] - -[[package]] -name = "kona-executor" -version = "0.4.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-evm", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-provider", - "alloy-rlp", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-transport", - "alloy-transport-http", - "alloy-trie", - "kona-genesis", - "kona-mpt", - "kona-protocol", - "kona-registry", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "op-revm", - "rand 0.9.2", - "revm", - "rocksdb", - "rstest", - "serde", - "serde_json", - "tempfile", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-genesis" -version = "0.4.5" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-sol-types", - "arbitrary", - "derive_more", - "op-revm", - "rand 0.9.2", - "serde", - "serde_json", - "serde_repr", - "tabled", - "thiserror 2.0.17", - "toml", -] - -[[package]] -name = "kona-gossip" -version = "0.1.2" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "arbitrary", - "derive_more", - "discv5", - "futures", - "ipnet", - "kona-disc", - "kona-genesis", - "kona-macros", - "kona-peers", - "lazy_static", - "libp2p", - "libp2p-identity", - "libp2p-stream", - "metrics", - "multihash", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "openssl", - "rand 0.9.2", - "serde", - "serde_json", - "serde_repr", - "snap", - 
"tempfile", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-hardforks" -version = "0.4.5" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "kona-protocol", - "op-alloy-consensus 0.23.1", - "op-revm", - "revm", -] - -[[package]] -name = "kona-host" -version = "1.0.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-op-evm", - "alloy-primitives", - "alloy-provider", - "alloy-rlp", - "alloy-rpc-client", - "alloy-rpc-types", - "alloy-rpc-types-beacon", - "alloy-serde", - "alloy-transport", - "alloy-transport-http", - "anyhow", - "ark-ff 0.5.0", - "async-trait", - "clap", - "kona-cli", - "kona-client", - "kona-derive", - "kona-driver", - "kona-executor", - "kona-genesis", - "kona-mpt", - "kona-preimage", - "kona-proof", - "kona-proof-interop", - "kona-protocol", - "kona-providers-alloy", - "kona-registry", - "kona-std-fpvm", - "op-alloy-network", - "op-alloy-rpc-types-engine", - "proptest", - "reqwest", - "revm", - "rocksdb", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tracing", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "kona-interop" -version = "0.4.5" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-sol-types", - "arbitrary", - "async-trait", - "derive_more", - "kona-genesis", - "kona-protocol", - "kona-registry", - "op-alloy-consensus 0.23.1", - "rand 0.9.2", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-macros" -version = "0.1.2" - -[[package]] -name = "kona-mpt" -version = "0.3.0" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-provider", - "alloy-rlp", - "alloy-rpc-types", - "alloy-transport-http", - "alloy-trie", - "criterion", - "op-alloy-rpc-types-engine", - "pprof", - "proptest", - "rand 0.9.2", - "reqwest", - "serde", - "thiserror 2.0.17", - "tokio", -] - -[[package]] -name = "kona-node" -version = "1.0.0-rc.1" -dependencies 
= [ - "alloy-chains", - "alloy-genesis", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-signer", - "alloy-signer-local", - "alloy-transport", - "alloy-transport-http", - "anyhow", - "backon", - "clap", - "derive_more", - "dirs", - "discv5", - "futures", - "http", - "jsonrpsee", - "kona-cli", - "kona-derive", - "kona-disc", - "kona-engine", - "kona-genesis", - "kona-gossip", - "kona-node-service", - "kona-peers", - "kona-protocol", - "kona-providers-alloy", - "kona-registry", - "kona-rpc", - "kona-sources", - "libp2p", - "metrics", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types-engine", - "reqwest", - "rollup-boost", - "rstest", - "serde_json", - "strum", - "tabled", - "tempfile", - "thiserror 2.0.17", - "tokio", - "tokio-stream", - "tokio-util", - "tracing", - "tracing-subscriber 0.3.22", - "url", - "vergen", - "vergen-git2", -] - -[[package]] -name = "kona-node-service" -version = "0.1.3" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-signer", - "alloy-signer-local", - "alloy-transport", - "alloy-transport-http", - "anyhow", - "arbitrary", - "async-stream", - "async-trait", - "backon", - "derive_more", - "discv5", - "futures", - "http", - "http-body-util", - "jsonrpsee", - "kona-derive", - "kona-disc", - "kona-engine", - "kona-genesis", - "kona-gossip", - "kona-macros", - "kona-peers", - "kona-protocol", - "kona-providers-alloy", - "kona-rpc", - "kona-sources", - "libp2p", - "libp2p-stream", - "metrics", - "mockall", - "op-alloy-consensus 0.23.1", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types-engine", - "rand 0.9.2", - "rollup-boost", - "rstest", - "strum", - "thiserror 2.0.17", - "tokio", - "tokio-stream", - "tokio-util", - "tower 0.5.3", - "tracing", - "url", -] - -[[package]] -name = "kona-peers" -version = "0.1.2" -dependencies = [ - 
"alloy-consensus", - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "arbtest", - "derive_more", - "dirs", - "discv5", - "kona-genesis", - "kona-registry", - "lazy_static", - "libp2p", - "libp2p-identity", - "multihash", - "secp256k1 0.31.1", - "serde", - "serde_json", - "tempfile", - "thiserror 2.0.17", - "tracing", - "unsigned-varint 0.8.0", - "url", -] - -[[package]] -name = "kona-preimage" -version = "0.3.0" -dependencies = [ - "alloy-primitives", - "async-channel", - "async-trait", - "rkyv", - "serde", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-proof" -version = "0.3.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-evm", - "alloy-primitives", - "alloy-rlp", - "alloy-trie", - "ark-bls12-381", - "ark-ff 0.5.0", - "async-trait", - "c-kzg", - "kona-derive", - "kona-driver", - "kona-executor", - "kona-genesis", - "kona-mpt", - "kona-preimage", - "kona-protocol", - "kona-registry", - "lazy_static", - "lru 0.16.3", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "op-revm", - "rand 0.9.2", - "rayon", - "rstest", - "serde", - "serde_json", - "spin 0.10.0", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-proof-interop" -version = "0.2.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-evm", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "arbitrary", - "async-trait", - "kona-executor", - "kona-genesis", - "kona-interop", - "kona-mpt", - "kona-preimage", - "kona-proof", - "kona-protocol", - "kona-registry", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types-engine", - "op-revm", - "rand 0.9.2", - "revm", - "serde", - "serde_json", - "spin 0.10.0", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "kona-protocol" -version = "0.4.5" -dependencies = [ - "alloc-no-stdlib", - "alloy-consensus", - "alloy-eips", - "alloy-hardforks", - "alloy-primitives", - "alloy-rlp", - 
"alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-sol-types", - "ambassador", - "arbitrary", - "async-trait", - "brotli", - "derive_more", - "kona-genesis", - "kona-registry", - "miniz_oxide 0.9.0", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", - "proptest", - "rand 0.9.2", - "rstest", - "serde", - "serde_json", - "spin 0.10.0", - "thiserror 2.0.17", - "tokio", - "tracing", - "tracing-subscriber 0.3.22", - "unsigned-varint 0.8.0", -] - -[[package]] -name = "kona-providers-alloy" -version = "0.3.3" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-client", - "alloy-rpc-types-beacon", - "alloy-rpc-types-engine", - "alloy-serde", - "alloy-transport", - "alloy-transport-http", - "async-trait", - "c-kzg", - "http-body-util", - "httpmock", - "kona-derive", - "kona-genesis", - "kona-macros", - "kona-protocol", - "lru 0.16.3", - "metrics", - "op-alloy-consensus 0.23.1", - "op-alloy-network", - "reqwest", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tower 0.5.3", -] - -[[package]] -name = "kona-providers-local" -version = "0.1.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "async-trait", - "kona-derive", - "kona-genesis", - "kona-macros", - "kona-protocol", - "lru 0.16.3", - "metrics", - "op-alloy-consensus 0.23.1", - "rstest", - "thiserror 2.0.17", - "tokio", -] - -[[package]] -name = "kona-registry" -version = "0.4.5" -dependencies = [ - "alloy-chains", - "alloy-eips", - "alloy-genesis", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "kona-genesis", - "lazy_static", - "serde", - "serde_json", - "tabled", - "toml", -] - -[[package]] -name = "kona-rpc" -version = "0.3.2" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "async-trait", - "backon", - "derive_more", - "getrandom 0.3.4", - "ipnet", - "jsonrpsee", - 
"kona-engine", - "kona-genesis", - "kona-gossip", - "kona-macros", - "kona-protocol", - "libp2p", - "metrics", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-jsonrpsee", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", - "rollup-boost", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-serde" -version = "0.2.2" -dependencies = [ - "alloy-primitives", - "serde", - "serde_json", - "toml", -] - -[[package]] -name = "kona-sources" -version = "0.1.2" -dependencies = [ - "alloy-primitives", - "alloy-rpc-client", - "alloy-signer", - "alloy-signer-local", - "alloy-transport", - "alloy-transport-http", - "derive_more", - "notify", - "op-alloy-rpc-types-engine", - "reqwest", - "rustls", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "kona-std-fpvm" -version = "0.2.0" -dependencies = [ - "async-trait", - "buddy_system_allocator", - "cfg-if", - "kona-preimage", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "kona-std-fpvm-proc" -version = "0.2.0" -dependencies = [ - "cfg-if", - "kona-std-fpvm", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "kona-supervisor" -version = "0.1.0" -dependencies = [ - "alloy-network", - "alloy-provider", - "alloy-rpc-types-engine", - "anyhow", - "clap", - "glob", - "kona-cli", - "kona-genesis", - "kona-interop", - "kona-protocol", - "kona-registry", - "kona-supervisor-core", - "kona-supervisor-service", - "metrics", - "reqwest", - "serde", - "serde_json", - "tempfile", - "tokio", - "tracing", - "tracing-subscriber 0.3.22", - "vergen", - "vergen-git2", -] - -[[package]] -name = "kona-supervisor-core" -version = "0.1.0" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-transport", - "async-trait", - "auto_impl", - "derive_more", - "futures", - 
"jsonrpsee", - "kona-genesis", - "kona-interop", - "kona-protocol", - "kona-supervisor-metrics", - "kona-supervisor-rpc", - "kona-supervisor-storage", - "kona-supervisor-types", - "metrics", - "mockall", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types", - "reqwest", - "serde", - "serde_json", - "tempfile", - "thiserror 2.0.17", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "kona-supervisor-metrics" -version = "0.1.0" - -[[package]] -name = "kona-supervisor-rpc" -version = "0.1.1" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-engine", - "alloy-serde", - "async-trait", - "derive_more", - "jsonrpsee", - "kona-interop", - "kona-protocol", - "kona-supervisor-types", - "op-alloy-consensus 0.23.1", - "serde", - "serde_json", - "thiserror 2.0.17", - "tokio", -] - -[[package]] -name = "kona-supervisor-service" -version = "0.1.0" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-client", - "alloy-rpc-types-eth", - "anyhow", - "async-trait", - "derive_more", - "futures", - "jsonrpsee", - "kona-genesis", - "kona-interop", - "kona-protocol", - "kona-supervisor-core", - "kona-supervisor-metrics", - "kona-supervisor-rpc", - "kona-supervisor-storage", - "kona-supervisor-types", - "mockall", - "thiserror 2.0.17", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "kona-supervisor-storage" -version = "0.1.0" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "bytes", - "derive_more", - "eyre", - "kona-cli", - "kona-interop", - "kona-protocol", - "kona-supervisor-metrics", - "kona-supervisor-types", - "metrics", - "modular-bitfield", - "op-alloy-consensus 0.23.1", - "reth-codecs", - "reth-db", - "reth-db-api", - "serde", - "tempfile", - "test-fuzz", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "kona-supervisor-types" -version = "0.1.1" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-serde", - 
"derive_more", - "kona-interop", - "kona-protocol", - "op-alloy-consensus 0.23.1", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "kqueue" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" -dependencies = [ - "kqueue-sys", - "libc", -] - -[[package]] -name = "kqueue-sys" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" -dependencies = [ - "bitflags 1.3.2", - "libc", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" -dependencies = [ - "spin 0.9.8", -] - -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - -[[package]] -name = "libc" -version = "0.2.178" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091" - -[[package]] -name = "libgit2-sys" -version = "0.18.2+1.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c42fe03df2bd3c53a3a9c7317ad91d80c81cd1fb0caec8d7cc4cd2bfa10c222" -dependencies = [ - "cc", - "libc", - "libz-sys", - "pkg-config", -] - -[[package]] -name = "libloading" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" -dependencies = [ - "cfg-if", - "windows-link", -] - -[[package]] -name = "libm" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" - -[[package]] 
-name = "libp2p" -version = "0.56.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce71348bf5838e46449ae240631117b487073d5f347c06d434caddcb91dceb5a" -dependencies = [ - "bytes", - "either", - "futures", - "futures-timer", - "getrandom 0.2.16", - "libp2p-allow-block-list", - "libp2p-connection-limits", - "libp2p-core", - "libp2p-dns", - "libp2p-gossipsub", - "libp2p-identify", - "libp2p-identity", - "libp2p-mdns", - "libp2p-metrics", - "libp2p-noise", - "libp2p-ping", - "libp2p-quic", - "libp2p-swarm", - "libp2p-tcp", - "libp2p-upnp", - "libp2p-yamux", - "multiaddr", - "pin-project", - "rw-stream-sink", - "thiserror 2.0.17", -] - -[[package]] -name = "libp2p-allow-block-list" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d16ccf824ee859ca83df301e1c0205270206223fd4b1f2e512a693e1912a8f4a" -dependencies = [ - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", -] - -[[package]] -name = "libp2p-connection-limits" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18b8b607cf3bfa2f8c57db9c7d8569a315d5cc0a282e6bfd5ebfc0a9840b2a0" -dependencies = [ - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", -] - -[[package]] -name = "libp2p-core" -version = "0.43.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d28e2d2def7c344170f5c6450c0dbe3dfef655610dbfde2f6ac28a527abbe36" -dependencies = [ - "either", - "fnv", - "futures", - "futures-timer", - "libp2p-identity", - "multiaddr", - "multihash", - "multistream-select", - "parking_lot", - "pin-project", - "quick-protobuf", - "rand 0.8.5", - "rw-stream-sink", - "thiserror 2.0.17", - "tracing", - "unsigned-varint 0.8.0", - "web-time", -] - -[[package]] -name = "libp2p-dns" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b770c1c8476736ca98c578cba4b505104ff8e842c2876b528925f9766379f9a" -dependencies = [ - 
"async-trait", - "futures", - "hickory-resolver", - "libp2p-core", - "libp2p-identity", - "parking_lot", - "smallvec", - "tracing", -] - -[[package]] -name = "libp2p-gossipsub" -version = "0.49.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f58e37d8d6848e5c4c9e3c35c6f61133235bff2960c9c00a663b0849301221" -dependencies = [ - "async-channel", - "asynchronous-codec", - "base64", - "byteorder", - "bytes", - "either", - "fnv", - "futures", - "futures-timer", - "getrandom 0.2.16", - "hashlink", - "hex_fmt", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", - "quick-protobuf", - "quick-protobuf-codec", - "rand 0.8.5", - "regex", - "sha2", - "tracing", - "web-time", -] - -[[package]] -name = "libp2p-identify" -version = "0.47.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ab792a8b68fdef443a62155b01970c81c3aadab5e659621b063ef252a8e65e8" -dependencies = [ - "asynchronous-codec", - "either", - "futures", - "futures-bounded", - "futures-timer", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", - "quick-protobuf", - "quick-protobuf-codec", - "smallvec", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "libp2p-identity" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c7892c221730ba55f7196e98b0b8ba5e04b4155651736036628e9f73ed6fc3" -dependencies = [ - "asn1_der", - "bs58", - "ed25519-dalek", - "hkdf", - "k256", - "multihash", - "quick-protobuf", - "rand 0.8.5", - "sha2", - "thiserror 2.0.17", - "tracing", - "zeroize", -] - -[[package]] -name = "libp2p-mdns" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66872d0f1ffcded2788683f76931be1c52e27f343edb93bc6d0bcd8887be443" -dependencies = [ - "futures", - "hickory-proto", - "if-watch", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", - "rand 0.8.5", - "smallvec", - "socket2 0.5.10", - "tokio", - "tracing", -] - -[[package]] -name = 
"libp2p-metrics" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "805a555148522cb3414493a5153451910cb1a146c53ffbf4385708349baf62b7" -dependencies = [ - "futures", - "libp2p-core", - "libp2p-gossipsub", - "libp2p-identify", - "libp2p-identity", - "libp2p-ping", - "libp2p-swarm", - "pin-project", - "prometheus-client", - "web-time", -] - -[[package]] -name = "libp2p-noise" -version = "0.46.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc73eacbe6462a0eb92a6527cac6e63f02026e5407f8831bde8293f19217bfbf" -dependencies = [ - "asynchronous-codec", - "bytes", - "futures", - "libp2p-core", - "libp2p-identity", - "multiaddr", - "multihash", - "quick-protobuf", - "rand 0.8.5", - "snow", - "static_assertions", - "thiserror 2.0.17", - "tracing", - "x25519-dalek", - "zeroize", -] - -[[package]] -name = "libp2p-ping" -version = "0.47.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74bb7fcdfd9fead4144a3859da0b49576f171a8c8c7c0bfc7c541921d25e60d3" -dependencies = [ - "futures", - "futures-timer", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", - "rand 0.8.5", - "tracing", - "web-time", -] - -[[package]] -name = "libp2p-quic" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dc448b2de9f4745784e3751fe8bc6c473d01b8317edd5ababcb0dec803d843f" -dependencies = [ - "futures", - "futures-timer", - "if-watch", - "libp2p-core", - "libp2p-identity", - "libp2p-tls", - "quinn", - "rand 0.8.5", - "ring", - "rustls", - "socket2 0.5.10", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "libp2p-stream" -version = "0.4.0-alpha" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6bd8025c80205ec2810cfb28b02f362ab48a01bee32c50ab5f12761e033464" -dependencies = [ - "futures", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm", - "rand 0.8.5", - "tracing", -] - -[[package]] 
-name = "libp2p-swarm" -version = "0.47.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6aa762e5215919a34e31c35d4b18bf2e18566ecab7f8a3d39535f4a3068f8b62" -dependencies = [ - "either", - "fnv", - "futures", - "futures-timer", - "libp2p-core", - "libp2p-identity", - "libp2p-swarm-derive", - "lru 0.12.5", - "multistream-select", - "rand 0.8.5", - "smallvec", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "libp2p-swarm-derive" -version = "0.35.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd297cf53f0cb3dee4d2620bb319ae47ef27c702684309f682bdb7e55a18ae9c" -dependencies = [ - "heck", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "libp2p-tcp" -version = "0.44.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65b4e030c52c46c8d01559b2b8ca9b7c4185f10576016853129ca1fe5cd1a644" -dependencies = [ - "futures", - "futures-timer", - "if-watch", - "libc", - "libp2p-core", - "socket2 0.5.10", - "tokio", - "tracing", -] - -[[package]] -name = "libp2p-tls" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ff65a82e35375cbc31ebb99cacbbf28cb6c4fefe26bf13756ddcf708d40080" -dependencies = [ - "futures", - "futures-rustls", - "libp2p-core", - "libp2p-identity", - "rcgen", - "ring", - "rustls", - "rustls-webpki", - "thiserror 2.0.17", - "x509-parser", - "yasna", -] - -[[package]] -name = "libp2p-upnp" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4757e65fe69399c1a243bbb90ec1ae5a2114b907467bf09f3575e899815bb8d3" -dependencies = [ - "futures", - "futures-timer", - "igd-next", - "libp2p-core", - "libp2p-swarm", - "tokio", - "tracing", -] - -[[package]] -name = "libp2p-yamux" -version = "0.47.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f15df094914eb4af272acf9adaa9e287baa269943f32ea348ba29cfb9bfc60d8" -dependencies = [ - "either", - 
"futures", - "libp2p-core", - "thiserror 2.0.17", - "tracing", - "yamux 0.12.1", - "yamux 0.13.8", -] - -[[package]] -name = "libproc" -version = "0.14.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a54ad7278b8bc5301d5ffd2a94251c004feb971feba96c971ea4063645990757" -dependencies = [ - "bindgen 0.72.1", - "errno", - "libc", -] - -[[package]] -name = "libredox" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb" -dependencies = [ - "bitflags 2.10.0", - "libc", -] - -[[package]] -name = "librocksdb-sys" -version = "0.17.3+10.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef2a00ee60fe526157c9023edab23943fae1ce2ab6f4abb2a807c1746835de9" -dependencies = [ - "bindgen 0.72.1", - "bzip2-sys", - "cc", - "libc", - "libz-sys", -] - -[[package]] -name = "libz-sys" -version = "1.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = 
[ - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "lru" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" -dependencies = [ - "hashbrown 0.15.5", -] - -[[package]] -name = "lru" -version = "0.16.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" -dependencies = [ - "hashbrown 0.16.1", -] - -[[package]] -name = "lru-slab" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - -[[package]] -name = "lz4_flex" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a" - -[[package]] -name = "mach2" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea" -dependencies = [ - "libc", -] - -[[package]] -name = "macro-string" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "match-lookup" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1265724d8cb29dbbc2b0f06fffb8bf1a8c0cf73a78eede9ba73a4a66c52a981e" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "matchers" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" -dependencies = [ - "regex-automata", -] - -[[package]] -name = "matchit" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" - -[[package]] -name = "memchr" -version = "2.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" - -[[package]] -name = "memmap2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" -dependencies = [ - "libc", -] - -[[package]] -name = "metrics" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5312e9ba3771cfa961b585728215e3d972c950a3eed9252aa093d6301277e8" -dependencies = [ - "ahash", - "portable-atomic", -] - -[[package]] -name = "metrics-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3dbdd96ed57d565ec744cba02862d707acf373c5772d152abae6ec5c4e24f6c" -dependencies = [ - "proc-macro2", - "quote", - "regex", - "syn 2.0.111", -] - -[[package]] -name = "metrics-exporter-prometheus" -version = "0.16.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd7399781913e5393588a8d8c6a2867bf85fb38eaf2502fdce465aad2dc6f034" -dependencies = [ - "base64", - "http-body-util", - "hyper", - "hyper-rustls", - "hyper-util", - "indexmap 2.12.1", - "ipnet", - "metrics", - "metrics-util 0.19.1", - "quanta", - "thiserror 1.0.69", - "tokio", - "tracing", -] - -[[package]] -name = "metrics-exporter-prometheus" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3589659543c04c7dc5526ec858591015b87cd8746583b51b48ef4353f99dbcda" -dependencies = [ - "base64", - "http-body-util", - "hyper", - "hyper-util", 
- "indexmap 2.12.1", - "ipnet", - "metrics", - "metrics-util 0.20.1", - "quanta", - "thiserror 2.0.17", - "tokio", - "tracing", -] - -[[package]] -name = "metrics-process" -version = "2.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f615e08e049bd14a44c4425415782efb9bcd479fc1e19ddeb971509074c060d0" -dependencies = [ - "libc", - "libproc", - "mach2", - "metrics", - "once_cell", - "procfs", - "rlimit", - "windows 0.62.2", -] - -[[package]] -name = "metrics-util" -version = "0.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8496cc523d1f94c1385dd8f0f0c2c480b2b8aeccb5b7e4485ad6365523ae376" -dependencies = [ - "aho-corasick", - "crossbeam-epoch", - "crossbeam-utils", - "hashbrown 0.15.5", - "indexmap 2.12.1", - "metrics", - "ordered-float", - "quanta", - "radix_trie", - "rand 0.9.2", - "rand_xoshiro", - "sketches-ddsketch", -] - -[[package]] -name = "metrics-util" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdfb1365fea27e6dd9dc1dbc19f570198bc86914533ad639dae939635f096be4" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", - "hashbrown 0.16.1", - "metrics", - "quanta", - "rand 0.9.2", - "rand_xoshiro", - "sketches-ddsketch", -] - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" -dependencies = [ - "adler2", - "simd-adler32", -] - -[[package]] -name = "miniz_oxide" -version = "0.9.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5faa9f23e86bd5768d76def086192ff5f869fb088da12a976ea21e9796b975f6" -dependencies = [ - "adler2", - "serde", -] - -[[package]] -name = "mio" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "log", - "wasi", - "windows-sys 0.61.2", -] - -[[package]] -name = "mockall" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f58d964098a5f9c6b63d0798e5372fd04708193510a7af313c22e9f29b7b620b" -dependencies = [ - "cfg-if", - "downcast", - "fragile", - "mockall_derive", - "predicates", - "predicates-tree", -] - -[[package]] -name = "mockall_derive" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca41ce716dda6a9be188b385aa78ee5260fc25cd3802cb2a8afdc6afbe6b6dbf" -dependencies = [ - "cfg-if", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "modular-bitfield" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a53d79ba8304ac1c4f9eb3b9d281f21f7be9d4626f72ce7df4ad8fbde4f38a74" -dependencies = [ - "modular-bitfield-impl", - "static_assertions", -] - -[[package]] -name = "modular-bitfield-impl" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "moka" -version = "0.12.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" -dependencies = [ - "async-lock", - "crossbeam-channel", - "crossbeam-epoch", - "crossbeam-utils", - "equivalent", - "event-listener", - "futures-util", - "parking_lot", - "portable-atomic", - 
"rustc_version 0.4.1", - "smallvec", - "tagptr", - "uuid", -] - -[[package]] -name = "more-asserts" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" - -[[package]] -name = "multiaddr" -version = "0.18.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" -dependencies = [ - "arrayref", - "byteorder", - "data-encoding", - "libp2p-identity", - "multibase", - "multihash", - "percent-encoding", - "serde", - "static_assertions", - "unsigned-varint 0.8.0", - "url", -] - -[[package]] -name = "multibase" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" -dependencies = [ - "base-x", - "base256emoji", - "data-encoding", - "data-encoding-macro", -] - -[[package]] -name = "multihash" -version = "0.19.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b430e7953c29dd6a09afc29ff0bb69c6e306329ee6794700aee27b76a1aea8d" -dependencies = [ - "core2", - "unsigned-varint 0.8.0", -] - -[[package]] -name = "multistream-select" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" -dependencies = [ - "bytes", - "futures", - "log", - "pin-project", - "smallvec", - "unsigned-varint 0.7.2", -] - -[[package]] -name = "munge" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e17401f259eba956ca16491461b6e8f72913a0a114e39736ce404410f915a0c" -dependencies = [ - "munge_macro", -] - -[[package]] -name = "munge_macro" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4568f25ccbd45ab5d5603dc34318c1ec56b117531781260002151b8530a9f931" -dependencies = 
[ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework 2.11.1", - "security-framework-sys", - "tempfile", -] - -[[package]] -name = "netlink-packet-core" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72724faf704479d67b388da142b186f916188505e7e0b26719019c525882eda4" -dependencies = [ - "anyhow", - "byteorder", - "netlink-packet-utils", -] - -[[package]] -name = "netlink-packet-route" -version = "0.17.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053998cea5a306971f88580d0829e90f270f940befd7cf928da179d4187a5a66" -dependencies = [ - "anyhow", - "bitflags 1.3.2", - "byteorder", - "libc", - "netlink-packet-core", - "netlink-packet-utils", -] - -[[package]] -name = "netlink-packet-utils" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" -dependencies = [ - "anyhow", - "byteorder", - "paste", - "thiserror 1.0.69", -] - -[[package]] -name = "netlink-proto" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72452e012c2f8d612410d89eea01e2d9b56205274abb35d53f60200b2ec41d60" -dependencies = [ - "bytes", - "futures", - "log", - "netlink-packet-core", - "netlink-sys", - "thiserror 2.0.17", -] - -[[package]] -name = "netlink-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16c903aa70590cb93691bf97a767c8d1d6122d2cc9070433deb3bbf36ce8bd23" -dependencies = [ - "bytes", - "futures", - "libc", - "log", - "tokio", -] - -[[package]] -name = "nibble_vec" -version = "0.1.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" -dependencies = [ - "smallvec", -] - -[[package]] -name = "nix" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" -dependencies = [ - "bitflags 1.3.2", - "cfg-if", - "libc", -] - -[[package]] -name = "nohash-hasher" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "notify" -version = "8.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" -dependencies = [ - "bitflags 2.10.0", - "fsevent-sys", - "inotify", - "kqueue", - "libc", - "log", - "mio", - "notify-types", - "walkdir", - "windows-sys 0.60.2", -] - -[[package]] -name = "notify-types" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" - -[[package]] -name = "ntapi" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" -dependencies = [ - "winapi", -] - -[[package]] -name = "nu-ansi-term" -version = "0.50.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "num" -version = "0.4.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-complex" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-format" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" -dependencies = [ - "arrayvec", - "itoa", -] - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = 
"num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_enum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" -dependencies = [ - "libc", -] - -[[package]] -name = "nybbles" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c4b5ecbd0beec843101bffe848217f770e8b8da81d8355b7d6e226f2199b3dc" -dependencies = [ - "alloy-rlp", - "arbitrary", - "cfg-if", - "proptest", - "ruint", - "serde", - "smallvec", -] - -[[package]] -name = "object" -version = "0.37.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" -dependencies = [ - "memchr", -] - -[[package]] -name = "oid-registry" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7" 
-dependencies = [ - "asn1-rs", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -dependencies = [ - "critical-section", - "portable-atomic", -] - -[[package]] -name = "once_cell_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" - -[[package]] -name = "oorandom" -version = "11.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" - -[[package]] -name = "op-alloy" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9b8fee21003dd4f076563de9b9d26f8c97840157ef78593cd7f262c5ca99848" -dependencies = [ - "op-alloy-consensus 0.23.1", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-consensus" -version = "0.18.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c88d2940558fd69f8f07b3cbd7bb3c02fc7d31159c1a7ba9deede50e7881024" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "derive_more", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "op-alloy-consensus" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736381a95471d23e267263cfcee9e1d96d30b9754a94a2819148f83379de8a86" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "arbitrary", - "derive_more", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "op-alloy-network" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4034183dca6bff6632e7c24c92e75ff5f0eabb58144edb4d8241814851334d47" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-eth", - "alloy-signer", - "op-alloy-consensus 0.23.1", - "op-alloy-rpc-types", -] - -[[package]] -name = "op-alloy-provider" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6753d90efbaa8ea8bcb89c1737408ca85fa60d7adb875049d3f382c063666f86" -dependencies = [ - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-transport", - "async-trait", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-rpc-jsonrpsee" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1c820ef9c802ebc732281a940bfb6ac2345af4d9fff041cbb64b4b546676686" -dependencies = [ - "alloy-primitives", - "jsonrpsee", -] - -[[package]] -name = "op-alloy-rpc-types" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd87c6b9e5b6eee8d6b76f41b04368dca0e9f38d83338e5b00e730c282098a4" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "arbitrary", - "derive_more", - "jsonrpsee", - "op-alloy-consensus 0.23.1", - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "op-alloy-rpc-types-engine" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77727699310a18cdeed32da3928c709e2704043b6584ed416397d5da65694efc" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-serde", - "arbitrary", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "op-alloy-consensus 0.23.1", - "serde", - "sha2", - "snap", - "thiserror 2.0.17", -] - -[[package]] -name = "op-revm" -version = "14.1.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1475a779c73999fc803778524042319691b31f3d6699d2b560c4ed8be1db802a" -dependencies = [ - "auto_impl", - "revm", - "serde", -] - -[[package]] -name = "opaque-debug" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" - -[[package]] -name = "openssl" -version = "0.10.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" -dependencies = [ - "bitflags 2.10.0", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "openssl-probe" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" - -[[package]] -name = "openssl-src" -version = "300.5.4+3.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507b3792995dae9b0df8a1c1e3771e8418b7c2d9f0baeba32e6fe8b06c7cb72" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.111" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" -dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "opentelemetry" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "236e667b670a5cdf90c258f5a55794ec5ac5027e960c224bff8367a59e1e6426" -dependencies = [ - "futures-core", - "futures-sink", - "js-sys", - 
"pin-project-lite", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "opentelemetry-http" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8863faf2910030d139fb48715ad5ff2f35029fc5f244f6d5f689ddcf4d26253" -dependencies = [ - "async-trait", - "bytes", - "http", - "opentelemetry", - "reqwest", - "tracing", -] - -[[package]] -name = "opentelemetry-otlp" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" -dependencies = [ - "async-trait", - "futures-core", - "http", - "opentelemetry", - "opentelemetry-http", - "opentelemetry-proto", - "opentelemetry_sdk", - "prost", - "reqwest", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tonic", - "tracing", -] - -[[package]] -name = "opentelemetry-proto" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f8870d3024727e99212eb3bb1762ec16e255e3e6f58eeb3dc8db1aa226746d" -dependencies = [ - "base64", - "hex", - "opentelemetry", - "opentelemetry_sdk", - "prost", - "serde", - "tonic", -] - -[[package]] -name = "opentelemetry_sdk" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" -dependencies = [ - "async-trait", - "futures-channel", - "futures-executor", - "futures-util", - "glob", - "opentelemetry", - "percent-encoding", - "rand 0.8.5", - "serde_json", - "thiserror 2.0.17", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "ordered-float" -version = "4.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" -dependencies = [ - "num-traits", -] - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "page_size" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "papergrid" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6978128c8b51d8f4080631ceb2302ab51e32cc6e8615f735ee2f83fd269ae3f1" -dependencies = [ - "bytecount", - "fnv", - "unicode-width", -] - -[[package]] -name = "parity-scale-codec" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "bytes", - "const_format", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "rustversion", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" - -[[package]] -name = "parking_lot" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - 
"lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "path-tree" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a97453bc21a968f722df730bfe11bd08745cb50d1300b0df2bda131dece136" -dependencies = [ - "smallvec", -] - -[[package]] -name = "pem" -version = "3.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" -dependencies = [ - "base64", - "serde_core", -] - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pest" -version = "2.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22" -dependencies = [ - "memchr", - "ucd-trie", -] - -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version 0.4.1", -] - -[[package]] -name = "phf" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" -dependencies = [ - "phf_macros 0.11.3", - "phf_shared 0.11.3", - "serde", -] - -[[package]] 
-name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros 0.13.1", - "phf_shared 0.13.1", - "serde", -] - -[[package]] -name = "phf_generator" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" -dependencies = [ - "phf_shared 0.11.3", - "rand 0.8.5", -] - -[[package]] -name = "phf_generator" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared 0.13.1", -] - -[[package]] -name = "phf_macros" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" -dependencies = [ - "phf_generator 0.11.3", - "phf_shared 0.11.3", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator 0.13.1", - "phf_shared 0.13.1", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "phf_shared" -version = "0.11.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" -dependencies = [ - "siphasher", -] - -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.10" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" - -[[package]] -name = "plotters" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" - -[[package]] -name = "plotters-svg" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" -dependencies = [ - "plotters-backend", -] - -[[package]] -name = "polling" -version = "3.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" -dependencies = [ - "cfg-if", - "concurrent-queue", - "hermit-abi", - "pin-project-lite", - "rustix 1.1.3", - "windows-sys 0.61.2", -] - -[[package]] -name = "poly1305" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" -dependencies = [ - "cpufeatures", - "opaque-debug", - "universal-hash", -] - -[[package]] -name = "polyval" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" -dependencies = [ - "cfg-if", - "cpufeatures", - "opaque-debug", - "universal-hash", -] - -[[package]] -name = "portable-atomic" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" - -[[package]] -name = "potential_utf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "pprof" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38a01da47675efa7673b032bf8efd8214f1917d89685e07e395ab125ea42b187" -dependencies = [ - "aligned-vec", - "backtrace", - "cfg-if", - "criterion", - "findshlibs", - "inferno", - "libc", - "log", - "nix", - "once_cell", - 
"smallvec", - "spin 0.10.0", - "symbolic-demangle", - "tempfile", - "thiserror 2.0.17", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "predicates" -version = "3.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" -dependencies = [ - "anstyle", - "predicates-core", -] - -[[package]] -name = "predicates-core" -version = "1.0.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" - -[[package]] -name = "predicates-tree" -version = "1.0.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" -dependencies = [ - "predicates-core", - "termtree", -] - -[[package]] -name = "prettyplease" -version = "0.2.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" -dependencies = [ - "proc-macro2", - "syn 2.0.111", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve", -] - -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint 0.9.5", -] - -[[package]] -name = "proc-macro-crate" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "proc-macro2" -version = "1.0.103" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee95bc4ef87b8d5ba32e8b7714ccc834865276eab0aed5c9958d00ec45f49e8" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "procfs" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25485360a54d6861439d60facef26de713b1e126bf015ec8f98239467a2b82f7" -dependencies = [ - "bitflags 2.10.0", - "procfs-core", - "rustix 1.1.3", -] - -[[package]] -name = "procfs-core" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6401bf7b6af22f78b563665d15a22e9aef27775b79b149a66ca022468a4e405" -dependencies = [ - "bitflags 2.10.0", - "hex", -] - -[[package]] -name = "prometheus-client" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf41c1a7c32ed72abe5082fb19505b969095c12da9f5732a4bc9878757fd087c" -dependencies = [ - "dtoa", - "itoa", - "parking_lot", - "prometheus-client-derive-encode", -] - -[[package]] -name = "prometheus-client-derive-encode" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" -dependencies = [ - "proc-macro2", - 
"quote", - "syn 2.0.111", -] - -[[package]] -name = "proptest" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" -dependencies = [ - "bit-set", - "bit-vec", - "bitflags 2.10.0", - "num-traits", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", - "unarray", -] - -[[package]] -name = "proptest-derive" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "proptest-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "prost" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" -dependencies = [ - "bytes", - "prost-derive", -] - -[[package]] -name = "prost-derive" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "ptr_meta" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b9a0cf95a1196af61d4f1cbdab967179516d9a4a4312af1f31948f8f6224a79" -dependencies = [ - "ptr_meta_derive", -] - -[[package]] -name = "ptr_meta_derive" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7347867d0a7e1208d93b46767be83e2b8f978c3dad35f775ac8d8847551d6fe1" -dependencies = [ - 
"proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "quanta" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" -dependencies = [ - "crossbeam-utils", - "libc", - "once_cell", - "raw-cpuid", - "wasi", - "web-sys", - "winapi", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quick-protobuf" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" -dependencies = [ - "byteorder", -] - -[[package]] -name = "quick-protobuf-codec" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15a0580ab32b169745d7a39db2ba969226ca16738931be152a3209b409de2474" -dependencies = [ - "asynchronous-codec", - "bytes", - "quick-protobuf", - "thiserror 1.0.69", - "unsigned-varint 0.8.0", -] - -[[package]] -name = "quick-xml" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f50b1c63b38611e7d4d7f68b82d3ad0cc71a2ad2e7f61fc10f1328d917c93cd" -dependencies = [ - "memchr", -] - -[[package]] -name = "quinn" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" -dependencies = [ - "bytes", - "cfg_aliases", - "futures-io", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash 2.1.1", - "rustls", - "socket2 0.6.1", - "thiserror 2.0.17", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-proto" -version = "0.11.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" 
-dependencies = [ - "bytes", - "getrandom 0.3.4", - "lru-slab", - "rand 0.9.2", - "ring", - "rustc-hash 2.1.1", - "rustls", - "rustls-pki-types", - "slab", - "thiserror 2.0.17", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2 0.6.1", - "tracing", - "windows-sys 0.60.2", -] - -[[package]] -name = "quote" -version = "1.0.42" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "radix_trie" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" -dependencies = [ - "endian-type", - "nibble_vec", -] - -[[package]] -name = "rancor" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a063ea72381527c2a0561da9c80000ef822bdd7c3241b1cc1b12100e3df081ee" -dependencies = [ - "ptr_meta", -] - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "serde", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.3", - "serde", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.3", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.16", -] - -[[package]] -name = "rand_core" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" -dependencies = [ - "getrandom 0.3.4", - "serde", -] - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core 0.9.3", -] - -[[package]] -name = "rand_xoshiro" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f703f4665700daf5512dcca5f43afa6af89f09db47fb56be587f80636bda2d41" -dependencies = [ - "rand_core 0.9.3", -] - -[[package]] -name = "rapidhash" -version = "4.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" -dependencies = [ - "rand 0.9.2", - "rustversion", -] - -[[package]] -name = "raw-cpuid" 
-version = "11.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "rayon" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "rcgen" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75e669e5202259b5314d1ea5397316ad400819437857b90861765f24c4cf80a2" -dependencies = [ - "pem", - "ring", - "rustls-pki-types", - "time", - "yasna", -] - -[[package]] -name = "recvmsg" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "redox_users" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" -dependencies = [ - "getrandom 0.2.16", - "libredox", - "thiserror 2.0.17", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" 
-version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "regex" -version = "1.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" - -[[package]] -name = "relative-path" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" - -[[package]] -name = "rend" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cadadef317c2f20755a64d7fdc48f9e7178ee6b0e1f7fce33fa60f1d68a276e6" -dependencies = [ - "bytecheck", -] - -[[package]] -name = "reqwest" -version = "0.12.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" -dependencies = [ - "base64", - "bytes", - "encoding_rs", - "futures-channel", - "futures-core", - "futures-util", - "h2", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-rustls", - "hyper-tls", - "hyper-util", - "js-sys", - "log", - "mime", - "native-tls", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls", - "rustls-pki-types", - "serde", 
- "serde_json", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tokio-native-tls", - "tokio-rustls", - "tokio-util", - "tower 0.5.3", - "tower-http", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "webpki-roots 1.0.4", -] - -[[package]] -name = "resolv-conf" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" - -[[package]] -name = "reth-codecs" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "alloy-trie", - "bytes", - "modular-bitfield", - "op-alloy-consensus 0.18.14", - "reth-codecs-derive", - "reth-zstd-compressors", - "serde", -] - -[[package]] -name = "reth-codecs-derive" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "convert_case 0.7.1", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "reth-db" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-primitives", - "derive_more", - "eyre", - "metrics", - "page_size", - "reth-db-api", - "reth-fs-util", - "reth-libmdbx", - "reth-metrics", - "reth-nippy-jar", - "reth-static-file-types", - "reth-storage-errors", - "reth-tracing", - "rustc-hash 2.1.1", - "strum", - "sysinfo", - "thiserror 2.0.17", -] - -[[package]] -name = "reth-db-api" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-consensus", - "alloy-genesis", - "alloy-primitives", - "bytes", - "derive_more", - "metrics", - "modular-bitfield", - "parity-scale-codec", - "reth-codecs", - "reth-db-models", - 
"reth-ethereum-primitives", - "reth-primitives-traits", - "reth-prune-types", - "reth-stages-types", - "reth-storage-errors", - "reth-trie-common", - "roaring", - "serde", -] - -[[package]] -name = "reth-db-models" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "bytes", - "modular-bitfield", - "reth-codecs", - "reth-primitives-traits", - "serde", -] - -[[package]] -name = "reth-ethereum-primitives" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "modular-bitfield", - "reth-codecs", - "reth-primitives-traits", - "reth-zstd-compressors", - "serde", - "serde_with", -] - -[[package]] -name = "reth-fs-util" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "serde", - "serde_json", - "thiserror 2.0.17", -] - -[[package]] -name = "reth-libmdbx" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "bitflags 2.10.0", - "byteorder", - "dashmap", - "derive_more", - "indexmap 2.12.1", - "parking_lot", - "reth-mdbx-sys", - "smallvec", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "reth-mdbx-sys" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "bindgen 0.70.1", - "cc", -] - -[[package]] -name = "reth-metrics" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "metrics", - "metrics-derive", -] - -[[package]] -name = "reth-nippy-jar" -version = "1.6.0" -source = 
"git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "anyhow", - "bincode 1.3.3", - "derive_more", - "lz4_flex", - "memmap2", - "reth-fs-util", - "serde", - "thiserror 2.0.17", - "tracing", - "zstd", -] - -[[package]] -name = "reth-primitives-traits" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-trie", - "auto_impl", - "byteorder", - "bytes", - "derive_more", - "modular-bitfield", - "once_cell", - "op-alloy-consensus 0.18.14", - "reth-codecs", - "revm-bytecode 6.2.2", - "revm-primitives 20.2.1", - "revm-state 7.0.5", - "secp256k1 0.30.0", - "serde", - "serde_with", - "thiserror 2.0.17", -] - -[[package]] -name = "reth-prune-types" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-primitives", - "derive_more", - "modular-bitfield", - "reth-codecs", - "serde", - "thiserror 2.0.17", -] - -[[package]] -name = "reth-stages-types" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-primitives", - "bytes", - "modular-bitfield", - "reth-codecs", - "reth-trie-common", - "serde", -] - -[[package]] -name = "reth-static-file-types" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-primitives", - "derive_more", - "serde", - "strum", -] - -[[package]] -name = "reth-storage-errors" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "derive_more", - 
"reth-primitives-traits", - "reth-prune-types", - "reth-static-file-types", - "revm-database-interface 7.0.5", - "thiserror 2.0.17", -] - -[[package]] -name = "reth-tracing" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "clap", - "eyre", - "rolling-file", - "tracing", - "tracing-appender", - "tracing-journald", - "tracing-logfmt", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "reth-trie-common" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-trie", - "bytes", - "derive_more", - "itertools 0.14.0", - "nybbles", - "reth-codecs", - "reth-primitives-traits", - "revm-database 7.0.5", - "serde", -] - -[[package]] -name = "reth-zstd-compressors" -version = "1.6.0" -source = "git+https://github.com/paradigmxyz/reth?tag=v1.6.0#d8451e54e7267f9f1634118d6d279b2216f7e2bb" -dependencies = [ - "zstd", -] - -[[package]] -name = "revm" -version = "33.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c85ed0028f043f87b3c88d4a4cb6f0a76440085523b6a8afe5ff003cf418054" -dependencies = [ - "revm-bytecode 7.1.1", - "revm-context", - "revm-context-interface", - "revm-database 9.0.6", - "revm-database-interface 8.0.5", - "revm-handler", - "revm-inspector", - "revm-interpreter", - "revm-precompile", - "revm-primitives 21.0.2", - "revm-state 8.1.1", -] - -[[package]] -name = "revm-bytecode" -version = "6.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66c52031b73cae95d84cd1b07725808b5fd1500da3e5e24574a3b2dc13d9f16d" -dependencies = [ - "bitvec", - "phf 0.11.3", - "revm-primitives 20.2.1", - "serde", -] - -[[package]] -name = "revm-bytecode" -version = "7.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e2c6b5e6e8dd1e28a4a60e5f46615d4ef0809111c9e63208e55b5c7058200fb0" -dependencies = [ - "bitvec", - "phf 0.13.1", - "revm-primitives 21.0.2", - "serde", -] - -[[package]] -name = "revm-context" -version = "12.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f038f0c9c723393ac897a5df9140b21cfa98f5753a2cb7d0f28fa430c4118abf" -dependencies = [ - "bitvec", - "cfg-if", - "derive-where", - "revm-bytecode 7.1.1", - "revm-context-interface", - "revm-database-interface 8.0.5", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-context-interface" -version = "13.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "431c9a14e4ef1be41ae503708fd02d974f80ef1f2b6b23b5e402e8d854d1b225" -dependencies = [ - "alloy-eip2930", - "alloy-eip7702", - "auto_impl", - "either", - "revm-database-interface 8.0.5", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-database" -version = "7.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39a276ed142b4718dcf64bc9624f474373ed82ef20611025045c3fb23edbef9c" -dependencies = [ - "alloy-eips", - "revm-bytecode 6.2.2", - "revm-database-interface 7.0.5", - "revm-primitives 20.2.1", - "revm-state 7.0.5", - "serde", -] - -[[package]] -name = "revm-database" -version = "9.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "980d8d6bba78c5dd35b83abbb6585b0b902eb25ea4448ed7bfba6283b0337191" -dependencies = [ - "alloy-eips", - "revm-bytecode 7.1.1", - "revm-database-interface 8.0.5", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-database-interface" -version = "7.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c523c77e74eeedbac5d6f7c092e3851dbe9c7fec6f418b85992bd79229db361" -dependencies = [ - "auto_impl", - "either", - "revm-primitives 20.2.1", - "revm-state 7.0.5", - 
"serde", -] - -[[package]] -name = "revm-database-interface" -version = "8.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cce03e3780287b07abe58faf4a7f5d8be7e81321f93ccf3343c8f7755602bae" -dependencies = [ - "auto_impl", - "either", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-handler" -version = "14.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d44f8f6dbeec3fecf9fe55f78ef0a758bdd92ea46cd4f1ca6e2a946b32c367f3" -dependencies = [ - "auto_impl", - "derive-where", - "revm-bytecode 7.1.1", - "revm-context", - "revm-context-interface", - "revm-database-interface 8.0.5", - "revm-interpreter", - "revm-precompile", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-inspector" -version = "14.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5617e49216ce1ca6c8826bcead0386bc84f49359ef67cde6d189961735659f93" -dependencies = [ - "auto_impl", - "either", - "revm-context", - "revm-database-interface 8.0.5", - "revm-handler", - "revm-interpreter", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", - "serde_json", -] - -[[package]] -name = "revm-interpreter" -version = "31.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26ec36405f7477b9dccdc6caa3be19adf5662a7a0dffa6270cdb13a090c077e5" -dependencies = [ - "revm-bytecode 7.1.1", - "revm-context-interface", - "revm-primitives 21.0.2", - "revm-state 8.1.1", - "serde", -] - -[[package]] -name = "revm-precompile" -version = "31.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a62958af953cc4043e93b5be9b8497df84cc3bd612b865c49a7a7dfa26a84e2" -dependencies = [ - "ark-bls12-381", - "ark-bn254", - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "arrayref", - "aurora-engine-modexp", - "blst", - "c-kzg", - "cfg-if", - "k256", - "p256", - "revm-primitives 21.0.2", 
- "ripemd", - "rug", - "secp256k1 0.31.1", - "sha2", -] - -[[package]] -name = "revm-primitives" -version = "20.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aa29d9da06fe03b249b6419b33968ecdf92ad6428e2f012dc57bcd619b5d94e" -dependencies = [ - "alloy-primitives", - "num_enum", - "once_cell", - "serde", -] - -[[package]] -name = "revm-primitives" -version = "21.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29e161db429d465c09ba9cbff0df49e31049fe6b549e28eb0b7bd642fcbd4412" -dependencies = [ - "alloy-primitives", - "num_enum", - "once_cell", - "serde", -] - -[[package]] -name = "revm-state" -version = "7.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f64fbacb86008394aaebd3454f9643b7d5a782bd251135e17c5b33da592d84d" -dependencies = [ - "bitflags 2.10.0", - "revm-bytecode 6.2.2", - "revm-primitives 20.2.1", - "serde", -] - -[[package]] -name = "revm-state" -version = "8.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d8be953b7e374dbdea0773cf360debed8df394ea8d82a8b240a6b5da37592fc" -dependencies = [ - "bitflags 2.10.0", - "revm-bytecode 7.1.1", - "revm-primitives 21.0.2", - "serde", -] - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "rgb" -version = "0.8.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c6a884d2998352bb4daf0183589aec883f16a6da1f4dde84d8e2e9a5409a1ce" -dependencies = [ - "bytemuck", -] - -[[package]] -name = "ring" -version = "0.17.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" -dependencies = [ - "cc", - "cfg-if", - "getrandom 0.2.16", - "libc", - "untrusted", - 
"windows-sys 0.52.0", -] - -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "rkyv" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360b333c61ae24e5af3ae7c8660bd6b21ccd8200dbbc5d33c2454421e85b9c69" -dependencies = [ - "bytecheck", - "bytes", - "hashbrown 0.16.1", - "indexmap 2.12.1", - "munge", - "ptr_meta", - "rancor", - "rend", - "rkyv_derive", - "tinyvec", - "uuid", -] - -[[package]] -name = "rkyv_derive" -version = "0.8.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02f8cdd12b307ab69fe0acf4cd2249c7460eb89dce64a0febadf934ebb6a9e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "rlimit" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7043b63bd0cd1aaa628e476b80e6d4023a3b50eb32789f2728908107bd0c793a" -dependencies = [ - "libc", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "roaring" -version = "0.10.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e8d2cfa184d94d0726d650a9f4a1be7f9b76ac9fdb954219878dc00c1c1e7b" -dependencies = [ - "bytemuck", - "byteorder", -] - -[[package]] -name = "rocksdb" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddb7af00d2b17dbd07d82c0063e25411959748ff03e8d4f96134c2ff41fce34f" -dependencies = [ - "libc", - "librocksdb-sys", -] - -[[package]] -name = "rolling-file" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "8395b4f860856b740f20a296ea2cd4d823e81a2658cf05ef61be22916026a906" -dependencies = [ - "chrono", -] - -[[package]] -name = "rollup-boost" -version = "0.7.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d1d7c635dec67c86346eb871e8a22dd1596c33d4a96a9a4926b4d2fd703b63" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-serde", - "backoff", - "blake3", - "bytes", - "clap", - "dashmap", - "dotenvy", - "ed25519-dalek", - "eyre", - "futures", - "hex", - "http", - "http-body-util", - "hyper", - "hyper-rustls", - "hyper-util", - "jsonrpsee", - "lru 0.16.3", - "metrics", - "metrics-derive", - "metrics-exporter-prometheus 0.16.2", - "metrics-util 0.19.1", - "moka", - "op-alloy-rpc-types-engine", - "opentelemetry", - "opentelemetry-otlp", - "opentelemetry_sdk", - "parking_lot", - "paste", - "rollup-boost-types", - "rustls", - "serde", - "serde_json", - "sha2", - "thiserror 2.0.17", - "tokio", - "tokio-tungstenite", - "tokio-util", - "tower 0.5.3", - "tower-http", - "tracing", - "tracing-opentelemetry", - "tracing-subscriber 0.3.22", - "url", - "uuid", - "vergen", - "vergen-git2", -] - -[[package]] -name = "rollup-boost-types" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "756f32c7f241ab6d91d823e94d20f6e0729bfcaec3b545bd30f33b24e50f5821" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-serde", - "blake3", - "ed25519-dalek", - "futures", - "moka", - "op-alloy-rpc-types-engine", - "serde", - "serde_json", - "thiserror 2.0.17", - "tracing", -] - -[[package]] -name = "route-recognizer" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" - -[[package]] -name = "rstest" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "f5a3193c063baaa2a95a33f03035c8a72b83d97a54916055ba22d35ed3839d49" -dependencies = [ - "futures-timer", - "futures-util", - "rstest_macros", -] - -[[package]] -name = "rstest_macros" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c845311f0ff7951c5506121a9ad75aec44d083c31583b2ea5a30bcb0b0abba0" -dependencies = [ - "cfg-if", - "glob", - "proc-macro-crate", - "proc-macro2", - "quote", - "regex", - "relative-path", - "rustc_version 0.4.1", - "syn 2.0.111", - "unicode-ident", -] - -[[package]] -name = "rtnetlink" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a552eb82d19f38c3beed3f786bd23aa434ceb9ac43ab44419ca6d67a7e186c0" -dependencies = [ - "futures", - "log", - "netlink-packet-core", - "netlink-packet-route", - "netlink-packet-utils", - "netlink-proto", - "netlink-sys", - "nix", - "thiserror 1.0.69", - "tokio", -] - -[[package]] -name = "rug" -version = "1.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad2e973fe3c3214251a840a621812a4f40468da814b1a3d6947d433c2af11f" -dependencies = [ - "az", - "gmp-mpfr-sys", - "libc", - "libm", -] - -[[package]] -name = "ruint" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" -dependencies = [ - "alloy-rlp", - "arbitrary", - "ark-ff 0.3.0", - "ark-ff 0.4.2", - "ark-ff 0.5.0", - "bytes", - "fastrlp 0.3.1", - "fastrlp 0.4.0", - "num-bigint", - "num-integer", - "num-traits", - "parity-scale-codec", - "primitive-types", - "proptest", - "rand 0.8.5", - "rand 0.9.2", - "rlp", - "ruint-macro", - "serde_core", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = 
"rustc-demangle" -version = "0.1.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" - -[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" -dependencies = [ - "rand 0.8.5", -] - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver 1.0.27", -] - -[[package]] -name = "rusticata-macros" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" -dependencies = [ - "nom", -] - -[[package]] -name = "rustix" -version = "0.38.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", -] - -[[package]] -name = "rustls" -version = "0.23.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" -dependencies = [ - "aws-lc-rs", - "log", - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9980d917ebb0c0536119ba501e90834767bffc3d60641457fd84a1f3fd337923" -dependencies = [ - "openssl-probe", - "rustls-pki-types", - "schannel", - "security-framework 3.5.1", -] - -[[package]] -name = "rustls-pki-types" -version = "1.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c" -dependencies = [ - "web-time", - "zeroize", -] - -[[package]] -name = "rustls-platform-verifier" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" -dependencies = [ - "core-foundation 0.10.1", - "core-foundation-sys", - "jni", - "log", - "once_cell", - "rustls", - "rustls-native-certs", - "rustls-platform-verifier-android", - "rustls-webpki", - "security-framework 3.5.1", - "security-framework-sys", - "webpki-root-certs 0.26.11", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustls-platform-verifier-android" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" - -[[package]] -name = "rustls-webpki" -version = "0.103.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" -dependencies = [ - "aws-lc-rs", - "ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "rusty-fork" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" -dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "rw-stream-sink" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" -dependencies = [ - "futures", - "pin-project", - "static_assertions", -] - -[[package]] -name = "ryu" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "secp256k1" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" -dependencies = [ - "bitcoin_hashes", - "rand 0.8.5", - "secp256k1-sys 0.10.1", - "serde", -] - -[[package]] -name = "secp256k1" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3c81b43dc2d8877c216a3fccf76677ee1ebccd429566d3e67447290d0c42b2" -dependencies = [ - "bitcoin_hashes", - "rand 0.9.2", - "secp256k1-sys 0.11.0", -] - -[[package]] -name = "secp256k1-sys" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" -dependencies = [ - "cc", -] - -[[package]] -name = "secp256k1-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb913707158fadaf0d8702c2db0e857de66eb003ccfdda5924b5f5ac98efb38" -dependencies = [ - "cc", -] - -[[package]] -name = "security-framework" -version = "2.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags 2.10.0", - "core-foundation 0.9.4", - "core-foundation-sys", - 
"libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework" -version = "3.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" -dependencies = [ - "bitflags 2.10.0", - "core-foundation 0.10.1", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" -dependencies = [ - "serde", - "serde_core", -] - -[[package]] -name = "semver-parser" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" -dependencies = [ - "pest", -] - -[[package]] -name = "send_wrapper" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" - -[[package]] -name = "send_wrapper" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - 
"serde_core", - "serde_derive", -] - -[[package]] -name = "serde_combinators" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de5fb30ae2918667d3cee99ef4b112f1f7ca0a6c58fa349d7d9e76035c72f8b" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "indexmap 2.12.1", - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "serde_regex" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf" -dependencies = [ - "regex", - "serde", -] - -[[package]] -name = "serde_repr" -version = "0.1.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "serde_spanned" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" -dependencies = [ - "serde_core", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_with" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" -dependencies = [ - "base64", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.12.1", - "schemars 0.9.0", - "schemars 1.1.0", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "serdect" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" -dependencies = [ - "base16ct", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "sha3-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" -dependencies = [ - "cc", - "cfg-if", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signal-hook-registry" -version = "1.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7664a098b8e616bdfcc2dc0e9ac44eb231eedf41db4e9fe95d8d32ec728dedad" -dependencies = [ - "libc", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest 0.10.7", - "rand_core 0.6.4", -] - -[[package]] -name = "simd-adler32" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" - -[[package]] -name = "simdutf8" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" - -[[package]] -name = "similar" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" - -[[package]] -name = "simple_asn1" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" -dependencies = [ - "num-bigint", - "num-traits", - "thiserror 2.0.17", - "time", -] - -[[package]] -name = "siphasher" -version = "1.0.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" - -[[package]] -name = "sketches-ddsketch" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" - -[[package]] -name = "slab" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -dependencies = [ - "arbitrary", - "serde", -] - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "snow" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "850948bee068e713b8ab860fe1adc4d109676ab4c3b621fd8147f06b261f2f85" -dependencies = [ - "aes-gcm", - "blake2", - "chacha20poly1305", - "curve25519-dalek", - "rand_core 0.6.4", - "ring", - "rustc_version 0.4.1", - "sha2", - "subtle", -] - -[[package]] -name = "socket2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "socket2" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "soketto" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" -dependencies = [ - "base64", - "bytes", - "futures", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha1", -] - -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spin" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591" -dependencies = [ - "lock_api", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "str_stack" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9091b6114800a5f2141aee1d1b9d6ca3592ac062dc5decb3764ec5895a47b4eb" - -[[package]] -name = "stringmetrics" -version = "2.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b3c8667cd96245cbb600b8dec5680a7319edd719c5aa2b5d23c6bff94f39765" - -[[package]] -name = "strsim" -version = "0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.27.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "symbolic-common" -version = "12.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d8046c5674ab857104bc4559d505f4809b8060d57806e45d49737c97afeb60" -dependencies = [ - "debugid", - "memmap2", - "stable_deref_trait", - "uuid", -] - -[[package]] -name = "symbolic-demangle" -version = "12.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1accb6e5c4b0f682de907623912e616b44be1c9e725775155546669dbff720ec" -dependencies = [ - "cpp_demangle", - "rustc-demangle", - "symbolic-common", -] - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.111" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "390cc9a294ab71bdb1aa2e99d13be9c753cd2d7bd6560c77118597410c4d2e87" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn-solidity" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f92d01b5de07eaf324f7fca61cc6bd3d82bbc1de5b6c963e6fe79e86f36580d" -dependencies = [ - "paste", - "proc-macro2", 
- "quote", - "syn 2.0.111", -] - -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -dependencies = [ - "futures-core", -] - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "sysinfo" -version = "0.33.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" -dependencies = [ - "core-foundation-sys", - "libc", - "memchr", - "ntapi", - "windows 0.57.0", -] - -[[package]] -name = "system-configuration" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" -dependencies = [ - "bitflags 2.10.0", - "core-foundation 0.9.4", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "tabled" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e39a2ee1fbcd360805a771e1b300f78cc88fec7b8d3e2f71cd37bbf23e725c7d" -dependencies = [ - "papergrid", - "tabled_derive", - "testing_table", -] - -[[package]] -name = "tabled_derive" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ea5d1b13ca6cff1f9231ffd62f15eefd72543dab5e468735f1a456728a02846" -dependencies = [ - "heck", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.111", -] - 
-[[package]] -name = "tabwriter" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fce91f2f0ec87dff7e6bcbbeb267439aa1188703003c6055193c821487400432" -dependencies = [ - "unicode-width", -] - -[[package]] -name = "tagptr" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tempfile" -version = "3.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" -dependencies = [ - "fastrand", - "getrandom 0.3.4", - "once_cell", - "rustix 1.1.3", - "windows-sys 0.61.2", -] - -[[package]] -name = "termtree" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" - -[[package]] -name = "test-fuzz" -version = "7.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11e5c77910b1d5b469a342be541cf44933f0ad2c4b8d5acb32ee46697fd60546" -dependencies = [ - "serde", - "serde_combinators", - "test-fuzz-internal", - "test-fuzz-macro", - "test-fuzz-runtime", -] - -[[package]] -name = "test-fuzz-internal" -version = "7.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d25f2f0ee315b130411a98570dd128dfe344bfaa0a28bf33d38f4a1fe85f39b" -dependencies = [ - "bincode 2.0.1", - "cargo_metadata 0.19.2", - "serde", -] - -[[package]] -name = "test-fuzz-macro" -version = "7.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8c03ba0a9e3e4032f94d71c85e149af147843c6f212e4ca4383542d606b04a6" -dependencies = [ - "darling 0.21.3", - "heck", - 
"itertools 0.14.0", - "prettyplease", - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "test-fuzz-runtime" -version = "7.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a4ac481aa983d386e857a7be0006c2f0ef26e0c5326bbc7262f73c2891b91d" -dependencies = [ - "hex", - "num-traits", - "serde", - "sha1", - "test-fuzz-internal", -] - -[[package]] -name = "testing_table" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8daae29995a24f65619e19d8d31dea5b389f3d853d8bf297bbf607cd0014cc" -dependencies = [ - "unicode-width", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" -dependencies = [ - "thiserror-impl 2.0.17", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "thread_local" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "threadpool" -version = "1.8.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" -dependencies = [ - "num_cpus", -] - -[[package]] -name = "time" -version = "0.3.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" -dependencies = [ - "deranged", - "itoa", - "libc", - "num-conv", - "num_threads", - "powerfmt", - "serde_core", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" - -[[package]] -name = "time-macros" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] - -[[package]] -name = "tinyvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" -dependencies = [ - "bytes", - "libc", - "mio", - "parking_lot", - "pin-project-lite", - "signal-hook-registry", - "socket2 0.6.1", - "tokio-macros", - "windows-sys 0.61.2", -] - -[[package]] -name = "tokio-macros" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-rustls" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" -dependencies = [ - "rustls", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" -dependencies = [ - "futures-util", - "log", - "native-tls", - "rustls", - "rustls-pki-types", - "tokio", - "tokio-native-tls", - "tokio-rustls", - "tungstenite", - "webpki-roots 0.26.11", -] - -[[package]] -name 
= "tokio-util" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite", - "slab", - "tokio", -] - -[[package]] -name = "toml" -version = "0.9.11+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" -dependencies = [ - "serde_core", - "serde_spanned", - "toml_datetime", - "toml_parser", - "winnow", -] - -[[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" -dependencies = [ - "serde_core", -] - -[[package]] -name = "toml_edit" -version = "0.23.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d7cbc3b4b49633d57a0509303158ca50de80ae32c265093b24c414705807832" -dependencies = [ - "indexmap 2.12.1", - "toml_datetime", - "toml_parser", - "winnow", -] - -[[package]] -name = "toml_parser" -version = "1.0.6+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" -dependencies = [ - "winnow", -] - -[[package]] -name = "tonic" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" -dependencies = [ - "async-stream", - "async-trait", - "axum", - "base64", - "bytes", - "h2", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-timeout", - "hyper-util", - "percent-encoding", - "pin-project", - "prost", - "socket2 0.5.10", - "tokio", - "tokio-stream", - "tower 0.4.13", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower" -version = 
"0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" -dependencies = [ - "futures-core", - "futures-util", - "indexmap 1.9.3", - "pin-project", - "pin-project-lite", - "rand 0.8.5", - "slab", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper", - "tokio", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-http" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" -dependencies = [ - "async-compression", - "bitflags 2.10.0", - "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-body-util", - "iri-string", - "pin-project-lite", - "tokio", - "tokio-util", - "tower 0.5.3", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-appender" -version = "0.2.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" -dependencies = [ - "crossbeam-channel", - "thiserror 2.0.17", - "time", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-journald" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0b4143302cf1022dac868d521e36e8b27691f72c84b3311750d5188ebba657" -dependencies = [ - "libc", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-logfmt" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b1f47d22deb79c3f59fcf2a1f00f60cbdc05462bf17d1cd356c1fefa3f444bd" -dependencies = [ - "time", - "tracing", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-opentelemetry" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "721f2d2569dce9f3dfbbddee5906941e953bfcdf736a62da3377f5751650cc36" -dependencies = [ - "js-sys", - "once_cell", - "opentelemetry", - "opentelemetry_sdk", - "smallvec", - "tracing", - "tracing-core", - "tracing-log", - "tracing-subscriber 
0.3.22", - "web-time", -] - -[[package]] -name = "tracing-serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex-automata", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", - "tracing-serde", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "tungstenite" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" -dependencies = [ - "bytes", - "data-encoding", - "http", - "httparse", - "log", - "native-tls", - "rand 0.9.2", - "rustls", - "rustls-pki-types", - "sha1", - "thiserror 2.0.17", - "utf-8", -] - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "ucd-trie" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" - -[[package]] -name = "uint" -version = 
"0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "uint" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-width" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4ac048d71ede7ee76d585517add45da530660ef4390e49b098733c6e897f254" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "universal-hash" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" -dependencies = [ - "crypto-common", - "subtle", -] - -[[package]] -name = "unsigned-varint" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" - 
-[[package]] -name = "unsigned-varint" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "unty" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "uuid" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" -dependencies = [ - "getrandom 0.3.4", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "valuable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "vergen" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b849a1f6d8639e8de261e81ee0fc881e3e3620db1af9f2e0da015d4382ceaf75" -dependencies = [ - "anyhow", - "cargo_metadata 0.23.1", - "derive_builder", - "regex", - "rustversion", - "time", - "vergen-lib", -] - -[[package]] -name = "vergen-git2" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d51ab55ddf1188c8d679f349775362b0fa9e90bd7a4ac69838b2a087623f0d57" -dependencies = [ - "anyhow", - "derive_builder", - "git2", - "rustversion", - "time", - "vergen", - "vergen-lib", -] - -[[package]] -name = "vergen-lib" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b34a29ba7e9c59e62f229ae1932fb1b8fb8a6fdcc99215a641913f5f5a59a569" -dependencies = [ - "anyhow", - "derive_builder", - "rustversion", -] - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "virtue" -version = "0.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" - -[[package]] -name = "wait-timeout" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" -dependencies = [ - "libc", -] - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.1+wasi-0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" -dependencies = [ - "cfg-if", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn 2.0.111", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-streams" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "wasmtimer" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" -dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", -] - -[[package]] -name = "web-sys" -version = "0.3.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "web-time" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-root-certs" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" -dependencies = [ - "webpki-root-certs 1.0.4", -] - -[[package]] -name = "webpki-root-certs" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3e3b5f5e80bc89f30ce8d0343bf4e5f12341c51f3e26cbeecbc7c85443e85b" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "webpki-roots" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" -dependencies = [ - "webpki-roots 1.0.4", -] - -[[package]] -name 
= "webpki-roots" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2878ef029c47c6e8cf779119f20fcf52bde7ad42a731b2a304bc221df17571e" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "which" -version = "4.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" -dependencies = [ - "either", - "home", - "once_cell", - "rustix 0.38.44", -] - -[[package]] -name = "widestring" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efc5cf48f83140dcaab716eeaea345f9e93d0018fb81162753a3f76c3397b538" -dependencies = [ - "windows-core 0.53.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows" -version = "0.57.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" -dependencies = [ - "windows-core 0.57.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" -dependencies = [ - "windows-collections", - "windows-core 0.62.2", - "windows-future", - "windows-numerics", -] - -[[package]] -name = "windows-collections" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" -dependencies = [ - "windows-core 0.62.2", -] - -[[package]] -name = "windows-core" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dcc5b895a6377f1ab9fa55acedab1fd5ac0db66ad1e6c7f47e28a22e446a5dd" -dependencies = [ - "windows-result 0.1.2", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-core" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" -dependencies = [ - "windows-implement 0.57.0", - "windows-interface 0.57.0", - "windows-result 0.1.2", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement 0.60.2", - "windows-interface 0.59.3", - "windows-link", - "windows-result 0.4.1", - "windows-strings", -] - -[[package]] -name = "windows-future" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" -dependencies = [ - "windows-core 0.62.2", - 
"windows-link", - "windows-threading", -] - -[[package]] -name = "windows-implement" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "windows-interface" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-numerics" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" -dependencies = [ - "windows-core 0.62.2", - "windows-link", -] - -[[package]] -name = "windows-registry" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" -dependencies = [ - "windows-link", - "windows-result 0.4.1", - "windows-strings", -] - -[[package]] -name = "windows-result" -version = "0.1.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.1", - "windows_aarch64_msvc 0.53.1", - "windows_i686_gnu 0.53.1", - "windows_i686_gnullvm 0.53.1", - "windows_i686_msvc 0.53.1", - "windows_x86_64_gnu 0.53.1", 
- "windows_x86_64_gnullvm 0.53.1", - "windows_x86_64_msvc 0.53.1", -] - -[[package]] -name = "windows-threading" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = 
"0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" -dependencies = [ - "memchr", -] - -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "wit-bindgen" -version = "0.46.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" - -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "ws_stream_wasm" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" -dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version 0.4.1", - "send_wrapper 0.6.0", - "thiserror 2.0.17", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", 
-] - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "x25519-dalek" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" -dependencies = [ - "curve25519-dalek", - "rand_core 0.6.4", - "serde", - "zeroize", -] - -[[package]] -name = "x509-parser" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4569f339c0c402346d4a75a9e39cf8dad310e287eef1ff56d4c68e5067f53460" -dependencies = [ - "asn1-rs", - "data-encoding", - "der-parser", - "lazy_static", - "nom", - "oid-registry", - "rusticata-macros", - "thiserror 2.0.17", - "time", -] - -[[package]] -name = "xml-rs" -version = "0.8.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" - -[[package]] -name = "xmltree" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" -dependencies = [ - "xml-rs", -] - -[[package]] -name = "yamux" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed0164ae619f2dc144909a9f082187ebb5893693d8c0196e8085283ccd4b776" -dependencies = [ - "futures", - "log", - "nohash-hasher", - "parking_lot", - "pin-project", - "rand 0.8.5", - "static_assertions", -] - -[[package]] -name = "yamux" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "deab71f2e20691b4728b349c6cee8fc7223880fa67b6b4f92225ec32225447e5" -dependencies = [ - "futures", - "log", - "nohash-hasher", - "parking_lot", - "pin-project", - "rand 0.9.2", - "static_assertions", - "web-time", -] - 
-[[package]] -name = "yasna" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" -dependencies = [ - "time", -] - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.111", -] - -[[package]] -name = "zmij" -version = "1.0.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea" - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.16+zstd.1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/kona/Cargo.toml b/kona/Cargo.toml deleted file mode 100644 index 3684d9253a2..00000000000 --- a/kona/Cargo.toml +++ /dev/null @@ -1,279 +0,0 @@ -[workspace.package] -edition = "2024" -license = "MIT" -rust-version = "1.88" -authors = ["clabby", "refcell", "theochap", "emhane"] -homepage = "https://github.com/op-rs/kona" -repository = "https://github.com/op-rs/kona" -keywords = ["ethereum", "optimism", "crypto"] -categories = ["cryptography", "cryptography::cryptocurrencies"] -exclude = ["**/target"] - -[workspace] -resolver = "2" -members = [ - "bin/*", - "crates/proof/*", - "crates/node/*", - "crates/supervisor/*", - "crates/protocol/*", - "crates/batcher/*", - "crates/providers/*", - "crates/utilities/*", - "examples/*", -] -default-members = [ - "bin/host", - "bin/client", - "bin/node", - "bin/supervisor", -] - -[workspace.metadata.cargo-udeps.ignore] -normal = ["rustls-platform-verifier"] - -[workspace.lints.rust] -missing-debug-implementations = "warn" -missing-docs = "warn" -unreachable-pub = "warn" -unused-must-use = "deny" -rust-2018-idioms = "deny" -unnameable-types = "warn" - -[workspace.lints.rustdoc] -all = "warn" - -[workspace.lints.clippy] -all = { level = "warn", priority = -1 } -missing-const-for-fn = "warn" -use-self = "warn" -option-if-let-else = "warn" -redundant-clone = "warn" - -[profile.dev] -opt-level = 1 -overflow-checks = false - -[profile.bench] -debug = true - -[profile.dev-client] -inherits = "dev" -panic = "abort" - -[profile.release-client-lto] -inherits = "release" -panic = "abort" -codegen-units = 1 -lto = "fat" - -[profile.release-perf] -inherits = "release" -lto = "fat" -codegen-units = 1 - -[workspace.dependencies] -# Binaries -kona-host = { path = "bin/host", version = "1.0.2", default-features = false } -kona-client = { path = "bin/client", version = "1.0.2", default-features = false } - -# Protocol 
-kona-comp = { path = "crates/batcher/comp", version = "0.4.5", default-features = false } -kona-derive = { path = "crates/protocol/derive", version = "0.4.5", default-features = false } -kona-interop = { path = "crates/protocol/interop", version = "0.4.5", default-features = false } -kona-genesis = { path = "crates/protocol/genesis", version = "0.4.5", default-features = false } -kona-protocol = { path = "crates/protocol/protocol", version = "0.4.5", default-features = false } -kona-registry = { path = "crates/protocol/registry", version = "0.4.5", default-features = false } -kona-hardforks = { path = "crates/protocol/hardforks", version = "0.4.5", default-features = false } - -# Node -kona-rpc = { path = "crates/node/rpc", version = "0.3.2", default-features = false } -kona-peers = { path = "crates/node/peers", version = "0.1.2", default-features = false } -kona-engine = { path = "crates/node/engine", version = "0.1.2", default-features = false } -kona-sources = { path = "crates/node/sources", version = "0.1.2", default-features = false } -kona-node-service = { path = "crates/node/service", version = "0.1.3", default-features = false } -kona-disc = { path = "crates/node/disc", version = "0.1.2", default-features = false } -kona-gossip = { path = "crates/node/gossip", version = "0.1.2", default-features = false } - -# Supervisor -kona-supervisor-rpc = { path = "crates/supervisor/rpc", version = "0.1.1", default-features = false } -kona-supervisor-core = { path = "crates/supervisor/core", version = "0.1.0", default-features = false } -kona-supervisor-service = { path = "crates/supervisor/service", version = "0.1.0", default-features = false } -kona-supervisor-types = { path = "crates/supervisor/types", version = "0.1.1", default-features = false } -kona-supervisor-storage = { path = "crates/supervisor/storage", version = "0.1.0", default-features = false } -kona-supervisor-metrics = { path = "crates/supervisor/metrics", version = "0.1.0", default-features = false } 
- -# Providers -kona-providers-alloy = { path = "crates/providers/providers-alloy", version = "0.3.3", default-features = false } -kona-providers-local = { path = "crates/providers/providers-local", version = "0.1.0", default-features = false } - -# Proof -kona-driver = { path = "crates/proof/driver", version = "0.4.0", default-features = false } -kona-mpt = { path = "crates/proof/mpt", version = "0.3.0", default-features = false } -kona-proof = { path = "crates/proof/proof", version = "0.3.0", default-features = false } -kona-executor = { path = "crates/proof/executor", version = "0.4.0", default-features = false } -kona-std-fpvm = { path = "crates/proof/std-fpvm", version = "0.2.0", default-features = false } -kona-preimage = { path = "crates/proof/preimage", version = "0.3.0", default-features = false } -kona-std-fpvm-proc = { path = "crates/proof/std-fpvm-proc", version = "0.2.0", default-features = false } -kona-proof-interop = { path = "crates/proof/proof-interop", version = "0.2.0", default-features = false } - -# Utilities -kona-cli = { path = "crates/utilities/cli", version = "0.3.2", default-features = false } -kona-serde = { path = "crates/utilities/serde", version = "0.2.2", default-features = false } -kona-macros = { path = "crates/utilities/macros", version = "0.1.2", default-features = false } - -# Alloy -alloy-rlp = { version = "0.3.12", default-features = false } -alloy-trie = { version = "0.9.3", default-features = false } -alloy-eips = { version = "1.4.3", default-features = false } -alloy-serde = { version = "1.4.3", default-features = false } -alloy-signer = { version = "1.4.3", default-features = false } -alloy-chains = { version = "0.2.27", default-features = false } -alloy-network = { version = "1.4.3", default-features = false } -alloy-genesis = { version = "1.4.3", default-features = false } -alloy-provider = { version = "1.4.3", default-features = false } -alloy-hardforks = { version = "0.4.7", default-features = false } -alloy-sol-types 
= { version = "1.5.2", default-features = false } -alloy-consensus = { version = "1.4.3", default-features = false } -alloy-transport = { version = "1.4.3", default-features = false } -alloy-rpc-types = { version = "1.4.3", default-features = false } -alloy-rpc-client = { version = "1.4.3", default-features = false } -alloy-primitives = { version = "1.5.2", default-features = false } -alloy-signer-local = { version = "1.4.3", default-features = false } -alloy-node-bindings = { version = "1.4.3", default-features = false } -alloy-rpc-types-eth = { version = "1.4.3", default-features = false } -alloy-transport-http = { version = "1.4.3", default-features = false } -alloy-rpc-types-engine = { version = "1.4.3", default-features = false } -alloy-rpc-types-beacon = { version = "1.4.3", default-features = false } -alloy-network-primitives = { version = "1.4.3", default-features = false } -alloy-json-rpc = { version = "1.4.3", default-features = false } - -# OP Alloy -op-alloy-network = { version = "0.23.1", default-features = false } -op-alloy-provider = { version = "0.23.1", default-features = false } -alloy-op-hardforks = { version = "0.4.7", default-features = false } -op-alloy-consensus = { version = "0.23.1", default-features = false } -op-alloy-rpc-types = { version = "0.23.1", default-features = false } -op-alloy-rpc-jsonrpsee = { version = "0.23.1", default-features = false } -op-alloy-rpc-types-engine = { version = "0.23.1", default-features = false } - -# Execution -revm = { version = "33.0.0", default-features = false } -op-revm = { version = "14.0.0", default-features = false } -alloy-evm = { version = "0.25.2", default-features = false } -alloy-op-evm = { version = "0.25.2", default-features = false } - -# Reth (pinned to v1.6.0 for kona-supervisor-storage) -reth-db-api = { git = "https://github.com/paradigmxyz/reth", tag = "v1.6.0" } -reth-db = { git = "https://github.com/paradigmxyz/reth", tag = "v1.6.0" } -reth-codecs = { git = 
"https://github.com/paradigmxyz/reth", tag = "v1.6.0" } - -# General -notify = "8.2" -url = "2.5.8" -http = "1.4.0" -lru = "0.16.3" -glob = "0.3.3" -dirs = "6.0.0" -eyre = "0.6.12" -spin = "0.10.0" -clap = "4.5.54" -tower = "0.5.3" -bytes = "1.11.0" -vergen = "9.1.0" -tokio = "1.49.0" -rayon = "1.11.0" -strum = "0.27" -cfg-if = "1.0.4" -rstest = "0.26.1" -ratatui = "0.30.0" -futures = "0.3.31" -futures-util = "0.3.31" -reqwest = "0.12.24" -auto_impl = "1.3.0" -tempfile = "3.24.0" -test-fuzz = "7.2.5" -arbitrary = "1.4.2" -multihash = "0.19.3" -crossterm = "0.29.0" -color-eyre = "0.6.5" -jsonrpsee = "0.26.0" -jsonrpsee-types = "0.26.0" -tokio-util = "0.7.18" -rustls = { version = "0.23", default-features = false } -rustls-pemfile = { version = "2.2", default-features = false } -vergen-git2 = "9.1.0" -async-trait = "0.1.89" -tokio-stream = "0.1.18" -async-stream = "0.3.6" -async-channel = "2.5.0" -http-body-util = "0.1.3" -unsigned-varint = "0.8.0" -modular-bitfield = "0.11.2" -buddy_system_allocator = "0.11.0" - -rand = { version = "0.9.2", default-features = false } -backon = { version = "1.6.0", default-features = false } -tabled = { version = "0.20.0", default-features = false } -anyhow = { version = "1.0.100", default-features = false } -thiserror = { version = "2.0.17", default-features = false } -derive_more = { version = "2.1.1", default-features = false } -lazy_static = { version = "1.5.0", default-features = false } - -# Compression -getrandom = "0.3.4" -miniz_oxide = "0.9.0" -alloc-no-stdlib = "2.0.4" -brotli = { version = "8.0.2", default-features = false } - -# Networking -snap = "1.1.1" -discv5 = "0.10.2" -libp2p = "0.56.0" -libp2p-stream = "0.4.0-alpha" -libp2p-identity = "0.2.13" -openssl = "0.10.75" -ipnet = "2.11.0" - -# Tracing -tracing-loki = "0.2.6" -tracing-subscriber = "0.3.22" -tracing-appender = "0.2.4" -tracing = { version = "0.1.44", default-features = false } - -# Metrics -metrics = { version = "0.24.3", default-features = false } 
-prometheus = { version = "0.14.0", default-features = false } -metrics-exporter-prometheus = { version = "0.18.1", default-features = false } -metrics-process = "2.4.2" - -# Testing -pprof = "0.15.0" -arbtest = "0.3.2" -proptest = "1.9.0" -criterion = "0.5.1" -mockall = "0.14.0" -httpmock = "0.8.2" - -# Serialization -rkyv = "0.8.14" -serde_repr = "0.1.20" -ethereum_ssz = "0.10.1" -toml = { version = "0.9.11", default-features = false } -serde = { version = "1.0.228", default-features = false } -serde_json = { version = "1.0.149", default-features = false } - -# K/V database -rocksdb = { version = "0.24.0", default-features = false } - -# Cryptography -sha2 = { version = "0.10.9", default-features = false } -c-kzg = { version = "2.1.5", default-features = false } -ark-ff = { version = "0.5.0", default-features = false } -secp256k1 = { version = "0.31.1", default-features = false } -ark-bls12-381 = { version = "0.5.0", default-features = false } - -# Rollup Boost (required for rollup-boost integration) -rollup-boost = "0.7.13" -rollup-boost-types = "0.1.0" -parking_lot = "0.12.5" diff --git a/kona/README.md b/kona/README.md deleted file mode 100644 index 54bb379e57e..00000000000 --- a/kona/README.md +++ /dev/null @@ -1,182 +0,0 @@ -<h1 align="center"> -<img src="./assets/banner.png" alt="Kona" width="100%" align="center"> -</h1> - -<h4 align="center"> - The Monorepo for <a href="https://specs.optimism.io/">OP Stack</a> Types, Components, and Services built in Rust. 
-</h4> - -<p align="center"> - <a href="https://github.com/op-rs/kona/releases"><img src="https://img.shields.io/github/v/release/op-rs/kona?style=flat&labelColor=1C2C2E&color=C96329&logo=GitHub&logoColor=white"></a> - <a href="https://docs.rs/kona-derive/"><img src="https://img.shields.io/docsrs/kona-derive?style=flat&labelColor=1C2C2E&color=C96329&logo=Rust&logoColor=white"></a> - <a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://img.shields.io/github/actions/workflow/status/op-rs/kona/rust_ci.yaml?style=flat&labelColor=1C2C2E&label=ci&color=BEC5C9&logo=GitHub%20Actions&logoColor=BEC5C9" alt="CI"></a> - <a href="https://app.codecov.io/gh/op-rs/kona"><img src="https://img.shields.io/codecov/c/gh/op-rs/kona?style=flat&labelColor=1C2C2E&logo=Codecov&color=BEC5C9&logoColor=BEC5C9" alt="Codecov"></a> - <a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=googledocs&label=license&logoColor=BEC5C9" alt="License"></a> - <a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs"></a> -</p> - -<p align="center"> - <a href="#whats-kona">What's Kona?</a> • - <a href="#overview">Overview</a> • - <a href="#msrv">MSRV</a> • - <a href="https://rollup.yoga/intro/contributing">Contributing</a> • - <a href="#credits">Credits</a> • - <a href="#license">License</a> -</p> - -# 🚧 Important information - -We have taken the decision to move `Kona` to `https://github.com/ethereum-optimism/optimism`. Once `https://github.com/ethereum-optimism/optimism/pull/18569` gets merged, `op-rs` operations are going to be transferred to `ethereum-optimism/optimism`. - -This repository will get archived mid January 2026. 
- - -The commit/contribution history will be maintained, thanks for contributing to `op-rs/kona` and we can't wait to see you in `https://github.com/ethereum-optimism/optimism`! - - -## What's Kona? - -Originally a suite of portable implementations of the OP Stack rollup state transition, -Kona has been extended to be _the monorepo_ for <a href="https://specs.optimism.io/">OP Stack</a> -types, components, and services built in Rust. Kona provides an ecosystem of extensible, low-level -crates that compose into components and services required for the OP Stack. - -The [docs][site] contains a more in-depth overview of the project, contributor guidelines, tutorials for -getting started with building your own programs, and a reference for the libraries and tools provided by Kona. - -## Overview - -> [!NOTE] -> -> Ethereum (Alloy) types modified for the OP Stack live in [op-alloy](https://github.com/alloy-rs/op-alloy). - -**Binaries** - -- [`client`](./bin/client): The bare-metal program that executes the state transition, to be run on a prover. -- [`host`](./bin/host): The host program that runs natively alongside the prover, serving as the [Preimage Oracle][g-preimage-oracle] server. -- [`node`](./bin/node): [WIP] A [Rollup Node][rollup-node-spec] implementation, backed by [`kona-derive`](./crates/protocol/derive). Supports flexible chain ID specification via `--l2-chain-id` using either numeric IDs (`10`) or chain names (`optimism`). -- [`supervisor`](./bin/supervisor): [WIP] A [Supervisor][supervisor-spec] implementation. - -**Protocol** - -- [`genesis`](./crates/protocol/genesis): Genesis types for OP Stack chains. -- [`protocol`](./crates/protocol/protocol): Core protocol types used across OP Stack rust crates. -- [`derive`](./crates/protocol/derive): `no_std` compatible implementation of the [derivation pipeline][g-derivation-pipeline]. -- [`driver`](./crates/proof/driver): Stateful derivation pipeline driver. 
-- [`interop`](./crates/protocol/interop): Core functionality and primitives for the [Interop feature](https://specs.optimism.io/interop/overview.html) of the OP Stack. -- [`registry`](./crates/protocol/registry): Rust bindings for the [superchain-registry][superchain-registry]. -- [`comp`](./crates/batcher/comp): Compression types for the OP Stack. -- [`hardforks`](./crates/protocol/hardforks): Consensus layer hardfork types for the OP Stack including network upgrade transactions. - -**Proof** - -- [`mpt`](./crates/proof/mpt): Utilities for interacting with the Merkle Patricia Trie in the client program. -- [`executor`](./crates/proof/executor): `no_std` stateless block executor for the [OP Stack][op-stack]. -- [`proof`](./crates/proof/proof): High level OP Stack state transition proof SDK. -- [`proof-interop`](./crates/proof/proof-interop): Extension of `kona-proof` with interop support. -- [`preimage`](./crates/proof/preimage): High level interfaces to the [`PreimageOracle`][fpp-specs] ABI. -- [`std-fpvm`](./crates/proof/std-fpvm): Platform specific [Fault Proof VM][g-fault-proof-vm] kernel APIs. -- [`std-fpvm-proc`](./crates/proof/std-fpvm-proc): Proc macro for [Fault Proof Program][fpp-specs] entrypoints. - -**Node** - -- [`service`](./crates/node/service): The OP Stack rollup node service. -- [`engine`](./crates/node/engine): An extensible implementation of the [OP Stack][op-stack] rollup node engine client -- [`rpc`](./crates/node/rpc): OP Stack RPC types and extensions. -- [`gossip`](./crates/node/gossip): OP Stack P2P Networking - Gossip. -- [`disc`](./crates/node/disc): OP Stack P2P Networking - Discovery. -- [`peers`](./crates/node/peers): Networking Utilities ported from reth. -- [`sources`](./crates/node/sources): Data source types and utilities for the kona-node. - -**Providers** - -- [`providers-alloy`](./crates/providers/providers-alloy): Provider implementations for `kona-derive` backed by [Alloy][alloy]. 
- -**Utilities** - -- [`serde`](./crates/utilities/serde): Serialization helpers. -- [`cli`](./crates/utilities/cli): Standard CLI utilities, used across `kona`'s binaries. -- [`macros`](./crates/utilities/macros): Utility macros. - -### Proof - -Built on top of these libraries, this repository also features a [proof program][fpp-specs] -designed to deterministically execute the rollup state transition in order to verify an -[L2 output root][g-output-root] from the L1 inputs it was [derived from][g-derivation-pipeline]. - -Kona's libraries were built with alternative backend support and extensibility in mind - the repository features -a fault proof virtual machine backend for use in the governance-approved OP Stack, but it's portable across -provers! Kona is also used by: - -- [`op-succinct`][op-succinct] -- [`kailua`][kailua] - -To build your own backend for kona, or build a new application on top of its libraries, -see the [SDK section of the docs](https://rollup.yoga/node/design/intro). - -## MSRV - -The current MSRV (minimum supported rust version) is `1.88`. - -The MSRV is not increased automatically, and will be updated -only as part of a patch (pre-1.0) or minor (post-1.0) release. - - -## Crate Releases - -`kona` releases are done using the [`cargo-release`](https://crates.io/crates/cargo-release) crate. -A detailed guide is available in [./RELEASES.md](./RELEASES.md). - - -## Contributing - -`kona` is built by open source contributors like you, thank you for improving the project! - -A [contributing guide][contributing] is available that sets guidelines for contributing. - -Pull requests will not be merged unless CI passes, so please ensure that your contribution -follows the linting rules and passes clippy. - - -## Credits - -`kona` is inspired by the work of several teams, namely [OP Labs][op-labs] and other contributors' work on the -[Optimism monorepo][op-go-monorepo] and [BadBoiLabs][bad-boi-labs]'s work on [Cannon-rs][badboi-cannon-rs]. 
- -`kona` is also built on rust types in [alloy][alloy], [op-alloy][op-alloy], and [maili][maili]. - -## License - -Licensed under the [MIT license.](https://github.com/op-rs/kona/blob/main/LICENSE.md) - -> [!NOTE] -> -> Contributions intentionally submitted for inclusion in these crates by you -> shall be licensed as above, without any additional terms or conditions. - - -<!-- Links --> - -[alloy]: https://github.com/alloy-rs/alloy -[maili]: https://github.com/op-rs/maili -[op-alloy]: https://github.com/alloy-rs/op-alloy -[contributing]: https://rollup.yoga/intro/contributing -[op-stack]: https://github.com/ethereum-optimism/optimism -[superchain-registry]: https://github.com/ethereum-optimism/superchain-registry -[op-go-monorepo]: https://github.com/ethereum-optimism/optimism/tree/develop -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[rollup-node-spec]: https://specs.optimism.io/protocol/rollup-node.html -[supervisor-spec]: https://specs.optimism.io/interop/supervisor.html -[badboi-cannon-rs]: https://github.com/BadBoiLabs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fpp-specs]: https://specs.optimism.io/fault-proof/index.html -[site]: https://rollup.yoga -[op-succinct]: https://github.com/succinctlabs/op-succinct -[kailua]: https://github.com/risc0/kailua -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs -[g-output-root]: https://specs.optimism.io/glossary.html#l2-output-root -[g-derivation-pipeline]: https://specs.optimism.io/protocol/derivation.html#l2-chain-derivation-pipeline -[g-fault-proof-vm]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-vm -[g-preimage-oracle]: https://specs.optimism.io/fault-proof/index.html#pre-image-oracle diff --git a/kona/bin/client/justfile b/kona/bin/client/justfile deleted file mode 100644 index 11fb537a136..00000000000 --- a/kona/bin/client/justfile 
+++ /dev/null @@ -1,297 +0,0 @@ -set fallback := true - -KONA_CLIENT_ROOT := source_directory() - -# default recipe to display help information -default: - @just --list - -# Run the client program on asterisc with the host in detached server mode. -run-client-asterisc block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc verbosity='': - #!/usr/bin/env bash - - L1_NODE_ADDRESS="{{l1_rpc}}" - L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" - L2_NODE_ADDRESS="{{l2_rpc}}" - OP_NODE_ADDRESS="{{rollup_node_rpc}}" - - HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/release/kona-host" - CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/riscv64imac-unknown-none-elf/release-client-lto/kona-client" - STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." - - # Get output root for block - CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) - - # Get the info for the previous block - AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) - AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) - L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) - L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) - L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Building client program for RISC-V target..." - just build-asterisc-client - - echo "Loading client program into Asterisc state format..." - asterisc load-elf --path=$CLIENT_BIN_PATH - - echo "Building host program for native target..." 
- cargo build --bin kona-host --release - - echo "Running asterisc" - asterisc run \ - --info-at '%10000000' \ - --proof-at never \ - --input $STATE_PATH \ - -- \ - $HOST_BIN_PATH \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l2-chain-id $L2_CHAIN_ID \ - --l1-node-address $L1_NODE_ADDRESS \ - --l1-beacon-address $L1_BEACON_ADDRESS \ - --l2-node-address $L2_NODE_ADDRESS \ - --server \ - --data-dir ./data \ - {{verbosity}} - -# Run the client program natively with the host program attached. -run-client-native block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollup_config_path='' verbosity='': - #!/usr/bin/env bash - set -o errexit -o nounset -o pipefail - - L1_NODE_ADDRESS="{{l1_rpc}}" - L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" - L2_NODE_ADDRESS="{{l2_rpc}}" - OP_NODE_ADDRESS="{{rollup_node_rpc}}" - - L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) - if [ -z "{{rollup_config_path}}" ]; then - CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--l2-chain-id $L2_CHAIN_ID" - else - CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--rollup-config-path $(realpath {{rollup_config_path}})" - fi - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." 
- - # Get output root for block - CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) - - # Get the info for the previous block - AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) - AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) - L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) - L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Running host program with native client program..." - cargo r --bin kona-host --release -- \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l1-node-address $L1_NODE_ADDRESS \ - --l1-beacon-address $L1_BEACON_ADDRESS \ - --l2-node-address $L2_NODE_ADDRESS \ - --native \ - --data-dir ./data \ - $CHAIN_ID_OR_ROLLUP_CONFIG_ARG \ - {{verbosity}} - -# Run the client program natively with the host program attached, in offline mode. -run-client-native-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': - #!/usr/bin/env bash - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} - AGREED_L2_OUTPUT_ROOT={{l2_output_root}} - AGREED_L2_HEAD_HASH={{l2_head}} - L1_HEAD={{l1_head}} - L2_CHAIN_ID={{l2_chain_id}} - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Running host program with native client program..." 
- cargo r --bin kona-host -- \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l2-chain-id $L2_CHAIN_ID \ - --native \ - --data-dir ./data \ - {{verbosity}} - -# Run the client program on asterisc with the host program detached, in offline mode. -run-client-asterisc-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': - #!/usr/bin/env bash - - HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/debug/kona-host" - CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/riscv64imac-unknown-none-elf/release-client-lto/kona-client" - STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} - AGREED_L2_OUTPUT_ROOT={{l2_output_root}} - AGREED_L2_HEAD_HASH={{l2_head}} - L1_HEAD={{l1_head}} - L2_CHAIN_ID={{l2_chain_id}} - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Building client program for RISC-V target..." - just build-asterisc-client - - echo "Loading client program into Asterisc state format..." - asterisc load-elf --path=$CLIENT_BIN_PATH - - echo "Building host program for native target..." - cargo build --bin kona-host - - echo "Running asterisc" - asterisc run \ - --info-at '%10000000' \ - --proof-at never \ - --input $STATE_PATH \ - -- \ - $HOST_BIN_PATH \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l2-chain-id $L2_CHAIN_ID \ - --server \ - --data-dir ./data \ - {{verbosity}} - -# Run the client program on cannon with the host in detached server mode. 
-run-client-cannon block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollup_config_path='' verbosity='': - #!/usr/bin/env bash - set -o errexit -o nounset -o pipefail - - HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/release/kona-host" - CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/mips64-unknown-none/release-client-lto/kona-client" - STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" - - L1_NODE_ADDRESS="{{l1_rpc}}" - L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" - L2_NODE_ADDRESS="{{l2_rpc}}" - OP_NODE_ADDRESS="{{rollup_node_rpc}}" - - L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) - if [ -z "{{rollup_config_path}}" ]; then - CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--l2-chain-id $L2_CHAIN_ID" - else - CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--rollup-config-path $(realpath {{rollup_config_path}})" - fi - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." - - # Get output root for block - CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) - - # Get the info for the previous block - AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) - AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) - L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) - L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Building client program for MIPS64 target..." - just build-cannon-client - - echo "Loading client program into Cannon state format..." - cannon load-elf --path=$CLIENT_BIN_PATH --type multithreaded64-5 - - echo "Building host program for native target..." 
- cargo build --bin kona-host --release - - echo "Running cannon" - cannon run \ - --info-at '%10000000' \ - --proof-at never \ - --input $STATE_PATH \ - -- \ - $HOST_BIN_PATH \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l2-chain-id $L2_CHAIN_ID \ - --l1-node-address $L1_NODE_ADDRESS \ - --l1-beacon-address $L1_BEACON_ADDRESS \ - --l2-node-address $L2_NODE_ADDRESS \ - --server \ - --data-dir ./data \ - {{verbosity}} - -# Run the client program on cannon with the host program detached, in offline mode. -run-client-cannon-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': - #!/usr/bin/env bash - - HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/debug/kona-host" - CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../target/mips64-unknown-none/release-client-lto/kona-client" - STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" - - CLAIMED_L2_BLOCK_NUMBER={{block_number}} - CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} - AGREED_L2_OUTPUT_ROOT={{l2_output_root}} - AGREED_L2_HEAD_HASH={{l2_head}} - L1_HEAD={{l1_head}} - L2_CHAIN_ID={{l2_chain_id}} - - # Move to the kona root - cd {{KONA_CLIENT_ROOT}}/../.. - - echo "Building client program for MIPS64 target..." - just build-cannon-client - - echo "Loading client program into Cannon state format..." - cannon load-elf --path=$CLIENT_BIN_PATH --type multithreaded64-5 - - echo "Building host program for native target..." 
- cargo build --bin kona-host - - echo "Running cannon" - cannon run \ - --info-at '%10000000' \ - --proof-at never \ - --input $STATE_PATH \ - -- \ - $HOST_BIN_PATH \ - single \ - --l1-head $L1_HEAD \ - --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ - --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ - --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ - --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ - --l2-chain-id $L2_CHAIN_ID \ - --server \ - --data-dir ./data \ - {{verbosity}} diff --git a/kona/bin/client/src/lib.rs b/kona/bin/client/src/lib.rs deleted file mode 100644 index 58128dd3524..00000000000 --- a/kona/bin/client/src/lib.rs +++ /dev/null @@ -1,12 +0,0 @@ -#![doc = include_str!("../README.md")] -#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] -#![deny(unused_must_use, rust_2018_idioms)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![allow(clippy::type_complexity)] -#![cfg_attr(not(test), no_std)] - -extern crate alloc; - -pub mod fpvm_evm; -pub mod interop; -pub mod single; diff --git a/kona/bin/host/Cargo.toml b/kona/bin/host/Cargo.toml deleted file mode 100644 index edba656bb6c..00000000000 --- a/kona/bin/host/Cargo.toml +++ /dev/null @@ -1,82 +0,0 @@ -[package] -name = "kona-host" -version = "1.0.2" -edition.workspace = true -license.workspace = true -authors.workspace = true -repository.workspace = true -homepage.workspace = true -publish = false - -[lints] -workspace = true - -[dependencies] -# Proof -kona-mpt.workspace = true -kona-client.workspace = true -kona-executor.workspace = true -kona-std-fpvm.workspace = true -kona-proof-interop.workspace = true -kona-proof = { workspace = true, features = ["std"] } -kona-preimage = { workspace = true, features = ["std"] } - -# Protocol -kona-driver.workspace = true -kona-derive.workspace = true -kona-registry.workspace = true -kona-protocol = { workspace = true, features = ["std", "serde"] } -kona-genesis = { workspace = true, features = ["std", "serde"] 
} - -# Services -kona-cli.workspace = true -kona-providers-alloy.workspace = true - -# Alloy -alloy-rlp.workspace = true -alloy-transport.workspace = true -alloy-eips = { workspace = true, features = ["kzg"] } -alloy-serde.workspace = true -alloy-provider = { workspace = true, features = ["reqwest"] } -alloy-consensus = { workspace = true, features = ["std"] } -alloy-rpc-client.workspace = true -alloy-transport-http.workspace = true -alloy-rpc-types = { workspace = true, features = ["eth", "debug"] } -alloy-primitives = { workspace = true, features = ["serde"] } -alloy-rpc-types-beacon.workspace = true - -# Op Alloy -op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } -op-alloy-network.workspace = true -alloy-op-evm = { workspace = true, features = ["std"] } - -# Revm -revm = { workspace = true, features = ["std", "c-kzg", "secp256k1", "portable", "blst"] } - -# General -anyhow.workspace = true -tracing.workspace = true -reqwest.workspace = true -serde_json.workspace = true -async-trait.workspace = true -rocksdb = { workspace = true, features = ["snappy", "bindgen-runtime"] } -tokio = { workspace = true, features = ["full"] } -serde = { workspace = true, features = ["derive"] } -clap = { workspace = true, features = ["derive", "env"] } -tracing-subscriber = { workspace = true, features = ["fmt"] } -thiserror.workspace = true - -# KZG -ark-ff.workspace = true - -[dev-dependencies] -proptest.workspace = true - -[features] -default = [ "interop", "single" ] -single = [] -interop = [ "single" ] - -[[bin]] -name = "kona-host" -path = "src/bin/host.rs" diff --git a/kona/bin/host/src/backend/util.rs b/kona/bin/host/src/backend/util.rs deleted file mode 100644 index 66c2c413646..00000000000 --- a/kona/bin/host/src/backend/util.rs +++ /dev/null @@ -1,40 +0,0 @@ -//! Utilities for the preimage server backend. 
- -use crate::{KeyValueStore, Result}; -use alloy_consensus::EMPTY_ROOT_HASH; -use alloy_primitives::keccak256; -use alloy_rlp::EMPTY_STRING_CODE; -use kona_preimage::{PreimageKey, PreimageKeyType}; -use tokio::sync::RwLock; - -/// Constructs a merkle patricia trie from the ordered list passed and stores all encoded -/// intermediate nodes of the trie in the [KeyValueStore]. -pub(crate) async fn store_ordered_trie<KV: KeyValueStore + ?Sized, T: AsRef<[u8]>>( - kv: &RwLock<KV>, - values: &[T], -) -> Result<()> { - let mut kv_write_lock = kv.write().await; - - // If the list of nodes is empty, store the empty root hash and exit early. - // The `HashBuilder` will not push the preimage of the empty root hash to the - // `ProofRetainer` in the event that there are no leaves inserted. - if values.is_empty() { - let empty_key = PreimageKey::new(*EMPTY_ROOT_HASH, PreimageKeyType::Keccak256); - return kv_write_lock.set(empty_key.into(), [EMPTY_STRING_CODE].into()); - } - - let mut hb = kona_mpt::ordered_trie_with_encoder(values, |node, buf| { - buf.put_slice(node.as_ref()); - }); - hb.root(); - let intermediates = hb.take_proof_nodes().into_inner(); - - for (_, value) in intermediates.into_iter() { - let value_hash = keccak256(value.as_ref()); - let key = PreimageKey::new(*value_hash, PreimageKeyType::Keccak256); - - kv_write_lock.set(key.into(), value.into())?; - } - - Ok(()) -} diff --git a/kona/bin/host/src/error.rs b/kona/bin/host/src/error.rs deleted file mode 100644 index 1cabc4fcf38..00000000000 --- a/kona/bin/host/src/error.rs +++ /dev/null @@ -1,142 +0,0 @@ -//! Error types for the host binary. - -use alloy_rlp::Error as RlpError; -use alloy_transport::TransportError; -use kona_preimage::errors::PreimageOracleError; -use std::array::TryFromSliceError; -use thiserror::Error; - -/// Result type for host operations. -pub type Result<T> = std::result::Result<T, HostError>; - -/// Error type for host operations. 
-#[derive(Debug, Error)] -pub enum HostError { - /// A custom error message. - #[error("{0}")] - Custom(String), - - /// Block not found error. - #[error("Block not found")] - BlockNotFound, - - /// Invalid hint data length. - #[error("Invalid hint data length")] - InvalidHintDataLength, - - /// Precompile not accelerated. - #[error("Precompile not accelerated")] - PrecompileNotAccelerated, - - /// Failed precompile execution. - #[error("Failed precompile execution: {0}")] - PrecompileExecutionFailed(String), - - /// No rollup config found for chain ID. - #[error("No rollup config found for chain ID: {0}")] - NoRollupConfig(u64), - - /// Output root mismatch. - #[error("Output root does not match L2 head")] - OutputRootMismatch, - - /// Agreed pre-state hash mismatch. - #[error("Agreed pre-state hash does not match")] - AgreedPreStateHashMismatch, - - /// Expected blob count mismatch. - #[error("Expected {expected} blob(s), got {actual}")] - BlobCountMismatch { - /// Expected blob count. - expected: usize, - /// Actual blob count. - actual: usize, - }, - - /// Expected sidecar count mismatch. - #[error("Expected {expected} sidecar(s), got {actual}")] - SidecarCountMismatch { - /// Expected sidecar count. - expected: usize, - /// Actual sidecar count. - actual: usize, - }, - - /// No artifacts found for safe head. - #[error("No artifacts found for the safe head")] - NoArtifactsForSafeHead, - - /// Failed to fetch blob sidecars. - #[error("Failed to fetch blob sidecars: {0}")] - BlobSidecarFetchFailed(String), - - /// Failed to set key-value pair. - #[error("Failed to set key-value pair: {0}")] - KeyValueSetFailed(String), - - /// Failed to convert slice to B256. - #[error("Failed to convert slice to B256: {0}")] - B256ConversionFailed(String), - - /// Failed to fetch header RLP. - #[error("Failed to fetch header RLP: {0}")] - HeaderRlpFetchFailed(String), - - /// Error fetching code hash preimage. 
- #[error("Error fetching code hash preimage: {0}")] - CodeHashPreimageFetchFailed(String), - - /// Transport error. - #[error("Transport error: {0}")] - Transport(#[from] TransportError), - - /// RLP decoding error. - #[error("RLP decoding error: {0}")] - Rlp(#[from] RlpError), - - /// TryFromSlice error. - #[error("TryFromSlice error: {0}")] - TryFromSlice(#[from] TryFromSliceError), - - /// Serde JSON error. - #[error("Serde JSON error: {0}")] - SerdeJson(#[from] serde_json::Error), - - /// RocksDB error. - #[error("RocksDB error: {0}")] - RocksDb(String), - - /// Preimage oracle error. - #[error("Preimage oracle error: {0}")] - PreimageOracle(#[from] PreimageOracleError), - - /// Kona derive error. - #[error("Kona derive error: {0}")] - KonaDerive(String), - - /// Kona executor error. - #[error("Kona executor error: {0}")] - KonaExecutor(String), - - /// IO error. - #[error("IO error: {0}")] - Io(#[from] std::io::Error), -} - -impl From<rocksdb::Error> for HostError { - fn from(err: rocksdb::Error) -> Self { - Self::RocksDb(err.to_string()) - } -} - -impl From<kona_derive::PipelineError> for HostError { - fn from(err: kona_derive::PipelineError) -> Self { - Self::KonaDerive(err.to_string()) - } -} - -impl From<kona_executor::ExecutorError> for HostError { - fn from(err: kona_executor::ExecutorError) -> Self { - Self::KonaExecutor(err.to_string()) - } -} diff --git a/kona/bin/host/src/interop/cfg.rs b/kona/bin/host/src/interop/cfg.rs deleted file mode 100644 index 311040c6eae..00000000000 --- a/kona/bin/host/src/interop/cfg.rs +++ /dev/null @@ -1,381 +0,0 @@ -//! This module contains all CLI-specific code for the interop entrypoint. 
- -use super::{InteropHintHandler, InteropLocalInputs}; -use crate::{ - DiskKeyValueStore, MemoryKeyValueStore, OfflineHostBackend, OnlineHostBackend, - OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, SplitKeyValueStore, - eth::rpc_provider, server::PreimageServerError, -}; -use alloy_primitives::{B256, Bytes}; -use alloy_provider::{Provider, RootProvider}; -use clap::Parser; -use kona_cli::cli_styles; -use kona_genesis::{L1ChainConfig, RollupConfig}; -use kona_preimage::{ - BidirectionalChannel, Channel, HintReader, HintWriter, OracleReader, OracleServer, -}; -use kona_proof_interop::HintType; -use kona_providers_alloy::{OnlineBeaconClient, OnlineBlobProvider}; -use kona_std_fpvm::{FileChannel, FileDescriptor}; -use op_alloy_network::Optimism; -use serde::Serialize; -use std::{collections::HashMap, path::PathBuf, str::FromStr, sync::Arc}; -use tokio::{ - sync::RwLock, - task::{self, JoinHandle}, -}; - -/// The interop host application. -#[derive(Default, Parser, Serialize, Clone, Debug)] -#[command(styles = cli_styles())] -pub struct InteropHost { - /// Hash of the L1 head block, marking a static, trusted cutoff point for reading data from the - /// L1 chain. - #[arg(long, env)] - pub l1_head: B256, - /// Agreed [PreState] to start from. - /// - /// [PreState]: kona_proof_interop::PreState - #[arg(long, visible_alias = "l2-pre-state", value_parser = Bytes::from_str, env)] - pub agreed_l2_pre_state: Bytes, - /// Claimed L2 post-state to validate. - #[arg(long, visible_alias = "l2-claim", env)] - pub claimed_l2_post_state: B256, - /// Claimed L2 timestamp, corresponding to the L2 post-state. - #[arg(long, visible_alias = "l2-timestamp", env)] - pub claimed_l2_timestamp: u64, - /// Addresses of L2 JSON-RPC endpoints to use (eth and debug namespace required). 
- #[arg( - long, - visible_alias = "l2s", - requires = "l1_node_address", - requires = "l1_beacon_address", - value_delimiter = ',', - env - )] - pub l2_node_addresses: Option<Vec<String>>, - /// Address of L1 JSON-RPC endpoint to use (eth and debug namespace required) - #[arg( - long, - visible_alias = "l1", - requires = "l2_node_addresses", - requires = "l1_beacon_address", - env - )] - pub l1_node_address: Option<String>, - /// Address of the L1 Beacon API endpoint to use. - #[arg( - long, - visible_alias = "beacon", - requires = "l1_node_address", - requires = "l2_node_addresses", - env - )] - pub l1_beacon_address: Option<String>, - /// The Data Directory for preimage data storage. Optional if running in online mode, - /// required if running in offline mode. - #[arg( - long, - visible_alias = "db", - required_unless_present_all = ["l2_node_addresses", "l1_node_address", "l1_beacon_address"], - env - )] - pub data_dir: Option<PathBuf>, - /// Run the client program natively. - #[arg(long, conflicts_with = "server", required_unless_present = "server")] - pub native: bool, - /// Run in pre-image server mode without executing any client program. If not provided, the - /// host will run the client program in the host process. - #[arg(long, conflicts_with = "native", required_unless_present = "native")] - pub server: bool, - /// Path to rollup configs. If provided, the host will use this config instead of attempting to - /// look up the configs in the superchain registry. - /// The rollup configs should be stored as serde-JSON serialized files. - #[arg(long, alias = "rollup-cfgs", value_delimiter = ',', env)] - pub rollup_config_paths: Option<Vec<PathBuf>>, - /// Path to l1 config. If provided, the host will use this config instead of attempting to - /// look up the config in the superchain registry. - /// The l1 config should be stored as serde-JSON serialized files. 
- #[arg(long, alias = "l1-cfg")] - pub l1_config_path: Option<PathBuf>, -} - -/// An error that can occur when handling interop hosts -#[derive(Debug, thiserror::Error)] -pub enum InteropHostError { - /// An error when handling preimage requests. - #[error("Error handling preimage request: {0}")] - PreimageServerError(#[from] PreimageServerError), - /// An IO error. - #[error("IO error: {0}")] - IOError(#[from] std::io::Error), - /// A JSON parse error. - #[error("Failed deserializing RollupConfig: {0}")] - ParseError(#[from] serde_json::Error), - /// No l1 config found. - #[error("No l1 config found")] - NoL1Config, - /// Task failed to execute to completion. - #[error("Join error: {0}")] - ExecutionError(#[from] tokio::task::JoinError), - /// A RPC error. - #[error("Rpc Error: {0}")] - RpcError(#[from] alloy_transport::RpcError<alloy_transport::TransportErrorKind>), - /// An error when no provider found for chain ID. - #[error("No provider found for chain ID: {0}")] - RootProviderError(u64), - /// Any other error. - #[error("Error: {0}")] - Other(&'static str), -} - -impl InteropHost { - /// Starts the [InteropHost] application. - pub async fn start(self) -> Result<(), InteropHostError> { - if self.server { - let hint = FileChannel::new(FileDescriptor::HintRead, FileDescriptor::HintWrite); - let preimage = - FileChannel::new(FileDescriptor::PreimageRead, FileDescriptor::PreimageWrite); - - self.start_server(hint, preimage).await?.await? - } else { - self.start_native().await - } - } - - /// Starts the preimage server, communicating with the client over the provided channels. 
- async fn start_server<C>( - &self, - hint: C, - preimage: C, - ) -> Result<JoinHandle<Result<(), InteropHostError>>, InteropHostError> - where - C: Channel + Send + Sync + 'static, - { - let kv_store = self.create_key_value_store()?; - - let task_handle = if self.is_offline() { - task::spawn(async { - PreimageServer::new( - OracleServer::new(preimage), - HintReader::new(hint), - Arc::new(OfflineHostBackend::new(kv_store)), - ) - .start() - .await - .map_err(InteropHostError::from) - }) - } else { - let providers = self.create_providers().await?; - let backend = OnlineHostBackend::new( - self.clone(), - kv_store.clone(), - providers, - InteropHintHandler, - ) - .with_proactive_hint(HintType::L2BlockData); - - task::spawn(async { - PreimageServer::new( - OracleServer::new(preimage), - HintReader::new(hint), - Arc::new(backend), - ) - .start() - .await - .map_err(InteropHostError::from) - }) - }; - - Ok(task_handle) - } - - /// Starts the host in native mode, running both the client and preimage server in the same - /// process. - async fn start_native(&self) -> Result<(), InteropHostError> { - let hint = BidirectionalChannel::new()?; - let preimage = BidirectionalChannel::new()?; - - let server_task = self.start_server(hint.host, preimage.host).await?; - let client_task = task::spawn(kona_client::interop::run( - OracleReader::new(preimage.client), - HintWriter::new(hint.client), - )); - - let (_, client_result) = tokio::try_join!(server_task, client_task)?; - - // Bubble up the exit status of the client program if execution completes. - std::process::exit(client_result.is_err() as i32) - } - - /// Returns `true` if the host is running in offline mode. - pub const fn is_offline(&self) -> bool { - self.l1_node_address.is_none() && - self.l2_node_addresses.is_none() && - self.l1_beacon_address.is_none() && - self.data_dir.is_some() - } - - /// Reads the [RollupConfig]s from the file system and returns a map of L2 chain ID -> - /// [RollupConfig]s. 
- pub fn read_rollup_configs( - &self, - ) -> Option<Result<HashMap<u64, RollupConfig>, InteropHostError>> { - let rollup_config_paths = self.rollup_config_paths.as_ref()?; - - Some(rollup_config_paths.iter().try_fold(HashMap::default(), |mut acc, path| { - // Read the serialized config from the file system. - let ser_config = std::fs::read_to_string(path)?; - - // Deserialize the config and return it. - let cfg: RollupConfig = serde_json::from_str(&ser_config)?; - - acc.insert(cfg.l2_chain_id.id(), cfg); - Ok(acc) - })) - } - - /// Reads the [`L1ChainConfig`]s from the file system and returns a map of L1 chain ID -> - /// [`L1ChainConfig`]s. - pub fn read_l1_config(&self) -> Result<L1ChainConfig, InteropHostError> { - let path = self.l1_config_path.as_ref().ok_or_else(|| InteropHostError::NoL1Config)?; - - // Read the serialized config from the file system. - let ser_config = std::fs::read_to_string(path)?; - - // Deserialize the config and return it. - serde_json::from_str(&ser_config) - .map_err(|_| InteropHostError::Other("failed to parse L1 config")) - } - - /// Creates the key-value store for the host backend. - fn create_key_value_store(&self) -> Result<SharedKeyValueStore, InteropHostError> { - let local_kv_store = InteropLocalInputs::new(self.clone()); - - let kv_store: SharedKeyValueStore = if let Some(ref data_dir) = self.data_dir { - let disk_kv_store = DiskKeyValueStore::new(data_dir.clone()); - let split_kv_store = SplitKeyValueStore::new(local_kv_store, disk_kv_store); - Arc::new(RwLock::new(split_kv_store)) - } else { - let mem_kv_store = MemoryKeyValueStore::new(); - let split_kv_store = SplitKeyValueStore::new(local_kv_store, mem_kv_store); - Arc::new(RwLock::new(split_kv_store)) - }; - - Ok(kv_store) - } - - /// Creates the providers required for the preimage server backend. 
- async fn create_providers(&self) -> Result<InteropProviders, InteropHostError> { - let l1_provider = rpc_provider( - self.l1_node_address.as_ref().ok_or(InteropHostError::Other("Provider must be set"))?, - ) - .await; - - let blob_provider = OnlineBlobProvider::init(OnlineBeaconClient::new_http( - self.l1_beacon_address - .clone() - .ok_or(InteropHostError::Other("Beacon API URL must be set"))?, - )) - .await; - - // Resolve all chain IDs to their corresponding providers. - let l2_node_addresses = self - .l2_node_addresses - .as_ref() - .ok_or(InteropHostError::Other("L2 node addresses must be set"))?; - let mut l2_providers = HashMap::default(); - for l2_node_address in l2_node_addresses { - let l2_provider = rpc_provider::<Optimism>(l2_node_address).await; - let chain_id = l2_provider.get_chain_id().await?; - l2_providers.insert(chain_id, l2_provider); - } - - Ok(InteropProviders { l1: l1_provider, blobs: blob_provider, l2s: l2_providers }) - } -} - -impl OnlineHostBackendCfg for InteropHost { - type HintType = HintType; - type Providers = InteropProviders; -} - -/// The providers required for the single chain host. -#[derive(Debug, Clone)] -pub struct InteropProviders { - /// The L1 EL provider. - pub l1: RootProvider, - /// The L1 beacon node provider. - pub blobs: OnlineBlobProvider<OnlineBeaconClient>, - /// The L2 EL providers, keyed by chain ID. - pub l2s: HashMap<u64, RootProvider<Optimism>>, -} - -impl InteropProviders { - /// Returns the L2 [RootProvider] for the given chain ID. 
- pub fn l2(&self, chain_id: &u64) -> Result<&RootProvider<Optimism>, InteropHostError> { - self.l2s.get(chain_id).ok_or_else(|| InteropHostError::RootProviderError(*chain_id)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::b256; - - #[test] - fn test_parse_interop_host_cli() { - let hash = b256!("ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68"); - let host = InteropHost::parse_from([ - "interop-host", - "--l1-head", - "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", - "--l2-pre-state", - "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", - "--claimed-l2-post-state", - &hash.to_string(), - "--claimed-l2-timestamp", - "0", - "--native", - "--l2-node-addresses", - "http://localhost:8545", - "--l1-node-address", - "http://localhost:8546", - "--l1-beacon-address", - "http://localhost:8547", - ]); - assert_eq!(host.l1_head, hash); - assert_eq!(host.agreed_l2_pre_state, Bytes::from(hash.0)); - assert_eq!(host.claimed_l2_post_state, hash); - assert_eq!(host.claimed_l2_timestamp, 0); - assert!(host.native); - } - - #[test] - fn test_parse_interop_hex_bytes() { - let hash = b256!("ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68"); - let host = InteropHost::parse_from([ - "interop-host", - "--l1-head", - "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", - "--l2-pre-state", - "ff", - "--claimed-l2-post-state", - &hash.to_string(), - "--claimed-l2-timestamp", - "0", - "--native", - "--l2-node-addresses", - "http://localhost:8545", - "--l1-node-address", - "http://localhost:8546", - "--l1-beacon-address", - "http://localhost:8547", - ]); - assert_eq!(host.l1_head, hash); - assert_eq!(host.agreed_l2_pre_state, Bytes::from([0xff])); - assert_eq!(host.claimed_l2_post_state, hash); - assert_eq!(host.claimed_l2_timestamp, 0); - assert!(host.native); - } -} diff --git a/kona/bin/host/src/interop/handler.rs b/kona/bin/host/src/interop/handler.rs deleted file mode 
100644 index daa9db2ec6b..00000000000 --- a/kona/bin/host/src/interop/handler.rs +++ /dev/null @@ -1,616 +0,0 @@ -//! [HintHandler] for the [InteropHost]. - -use super::InteropHost; -use crate::{ - HintHandler, OnlineHostBackend, OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, - backend::util::store_ordered_trie, -}; -use alloy_consensus::{Header, Sealed}; -use alloy_eips::{ - eip2718::Encodable2718, - eip4844::{BlobTransactionSidecarItem, FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}, -}; -use alloy_op_evm::OpEvmFactory; -use alloy_primitives::{Address, B256, Bytes, keccak256}; -use alloy_provider::Provider; -use alloy_rlp::{Decodable, Encodable}; -use alloy_rpc_types::Block; -use anyhow::{Result, anyhow, ensure}; -use ark_ff::{BigInteger, PrimeField}; -use async_trait::async_trait; -use kona_derive::EthereumDataSource; -use kona_driver::Driver; -use kona_executor::TrieDBProvider; -use kona_preimage::{ - BidirectionalChannel, HintReader, HintWriter, OracleReader, OracleServer, PreimageKey, - PreimageKeyType, -}; -use kona_proof::{ - CachingOracle, Hint, - executor::KonaExecutor, - l1::{OracleBlobProvider, OracleL1ChainProvider, OraclePipeline, ROOTS_OF_UNITY}, - l2::OracleL2ChainProvider, - sync::new_oracle_pipeline_cursor, -}; -use kona_proof_interop::{HintType, PreState}; -use kona_protocol::{BlockInfo, OutputRoot, Predeploys}; -use kona_registry::{L1_CONFIGS, ROLLUP_CONFIGS}; -use std::sync::Arc; -use tokio::task; -use tracing::{Instrument, debug, info, info_span, warn}; - -/// The [HintHandler] for the [InteropHost]. 
-#[derive(Debug, Clone, Copy)] -pub struct InteropHintHandler; - -#[async_trait] -impl HintHandler for InteropHintHandler { - type Cfg = InteropHost; - - async fn fetch_hint( - hint: Hint<<Self::Cfg as OnlineHostBackendCfg>::HintType>, - cfg: &Self::Cfg, - providers: &<Self::Cfg as OnlineHostBackendCfg>::Providers, - kv: SharedKeyValueStore, - ) -> Result<()> { - match hint.ty { - HintType::L1BlockHeader => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let raw_header: Bytes = - providers.l1.client().request("debug_getRawHeader", [hash]).await?; - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; - } - HintType::L1Transactions => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let Block { transactions, .. } = providers - .l1 - .get_block_by_hash(hash) - .full() - .await? - .ok_or(anyhow!("Block not found"))?; - let encoded_transactions = transactions - .into_transactions() - .map(|tx| tx.inner.encoded_2718()) - .collect::<Vec<_>>(); - - store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; - } - HintType::L1Receipts => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let raw_receipts: Vec<Bytes> = - providers.l1.client().request("debug_getRawReceipts", [hash]).await?; - - store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; - } - HintType::L1Blob => { - ensure!(hint.data.len() == 48, "Invalid hint data length"); - - let hash_data_bytes: [u8; 32] = hint.data[0..32].try_into()?; - let index_data_bytes: [u8; 8] = hint.data[32..40].try_into()?; - let timestamp_data_bytes: [u8; 8] = hint.data[40..48].try_into()?; - - let hash: B256 = hash_data_bytes.into(); - let index = u64::from_be_bytes(index_data_bytes); - let timestamp = u64::from_be_bytes(timestamp_data_bytes); 
- - let partial_block_ref = BlockInfo { timestamp, ..Default::default() }; - let indexed_hash = IndexedBlobHash { index, hash }; - - // Fetch the blob sidecar from the blob provider. - let mut sidecars = providers - .blobs - .fetch_filtered_blob_sidecars(&partial_block_ref, &[indexed_hash]) - .await - .map_err(|e| anyhow!("Failed to fetch blob sidecars: {e}"))?; - - if sidecars.len() != 1 { - anyhow::bail!("Expected 1 sidecar, got {}", sidecars.len()); - } - - let BlobTransactionSidecarItem { - blob, - kzg_proof: proof, - kzg_commitment: commitment, - .. - } = sidecars.pop().expect("Expected 1 sidecar"); - - // Acquire a lock on the key-value store and set the preimages. - let mut kv_lock = kv.write().await; - - // Set the preimage for the blob commitment. - kv_lock.set( - PreimageKey::new(*hash, PreimageKeyType::Sha256).into(), - commitment.to_vec(), - )?; - - // Write all the field elements to the key-value store. There should be 4096. - // The preimage oracle key for each field element is the keccak256 hash of - // `abi.encodePacked(sidecar.KZGCommitment, bytes32(ROOTS_OF_UNITY[i]))`. - let mut blob_key = [0u8; 80]; - blob_key[..48].copy_from_slice(commitment.as_ref()); - for i in 0..FIELD_ELEMENTS_PER_BLOB { - blob_key[48..].copy_from_slice( - ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref(), - ); - let blob_key_hash = keccak256(blob_key.as_ref()); - - kv_lock - .set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; - kv_lock.set( - PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), - blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(), - )?; - } - - // Write the KZG Proof as the 4096th element. - // Note: This is not associated with a root of unity, as to be backwards compatible - // with ZK users of kona that use this proof for the overall blob. 
- blob_key[72..].copy_from_slice((FIELD_ELEMENTS_PER_BLOB).to_be_bytes().as_ref()); - let blob_key_hash = keccak256(blob_key.as_ref()); - - kv_lock.set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; - kv_lock.set( - PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), - proof.to_vec(), - )?; - } - HintType::L1Precompile => { - ensure!(hint.data.len() >= 28, "Invalid hint data length"); - - let address = Address::from_slice(&hint.data.as_ref()[..20]); - let gas = u64::from_be_bytes(hint.data.as_ref()[20..28].try_into()?); - let input = hint.data[28..].to_vec(); - let input_hash = keccak256(hint.data.as_ref()); - - let result = crate::eth::execute(address, input, gas).map_or_else( - |_| vec![0u8; 1], - |raw_res| { - let mut res = Vec::with_capacity(1 + raw_res.len()); - res.push(0x01); - res.extend_from_slice(&raw_res); - res - }, - ); - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*input_hash).into(), hint.data.into())?; - kv_lock.set( - PreimageKey::new(*input_hash, PreimageKeyType::Precompile).into(), - result, - )?; - } - HintType::AgreedPreState => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - - if hash != keccak256(cfg.agreed_l2_pre_state.as_ref()) { - anyhow::bail!("Agreed pre-state hash does not match."); - } - - let mut kv_write_lock = kv.write().await; - kv_write_lock.set( - PreimageKey::new_keccak256(*hash).into(), - cfg.agreed_l2_pre_state.clone().into(), - )?; - } - HintType::L2OutputRoot => { - ensure!(hint.data.len() >= 32 && hint.data.len() <= 40, "Invalid hint data length"); - - let hash = B256::from_slice(&hint.data.as_ref()[0..32]); - let chain_id = u64::from_be_bytes(hint.data.as_ref()[32..40].try_into()?); - let l2_provider = providers.l2(&chain_id)?; - - // Decode the pre-state to determine the timestamp of the block. 
- let pre = PreState::decode(&mut cfg.agreed_l2_pre_state.as_ref())?; - let timestamp = match pre { - PreState::SuperRoot(super_root) => super_root.timestamp, - PreState::TransitionState(transition_state) => { - transition_state.pre_state.timestamp - } - }; - - // Convert the timestamp to an L2 block number, using the rollup config for the - // chain ID embedded within the hint. - let rollup_config = cfg - .read_rollup_configs() - // If an error occurred while reading the rollup configs, return the error. - .transpose()? - // Try to find the appropriate rollup config for the chain ID. - .and_then(|configs| configs.get(&chain_id).cloned()) - // If we can't find the rollup config, try to find it in the global rollup - // configs. - .or_else(|| ROLLUP_CONFIGS.get(&chain_id).cloned()) - .map(Arc::new) - .ok_or(anyhow!("No rollup config found for chain ID: {chain_id}"))?; - let block_number = rollup_config.block_number_from_timestamp(timestamp); - - // Fetch the header for the L2 head block. - let raw_header: Bytes = l2_provider - .client() - .request("debug_getRawHeader", &[format!("0x{block_number:x}")]) - .await - .map_err(|e| anyhow!("Failed to fetch header RLP: {e}"))?; - let header = Header::decode(&mut raw_header.as_ref())?; - - // Fetch the storage root for the L2 head block. - let l2_to_l1_message_passer = l2_provider - .get_proof(Predeploys::L2_TO_L1_MESSAGE_PASSER, Default::default()) - .block_id(block_number.into()) - .await?; - - let output_root = OutputRoot::from_parts( - header.state_root, - l2_to_l1_message_passer.storage_hash, - header.hash_slow(), - ); - let output_root_hash = output_root.hash(); - - ensure!( - output_root_hash == hash, - "Output root does not match L2 head. 
Expected: {hash}, got: {output_root_hash}" - ); - - let mut kv_lock = kv.write().await; - kv_lock.set( - PreimageKey::new_keccak256(*output_root_hash).into(), - output_root.encode().into(), - )?; - } - HintType::L2BlockHeader => { - ensure!(hint.data.len() == 40, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref()[..32].try_into()?; - let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); - - let raw_header: Bytes = - providers.l2(&chain_id)?.client().request("debug_getRawHeader", [hash]).await?; - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; - } - HintType::L2Transactions => { - ensure!(hint.data.len() == 40, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref()[..32].try_into()?; - let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); - - let Block { transactions, .. } = providers - .l2(&chain_id)? - .get_block_by_hash(hash) - .full() - .await? - .ok_or(anyhow!("Block not found"))?; - let encoded_transactions = transactions - .into_transactions() - .map(|tx| tx.inner.inner.encoded_2718()) - .collect::<Vec<_>>(); - - store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; - } - HintType::L2Receipts => { - ensure!(hint.data.len() == 40, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref()[..32].try_into()?; - let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); - - let raw_receipts: Vec<Bytes> = providers - .l2(&chain_id)? 
- .client() - .request("debug_getRawReceipts", [hash]) - .await?; - - store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; - } - HintType::L2Code => { - // geth hashdb scheme code hash key prefix - const CODE_PREFIX: u8 = b'c'; - - ensure!(hint.data.len() == 40, "Invalid hint data length"); - - let hash: B256 = B256::from_slice(&hint.data[0..32]); - let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); - let l2_provider = providers.l2(&chain_id)?; - - // Attempt to fetch the code from the L2 chain provider. - let code_key = [&[CODE_PREFIX], hash.as_slice()].concat(); - let code = l2_provider - .client() - .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_key.into()]) - .await; - - // Check if the first attempt to fetch the code failed. If it did, try fetching the - // code hash preimage without the geth hashdb scheme prefix. - let code = match code { - Ok(code) => code, - Err(_) => l2_provider - .client() - .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) - .await - .map_err(|e| anyhow!("Error fetching code hash preimage: {e}"))?, - }; - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), code.into())?; - } - HintType::L2StateNode => { - ensure!(hint.data.len() == 40, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); - - // Fetch the preimage from the L2 chain provider. 
- let preimage: Bytes = - providers.l2(&chain_id)?.client().request("debug_dbGet", &[hash]).await?; - - let mut kv_write_lock = kv.write().await; - kv_write_lock.set(PreimageKey::new_keccak256(*hash).into(), preimage.into())?; - } - HintType::L2AccountProof => { - ensure!(hint.data.len() == 8 + 20 + 8, "Invalid hint data length"); - - let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); - let address = Address::from_slice(&hint.data.as_ref()[8..28]); - let chain_id = u64::from_be_bytes(hint.data[28..].try_into()?); - - let proof_response = providers - .l2(&chain_id)? - .get_proof(address, Default::default()) - .block_id(block_number.into()) - .await?; - - // Write the account proof nodes to the key-value store. - let mut kv_lock = kv.write().await; - proof_response.account_proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - } - HintType::L2AccountStorageProof => { - ensure!(hint.data.len() == 8 + 20 + 32 + 8, "Invalid hint data length"); - - let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); - let address = Address::from_slice(&hint.data.as_ref()[8..28]); - let slot = B256::from_slice(&hint.data.as_ref()[28..60]); - let chain_id = u64::from_be_bytes(hint.data[60..].try_into()?); - - let mut proof_response = providers - .l2(&chain_id)? - .get_proof(address, vec![slot]) - .block_id(block_number.into()) - .await?; - - let mut kv_lock = kv.write().await; - - // Write the account proof nodes to the key-value store. - proof_response.account_proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - - // Write the storage proof nodes to the key-value store. 
- let storage_proof = proof_response.storage_proof.remove(0); - storage_proof.proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - } - HintType::L2BlockData => { - ensure!(hint.data.len() == 72, "Invalid hint data length"); - - let agreed_block_hash = B256::from_slice(&hint.data.as_ref()[..32]); - let disputed_block_hash = B256::from_slice(&hint.data.as_ref()[32..64]); - let chain_id = u64::from_be_bytes(hint.data.as_ref()[64..72].try_into()?); - - // Return early if the agreed and disputed block are the same. This can occur when - // the chain has not progressed past its prestate, but the super root timestamp has - // progressed. - if agreed_block_hash == disputed_block_hash { - debug!( - target: "interop_hint_handler", - chain_id, - "Chain has not progressed. Skipping block data hint." - ); - return Ok(()); - } - - let l2_provider = providers.l2(&chain_id)?; - let rollup_config = cfg - .read_rollup_configs() - // If an error occurred while reading the rollup configs, return the error. - .transpose()? - // Try to find the appropriate rollup config for the chain ID. - .and_then(|configs| configs.get(&chain_id).cloned()) - // If we can't find the rollup config, try to find it in the global rollup - // configs. - .or_else(|| ROLLUP_CONFIGS.get(&chain_id).cloned()) - .map(Arc::new) - .ok_or(anyhow!("No rollup config found for chain ID: {chain_id}"))?; - - let l1_config = cfg - .read_l1_config() - .or_else(|_| { - L1_CONFIGS.get(&rollup_config.l1_chain_id).cloned().ok_or_else(|| { - anyhow!( - "No L1 config found for chain ID: {}", - rollup_config.l1_chain_id - ) - }) - }) - .map(Arc::new)?; - - // Check if the block is canonical before continuing. - let parent_block = l2_provider - .get_block_by_hash(agreed_block_hash) - .await? 
- .ok_or(anyhow!("Block not found."))?; - let disputed_block = l2_provider - .get_block_by_number((parent_block.header.number + 1).into()) - .await? - .ok_or(anyhow!("Block not found."))?; - - // Return early if the disputed block is canonical - preimages can be fetched - // through the normal flow. - if disputed_block.header.hash == disputed_block_hash { - debug!( - target: "interop_hint_handler", - number = disputed_block.header.number, - hash = ?disputed_block.header.hash, - "Block is already canonical. Skipping re-derivation + execution." - ); - return Ok(()); - } - - info!( - target: "interop_hint_handler", - optimistic_hash = ?disputed_block_hash, - "Re-executing optimistic block for witness collection" - ); - - // Reproduce the preimages for the optimistic block's derivation + execution and - // store them in the key-value store. - let hint = BidirectionalChannel::new()?; - let preimage = BidirectionalChannel::new()?; - let backend = - OnlineHostBackend::new(cfg.clone(), kv.clone(), providers.clone(), Self); - let server_task = task::spawn( - PreimageServer::new( - OracleServer::new(preimage.host), - HintReader::new(hint.host), - Arc::new(backend), - ) - .start(), - ); - let client_task = task::spawn({ - let l1_head = cfg.l1_head; - - async move { - let oracle = Arc::new(CachingOracle::new( - 1024, - OracleReader::new(preimage.client), - HintWriter::new(hint.client), - )); - - let mut l1_provider = OracleL1ChainProvider::new(l1_head, oracle.clone()); - let mut l2_provider = OracleL2ChainProvider::new( - agreed_block_hash, - rollup_config.clone(), - oracle.clone(), - ); - let beacon = OracleBlobProvider::new(oracle.clone()); - - l2_provider.set_chain_id(Some(chain_id)); - - let safe_head = l2_provider - .header_by_hash(agreed_block_hash) - .map(|header| Sealed::new_unchecked(header, agreed_block_hash))?; - let target_block = safe_head.number + 1; - - let cursor = new_oracle_pipeline_cursor( - rollup_config.as_ref(), - safe_head, - &mut l1_provider, - &mut 
l2_provider, - ) - .await?; - l2_provider.set_cursor(cursor.clone()); - - let da_provider = EthereumDataSource::new_from_parts( - l1_provider.clone(), - beacon, - &rollup_config, - ); - let pipeline = OraclePipeline::new( - rollup_config.clone(), - l1_config.clone(), - cursor.clone(), - oracle, - da_provider, - l1_provider, - l2_provider.clone(), - ) - .await?; - let executor = KonaExecutor::new( - rollup_config.as_ref(), - l2_provider.clone(), - l2_provider, - OpEvmFactory::default(), - None, - ); - let mut driver = Driver::new(cursor, executor, pipeline); - - driver - .advance_to_target(rollup_config.as_ref(), Some(target_block)) - .await?; - - driver - .safe_head_artifacts - .ok_or_else(|| anyhow!("No artifacts found for the safe head")) - } - .instrument(info_span!( - "OptimisticBlockReexecution", - block_number = disputed_block.header.number - )) - }); - - // Wait on both the server and client tasks to complete. - let (_, client_result) = tokio::try_join!(server_task, client_task)?; - let (build_outcome, raw_transactions) = client_result?; - - // Store optimistic block hash preimage. - let mut kv_lock = kv.write().await; - let mut rlp_buf = Vec::with_capacity(build_outcome.header.length()); - build_outcome.header.encode(&mut rlp_buf); - kv_lock.set( - PreimageKey::new(*build_outcome.header.hash(), PreimageKeyType::Keccak256) - .into(), - rlp_buf, - )?; - - // Drop the lock on the key-value store to avoid deadlocks. - drop(kv_lock); - - // Store receipts root preimages. - let raw_receipts = build_outcome - .execution_result - .receipts - .into_iter() - .map(|receipt| Ok::<_, anyhow::Error>(receipt.encoded_2718())) - .collect::<Result<Vec<_>>>()?; - store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; - - // Store tx root preimages. 
- store_ordered_trie(kv.as_ref(), raw_transactions.as_slice()).await?; - - info!( - target: "interop_hint_handler", - number = build_outcome.header.number, - hash = ?build_outcome.header.hash(), - "Re-executed optimistic block and collected witness" - ); - } - HintType::L2PayloadWitness => { - warn!( - target: "interop_hint_handler", - "L2PayloadWitness hint not implemented for interop hint handler, ignoring hint" - ); - } - } - - Ok(()) - } -} diff --git a/kona/bin/host/src/interop/local_kv.rs b/kona/bin/host/src/interop/local_kv.rs deleted file mode 100644 index 8c81318e01d..00000000000 --- a/kona/bin/host/src/interop/local_kv.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! Contains a concrete implementation of the [KeyValueStore] trait that stores data on disk, -//! using the [InteropHost] config. - -use super::InteropHost; -use crate::{KeyValueStore, Result}; -use alloy_primitives::{B256, keccak256}; -use kona_preimage::PreimageKey; -use kona_proof_interop::boot::{ - L1_CONFIG_KEY, L1_HEAD_KEY, L2_AGREED_PRE_STATE_KEY, L2_CLAIMED_POST_STATE_KEY, - L2_CLAIMED_TIMESTAMP_KEY, L2_ROLLUP_CONFIG_KEY, -}; - -/// A simple, synchronous key-value store that returns data from a [InteropHost] config. -#[derive(Debug)] -pub struct InteropLocalInputs { - cfg: InteropHost, -} - -impl InteropLocalInputs { - /// Create a new [InteropLocalInputs] with the given [InteropHost] config. 
- pub const fn new(cfg: InteropHost) -> Self { - Self { cfg } - } -} - -impl KeyValueStore for InteropLocalInputs { - fn get(&self, key: B256) -> Option<Vec<u8>> { - let preimage_key = PreimageKey::try_from(*key).ok()?; - match preimage_key.key_value() { - L1_HEAD_KEY => Some(self.cfg.l1_head.to_vec()), - L2_AGREED_PRE_STATE_KEY => { - Some(keccak256(self.cfg.agreed_l2_pre_state.as_ref()).to_vec()) - } - L2_CLAIMED_POST_STATE_KEY => Some(self.cfg.claimed_l2_post_state.to_vec()), - L2_CLAIMED_TIMESTAMP_KEY => Some(self.cfg.claimed_l2_timestamp.to_be_bytes().to_vec()), - L2_ROLLUP_CONFIG_KEY => { - let rollup_configs = self.cfg.read_rollup_configs()?.ok()?; - serde_json::to_vec(&rollup_configs).ok() - } - L1_CONFIG_KEY => { - let l1_config = self.cfg.read_l1_config().ok()?; - serde_json::to_vec(&l1_config).ok() - } - _ => None, - } - } - - fn set(&mut self, _: B256, _: Vec<u8>) -> Result<()> { - unreachable!("LocalKeyValueStore is read-only") - } -} diff --git a/kona/bin/host/src/kv/mod.rs b/kona/bin/host/src/kv/mod.rs deleted file mode 100644 index a5ba9b6605c..00000000000 --- a/kona/bin/host/src/kv/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -//! This module contains the [KeyValueStore] trait and concrete implementations of it. - -use crate::Result; -use alloy_primitives::B256; -use std::sync::Arc; -use tokio::sync::RwLock; - -mod mem; -pub use mem::MemoryKeyValueStore; - -mod disk; -pub use disk::DiskKeyValueStore; - -mod split; -pub use split::SplitKeyValueStore; - -/// A type alias for a shared key-value store. -pub type SharedKeyValueStore = Arc<RwLock<dyn KeyValueStore + Send + Sync>>; - -/// Describes the interface of a simple, synchronous key-value store. -pub trait KeyValueStore { - /// Get the value associated with the given key. - fn get(&self, key: B256) -> Option<Vec<u8>>; - - /// Set the value associated with the given key. 
- fn set(&mut self, key: B256, value: Vec<u8>) -> Result<()>; -} diff --git a/kona/bin/host/src/kv/split.rs b/kona/bin/host/src/kv/split.rs deleted file mode 100644 index 55566f86060..00000000000 --- a/kona/bin/host/src/kv/split.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! Contains a concrete implementation of the [KeyValueStore] trait that splits between two separate -//! [KeyValueStore]s depending on [PreimageKeyType]. - -use super::KeyValueStore; -use crate::Result; -use alloy_primitives::B256; -use kona_preimage::PreimageKeyType; - -/// A split implementation of the [KeyValueStore] trait that splits between two separate -/// [KeyValueStore]s. -#[derive(Clone, Debug)] -pub struct SplitKeyValueStore<L, R> -where - L: KeyValueStore, - R: KeyValueStore, -{ - local_store: L, - remote_store: R, -} - -impl<L, R> SplitKeyValueStore<L, R> -where - L: KeyValueStore, - R: KeyValueStore, -{ - /// Create a new [SplitKeyValueStore] with the given left and right [KeyValueStore]s. - pub const fn new(local_store: L, remote_store: R) -> Self { - Self { local_store, remote_store } - } -} - -impl<L, R> KeyValueStore for SplitKeyValueStore<L, R> -where - L: KeyValueStore, - R: KeyValueStore, -{ - fn get(&self, key: B256) -> Option<Vec<u8>> { - match PreimageKeyType::try_from(key[0]).ok()? 
{ - PreimageKeyType::Local => self.local_store.get(key), - _ => self.remote_store.get(key), - } - } - - fn set(&mut self, key: B256, value: Vec<u8>) -> Result<()> { - self.remote_store.set(key, value) - } -} diff --git a/kona/bin/host/src/lib.rs b/kona/bin/host/src/lib.rs deleted file mode 100644 index b767c170227..00000000000 --- a/kona/bin/host/src/lib.rs +++ /dev/null @@ -1,24 +0,0 @@ -#![doc = include_str!("../README.md")] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod error; -pub use error::{HostError, Result}; - -mod server; -pub use server::{PreimageServer, PreimageServerError}; - -mod kv; -pub use kv::{ - DiskKeyValueStore, KeyValueStore, MemoryKeyValueStore, SharedKeyValueStore, SplitKeyValueStore, -}; - -mod backend; -pub use backend::{HintHandler, OfflineHostBackend, OnlineHostBackend, OnlineHostBackendCfg}; - -pub mod eth; - -#[cfg(feature = "single")] -pub mod single; - -#[cfg(feature = "interop")] -pub mod interop; diff --git a/kona/bin/host/src/server.rs b/kona/bin/host/src/server.rs deleted file mode 100644 index 6d0476b8f1d..00000000000 --- a/kona/bin/host/src/server.rs +++ /dev/null @@ -1,99 +0,0 @@ -//! This module contains the [PreimageServer] struct and its implementation. - -use kona_preimage::{ - HintReaderServer, PreimageOracleServer, PreimageServerBackend, errors::PreimageOracleError, -}; -use std::sync::Arc; -use tokio::spawn; -use tracing::{error, info}; - -/// The [PreimageServer] is responsible for waiting for incoming preimage requests and -/// serving them to the client. -#[derive(Debug)] -pub struct PreimageServer<P, H, B> { - /// The oracle server. - oracle_server: P, - /// The hint router. - hint_reader: H, - /// [PreimageServerBackend] that routes hints and retrieves preimages. - backend: Arc<B>, -} - -/// An error that can occur when handling preimage requests -#[derive(Debug, thiserror::Error)] -pub enum PreimageServerError { - /// A preimage request error. 
- #[error("Failed to serve preimage request: {0}")] - PreimageRequestFailed(PreimageOracleError), - /// An error when failed to serve route hint. - #[error("Failed to route hint: {0}")] - RouteHintFailed(PreimageOracleError), - /// Task failed to execute to completion. - #[error("Join error: {0}")] - ExecutionError(#[from] tokio::task::JoinError), -} - -impl<P, H, B> PreimageServer<P, H, B> -where - P: PreimageOracleServer + Send + Sync + 'static, - H: HintReaderServer + Send + Sync + 'static, - B: PreimageServerBackend + Send + Sync + 'static, -{ - /// Create a new [PreimageServer] with the given [PreimageOracleServer], - /// [HintReaderServer], and [PreimageServerBackend]. - pub const fn new(oracle_server: P, hint_reader: H, backend: Arc<B>) -> Self { - Self { oracle_server, hint_reader, backend } - } - - /// Starts the [PreimageServer] and waits for incoming requests. - pub async fn start(self) -> Result<(), PreimageServerError> { - // Create the futures for the oracle server and hint router. - let server = spawn(Self::start_oracle_server(self.oracle_server, self.backend.clone())); - let hint_router = spawn(Self::start_hint_router(self.hint_reader, self.backend.clone())); - - // Race the two futures to completion, returning the result of the first one to finish. - tokio::select! { - s = server => s?, - h = hint_router => h?, - } - } - - /// Starts the oracle server, which waits for incoming preimage requests and serves them to the - /// client. - async fn start_oracle_server( - oracle_server: P, - backend: Arc<B>, - ) -> Result<(), PreimageServerError> { - info!(target: "host_server", "Starting oracle server"); - loop { - // Serve the next preimage request. This `await` will yield to the runtime - // if no progress can be made. 
- match oracle_server.next_preimage_request(backend.as_ref()).await { - Ok(_) => continue, - Err(PreimageOracleError::IOError(_)) => return Ok(()), - Err(e) => { - error!(target: "host_server", "Failed to serve preimage request: {e}"); - return Err(PreimageServerError::PreimageRequestFailed(e)); - } - } - } - } - - /// Starts the hint router, which waits for incoming hints and routes them to the appropriate - /// handler. - async fn start_hint_router(hint_reader: H, backend: Arc<B>) -> Result<(), PreimageServerError> { - info!(target: "host_server", "Starting hint router"); - loop { - // Route the next hint. This `await` will yield to the runtime if no progress can be - // made. - match hint_reader.next_hint(backend.as_ref()).await { - Ok(_) => continue, - Err(PreimageOracleError::IOError(_)) => return Ok(()), - Err(e) => { - error!(target: "host_server", "Failed to serve route hint: {e}"); - return Err(PreimageServerError::RouteHintFailed(e)); - } - } - } - } -} diff --git a/kona/bin/host/src/single/cfg.rs b/kona/bin/host/src/single/cfg.rs deleted file mode 100644 index 73f4e53d4fd..00000000000 --- a/kona/bin/host/src/single/cfg.rs +++ /dev/null @@ -1,389 +0,0 @@ -//! This module contains all CLI-specific code for the single chain entrypoint. 
- -use super::{SingleChainHintHandler, SingleChainLocalInputs}; -use crate::{ - DiskKeyValueStore, MemoryKeyValueStore, OfflineHostBackend, OnlineHostBackend, - OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, SplitKeyValueStore, - eth::rpc_provider, server::PreimageServerError, -}; -use alloy_primitives::B256; -use alloy_provider::RootProvider; -use clap::Parser; -use kona_cli::cli_styles; -use kona_genesis::{L1ChainConfig, RollupConfig}; -use kona_preimage::{ - BidirectionalChannel, Channel, HintReader, HintWriter, OracleReader, OracleServer, -}; -use kona_proof::HintType; -use kona_providers_alloy::{OnlineBeaconClient, OnlineBlobProvider}; -use kona_std_fpvm::{FileChannel, FileDescriptor}; -use op_alloy_network::Optimism; -use serde::Serialize; -use std::{path::PathBuf, sync::Arc}; -use tokio::{ - sync::RwLock, - task::{self, JoinHandle}, -}; - -/// The host binary CLI application arguments. -#[derive(Default, Parser, Serialize, Clone, Debug)] -#[command(styles = cli_styles())] -pub struct SingleChainHost { - /// Hash of the L1 head block. Derivation stops after this block is processed. - #[arg(long, env)] - pub l1_head: B256, - /// Hash of the agreed upon safe L2 block committed to by `--agreed-l2-output-root`. - #[arg(long, visible_alias = "l2-head", env)] - pub agreed_l2_head_hash: B256, - /// Agreed safe L2 Output Root to start derivation from. - #[arg(long, visible_alias = "l2-output-root", env)] - pub agreed_l2_output_root: B256, - /// Claimed L2 output root at block # `--claimed-l2-block-number` to validate. - #[arg(long, visible_alias = "l2-claim", env)] - pub claimed_l2_output_root: B256, - /// Number of the L2 block that the claimed output root commits to. - #[arg(long, visible_alias = "l2-block-number", env)] - pub claimed_l2_block_number: u64, - /// Address of L2 JSON-RPC endpoint to use (eth and debug namespace required). 
- #[arg( - long, - visible_alias = "l2", - requires = "l1_node_address", - requires = "l1_beacon_address", - env - )] - pub l2_node_address: Option<String>, - /// Address of L1 JSON-RPC endpoint to use (eth and debug namespace required) - #[arg( - long, - visible_alias = "l1", - requires = "l2_node_address", - requires = "l1_beacon_address", - env - )] - pub l1_node_address: Option<String>, - /// Address of the L1 Beacon API endpoint to use. - #[arg( - long, - visible_alias = "beacon", - requires = "l1_node_address", - requires = "l2_node_address", - env - )] - pub l1_beacon_address: Option<String>, - /// The Data Directory for preimage data storage. Optional if running in online mode, - /// required if running in offline mode. - #[arg( - long, - visible_alias = "db", - required_unless_present_all = ["l2_node_address", "l1_node_address", "l1_beacon_address"], - env - )] - pub data_dir: Option<PathBuf>, - /// Run the client program natively. - #[arg(long, conflicts_with = "server", required_unless_present = "server")] - pub native: bool, - /// Run in pre-image server mode without executing any client program. If not provided, the - /// host will run the client program in the host process. - #[arg(long, conflicts_with = "native", required_unless_present = "native")] - pub server: bool, - /// The L2 chain ID of a supported chain. If provided, the host will look for the corresponding - /// rollup config in the superchain registry. - #[arg( - long, - conflicts_with = "rollup_config_path", - required_unless_present = "rollup_config_path", - env - )] - pub l2_chain_id: Option<u64>, - /// Path to rollup config. If provided, the host will use this config instead of attempting to - /// look up the config in the superchain registry. - #[arg( - long, - alias = "rollup-cfg", - conflicts_with = "l2_chain_id", - required_unless_present = "l2_chain_id", - env - )] - pub rollup_config_path: Option<PathBuf>, - /// Path to l1 config. 
If provided, the host will use this config instead of attempting to - /// look up the config in the known l1 configs. - #[arg(long, alias = "l1-cfg", env)] - pub l1_config_path: Option<PathBuf>, - /// Optionally enables the use of `debug_executePayload` to collect the execution witness from - /// the execution layer. - #[arg(long, env)] - pub enable_experimental_witness_endpoint: bool, -} - -/// An error that can occur when handling single chain hosts -#[derive(Debug, thiserror::Error)] -pub enum SingleChainHostError { - /// An error when handling preimage requests. - #[error("Error handling preimage request: {0}")] - PreimageServerError(#[from] PreimageServerError), - /// An IO error. - #[error("IO error: {0}")] - IOError(#[from] std::io::Error), - /// A JSON parse error. - #[error("Failed deserializing RollupConfig: {0}")] - ParseError(#[from] serde_json::Error), - /// Task failed to execute to completion. - #[error("Join error: {0}")] - ExecutionError(#[from] tokio::task::JoinError), - /// No rollup config found. - #[error("No rollup config found")] - NoRollupConfig, - /// No l1 config found. - #[error("No l1 config found")] - NoL1Config, - /// Any other error. - #[error("Error: {0}")] - Other(&'static str), -} - -impl SingleChainHost { - /// Starts the [SingleChainHost] application. - pub async fn start(self) -> Result<(), SingleChainHostError> { - if self.server { - let hint = FileChannel::new(FileDescriptor::HintRead, FileDescriptor::HintWrite); - let preimage = - FileChannel::new(FileDescriptor::PreimageRead, FileDescriptor::PreimageWrite); - - self.start_server(hint, preimage).await?.await? - } else { - self.start_native().await - } - } - - /// Starts the preimage server, communicating with the client over the provided channels. 
- pub async fn start_server<C>( - &self, - hint: C, - preimage: C, - ) -> Result<JoinHandle<Result<(), SingleChainHostError>>, SingleChainHostError> - where - C: Channel + Send + Sync + 'static, - { - let kv_store = self.create_key_value_store()?; - - let task_handle = if self.is_offline() { - task::spawn(async { - PreimageServer::new( - OracleServer::new(preimage), - HintReader::new(hint), - Arc::new(OfflineHostBackend::new(kv_store)), - ) - .start() - .await - .map_err(SingleChainHostError::from) - }) - } else { - let providers = self.create_providers().await?; - let backend = OnlineHostBackend::new( - self.clone(), - kv_store.clone(), - providers, - SingleChainHintHandler, - ) - .with_proactive_hint(HintType::L2PayloadWitness); - - task::spawn(async { - PreimageServer::new( - OracleServer::new(preimage), - HintReader::new(hint), - Arc::new(backend), - ) - .start() - .await - .map_err(SingleChainHostError::from) - }) - }; - - Ok(task_handle) - } - - /// Starts the host in native mode, running both the client and preimage server in the same - /// process. - async fn start_native(&self) -> Result<(), SingleChainHostError> { - let hint = BidirectionalChannel::new()?; - let preimage = BidirectionalChannel::new()?; - - let server_task = self.start_server(hint.host, preimage.host).await?; - let client_task = task::spawn(kona_client::single::run( - OracleReader::new(preimage.client), - HintWriter::new(hint.client), - )); - - let (_, client_result) = tokio::try_join!(server_task, client_task)?; - - // Bubble up the exit status of the client program if execution completes. - std::process::exit(client_result.is_err() as i32) - } - - /// Returns `true` if the host is running in offline mode. - pub const fn is_offline(&self) -> bool { - self.l1_node_address.is_none() && - self.l2_node_address.is_none() && - self.l1_beacon_address.is_none() && - self.data_dir.is_some() - } - - /// Reads the [RollupConfig] from the file system and returns the deserialized configuration. 
- pub fn read_rollup_config(&self) -> Result<RollupConfig, SingleChainHostError> { - let path = - self.rollup_config_path.as_ref().ok_or_else(|| SingleChainHostError::NoRollupConfig)?; - - // Read the serialized config from the file system. - let ser_config = std::fs::read_to_string(path)?; - - // Deserialize the config and return it. - serde_json::from_str(&ser_config).map_err(SingleChainHostError::ParseError) - } - - /// Reads the [L1ChainConfig] from the file system and returns the deserialized configuration. - pub fn read_l1_config(&self) -> Result<L1ChainConfig, SingleChainHostError> { - let path = self.l1_config_path.as_ref().ok_or_else(|| SingleChainHostError::NoL1Config)?; - - // Read the serialized config from the file system. - let ser_config = std::fs::read_to_string(path)?; - - // Deserialize the config and return it. - serde_json::from_str(&ser_config).map_err(SingleChainHostError::ParseError) - } - - /// Creates the key-value store for the host backend. - pub fn create_key_value_store(&self) -> Result<SharedKeyValueStore, SingleChainHostError> { - let local_kv_store = SingleChainLocalInputs::new(self.clone()); - - let kv_store: SharedKeyValueStore = if let Some(ref data_dir) = self.data_dir { - let disk_kv_store = DiskKeyValueStore::new(data_dir.clone()); - let split_kv_store = SplitKeyValueStore::new(local_kv_store, disk_kv_store); - Arc::new(RwLock::new(split_kv_store)) - } else { - let mem_kv_store = MemoryKeyValueStore::new(); - let split_kv_store = SplitKeyValueStore::new(local_kv_store, mem_kv_store); - Arc::new(RwLock::new(split_kv_store)) - }; - - Ok(kv_store) - } - - /// Creates the providers required for the host backend. 
- pub async fn create_providers(&self) -> Result<SingleChainProviders, SingleChainHostError> { - let l1_provider = rpc_provider( - self.l1_node_address - .as_ref() - .ok_or(SingleChainHostError::Other("Provider must be set"))?, - ) - .await; - let blob_provider = OnlineBlobProvider::init(OnlineBeaconClient::new_http( - self.l1_beacon_address - .clone() - .ok_or(SingleChainHostError::Other("Beacon API URL must be set"))?, - )) - .await; - let l2_provider = rpc_provider::<Optimism>( - self.l2_node_address - .as_ref() - .ok_or(SingleChainHostError::Other("L2 node address must be set"))?, - ) - .await; - - Ok(SingleChainProviders { l1: l1_provider, blobs: blob_provider, l2: l2_provider }) - } -} - -impl OnlineHostBackendCfg for SingleChainHost { - type HintType = HintType; - type Providers = SingleChainProviders; -} - -/// The providers required for the single chain host. -#[derive(Debug, Clone)] -pub struct SingleChainProviders { - /// The L1 EL provider. - pub l1: RootProvider, - /// The L1 beacon node provider. - pub blobs: OnlineBlobProvider<OnlineBeaconClient>, - /// The L2 EL provider. 
- pub l2: RootProvider<Optimism>, -} - -#[cfg(test)] -mod test { - use crate::single::SingleChainHost; - use alloy_primitives::B256; - use clap::Parser; - - #[test] - fn test_flags() { - let zero_hash_str = &B256::ZERO.to_string(); - let default_flags = [ - "single", - "--l1-head", - zero_hash_str, - "--l2-head", - zero_hash_str, - "--l2-output-root", - zero_hash_str, - "--l2-claim", - zero_hash_str, - "--l2-block-number", - "0", - ]; - - let cases = [ - // valid - (["--server", "--l2-chain-id", "0", "--data-dir", "dummy"].as_slice(), true), - (["--server", "--rollup-config-path", "dummy", "--data-dir", "dummy"].as_slice(), true), - (["--native", "--l2-chain-id", "0", "--data-dir", "dummy"].as_slice(), true), - (["--native", "--rollup-config-path", "dummy", "--data-dir", "dummy"].as_slice(), true), - ( - [ - "--l1-node-address", - "dummy", - "--l2-node-address", - "dummy", - "--l1-beacon-address", - "dummy", - "--server", - "--l2-chain-id", - "0", - ] - .as_slice(), - true, - ), - ( - [ - "--server", - "--l2-chain-id", - "0", - "--data-dir", - "dummy", - "--enable-experimental-witness-endpoint", - ] - .as_slice(), - true, - ), - // invalid - (["--server", "--native", "--l2-chain-id", "0"].as_slice(), false), - (["--l2-chain-id", "0", "--rollup-config-path", "dummy", "--server"].as_slice(), false), - (["--server"].as_slice(), false), - (["--native"].as_slice(), false), - (["--rollup-config-path", "dummy"].as_slice(), false), - (["--l2-chain-id", "0"].as_slice(), false), - (["--l1-node-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), - (["--l2-node-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), - (["--l1-beacon-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), - ([].as_slice(), false), - ]; - - for (args_ext, valid) in cases.into_iter() { - let args = default_flags.iter().chain(args_ext.iter()).cloned().collect::<Vec<_>>(); - - let parsed = SingleChainHost::try_parse_from(args); - 
assert_eq!(parsed.is_ok(), valid); - } - } -} diff --git a/kona/bin/host/src/single/handler.rs b/kona/bin/host/src/single/handler.rs deleted file mode 100644 index fb2eae3fb12..00000000000 --- a/kona/bin/host/src/single/handler.rs +++ /dev/null @@ -1,384 +0,0 @@ -//! [HintHandler] for the [SingleChainHost]. - -use crate::{ - HintHandler, OnlineHostBackendCfg, backend::util::store_ordered_trie, kv::SharedKeyValueStore, - single::cfg::SingleChainHost, -}; -use alloy_consensus::Header; -use alloy_eips::{ - eip2718::Encodable2718, - eip4844::{BlobTransactionSidecarItem, FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}, -}; -use alloy_primitives::{Address, B256, Bytes, keccak256}; -use alloy_provider::Provider; -use alloy_rlp::Decodable; -use alloy_rpc_types::{Block, debug::ExecutionWitness}; -use anyhow::{Result, anyhow, ensure}; -use ark_ff::{BigInteger, PrimeField}; -use async_trait::async_trait; -use kona_preimage::{PreimageKey, PreimageKeyType}; -use kona_proof::{Hint, HintType, l1::ROOTS_OF_UNITY}; -use kona_protocol::{BlockInfo, OutputRoot, Predeploys}; -use op_alloy_rpc_types_engine::OpPayloadAttributes; -use tracing::warn; - -/// The [HintHandler] for the [SingleChainHost]. 
-#[derive(Debug, Clone, Copy)] -pub struct SingleChainHintHandler; - -#[async_trait] -impl HintHandler for SingleChainHintHandler { - type Cfg = SingleChainHost; - - async fn fetch_hint( - hint: Hint<<Self::Cfg as OnlineHostBackendCfg>::HintType>, - cfg: &Self::Cfg, - providers: &<Self::Cfg as OnlineHostBackendCfg>::Providers, - kv: SharedKeyValueStore, - ) -> Result<()> { - match hint.ty { - HintType::L1BlockHeader => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let raw_header: Bytes = - providers.l1.client().request("debug_getRawHeader", [hash]).await?; - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; - } - HintType::L1Transactions => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let Block { transactions, .. } = providers - .l1 - .get_block_by_hash(hash) - .full() - .await? 
- .ok_or(anyhow!("Block not found"))?; - let encoded_transactions = transactions - .into_transactions() - .map(|tx| tx.inner.encoded_2718()) - .collect::<Vec<_>>(); - - store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; - } - HintType::L1Receipts => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let raw_receipts: Vec<Bytes> = - providers.l1.client().request("debug_getRawReceipts", [hash]).await?; - - store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; - } - HintType::L1Blob => { - ensure!(hint.data.len() == 48, "Invalid hint data length"); - - let hash_data_bytes: [u8; 32] = hint.data[0..32].try_into()?; - let index_data_bytes: [u8; 8] = hint.data[32..40].try_into()?; - let timestamp_data_bytes: [u8; 8] = hint.data[40..48].try_into()?; - - let hash: B256 = hash_data_bytes.into(); - let index = u64::from_be_bytes(index_data_bytes); - let timestamp = u64::from_be_bytes(timestamp_data_bytes); - - let partial_block_ref = BlockInfo { timestamp, ..Default::default() }; - let indexed_hash = IndexedBlobHash { index, hash }; - - // Fetch the blobs from the blob provider. - let mut blobs = providers - .blobs - .fetch_filtered_blob_sidecars(&partial_block_ref, &[indexed_hash]) - .await - .map_err(|e| anyhow!("Failed to fetch blob sidecars: {e}"))?; - if blobs.len() != 1 { - anyhow::bail!("Expected 1 blob, got {}", blobs.len()); - } - let BlobTransactionSidecarItem { - blob, - kzg_proof: proof, - kzg_commitment: commitment, - .. - } = blobs.pop().expect("Expected 1 blob"); - - // Acquire a lock on the key-value store and set the preimages. - let mut kv_lock = kv.write().await; - - // Set the preimage for the blob commitment. - kv_lock.set( - PreimageKey::new(*hash, PreimageKeyType::Sha256).into(), - commitment.to_vec(), - )?; - - // Write all the field elements to the key-value store. There should be 4096. 
- // The preimage oracle key for each field element is the keccak256 hash of - // `abi.encodePacked(sidecar.KZGCommitment, bytes32(ROOTS_OF_UNITY[i]))`. - let mut blob_key = [0u8; 80]; - blob_key[..48].copy_from_slice(commitment.as_ref()); - for i in 0..FIELD_ELEMENTS_PER_BLOB { - blob_key[48..].copy_from_slice( - ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref(), - ); - let blob_key_hash = keccak256(blob_key.as_ref()); - - kv_lock - .set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; - kv_lock.set( - PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), - blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(), - )?; - } - - // Write the KZG Proof as the 4096th element. - // Note: This is not associated with a root of unity, as to be backwards compatible - // with ZK users of kona that use this proof for the overall blob. - blob_key[72..].copy_from_slice(FIELD_ELEMENTS_PER_BLOB.to_be_bytes().as_ref()); - let blob_key_hash = keccak256(blob_key.as_ref()); - - kv_lock.set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; - kv_lock.set( - PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), - proof.to_vec(), - )?; - } - HintType::L1Precompile => { - ensure!(hint.data.len() >= 28, "Invalid hint data length"); - - let address = Address::from_slice(&hint.data.as_ref()[..20]); - let gas = u64::from_be_bytes(hint.data.as_ref()[20..28].try_into()?); - let input = hint.data[28..].to_vec(); - let input_hash = keccak256(hint.data.as_ref()); - - let result = crate::eth::execute(address, input, gas).map_or_else( - |_| vec![0u8; 1], - |raw_res| { - let mut res = Vec::with_capacity(1 + raw_res.len()); - res.push(0x01); - res.extend_from_slice(&raw_res); - res - }, - ); - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*input_hash).into(), hint.data.into())?; - kv_lock.set( - PreimageKey::new(*input_hash, PreimageKeyType::Precompile).into(), - result, - )?; - } - 
HintType::L2BlockHeader => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - // Fetch the raw header from the L2 chain provider. - let hash: B256 = hint.data.as_ref().try_into()?; - let raw_header: Bytes = - providers.l2.client().request("debug_getRawHeader", [hash]).await?; - - // Acquire a lock on the key-value store and set the preimage. - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; - } - HintType::L2Transactions => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - let Block { transactions, .. } = providers - .l2 - .get_block_by_hash(hash) - .full() - .await? - .ok_or(anyhow!("Block not found."))?; - - let encoded_transactions = transactions - .into_transactions() - .map(|tx| tx.inner.inner.encoded_2718()) - .collect::<Vec<_>>(); - store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; - } - HintType::StartingL2Output => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - // Fetch the header for the L2 head block. - let raw_header: Bytes = providers - .l2 - .client() - .request("debug_getRawHeader", &[cfg.agreed_l2_head_hash]) - .await?; - let header = Header::decode(&mut raw_header.as_ref())?; - - // Fetch the storage root for the L2 head block. - let l2_to_l1_message_passer = providers - .l2 - .get_proof(Predeploys::L2_TO_L1_MESSAGE_PASSER, Default::default()) - .block_id(cfg.agreed_l2_head_hash.into()) - .await?; - - let output_root = OutputRoot::from_parts( - header.state_root, - l2_to_l1_message_passer.storage_hash, - cfg.agreed_l2_head_hash, - ); - let output_root_hash = output_root.hash(); - - ensure!( - output_root_hash == cfg.agreed_l2_output_root, - "Output root does not match L2 head." 
- ); - - let mut kv_write_lock = kv.write().await; - kv_write_lock.set( - PreimageKey::new_keccak256(*output_root_hash).into(), - output_root.encode().into(), - )?; - } - HintType::L2Code => { - // geth hashdb scheme code hash key prefix - const CODE_PREFIX: u8 = b'c'; - - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - - // Attempt to fetch the code from the L2 chain provider. - let code_key = [&[CODE_PREFIX], hash.as_slice()].concat(); - let code = providers - .l2 - .client() - .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_key.into()]) - .await; - - // Check if the first attempt to fetch the code failed. If it did, try fetching the - // code hash preimage without the geth hashdb scheme prefix. - let code = match code { - Ok(code) => code, - Err(_) => providers - .l2 - .client() - .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) - .await - .map_err(|e| anyhow!("Error fetching code hash preimage: {e}"))?, - }; - - let mut kv_lock = kv.write().await; - kv_lock.set(PreimageKey::new_keccak256(*hash).into(), code.into())?; - } - HintType::L2StateNode => { - ensure!(hint.data.len() == 32, "Invalid hint data length"); - - let hash: B256 = hint.data.as_ref().try_into()?; - - warn!(target: "single_hint_handler", "L2StateNode hint was sent for node hash: {}", hash); - warn!( - target: "single_hint_handler", - "`debug_executePayload` failed to return a complete witness." - ); - - // Fetch the preimage from the L2 chain provider. 
- let preimage: Bytes = providers.l2.client().request("debug_dbGet", &[hash]).await?; - - let mut kv_write_lock = kv.write().await; - kv_write_lock.set(PreimageKey::new_keccak256(*hash).into(), preimage.into())?; - } - HintType::L2AccountProof => { - ensure!(hint.data.len() == 8 + 20, "Invalid hint data length"); - - let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); - let address = Address::from_slice(&hint.data.as_ref()[8..28]); - - let proof_response = providers - .l2 - .get_proof(address, Default::default()) - .block_id(block_number.into()) - .await?; - - // Write the account proof nodes to the key-value store. - let mut kv_lock = kv.write().await; - proof_response.account_proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - } - HintType::L2AccountStorageProof => { - ensure!(hint.data.len() == 8 + 20 + 32, "Invalid hint data length"); - - let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); - let address = Address::from_slice(&hint.data.as_ref()[8..28]); - let slot = B256::from_slice(&hint.data.as_ref()[28..]); - - let mut proof_response = providers - .l2 - .get_proof(address, vec![slot]) - .block_id(block_number.into()) - .await?; - - let mut kv_lock = kv.write().await; - - // Write the account proof nodes to the key-value store. - proof_response.account_proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - - // Write the storage proof nodes to the key-value store. 
- let storage_proof = proof_response.storage_proof.remove(0); - storage_proof.proof.into_iter().try_for_each(|node| { - let node_hash = keccak256(node.as_ref()); - let key = PreimageKey::new_keccak256(*node_hash); - kv_lock.set(key.into(), node.into())?; - Ok::<(), anyhow::Error>(()) - })?; - } - HintType::L2PayloadWitness => { - if !cfg.enable_experimental_witness_endpoint { - warn!( - target: "single_hint_handler", - "L2PayloadWitness hint was sent, but payload witness is disabled. Skipping hint." - ); - return Ok(()); - } - - ensure!(hint.data.len() >= 32, "Invalid hint data length"); - - let parent_block_hash = B256::from_slice(&hint.data.as_ref()[..32]); - let payload_attributes: OpPayloadAttributes = - serde_json::from_slice(&hint.data[32..])?; - - let Ok(execute_payload_response) = providers - .l2 - .client() - .request::<(B256, OpPayloadAttributes), ExecutionWitness>( - "debug_executePayload", - (parent_block_hash, payload_attributes), - ) - .await - else { - // Allow this hint to fail silently, as not all execution clients support - // the `debug_executePayload` method. - return Ok(()); - }; - - let preimages = execute_payload_response - .state - .into_iter() - .chain(execute_payload_response.codes) - .chain(execute_payload_response.keys); - - let mut kv_lock = kv.write().await; - for preimage in preimages { - let computed_hash = keccak256(preimage.as_ref()); - - let key = PreimageKey::new_keccak256(*computed_hash); - kv_lock.set(key.into(), preimage.into())?; - } - } - } - - Ok(()) - } -} diff --git a/kona/bin/host/src/single/local_kv.rs b/kona/bin/host/src/single/local_kv.rs deleted file mode 100644 index 1d805b2f0da..00000000000 --- a/kona/bin/host/src/single/local_kv.rs +++ /dev/null @@ -1,56 +0,0 @@ -//! Contains a concrete implementation of the [KeyValueStore] trait that stores data on disk, -//! using the [SingleChainHost] config. 
- -use super::SingleChainHost; -use crate::{KeyValueStore, Result}; -use alloy_primitives::B256; -use kona_preimage::PreimageKey; -use kona_proof::boot::{ - L1_CONFIG_KEY, L1_HEAD_KEY, L2_CHAIN_ID_KEY, L2_CLAIM_BLOCK_NUMBER_KEY, L2_CLAIM_KEY, - L2_OUTPUT_ROOT_KEY, L2_ROLLUP_CONFIG_KEY, -}; - -/// A simple, synchronous key-value store that returns data from a [SingleChainHost] config. -#[derive(Debug)] -pub struct SingleChainLocalInputs { - cfg: SingleChainHost, -} - -impl SingleChainLocalInputs { - /// Create a new [SingleChainLocalInputs] with the given [SingleChainHost] config. - pub const fn new(cfg: SingleChainHost) -> Self { - Self { cfg } - } -} - -impl KeyValueStore for SingleChainLocalInputs { - fn get(&self, key: B256) -> Option<Vec<u8>> { - let preimage_key = PreimageKey::try_from(*key).ok()?; - match preimage_key.key_value() { - L1_HEAD_KEY => Some(self.cfg.l1_head.to_vec()), - L2_OUTPUT_ROOT_KEY => Some(self.cfg.agreed_l2_output_root.to_vec()), - L2_CLAIM_KEY => Some(self.cfg.claimed_l2_output_root.to_vec()), - L2_CLAIM_BLOCK_NUMBER_KEY => { - Some(self.cfg.claimed_l2_block_number.to_be_bytes().to_vec()) - } - L2_CHAIN_ID_KEY => { - Some(self.cfg.l2_chain_id.unwrap_or_default().to_be_bytes().to_vec()) - } - L2_ROLLUP_CONFIG_KEY => { - let rollup_config = self.cfg.read_rollup_config().ok()?; - let serialized = serde_json::to_vec(&rollup_config).ok()?; - Some(serialized) - } - L1_CONFIG_KEY => { - let l1_config = self.cfg.read_l1_config().ok()?; - let serialized = serde_json::to_vec(&l1_config).ok()?; - Some(serialized) - } - _ => None, - } - } - - fn set(&mut self, _: B256, _: Vec<u8>) -> Result<()> { - unreachable!("LocalKeyValueStore is read-only") - } -} diff --git a/kona/bin/node/src/commands/net.rs b/kona/bin/node/src/commands/net.rs deleted file mode 100644 index 222926adfa3..00000000000 --- a/kona/bin/node/src/commands/net.rs +++ /dev/null @@ -1,156 +0,0 @@ -//! 
Net Subcommand - -use crate::flags::{GlobalArgs, P2PArgs, RpcArgs}; -use clap::Parser; -use futures::future::OptionFuture; -use jsonrpsee::{RpcModule, core::async_trait, server::Server}; -use kona_cli::LogConfig; -use kona_gossip::P2pRpcRequest; -use kona_node_service::{ - EngineClientResult, NetworkActor, NetworkBuilder, NetworkEngineClient, NetworkInboundData, - NodeActor, -}; -use kona_registry::scr_rollup_config_by_alloy_ident; -use kona_rpc::{OpP2PApiServer, P2pRpc, RpcBuilder}; -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use tokio::sync::mpsc; -use tokio_util::sync::CancellationToken; -use tracing::{error, info, warn}; -use url::Url; - -/// The `net` Subcommand -/// -/// The `net` subcommand is used to run the networking stack for the `kona-node`. -/// -/// # Usage -/// -/// ```sh -/// kona-node net [FLAGS] [OPTIONS] -/// ``` -#[derive(Parser, Default, PartialEq, Debug, Clone)] -#[command(about = "Runs the networking stack for the kona-node.")] -pub struct NetCommand { - /// URL of the L1 execution client RPC API. - /// This is used to load the unsafe block signer at startup. - /// Without this, the rollup config unsafe block signer will be used which may be outdated. - #[arg(long, visible_alias = "l1", env = "L1_ETH_RPC")] - pub l1_eth_rpc: Option<Url>, - /// P2P CLI Flags - #[command(flatten)] - pub p2p: P2PArgs, - /// RPC CLI Flags - #[command(flatten)] - pub rpc: RpcArgs, -} - -impl NetCommand { - /// Initializes the logging system based on global arguments. - pub fn init_logs(&self, args: &GlobalArgs) -> anyhow::Result<()> { - // Filter out discovery warnings since they're very very noisy. - let filter = tracing_subscriber::EnvFilter::from_default_env() - .add_directive("discv5=error".parse()?) - .add_directive("bootstore=debug".parse()?); - - // Initialize the telemetry stack. - LogConfig::new(args.log_args.clone()).init_tracing_subscriber(Some(filter))?; - Ok(()) - } - - /// Run the Net subcommand. 
- pub async fn run(self, args: &GlobalArgs) -> anyhow::Result<()> { - let signer = args.genesis_signer()?; - info!(target: "net", "Genesis block signer: {:?}", signer); - - let rpc_config = Option::<RpcBuilder>::from(self.rpc); - - // Get the rollup config from the args - let rollup_config = scr_rollup_config_by_alloy_ident(&args.l2_chain_id) - .ok_or(anyhow::anyhow!("Rollup config not found for chain id: {}", args.l2_chain_id))?; - - // Start the Network Stack - self.p2p.check_ports()?; - let p2p_config = self.p2p.config(rollup_config, args, self.l1_eth_rpc).await?; - - let (block_tx, mut block_rx) = mpsc::channel(1024); - let (NetworkInboundData { p2p_rpc: rpc, .. }, network) = NetworkActor::new( - ForwardingNetworkEngineClient { block_tx }, - CancellationToken::new(), - NetworkBuilder::from(p2p_config), - ); - - network.start(()).await?; - - info!(target: "net", "Network started, receiving blocks."); - - // On an interval, use the rpc tx to request stats about the p2p network. - let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(2)); - - let handle = if let Some(config) = rpc_config { - info!(target: "net", socket = ?config.socket, "Starting RPC server"); - - // Setup the RPC server with the P2P RPC Module - let mut launcher = RpcModule::new(()); - launcher.merge(P2pRpc::new(rpc.clone()).into_rpc())?; - - let server = Server::builder().build(config.socket).await?; - Some(server.start(launcher)) - } else { - info!(target: "net", "RPC server disabled"); - None - }; - - loop { - tokio::select! 
{ - Some(payload) = block_rx.recv() => { - info!(target: "net", "Received unsafe payload: {:?}", payload.execution_payload.block_hash()); - } - _ = interval.tick(), if !rpc.is_closed() => { - let (otx, mut orx) = tokio::sync::oneshot::channel(); - if let Err(e) = rpc.send(P2pRpcRequest::PeerCount(otx)).await { - warn!(target: "net", "Failed to send network rpc request: {:?}", e); - continue; - } - tokio::time::timeout(tokio::time::Duration::from_secs(5), async move { - loop { - match orx.try_recv() { - Ok((d, g)) => { - let d = d.unwrap_or_default(); - info!(target: "net", "Peer counts: Discovery={} | Swarm={}", d, g); - break; - } - Err(tokio::sync::oneshot::error::TryRecvError::Empty) => { - /* Keep trying to receive */ - } - Err(tokio::sync::oneshot::error::TryRecvError::Closed) => { - break; - } - } - } - }).await.unwrap(); - } - _ = OptionFuture::from(handle.clone().map(|h| h.stopped())) => { - warn!(target: "net", "RPC server stopped"); - return Ok(()); - } - } - } - } -} - -#[derive(Debug)] -struct ForwardingNetworkEngineClient { - block_tx: mpsc::Sender<OpExecutionPayloadEnvelope>, -} - -#[async_trait] -impl NetworkEngineClient for ForwardingNetworkEngineClient { - async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { - let _ = self - .block_tx - .send(block) - .await - .inspect_err(|e| error!(target: "net", "Failed to send block: {:?}", e)); - - Ok(()) - } -} diff --git a/kona/bin/node/src/commands/node.rs b/kona/bin/node/src/commands/node.rs deleted file mode 100644 index 5f9e8558964..00000000000 --- a/kona/bin/node/src/commands/node.rs +++ /dev/null @@ -1,519 +0,0 @@ -//! Node Subcommand. 
- -use crate::{ - flags::{ - BuilderClientArgs, DerivationDelegateArgs, GlobalArgs, L1ClientArgs, L2ClientArgs, P2PArgs, - RollupBoostFlags, RpcArgs, SequencerArgs, - }, - metrics::{CliMetrics, init_rollup_config_metrics}, -}; -use alloy_provider::RootProvider; -use alloy_rpc_types_engine::JwtSecret; -use alloy_transport_http::Http; -use anyhow::{Result, bail}; -use backon::{ExponentialBuilder, Retryable}; -use clap::Parser; -use kona_cli::{LogConfig, MetricsArgs}; -use kona_engine::{HyperAuthClient, OpEngineClient}; -use kona_genesis::{L1ChainConfig, RollupConfig}; -use kona_node_service::{EngineConfig, L1ConfigBuilder, NodeMode, RollupNodeBuilder}; -use kona_registry::{L1Config, scr_rollup_config_by_alloy_ident}; -use op_alloy_network::Optimism; -use op_alloy_provider::ext::engine::OpEngineApi; -use serde_json::from_reader; -use std::{fs::File, io::Write, path::PathBuf, sync::Arc, time::Duration}; -use strum::IntoEnumIterator; -use tracing::{debug, error, info}; - -/// A JWT token validation error. -#[derive(Debug, thiserror::Error)] -pub(super) enum JwtValidationError { - #[error("JWT signature is invalid")] - InvalidSignature, - #[error("Failed to exchange capabilities with engine: {0}")] - CapabilityExchange(String), -} - -/// Command-line interface for running a Kona rollup node. -/// -/// The `NodeCommand` struct defines all the configuration options needed to start and run -/// a rollup node in the Kona ecosystem. It supports multiple node modes including validator -/// and sequencer modes, and provides comprehensive networking and RPC configuration options. 
-/// -/// # Node Modes -/// -/// The node can operate in different modes: -/// - **Validator**: Validates L2 blocks and participates in consensus -/// - **Sequencer**: Sequences transactions and produces L2 blocks -/// -/// # Configuration Sources -/// -/// Configuration can be provided through: -/// - Command-line arguments -/// - Environment variables (prefixed with `KONA_NODE_`) -/// - Configuration files (for rollup config) -/// -/// # Examples -/// -/// ```bash -/// # Run as validator with default settings -/// kona node --l1-eth-rpc http://localhost:8545 \ -/// --l1-beacon http://localhost:5052 \ -/// --l2-engine-rpc http://localhost:8551 -/// -/// # Run as sequencer with custom JWT secret -/// kona node --mode sequencer \ -/// --l1-eth-rpc http://localhost:8545 \ -/// --l1-beacon http://localhost:5052 \ -/// --l2-engine-rpc http://localhost:8551 \ -/// --l2-engine-jwt-secret /path/to/jwt.hex -/// ``` -#[derive(Parser, Debug, Clone)] -#[command(about = "Runs the consensus node")] -pub struct NodeCommand { - /// The mode to run the node in. - #[arg( - long = "mode", - default_value_t = NodeMode::Validator, - env = "KONA_NODE_MODE", - help = format!( - "The mode to run the node in. Supported modes are: {}", - NodeMode::iter() - .map(|mode| format!("\"{}\"", mode.to_string())) - .collect::<Vec<_>>() - .join(", ") - ) - )] - pub node_mode: NodeMode, - - /// L1 RPC CLI arguments. - #[clap(flatten)] - pub l1_rpc_args: L1ClientArgs, - - /// L2 engine CLI arguments. - #[clap(flatten)] - pub l2_client_args: L2ClientArgs, - - /// Optional block builder client. - #[clap(flatten)] - pub builder_client_args: BuilderClientArgs, - - /// Optional derivation delegation client. 
- #[clap(flatten)] - pub derivation_delegate_args: DerivationDelegateArgs, - - /// Path to a custom L2 rollup configuration file - /// (overrides the default rollup configuration from the registry) - #[arg(long, visible_alias = "rollup-cfg", env = "KONA_NODE_ROLLUP_CONFIG")] - pub l2_config_file: Option<PathBuf>, - /// Path to a custom L1 rollup configuration file - /// (overrides the default rollup configuration from the registry) - #[arg(long, visible_alias = "rollup-l1-cfg", env = "KONA_NODE_L1_CHAIN_CONFIG")] - pub l1_config_file: Option<PathBuf>, - /// P2P CLI arguments. - #[command(flatten)] - pub p2p_flags: P2PArgs, - /// RPC CLI arguments. - #[command(flatten)] - pub rpc_flags: RpcArgs, - /// SEQUENCER CLI arguments. - #[command(flatten)] - pub sequencer_flags: SequencerArgs, - - /// Rollup boost CLI arguments - contains the builder and l2 engine arguments. - #[command(flatten)] - pub rollup_boost_flags: RollupBoostFlags, -} - -impl Default for NodeCommand { - fn default() -> Self { - Self { - l1_rpc_args: L1ClientArgs::default(), - l2_client_args: L2ClientArgs::default(), - builder_client_args: BuilderClientArgs::default(), - derivation_delegate_args: DerivationDelegateArgs::default(), - l2_config_file: None, - l1_config_file: None, - node_mode: NodeMode::Validator, - p2p_flags: P2PArgs::default(), - rpc_flags: RpcArgs::default(), - sequencer_flags: SequencerArgs::default(), - rollup_boost_flags: RollupBoostFlags::default(), - } - } -} - -impl NodeCommand { - /// Initializes the logging system based on global arguments. - pub fn init_logs(&self, args: &GlobalArgs) -> anyhow::Result<()> { - // Filter out discovery warnings since they're very very noisy. - let filter = tracing_subscriber::EnvFilter::from_default_env() - .add_directive("discv5=error".parse()?); - - LogConfig::new(args.log_args.clone()).init_tracing_subscriber(Some(filter))?; - Ok(()) - } - - /// Initializes CLI metrics for the Node subcommand. 
- pub fn init_cli_metrics(&self, args: &MetricsArgs) -> anyhow::Result<()> { - if !args.enabled { - debug!("CLI metrics are disabled"); - return Ok(()); - } - metrics::gauge!( - CliMetrics::IDENTIFIER, - &[ - (CliMetrics::P2P_PEER_SCORING_LEVEL, self.p2p_flags.scoring.to_string()), - (CliMetrics::P2P_TOPIC_SCORING_ENABLED, self.p2p_flags.topic_scoring.to_string()), - (CliMetrics::P2P_BANNING_ENABLED, self.p2p_flags.ban_enabled.to_string()), - ( - CliMetrics::P2P_PEER_REDIALING, - self.p2p_flags.peer_redial.unwrap_or(0).to_string() - ), - (CliMetrics::P2P_FLOOD_PUBLISH, self.p2p_flags.gossip_flood_publish.to_string()), - (CliMetrics::P2P_DISCOVERY_INTERVAL, self.p2p_flags.discovery_interval.to_string()), - ( - CliMetrics::P2P_ADVERTISE_IP, - self.p2p_flags - .advertise_ip - .map(|ip| ip.to_string()) - .unwrap_or(String::from("0.0.0.0")) - ), - ( - CliMetrics::P2P_ADVERTISE_TCP_PORT, - self.p2p_flags - .advertise_tcp_port - .map_or_else(|| "auto".to_string(), |p| p.to_string()) - ), - ( - CliMetrics::P2P_ADVERTISE_UDP_PORT, - self.p2p_flags - .advertise_udp_port - .map_or_else(|| "auto".to_string(), |p| p.to_string()) - ), - (CliMetrics::P2P_PEERS_LO, self.p2p_flags.peers_lo.to_string()), - (CliMetrics::P2P_PEERS_HI, self.p2p_flags.peers_hi.to_string()), - (CliMetrics::P2P_GOSSIP_MESH_D, self.p2p_flags.gossip_mesh_d.to_string()), - (CliMetrics::P2P_GOSSIP_MESH_D_LO, self.p2p_flags.gossip_mesh_dlo.to_string()), - (CliMetrics::P2P_GOSSIP_MESH_D_HI, self.p2p_flags.gossip_mesh_dhi.to_string()), - (CliMetrics::P2P_GOSSIP_MESH_D_LAZY, self.p2p_flags.gossip_mesh_dlazy.to_string()), - (CliMetrics::P2P_BAN_DURATION, self.p2p_flags.ban_duration.to_string()), - ] - ) - .set(1); - Ok(()) - } - - /// Check if the error is related to JWT signature validation - fn is_jwt_signature_error(error: &dyn std::error::Error) -> bool { - let mut source = Some(error); - while let Some(err) = source { - let err_str = err.to_string().to_lowercase(); - if err_str.contains("signature invalid") 
|| - (err_str.contains("jwt") && err_str.contains("invalid")) || - err_str.contains("unauthorized") || - err_str.contains("authentication failed") - { - return true; - } - source = err.source(); - } - false - } - - /// Helper to check JWT signature error from anyhow::Error (for retry condition) - fn is_jwt_signature_error_from_anyhow(error: &anyhow::Error) -> bool { - Self::is_jwt_signature_error(error.as_ref() as &dyn std::error::Error) - } - - /// Validate the jwt secret if specified by exchanging capabilities with the engine. - /// Since the engine client will fail if the jwt token is invalid, this allows to ensure - /// that the jwt token passed as a cli arg is correct. - pub async fn validate_jwt(&self) -> anyhow::Result<JwtSecret> { - let jwt_secret = self.l2_jwt_secret()?; - - let engine = OpEngineClient::<RootProvider, RootProvider<Optimism>>::rpc_client::<Optimism>( - self.l2_client_args.l2_engine_rpc.clone(), - jwt_secret, - ); - - let exchange = || async { - match <RootProvider<Optimism> as OpEngineApi< - Optimism, - Http<HyperAuthClient>, - >>::exchange_capabilities(&engine, vec![]) - .await - { - Ok(_) => { - debug!("Successfully exchanged capabilities with engine"); - Ok(jwt_secret) - } - Err(e) => { - if Self::is_jwt_signature_error(&e) { - error!( - "Engine API JWT secret differs from the one specified by --l2.jwt-secret/--l2.jwt-secret-encoded" - ); - error!( - "Ensure that the JWT secret file specified is correct (by default it is `jwt.hex` in the current directory)" - ); - return Err(JwtValidationError::InvalidSignature.into()); - } - Err(JwtValidationError::CapabilityExchange(e.to_string()).into()) - } - } - }; - - exchange - .retry(ExponentialBuilder::default()) - .when(|e| !Self::is_jwt_signature_error_from_anyhow(e)) - .notify(|_, duration| { - debug!("Retrying engine capability handshake after {duration:?}"); - }) - .await - } - - /// Run the Node subcommand. 
- pub async fn run(self, args: &GlobalArgs) -> anyhow::Result<()> { - let cfg = self.get_l2_config(args)?; - - info!( - target: "rollup_node", - chain_id = cfg.l2_chain_id.id(), - "Starting rollup node services" - ); - for hf in cfg.hardforks.to_string().lines() { - info!(target: "rollup_node", "{hf}"); - } - - let l1_config = L1ConfigBuilder { - chain_config: self.get_l1_config(cfg.l1_chain_id)?, - trust_rpc: self.l1_rpc_args.l1_trust_rpc, - beacon: self.l1_rpc_args.l1_beacon.clone(), - rpc_url: self.l1_rpc_args.l1_eth_rpc.clone(), - slot_duration_override: self.l1_rpc_args.l1_slot_duration_override, - }; - - // If metrics are enabled, initialize the global cli metrics. - args.metrics.enabled.then(|| init_rollup_config_metrics(&cfg)); - - let jwt_secret = self.validate_jwt().await?; - - self.p2p_flags.check_ports()?; - let p2p_config = self - .p2p_flags - .clone() - .config(&cfg, args, Some(self.l1_rpc_args.l1_eth_rpc.clone())) - .await?; - let rpc_config = self.rpc_flags.clone().into(); - - let engine_config = EngineConfig { - config: Arc::new(cfg.clone()), - builder_url: self.builder_client_args.l2_builder_rpc.clone(), - builder_jwt_secret: self.builder_jwt_secret()?, - builder_timeout: Duration::from_millis(self.builder_client_args.builder_timeout), - l2_url: self.l2_client_args.l2_engine_rpc.clone(), - l2_jwt_secret: jwt_secret, - l2_timeout: Duration::from_millis(self.l2_client_args.l2_engine_timeout), - l1_url: self.l1_rpc_args.l1_eth_rpc.clone(), - mode: self.node_mode, - rollup_boost: self.rollup_boost_flags.as_rollup_boost_args(), - }; - - RollupNodeBuilder::new( - cfg, - l1_config, - self.l2_client_args.l2_trust_rpc, - engine_config, - p2p_config, - rpc_config, - ) - .with_sequencer_config(self.sequencer_flags.config()) - .with_derivation_delegate_config(self.derivation_delegate_args.config()) - .build() - .start() - .await - .map_err(|e| { - error!(target: "rollup_node", "Failed to start rollup node service: {e}"); - anyhow::anyhow!("{e}") - })?; - - 
Ok(()) - } - - /// Get the L1 config, either from a file or the known chains. - pub fn get_l1_config(&self, l1_chain_id: u64) -> Result<L1ChainConfig> { - match &self.l1_config_file { - Some(path) => { - debug!("Loading l1 config from file: {:?}", path); - let file = File::open(path) - .map_err(|e| anyhow::anyhow!("Failed to open l1 config file: {e}"))?; - from_reader(file).map_err(|e| anyhow::anyhow!("Failed to parse l1 config: {e}")) - } - None => { - debug!("Loading l1 config from known chains"); - let cfg = L1Config::get_l1_genesis(l1_chain_id).map_err(|e| { - anyhow::anyhow!("Failed to find l1 config for chain ID {l1_chain_id}: {e}") - })?; - Ok(cfg.into()) - } - } - } - - /// Get the L2 rollup config, either from a file or the superchain registry. - pub fn get_l2_config(&self, args: &GlobalArgs) -> Result<RollupConfig> { - match &self.l2_config_file { - Some(path) => { - debug!("Loading l2 config from file: {:?}", path); - let file = File::open(path) - .map_err(|e| anyhow::anyhow!("Failed to open l2 config file: {e}"))?; - from_reader(file).map_err(|e| anyhow::anyhow!("Failed to parse l2 config: {e}")) - } - None => { - debug!("Loading l2 config from superchain registry"); - let Some(cfg) = scr_rollup_config_by_alloy_ident(&args.l2_chain_id) else { - bail!("Failed to find l2 config for chain ID {}", args.l2_chain_id); - }; - Ok(cfg.clone()) - } - } - } - - /// Returns the L2 JWT secret for the engine API - /// using the provided [PathBuf]. If the file is not found, - /// it will return the default JWT secret. 
- pub fn l2_jwt_secret(&self) -> anyhow::Result<JwtSecret> { - if let Some(path) = &self.l2_client_args.l2_engine_jwt_secret && - let Ok(secret) = std::fs::read_to_string(path) - { - return JwtSecret::from_hex(secret) - .map_err(|e| anyhow::anyhow!("Failed to parse JWT secret: {e}")); - } - - if let Some(secret) = &self.l2_client_args.l2_engine_jwt_encoded { - return Ok(*secret); - } - - Self::default_jwt_secret("l2_jwt.hex") - } - - /// Returns the builder JWT secret for the engine API - /// using the provided [PathBuf]. If the file is not found, - /// it will return the default JWT secret. - pub fn builder_jwt_secret(&self) -> anyhow::Result<JwtSecret> { - if let Some(path) = &self.builder_client_args.builder_jwt_path && - let Ok(secret) = std::fs::read_to_string(path) - { - return JwtSecret::from_hex(secret) - .map_err(|e| anyhow::anyhow!("Failed to parse JWT secret: {e}")); - } - - if let Some(secret) = &self.builder_client_args.builder_jwt_secret { - return Ok(*secret); - } - - Self::default_jwt_secret("builder_jwt.hex") - } - - /// Uses the current directory to attempt to read - /// the JWT secret from a file named `file_name`. - /// If the file is not found, it will create a new random JWT secret and write it to the file. 
- pub fn default_jwt_secret(file_name: &str) -> anyhow::Result<JwtSecret> { - let cur_dir = std::env::current_dir() - .map_err(|e| anyhow::anyhow!("Failed to get current directory: {e}"))?; - std::fs::read_to_string(cur_dir.join(file_name)).map_or_else( - |_| { - let secret = JwtSecret::random(); - - if let Ok(mut file) = File::create(file_name) && - let Err(e) = file - .write_all(alloy_primitives::hex::encode(secret.as_bytes()).as_bytes()) - { - return Err(anyhow::anyhow!("Failed to write JWT secret to file: {e}")); - } - - Ok(secret) - }, - |content| Ok(JwtSecret::from_hex(content)?), - ) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use anyhow::anyhow; - - #[derive(Debug)] - struct MockError { - message: String, - } - - impl std::fmt::Display for MockError { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.message) - } - } - - impl std::error::Error for MockError {} - - const fn default_flags() -> &'static [&'static str] { - &[ - "--l1-eth-rpc", - "http://localhost:8545", - "--l1-beacon", - "http://localhost:5052", - "--l2-engine-rpc", - "http://localhost:8551", - ] - } - - #[test] - fn test_node_cli_defaults() { - let args = NodeCommand::parse_from(["node"].iter().chain(default_flags().iter()).copied()); - assert_eq!(args.node_mode, NodeMode::Validator); - } - - #[test] - fn test_node_cli_missing_l1_eth_rpc() { - let err = NodeCommand::try_parse_from(["node"]).unwrap_err(); - assert!(err.to_string().contains("--l1-eth-rpc")); - } - - #[test] - fn test_node_cli_missing_l1_beacon() { - let err = NodeCommand::try_parse_from(["node", "--l1-eth-rpc", "http://localhost:8545"]) - .unwrap_err(); - assert!(err.to_string().contains("--l1-beacon")); - } - - #[test] - fn test_node_cli_missing_l2_engine_rpc() { - let err = NodeCommand::try_parse_from([ - "node", - "--l1-eth-rpc", - "http://localhost:8545", - "--l1-beacon", - "http://localhost:5052", - ]) - .unwrap_err(); - 
assert!(err.to_string().contains("--l2-engine-rpc")); - } - - #[test] - fn test_is_jwt_signature_error() { - let jwt_error = MockError { message: "signature invalid".to_string() }; - assert!(NodeCommand::is_jwt_signature_error(&jwt_error)); - - let other_error = MockError { message: "network timeout".to_string() }; - assert!(!NodeCommand::is_jwt_signature_error(&other_error)); - } - - #[test] - fn test_is_jwt_signature_error_from_anyhow() { - let jwt_anyhow_error = anyhow!("signature invalid"); - assert!(NodeCommand::is_jwt_signature_error_from_anyhow(&jwt_anyhow_error)); - - let other_anyhow_error = anyhow!("network timeout"); - assert!(!NodeCommand::is_jwt_signature_error_from_anyhow(&other_anyhow_error)); - } -} diff --git a/kona/bin/node/src/flags/metrics.rs b/kona/bin/node/src/flags/metrics.rs deleted file mode 100644 index 16b3edecfbb..00000000000 --- a/kona/bin/node/src/flags/metrics.rs +++ /dev/null @@ -1,62 +0,0 @@ -//! Prometheus metrics CLI args -//! -//! Specifies the available flags for prometheus metric configuration inside CLI - -use crate::metrics::VersionInfo; -use kona_cli::MetricsArgs; - -/// Initializes metrics for a Kona application, including Prometheus and node-specific metrics. -/// Initialize the tracing stack and Prometheus metrics recorder. -/// -/// This function should be called at the beginning of the program. -pub fn init_unified_metrics(args: &MetricsArgs) -> anyhow::Result<()> { - args.init_metrics()?; - if args.enabled { - kona_gossip::Metrics::init(); - kona_disc::Metrics::init(); - kona_engine::Metrics::init(); - kona_node_service::Metrics::init(); - kona_derive::Metrics::init(); - kona_providers_alloy::Metrics::init(); - VersionInfo::from_build().register_version_metrics(); - } - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use clap::Parser; - use std::net::IpAddr; - - /// A mock command that uses the MetricsArgs. 
- #[derive(Parser, Debug, Clone)] - #[command(about = "Mock command")] - struct MockCommand { - /// Metrics CLI Flags - #[clap(flatten)] - pub metrics: MetricsArgs, - } - - #[test] - fn test_metrics_args_listen_enabled() { - let args = MockCommand::parse_from(["test", "--metrics.enabled"]); - assert!(args.metrics.enabled); - - let args = MockCommand::parse_from(["test"]); - assert!(!args.metrics.enabled); - } - - #[test] - fn test_metrics_args_listen_ip() { - let args = MockCommand::parse_from(["test", "--metrics.addr", "127.0.0.1"]); - let expected: IpAddr = "127.0.0.1".parse().unwrap(); - assert_eq!(args.metrics.addr, expected); - } - - #[test] - fn test_metrics_args_listen_port() { - let args = MockCommand::parse_from(["test", "--metrics.port", "1234"]); - assert_eq!(args.metrics.port, 1234); - } -} diff --git a/kona/bin/node/src/flags/p2p.rs b/kona/bin/node/src/flags/p2p.rs deleted file mode 100644 index 0cda8a53274..00000000000 --- a/kona/bin/node/src/flags/p2p.rs +++ /dev/null @@ -1,725 +0,0 @@ -//! P2P CLI Flags -//! -//! These are based on p2p flags from the [`op-node`][op-node] CLI. -//! -//! [op-node]: https://github.com/ethereum-optimism/optimism/blob/develop/op-node/flags/p2p_flags.go - -use crate::flags::{GlobalArgs, SignerArgs}; -use alloy_primitives::{B256, b256}; -use alloy_provider::Provider; -use alloy_signer_local::PrivateKeySigner; -use anyhow::Result; -use clap::Parser; -use discv5::enr::k256; -use kona_derive::ChainProvider; -use kona_disc::LocalNode; -use kona_genesis::RollupConfig; -use kona_gossip::GaterConfig; -use kona_node_service::NetworkConfig; -use kona_peers::{BootNode, BootStoreFile, PeerMonitoring, PeerScoreLevel}; -use kona_providers_alloy::AlloyChainProvider; -use libp2p::identity::Keypair; -use std::{ - net::{IpAddr, SocketAddr, ToSocketAddrs}, - num::ParseIntError, - path::PathBuf, - str::FromStr, -}; -use tokio::time::Duration; -use url::Url; - -/// Resolves a hostname or IP address string to an [`IpAddr`]. 
-/// -/// Accepts either: -/// - A valid IP address string (e.g., "127.0.0.1", "::1") -/// - A DNS hostname (e.g., "node1.example.com") -/// -/// For DNS hostnames, this performs synchronous DNS resolution and returns the first -/// resolved IP address. -fn resolve_host(host: &str) -> Result<IpAddr, String> { - // First, try to parse as a direct IP address - if let Ok(ip) = host.parse::<IpAddr>() { - return Ok(ip); - } - - // If that fails, try DNS resolution - // We append a port to make it a valid socket address for resolution - let socket_addr = format!("{host}:0"); - match socket_addr.to_socket_addrs() { - Ok(mut addrs) => addrs - .next() - .map(|addr| addr.ip()) - .ok_or_else(|| format!("DNS resolution for '{host}' returned no addresses")), - Err(e) => Err(format!("Failed to resolve '{host}': {e}")), - } -} - -/// P2P CLI Flags -#[derive(Parser, Clone, Debug, PartialEq, Eq)] -pub struct P2PArgs { - /// Disable Discv5 (node discovery). - #[arg(long = "p2p.no-discovery", default_value = "false", env = "KONA_NODE_P2P_NO_DISCOVERY")] - pub no_discovery: bool, - /// Read the hex-encoded 32-byte private key for the peer ID from this txt file. - /// Created if not already exists. Important to persist to keep the same network identity after - /// restarting, maintaining the previous advertised identity. - #[arg(long = "p2p.priv.path", env = "KONA_NODE_P2P_PRIV_PATH")] - pub priv_path: Option<PathBuf>, - /// The hex-encoded 32-byte private key for the peer ID. - #[arg(long = "p2p.priv.raw", env = "KONA_NODE_P2P_PRIV_RAW")] - pub private_key: Option<B256>, - - /// IP address or DNS hostname to advertise to external peers from Discv5. - /// Optional argument. Use the `p2p.listen.ip` if not set. - /// Accepts either an IP address (e.g., "1.2.3.4") or a DNS hostname (e.g., - /// "node1.example.com"). DNS hostnames are resolved to IP addresses at startup. 
- /// - /// Technical note: if this argument is set, the dynamic ENR updates from the discovery layer - /// will be disabled. This is to allow the advertised IP to be static (to use in a network - /// behind a NAT for instance). - #[arg(long = "p2p.advertise.ip", env = "KONA_NODE_P2P_ADVERTISE_IP", value_parser = resolve_host)] - pub advertise_ip: Option<IpAddr>, - /// TCP port to advertise to external peers from the discovery layer. Same as `p2p.listen.tcp` - /// if set to zero. - #[arg(long = "p2p.advertise.tcp", env = "KONA_NODE_P2P_ADVERTISE_TCP_PORT")] - pub advertise_tcp_port: Option<u16>, - /// UDP port to advertise to external peers from the discovery layer. - /// Same as `p2p.listen.udp` if set to zero. - #[arg(long = "p2p.advertise.udp", env = "KONA_NODE_P2P_ADVERTISE_UDP_PORT")] - pub advertise_udp_port: Option<u16>, - - /// IP address or DNS hostname to bind LibP2P/Discv5 to. - /// Accepts either an IP address (e.g., "0.0.0.0") or a DNS hostname (e.g., - /// "node1.example.com"). DNS hostnames are resolved to IP addresses at startup. - #[arg(long = "p2p.listen.ip", default_value = "0.0.0.0", env = "KONA_NODE_P2P_LISTEN_IP", value_parser = resolve_host)] - pub listen_ip: IpAddr, - /// TCP port to bind LibP2P to. Any available system port if set to 0. - #[arg(long = "p2p.listen.tcp", default_value = "9222", env = "KONA_NODE_P2P_LISTEN_TCP_PORT")] - pub listen_tcp_port: u16, - /// UDP port to bind Discv5 to. Same as TCP port if left 0. - #[arg(long = "p2p.listen.udp", default_value = "9223", env = "KONA_NODE_P2P_LISTEN_UDP_PORT")] - pub listen_udp_port: u16, - /// Low-tide peer count. The node actively searches for new peer connections if below this - /// amount. - #[arg(long = "p2p.peers.lo", default_value = "20", env = "KONA_NODE_P2P_PEERS_LO")] - pub peers_lo: u32, - /// High-tide peer count. The node starts pruning peer connections slowly after reaching this - /// number. 
- #[arg(long = "p2p.peers.hi", default_value = "30", env = "KONA_NODE_P2P_PEERS_HI")] - pub peers_hi: u32, - /// Grace period to keep a newly connected peer around, if it is not misbehaving. - #[arg( - long = "p2p.peers.grace", - default_value = "30", - env = "KONA_NODE_P2P_PEERS_GRACE", - value_parser = |arg: &str| -> Result<Duration, ParseIntError> {Ok(Duration::from_secs(arg.parse()?))} - )] - pub peers_grace: Duration, - /// Configure GossipSub topic stable mesh target count. - /// Aka: The desired outbound degree (numbers of peers to gossip to). - #[arg(long = "p2p.gossip.mesh.d", default_value = "8", env = "KONA_NODE_P2P_GOSSIP_MESH_D")] - pub gossip_mesh_d: usize, - /// Configure GossipSub topic stable mesh low watermark. - /// Aka: The lower bound of outbound degree. - #[arg(long = "p2p.gossip.mesh.lo", default_value = "6", env = "KONA_NODE_P2P_GOSSIP_MESH_DLO")] - pub gossip_mesh_dlo: usize, - /// Configure GossipSub topic stable mesh high watermark. - /// Aka: The upper bound of outbound degree (additional peers will not receive gossip). - #[arg( - long = "p2p.gossip.mesh.dhi", - default_value = "12", - env = "KONA_NODE_P2P_GOSSIP_MESH_DHI" - )] - pub gossip_mesh_dhi: usize, - /// Configure GossipSub gossip target. - /// Aka: The target degree for gossip only (not messaging like p2p.gossip.mesh.d, just - /// announcements of IHAVE). - #[arg( - long = "p2p.gossip.mesh.dlazy", - default_value = "6", - env = "KONA_NODE_P2P_GOSSIP_MESH_DLAZY" - )] - pub gossip_mesh_dlazy: usize, - /// Configure GossipSub to publish messages to all known peers on the topic, outside of the - /// mesh. Also see Dlazy as less aggressive alternative. - #[arg( - long = "p2p.gossip.mesh.floodpublish", - default_value = "false", - env = "KONA_NODE_P2P_GOSSIP_FLOOD_PUBLISH" - )] - pub gossip_flood_publish: bool, - /// Sets the peer scoring strategy for the P2P stack. - /// Can be one of: none or light. 
- #[arg(long = "p2p.scoring", default_value = "light", env = "KONA_NODE_P2P_SCORING")] - pub scoring: PeerScoreLevel, - - /// Allows to ban peers based on their score. - /// - /// Peers are banned based on a ban threshold (see `p2p.ban.threshold`). - /// If a peer's score is below the threshold, it gets automatically banned. - #[arg(long = "p2p.ban.peers", default_value = "false", env = "KONA_NODE_P2P_BAN_PEERS")] - pub ban_enabled: bool, - - /// The threshold used to ban peers. - /// - /// For peers to be banned, the `p2p.ban.peers` flag must be set to `true`. - /// By default, peers are banned if their score is below -100. This follows the `op-node` default `<https://github.com/ethereum-optimism/optimism/blob/09a8351a72e43647c8a96f98c16bb60e7b25dc6e/op-node/flags/p2p_flags.go#L123-L130>`. - #[arg(long = "p2p.ban.threshold", default_value = "-100", env = "KONA_NODE_P2P_BAN_THRESHOLD")] - pub ban_threshold: i64, - - /// The duration in minutes to ban a peer for. - /// - /// For peers to be banned, the `p2p.ban.peers` flag must be set to `true`. - /// By default peers are banned for 1 hour. This follows the `op-node` default `<https://github.com/ethereum-optimism/optimism/blob/09a8351a72e43647c8a96f98c16bb60e7b25dc6e/op-node/flags/p2p_flags.go#L131-L138>`. - #[arg(long = "p2p.ban.duration", default_value = "60", env = "KONA_NODE_P2P_BAN_DURATION")] - pub ban_duration: u64, - - /// The interval in seconds to find peers using the discovery service. - /// Defaults to 5 seconds. - #[arg( - long = "p2p.discovery.interval", - default_value = "5", - env = "KONA_NODE_P2P_DISCOVERY_INTERVAL" - )] - pub discovery_interval: u64, - /// The directory to store the bootstore. - #[arg(long = "p2p.bootstore", env = "KONA_NODE_P2P_BOOTSTORE")] - pub bootstore: Option<PathBuf>, - /// Disables the bootstore. 
- #[arg(long = "p2p.no-bootstore", env = "KONA_NODE_P2P_NO_BOOTSTORE")] - pub disable_bootstore: bool, - /// Peer Redialing threshold is the maximum amount of times to attempt to redial a peer that - /// disconnects. By default, peers are *not* redialed. If set to 0, the peer will be - /// redialed indefinitely. - #[arg(long = "p2p.redial", env = "KONA_NODE_P2P_REDIAL", default_value = "500")] - pub peer_redial: Option<u64>, - - /// The duration in minutes of the peer dial period. - /// When the last time a peer was dialed is longer than the dial period, the number of peer - /// dials is reset to 0, allowing the peer to be dialed again. - #[arg(long = "p2p.redial.period", env = "KONA_NODE_P2P_REDIAL_PERIOD", default_value = "60")] - pub redial_period: u64, - - /// An optional list of bootnode ENRs or node records to start the node with. - #[arg(long = "p2p.bootnodes", value_delimiter = ',', env = "KONA_NODE_P2P_BOOTNODES")] - pub bootnodes: Vec<String>, - - /// Optionally enable topic scoring. - /// - /// Topic scoring is a mechanism to score peers based on their behavior in the gossip network. - /// Historically, topic scoring was only enabled for the v1 topic on the OP Stack p2p network - /// in the `op-node`. This was a silent bug, and topic scoring is actively being - /// [phased out of the `op-node`][out]. - /// - /// This flag is only presented for backwards compatibility and debugging purposes. - /// - /// [out]: https://github.com/ethereum-optimism/optimism/pull/15719 - #[arg( - long = "p2p.topic-scoring", - default_value = "false", - env = "KONA_NODE_P2P_TOPIC_SCORING" - )] - pub topic_scoring: bool, - - /// An optional unsafe block signer address. - /// - /// By default, this is fetched from the chain config in the superchain-registry using the - /// specified L2 chain ID. 
- #[arg(long = "p2p.unsafe.block.signer", env = "KONA_NODE_P2P_UNSAFE_BLOCK_SIGNER")] - pub unsafe_block_signer: Option<alloy_primitives::Address>, - - /// An optional flag to remove random peers from discovery to rotate the peer set. - /// - /// This is the number of seconds to wait before removing a peer from the discovery - /// service. By default, peers are not removed from the discovery service. - /// - /// This is useful for discovering a wider set of peers. - #[arg(long = "p2p.discovery.randomize", env = "KONA_NODE_P2P_DISCOVERY_RANDOMIZE")] - pub discovery_randomize: Option<u64>, - - /// Specify optional remote signer configuration. Note that this argument is mutually exclusive - /// with `p2p.sequencer.key` that specifies a local sequencer signer. - #[command(flatten)] - pub signer: SignerArgs, -} - -impl Default for P2PArgs { - fn default() -> Self { - // Construct default values using the clap parser. - // This works since none of the cli flags are required. - Self::parse_from::<[_; 0], &str>([]) - } -} - -impl P2PArgs { - fn check_ports_inner(ip_addr: IpAddr, tcp_port: u16, udp_port: u16) -> Result<()> { - if tcp_port == 0 { - return Ok(()); - } - if udp_port == 0 { - return Ok(()); - } - let tcp_socket = std::net::TcpListener::bind((ip_addr, tcp_port)); - let udp_socket = std::net::UdpSocket::bind((ip_addr, udp_port)); - if let Err(e) = tcp_socket { - tracing::error!(target: "p2p::flags", tcp_port, "Error binding TCP socket: {e}"); - anyhow::bail!("Error binding TCP socket on port {tcp_port}: {e}"); - } - if let Err(e) = udp_socket { - tracing::error!(target: "p2p::flags", udp_port, "Error binding UDP socket: {e}"); - anyhow::bail!("Error binding UDP socket on port {udp_port}: {e}"); - } - - Ok(()) - } - - /// Checks if the listen ports are available on the system. - /// - /// If either of the ports are `0`, this check is skipped. - /// - /// ## Errors - /// - /// - If the TCP port is already in use. - /// - If the UDP port is already in use. 
- pub fn check_ports(&self) -> Result<()> { - Self::check_ports_inner(self.listen_ip, self.listen_tcp_port, self.listen_udp_port) - } - - /// Returns the private key as specified in the raw cli flag or via file path. - pub fn private_key(&self) -> Option<PrivateKeySigner> { - if let Some(key) = self.private_key { - match PrivateKeySigner::from_bytes(&key) { - Ok(signer) => return Some(signer), - Err(e) => { - tracing::error!(target: "p2p::flags", "Failed to parse private key: {}", e); - return None; - } - } - } - - if let Some(path) = self.priv_path.as_ref() { - if path.exists() { - let contents = std::fs::read_to_string(path).ok()?; - let decoded = B256::from_str(&contents).ok()?; - match PrivateKeySigner::from_bytes(&decoded) { - Ok(signer) => return Some(signer), - Err(e) => { - tracing::error!(target: "p2p::flags", "Failed to parse private key from file: {}", e); - return None; - } - } - } - } - - None - } - - /// Returns the unsafe block signer from the CLI arguments. - pub async fn unsafe_block_signer( - &self, - args: &GlobalArgs, - rollup_config: &RollupConfig, - l1_eth_rpc: Option<Url>, - ) -> anyhow::Result<alloy_primitives::Address> { - if let Some(l1_eth_rpc) = l1_eth_rpc { - /// The storage slot that the unsafe block signer address is stored at. - /// Computed as: `bytes32(uint256(keccak256("systemconfig.unsafeblocksigner")) - 1)` - const UNSAFE_BLOCK_SIGNER_ADDRESS_STORAGE_SLOT: B256 = - b256!("0x65a7ed542fb37fe237fdfbdd70b31598523fe5b32879e307bae27a0bd9581c08"); - - let mut provider = AlloyChainProvider::new_http(l1_eth_rpc, 1024); - let latest_block_num = provider.latest_block_number().await?; - let block_info = provider.block_info_by_number(latest_block_num).await?; - - // Fetch the unsafe block signer address from the system config. 
- let unsafe_block_signer_address = provider - .inner - .get_storage_at( - rollup_config.l1_system_config_address, - UNSAFE_BLOCK_SIGNER_ADDRESS_STORAGE_SLOT.into(), - ) - .hash(block_info.hash) - .await?; - - // Convert the unsafe block signer address to the correct type. - return Ok(alloy_primitives::Address::from_slice( - &unsafe_block_signer_address.to_be_bytes_vec()[12..], - )); - } - - // Otherwise use the genesis signer or the configured unsafe block signer. - args.genesis_signer().or_else(|_| { - self.unsafe_block_signer.ok_or(anyhow::anyhow!("Unsafe block signer not provided")) - }) - } - - /// Constructs kona's P2P network [`NetworkConfig`] from CLI arguments. - /// - /// ## Parameters - /// - /// - [`GlobalArgs`]: required to fetch the genesis unsafe block signer. - /// - /// Errors if the genesis unsafe block signer isn't available for the specified L2 Chain ID. - pub async fn config( - self, - config: &RollupConfig, - args: &GlobalArgs, - l1_rpc: Option<Url>, - ) -> anyhow::Result<NetworkConfig> { - // Note: the advertised address is contained in the ENR for external peers from the - // discovery layer to use. - - // Fallback to the listen ip if the advertise ip is not specified - let advertise_ip = self.advertise_ip.unwrap_or(self.listen_ip); - - // If the advertise ip is set, we will disable the dynamic ENR updates. - let static_ip = self.advertise_ip.is_some(); - - // If the advertise tcp port is null, use the listen tcp port - let advertise_tcp_port = match self.advertise_tcp_port { - None => self.listen_tcp_port, - Some(port) => port, - }; - - let advertise_udp_port = match self.advertise_udp_port { - None => self.listen_udp_port, - Some(port) => port, - }; - - let keypair = self.keypair().unwrap_or_else(|e| { - let generated = Keypair::generate_secp256k1(); - tracing::warn!( - target: "p2p::config", - error = %e, - peer_id = %generated.public().to_peer_id(), - "Failed to load P2P keypair from configuration, generated ephemeral keypair. 
\ - Set --p2p.priv.path or --p2p.priv.raw for a persistent peer ID." - ); - generated - }); - let secp256k1_key = keypair.clone().try_into_secp256k1() - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. This is a bug since we only support secp256k1 keys: {e}"))? - .secret().to_bytes(); - let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. This is a bug since we only support secp256k1 keys: {e}"))?; - - let discovery_address = - LocalNode::new(local_node_key, advertise_ip, advertise_tcp_port, advertise_udp_port); - let gossip_config = kona_gossip::default_config_builder() - .mesh_n(self.gossip_mesh_d) - .mesh_n_low(self.gossip_mesh_dlo) - .mesh_n_high(self.gossip_mesh_dhi) - .gossip_lazy(self.gossip_mesh_dlazy) - .flood_publish(self.gossip_flood_publish) - .build()?; - - let monitor_peers = self.ban_enabled.then_some(PeerMonitoring { - ban_duration: Duration::from_secs(60 * self.ban_duration), - ban_threshold: self.ban_threshold as f64, - }); - - let discovery_listening_address = SocketAddr::new(self.listen_ip, self.listen_udp_port); - let discovery_config = - NetworkConfig::discv5_config(discovery_listening_address.into(), static_ip); - - let mut gossip_address = libp2p::Multiaddr::from(self.listen_ip); - gossip_address.push(libp2p::multiaddr::Protocol::Tcp(self.listen_tcp_port)); - - let unsafe_block_signer = self.unsafe_block_signer(args, config, l1_rpc).await?; - - let bootstore = if self.disable_bootstore { - None - } else { - Some(self.bootstore.map_or( - BootStoreFile::Default { chain_id: args.l2_chain_id.into() }, - BootStoreFile::Custom, - )) - }; - - let bootnodes = self - .bootnodes - .iter() - .map(|bootnode| BootNode::parse_bootnode(bootnode)) - .collect::<Vec<BootNode>>() - .into(); - - Ok(NetworkConfig { - discovery_config, - discovery_interval: Duration::from_secs(self.discovery_interval), - discovery_address, - 
discovery_randomize: self.discovery_randomize.map(Duration::from_secs), - enr_update: !static_ip, - gossip_address, - keypair, - unsafe_block_signer, - gossip_config, - scoring: self.scoring, - monitor_peers, - bootstore, - topic_scoring: self.topic_scoring, - gater_config: GaterConfig { - peer_redialing: self.peer_redial, - dial_period: Duration::from_secs(60 * self.redial_period), - }, - bootnodes, - rollup_config: config.clone(), - gossip_signer: self.signer.config(args)?, - }) - } - - /// Returns the [`Keypair`] from the cli inputs. - /// - /// If the raw private key is empty and the specified file is empty, - /// this method will generate a new private key and write it out to the file. - /// - /// If neither a file is specified, nor a raw private key input, this method - /// will error. - pub fn keypair(&self) -> Result<Keypair> { - // Attempt the parse the private key if specified. - if let Some(mut private_key) = self.private_key { - let keypair = kona_cli::SecretKeyLoader::parse(&mut private_key.0) - .map_err(|e| anyhow::anyhow!(e))?; - tracing::info!( - target: "p2p::config", - peer_id = %keypair.public().to_peer_id(), - "Successfully loaded P2P keypair from raw private key" - ); - return Ok(keypair); - } - - let Some(ref key_path) = self.priv_path else { - anyhow::bail!("Neither a raw private key nor a private key file path was provided."); - }; - - kona_cli::SecretKeyLoader::load(key_path).map_err(|e| anyhow::anyhow!(e)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::b256; - use clap::Parser; - use kona_peers::NodeRecord; - - /// A mock command that uses the P2PArgs. 
- #[derive(Parser, Debug, Clone)] - #[command(about = "Mock command")] - struct MockCommand { - /// P2P CLI Flags - #[clap(flatten)] - pub p2p: P2PArgs, - } - - #[test] - fn test_p2p_args_keypair_missing_both() { - let args = MockCommand::parse_from(["test"]); - assert!(args.p2p.keypair().is_err()); - } - - #[test] - fn test_p2p_args_keypair_raw_private_key() { - let args = MockCommand::parse_from([ - "test", - "--p2p.priv.raw", - "1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be", - ]); - assert!(args.p2p.keypair().is_ok()); - } - - #[test] - fn test_p2p_args_keypair_from_path() { - // Create a temporary directory. - let dir = std::env::temp_dir(); - let mut source_path = dir.clone(); - assert!(std::env::set_current_dir(dir).is_ok()); - - // Write a private key to a file. - let key = b256!("1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be"); - let hex = alloy_primitives::hex::encode(key.0); - source_path.push("test.txt"); - std::fs::write(&source_path, &hex).unwrap(); - - // Parse the keypair from the file. 
- let args = - MockCommand::parse_from(["test", "--p2p.priv.path", source_path.to_str().unwrap()]); - assert!(args.p2p.keypair().is_ok()); - } - - #[test] - fn test_p2p_args() { - let args = MockCommand::parse_from(["test"]); - assert_eq!(args.p2p, P2PArgs::default()); - } - - #[test] - fn test_p2p_args_randomized() { - let args = MockCommand::parse_from(["test", "--p2p.discovery.randomize", "10"]); - assert_eq!(args.p2p.discovery_randomize, Some(10)); - let args = MockCommand::parse_from(["test"]); - assert_eq!(args.p2p.discovery_randomize, None); - } - - #[test] - fn test_p2p_args_no_discovery() { - let args = MockCommand::parse_from(["test", "--p2p.no-discovery"]); - assert!(args.p2p.no_discovery); - } - - #[test] - fn test_p2p_args_priv_path() { - let args = MockCommand::parse_from(["test", "--p2p.priv.path", "test.txt"]); - assert_eq!(args.p2p.priv_path, Some(PathBuf::from("test.txt"))); - } - - #[test] - fn test_p2p_args_private_key() { - let args = MockCommand::parse_from([ - "test", - "--p2p.priv.raw", - "1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be", - ]); - let key = b256!("1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be"); - assert_eq!(args.p2p.private_key, Some(key)); - } - - #[test] - fn test_p2p_args_sequencer_key() { - let args = MockCommand::parse_from([ - "test", - "--p2p.sequencer.key", - "bcc617ea05150ff60490d3c6058630ba94ae9f12a02a87efd291349ca0e54e0a", - ]); - let key = b256!("bcc617ea05150ff60490d3c6058630ba94ae9f12a02a87efd291349ca0e54e0a"); - assert_eq!(args.p2p.signer.sequencer_key, Some(key)); - } - - #[test] - fn test_p2p_args_listen_ip() { - let args = MockCommand::parse_from(["test", "--p2p.listen.ip", "127.0.0.1"]); - let expected: IpAddr = "127.0.0.1".parse().unwrap(); - assert_eq!(args.p2p.listen_ip, expected); - } - - #[test] - fn test_p2p_args_listen_tcp_port() { - let args = MockCommand::parse_from(["test", "--p2p.listen.tcp", "1234"]); - assert_eq!(args.p2p.listen_tcp_port, 1234); - } - - 
#[test] - fn test_p2p_args_listen_udp_port() { - let args = MockCommand::parse_from(["test", "--p2p.listen.udp", "1234"]); - assert_eq!(args.p2p.listen_udp_port, 1234); - } - - #[test] - fn test_p2p_args_bootnodes() { - let args = MockCommand::parse_from([ - "test", - "--p2p.bootnodes", - "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", - ]); - assert_eq!( - args.p2p.bootnodes, - vec![ - "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", - ] - ); - - // Parse the bootnodes. - let bootnodes = args - .p2p - .bootnodes - .iter() - .map(|bootnode| BootNode::parse_bootnode(bootnode)) - .collect::<Vec<BootNode>>(); - - // Otherwise, attempt to use the Node Record format. - let record = NodeRecord::from_str( - "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305").unwrap(); - let expected_bootnode = vec![BootNode::from_unsigned(record).unwrap()]; - - assert_eq!(bootnodes, expected_bootnode); - } - - #[test] - fn test_p2p_args_bootnodes_multiple() { - let args = MockCommand::parse_from([ - "test", - "--p2p.bootnodes", - "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305,enode://dd751a9ef8912be1bfa7a5e34e2c3785cc5253110bd929f385e07ba7ac19929fb0e0c5d93f77827291f4da02b2232240fbc47ea7ce04c46e333e452f8656b667@34.65.107.0:30305", - ]); - assert_eq!( - args.p2p.bootnodes, - vec![ - "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", - 
"enode://dd751a9ef8912be1bfa7a5e34e2c3785cc5253110bd929f385e07ba7ac19929fb0e0c5d93f77827291f4da02b2232240fbc47ea7ce04c46e333e452f8656b667@34.65.107.0:30305", - ] - ); - } - - #[test] - fn test_p2p_args_bootnode_enr() { - let args = MockCommand::parse_from([ - "test", - "--p2p.bootnodes", - "enr:-J64QBbwPjPLZ6IOOToOLsSjtFUjjzN66qmBZdUexpO32Klrc458Q24kbty2PdRaLacHM5z-cZQr8mjeQu3pik6jPSOGAYYFIqBfgmlkgnY0gmlwhDaRWFWHb3BzdGFja4SzlAUAiXNlY3AyNTZrMaECmeSnJh7zjKrDSPoNMGXoopeDF4hhpj5I0OsQUUt4u8uDdGNwgiQGg3VkcIIkBg", - ]); - assert_eq!( - args.p2p.bootnodes, - vec![ - "enr:-J64QBbwPjPLZ6IOOToOLsSjtFUjjzN66qmBZdUexpO32Klrc458Q24kbty2PdRaLacHM5z-cZQr8mjeQu3pik6jPSOGAYYFIqBfgmlkgnY0gmlwhDaRWFWHb3BzdGFja4SzlAUAiXNlY3AyNTZrMaECmeSnJh7zjKrDSPoNMGXoopeDF4hhpj5I0OsQUUt4u8uDdGNwgiQGg3VkcIIkBg", - ] - ); - } - - #[test] - fn test_p2p_args_listen_ip_dns_resolution() { - // Test that DNS hostnames are resolved to IP addresses - // Using localhost which should resolve reliably - let args = MockCommand::parse_from(["test", "--p2p.listen.ip", "localhost"]); - // localhost typically resolves to 127.0.0.1 or ::1 - assert!( - args.p2p.listen_ip == "127.0.0.1".parse::<IpAddr>().unwrap() || - args.p2p.listen_ip == "::1".parse::<IpAddr>().unwrap() - ); - } - - #[test] - fn test_p2p_args_advertise_ip_dns_resolution() { - // Test that DNS hostnames are resolved to IP addresses for advertise_ip - let args = MockCommand::parse_from(["test", "--p2p.advertise.ip", "localhost"]); - // localhost typically resolves to 127.0.0.1 or ::1 - let ip = args.p2p.advertise_ip.unwrap(); - assert!( - ip == "127.0.0.1".parse::<IpAddr>().unwrap() || ip == "::1".parse::<IpAddr>().unwrap() - ); - } - - #[test] - fn test_resolve_host_with_ip() { - // Test that IP addresses are passed through directly - let ip = resolve_host("192.168.1.1").unwrap(); - assert_eq!(ip, "192.168.1.1".parse::<IpAddr>().unwrap()); - - let ipv6 = resolve_host("::1").unwrap(); - assert_eq!(ipv6, "::1".parse::<IpAddr>().unwrap()); - } - - #[test] 
- fn test_resolve_host_with_dns() { - // Test DNS resolution with localhost - let ip = resolve_host("localhost").unwrap(); - assert!( - ip == "127.0.0.1".parse::<IpAddr>().unwrap() || ip == "::1".parse::<IpAddr>().unwrap() - ); - } - - #[test] - fn test_resolve_host_invalid() { - // Test that invalid hostnames return an error - let result = resolve_host("this-hostname-definitely-does-not-exist.invalid"); - assert!(result.is_err()); - } -} diff --git a/kona/bin/node/src/flags/sequencer.rs b/kona/bin/node/src/flags/sequencer.rs deleted file mode 100644 index 6080d852cd8..00000000000 --- a/kona/bin/node/src/flags/sequencer.rs +++ /dev/null @@ -1,78 +0,0 @@ -//! Sequencer CLI Flags -//! -//! These are based on sequencer flags from the [`op-node`][op-node] CLI. -//! -//! [op-node]: https://github.com/ethereum-optimism/optimism/blob/develop/op-node/flags/flags.go#L233-L265 - -use clap::Parser; -use kona_node_service::SequencerConfig; -use std::{num::ParseIntError, time::Duration}; -use url::Url; - -/// Sequencer CLI Flags -#[derive(Parser, Clone, Debug, PartialEq, Eq)] -pub struct SequencerArgs { - /// Initialize the sequencer in a stopped state. The sequencer can be started using the - /// admin_startSequencer RPC. - #[arg( - long = "sequencer.stopped", - default_value = "false", - env = "KONA_NODE_SEQUENCER_STOPPED" - )] - pub stopped: bool, - - /// Maximum number of L2 blocks for restricting the distance between L2 safe and unsafe. - /// Disabled if 0. - #[arg( - long = "sequencer.max-safe-lag", - default_value = "0", - env = "KONA_NODE_SEQUENCER_MAX_SAFE_LAG" - )] - pub max_safe_lag: u64, - - /// Number of L1 blocks to keep distance from the L1 head as a sequencer for picking an L1 - /// origin. 
- #[arg(long = "sequencer.l1-confs", default_value = "4", env = "KONA_NODE_SEQUENCER_L1_CONFS")] - pub l1_confs: u64, - - /// Forces the sequencer to strictly prepare the next L1 origin and create empty L2 blocks - #[arg( - long = "sequencer.recover", - default_value = "false", - env = "KONA_NODE_SEQUENCER_RECOVER" - )] - pub recover: bool, - - /// Conductor service rpc endpoint. Providing this value will enable the conductor service. - #[arg(long = "conductor.rpc", env = "KONA_NODE_CONDUCTOR_RPC")] - pub conductor_rpc: Option<Url>, - - /// Conductor service rpc timeout. - #[arg( - long = "conductor.rpc.timeout", - default_value = "1", - env = "KONA_NODE_CONDUCTOR_RPC_TIMEOUT", - value_parser = |arg: &str| -> Result<Duration, ParseIntError> {Ok(Duration::from_secs(arg.parse()?))} - )] - pub conductor_rpc_timeout: Duration, -} - -impl Default for SequencerArgs { - fn default() -> Self { - // Construct default values using the clap parser. - // This works since none of the cli flags are required. - Self::parse_from::<[_; 0], &str>([]) - } -} - -impl SequencerArgs { - /// Creates a [`SequencerConfig`] from the [`SequencerArgs`]. 
- pub fn config(&self) -> SequencerConfig { - SequencerConfig { - sequencer_stopped: self.stopped, - sequencer_recovery_mode: self.recover, - conductor_rpc_url: self.conductor_rpc.clone(), - l1_conf_delay: self.l1_confs, - } - } -} diff --git a/kona/bin/node/src/main.rs b/kona/bin/node/src/main.rs deleted file mode 100644 index 70333e112b8..00000000000 --- a/kona/bin/node/src/main.rs +++ /dev/null @@ -1,26 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -pub mod cli; -pub mod commands; -pub mod flags; -pub mod metrics; - -pub(crate) mod version; - -fn main() { - use clap::Parser; - - kona_cli::sigsegv_handler::install(); - kona_cli::backtrace::enable(); - - if let Err(err) = cli::Cli::parse().run() { - eprintln!("Error: {err:?}"); - std::process::exit(1); - } -} diff --git a/kona/bin/supervisor/Cargo.toml b/kona/bin/supervisor/Cargo.toml deleted file mode 100644 index 124c650dbca..00000000000 --- a/kona/bin/supervisor/Cargo.toml +++ /dev/null @@ -1,48 +0,0 @@ -[package] -name = "kona-supervisor" -version = "0.1.0" -description = "Kona Supervisor" - -edition.workspace = true -license.workspace = true -rust-version.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true - -[dependencies] -# Workspace -kona-supervisor-service.workspace = true -kona-supervisor-core.workspace = true -kona-cli.workspace = true -kona-interop.workspace = true -kona-genesis.workspace = true -kona-protocol.workspace = true - -alloy-network.workspace = true -alloy-provider.workspace = true -alloy-rpc-types-engine.workspace = true - -clap = { workspace = true, features = ["derive", 
"env"] } -tokio = { workspace = true, features = [ "full", "macros"] } -anyhow = { workspace = true } -tracing-subscriber = { workspace = true, features = ["fmt", "env-filter"] } -tracing = { workspace = true } -serde.workspace = true -serde_json.workspace = true -glob.workspace = true -reqwest.workspace = true -metrics.workspace = true - -[dev-dependencies] -tempfile.workspace = true -kona-registry.workspace = true - -[build-dependencies] -vergen = { workspace = true, features = ["build", "cargo", "emit_and_set"] } -vergen-git2.workspace = true - -[lints] -workspace = true diff --git a/kona/bin/supervisor/src/main.rs b/kona/bin/supervisor/src/main.rs deleted file mode 100644 index 96dc0868001..00000000000 --- a/kona/bin/supervisor/src/main.rs +++ /dev/null @@ -1,24 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -pub mod cli; -pub mod flags; -pub mod metrics; -pub(crate) mod version; - -use clap::Parser; - -fn main() { - kona_cli::sigsegv_handler::install(); - kona_cli::backtrace::enable(); - - if let Err(err) = cli::Cli::parse().run() { - eprintln!("Error: {err:?}"); - std::process::exit(1); - } -} diff --git a/kona/clippy.toml b/kona/clippy.toml deleted file mode 100644 index f3322b5fd24..00000000000 --- a/kona/clippy.toml +++ /dev/null @@ -1 +0,0 @@ -msrv = "1.88" diff --git a/kona/crates/batcher/comp/Cargo.toml b/kona/crates/batcher/comp/Cargo.toml deleted file mode 100644 index f9277bb166b..00000000000 --- a/kona/crates/batcher/comp/Cargo.toml +++ /dev/null @@ -1,113 +0,0 @@ -[package] -name = "kona-comp" -version = "0.4.5" -description = "Compression types for the OP Stack" - -edition.workspace = true -rust-version.workspace = true 
-authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-protocol.workspace = true -kona-genesis.workspace = true - -# OP Alloy -op-alloy-consensus.workspace = true - -# Alloy -alloy-primitives = { workspace = true, features = ["map"] } -alloy-rlp.workspace = true -alloy-eips.workspace = true -alloy-consensus.workspace = true -alloy-rpc-types-engine.workspace = true - -# Misc -rand = { workspace = true, features = ["small_rng"] } -tracing.workspace = true -thiserror.workspace = true -async-trait.workspace = true -unsigned-varint.workspace = true - -# Compression -brotli.workspace = true -miniz_oxide.workspace = true -alloc-no-stdlib.workspace = true - -# `arbitrary` feature -arbitrary = { workspace = true, features = ["derive"], optional = true } - -# `serde` feature -serde = { workspace = true, optional = true } -alloy-serde = { workspace = true, optional = true } - -# `test-utils` feature -spin = { workspace = true, optional = true } -tracing-subscriber = { workspace = true, features = ["fmt"], optional = true } - -[dev-dependencies] -brotli = { workspace = true, features = ["std"] } -spin.workspace = true -rand = { workspace = true, features = ["std", "std_rng"] } -proptest.workspace = true -serde_json.workspace = true -alloy-sol-types.workspace = true -arbitrary = { workspace = true, features = ["derive"] } -tracing-subscriber = { workspace = true, features = ["fmt"] } -alloy-primitives = { workspace = true, features = ["arbitrary"] } -op-alloy-consensus.workspace = true - -[features] -default = [] -std = [ - "alloy-consensus/std", - "alloy-eips/std", - "alloy-primitives/std", - "alloy-rlp/std", - "alloy-rpc-types-engine/std", - "alloy-serde?/std", - "brotli/std", - "kona-genesis/std", - "kona-protocol/std", - "miniz_oxide/std", - "op-alloy-consensus/std", - "rand/std", - "serde?/std", - "spin?/std", - "thiserror/std", - 
"tracing/std", - "unsigned-varint/std", -] -test-utils = [ "kona-protocol/test-utils" ] -serde = [ - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-primitives/serde", - "alloy-rpc-types-engine/serde", - "dep:alloy-serde", - "dep:serde", - "kona-genesis/serde", - "kona-protocol/serde", - "miniz_oxide/serde", - "op-alloy-consensus/serde", - "rand/serde", - "tracing-subscriber?/serde", -] -arbitrary = [ - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "alloy-rpc-types-engine/arbitrary", - "alloy-serde?/arbitrary", - "dep:arbitrary", - "kona-genesis/arbitrary", - "kona-protocol/arbitrary", - "op-alloy-consensus/arbitrary", -] diff --git a/kona/crates/batcher/comp/README.md b/kona/crates/batcher/comp/README.md deleted file mode 100644 index 533d737d0f2..00000000000 --- a/kona/crates/batcher/comp/README.md +++ /dev/null @@ -1,8 +0,0 @@ -## `kona-comp` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-comp"><img src="https://img.shields.io/crates/v/kona-comp.svg" alt="kona-comp crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -Compression types for the OP Stack. diff --git a/kona/crates/batcher/comp/examples/batch_to_frames.rs b/kona/crates/batcher/comp/examples/batch_to_frames.rs deleted file mode 100644 index 3a41d15a4d1..00000000000 --- a/kona/crates/batcher/comp/examples/batch_to_frames.rs +++ /dev/null @@ -1,111 +0,0 @@ -//! An example encoding and decoding a [SingleBatch]. -//! -//! 
This example demonstrates EIP-2718 encoding a [SingleBatch] -//! through a [ChannelOut] and into individual [Frame]s. -//! -//! Notice, the raw batch is first _encoded_. -//! Once encoded, it is compressed into raw data that the channel is constructed with. -//! -//! The [ChannelOut] then outputs frames individually using the maximum frame size, -//! in this case hardcoded to 100, to construct the frames. -//! -//! Finally, once [Frame]s are built from the [ChannelOut], they are encoded and ready -//! to be batch-submitted to the data availability layer. - -#[cfg(feature = "std")] -fn main() { - use alloy_primitives::BlockHash; - use kona_comp::{ChannelOut, CompressionAlgo, VariantCompressor}; - use kona_genesis::RollupConfig; - use kona_protocol::{Batch, ChannelId, SingleBatch}; - - // Use the example transaction - let transactions = example_transactions(); - - // Construct a basic `SingleBatch` - let parent_hash = BlockHash::ZERO; - let epoch_num = 1; - let epoch_hash = BlockHash::ZERO; - let timestamp = 1; - let single_batch = SingleBatch { parent_hash, epoch_num, epoch_hash, timestamp, transactions }; - let batch = Batch::Single(single_batch); - - // Create a new channel. - let id = ChannelId::default(); - let config = RollupConfig::default(); - let compressor: VariantCompressor = CompressionAlgo::Brotli10.into(); - let mut channel_out = ChannelOut::new(id, &config, compressor); - - // Add the compressed batch to the `ChannelOut`. 
- channel_out.add_batch(batch).unwrap(); - - // Output frames - while channel_out.ready_bytes() > 0 { - let frame = channel_out.output_frame(100).expect("outputs frame"); - println!("Frame: {}", alloy_primitives::hex::encode(frame.encode())); - if channel_out.ready_bytes() <= 100 { - channel_out.close(); - } - } - - assert!(channel_out.closed); - println!("Successfully encoded Batch to frames"); -} - -#[cfg(feature = "std")] -fn example_transactions() -> Vec<alloy_primitives::Bytes> { - use alloy_consensus::{SignableTransaction, TxEip1559, TxEnvelope}; - use alloy_eips::eip2718::{Decodable2718, Encodable2718}; - use alloy_primitives::{Address, Signature, U256}; - - let mut transactions = Vec::new(); - - // First Transaction in the batch. - let tx = TxEip1559 { - chain_id: 10u64, - nonce: 2, - max_fee_per_gas: 3, - max_priority_fee_per_gas: 4, - gas_limit: 5, - to: Address::left_padding_from(&[6]).into(), - value: U256::from(7_u64), - input: vec![8].into(), - access_list: Default::default(), - }; - let sig = Signature::test_signature(); - let tx_signed = tx.into_signed(sig); - let envelope: TxEnvelope = tx_signed.into(); - let encoded = envelope.encoded_2718(); - transactions.push(encoded.clone().into()); - let mut slice = encoded.as_slice(); - let decoded = TxEnvelope::decode_2718(&mut slice).unwrap(); - assert!(matches!(decoded, TxEnvelope::Eip1559(_))); - - // Second transaction in the batch. 
- let tx = TxEip1559 { - chain_id: 10u64, - nonce: 2, - max_fee_per_gas: 3, - max_priority_fee_per_gas: 4, - gas_limit: 5, - to: Address::left_padding_from(&[7]).into(), - value: U256::from(7_u64), - input: vec![8].into(), - access_list: Default::default(), - }; - let sig = Signature::test_signature(); - let tx_signed = tx.into_signed(sig); - let envelope: TxEnvelope = tx_signed.into(); - let encoded = envelope.encoded_2718(); - transactions.push(encoded.clone().into()); - let mut slice = encoded.as_slice(); - let decoded = TxEnvelope::decode_2718(&mut slice).unwrap(); - assert!(matches!(decoded, TxEnvelope::Eip1559(_))); - - transactions -} - -#[cfg(not(feature = "std"))] -fn main() { - /* not implemented for no_std */ -} diff --git a/kona/crates/batcher/comp/src/brotli.rs b/kona/crates/batcher/comp/src/brotli.rs deleted file mode 100644 index 3302d7b210b..00000000000 --- a/kona/crates/batcher/comp/src/brotli.rs +++ /dev/null @@ -1,183 +0,0 @@ -//! Contains brotli compression utilities. - -use crate::{ChannelCompressor, CompressorError, CompressorResult, CompressorWriter}; -use std::vec::Vec; - -/// The brotli encoding level used in Optimism. -/// -/// See: <https://github.com/ethereum-optimism/optimism/blob/develop/op-node/rollup/derive/types.go#L50> -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum BrotliLevel { - /// The fastest compression level. - Brotli9 = 9, - /// The default compression level. - Brotli10 = 10, - /// The highest compression level. - Brotli11 = 11, -} - -impl From<BrotliLevel> for u32 { - fn from(level: BrotliLevel) -> Self { - level as Self - } -} - -/// A Brotli Compression Error. -#[derive(thiserror::Error, Debug)] -pub enum BrotliCompressionError { - /// Unimplemented in no_std environments. - #[error("brotli compression is not supported in no_std environments")] - NoStd, - /// An error returned by the `std` brotli compression method. 
- #[error("Error from Brotli compression: {0}")] - CompressionError(#[from] std::io::Error), -} - -/// The brotli compressor. -#[derive(Debug, Clone)] -pub struct BrotliCompressor { - /// The compressed bytes. - compressed: Vec<u8>, - /// The raw bytes (need to store on reset). - raw: Vec<u8>, - /// Marks that the compressor is closed. - closed: bool, - /// The compression level. - pub level: BrotliLevel, -} - -impl BrotliCompressor { - /// Creates a new brotli compressor with the given compression level. - pub fn new(level: impl Into<BrotliLevel>) -> Self { - let level = level.into(); - Self { compressed: Vec::new(), raw: Vec::new(), closed: false, level } - } -} - -impl From<BrotliLevel> for BrotliCompressor { - fn from(level: BrotliLevel) -> Self { - Self::new(level) - } -} - -/// Compresses the given bytes data using the Brotli compressor implemented -/// in the [`brotli`](https://crates.io/crates/brotli) crate. -/// -/// Note: The level must be between 0 and 11. In Optimism, the levels 9, 10, and 11 are used. -/// By default, [BrotliLevel::Brotli10] is used. -#[allow(unused_variables)] -#[allow(unused_mut)] -pub fn compress_brotli( - mut input: &[u8], - level: BrotliLevel, -) -> Result<Vec<u8>, BrotliCompressionError> { - use brotli::enc::{BrotliCompress, BrotliEncoderParams}; - let mut output = alloc::vec![]; - BrotliCompress( - &mut input, - &mut output, - &BrotliEncoderParams { quality: level as i32, ..Default::default() }, - )?; - Ok(output) -} - -impl CompressorWriter for BrotliCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - if self.closed { - return Err(CompressorError::Brotli); - } - - // First append the new data to the raw buffer. - self.raw.extend_from_slice(data); - - // Compress the raw buffer. 
- self.compressed = - compress_brotli(&self.raw, self.level).map_err(|_| CompressorError::Brotli)?; - - Ok(data.len()) - } - - fn flush(&mut self) -> CompressorResult<()> { - Ok(()) - } - - fn close(&mut self) -> CompressorResult<()> { - self.flush()?; - self.closed = true; - Ok(()) - } - - fn reset(&mut self) { - self.closed = false; - self.raw.clear(); - self.compressed.clear(); - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - let len = self.compressed.len().min(buf.len()); - buf[..len].copy_from_slice(&self.compressed[..len]); - Ok(len) - } - - fn len(&self) -> usize { - self.compressed.len() - } -} - -impl ChannelCompressor for BrotliCompressor { - fn get_compressed(&self) -> Vec<u8> { - self.compressed.clone() - } -} - -#[cfg(test)] -mod test { - use super::*; - use alloy_primitives::hex; - use kona_genesis::MAX_RLP_BYTES_PER_CHANNEL_FJORD; - use kona_protocol::decompress_brotli; - - #[test] - fn test_compress_brotli() { - let expected = hex!("8b048075ed184249e9bc19675e03"); - let decompressed = hex!("75ed184249e9bc19675e"); - - let mut compressor = BrotliCompressor::new(BrotliLevel::Brotli11); - compressor.write(&decompressed).unwrap(); - compressor.close().unwrap(); - let compressed = compressor.get_compressed(); - assert_eq!(compressed, expected); - } - - #[test] - fn test_compress_batch_brotli() { - let raw_batch_decompressed = hex!( - 
"b930d700f930d3a0a8d01076e1235e0c33674a449c13fc37ee57f9ea065bf41af3aa03d5981f1432833bd0b0a0652a19cd927ae4a22e8f8069385002252d78e1c3cc91a59ac188708b7074449184766cbcf3f93085b903ee02f903ea82014d884062b70d4e215ee885019d47a37c8543ae9f382a8310c97b9451294f5cd6e52c003ecfb412ca8b42705c618d29883782dace9d900000b903690d669b0cd98174ac3b57393839029ac04ad36454109851443b4f6580664fe06766a7dea5b1ed31e14e7c11aa738eecb86e979f874873cd3d7ca9481681b4b17d134316e7bbe828ef69339ef85c6f0e9dcdfe1dc85309effb487569383d5464b519bdc1c85fffc72bfe93d4081a3e1b75e5dd39f95a91df0997a22d8fbdeca57a8b35b4f0e277ec8502cc55581a94eec1d1000b2921b4d7c3985ace205713641d03c3975e4049e13b3d2c5926b224684e38beb3b8d2e5d4060b109aafc3f2d144783aadf6086aa1d5a931d21282711484a9c0537bd4981fc222444f2c057211708e70dc4223063cbf39e4af0b795d3ec0dfba32391611d151145c1b6bb33d53ce2bb7983bd7b6c1516f7a1a719fd876f4b20910aba76c16dbfc57199a60e2ab938bc285613c3802c17aa03cb9654f5142d607bac01293c9aaf4e58b422c543f7e5e458af0b7cf57f33109558bef71e8b5506da723d996eb8e2c265b1cae43dba571d07d3ea1bcfdcb73089597e3744344e049bf21b4244d5aff60d559010b69a6335f4bb21178de504f50808204da652c7767dbf11f2a34b4fb710e6df9ad8810aa75dcdb2c99dfe9bf898912817e490b4982d44fe09f8adb43e0da2a0c824a9069ce8cc36b5fb0074c2db895ee92d92fa6b7efdf5c97ae05ae27556bc07ddc9d9d6261a53e3a10c350c3b1da26b27b345768e17da7dabfe6e30e019c88ef4a0e8df840bbd3fbbb639edf775449d8be7510cc811564789b861372fe97f7b5b1389f20c9872517634e9225669ee80cf077f9c8606cdbad53819a875ecd9f7b6d778c1dc302ca19ae67ffb054eb99206fc90eacbac8177712d0b4c72700df3f5e2c88fb4e9c8284cefa66390a78605ad9320aee34f72f3cb263020204393d9359a65f48b0e6e942b016a1f2c5bd6579f0a65997635ab15fa38db76ae8a5d3be516441499819bfaf730ebaec389db082e41443660dcc6280315154888b9e726b971237fae5e06b01958aac081398c814e446a003039dd090c0efa5d39735ed0ab46c7b4e4c960ae414b045fd19117089e65aaf3779cc9045d6e62538b1b75c2689d23ba3c08ceed46d4fdf9b969b34a1903ebd96a3a6b091842480e638b095c1ec11bb5c599668ea1b0a5a714d13462edb39dfd992b569897ac8f45c587182770631c262fc459afa6f23d5670eee2aac2
ddaa89314607d30c6bfd408980c082749ad6b48a5310ac75b880cc080a00b5d23a075615f50233ce278d11b7b0ba0ad6a01486dbf31c54aae096f0f066aa02d9feeb4771b5a37d1247a4cc58a64d392f3916b5602d9d41d97b52b391ffd47b9011801f9011482014d88a793ab3f17510b308821f5d9030532aae9831708c1940b6f262f685c8d0ff7dfc9ba9686d8f75b78923c80b89f7644852b70713a788b69f191c54ec8368a7f2675623b2369f9078516605d0d4550ff9f5b92b9da2147fa3a24cc17605f30cccedc5bacafb2bb86e2640db6654a514b8eb13d3c3ab6b5e344498de0c709dd9bef58a8af16d3efcd2c0b2cb69d6089d0af8d42baab434dea885253e42050aeec01f233e64289b2e894c680fbab4f25a653745dbd89edb19d97e35bdd4293794c69503b0e60ed9cffe7e9ab3cbbc080a0dd08ebab0802fc61ccf26c357b638a55cbcd6b366251c17e2fa52d328d9d59e5a027d334772553048d6b76fc39ddee5f85363810c235219356cb4c5c3dbf9661d5b90298f9029588e383f18817bb0d1c882c58aa6b12de88f3830a7831945c1c1314ed944220436fad3742023cba2a71c4a2886124fee993bc0000b90219fb039c014cd76a327bb9b3f59e8176f377249385e67cb1681f8eacff1dee5a5a949511438ce370f8ad6618f3af81cb1f775a0b365546dd7791b0ad71fb1f2f29154265a8175b7e518580732a5a46dae3752e1234ff779d4eb614af2c66beec964181ecd0cfd1640bb2ca2b860649c41930a60de0cc754884a780488f05d1d5833a381670b368c85bf08d6650e26122f6714056382a006fcd5f9c97f55a98d68dd9293bb1be24823eaa8cb007481dc78a7a670123976e7b6e81fc223f42637759a0c933b73ba89a1d902c0874fedeb0a97dfab298972a18378539c2894ca6df9c0a423c2e98df4c133e5e808809849785b069e323640bf93d4b82a0917aaea8fda9a3072ab9a00a4b8b9b7b3a3eb326e54231d0f6a064cdf4a1fc06c961e5087359c029b13e229fb477d6651bad52c75e503ac45002a803a7457488966cc16bbc9be5c1c9a797d0377710c028e4f05a6cb929cc1fd4018912929252e04e107ffbcbd4c81ba01ab4b11faa90be0f9f9a6a22c87257e4a2aa8283e6f71d7b9e03b5308b16525c4d79705bb0906be0e947e8075ac6ce2235356aa0a66bec39e918e47a6220b322e326bf8fd65e47778e14074c47cb62b7ef8ef956c996097d2919df7aac8ea2ed69c1fd9f1d96b6b82b411c524cacec0f4a4269821fd6766d24954b8870fb1d85f5cda0528ae18419915a8b30b25baf6a162978a4bec86009cece83017d50667a202b3fad18f8ed8b5140c97fa74e91be608fdb788202bea05f469660e363ec580825d1e2bf753c01db044279f
862720a27831744b91494f5a050fa7445e0e6156dfdb712a647ef73a2dd35b73d5cc988430c831352d4ac7e8bb90458f9045588a106e4c16d06833a881973c4c642fba1bb83068f2294050c84206ba9d32d93d144884644e5bd36fc92d0883782dace9d900000b903d9b303f8efb68766822d7eea21ca4b7c5dd79dce832c4893247f6784fe47cd7a18caea7b5b4d8bdf02da0276aca185add01fa2d16c2f1188ff7cbf6fb8c6308999037b2b92d725094d8faed86f0b1a45b55de4f36dbb71dcbf4be12fe624077213e0c170afbbbb546a343ac3f2a1333a7a7a7db7be46640a73d61b3aabc805b022be416198d809b62f99d26cf4a3bf555d40686f4b8970ec15386462bec5f2b728de0da047d6b3f3ea51f571507f32f047322fa204f0c5697cbb56b4b5c7792acaa40f02926651fa715a40e1f212c78cd4ecca285ada2c8cbb6e5dcfa3823725b44e29aacbeb9b6224f90fbc895a5980d63da46688832e9776b0666e90deacbcf8a4c559b625cf004cd04c686aaf9d7d6e2d394f5d36311f7afdcec5033daccc63c0540935f59514c9aa8ac3c2aeff48f624f2dbd38062fcd046651e92fc7ffce4dd914bb0dae704e5b26a8b73b3baef8ea022881e15666fada8e43fd621793713cb8c867775b9cdcf3b066582fc9baa705a0e1dc61a4b33b1b33ad3ba3bd0cc41b5850cadc04654dec222178709910209c6ac3db9054ef91facae2d729d7ee54898a18411b6d20d599a3de14d5375e5a9c90f3bce78479cb0f20afca895e40b576940e063587f451a8828ec2dd4a8538b4bebc39f72a6c54e379a07b7d5e0c02ccd57dbff13729bbfe5e78498c01cea12e830944fd0a123b7383fdcda97d8d9cc831e542ab6d9b36774d540b180c2bd52d46ca7f0e17d400cf3cd559b1b4e51ba93cd954777ba27a9f0327eb6c68aafe74fabca4610210db7498aecffd3164c5eef8cede655e1b42d5f54f5a52b4f5fe9698a4463f30f20693263d41074d0403a737c4d4986f0ee7fee828fb7072a80603613fb4d6c219dfa47adad433af6b437dd199f3bbc651487718b2e6d42728034c242672a98a9f36fab6d4162f4e8eb7bf2a9868cead8ad657a67f0aa50286113db972936260323d7b11353328151e80691d551bbe1f7f11774e15db4f175aeac5b91668a712c3c2399a977abb9fd9c2b53c5ba68f2c0ea353028416b36a47028f78918e2b205bf9b3bce6f1a08bd4448abc3f12a240482b4be98dcb77c74fff47e92d833735e802465e50b79d51de5a7fe45a95b650b051c61a529d5f51cd0c603a2de67a3123be1c52263e1c9167765b13ad1e01cfb27531c9203f39e8913fe0cab9d8c14b17bad0100b76c41d41d68ae3b7aeef5f6af4f66d113fd29eb9c4bf994f04decad13880d9d1eb3865a3
0e2540e86923b36369c121ef2a6a43a618aa4b15560fa806601a85be361468bd09c6dca39ad7ec44809adc0907dd0458177343a7c23330605b802f3ffd3ae61b3be952ca2effae8222e9ed0b6ea4240728a7800e4882efa7dd1ef8202bea05db690cab7dc8c52c2c375428c0aa9ead02bf44e2b1f8ee06e1cf7af25eecc13a07d967fb12e1f0073adac46e0676a6006b30d780e6a1387afec76cbd1f07016e3b9012401f9012082014d88df6f092495b7f4148840c5b5541d013c63830408e194aef36f2041e560a641af89e0ba2799ea630a9592881bc16d674ec80000b8a3afb9380f9228224c1aa59eab115ed4172b471aa2ee11b3d4ac93f4b6a33518007a798170801f4f582e188b489005d8f108e2a4acd6f7ac28852580e73b6a1590ea1af1443666f1d14affb0a9d0655a5c57cd4190b2a00c07276054641ee4204ed8a806ded2b3aaa7453c24e442992434d060b51d2255c1cc2a002264b5dadb32057f4a5d52626e0ff453e2f05f1e0d8294614916c00110853462d51d9ab7e03b7019c6c001a06028ddc42f0d3e1cd6cb1ed7377d518480626d56c80e6d15eacd42ecf2f30957a03f6e1098b300b6329997bacc5e667eeed72a38f6c4e1db7199483bc9a18267d8b90222f9021f88c0988653bce0e07388fbc67f04e5c6772e8311bd5c94eeecd6da1ee441093ef70d8c86a26f4dc4da11588853444835ec580000b901a349e745c1cca19957c43f15309935f7bf49547884332dfe6d5b8b9d61542dd88ecc61187fda813a7f700ca96e8847a33bf8552690d91ec8e8fa70c21b380c9c681b54e859add36c3c19e7fda3075ec1a3cf47ed39c89241bb73f206d7497f93c47db9a85be7135948e19809c195ccd4c9a379ed464bf77ec562e360c52b9225f103d323364a72e8a725ad2b34a355928acc6aa563b67d120ddf54cf68f710624499ddeb30b0c94b8722ef2d641ae49f17f4a916d54350ec483ec5bcfd9748e0a228c3e73cee9ea248ad85060ac51b3e6834e1f771f725a466affa28453ad3726d794caab223fa76c8b994ac5d3a1e8ee830e4fadfe0786174364af3109c04d7d607aca17933c4366d44d9c5376ca34febaaa612707eec4e2fc5c6b1668b3450340938d17e5552df96ae84a905d069f9e3455bccab30640a0720f9b4598d8f82ebd19bd32b7e82165303123a0ed80c57375174c08d32ad3ae354251c97316b2977f3a2fdf2dba1c595093c88275badc54e3aad65f77c56f55d04b1e6d668406058ea01da2364fc207659b028d9c55371c776f732e63255dd177b95f857e3cbdb4c66fabd8202bda060830662664d96755362addcc0908287c99c60761cf9c7a613058894eab6e599a059cd2461d4a89458dc68adf287fee71a783dab0aaa05587a21b4aba1c
a4f5efeb9017801f9017482014d88d15c09b7ee8f9562880ae58585f383aacc831e72f6808853444835ec580000b9010a2e818d2c4fa7a974f5c3acf3c0f9439f4c83721b2bb9df4fa290c7fa57bc1f9f77e4b80866845a8bbbf8030b707b1f07a54a0ab901188eb2e1262a45618a08517f943cb032eeec926e4343d5d3089c145da1d53128ae901ce91a813c205c615bc1ce9b8658a9da4c2d258fe36f6ffb6289df910566386dd1a9f73b44053bb64523d8faf7b9055c592695fc426c360479c1e2d1f68ca5c7965dd20b6879989606cea7c0db28f27ead4a591ee264f755b7358146586c6a1a8530ec463dd754f100fac603ec3360c0440874c12bb179c43a23e40957bd446f2573af413f3314e9f0668af2491de96156a9bf35bc469d51935305f4df051580b84e98ec8395fbd42fc0c3f3e7410ac4719af4c080a09a774db7e3a26966edb91c1f7956a091425044ead1589f435c8d04aac9533764a04325d5543464929773cc6ac555f5ce1830c997f4d26f2dad5a7e056db6f0a2e6b9032d02f9032982014d88828a67bc288355d78498c2cc318542aa1a60df8305fbb6808853444835ec580000b902bd082cb3f3fa41ebf06fbb17afeed9ccdcf3d2999e2fdd1e1171e0b1549c06de17dffc4ee7785232184a698311c7487fdf090e34b9954a41affc0d0ad44104f70750f6a896b1b2b5ff1024de66ba877c5494e67735cdfd45f9ec0df1c198b357b60e4d840abaa72c5667074c43bfa5e1f07b5970f018820db6fc2bf84341cd024cefe455c92426f876e51aec0fedded8d4aa4003aaf6970c48d898d8d82a8411990e73c8ec792a2cc4a129e526d0fa34a54c37ac13ecf4e3c597304cdbd327704fc97f2ba0b110afee78da5c3f46d3354bd20f56cb91b7ba8d302422428082748faf8b4828ba925ab1a02ba695e686da4d1e759b6456b0388ac8fd769f3b726332be36d3153ebee040b5d822fe62d73b629a6251c8e49a988cdfe599762759df03c9100db5f7a87ce7102ddd21831e0736924f230ffe6aaf6b012423e351627e118f2bc12736a3694b5468858ec6310017b10de24fe75ff0abc060b1e60271dc5274b4bbf0b755a0a617bc23f57ee2286c805086d5824ca4bb6297545c5c1ccaf03be03b7df33c953ddb183730313f09c88392e4bdf688f1d2b730318cc9b148e488c2f1e383505a383672755a221ee7dffec5a4f77e7efe66043d686a126480ea01a8ef0f72f9a5799e03e863a85b7aa56c88b7575d6ebb9df809a240969d3a2b2e086e742130e38cfe7870db79bbd281849912fa611e04b8dd0dea9b7da5d16a66969e54ab9def159b9c1d351d719a93821c40ad6c6014644c5f77374cbd486d6a7cfe75d7d849ce240ac86a1c0843aab27fba4d317c725eb1017
52803ea67d3e12b784bb424eee6f766e33d6664ca113af63c54ba27b8a8e904c572dc3fd09848cca3499c403a1c601db77a7f36d244024ceacfd9d6ae494b7e7e0f92fa5f83458d5da139eb127709e3dd75c88fd5f75244e15f1bb8cdbd3056bfa56139442c0bacbf3263f29ef34946e928b9a4f1c085e5df3b09f31c6e87397bd939c001a08b9ac3bc299eff8eedc51ed3ff077e49da6fb145a0c495f430964581fd4d230ba05fef2837a800e231a3178226f59a981d2c4bcebc4b4cfba9680371da1e2c1a61b9042bf904288821c649ab1ae8ea668896d6c78054ad7a6583121a8994e3294b628e98892fc56ae3fcbce852265aa657e7884563918244f40000b903ac0177c66fecad5135344e89f45ec7e083130a3e5eab1abb75bab0aa357cf044c0582542047a3f9985d3439a6f850466061142af44a9208656e278b7ad1bd0e03539cc019d6ebf8758bde3e0489ba540c523f178a0b055c1fedc3627fee427467ab67545c154106bb9e0c12a7120c175d66f9e3eb9183ae5c7640d4cb4bd3dc94c7b4e0c9fe70e692c3fd027e0ebb46bb32b73a269037a76731a9f114343ea0584c3f7e9cb4530d086609b59ab6b72e7dc6c2c0c95699091e06a33af5ba200a168ef483fe11056330e84da4f2a59db72d5d697d262b9565fe81a738a48d24a9f1c8c49a671101bb7db5eb64deb454a117eb00f4ccc31bc93c061e975ab6d375967544a2a06ff8b9d59bfe1ecb1dc47d5536c645d764028c5de77f3f34d6c7999785b70b187d9ec4631e83cc69499a4ff8ace98a6f17b77f648ab7a07d5ee0558a8efc19d4601573156a0264d2e6574e867c1eca423eac1fdbfe0967bb8f02524cc2d9933141acf619ffe99483305fbdd6913f1e1feb78a17fc6b81c705c81eb08d5602b097ddec64f6c334509caeed7525e3e34845b21e56e4424aa9609f4df8bb13f31c5448b6bdede84d9a9aeba9fcc38a3c8eb1f3f31b80918e045266c7d69b252c86f8b5711b2cf7136e2c3d86d1301608c7c16655c3ffe6d04014dfd55a9563c2a307525088fd017486ffeaeed45873013a7940a7a91442b975065c765c32546aee9b001ba78d8563e039c8edc24a92f9f457ae28172eb29e16cc588d52c8e75a565aad1a8f9d6d341189a24718c26c19a83c6cfe1bbec2f4b878759a7dbeb4ffc0568b902b1dfb18af00c7014f2822965ddfb56d7aec508822531834ad2c869affba1f95bf3dfdf1d1dd1c2994d904b9c5133900962c8137d7fce9f0b9a7d0474dff9173edbcefb4bf355539dfa791241031e90770c8f09af595eb1aa0d083bac4fb9b929ad7e23c0fc8d3ecc7458a0790929cf7588cc255916a6c16811f09d0c972b294dee6e1f739c5e9d3eab8016b565c8570e41bcddeef2dfbbf95910ae6a46a2
834919742ec599b9ed204d1f86ce6baa534039ed308d8be0d289824303deb54af5f9f50d88807134b8f42485cec121432e58b83c8aecb32fc62623b06c39c3f1e0e921b1bb880d2eb017578e5f33a25a335a813f02259e1b12b8a76a90a65d015bb214032a095cd8918b78003d310a06a246ac95c126188911bda8a6623407c0dad308e25a438f78c7409267b729413b7d248a6a88cd64c73118999f00981aa4f6b639e4252d39b1706c686c7763ae9c41aea7b46fdd48bc490502ae876175e5aff8361ccc530ad8202bea0b0209fabc8a5c0e2a5bd08e9a6b532d51670f41513cf007781f27e49b070ccdba0795755f4fe231840196d847d100e7cf1e5650ae172890c469428269cb105c16cb9031ef9031b882565c357c3279f0c88e90114422a470a4682e988808829a2241af62c0000b902b424fb91666edaa16addea67f72c9e0bc7a8053bda59776ede2a0ec3f7c78ffac0eee97ff259f92b21378193aeeadd0253b08897a14f10ab537db63202a4c9f78eb4b399d55c5a256a8414f58f45b109e6228a75ed1eb09627f44b56eb539c334df412b30ee6f4ea39a04aa671aee9e7157b9cb69aad4ab1d9d75c6d90f3488342b29bb59c97ecfd2bec4f991b095038b9e20eeb591b641f64e32e5020130f8a8daf7c51caf93ca460a4e60132835119f99d0484529cf541ab9f922bf15a782521a0f6739c1edb8d4bc26a07e63790087b4c098e4df74534340bf7815039326d1bdcafa53932deeaff03a31e97c6733cc702cdd42be18e4716dd0d014f3e916b0cee3a16bd52cf717f5efb59fb7e41c8e4c0d7eee8ba92ee5b293b25612ee9a3b0043664e918a2aa2b602accd357c8f22f382b16f637b57f2fedb7d8f66172f22e67cc04f230e28ec96b928f449fba63b7862bc3102181d6c7bf063d9376363b8be8200169aa88c46732c5ab1e19dcbd8abeb34f1e1cbc632484d9864e630c4567c0f04a2bf5895d3cafae1b0e70e4c1ea28d4d9578a82611f09ddb22c3c4440e8236be2bf9cecd3fa64b19930af8664d78d6f10aa9c913be537bf2b539e3a9042d5744eb3d1bbc16d98564488a51ba45edb2713b466beac560789c4eda3c0961bab002b95eba9f512108dee2e39a8759c04b18a923f2f2aab2e1ca30ec7361b25ae71923027c950c089469820a4ec3ec60529f1509b92ef04fb7fac70f25d3e5ea5c6a28226fe19317bd4d0f42085884020a2b22dcb0ed8e5600ac969b4f910e54f617597a84b05774776d694ba38ccd3d1055a7245334cddb1ca20d7e001285a57001d03b2fc1ff893ab044612dba9b311247528d7490a9a7f3e7c3ed8531844d3b829de3604e8546ee8d4c3d7a308d32035159aecfa20ae4660e6dc94b6a155aa78150a01fb0e6c48b660a0f051ab59accaf45
08202bda080d51bfef036fd4c4ebe7151b2755d6606122e565323878701113b84fc86548fa06fb34b02deb66359ae8095d3c339673ab2a8b138fcf9aed2d4276c8a16435a60b88801f88582014d88bbd39acc70c3229d884ec80fa5565439d283119a84942d89ae04c33fcbd75e3c6c43b826b266625b854f883782dace9d9000008911d1f14d3a721904f1c001a046bf61e70c69943c277ef7d09ce5e779a10e3671cfec81423e0f951254dfaad2a012fa75748afaa79673d94a17d35666009001775a2b868b9b839c77065649bbebb90143f9014088e1cba06e2ce482dc8804b98caf86fcf0898305c61980880de0b6b3a7640000b8d9854e530ac567b7d29eedd91690a0d2397591c6a1b1f5068bc292b740f6aa5d38003a933c0560971d4701b31d537fb7c1ff68c40ef07221089f37671b101309000e0eccbc42284732aa002f2cb3197def9947c2b2fe47d3fea2efc71b1f3cd681082d043dbc1471a56a5d0a5c757b8c115277a2af2e044e56e5e3c2cf8756dbe51a347096a4ead46fe53f4c03fc100fe0009f6b2fd6ade28fc89230602e9221962f4512740857b87f415f134a224c5149e374fe22f3048f0620f1bddbc9acdc268a5de1296d265bac65fc2650b3de55e6bcbc26bc4d01dbf7548202bda03e35d4429ee24e44134f7f51b32fb69691a16c60a0347d9283a8e593d5a095baa01c590af4c1fcd3aca728bb5aaf03f48aca22c756a87607b4153a5ac6be59ebb5b9029002f9028c82014d88aab881c6fe3d0b7484b0da2b368542c231bfe483115994808829a2241af62c0000b90220a8317aae8cca53d039d79f09934b9c5d0b07bf13ceeffacf1011fda22a85505eb7c717168c18d8fb230a7a3f166a4e93326fa82884ad3093b5e07b4edee095d98bb92f357fd4a98201be26960d4253da6fcd09874b364595a47b95d2b50f8cd45921931469a302be9699779775b59f27deea2aaae41a010a47b825a46103b7d355f1c154b3422b4fbe4e62c71c5b6b98b627beb82014ad990bda2b6c06ddd237543b3652c7a029928153a8cec540311406260fd3a55cc5788610321d66c29f168ffe5d93f92378359231ff89492db2bd2e90a4d9c28263d75b77842584d253fd7316e61c27f71771ac7e7a3c8ae6921ff2280c459c36348e0a098fe8da94c1546c15db7968d6b2821b24edced45a7ca8f2bfb2b9bb7a497b950bdaaf771bd777e918887c0d2d6ad3b72c168228f49fae155862e0baef308ace6952606a660beee10da3fd2d29b5ac31f2d55e34da94a4274e1bd679fa42bccc5db074a070b899e28948680d82c7229223d846a1a2c19143dd99c78bc42c33490b85be5067a25f6361d6b803b315519de254191557ec691967ccc3d087b8799dfa5888ad748b7a6e164da
0c726bc1f916110b6fe6a013ce0e28b79bee045d250657a70211dc11a5dee69a2c05e9eedde536a9911883e5ef2ee76729ff8fbc3aae0fa13a36daf01199a7ac60b21c7fcac00d7c6a80f5ce10b79f4666d69a1a45b3ec864a57f1f6fd492223c539351326d7a25b18bcfd8697f55e972607b9675b1d40dea3ba4c0b3c080a0e69a3802e5dbe5284f817eaa05c76127a3898633d4524f3da9ba8d7e7b98af23a05a2672729a0136c572a68b494cdd49ce47c2c0e33582b601632b3a1d15f3cc38b9016001f9015c82014d889e607b89f9d2717488ee3a5d83a713a9fa831ab7e68080b8fb754cefe26136c37abae044d7be8e1a3b8aa3ff230de4579b08bf12020e9ea66a2f282ef549cd7f72d056ded10c2fa21fe339fe56715960a4bacb65525bde1671a0a691f44c0ed582e64d3799c4ee453a4fbb700cc130eef66cc66913d919b6a96bd31efc3d77e4accf3a7c695275188ed2e5a76526e4706bea7df44cf6a36fb9e43d0e37cf5d6e3c5b984062e57ceeb1c5e6a9d0c418a5a83b77c4c99e8799fba27bd884e51d5df3db1562fa0b13cb1051ef5d5269b4215078384fa84cbcdd93cd7e67d166ebfb88eadc77cfab6a09fd1ea8f82f530ecf62d60d176d3bdf4f2eebf57b45b532ba6471fb53312e32c3452ac69c7b0ce227a61e69cac080a0434df311dffabb4af9df6fd81f48814ad8f5363567d421c5466423bf3bdacc05a0032341e2314432f05701cb222c2868894039e6e156ee6872ebc8739a4c45a43db9027d01f9027982014d880843386325d71bf988456fca4e1ec42cda830601c994c5e72917d21e4aa0f724ed1cbe014171f1be66ff80b90203e082cfea48d8bbd73dc4f299c37a26fcfe1286a62d17e6bfd13084a47fbccd302a44770baa03092d7aa3bf8f15281bde3418b5a6f610199a7ca97fc11df8058de81fdc05527047d32e0e4527db10cddaa2e1a190d7dde1987c0501a200df8eea07d61ea0028930e7422451b44295ce91f79de155d6169bd64c0cadae791e59b67544023e5fcde77eb509d6418daa17dba99d0f09c23c7df78d609f4af7c1ad95b01c26edae2080556b8e63ac632d78b87eb57ef23791c2336775ccf12f62dba46b65a5b5c7017068194fd2b7bff11923ac2dba3ba0d7e28c1ed2ef1c5d2069e189c09bc51efb571c63f2891acacd6a327dc810180290f9699541f4b65bdd8935e074f80887d3f6f4c3ecd75a54c95476b26b42f02964c16ae02532433d48fb5b5f779562224d1bc099f51d332c67cecb1e619bcda1aee26011a463952719987f705b12fbbbf34e3989d6b5c5182bddc569fb545de391ef10031bf1b0f673f0ea1a9763f652624852bee8f09dd517250da77dd194f8310086ba52032212ed38e014a9bb3f47d8a16cd463a97
7a443ee02d5548ebb5c518e5a0125c6645f2ad2d52f99aec5c88cf4aba79167cb8f7012386916fe2b863da27d16a7c3c350442ebf9b54a569ccfcfe4f4e64853fd810e6a5b3b3cba9ac8525a260505d12492b99437309f94b91dd68c7658291052e2c4d414f87c1d7b7bde565791fdf99004316f02ef4d7c001a05044b928ccada6036e32565da0b9ac1b51d4a0eb5d702efb781a832c120665aca027befe34f4cf0deb37ef259882c20be1af0efa2ab726e06eb33736ab2f0b34e5b90186f90183881a09a2f1c8cde2c488c2eb098e1a51326d83159c2580884563918244f40000b9011b643c223acabd55c37efc426850758db45eb7a0ccb908d9e2ab6a122d812921618aaf4e30c377ed8c7c5b829846b473702496e87f2fac0a78fe92a7602239414117ba9d42c354b05e5561f234e4fc76ecf8285abc17060e980e1713a3f0ab031a53c6757c972e363485581436b20fcb4aa524281e6765ae59362fe284cb6c9c26e3980cec0a9b2f61d1446e9a1679fd055fca089b838872a26f866cb09ceaa5a57a061440ba3a342807d83a5a83589a7297afba2c456c628954a3daa451cb42207f9de22fd5dad066647b8e8ed43fccd3f335298291601fd8737a2ed69cb89e0573fc8eef594568c236f8f976870f2da93c65f77aeda9ae17d812e16dae936ca069e489d3d820580c636f12164c73795e287db92ddcc73dd6b341408202bda0b8ad8ad3d5218e0e27145286459b952ffce119c42b7b143d3ae68f08991c6198a07bd60b6dd3efcb39d42fbd3b15f2f65f9561ed6106484285f3a9d235d2962c2cb903a9f903a6883c0753f96351f096886eb111ddc0775d1c8308a6ae80881bc16d674ec80000b9033e6cc26ae2edabe8f726535a61e77b09496c76d81407ade4466993d4785c16ae669c39a5f9ee18875389a6004576a39465d66329e18646036b9ff5657ba1ec659bb2acedda2862458a642949d15f2108c9c9a712216e2d9d13077a134a69c64daa48018d835b542cfa7861a12febf7b79023af48f860377d4d8bf99639ba627ae9844ddd982438e2a508b6cb89c87d4b78f31e42f842f62af9cd59a69f4e899720156f7a2adf1d348e9b665481165af600a3f781aceea0589215f06dc022fd28fc6025ff85e3d4b7c25c358f35ed5f5f025eb2b0ec5511634494515a197f3e06f4e8a2fef699f33f58ab71376581b455cbf592e1e657115448db5237d010399045e023d0d69797131720de65ffba81c41037657951db3bd5fcc555b8bf6944a67f1fc0ae9ddecbdbb955743a86d2ca82b6239a47f0d37759cb3bcca9d95d7ad084bd8269d06f6cee9effb2173096ef22875db79714328f2d80beac6cff4b3f8fbde3ea1a1040b6885d86bc92390ed2efa52181d3fcf6b761c0a14b8
417ea3878d311d3690f93258e57848e926364fc0a60dcaa161a1cd9ea4fda657c5e868f59bc6d2ded1e264a100ff752fbc32d30728f13d74f60a1931cf1cd302aec02f4ca94541335c0f0717cda44c966db4c2c1e522794e0cc5a9dd84ed6355f979c4931231225096d3f651aa1970fd8a6de80325a6b7b3362b11eeeb3401df138bf8742bb94fca940ed45f8b4937d1645c98adad12836b19e09b59dd1e4cf020a2d4efeae49aff02a0c92537dfbcd4a560e876d0a3da71a38302efd5986e70a0592c02c4a8e5638869db811e47ce514bbe71acb864580d9f3be29e73f8af1584130a448b85c0a4a790d750a3d67a4f1c3e52b0db1c7ec28b891c66570c894b9955f0914981f28efef48616b004ca747fcdb448d0a1b6d7196e2ca002e17cfe65e7bb08027b95bea17ba0dd5b9a479726b5cd32a0fe24052c2afb163e60733e6ab77f8d1d2f606de15a31a2db1c8b7827434b64f794b808287f612854c7df802822340442cb00b8c508eb8d74a6334da415319557d4a8cb58247a7e65c74ef2238843fd02d24d6a859f02c547fab6e35903f69394659a2b1bb02fb89a613733cce7c4af817f6b8cf2ce38f425fa8b59b3fea76273664b8215d0503198393443c926b578202bda0115d2f3409265aaa2d214d11e19f314193884ce34c3274f4258d5f09a97172fca0418e2cf579d94373b0a81e66636160ad2f1de4597445af60d0ec37e9a97770deb882f880880f511ab07ca9dce1889745de5325aa780e8311fec19424eb7935928d6e5fc275944276ee070e90b9619e8853444835ec58000086428a36f8feba8202bda0d3d221e5abc91d1bf4721d9f51100bdb7e25f4e1b2eb363d200aa1b0c09727bba07688424185824dde9b365f31e258987ffcdbf3c850f9992ed80d0e71e54712ffb902d702f902d382014d88e4400f9aa703b1f98501db23a8d88543ec7b3d868309954b94e59842fa49a842609ce51ec1a4e9f75a00da8e1280b9025a30fadb0cd19a05ca7d20dbd28ffd1ec743d59a1169a730091be383f6c571c51a8514f9ddf9961a588f38bd388786c9e7efc5d0e71ca89e7f24a73201839f40e9378e5305f4174752c6eef07273a2c51009f04350abed1b6dbfff400ac6f790013028b56aa08f5090e4483b7bfd1b08042b8651dfb27520b3167e9b912e37bbefe7f13153571ef8ae23f2034df09ae737e672bd09d896bb01cc035322407ab3ca2a026f1d8d5beab70178c580a650874a57787d92b6f31f7f86ee939bf8fac22b23c6b6666b5e0241fb55dd4d397f1c78fe6da9fc3e66c2e34058e223a4567d259e3e1a3560bae9f5e2e3e7df1b7384b6af9a4155f1eeb61a6bf4b5e149db22109c635cbe9a4266ef48c211fe1236becc472cb7869906e27166f3f017ce75d18
8fa708e037fe1a5729b43892460458478cdaa91af1f9367cd1164204b240212101e631cbd027c814efd1e46368b37041836964dc6a76701c38810f36cc02ae93eddd5ebe83c24527244a55eceec6d47ec8df4b158fd1166a7d0d7bbee043632852ecd8e5aab24d71717a232eae9facb45b534f75103fc57f5cd8f978a362249a16e6b3783443bc5100bd1d8bbbd45144b7c63393f5d8169c4381f645bbbabc899e022d58e7b4293125d6c4d7ef75436b4542618636fb247b48ff823f52f416348fb767f6146c1f443147baeea5c6ca7fdcfe3795e09112224301f87c5667027b74b54dcc0f3c4e149a1e67aa6f8a940e1f2891980a6e565821a1f06d522eee5803650f6c0b8c8f5452804f9c456550cb8f1d4827c7fd1c8fe77b71aca3aef9be16494a4bf7d40b274d28ed9cd92a2169b6de5fdfa3ed1b6ef8318c080a008c406d42212f12e384b8f8bb7bb40d0c4660b67026646436ca589d143edc5a9a055fb6596377274cd6af52d95a127c503c0af5b7df6df59ec493d2bf15cf02bcbb9046102f9045d82014d8822e3c64dba5192b7843cffd35685424e576804831aa2e894b002add3a6fe3cfc260c378a187213b6bac436f3887ce66c50e2840000b903dd35dffee48e5855b9f4e7d47630f215334f242c738b2aaccc6e4a815ad70d29a94bd5fea67cd0cc855835ab9bf81c789806e311f744dfc370960d5246099d70e509571437c3c61e11c2971782d7ebbe3dd231c3025966d5ae37fea256ab601339db76c325884b7939ac8e772ff54c8196d35cb823cd42287ccad89e0f1a8092caae92612bc897cee16c73c18a39a5b1ba5bc5df73beb108cf5c896a420837ff53f6e601052ec017e75d3554c0ada83b7874ded4edab8b1a25e39c56c4666ae2812fe82f65f5f7d423ab3a173261ff29495a5ed0851171d1c261129b2062fffa4fc682cb41394f5ebe335bc2220abe7e950d9afa85f305eac439eec8eba9227352f592804f5b47208c262b220c1eb39d6ef89a92ec3ef051e9cca642658a8d8e55b35e78583d7a6cfc01bc5b9d579a1514c201d34230684e4385a1774f8b5f38b5191682a8b91b536ccd3821ee409028180d0f5eabf6e1e2e3dcbeeae0d92cd83e52ae68842bf781824cb7dc8c1507361d7d03b03bb15f7f7a0a9bf12171e01408f60b35722a5a819d7d9107fcea1b94184160cd9890f1f510207d47752fc27f58729ca8490b81ea720d5fcae71db92a9b140099047f45526d26af5da8bfe3e41beffe14d5d1cbe31bd1e50b9c38b9b393ef4b1b5514050e4a934d9501fc70d9ee3720a22fe18533b420cda21aea8c483e5bd3cb4786d6ce2d0f97d1a653253efd1c0283772e8ae43013dba4990bb6c7d9c7087c0d9b2fd3b79decd9a775989c81b87ccbb1e2d6b
3c4df6dbe1b7e3a147dd8ff6998a0dcbe3f517899f2dbbbc788d5004d2de3d23224268406d02fecb0ba553123528c6b41f6f55aeaf8f32aa767a9f3113ca91d92e2dcf656cdef77f966a6b2cba83340658aa5c26aa0cb8ce54ae3a55b1eaafef66763ff4de971cd6a0b65a680169837dac945b0a7f13864795670922c99dfc6b5a5465e5043ad1b3205e4579cfc0e037f0b4e0a8b22b5d6ddba7d24b31388620d4aba83f84c5a1334261955d52294bd8b56d7175afbae015933ab1e0ef91e8161468f8eaa76a6f7a9bb8c8fc1195b9d8ff5dc4a51ff73a74b0640999bebcecb6036ef676c65e9fa5b1be22872082989c55a789fc4c2252452f786a13c4e868b85fbcd09bab689bb66dfae14c2ea7024647ad97728deed03314b007dbe461c1836e97f928308d39e5afc43ee3ae22ff47fff183553f56711880cc5ef72c5d66b4e2c6f651c57311d48fcc0aec762fae6444a5be11793be04c85ba97450673687734e681a1f3c64699686880d32d4cf87202b49ce13fbc8771fcf30d5593b41ffa61462c64061449b2c0a24ad8a03d280500bc86049bd55a27a05d70b12c7fd700454dbf3869b329a1ffa9994ecc2a6ec9572e3adaa0056c080a013fed42f6ecae05ccdb9bd8dc88ed44579b6a8871118710058f72c29f6db3b8ea03d200c0fb3e4416a51538d2ba41be88cfe830fa74c280e8b4b66cc3fad24ec06" - ); - let raw_batch = hex!( - 
"1bd930e08e94a89daf73710d130fc039db221fa427e3e9d10b5ff602fca4577fc203ad9313f493c51668a017c2a4ed1260401ae0dd8967eb390d13f2fab12f43bdb0cf432a6630bc76a84c50bedb2a48e562bff35eeabe9cc219de13de55412f6692e1708609ce3440ac1909a693fdf68b581342ecf8d480342c3e3b435349a5d903609718170fa9a4702fc7df772fec119dd097c017e8531040192c66d18eaa4261721c01c8932d0e8890ac2be0630cc398f04f556750355a3a608612f9d782f52746c2c5c83c8e01cc0b5afb9b97080505da0ed526076535d4a34650979f8f1f98ddaf306fa58591a92e25a86a1d62a3ba6d6b53be59da78c1b1a3128059e51e7fdef133a3e0979cfbb47040a51c6e684b6320b624ee51f731fd95ddf7fc672367b4bce94f92714dc4ab37394f3b3e612dab56829e8171d3af31a6cf940504421122cf830dfe1783a42dc48c2296849ef352bf18ee96eb5deff308e094b61e61eae5c02c14320345cbf250a6c15f725d6c2b12e8a10c1331f91d4161667dda26ea1f2a7cbdcd1d73070b70c818d9f543b7b3523e02b58f08f6858b951c735820579cf0ca7e4dff854cb2414a29556658374c977897ff125470427dfcfbf5c8bec622fd5b5d9cfcb898b3ea3846440ecdc29a7f99da330597db06d49dfd085d0b56bcee9b1031aacb1d71d7df7509b2cd76ab53620623cc85f880037e10a14e6b55758925f8ae7eac9489aafb831809662dd12013e9e8ebf67fba771c88da3157aec7ad6a4ee554abe967f1ccb486c47592eba5ae33812285bf3f26dd11d232f63c24a5b6e5fb285aa8950dbecc16f501c87665df4d159b307d36d554d54240306bd6ccdeb6eb37648c5c2d6fae684e2fb5608c2acfffebcc595b277d515158a141f2c8f2a005d5ed82e875c9ed3546149042a2dddfd82107d3067825968eb4cbe455b6b2f6ab2da38c3ad83a3a6d87fec0ff797916e6a5220218436a438d6bb44dfe5cba3f7602cbd7fa0ef7d000b9e02b05b4b867b1eef9b76ecbfc2d6f2df9955e4f8ca9d06f563e3991d86e9f194fad8d7c05e413bf68f02c5592696cf28f51aebd5fc6cd1cd76b3543b37f994c17f83b79c7920c01ff10d4d97e35689d65913b4fa0d5748de37963cdb48cd1416d899a3083df547241e17f5f6df8917ccc0c5639912eb99ed8849a2c8140187ee114fd3253b986c3138906dcc2db911e6bdfeb32fd0c4b8346d3e2b876fbe3d2f95e752b71f94c82be7a77b4ae73bebc06d03e8ea40dea94450887ba163826dfcd21038bf7f560db0190165d83809d398eb32f038186ce9b49ecbf2a9dcfe0be406a71f457514a47dac76990fe20c074893a34a8e7f59d4a945e3aa4e16b6c37a28d9a132cee8fbd5c7052ddca49cfe12a4c14e9492f2e6
b480aa70e39e46b481b38c7ec36d24fff714a8464e0aa8c2dc3bacebfb59adc6a17e5377e6fa4e70af286e318b47897ce7e75a65ab445bb64ac6159ab48c1310b641fed5b40c84441a093af75902be5401a3304a3f48740908da9209ee6a66a5442bb3eb344fec8905a7b809c531fc788421da2333a9c3d84a5e0b2c59bc8807796da4f6924da6a3ef92ec94107b8ba4092d1cac44ff621db09c007bc007040006570794ab5289e3a323b98e261151a96b3ea240c0f612015d99996ed87511cfad3d644577ae4ca93a14fb250484781975404938bab804f8cdd4dd288ca384f7430ada7852095dd0b7c04ae9931aab4da57816172e71a85ecab00f5149e9929fbd4dfff8635f54ddd91bb56a86dd60aea8af18dc242026dad7b52f271db63881b39577a15f5b8f357d3ccc8cc6d79665133f571125dd592caa7600dcd7d72b5ba73c0edf74389a8a6e3d4d190b76a559a324d0fe39ea88bc6bc8c3dc30d89145f253b354134b38bdcafa3936aa1eefe10c806c2593502f0dd7cead691dbdf325a7b72da81c7427d2088ad9485332e4fff004237cfe54da30913e7e0f5cebf71691ac1c38731c84d91a233a96424dc976ebed809cc7c01a681f7c26ec078dda8c46066bd2a07ac4df05d18920f47aa113136ce45aa04b9a4732daf0450a88bd175b8086c4efd7992f21b0a0a90e00d3a17a0b46ccfe9dfd9fc901fea75e74d9d127118d0f8832cbee68be4d2c020350d533276cfe5b9d606ffae3e7492ccdb0099475b66c33ba9a1d6f58d8c8de19b8475059e61907a44883ba381ccda9e272b16d797779e4a1b4e3db34def79ba78e8f9ccbf592be4a63f4c9170f2c304ec65a8db539e72e1e5217209b0b38b61027cb82ecd3fc60dafe36cd476cd291f5dc574f818a19ca74d73331e0c3297e25619041b7ba9412255b10df0722463d17eb600aa8c9ffe3f43df2945252cbdf52113dfdb052bb2491299113c3e371b2a035f9b323318f17923f807a394cab6729124845833b794b0454c42c088e119110d767b5456c82fc28a2048925f5dc54765313c632704493126c75f40a499f6408263e61162357d5ff80e37617e80e0aedfcfd0284259d0e2bd644d54ab3166a22630ac06ac802e97f600a73b0e38fcce39189828cf98e1f5c6e8a7dfbf3670ec6498225b00446125276b6cab6004bf4d2e8c1341085b1ac9aa127bd10bb2ed29c7dd74f78baa4061874f24fef9d0adec31b81a46cabe2e860d890edb27b2c7f006a37f29b9b9ed21650ee7fc27f8fb7e16e4cd947bb47d094b26b2def138f04ab29316ed57f12f3a13e988810c045b7e35f1451776031f0524e96d1d4ce2c41a4a35e7e80a127620b2252f27ea3445b0cb1b49c4c33444237a279c20c92086bdc9b0de1e97c1a7a477dc0
cf1efdf3040a09a8d1f3993682dfef3458cbad84470b94a52af59c2ba0f08d80b31954937dbb33cd743a099ddedf31402acc348f83e5bb821d185e14975e2a43e40d45e3da4b70fbf397db46395c95eb9176d70b70b1b4d802551c2b035166a82623a61f45e60b4c18570fb034e7061026002f7e15189b7c2ee30b804ca545894707287ca7996945929b08cd4410fcf7bf28c385be9abcdd0cf576dbf6c402c41a7147f14038c97f3fe8631cba55007db867fca4efbe1ff39f537548ed902ae01bd6a0a236a67c88a661dd930c15f017dce1da3ec5159d0fe4cc9cb3488ca09752bcec884d2adc6fb774eddaefffb1477d80ea9e1ddb0b7075ceabbbbb5ecb904866e0bbf0bf8f905b6f7ca5821b92f1109548fc33650f68a9b67ae20b6b165cd39de17f7691b8bfd70568c7239ffc66765d13b72db4ebf890a915d6abe3b557f70550be6bc96e5642b82b91eb10be8d669691df365fc53820e4cb6517f753510dbb9c51a8b5d38ff436fb0c61cdbfdd3f85f318897a64585a16af22cc782fa05fd7794817ec89270890d388c35c3abc1e667e266cdefe79211fd369a7f504a334a3fecebf3027fb2f0ab1af37090f97dfc1d8116ae99b2ecd742e47e48c399a88a1e1aacfb927ba4be5d9f0fb1789f91b1264d7e0f7edfdf48526c583b823968b28f716feeba8a87508249bfd938d756ec8b2e51f8f2624fc6467a7b764eff1384b306bde754b918a0918c122a7e6f6c1698ef129c99126f8d40a9ed97d1da1ca4c4fb859804441cad11ee84557921aba96371cb0b3a90cb2c0cc76c9b43d5cf16de51d6f43ca89c4017fceb239bdb708bf45e91b68fac6b27b66da9172c4d08a63f6759a8d08c513c1b2a702b1b51e1cd866f5fdcee679ed65dffc276cbe93b380acfec273ec53a664f559d29a46ae713fdbf96b1b23a1546aac5d8b6da6cebb128d61832d8a3b1e0587ebd1328867237ad9d43a4a2de95329d26ebdd455779cd19d4361a5d7fa45afd47068302b55d3efafc6b1e57c9e42af6e2507ba785c554eba19449d5f4c42e5acaf20e9ddc8ed37201c363464cc03d40593ef2fa32f81294d00ecf1862c683fda6ec4891f72a5b5b2b29f0d8c2bb415020f8db1ae7976b0cab93845b08d7a0842d6366e59d73b593b8c5fdf199ff6d6564ece94aadb59fed75951abd39f67a06030f2d34d57223b62667a8fa315cd2a27af7ced30d9ec78e71cb8d675d8d61924db42bb3105556a57775e7472e93e648d78fdbfe536e767a71079e1217faa728fcdd26d8be1cc1bfce84083d5272d543378cd430a096deccffed011e5ff741c92bdfdd4d42a8ad0f907d17490eca3fa52b0dad916189cd4b19161f886746a18b366d8bb1047746282d772670bdad1b0566b789dfe8348993a1eff2a3b03
f51aaf362711afd6b0150ed8ee20b243fea04fd2e1f1eeb556d66b13f18ce72155f52af95cf6bb1c1a879a4cd9106ecbb5a6891c9823c3cb958a4b7652502e6d1258dda66af2136800ac33d739998995ca73ffcb541c37288b5fd898133d2a1de5c020154dfe1603b80775ff375e6cdbd69cc4557afc794acf9336da712626ed13e50fb60d6d7c0d92b10b01762dc96f8a7fd7facc6e090a7442c52e5e90cd3bd0a1359fcf64fe2a77a9acb296c48607a70232b19947b6d8dccb6adbd195c33aa0f9a3df6affa73afc9d96b17dcbd4e0035e005400e022883b79c11a9d3daef71c06223ad5a240021cb3018849dd4ba3b6772f103b332f1faa8ed2ebaac534ba4b46430d18093adca381454c5f59d7ce8c9f4944a84a5f9d598260b784cb284459798cd0b3529f76dc5dcf8507ebea12e2164aa7aacf8317289b02b3708bb25354b4f35f41134214782f6df124f096fa4786c6e6615be1a2a67ac0d8c74a7c5139b2028f074665a56a4fbe42a2b15709b73cd55e5d242d4fb1259d45c3366ad2494da03538c509456ad6beb9cb0c10ac61a163fd1ef3577af4d495141a9e6f2b8fd008c082e8b4592ecf66d411782d17e00c48c7e63980d5584786992749937503d3cc4c249671ccde9dbe9b4c4f9ed1da22e44f427466633541b675646d794894dd0e53223dfe3f0ceba6b969ce04421c876a51348f9022403f767466afedede7607bf8d06c31c8c7ab38661f618a55e9e2fad91ee8b238a3ca1c64616392b0faf61ea8135a5e4b8cff5a0a0008ae58fa407a60ab3748745bfb167713ff5c96bf9847f67f974328cc933d76259899f32c70f5e0b15087641a9fc09962d167cd6a64d5c251d3f7e751924e243c9fd41a475ac5f3bef284470f4510c6f3250fc4ff6827f3c59bcdbfd166e593e386538b0b3c2f0085b5f6e271371206d6a61a2d8f74246f12968c462cf6c842999e6067a9e8a47c1edb89ca69689ab583b397acabed4b22d100b754bebdf8f270c0ba9ac8d33f68609c55f94572c5684fb0578f795b88b926ae7722223bf3f32e4b68be8878e842ef38be46a23e0904688447e70ed3cb93ed194d8d4bfd24b0bccbb39f92a553551bd7a8d77a6d6180b90c61fb3efbf6e6dfb987bf028dc61e4c22c2fc1d714fa7e1fe671925a1de1752c563dab2ac372093a57611b196db489e152e342e49b0dd2d6d84aaf0baf849db17bd993369caa66b74282277f69d18f4b009dcde6cc3305817035a1b104d056507479d53dfae3386b05f6b4688833381c18bcef8a3e6ed70b47d21085c07486b5232a02b5d64f013a0fc6308d874b3fc4ccf44e016b5456efe45efa0df4ab239aae635e4f9c879cda1b78fe69cfba7b93eb4a36af3d20600fc42c0ccec24639dd53d3a2f67f7f22e8d744a
e9917f1cb5819362c38f5b4ed200ba23f4d6dbe5091aaf7ff47ededafcf23421fe16aa42a583d3f8a96eac23faa269f9d001fc00bc003045006cf1a21b65f26a45980910e2222eec2aaa6c248dd1e433ae25f22b186c631ab96577a3c0cd5dcf5bf48162885b91131756ea916258ebdeafe262bf0deef40b0093788e97e864676f127832f5540ea04e0c737edd0324a9b4723a807a70a35705e9e27ff94945c9c47c8c5312e5ce4a0af4b243e210c15223732371cf89b13a957b9a6c44293b0e7ecfc6611b595046bc3e7345bf92428052bd8264db5f2fad4096ba44f9bf62ee1c803e33bb03bfb185b3a966e3c87fcc337331dee6f79ff3afd6d50ad823ee9aed593763b77a88c9ea33d6104fbb98cf0b2d60dd4eb28f4f977b37e29048f01a646df6101aa7d44dd1e29671af77a71d1ef3827d736d1b7f22427e63a957ddcbf65f2d4533461efb760bf8574a8649e87a5bd2db0f50fdd1d89230dbb66dff78740b2bd95dbf78aec6c2e3a89c97c752049126a52a7b37a059246713055139abc5610499a452d2eabe40cf729fb11ed87bff8ec1319f773ce2cb50641b04e6dd745879dd02cc01768061040190c8ab6fd4d1fa6bd1c9e3938c51121514568b61506fbd696f91b12600f0273f3ddabf8d9b573375efde5ead4ffbbb9ac7cb60d524cd7ed46ad5cb84dbcad7795231f0d4e7c05bb30cc31b9e02d4434aece3405f1fa7754a40571982778b5c78af4c6a6d62f0cea4d9bae5f015aa987dedcd31fd22fe7a8370399cdba6d68cae1485de5cc3ab6f04a927da53bd7fefa2ed7f820d4b677a66749f169a0d2d5bef60435edb3d701e139fac5e6ca42951874d563068adc4ae6ca0a633866169afbf8b92f23f37021c301edcc2b57a9126f0df6f9fdde4806bbd2fa3c9d8bea443013a411a3fed267cd4854669e5b710e5d6732a9bd2b8e9d9a522204e491501f2347df956cd008612a4b3b8c5c5326f5ccb1d269e08b1efff02a1074b3e4ece599ff26d2bb2dd6ba42f969b12c68916da13ebe9f9d19bb7590e545a7bf053d8181dafa54117084c1b24111460acf93ac4a85fe695fef00a0a6da53b708c24c601aa0e329b653d4fa11113fca0185d788baab7a647a5ddd6fd6780874fdafe1d1d27dddae0d29c3fb4df510b44bef18a216b908522ae9b6c8d0323222fe732db82d1878279426bc8ecfcbce218a381e96bcdff308be996b67e7889d6894db070fdeec85a919f0f1b8791a50921e6d7d8e943c05057ddac008ffb0c7b20a3905545ca1bbbd94fae6431f5b5618fa953a82db758d7f76e73d231689a5e70930b122fcf4a060df8bfdf47159f7ed9e0b0dcfc27a352785e9d8403dcd092c9db5b749cfd7aacebbfa96934bc24de29a9d022216ab7534c3b15232f5e655ea917
3b20ff8f45c5e91ff4b8d346e4f8c2059d514dca5cc11e066d208f0a4873eb59ddf61f2516ca1be3c7cb2d913b6b1fa8329f028a4d545d751710233e2f65f7426536eaa583e574c80d88ca4dd2f98674e0aa874fa6f75a94e5e3128083df9d5344c3aceb890ff0ccb1b716fc3733c61f149436ac794a863ba875da7afd49c5f8a19b9a68fd3f236ff4e5ee684beb3e4a63fe2604b10f18ef8e72f7eff55fe7e0024267be83743fe57fcc508e9fc177c90fc9a73a3346438ed9e3d5d3af443990a19627a45cf5b01b5cb518c07a27dc8ce246156fcfb5b51e9adf207b4eb1a2933a179270cd30b0c3d986254be9af0f8d4069cbe3416a255eb671d86451895bac7a068119f19c53662bff7fefb5883d6a04cf7082c6d990492ba8782025d03f01e753eaf55e7e65289ba3719db0ec3461231a926ecf6ec6aa8e20eb896ead7a39180f113cd8a9897cc768e80b181c394a897aa248fd4d9f569af259ad9e6e69f02e4fdecfba5d7b3b72d97532a364275e30369d01ef8fccf43f7b94f27e3d7e6293da085e1d0b93dc0e84a3ee0b9e49c2fd2892f70306685aad4d2233ca1e4af8252708466c72c3a43b77dd6e2d0cce45e6407ede7e54e58802929790a1b3ef4743229cd3e136996a35fede076f4df911925cd2e3169dbfe7bbd611154e18f2b39d11d0c9def68e16baa8cfaeb6e8b4b1973169d3aa6c784eed172730a05c4b1f265ae1844edeb266dca67d20a98410de84a531cbf53facd4f3cab9d78f56db51418e1be62f2f4fb76ce1bffcb2e6a3a5a197b89d18f6c7adfdd293bfa66f918ba34fe5a3d97e138161a4dcd2af98afe9b5976e3effd2857ed07bf7809ab577135902703d0e5d081d02ab35a7b1cdb0e9c97509d0e7cf46da7fb775cd3504fb1647dc721fc675ef09925f71df66dc30efb66e7b33d1aefdd21740c769cb4214e07d890b1716ef538c4a5965b77e149b3b72727dd44aab32fa1506956a0fcdc8d7d47ac25d7d67371ac9c9d7d56f93e142d14df7877471492140fa36133b69443c31cf9dcea4ac4fc84fd93593872961d17616cc0467be8eb70460c676bd120cd72b0185e430dfc01f088fc3abd5cd0730708f88a9557e248747ac2197919716ad95fe6401195c745586ef38f5f0c2a24bfdcebd6d1e3b136e5e34ee9c5698c1f19e818d41226e43971614615c9e20f3a125408397e12f50ede77f8786607f6b67cf5ebc4243291bce1d7438d0154e929d38db75a9dfcced5c0949af85cb5cc91d95f5d64697dc21f37b31bc40ca9ab309d23d8fc50e9cca1bccbef27d79de533b2ca5f49ee17bfaf5afab8b5f9b7ca93a831384ee05dc6afb31fd2ce082133615dc36f39c9d9cbbb42e8e3f3e763d2d1f089c9b94f7ab183da49f68eb8a1648833136e4da99b873b4
ffc2327f3a71d00071da308977da2e9cd2b96b7beb424a4c3127b7aaea40c8973fd9cfc3998d967c7c3ae522ee8fc7984955e54fa4c6a76e133ad7ad302b515303cb66282849cd139160ee7414cd878dd24e7bb858520dc50ae28295a32115147c8dc19c0d3e7e04e80a698bb02fb9a527fa79129daab12c97ae65b37851827246d3a0abf3d047a1e03624f6d3f6184650e4e225a8bb6a1120b40ad658fa729e17b8af540a4f5774bc56e9f932bab885d5272c78ccaba460cad5275b0cc97d098cfc1831b8d1cf3123819263cf597f95888194e54633cf6c23331f80a339f1a61af05017b210de405d5e3a5fdbba53d082765ad9c8bb82ef7dfb0ff417987de06c937b84cad437c75b5ef3fa9f0c5089cc20331d0026e0eac9176dca2506452e969731b61071c3ba1495fa089c034d643ba43740528e013008e04a32c920ce8041c026628a2267c648682026ca17e4bb2f9b95668bf716afbc49c8f3c56012bb8a6effd7393116de8692ce1b5fff224f856ff8589823734a5ee7403a8d900ce2854c5a8d60c6ce304964c3cc5b734672d1a19d0d887e33c244837221e52467b5e9036a4d3dd2bea9c69e67e57bec76a463bbc3fe5872894b9d69d1c7df3cf6dcbae55685c5d36724abc930b9368ca69cdcd38ff603a57cd224254e3ebdc453bd327b222b3da635523c7468f8eab0f50fff3225462567208e00c532778c98309d7c87d10af2e4866ba31f0a1a1803cbae792aec7290edac31ff22622f82b21c62b3f497371213f85aaf1733a11fdaf2fe5e7dc3cfd822e26cd1875171a034e2f30edc4cbe26ea0025445921c502e05707b34feb9069bbce9bf05898feff72f5f1e77255f1a3208d298b39e1437c0f0589de017553199314ecdeb8edfb2f131e13ef2b606b35db4af3c9abbddb2da4ec1dcb2efc64f38242748157459b647320d6150842e8df5c109a778f108c61c9303ecaac0c3b69c23d5a404ff7ba27b9b5549897f5b5287af46aa58a248dbf65f2b303d44190bd5d711a1b9ec0f9cd22facea4683ccc910379a9885a4c48ea91c76fb72cfe75fad1ab3d8eb23ad96e39c31aa7293040f78fb9834f3051225163d549a67bb079c3275a4c0a5442526eb74d82d36bd353af051c317c5fde944d5504c6967950f58187197acfc159eb9308dd1c9a26c8cd5acc4c568633c443475aec9ec74136afd513299e425e722a3b00c376a39c957306fc1352eb7c62226a5a34520da4f020eff85997bf208b018795113cb24daca8119d2845dcb0bc681aab967468522acb7acd7526a17dfd4fc2cac819bf477a58dc63fe4cbdb007a035d2812e8a677b2e7946a1819acf5ca664c6a4ffa6579a4ec60910091154d7ca9f90e864d1e9863ad9fc70b43cbc508f3e4dcdfb2cf5fc9eb64cc0ef
fa7b6156a57f97c4302bca139cab59941aed5abf56bfedcab81803d909045a2cf6b9e0f25955e57f5264f631b382c561d4daa5fbf009882e1ef915a0910e76645e0669ba57e5d48dafc10bfad40534523dffb4bdddc029d6334aea481590718f01c01022883bbe7b3a75c8628f3c02ae3e8a53a5afc736198d9e1a92c51753043a293cb26428e921db44d36168611aaffb96e38e6ec8db2801b01cc4d3f0022d3677e8462972a4417f434937b70e45b88c6e3faef3c5442043d0d4b6bab6a0e82f5eae911fd5a9eeebeaa8037af63039508f036608a8cc909cbf586d391ef3eeb0448be00c4c03b93909fccfba0ff6098ced8fac8f7eba830d851821030ea765b73b9151454ab112a9a4823b6ed73f917abb88990397ecfa4d1c2c607b898c1e476b1c72a633e2881142158b30c12594033896670fbc0d78f61b46b370a84025e5b220c6c442834b4a9df12f4b29c55506ccccd04815759b2834d9fb2f39f4557634464424ba1082c30c2bf715c4bed8d918c3cfd633135bc8bea596154740ef606fffdd2593f20e472492f395d703e1055827ec740df862a70605baadd4d184f6637634da6486793c6f240d0ed081637c556a0545297dff3f8a4bc83498023bfe9599fa8f94f1b6dbcc3e0446b5863fd4eabd6bca97df8fb37ed6f65c0fa9356316944b81724f27755a4b05583d59bd9dad2930a1dcd205c81c9611507298b90b42e08b13ed2fdc0fb7c4d397db7413df47df41fca319d0a2ff8966f0206a3bdfa67ad9dc044e00b301699aa8ed0d14f61648ff08635269e0889418ebe7d04fdd4a1e711915770f8d5c5fed19ce15f2e404c51cc354686efc3fe7bf5fa0f03f3a3883142cda47d0c0f37167fe58d0ac94f14d75e2585d3b9823ebc963da575db5f65733b6d35a6938d3b78a11204e8a54d4795d05c739e46fca5c8239d56e29f36d78a1ebba04f918263570cec4dcff2cef06c2da0db3c65acda270420c976d0949843b7cf6bdf0c68354b30a9f6aa588c111b3a64b6d1f57690e3d46621af3139c26dccf16a09f2688c41189243352bfe8e8871e0c0d1a2cf971a8df844627092d80c16e0267c1aa7bc50f97027737c45c9b334f5b02696ac0e822c970dbdc369c2e7343fdc710f89e99ef05c6b82fc84a20f93c96ee951a47379b8e29110138ed75207b41cbfadfbfe586a0211515ffc5d3008a8b8ed6beaecf693f74f435eabf7265af63ec10707a0b2d8cfb733e382e8ef0beabee9596c775db147ffb5d330b3b741bedc412dfe606168ade1b85d34e15a4b2da153215af27d95f83d65dce00171e9c8da8d92fb810a1aa34ca65a91292c1a4892dcc81a8b1966fe2e8f1cd1ab665b646a69bed401ddfc4d3d6f578be09beeb91d81edd4d0ccaf0edfbc573d70ad478cdf4f5
c65c818ed6fb224738cb64f7b80d0f66e8c6fefb6f49c9ab59f0b05c900a1f1a55d51bf49fec5a6a67d162658c4e4f6d2cbade0f96da86afb15bbd8a91e4090ed378a4c31f65e03b53c5a816eb483ec7b6fe36457586228326e551a4ce6bc904c29a499a2cee9e447d318f36fc52e58fbc4cdcc3fddb37101f554b0a4bfb93f047298cd073c583fa0570d0daa821d33a72b8e8afcbea0a12a5cd91517e49f594f0531a07573cb06f08cb895c5c82b6dc8ff951decdfe306b5012c990448bedd4df17502cc002f00231040199c6fe61ff532e7ea01be14300e2bc7ae7c0d236c3f0c09e978f354bada35e719f2ebda965aee8d27148c2efea242ddd7cd41e8c302a2e597d9b3d1b33ec84f07bbcde86ccea2b01591edf17feab8ea9b95744d5a6b186ee2ba42ca92ee95d0164187cabc59c397d202aadd2e2803ec978f8ea376d8ab046d950ef3a4efc2defb35ff0402ec343cf1e3e70ecaad69f75d1c4e03ef951e8b9d3bf785d178ff19ff1432cc14b33808b86c1c39ac9c19c62fad10f41e9ec8ff95f556e4bf127e40627cb7fecde215197b1243fdca58c3ae8542cd874fb542e9f746ca7490edaccdd91bf8f4bff7bf6a7bc40fd28a67364db47f164ba8784e825baaac670ffed2ad9c5d56ae6f9a1cac9c43d28ca3b9fe28bb7465a4767ffa432092ca77985bafdd0a2f5bce2b6472a10a2b0f3cdcb60b14233256547d826b53b010682af15d0e29ce6b5dc0242533fd8f2831fec31d9dcb1f6e67e3eed94ad225c29dc040c00bd0170450062348f259d22c13bd80a59dffa8900e33af85c1012652478e18f0e64815204fd417c4bd0071d79b5e9baf904e20f436e8dfa9dc4af7b2f0f06dd6901fedfb275664190bde61df2c7b7849f0ef697646296fe42a684416bfe2be846ff4449b5cf8ad658f1804d90195c10324cfb071c764ff61355c64e759d1a4e9d631a6d78a760a139737763203600145505bf1a7f04ba4106014fb9104b57a8b44dadfa4a7bc1ded25dc9c252594da3a5f52fd364f29a088e9f451502be292785c15de7a651e3ae2a050e0539c5981c2d3406d5a0331ed451d7988b643bc658d258b4f47506bd02d6fd2e0775bcfa91b368bf51207ebd2d63180cb0f02d5b9f6be1b02aa1a962e41c8f26e2ce9dc15b131b9dd4e547fce08e99b2eb1e56d14e19f697bcec0710c7c60e28b5d9af87d9be14614f7b6c733c2aff9c7fba1f36503ad092daf2607896b06ab01fb6d1a4e4961b9374353ef340b4a65a2feac0792efccb67f2749cd73a60beb76cd304b2cd3e80832835d0cb1debaef54f8a3965a47f0993646ecdeb48cf792ae30a0896e1a1eddaf4f09332c1f352ce4347a8faff316f16850cb0367539f39a022bb34a029b12ef8b6712abb4565570a1d172c2bb
d4b242f818b5af1dd46eaf106009a512b53c6b945b6acba91f1d8fbeaf224dbc904172c3e3bdc4fa648e1a240dcf2a1213529d6be1cad52bba9f74f5515ab08d1158cc3d2e6e6c9ca9a089a223335632c79a62c4977c417c5a48d1f63d6a0245856666571d55f03cbed3d07d6be645b595092b8d7acf7cbfd00889a5427fd546d19f44f4e1d6348670d91fa02e4ccd885f5cd87308c190bceba0642d7fbc975ff0ff58cf78a26133488bc538ff6cad84ebbfdb39997a79a0d99eba01310f9020803132216dd8c4fef0e8307cf10e309d5399dc2bee5d2845cdfd30320b212a214f8d3a33d14f42ef143cd33aec5a41d32d589b0ba5b6d8fc512ca40611e5dafc23ee47d111b6008ca94697177a14e3f0e66ab41f2f94c2e37a3e41717c7ebca9318d26a30d136bfe5da7ff73a7fa637f88d0787968986875d7c5d0d4da839ea1990c1cac315a187c3d3843ea9504a4d4f6a6b5da7cfc3b61b3ee9984bbb9789728e94c3663e2bf5331bd7f703d6f40f424e18d8adc839d2b121f7b4b4d40f0e47ac4b808b1e7e45c0204c2fdb2da3be8b59dad1224aab78ad447d52823c386f976d716dc6c6ca3f3e7e41746afe8e9b01946446b6e2eec7ba94db910febfe1e7fa52ffd6390e7f9c5eb173a4ba590f593df45651dde0ad68e535d8a23c46e3f6a7e855c0fc5d2190b57c9ddd7843eb093e5ff98f052b3b81808d803e9a88d9e5fa48847a3c3d18894ce49637bdf211866f2c71116384c40c82236cf84f82f213bcd5f4df22fb0f5087ebb7d344d33bf3087939388b8ab9ce39e4b6766ee84ae7c812e030bc16bbe58aa5fa7837f36626ef47b1b13872194d585381f3b17b488e3a0fdee45f5f113ada681f9913fa2bca3d70a0e7cedbe8c5dca828f116e9d4d2e7fe9cb25fe2fcda8322869afe254eb254b869d4819688782076bffc273eb9ca69a8b357a0be9682b06f959530a848989722c8a9f2c79d8f07ed80a76a3ca557280b830432de6571ff342b6b3a7f644aa7ab96733de40e5989774fe2e0bbf9370e58d4c6abedd5284b6c9a8f140459f3b5289678947c69769fdb20295a80cf26fce7e8c5b245cf139365aa1b8068f50c54fa1359710bde46fd74efd941ff4ef66345f117b0bed8346dc09dc17a6a64093a686736d4c4ba9547503826011b8fbe3a2cfc7ac3b51bd6a575eebd016edc987e49d435d4c2db9750dde190cfe44e96daa99a58c0c6c3cf24debafaed0610c5e6b5ff575c1c5f711ed8e3e3ba9b260b2febabccdc9f54e6d0f81c8b93fa036aa6cd9eb796ffd49c1a9ca5563bf99f01e90dd8cb3e365fe57131e7bfa15e52fe3f60c1cce049690de1ac72f45d1849ebd53420cb136b071e4ef647eb4b9ab528ad0f9f7c776f3fe42c570bc533e9f79cc793bf4149a78ddf
0f8644bab7724b3ff55b884c4aba7aa6bd601269109fabf9d582e48691530d09f34de8658d8dd12b09755cd0a53886fa6d919cf81f77f52b5b0b9fbaf5d03d6a4266cd695984935e852b7a70cf2565496b84d3372e539a8b068109d44ad090b33d057ca3643380d1cfcbf5b34925e368b91dcf0f5fd92e84e7daf14bb907e6e4909c4959e885e9ba5e769cc476ccd9bddff07446251f9ac93afbf664449d60c7c9b56d041fbe584245c4ec8c7cefaa11f7984049bdccfac10afc31799d781b91f7080d37d443819291db27ad7cb70241a7da327ce5e22d76184a4e08bea89246c5b723374c084da38764edf91346aed329eda99668a889349467f752567ee00a5542efbbe2e158744e4e49abefb078a15efdfa1897f43085da7e1295e17ea626789af9b83d13c23faae98c6607da3e521fcf7c36aefe7d9b947b8cd6fc5842c8de3ed200fae36555fc510d0af47ac08a5c06720884a4c8ab90139562dbe6359c1926b4d5c93403b5021b615245b7e68e47145c9172e3ac342bd54a17fcdac155cfd933b51d48e5f46bfa8b11bf8165586ed2ef43740e119efb1e31ff35e828469456b8ee8a9171d8f550785312c3441588a9450b2832e08d803c13466e342a435a862a150c6dc29e0104f012bed29717adcd3c4992256bababd43c4e3991f7c5725dd1b2a486d2ccdcd6ad948f6f53da4ebcd66ce794f2abd5d363b40cf21607475c28680caff3be00ce94d4d9a2a1fb430cccf8154ea335feee4b89fa6839ea9125e97f068899de6f916004e229ae7f9b32b009af9398a83ea0912a27b379202750ce4f5209afb9da6331e6172a4c286fe0cba6e881758423c02a4bc99c363cab1ab9719fcbe7e37aef692c5ba828ae67a208bf5d5095c06be00e7b786da7d31f4ec72e8c69708c03a55c54a84e4b9bf706418629a62ea41a6c4ab7c858459ee01e940c9c99301d45a3c16b5c980fd751d65361bf64f20f9fa5cc207e998e46236a65d393b22d15ed8e388eb086104cecbc64b3aa15e025f0fdfafc889a3ab919923e28afc60ea724e405881d8096fbc26ec3d9eacc6e8e5b59b7bd10806e2b5a45af5059153df9718d2e85322809dbed51e92a096b82f27e8ff418400c314a9bed5e449de8c1b9493c4cadb7d7574c3a1a94bfa5c47750bad7c9d7dc47be8d683b892de0d9882d6a414f36bfec308742689333c6d07f88c8494a9fd52d0f5094c6ebb230b8ebc4cc9797e1d64a21a6db37130018abf696f28f30713fb7c3a55be8bd80cee89e9ed5295e804d2e48b10729b759d3cecee1d6d11b987b7d5678b6bdf5cb6113adcb7eec8af5362e45a1af5664bd85cc90d627e62f1f44ec932b74766c1edbadadc5de3fdf59c05bfbac44307ef94bae54846519b4987fb4c5725d593a5d84e635a9
12b5203b130482d897b8001a12a1fa4323c31bc30f83ce9caa3e5b6802130c69d633fe389c8c6e2d9b110b5869b54a9c9df7327d9f3b8fb46bd0f4c9bf299e5ee4b181ece08d6e978836aea653cbc22ced393d749e956ae2775e877dd87e9848c681e4af9c29f0ebc6152822318c8b32bdd3dd2388a0196fca2c6a176c37c645686ddd5e359db948bf1fe122958c68eca414f5a3c2d5ab4f896ce4db22d09cf540f6ce296726f5ec1e63203f79238fe75a7468ee51ffff67c4d103129a9d9c97e8dd8b8d0b52b6afdefbf1bde912f3cf42b7bae14dbb98d2208293bb0061192c12d525e1e84f0a83df6778c3f48d3c3bc0ceb68a374dc2c80028267c73fbddb09ff085ce5ec58a596f4058a3579ccc5af4e2717e1e6381d7cbc8accb65d85e1f787401086e11628b16e58f9141362dabbc566866d906d813632928ea551b39217239510ed37eb745e378f69fb0796b442ba11e8fe7ac3c0c72dfd737961a61ba36ba6c94e1873e00b8c3108a00ca6dc1b55ee524f6e0f17fa9ad7899050d1fd01134658749cda00ac9d2ffb147aa745e18dc677c36eeb1ae6b903071c3aaaed860ba4c06f706f7deec7eb6977de1f2d78b1df7efbae4acdec1ec35833f55321d4601995a15271f1b32c60662a428fbb3ae799d827136e0ff3496a6bc8251d55430631cfe500511787776894147330030fb47cd62b3cc73104d4b759ace3fd2cd2a936c3e65ff71aa2012bfaf2d7c47bb33d2885a6cf1b75504d4bd007fc59c947270c49fe53976cce349ef177c7d17d209abfb4b1cb7064cbbdb711e19f5194bd0402ef97e6e3210096b51fefc8985babbfb642d0c76373e1a23a8690662f5767d8c67e3794ed98cdfae16981aa5a008fd3fc8b41dec0642602d37576d01c2b87dce2eb5575143429ceaf6ad2fbdd709012a937280d35fb35e20ef67498ff72fcac92d25de3213944d550963c9696891285b439efe77376f2b9c8b5fac954998475745cbc76b3898f9eb09fe33be0b7619e6ef6379c41c0bc04fb0f15c988426fd51853be56025c50452791a6e3341fc5a558223d3e2aba49f5e3ceeedeffded3ed55e615118dba1fe14c4fa120a5f6ffb1dfe0794802a11b041b4d83fd90726e285cb771101e91b9dd180d42f30293e0df4f8952f1c5cda633136f1e30c803653dd90683f5dc722be491434fdd504dff1c917432e6e04065c1044b6b38d1d61b57f4eded135d7de22cce4eee11cd1e20e7f27536a75c291269e3c0a229a428a701de5d562f79c98bd87622beb7904f17119ec6ca8918ce4fca462efd6541cf982dd3a411f920068679b346efb363af976421b78dad8e2104a0e6b0cdb7e79daf967b66e68676044c36ee2e350f6f39f5120509e004ae7cd96542fef78aaafb64ddae778f8117a19
459f6e638a969c3e166d8ce1bbab439a834621dc41f1f0c4e9fef18cb6d2bef30852a499277ff3fea4c5f79bfd894354d567c17b38e2e1db4874cc61e28ec951a92567d3eda5a7e299fb84edb235b9785e066f2ae4d483794dff059f9eab82433676d8db696ca98a849d61271c2eeebe6bea3410723ab20c550b62e6d7405523763832d5015bac29e950cb0b96809b41729537b627496f10cdeea00fadfab49d15e4843cd6512e2abbcca9e2abb631306080cf3121efe2ba87fb9972bc28965e59cfd9d34e3b9b275b43e793524daa5774360881a31f029181ea4a1d6788a2c1452898c89789b46ec6a8beab6d9aac3193c75a1b6f25bf5a6dfbc80e650840aec9521c6e739094e0e4398cf377897bc14d865bb0ffec2e7d67cb0c504a38c5cb98d2c39a8303b5b13eb7290c8bdf7d78d59bc1e4a2918eee0a5f4c28c1b567aa8d9f2fc7257f94148266465971e0946e55cf8f78b9c49fe2ff6dbb837d93ff6457d41f1af321c8b513173a91c6624eada68e8b91035e47133f91eed223fd86564acb10f1718adf5bbc81cce6cb2d7acd4f3c1b2f334b7bdda2a289dfe1008f6e702dbcf3fdb46d39d3d71e3f10bd2be6d15bf30f15da1f49e98191ed705e321c2e428e8cdfbe2f6ea9a714c2544c7b19f61e8e54af468318a3653a2b5d4e770" - ); - - let mut compressor = BrotliCompressor::new(BrotliLevel::Brotli10); - compressor.write(&raw_batch_decompressed).unwrap(); - compressor.close().unwrap(); - let compressed = compressor.get_compressed(); - assert_eq!(compressed, raw_batch); - } - - #[test] - fn test_brotli_roundtrip() { - let raw_batch_decompressed = hex!( - 
"b930d700f930d3a0a8d01076e1235e0c33674a449c13fc37ee57f9ea065bf41af3aa03d5981f1432833bd0b0a0652a19cd927ae4a22e8f8069385002252d78e1c3cc91a59ac188708b7074449184766cbcf3f93085b903ee02f903ea82014d884062b70d4e215ee885019d47a37c8543ae9f382a8310c97b9451294f5cd6e52c003ecfb412ca8b42705c618d29883782dace9d900000b903690d669b0cd98174ac3b57393839029ac04ad36454109851443b4f6580664fe06766a7dea5b1ed31e14e7c11aa738eecb86e979f874873cd3d7ca9481681b4b17d134316e7bbe828ef69339ef85c6f0e9dcdfe1dc85309effb487569383d5464b519bdc1c85fffc72bfe93d4081a3e1b75e5dd39f95a91df0997a22d8fbdeca57a8b35b4f0e277ec8502cc55581a94eec1d1000b2921b4d7c3985ace205713641d03c3975e4049e13b3d2c5926b224684e38beb3b8d2e5d4060b109aafc3f2d144783aadf6086aa1d5a931d21282711484a9c0537bd4981fc222444f2c057211708e70dc4223063cbf39e4af0b795d3ec0dfba32391611d151145c1b6bb33d53ce2bb7983bd7b6c1516f7a1a719fd876f4b20910aba76c16dbfc57199a60e2ab938bc285613c3802c17aa03cb9654f5142d607bac01293c9aaf4e58b422c543f7e5e458af0b7cf57f33109558bef71e8b5506da723d996eb8e2c265b1cae43dba571d07d3ea1bcfdcb73089597e3744344e049bf21b4244d5aff60d559010b69a6335f4bb21178de504f50808204da652c7767dbf11f2a34b4fb710e6df9ad8810aa75dcdb2c99dfe9bf898912817e490b4982d44fe09f8adb43e0da2a0c824a9069ce8cc36b5fb0074c2db895ee92d92fa6b7efdf5c97ae05ae27556bc07ddc9d9d6261a53e3a10c350c3b1da26b27b345768e17da7dabfe6e30e019c88ef4a0e8df840bbd3fbbb639edf775449d8be7510cc811564789b861372fe97f7b5b1389f20c9872517634e9225669ee80cf077f9c8606cdbad53819a875ecd9f7b6d778c1dc302ca19ae67ffb054eb99206fc90eacbac8177712d0b4c72700df3f5e2c88fb4e9c8284cefa66390a78605ad9320aee34f72f3cb263020204393d9359a65f48b0e6e942b016a1f2c5bd6579f0a65997635ab15fa38db76ae8a5d3be516441499819bfaf730ebaec389db082e41443660dcc6280315154888b9e726b971237fae5e06b01958aac081398c814e446a003039dd090c0efa5d39735ed0ab46c7b4e4c960ae414b045fd19117089e65aaf3779cc9045d6e62538b1b75c2689d23ba3c08ceed46d4fdf9b969b34a1903ebd96a3a6b091842480e638b095c1ec11bb5c599668ea1b0a5a714d13462edb39dfd992b569897ac8f45c587182770631c262fc459afa6f23d5670eee2aac2
ddaa89314607d30c6bfd408980c082749ad6b48a5310ac75b880cc080a00b5d23a075615f50233ce278d11b7b0ba0ad6a01486dbf31c54aae096f0f066aa02d9feeb4771b5a37d1247a4cc58a64d392f3916b5602d9d41d97b52b391ffd47b9011801f9011482014d88a793ab3f17510b308821f5d9030532aae9831708c1940b6f262f685c8d0ff7dfc9ba9686d8f75b78923c80b89f7644852b70713a788b69f191c54ec8368a7f2675623b2369f9078516605d0d4550ff9f5b92b9da2147fa3a24cc17605f30cccedc5bacafb2bb86e2640db6654a514b8eb13d3c3ab6b5e344498de0c709dd9bef58a8af16d3efcd2c0b2cb69d6089d0af8d42baab434dea885253e42050aeec01f233e64289b2e894c680fbab4f25a653745dbd89edb19d97e35bdd4293794c69503b0e60ed9cffe7e9ab3cbbc080a0dd08ebab0802fc61ccf26c357b638a55cbcd6b366251c17e2fa52d328d9d59e5a027d334772553048d6b76fc39ddee5f85363810c235219356cb4c5c3dbf9661d5b90298f9029588e383f18817bb0d1c882c58aa6b12de88f3830a7831945c1c1314ed944220436fad3742023cba2a71c4a2886124fee993bc0000b90219fb039c014cd76a327bb9b3f59e8176f377249385e67cb1681f8eacff1dee5a5a949511438ce370f8ad6618f3af81cb1f775a0b365546dd7791b0ad71fb1f2f29154265a8175b7e518580732a5a46dae3752e1234ff779d4eb614af2c66beec964181ecd0cfd1640bb2ca2b860649c41930a60de0cc754884a780488f05d1d5833a381670b368c85bf08d6650e26122f6714056382a006fcd5f9c97f55a98d68dd9293bb1be24823eaa8cb007481dc78a7a670123976e7b6e81fc223f42637759a0c933b73ba89a1d902c0874fedeb0a97dfab298972a18378539c2894ca6df9c0a423c2e98df4c133e5e808809849785b069e323640bf93d4b82a0917aaea8fda9a3072ab9a00a4b8b9b7b3a3eb326e54231d0f6a064cdf4a1fc06c961e5087359c029b13e229fb477d6651bad52c75e503ac45002a803a7457488966cc16bbc9be5c1c9a797d0377710c028e4f05a6cb929cc1fd4018912929252e04e107ffbcbd4c81ba01ab4b11faa90be0f9f9a6a22c87257e4a2aa8283e6f71d7b9e03b5308b16525c4d79705bb0906be0e947e8075ac6ce2235356aa0a66bec39e918e47a6220b322e326bf8fd65e47778e14074c47cb62b7ef8ef956c996097d2919df7aac8ea2ed69c1fd9f1d96b6b82b411c524cacec0f4a4269821fd6766d24954b8870fb1d85f5cda0528ae18419915a8b30b25baf6a162978a4bec86009cece83017d50667a202b3fad18f8ed8b5140c97fa74e91be608fdb788202bea05f469660e363ec580825d1e2bf753c01db044279f
862720a27831744b91494f5a050fa7445e0e6156dfdb712a647ef73a2dd35b73d5cc988430c831352d4ac7e8bb90458f9045588a106e4c16d06833a881973c4c642fba1bb83068f2294050c84206ba9d32d93d144884644e5bd36fc92d0883782dace9d900000b903d9b303f8efb68766822d7eea21ca4b7c5dd79dce832c4893247f6784fe47cd7a18caea7b5b4d8bdf02da0276aca185add01fa2d16c2f1188ff7cbf6fb8c6308999037b2b92d725094d8faed86f0b1a45b55de4f36dbb71dcbf4be12fe624077213e0c170afbbbb546a343ac3f2a1333a7a7a7db7be46640a73d61b3aabc805b022be416198d809b62f99d26cf4a3bf555d40686f4b8970ec15386462bec5f2b728de0da047d6b3f3ea51f571507f32f047322fa204f0c5697cbb56b4b5c7792acaa40f02926651fa715a40e1f212c78cd4ecca285ada2c8cbb6e5dcfa3823725b44e29aacbeb9b6224f90fbc895a5980d63da46688832e9776b0666e90deacbcf8a4c559b625cf004cd04c686aaf9d7d6e2d394f5d36311f7afdcec5033daccc63c0540935f59514c9aa8ac3c2aeff48f624f2dbd38062fcd046651e92fc7ffce4dd914bb0dae704e5b26a8b73b3baef8ea022881e15666fada8e43fd621793713cb8c867775b9cdcf3b066582fc9baa705a0e1dc61a4b33b1b33ad3ba3bd0cc41b5850cadc04654dec222178709910209c6ac3db9054ef91facae2d729d7ee54898a18411b6d20d599a3de14d5375e5a9c90f3bce78479cb0f20afca895e40b576940e063587f451a8828ec2dd4a8538b4bebc39f72a6c54e379a07b7d5e0c02ccd57dbff13729bbfe5e78498c01cea12e830944fd0a123b7383fdcda97d8d9cc831e542ab6d9b36774d540b180c2bd52d46ca7f0e17d400cf3cd559b1b4e51ba93cd954777ba27a9f0327eb6c68aafe74fabca4610210db7498aecffd3164c5eef8cede655e1b42d5f54f5a52b4f5fe9698a4463f30f20693263d41074d0403a737c4d4986f0ee7fee828fb7072a80603613fb4d6c219dfa47adad433af6b437dd199f3bbc651487718b2e6d42728034c242672a98a9f36fab6d4162f4e8eb7bf2a9868cead8ad657a67f0aa50286113db972936260323d7b11353328151e80691d551bbe1f7f11774e15db4f175aeac5b91668a712c3c2399a977abb9fd9c2b53c5ba68f2c0ea353028416b36a47028f78918e2b205bf9b3bce6f1a08bd4448abc3f12a240482b4be98dcb77c74fff47e92d833735e802465e50b79d51de5a7fe45a95b650b051c61a529d5f51cd0c603a2de67a3123be1c52263e1c9167765b13ad1e01cfb27531c9203f39e8913fe0cab9d8c14b17bad0100b76c41d41d68ae3b7aeef5f6af4f66d113fd29eb9c4bf994f04decad13880d9d1eb3865a3
0e2540e86923b36369c121ef2a6a43a618aa4b15560fa806601a85be361468bd09c6dca39ad7ec44809adc0907dd0458177343a7c23330605b802f3ffd3ae61b3be952ca2effae8222e9ed0b6ea4240728a7800e4882efa7dd1ef8202bea05db690cab7dc8c52c2c375428c0aa9ead02bf44e2b1f8ee06e1cf7af25eecc13a07d967fb12e1f0073adac46e0676a6006b30d780e6a1387afec76cbd1f07016e3b9012401f9012082014d88df6f092495b7f4148840c5b5541d013c63830408e194aef36f2041e560a641af89e0ba2799ea630a9592881bc16d674ec80000b8a3afb9380f9228224c1aa59eab115ed4172b471aa2ee11b3d4ac93f4b6a33518007a798170801f4f582e188b489005d8f108e2a4acd6f7ac28852580e73b6a1590ea1af1443666f1d14affb0a9d0655a5c57cd4190b2a00c07276054641ee4204ed8a806ded2b3aaa7453c24e442992434d060b51d2255c1cc2a002264b5dadb32057f4a5d52626e0ff453e2f05f1e0d8294614916c00110853462d51d9ab7e03b7019c6c001a06028ddc42f0d3e1cd6cb1ed7377d518480626d56c80e6d15eacd42ecf2f30957a03f6e1098b300b6329997bacc5e667eeed72a38f6c4e1db7199483bc9a18267d8b90222f9021f88c0988653bce0e07388fbc67f04e5c6772e8311bd5c94eeecd6da1ee441093ef70d8c86a26f4dc4da11588853444835ec580000b901a349e745c1cca19957c43f15309935f7bf49547884332dfe6d5b8b9d61542dd88ecc61187fda813a7f700ca96e8847a33bf8552690d91ec8e8fa70c21b380c9c681b54e859add36c3c19e7fda3075ec1a3cf47ed39c89241bb73f206d7497f93c47db9a85be7135948e19809c195ccd4c9a379ed464bf77ec562e360c52b9225f103d323364a72e8a725ad2b34a355928acc6aa563b67d120ddf54cf68f710624499ddeb30b0c94b8722ef2d641ae49f17f4a916d54350ec483ec5bcfd9748e0a228c3e73cee9ea248ad85060ac51b3e6834e1f771f725a466affa28453ad3726d794caab223fa76c8b994ac5d3a1e8ee830e4fadfe0786174364af3109c04d7d607aca17933c4366d44d9c5376ca34febaaa612707eec4e2fc5c6b1668b3450340938d17e5552df96ae84a905d069f9e3455bccab30640a0720f9b4598d8f82ebd19bd32b7e82165303123a0ed80c57375174c08d32ad3ae354251c97316b2977f3a2fdf2dba1c595093c88275badc54e3aad65f77c56f55d04b1e6d668406058ea01da2364fc207659b028d9c55371c776f732e63255dd177b95f857e3cbdb4c66fabd8202bda060830662664d96755362addcc0908287c99c60761cf9c7a613058894eab6e599a059cd2461d4a89458dc68adf287fee71a783dab0aaa05587a21b4aba1c
a4f5efeb9017801f9017482014d88d15c09b7ee8f9562880ae58585f383aacc831e72f6808853444835ec580000b9010a2e818d2c4fa7a974f5c3acf3c0f9439f4c83721b2bb9df4fa290c7fa57bc1f9f77e4b80866845a8bbbf8030b707b1f07a54a0ab901188eb2e1262a45618a08517f943cb032eeec926e4343d5d3089c145da1d53128ae901ce91a813c205c615bc1ce9b8658a9da4c2d258fe36f6ffb6289df910566386dd1a9f73b44053bb64523d8faf7b9055c592695fc426c360479c1e2d1f68ca5c7965dd20b6879989606cea7c0db28f27ead4a591ee264f755b7358146586c6a1a8530ec463dd754f100fac603ec3360c0440874c12bb179c43a23e40957bd446f2573af413f3314e9f0668af2491de96156a9bf35bc469d51935305f4df051580b84e98ec8395fbd42fc0c3f3e7410ac4719af4c080a09a774db7e3a26966edb91c1f7956a091425044ead1589f435c8d04aac9533764a04325d5543464929773cc6ac555f5ce1830c997f4d26f2dad5a7e056db6f0a2e6b9032d02f9032982014d88828a67bc288355d78498c2cc318542aa1a60df8305fbb6808853444835ec580000b902bd082cb3f3fa41ebf06fbb17afeed9ccdcf3d2999e2fdd1e1171e0b1549c06de17dffc4ee7785232184a698311c7487fdf090e34b9954a41affc0d0ad44104f70750f6a896b1b2b5ff1024de66ba877c5494e67735cdfd45f9ec0df1c198b357b60e4d840abaa72c5667074c43bfa5e1f07b5970f018820db6fc2bf84341cd024cefe455c92426f876e51aec0fedded8d4aa4003aaf6970c48d898d8d82a8411990e73c8ec792a2cc4a129e526d0fa34a54c37ac13ecf4e3c597304cdbd327704fc97f2ba0b110afee78da5c3f46d3354bd20f56cb91b7ba8d302422428082748faf8b4828ba925ab1a02ba695e686da4d1e759b6456b0388ac8fd769f3b726332be36d3153ebee040b5d822fe62d73b629a6251c8e49a988cdfe599762759df03c9100db5f7a87ce7102ddd21831e0736924f230ffe6aaf6b012423e351627e118f2bc12736a3694b5468858ec6310017b10de24fe75ff0abc060b1e60271dc5274b4bbf0b755a0a617bc23f57ee2286c805086d5824ca4bb6297545c5c1ccaf03be03b7df33c953ddb183730313f09c88392e4bdf688f1d2b730318cc9b148e488c2f1e383505a383672755a221ee7dffec5a4f77e7efe66043d686a126480ea01a8ef0f72f9a5799e03e863a85b7aa56c88b7575d6ebb9df809a240969d3a2b2e086e742130e38cfe7870db79bbd281849912fa611e04b8dd0dea9b7da5d16a66969e54ab9def159b9c1d351d719a93821c40ad6c6014644c5f77374cbd486d6a7cfe75d7d849ce240ac86a1c0843aab27fba4d317c725eb1017
52803ea67d3e12b784bb424eee6f766e33d6664ca113af63c54ba27b8a8e904c572dc3fd09848cca3499c403a1c601db77a7f36d244024ceacfd9d6ae494b7e7e0f92fa5f83458d5da139eb127709e3dd75c88fd5f75244e15f1bb8cdbd3056bfa56139442c0bacbf3263f29ef34946e928b9a4f1c085e5df3b09f31c6e87397bd939c001a08b9ac3bc299eff8eedc51ed3ff077e49da6fb145a0c495f430964581fd4d230ba05fef2837a800e231a3178226f59a981d2c4bcebc4b4cfba9680371da1e2c1a61b9042bf904288821c649ab1ae8ea668896d6c78054ad7a6583121a8994e3294b628e98892fc56ae3fcbce852265aa657e7884563918244f40000b903ac0177c66fecad5135344e89f45ec7e083130a3e5eab1abb75bab0aa357cf044c0582542047a3f9985d3439a6f850466061142af44a9208656e278b7ad1bd0e03539cc019d6ebf8758bde3e0489ba540c523f178a0b055c1fedc3627fee427467ab67545c154106bb9e0c12a7120c175d66f9e3eb9183ae5c7640d4cb4bd3dc94c7b4e0c9fe70e692c3fd027e0ebb46bb32b73a269037a76731a9f114343ea0584c3f7e9cb4530d086609b59ab6b72e7dc6c2c0c95699091e06a33af5ba200a168ef483fe11056330e84da4f2a59db72d5d697d262b9565fe81a738a48d24a9f1c8c49a671101bb7db5eb64deb454a117eb00f4ccc31bc93c061e975ab6d375967544a2a06ff8b9d59bfe1ecb1dc47d5536c645d764028c5de77f3f34d6c7999785b70b187d9ec4631e83cc69499a4ff8ace98a6f17b77f648ab7a07d5ee0558a8efc19d4601573156a0264d2e6574e867c1eca423eac1fdbfe0967bb8f02524cc2d9933141acf619ffe99483305fbdd6913f1e1feb78a17fc6b81c705c81eb08d5602b097ddec64f6c334509caeed7525e3e34845b21e56e4424aa9609f4df8bb13f31c5448b6bdede84d9a9aeba9fcc38a3c8eb1f3f31b80918e045266c7d69b252c86f8b5711b2cf7136e2c3d86d1301608c7c16655c3ffe6d04014dfd55a9563c2a307525088fd017486ffeaeed45873013a7940a7a91442b975065c765c32546aee9b001ba78d8563e039c8edc24a92f9f457ae28172eb29e16cc588d52c8e75a565aad1a8f9d6d341189a24718c26c19a83c6cfe1bbec2f4b878759a7dbeb4ffc0568b902b1dfb18af00c7014f2822965ddfb56d7aec508822531834ad2c869affba1f95bf3dfdf1d1dd1c2994d904b9c5133900962c8137d7fce9f0b9a7d0474dff9173edbcefb4bf355539dfa791241031e90770c8f09af595eb1aa0d083bac4fb9b929ad7e23c0fc8d3ecc7458a0790929cf7588cc255916a6c16811f09d0c972b294dee6e1f739c5e9d3eab8016b565c8570e41bcddeef2dfbbf95910ae6a46a2
834919742ec599b9ed204d1f86ce6baa534039ed308d8be0d289824303deb54af5f9f50d88807134b8f42485cec121432e58b83c8aecb32fc62623b06c39c3f1e0e921b1bb880d2eb017578e5f33a25a335a813f02259e1b12b8a76a90a65d015bb214032a095cd8918b78003d310a06a246ac95c126188911bda8a6623407c0dad308e25a438f78c7409267b729413b7d248a6a88cd64c73118999f00981aa4f6b639e4252d39b1706c686c7763ae9c41aea7b46fdd48bc490502ae876175e5aff8361ccc530ad8202bea0b0209fabc8a5c0e2a5bd08e9a6b532d51670f41513cf007781f27e49b070ccdba0795755f4fe231840196d847d100e7cf1e5650ae172890c469428269cb105c16cb9031ef9031b882565c357c3279f0c88e90114422a470a4682e988808829a2241af62c0000b902b424fb91666edaa16addea67f72c9e0bc7a8053bda59776ede2a0ec3f7c78ffac0eee97ff259f92b21378193aeeadd0253b08897a14f10ab537db63202a4c9f78eb4b399d55c5a256a8414f58f45b109e6228a75ed1eb09627f44b56eb539c334df412b30ee6f4ea39a04aa671aee9e7157b9cb69aad4ab1d9d75c6d90f3488342b29bb59c97ecfd2bec4f991b095038b9e20eeb591b641f64e32e5020130f8a8daf7c51caf93ca460a4e60132835119f99d0484529cf541ab9f922bf15a782521a0f6739c1edb8d4bc26a07e63790087b4c098e4df74534340bf7815039326d1bdcafa53932deeaff03a31e97c6733cc702cdd42be18e4716dd0d014f3e916b0cee3a16bd52cf717f5efb59fb7e41c8e4c0d7eee8ba92ee5b293b25612ee9a3b0043664e918a2aa2b602accd357c8f22f382b16f637b57f2fedb7d8f66172f22e67cc04f230e28ec96b928f449fba63b7862bc3102181d6c7bf063d9376363b8be8200169aa88c46732c5ab1e19dcbd8abeb34f1e1cbc632484d9864e630c4567c0f04a2bf5895d3cafae1b0e70e4c1ea28d4d9578a82611f09ddb22c3c4440e8236be2bf9cecd3fa64b19930af8664d78d6f10aa9c913be537bf2b539e3a9042d5744eb3d1bbc16d98564488a51ba45edb2713b466beac560789c4eda3c0961bab002b95eba9f512108dee2e39a8759c04b18a923f2f2aab2e1ca30ec7361b25ae71923027c950c089469820a4ec3ec60529f1509b92ef04fb7fac70f25d3e5ea5c6a28226fe19317bd4d0f42085884020a2b22dcb0ed8e5600ac969b4f910e54f617597a84b05774776d694ba38ccd3d1055a7245334cddb1ca20d7e001285a57001d03b2fc1ff893ab044612dba9b311247528d7490a9a7f3e7c3ed8531844d3b829de3604e8546ee8d4c3d7a308d32035159aecfa20ae4660e6dc94b6a155aa78150a01fb0e6c48b660a0f051ab59accaf45
08202bda080d51bfef036fd4c4ebe7151b2755d6606122e565323878701113b84fc86548fa06fb34b02deb66359ae8095d3c339673ab2a8b138fcf9aed2d4276c8a16435a60b88801f88582014d88bbd39acc70c3229d884ec80fa5565439d283119a84942d89ae04c33fcbd75e3c6c43b826b266625b854f883782dace9d9000008911d1f14d3a721904f1c001a046bf61e70c69943c277ef7d09ce5e779a10e3671cfec81423e0f951254dfaad2a012fa75748afaa79673d94a17d35666009001775a2b868b9b839c77065649bbebb90143f9014088e1cba06e2ce482dc8804b98caf86fcf0898305c61980880de0b6b3a7640000b8d9854e530ac567b7d29eedd91690a0d2397591c6a1b1f5068bc292b740f6aa5d38003a933c0560971d4701b31d537fb7c1ff68c40ef07221089f37671b101309000e0eccbc42284732aa002f2cb3197def9947c2b2fe47d3fea2efc71b1f3cd681082d043dbc1471a56a5d0a5c757b8c115277a2af2e044e56e5e3c2cf8756dbe51a347096a4ead46fe53f4c03fc100fe0009f6b2fd6ade28fc89230602e9221962f4512740857b87f415f134a224c5149e374fe22f3048f0620f1bddbc9acdc268a5de1296d265bac65fc2650b3de55e6bcbc26bc4d01dbf7548202bda03e35d4429ee24e44134f7f51b32fb69691a16c60a0347d9283a8e593d5a095baa01c590af4c1fcd3aca728bb5aaf03f48aca22c756a87607b4153a5ac6be59ebb5b9029002f9028c82014d88aab881c6fe3d0b7484b0da2b368542c231bfe483115994808829a2241af62c0000b90220a8317aae8cca53d039d79f09934b9c5d0b07bf13ceeffacf1011fda22a85505eb7c717168c18d8fb230a7a3f166a4e93326fa82884ad3093b5e07b4edee095d98bb92f357fd4a98201be26960d4253da6fcd09874b364595a47b95d2b50f8cd45921931469a302be9699779775b59f27deea2aaae41a010a47b825a46103b7d355f1c154b3422b4fbe4e62c71c5b6b98b627beb82014ad990bda2b6c06ddd237543b3652c7a029928153a8cec540311406260fd3a55cc5788610321d66c29f168ffe5d93f92378359231ff89492db2bd2e90a4d9c28263d75b77842584d253fd7316e61c27f71771ac7e7a3c8ae6921ff2280c459c36348e0a098fe8da94c1546c15db7968d6b2821b24edced45a7ca8f2bfb2b9bb7a497b950bdaaf771bd777e918887c0d2d6ad3b72c168228f49fae155862e0baef308ace6952606a660beee10da3fd2d29b5ac31f2d55e34da94a4274e1bd679fa42bccc5db074a070b899e28948680d82c7229223d846a1a2c19143dd99c78bc42c33490b85be5067a25f6361d6b803b315519de254191557ec691967ccc3d087b8799dfa5888ad748b7a6e164da
0c726bc1f916110b6fe6a013ce0e28b79bee045d250657a70211dc11a5dee69a2c05e9eedde536a9911883e5ef2ee76729ff8fbc3aae0fa13a36daf01199a7ac60b21c7fcac00d7c6a80f5ce10b79f4666d69a1a45b3ec864a57f1f6fd492223c539351326d7a25b18bcfd8697f55e972607b9675b1d40dea3ba4c0b3c080a0e69a3802e5dbe5284f817eaa05c76127a3898633d4524f3da9ba8d7e7b98af23a05a2672729a0136c572a68b494cdd49ce47c2c0e33582b601632b3a1d15f3cc38b9016001f9015c82014d889e607b89f9d2717488ee3a5d83a713a9fa831ab7e68080b8fb754cefe26136c37abae044d7be8e1a3b8aa3ff230de4579b08bf12020e9ea66a2f282ef549cd7f72d056ded10c2fa21fe339fe56715960a4bacb65525bde1671a0a691f44c0ed582e64d3799c4ee453a4fbb700cc130eef66cc66913d919b6a96bd31efc3d77e4accf3a7c695275188ed2e5a76526e4706bea7df44cf6a36fb9e43d0e37cf5d6e3c5b984062e57ceeb1c5e6a9d0c418a5a83b77c4c99e8799fba27bd884e51d5df3db1562fa0b13cb1051ef5d5269b4215078384fa84cbcdd93cd7e67d166ebfb88eadc77cfab6a09fd1ea8f82f530ecf62d60d176d3bdf4f2eebf57b45b532ba6471fb53312e32c3452ac69c7b0ce227a61e69cac080a0434df311dffabb4af9df6fd81f48814ad8f5363567d421c5466423bf3bdacc05a0032341e2314432f05701cb222c2868894039e6e156ee6872ebc8739a4c45a43db9027d01f9027982014d880843386325d71bf988456fca4e1ec42cda830601c994c5e72917d21e4aa0f724ed1cbe014171f1be66ff80b90203e082cfea48d8bbd73dc4f299c37a26fcfe1286a62d17e6bfd13084a47fbccd302a44770baa03092d7aa3bf8f15281bde3418b5a6f610199a7ca97fc11df8058de81fdc05527047d32e0e4527db10cddaa2e1a190d7dde1987c0501a200df8eea07d61ea0028930e7422451b44295ce91f79de155d6169bd64c0cadae791e59b67544023e5fcde77eb509d6418daa17dba99d0f09c23c7df78d609f4af7c1ad95b01c26edae2080556b8e63ac632d78b87eb57ef23791c2336775ccf12f62dba46b65a5b5c7017068194fd2b7bff11923ac2dba3ba0d7e28c1ed2ef1c5d2069e189c09bc51efb571c63f2891acacd6a327dc810180290f9699541f4b65bdd8935e074f80887d3f6f4c3ecd75a54c95476b26b42f02964c16ae02532433d48fb5b5f779562224d1bc099f51d332c67cecb1e619bcda1aee26011a463952719987f705b12fbbbf34e3989d6b5c5182bddc569fb545de391ef10031bf1b0f673f0ea1a9763f652624852bee8f09dd517250da77dd194f8310086ba52032212ed38e014a9bb3f47d8a16cd463a97
7a443ee02d5548ebb5c518e5a0125c6645f2ad2d52f99aec5c88cf4aba79167cb8f7012386916fe2b863da27d16a7c3c350442ebf9b54a569ccfcfe4f4e64853fd810e6a5b3b3cba9ac8525a260505d12492b99437309f94b91dd68c7658291052e2c4d414f87c1d7b7bde565791fdf99004316f02ef4d7c001a05044b928ccada6036e32565da0b9ac1b51d4a0eb5d702efb781a832c120665aca027befe34f4cf0deb37ef259882c20be1af0efa2ab726e06eb33736ab2f0b34e5b90186f90183881a09a2f1c8cde2c488c2eb098e1a51326d83159c2580884563918244f40000b9011b643c223acabd55c37efc426850758db45eb7a0ccb908d9e2ab6a122d812921618aaf4e30c377ed8c7c5b829846b473702496e87f2fac0a78fe92a7602239414117ba9d42c354b05e5561f234e4fc76ecf8285abc17060e980e1713a3f0ab031a53c6757c972e363485581436b20fcb4aa524281e6765ae59362fe284cb6c9c26e3980cec0a9b2f61d1446e9a1679fd055fca089b838872a26f866cb09ceaa5a57a061440ba3a342807d83a5a83589a7297afba2c456c628954a3daa451cb42207f9de22fd5dad066647b8e8ed43fccd3f335298291601fd8737a2ed69cb89e0573fc8eef594568c236f8f976870f2da93c65f77aeda9ae17d812e16dae936ca069e489d3d820580c636f12164c73795e287db92ddcc73dd6b341408202bda0b8ad8ad3d5218e0e27145286459b952ffce119c42b7b143d3ae68f08991c6198a07bd60b6dd3efcb39d42fbd3b15f2f65f9561ed6106484285f3a9d235d2962c2cb903a9f903a6883c0753f96351f096886eb111ddc0775d1c8308a6ae80881bc16d674ec80000b9033e6cc26ae2edabe8f726535a61e77b09496c76d81407ade4466993d4785c16ae669c39a5f9ee18875389a6004576a39465d66329e18646036b9ff5657ba1ec659bb2acedda2862458a642949d15f2108c9c9a712216e2d9d13077a134a69c64daa48018d835b542cfa7861a12febf7b79023af48f860377d4d8bf99639ba627ae9844ddd982438e2a508b6cb89c87d4b78f31e42f842f62af9cd59a69f4e899720156f7a2adf1d348e9b665481165af600a3f781aceea0589215f06dc022fd28fc6025ff85e3d4b7c25c358f35ed5f5f025eb2b0ec5511634494515a197f3e06f4e8a2fef699f33f58ab71376581b455cbf592e1e657115448db5237d010399045e023d0d69797131720de65ffba81c41037657951db3bd5fcc555b8bf6944a67f1fc0ae9ddecbdbb955743a86d2ca82b6239a47f0d37759cb3bcca9d95d7ad084bd8269d06f6cee9effb2173096ef22875db79714328f2d80beac6cff4b3f8fbde3ea1a1040b6885d86bc92390ed2efa52181d3fcf6b761c0a14b8
417ea3878d311d3690f93258e57848e926364fc0a60dcaa161a1cd9ea4fda657c5e868f59bc6d2ded1e264a100ff752fbc32d30728f13d74f60a1931cf1cd302aec02f4ca94541335c0f0717cda44c966db4c2c1e522794e0cc5a9dd84ed6355f979c4931231225096d3f651aa1970fd8a6de80325a6b7b3362b11eeeb3401df138bf8742bb94fca940ed45f8b4937d1645c98adad12836b19e09b59dd1e4cf020a2d4efeae49aff02a0c92537dfbcd4a560e876d0a3da71a38302efd5986e70a0592c02c4a8e5638869db811e47ce514bbe71acb864580d9f3be29e73f8af1584130a448b85c0a4a790d750a3d67a4f1c3e52b0db1c7ec28b891c66570c894b9955f0914981f28efef48616b004ca747fcdb448d0a1b6d7196e2ca002e17cfe65e7bb08027b95bea17ba0dd5b9a479726b5cd32a0fe24052c2afb163e60733e6ab77f8d1d2f606de15a31a2db1c8b7827434b64f794b808287f612854c7df802822340442cb00b8c508eb8d74a6334da415319557d4a8cb58247a7e65c74ef2238843fd02d24d6a859f02c547fab6e35903f69394659a2b1bb02fb89a613733cce7c4af817f6b8cf2ce38f425fa8b59b3fea76273664b8215d0503198393443c926b578202bda0115d2f3409265aaa2d214d11e19f314193884ce34c3274f4258d5f09a97172fca0418e2cf579d94373b0a81e66636160ad2f1de4597445af60d0ec37e9a97770deb882f880880f511ab07ca9dce1889745de5325aa780e8311fec19424eb7935928d6e5fc275944276ee070e90b9619e8853444835ec58000086428a36f8feba8202bda0d3d221e5abc91d1bf4721d9f51100bdb7e25f4e1b2eb363d200aa1b0c09727bba07688424185824dde9b365f31e258987ffcdbf3c850f9992ed80d0e71e54712ffb902d702f902d382014d88e4400f9aa703b1f98501db23a8d88543ec7b3d868309954b94e59842fa49a842609ce51ec1a4e9f75a00da8e1280b9025a30fadb0cd19a05ca7d20dbd28ffd1ec743d59a1169a730091be383f6c571c51a8514f9ddf9961a588f38bd388786c9e7efc5d0e71ca89e7f24a73201839f40e9378e5305f4174752c6eef07273a2c51009f04350abed1b6dbfff400ac6f790013028b56aa08f5090e4483b7bfd1b08042b8651dfb27520b3167e9b912e37bbefe7f13153571ef8ae23f2034df09ae737e672bd09d896bb01cc035322407ab3ca2a026f1d8d5beab70178c580a650874a57787d92b6f31f7f86ee939bf8fac22b23c6b6666b5e0241fb55dd4d397f1c78fe6da9fc3e66c2e34058e223a4567d259e3e1a3560bae9f5e2e3e7df1b7384b6af9a4155f1eeb61a6bf4b5e149db22109c635cbe9a4266ef48c211fe1236becc472cb7869906e27166f3f017ce75d18
8fa708e037fe1a5729b43892460458478cdaa91af1f9367cd1164204b240212101e631cbd027c814efd1e46368b37041836964dc6a76701c38810f36cc02ae93eddd5ebe83c24527244a55eceec6d47ec8df4b158fd1166a7d0d7bbee043632852ecd8e5aab24d71717a232eae9facb45b534f75103fc57f5cd8f978a362249a16e6b3783443bc5100bd1d8bbbd45144b7c63393f5d8169c4381f645bbbabc899e022d58e7b4293125d6c4d7ef75436b4542618636fb247b48ff823f52f416348fb767f6146c1f443147baeea5c6ca7fdcfe3795e09112224301f87c5667027b74b54dcc0f3c4e149a1e67aa6f8a940e1f2891980a6e565821a1f06d522eee5803650f6c0b8c8f5452804f9c456550cb8f1d4827c7fd1c8fe77b71aca3aef9be16494a4bf7d40b274d28ed9cd92a2169b6de5fdfa3ed1b6ef8318c080a008c406d42212f12e384b8f8bb7bb40d0c4660b67026646436ca589d143edc5a9a055fb6596377274cd6af52d95a127c503c0af5b7df6df59ec493d2bf15cf02bcbb9046102f9045d82014d8822e3c64dba5192b7843cffd35685424e576804831aa2e894b002add3a6fe3cfc260c378a187213b6bac436f3887ce66c50e2840000b903dd35dffee48e5855b9f4e7d47630f215334f242c738b2aaccc6e4a815ad70d29a94bd5fea67cd0cc855835ab9bf81c789806e311f744dfc370960d5246099d70e509571437c3c61e11c2971782d7ebbe3dd231c3025966d5ae37fea256ab601339db76c325884b7939ac8e772ff54c8196d35cb823cd42287ccad89e0f1a8092caae92612bc897cee16c73c18a39a5b1ba5bc5df73beb108cf5c896a420837ff53f6e601052ec017e75d3554c0ada83b7874ded4edab8b1a25e39c56c4666ae2812fe82f65f5f7d423ab3a173261ff29495a5ed0851171d1c261129b2062fffa4fc682cb41394f5ebe335bc2220abe7e950d9afa85f305eac439eec8eba9227352f592804f5b47208c262b220c1eb39d6ef89a92ec3ef051e9cca642658a8d8e55b35e78583d7a6cfc01bc5b9d579a1514c201d34230684e4385a1774f8b5f38b5191682a8b91b536ccd3821ee409028180d0f5eabf6e1e2e3dcbeeae0d92cd83e52ae68842bf781824cb7dc8c1507361d7d03b03bb15f7f7a0a9bf12171e01408f60b35722a5a819d7d9107fcea1b94184160cd9890f1f510207d47752fc27f58729ca8490b81ea720d5fcae71db92a9b140099047f45526d26af5da8bfe3e41beffe14d5d1cbe31bd1e50b9c38b9b393ef4b1b5514050e4a934d9501fc70d9ee3720a22fe18533b420cda21aea8c483e5bd3cb4786d6ce2d0f97d1a653253efd1c0283772e8ae43013dba4990bb6c7d9c7087c0d9b2fd3b79decd9a775989c81b87ccbb1e2d6b
3c4df6dbe1b7e3a147dd8ff6998a0dcbe3f517899f2dbbbc788d5004d2de3d23224268406d02fecb0ba553123528c6b41f6f55aeaf8f32aa767a9f3113ca91d92e2dcf656cdef77f966a6b2cba83340658aa5c26aa0cb8ce54ae3a55b1eaafef66763ff4de971cd6a0b65a680169837dac945b0a7f13864795670922c99dfc6b5a5465e5043ad1b3205e4579cfc0e037f0b4e0a8b22b5d6ddba7d24b31388620d4aba83f84c5a1334261955d52294bd8b56d7175afbae015933ab1e0ef91e8161468f8eaa76a6f7a9bb8c8fc1195b9d8ff5dc4a51ff73a74b0640999bebcecb6036ef676c65e9fa5b1be22872082989c55a789fc4c2252452f786a13c4e868b85fbcd09bab689bb66dfae14c2ea7024647ad97728deed03314b007dbe461c1836e97f928308d39e5afc43ee3ae22ff47fff183553f56711880cc5ef72c5d66b4e2c6f651c57311d48fcc0aec762fae6444a5be11793be04c85ba97450673687734e681a1f3c64699686880d32d4cf87202b49ce13fbc8771fcf30d5593b41ffa61462c64061449b2c0a24ad8a03d280500bc86049bd55a27a05d70b12c7fd700454dbf3869b329a1ffa9994ecc2a6ec9572e3adaa0056c080a013fed42f6ecae05ccdb9bd8dc88ed44579b6a8871118710058f72c29f6db3b8ea03d200c0fb3e4416a51538d2ba41be88cfe830fa74c280e8b4b66cc3fad24ec06" - ); - - let mut compressor = BrotliCompressor::new(BrotliLevel::Brotli11); - compressor.write(&raw_batch_decompressed).unwrap(); - compressor.close().unwrap(); - let compressed = compressor.get_compressed(); - - let decompressed = - decompress_brotli(&compressed, MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize).unwrap(); - assert_eq!(decompressed, raw_batch_decompressed); - } -} diff --git a/kona/crates/batcher/comp/src/channel_out.rs b/kona/crates/batcher/comp/src/channel_out.rs deleted file mode 100644 index d4fce73c261..00000000000 --- a/kona/crates/batcher/comp/src/channel_out.rs +++ /dev/null @@ -1,299 +0,0 @@ -//! Contains the `ChannelOut` primitive for Optimism. - -use crate::{ChannelCompressor, CompressorError}; -use alloc::{vec, vec::Vec}; -use kona_genesis::RollupConfig; -use kona_protocol::{Batch, ChannelId, Frame}; -use rand::{RngCore, SeedableRng, rngs::SmallRng}; - -/// The frame overhead. 
-const FRAME_V0_OVERHEAD: usize = 23; - -/// An error returned by the [ChannelOut] when adding single batches. -#[derive(Debug, Clone, PartialEq, thiserror::Error)] -pub enum ChannelOutError { - /// The channel is closed. - #[error("The channel is already closed")] - ChannelClosed, - /// The max frame size is too small. - #[error("The max frame size is too small")] - MaxFrameSizeTooSmall, - /// Missing compressed batch data. - #[error("Missing compressed batch data")] - MissingData, - /// An error from compression. - #[error("Error from compression")] - Compression(#[from] CompressorError), - /// An error encoding the `Batch`. - #[error("Error encoding the batch")] - BatchEncoding, - /// The encoded batch exceeds the max RLP bytes per channel. - #[error("The encoded batch exceeds the max RLP bytes per channel")] - ExceedsMaxRlpBytesPerChannel, -} - -/// [ChannelOut] constructs a channel from compressed, encoded batch data. -#[allow(missing_debug_implementations)] -pub struct ChannelOut<'a, C> -where - C: ChannelCompressor, -{ - /// The unique identifier for the channel. - pub id: ChannelId, - /// A reference to the [RollupConfig] used to - /// check the max RLP bytes per channel when - /// encoding and accepting the batch. - pub config: &'a RollupConfig, - /// The rlp length of the channel. - pub rlp_length: u64, - /// Whether the channel is closed. - pub closed: bool, - /// The frame number. - pub frame_number: u16, - /// The compressor. - pub compressor: C, -} - -impl<'a, C> ChannelOut<'a, C> -where - C: ChannelCompressor, -{ - /// Creates a new [ChannelOut] with the given [ChannelId]. - pub const fn new(id: ChannelId, config: &'a RollupConfig, compressor: C) -> Self { - Self { id, config, rlp_length: 0, frame_number: 0, closed: false, compressor } - } - - /// Resets the [ChannelOut] to its initial state. 
- pub fn reset(&mut self) { - self.rlp_length = 0; - self.frame_number = 0; - self.closed = false; - self.compressor.reset(); - // `getrandom` isn't available for wasm and risc targets - // Thread-based RNGs are not available for no_std - // So we must use a seeded RNG. - let mut small_rng = SmallRng::seed_from_u64(43); - SmallRng::fill_bytes(&mut small_rng, &mut self.id); - } - - /// Accepts the given [Batch] data into the [ChannelOut], compressing it - /// into frames. - pub fn add_batch(&mut self, batch: Batch) -> Result<(), ChannelOutError> { - if self.closed { - return Err(ChannelOutError::ChannelClosed); - } - - // Encode the batch. - let mut buf = vec![]; - batch.encode(&mut buf).map_err(|_| ChannelOutError::BatchEncoding)?; - - // Validate that the RLP length is within the channel's limits. - let max_rlp_bytes_per_channel = self.config.max_rlp_bytes_per_channel(batch.timestamp()); - if self.rlp_length + buf.len() as u64 > max_rlp_bytes_per_channel { - return Err(ChannelOutError::ExceedsMaxRlpBytesPerChannel); - } - - self.compressor.write(&buf)?; - self.rlp_length += buf.len() as u64; - - Ok(()) - } - - /// Returns the total amount of rlp-encoded input bytes. - pub const fn input_bytes(&self) -> u64 { - self.rlp_length - } - - /// Returns the number of bytes ready to be output to a frame. - pub fn ready_bytes(&self) -> usize { - self.compressor.len() - } - - /// Flush the internal compressor. - pub fn flush(&mut self) -> Result<(), ChannelOutError> { - self.compressor.flush()?; - Ok(()) - } - - /// Closes the channel if not already closed. - pub const fn close(&mut self) { - self.closed = true; - } - - /// Outputs a [Frame] from the [ChannelOut]. - pub fn output_frame(&mut self, max_size: usize) -> Result<Frame, ChannelOutError> { - if max_size < FRAME_V0_OVERHEAD { - return Err(ChannelOutError::MaxFrameSizeTooSmall); - } - - // Construct an empty frame. 
- let mut frame = - Frame { id: self.id, number: self.frame_number, is_last: self.closed, data: vec![] }; - - let mut max_size = max_size - FRAME_V0_OVERHEAD; - if max_size > self.ready_bytes() { - max_size = self.ready_bytes(); - } - - // Read `max_size` bytes from the compressed data. - let mut data = Vec::with_capacity(max_size); - self.compressor.read(&mut data).map_err(ChannelOutError::Compression)?; - frame.data.extend_from_slice(data.as_slice()); - - // Update the compressed data. - self.frame_number += 1; - Ok(frame) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{CompressorWriter, test_utils::MockCompressor}; - use alloy_primitives::Bytes; - use kona_protocol::{SingleBatch, SpanBatch}; - - #[test] - fn test_output_frame_max_size_too_small() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - assert_eq!(channel.output_frame(0), Err(ChannelOutError::MaxFrameSizeTooSmall)); - } - - #[test] - fn test_channel_out_output_frame_no_data() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new( - ChannelId::default(), - &config, - MockCompressor { read_error: true, compressed: Some(Default::default()) }, - ); - let err = channel.output_frame(FRAME_V0_OVERHEAD).unwrap_err(); - assert_eq!(err, ChannelOutError::Compression(CompressorError::Full)); - } - - #[test] - fn test_channel_out_output() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new( - ChannelId::default(), - &config, - MockCompressor { compressed: Some(Default::default()), ..Default::default() }, - ); - let frame = channel.output_frame(FRAME_V0_OVERHEAD).unwrap(); - assert_eq!(frame.id, ChannelId::default()); - assert_eq!(frame.number, 0); - assert!(!frame.is_last); - } - - #[test] - fn test_channel_out_reset() { - let config = RollupConfig::default(); - let mut channel = ChannelOut { - id: ChannelId::default(), - config: &config, - rlp_length: 10, - 
closed: true, - frame_number: 11, - compressor: MockCompressor::default(), - }; - channel.reset(); - assert_eq!(channel.rlp_length, 0); - assert_eq!(channel.frame_number, 0); - // The odds of a randomized channel id being equal to the - // default are so astronomically low, this test will always pass. - // The randomized [u8; 16] is about 1/255^16. - assert!(channel.id != ChannelId::default()); - assert!(!channel.closed); - } - - #[test] - fn test_channel_out_ready_bytes_empty() { - let config = RollupConfig::default(); - let channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - assert_eq!(channel.ready_bytes(), 0); - } - - #[test] - fn test_channel_out_ready_bytes_some() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - channel.compressor.write(&[1, 2, 3]).unwrap(); - assert_eq!(channel.ready_bytes(), 3); - } - - #[test] - fn test_channel_out_close() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - assert!(!channel.closed); - - channel.close(); - assert!(channel.closed); - } - - #[test] - fn test_channel_out_add_batch_closed() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - channel.close(); - - let batch = Batch::Single(SingleBatch::default()); - assert_eq!(channel.add_batch(batch), Err(ChannelOutError::ChannelClosed)); - } - - #[test] - fn test_channel_out_empty_span_batch_decode_error() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - - let batch = Batch::Span(SpanBatch::default()); - assert_eq!(channel.add_batch(batch), Err(ChannelOutError::BatchEncoding)); - } - - #[test] - fn test_channel_out_max_rlp_bytes_per_channel() { - let config = RollupConfig::default(); - let mut 
channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - - let batch = Batch::Single(SingleBatch::default()); - channel.rlp_length = config.max_rlp_bytes_per_channel(batch.timestamp()); - - assert_eq!(channel.add_batch(batch), Err(ChannelOutError::ExceedsMaxRlpBytesPerChannel)); - } - - #[test] - fn test_channel_out_add_batch() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - - let batch = Batch::Single(SingleBatch::default()); - assert_eq!(channel.add_batch(batch), Ok(())); - } - - #[test] - fn test_channel_out_add_batch_enforces_cumulative_rlp_limit() { - let config = RollupConfig::default(); - let mut channel = ChannelOut::new(ChannelId::default(), &config, MockCompressor::default()); - - let timestamp = 0; - let max_rlp = config.max_rlp_bytes_per_channel(timestamp); - let payload_size = (max_rlp / 2 + 1) as usize; - - let large_batch = Batch::Single(SingleBatch { - timestamp, - transactions: vec![Bytes::from(vec![0u8; payload_size])], - ..Default::default() - }); - - let mut encoded = Vec::new(); - large_batch.encode(&mut encoded).expect("test batch should encode"); - assert!(encoded.len() as u64 <= max_rlp, "test batch should fit within per-channel limit"); - - channel.add_batch(large_batch.clone()).expect("first batch should fit"); - assert_eq!(channel.rlp_length, encoded.len() as u64); - - let err = channel.add_batch(large_batch).unwrap_err(); - assert_eq!(err, ChannelOutError::ExceedsMaxRlpBytesPerChannel); - } -} diff --git a/kona/crates/batcher/comp/src/config.rs b/kona/crates/batcher/comp/src/config.rs deleted file mode 100644 index ea9be16364e..00000000000 --- a/kona/crates/batcher/comp/src/config.rs +++ /dev/null @@ -1,22 +0,0 @@ -//! Compression configuration. - -use crate::{CompressionAlgo, CompressorType}; - -/// Config for the compressor itself. 
-#[derive(Debug, Clone)] -pub struct Config { - /// TargetOutputSize is the target size that the compressed data should reach. - /// The shadow compressor guarantees that the compressed data stays below - /// this bound. The ratio compressor might go over. - pub target_output_size: u64, - /// ApproxComprRatio to assume (only ratio compressor). Should be slightly smaller - /// than average from experiments to avoid the chances of creating a small - /// additional leftover frame. - pub approx_compr_ratio: f64, - /// Kind of compressor to use. Must be one of KindKeys. If unset, NewCompressor - /// will default to RatioKind. - pub kind: CompressorType, - - /// Type of compression algorithm to use. Must be one of [zlib, brotli-(9|10|11)] - pub compression_algo: CompressionAlgo, -} diff --git a/kona/crates/batcher/comp/src/lib.rs b/kona/crates/batcher/comp/src/lib.rs deleted file mode 100644 index d406f52e819..00000000000 --- a/kona/crates/batcher/comp/src/lib.rs +++ /dev/null @@ -1,48 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -mod channel_out; -pub use channel_out::{ChannelOut, ChannelOutError}; - -mod traits; -pub use traits::{ChannelCompressor, CompressorWriter}; - -mod config; -pub use config::Config; - -mod types; -pub use types::{CompressionAlgo, CompressorError, CompressorResult, CompressorType}; - -mod zlib; -pub use zlib::{ZlibCompressor, compress_zlib, decompress_zlib}; - -#[cfg(feature = "std")] -mod brotli; -#[cfg(feature = "std")] -pub use brotli::{BrotliCompressionError, BrotliCompressor, BrotliLevel, compress_brotli}; - -#[cfg(feature = "std")] -mod variant; -#[cfg(feature 
= "std")] -pub use variant::VariantCompressor; - -#[cfg(feature = "std")] -mod shadow; -#[cfg(feature = "std")] -pub use shadow::ShadowCompressor; - -#[cfg(feature = "std")] -mod ratio; -#[cfg(feature = "std")] -pub use ratio::RatioCompressor; - -#[cfg(any(test, feature = "test-utils"))] -pub mod test_utils; diff --git a/kona/crates/batcher/comp/src/mod.rs b/kona/crates/batcher/comp/src/mod.rs deleted file mode 100644 index c90ff206dd0..00000000000 --- a/kona/crates/batcher/comp/src/mod.rs +++ /dev/null @@ -1,32 +0,0 @@ -//! Contains compression and decompression primitives for Optimism. - -#[cfg(feature = "std")] -mod variant; -#[cfg(feature = "std")] -pub use variant::VariantCompressor; - -mod config; -pub use config::Config; - -mod types; -pub use types::{CompressionAlgo, CompressorError, CompressorResult, CompressorType}; - -mod zlib; -pub use zlib::{ZlibCompressor, compress_zlib, decompress_zlib}; - -mod brotli; -#[cfg(feature = "std")] -pub use brotli::{BrotliCompressionError, compress_brotli, BrotliCompressor}; - -mod traits; -pub use traits::{ChannelCompressor, CompressorWriter}; - -#[cfg(feature = "std")] -mod shadow; -#[cfg(feature = "std")] -pub use shadow::ShadowCompressor; - -#[cfg(feature = "std")] -mod ratio; -#[cfg(feature = "std")] -pub use ratio::RatioCompressor; diff --git a/kona/crates/batcher/comp/src/ratio.rs b/kona/crates/batcher/comp/src/ratio.rs deleted file mode 100644 index c9704c951be..00000000000 --- a/kona/crates/batcher/comp/src/ratio.rs +++ /dev/null @@ -1,137 +0,0 @@ -//! Contains the ratio compressor for Optimism. -//! -//! This is a port of the [RatioCompressor][rc] from the op-batcher. -//! -//! 
[rc]: https://github.com/ethereum-optimism/optimism/blob/develop/op-batcher/compressor/ratio_compressor.go#L7 - -use crate::{CompressorResult, CompressorWriter, Config, VariantCompressor}; - -/// Ratio Compressor -/// -/// The ratio compressor uses the target size and a compression ration parameter -/// to determine how much data can be written to the compressor before it's -/// considered full. The full calculation is as follows: -/// -/// full = uncompressedLength * approxCompRatio >= targetFrameSize * targetNumFrames -/// -/// The ratio compressor wraps a [VariantCompressor] which dispatches to the -/// appropriate compression algorithm (ZLIB or Brotli). -#[derive(Debug, Clone)] -pub struct RatioCompressor { - /// The compressor configuration. - config: Config, - /// The amount of data currently in the compressor. - lake: u64, - /// The inner [VariantCompressor] that will be used to compress the data. - compressor: VariantCompressor, -} - -impl RatioCompressor { - /// Create a new [RatioCompressor] with the given [VariantCompressor]. - pub const fn new(config: Config, compressor: VariantCompressor) -> Self { - Self { config, lake: 0, compressor } - } - - /// Calculates the input threshold in bytes. - pub fn input_threshold(&self) -> usize { - let target_frame_size = self.config.target_output_size; - let approx_comp_ratio = self.config.approx_compr_ratio; - - (target_frame_size as f64 / approx_comp_ratio) as usize - } - - /// Returns if the compressor is full (exceeds the input threshold). 
- pub fn is_full(&self) -> bool { - self.lake >= self.input_threshold() as u64 - } -} - -impl From<Config> for RatioCompressor { - fn from(config: Config) -> Self { - let compressor = VariantCompressor::from(config.compression_algo); - Self::new(config, compressor) - } -} - -impl CompressorWriter for RatioCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - match self.compressor.write(data) { - Ok(n) => { - self.lake += n as u64; - Ok(n) - } - Err(e) => Err(e), - } - } - - fn flush(&mut self) -> CompressorResult<()> { - self.compressor.flush() - } - - fn close(&mut self) -> CompressorResult<()> { - self.compressor.close() - } - - fn reset(&mut self) { - self.compressor.reset(); - self.lake = 0; - } - - fn len(&self) -> usize { - self.compressor.len() - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - self.compressor.read(buf) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{CompressionAlgo, CompressorType}; - - #[test] - fn test_input_threshold() { - let config = Config { - target_output_size: 1024, - approx_compr_ratio: 0.5, - compression_algo: CompressionAlgo::Zlib, - kind: CompressorType::Ratio, - }; - - let inner = VariantCompressor::from(CompressionAlgo::Zlib); - let compressor = RatioCompressor::new(config, inner); - assert_eq!(compressor.input_threshold(), 2048); - } - - #[test] - fn test_ratio_compressor() { - let config = Config { - target_output_size: 1024, - approx_compr_ratio: 0.5, - compression_algo: CompressionAlgo::Zlib, - kind: CompressorType::Ratio, - }; - - let inner = VariantCompressor::from(CompressionAlgo::Zlib); - let mut compressor = RatioCompressor::new(config, inner); - - assert!(!compressor.is_full()); - compressor.write(&[0; 2048]).unwrap(); - assert!(compressor.is_full()); - assert_eq!(compressor.len(), 18); - - let mut buf = []; - compressor.read(&mut buf).unwrap(); - assert_eq!(buf.len(), 0); - - compressor.flush().unwrap(); - - compressor.reset(); - 
assert!(!compressor.is_full()); - assert_eq!(compressor.len(), 0); - - compressor.close().unwrap(); - } -} diff --git a/kona/crates/batcher/comp/src/shadow.rs b/kona/crates/batcher/comp/src/shadow.rs deleted file mode 100644 index 735521f2088..00000000000 --- a/kona/crates/batcher/comp/src/shadow.rs +++ /dev/null @@ -1,117 +0,0 @@ -//! Contains the shadow compressor for Optimism. -//! -//! This is a port of the [ShadowCompressor][sc] from the op-batcher. -//! -//! [sc]: https://github.com/ethereum-optimism/optimism/blob/develop/op-batcher/compressor/shadow_compressor.go#L18 - -use crate::{CompressorError, CompressorResult, CompressorWriter, Config, VariantCompressor}; - -/// The largest potential blow-up in bytes we expect to see when compressing -/// arbitrary (e.g. random) data. Here we account for a 2 byte header, 4 byte -/// digest, 5 byte eof indicator, and then 5 byte flate block header for each 16k of potential -/// data. Assuming frames are max 128k size (the current max blob size) this is 2+4+5+(5*8) = 51 -/// bytes. If we start using larger frames (e.g. should max blob size increase) a larger blowup -/// might be possible, but it would be highly unlikely, and the system still works if our -/// estimate is wrong -- we just end up writing one more tx for the overflow. -const SAFE_COMPRESSION_OVERHEAD: u64 = 51; - -// The number of final bytes a `zlib.Writer` call writes to the output buffer. -const CLOSE_OVERHEAD_ZLIB: u64 = 9; - -/// Shadow Compressor -/// -/// The shadow compressor contains two compression buffers, one for size estimation, and -/// one for the final compressed data. The first compression buffer is flushed on every -/// write, and the second isn't, which means the final compressed data is always at least -/// smaller than the size estimation. -/// -/// One exception to the rule is when the first write to the buffer is not checked against -/// the target. This allows individual blocks larger than the target to be included. 
-/// Notice, this will be split across multiple channel frames. -#[derive(Debug, Clone)] -pub struct ShadowCompressor { - /// The compressor configuration. - config: Config, - /// The inner [VariantCompressor] that will be used to compress the data. - compressor: VariantCompressor, - /// The shadow compressor. - shadow: VariantCompressor, - - /// Flags that the buffer is full. - is_full: bool, - /// An upper bound on the size of the compressed data. - bound: u64, -} - -impl ShadowCompressor { - /// Creates a new [ShadowCompressor] with the given [VariantCompressor]. - pub const fn new( - config: Config, - compressor: VariantCompressor, - shadow: VariantCompressor, - ) -> Self { - Self { config, is_full: false, compressor, shadow, bound: SAFE_COMPRESSION_OVERHEAD } - } -} - -impl From<Config> for ShadowCompressor { - fn from(config: Config) -> Self { - let compressor = VariantCompressor::from(config.compression_algo); - let shadow = VariantCompressor::from(config.compression_algo); - Self::new(config, compressor, shadow) - } -} - -impl CompressorWriter for ShadowCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - // If the buffer is full, error so the user can flush. - if self.is_full { - return Err(CompressorError::Full); - } - - // Write to the shadow compressor. - self.shadow.write(data)?; - - // The new bound increases by the length of the compressed data. - let mut newbound = data.len() as u64; - if newbound > self.config.target_output_size { - // Don't flush the buffer if there's a chance we're over the size limit. - self.shadow.flush()?; - newbound = self.shadow.len() as u64 + CLOSE_OVERHEAD_ZLIB; - if newbound > self.config.target_output_size { - self.is_full = true; - // Only error if the buffer has been written to. - if self.compressor.len() > 0 { - return Err(CompressorError::Full); - } - } - } - - // Update the bound and compress. 
- self.bound = newbound; - self.compressor.write(data) - } - - fn len(&self) -> usize { - self.compressor.len() - } - - fn flush(&mut self) -> CompressorResult<()> { - self.shadow.flush() - } - - fn close(&mut self) -> CompressorResult<()> { - self.shadow.close() - } - - fn reset(&mut self) { - self.compressor.reset(); - self.shadow.reset(); - self.is_full = false; - self.bound = SAFE_COMPRESSION_OVERHEAD; - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - self.compressor.read(buf) - } -} diff --git a/kona/crates/batcher/comp/src/test_utils.rs b/kona/crates/batcher/comp/src/test_utils.rs deleted file mode 100644 index 9168ad65f94..00000000000 --- a/kona/crates/batcher/comp/src/test_utils.rs +++ /dev/null @@ -1,54 +0,0 @@ -//! Test Utilities for the compression crate. - -use crate::{ChannelCompressor, CompressorError, CompressorResult, CompressorWriter}; -use alloc::vec::Vec; -use alloy_primitives::Bytes; - -/// A Mock compressor for testing. -#[derive(Debug, Clone, Default)] -pub struct MockCompressor { - /// Compressed bytes - pub compressed: Option<Bytes>, - /// Whether to throw a read error. 
- pub read_error: bool, -} - -impl CompressorWriter for MockCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - let data = data.to_vec(); - let written = data.len(); - self.compressed = Some(Bytes::from(data)); - Ok(written) - } - - fn flush(&mut self) -> CompressorResult<()> { - Ok(()) - } - - fn close(&mut self) -> CompressorResult<()> { - Ok(()) - } - - fn reset(&mut self) { - self.compressed = None; - } - - fn len(&self) -> usize { - self.compressed.as_ref().map(|b| b.len()).unwrap_or(0) - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - if self.read_error { - return Err(CompressorError::Full); - } - let len = self.compressed.as_ref().map(|b| b.len()).unwrap_or(0); - buf[..len].copy_from_slice(self.compressed.as_ref().unwrap()); - Ok(len) - } -} - -impl ChannelCompressor for MockCompressor { - fn get_compressed(&self) -> Vec<u8> { - self.compressed.as_ref().unwrap().to_vec() - } -} diff --git a/kona/crates/batcher/comp/src/traits.rs b/kona/crates/batcher/comp/src/traits.rs deleted file mode 100644 index 50ef1a32942..00000000000 --- a/kona/crates/batcher/comp/src/traits.rs +++ /dev/null @@ -1,39 +0,0 @@ -//! Contains the core `Compressor` trait. - -use crate::CompressorResult; -use alloc::vec::Vec; - -/// Compressor Writer -/// -/// A trait that expands the standard library `Write` trait to include -/// compression-specific methods and return [CompressorResult] instead of -/// standard library `Result`. -#[allow(clippy::len_without_is_empty)] -pub trait CompressorWriter { - /// Writes the given data to the compressor. - fn write(&mut self, data: &[u8]) -> CompressorResult<usize>; - - /// Flushes the buffer. - fn flush(&mut self) -> CompressorResult<()>; - - /// Closes the compressor. - fn close(&mut self) -> CompressorResult<()>; - - /// Resets the compressor. - fn reset(&mut self); - - /// Returns the length of the compressed data. 
- fn len(&self) -> usize; - - /// Reads the compressed data into the given buffer. - /// Returns the number of bytes read. - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize>; -} - -/// Channel Compressor -/// -/// A compressor for channels. -pub trait ChannelCompressor: CompressorWriter { - /// Returns the compressed data buffer. - fn get_compressed(&self) -> Vec<u8>; -} diff --git a/kona/crates/batcher/comp/src/types.rs b/kona/crates/batcher/comp/src/types.rs deleted file mode 100644 index afdc05f587c..00000000000 --- a/kona/crates/batcher/comp/src/types.rs +++ /dev/null @@ -1,52 +0,0 @@ -//! Compression types. - -/// The result from compressing data. -pub type CompressorResult<T> = Result<T, CompressorError>; - -/// An error returned by the compressor. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -pub enum CompressorError { - /// Thrown when the compressor is full. - #[error("compressor is full")] - Full, - /// Brotli compression failed. - #[error("brotli compression failed")] - Brotli, -} - -/// The type of compressor to use. -/// -/// See: <https://github.com/ethereum-optimism/optimism/blob/042433b89ce38ccc15456e9673829f6783bb97ac/op-batcher/compressor/compressors.go#L20> -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CompressorType { - /// The ratio compression. - Ratio, - /// The shadow compression. - Shadow, -} - -/// The compression algorithm type. -/// -/// See: -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum CompressionAlgo { - /// The fastest brotli compression level. - Brotli9, - /// The default brotli compression level. - Brotli10, - /// The best brotli compression level. - Brotli11, - /// The zlib compression. 
- Zlib, -} - -#[cfg(feature = "std")] -impl<A: alloc::borrow::Borrow<CompressionAlgo>> From<A> for crate::BrotliLevel { - fn from(algo: A) -> Self { - match algo.borrow() { - CompressionAlgo::Brotli9 => Self::Brotli9, - CompressionAlgo::Brotli11 => Self::Brotli11, - _ => Self::Brotli10, - } - } -} diff --git a/kona/crates/batcher/comp/src/variant.rs b/kona/crates/batcher/comp/src/variant.rs deleted file mode 100644 index 969aecaf813..00000000000 --- a/kona/crates/batcher/comp/src/variant.rs +++ /dev/null @@ -1,92 +0,0 @@ -//! A variant over the different implementations of [ChannelCompressor]. - -use crate::{ - BrotliCompressor, ChannelCompressor, CompressionAlgo, CompressorResult, CompressorWriter, - ZlibCompressor, -}; -use kona_genesis::RollupConfig; - -/// The channel compressor wraps the brotli and zlib compressor types, -/// implementing the [ChannelCompressor] trait itself. -#[derive(Debug, Clone)] -pub enum VariantCompressor { - /// The brotli compressor. - Brotli(BrotliCompressor), - /// The zlib compressor. - Zlib(ZlibCompressor), -} - -impl VariantCompressor { - /// Constructs a [VariantCompressor] using the given [RollupConfig] and timestamp. 
- pub fn from_timestamp(config: &RollupConfig, timestamp: u64) -> Self { - if config.is_fjord_active(timestamp) { - Self::Brotli(BrotliCompressor::new(CompressionAlgo::Brotli10)) - } else { - Self::Zlib(ZlibCompressor::new()) - } - } -} - -impl CompressorWriter for VariantCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - match self { - Self::Brotli(compressor) => compressor.write(data), - Self::Zlib(compressor) => compressor.write(data), - } - } - - fn flush(&mut self) -> CompressorResult<()> { - match self { - Self::Brotli(compressor) => compressor.flush(), - Self::Zlib(compressor) => compressor.flush(), - } - } - - fn close(&mut self) -> CompressorResult<()> { - match self { - Self::Brotli(compressor) => compressor.close(), - Self::Zlib(compressor) => compressor.close(), - } - } - - fn reset(&mut self) { - match self { - Self::Brotli(compressor) => compressor.reset(), - Self::Zlib(compressor) => compressor.reset(), - } - } - - fn len(&self) -> usize { - match self { - Self::Brotli(compressor) => compressor.len(), - Self::Zlib(compressor) => compressor.len(), - } - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - match self { - Self::Brotli(compressor) => compressor.read(buf), - Self::Zlib(compressor) => compressor.read(buf), - } - } -} - -impl ChannelCompressor for VariantCompressor { - fn get_compressed(&self) -> Vec<u8> { - match self { - Self::Brotli(compressor) => compressor.get_compressed(), - Self::Zlib(compressor) => compressor.get_compressed(), - } - } -} - -impl From<CompressionAlgo> for VariantCompressor { - fn from(algo: CompressionAlgo) -> Self { - match algo { - lvl @ CompressionAlgo::Brotli9 => Self::Brotli(BrotliCompressor::new(lvl)), - lvl @ CompressionAlgo::Brotli10 => Self::Brotli(BrotliCompressor::new(lvl)), - lvl @ CompressionAlgo::Brotli11 => Self::Brotli(BrotliCompressor::new(lvl)), - CompressionAlgo::Zlib => Self::Zlib(ZlibCompressor::new()), - } - } -} diff --git 
a/kona/crates/batcher/comp/src/zlib.rs b/kona/crates/batcher/comp/src/zlib.rs deleted file mode 100644 index cf2521072f6..00000000000 --- a/kona/crates/batcher/comp/src/zlib.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! Contains ZLIB compression and decompression primitives for Optimism. - -use crate::{ChannelCompressor, CompressorResult, CompressorWriter}; -use alloc::vec::Vec; -use miniz_oxide::inflate::DecompressError; - -/// The best compression. -const BEST_ZLIB_COMPRESSION: u8 = 9; - -/// Method to compress data using ZLIB. -pub fn compress_zlib(data: &[u8]) -> Vec<u8> { - miniz_oxide::deflate::compress_to_vec(data, BEST_ZLIB_COMPRESSION) -} - -/// Method to decompress data using ZLIB. -pub fn decompress_zlib(data: &[u8]) -> Result<Vec<u8>, DecompressError> { - miniz_oxide::inflate::decompress_to_vec(data) -} - -/// The ZLIB compressor. -#[derive(Debug, Clone, Default)] -#[non_exhaustive] -pub struct ZlibCompressor { - /// Holds a non-compressed buffer. - buffer: Vec<u8>, - /// The compressed buffer. - compressed: Vec<u8>, -} - -impl ZlibCompressor { - /// Create a new ZLIB compressor. 
- pub const fn new() -> Self { - Self { buffer: Vec::new(), compressed: Vec::new() } - } -} - -impl CompressorWriter for ZlibCompressor { - fn write(&mut self, data: &[u8]) -> CompressorResult<usize> { - self.buffer.extend_from_slice(data); - self.compressed.clear(); - self.compressed.extend_from_slice(&compress_zlib(&self.buffer)); - Ok(data.len()) - } - - fn flush(&mut self) -> CompressorResult<()> { - Ok(()) - } - - fn close(&mut self) -> CompressorResult<()> { - Ok(()) - } - - fn reset(&mut self) { - self.buffer.clear(); - self.compressed.clear(); - } - - fn len(&self) -> usize { - self.compressed.len() - } - - fn read(&mut self, buf: &mut [u8]) -> CompressorResult<usize> { - let len = self.compressed.len().min(buf.len()); - buf[..len].copy_from_slice(&self.compressed[..len]); - Ok(len) - } -} - -impl ChannelCompressor for ZlibCompressor { - fn get_compressed(&self) -> Vec<u8> { - self.compressed.clone() - } -} diff --git a/kona/crates/node/disc/src/driver.rs b/kona/crates/node/disc/src/driver.rs deleted file mode 100644 index 269711e6684..00000000000 --- a/kona/crates/node/disc/src/driver.rs +++ /dev/null @@ -1,545 +0,0 @@ -//! Discovery Module. - -use backon::{ExponentialBuilder, RetryableWithContext}; -use derive_more::Debug; -use discv5::{Config, Discv5, Enr, enr::NodeId}; -use kona_peers::{BootNode, BootNodes, BootStore, BootStoreFile, EnrValidation, enr_to_multiaddr}; -use tokio::{ - sync::mpsc::channel, - time::{Duration, sleep}, -}; - -use crate::{Discv5Builder, Discv5Handler, HandlerRequest, LocalNode}; - -/// The [`Discv5Driver`] drives the discovery service. -/// -/// Calling [`Discv5Driver::start`] spawns a new [`Discv5`] -/// discovery service in a new tokio task and returns a -/// [`Discv5Handler`]. -/// -/// Channels are used to communicate between the [`Discv5Handler`] -/// and the spawned task containing the [`Discv5`] service. 
-/// -/// Since some requested operations are asynchronous, this pattern of message -/// passing is used as opposed to wrapping the [`Discv5`] in an `Arc<Mutex<>>`. -/// If an `Arc<Mutex<>>` were used, a lock held across the operation's future -/// would be needed since some asynchronous operations require a mutable -/// reference to the [`Discv5`] service. -#[derive(Debug)] -pub struct Discv5Driver { - /// The [`Discv5`] discovery service. - #[debug(skip)] - pub disc: Discv5, - /// The optional [`BootStoreFile`] to use for the bootstore. - pub bootstore: Option<BootStoreFile>, - /// Bootnodes used to bootstrap the discovery service. - pub bootnodes: BootNodes, - /// The chain ID of the network. - pub chain_id: u64, - /// The interval to discovery random nodes. - pub interval: Duration, - /// Whether to forward ENRs to the enr receiver on startup. - pub forward: bool, - /// The interval at which to store the ENRs in the bootstore. - /// This is set to 60 seconds by default. - pub store_interval: Duration, - /// The frequency at which to remove random nodes from the discovery table. - /// This is not enabled (`None`) by default. - pub remove_interval: Option<Duration>, -} - -impl Discv5Driver { - /// Returns a new [`Discv5Builder`] instance. - pub fn builder( - local_node: LocalNode, - chain_id: u64, - discovery_config: Config, - ) -> Discv5Builder { - Discv5Builder::new(local_node, chain_id, discovery_config) - } - - /// Instantiates a new [`Discv5Driver`]. - pub const fn new( - disc: Discv5, - interval: Duration, - chain_id: u64, - bootstore: Option<BootStoreFile>, - bootnodes: BootNodes, - ) -> Result<Self, std::io::Error> { - Ok(Self { - disc, - chain_id, - bootnodes, - interval, - forward: true, - remove_interval: None, - store_interval: Duration::from_secs(60), - bootstore, - }) - } - - /// Starts the inner [`Discv5`] service. 
- async fn init(self) -> Result<Self, discv5::Error> { - let (s, res) = { - |mut v: Self| async { - let res = v.disc.start().await; - (v, res) - } - } - .retry(ExponentialBuilder::default()) - .context(self) - .notify(|err: &discv5::Error, dur: Duration| { - warn!(target: "discovery", ?err, "Failed to start discovery service [Duration: {:?}]", dur); - }) - .await; - res.map(|_| s) - } - - /// Bootstraps the [`Discv5`] table with bootnodes. - async fn bootstrap_peers( - bootstore: Option<BootStoreFile>, - bootnodes: BootNodes, - chain_id: u64, - disc: &Discv5, - ) -> BootStore { - // Note: if the bootstore file cannot be created, we use a default bootstore. - let mut store = bootstore - .map_or_else(BootStore::default, |bootstore| bootstore.try_into().unwrap_or_default()); - - let initial_store_length = store.len(); - - for bn in bootnodes.0.into_iter().chain(BootNodes::from_chain_id(chain_id).0.into_iter()) { - let res = match bn { - BootNode::Enr(enr) => Ok(enr.clone()), - BootNode::Enode(enode) => disc.request_enr(enode.clone()).await, - }; - - let Ok(enr) = res else { - debug!(target: "discovery::bootstrap", ?res, "Failed to add boot node ENR to discovery table"); - continue; - }; - - let validation = EnrValidation::validate(&enr, chain_id); - if validation.is_invalid() { - trace!(target: "discovery::bootstrap", "Ignoring Invalid Bootnode ENR: {:?}. {:?}", enr, validation); - continue; - } - - if let Err(e) = disc.add_enr(enr.clone()) { - debug!(target: "discovery::bootstrap", "Failed to add enr: {:?}", e); - continue; - } - - store.add_enr(enr); - } - - let new_store_len = store.len(); - - debug!(target: "discovery::bootstrap", - added=%(new_store_len - initial_store_length), - total=%new_store_len, - "Added new ENRs to discv5 bootstore" - ); - - store - } - - /// Spawns a new [`Discv5`] discovery service in a new tokio task. - /// - /// Returns a [`Discv5Handler`] to communicate with the spawned task. 
- pub fn start(mut self) -> (Discv5Handler, tokio::sync::mpsc::Receiver<Enr>) { - let chain_id = self.chain_id; - let (req_sender, mut req_recv) = channel::<HandlerRequest>(1024); - let (enr_sender, enr_recv) = channel::<Enr>(1024); - - tokio::spawn(async move { - let remove = self.remove_interval.is_some(); - let remove_dur = self.remove_interval.unwrap_or(std::time::Duration::from_secs(600)); - let mut removal_interval = tokio::time::interval(remove_dur); - let mut interval = tokio::time::interval(self.interval); - let mut store_interval = tokio::time::interval(self.store_interval); - - // Step 1: Start the discovery service. - let Ok(s) = self.init().await else { - error!(target: "discovery", "Failed to start discovery service"); - return; - }; - self = s; - trace!(target: "discovery", "Discv5 Initialized"); - - // Step 2: Bootstrap the discovery table with bootnodes. - let mut store = - Self::bootstrap_peers(self.bootstore, self.bootnodes, chain_id, &self.disc).await; - - let enrs = self.disc.table_entries_enr(); - info!(target: "discovery", "Discv5 Started with {} ENRs", enrs.len()); - - // Step 3: Forward ENRs in the bootstore to the enr receiver. - if self.forward { - for enr in store.valid_peers_with_chain_id(self.chain_id) { - if let Err(e) = enr_sender.send(enr.clone()).await { - debug!(target: "discovery", "Failed to forward enr: {:?}", e); - } - } - } - - // Continuously attempt to start the event stream with a retry limit and shutdown - // signal. - let mut retries = 0; - let max_retries = 10; // Maximum number of retries before giving up. - let mut event_stream = loop { - if retries >= max_retries { - error!(target: "discovery", "Exceeded maximum retries for event stream startup. Aborting..."); - return; // Exit the task if the retry limit is reached. 
- } - match self.disc.event_stream().await { - Ok(event_stream) => { - break event_stream; - } - Err(e) => { - warn!(target: "discovery", "Failed to start event stream: {:?}", e); - retries += 1; - sleep(Duration::from_secs(2)).await; - info!(target: "discovery", "Retrying event stream startup... (Attempt {}/{})", retries, max_retries); - } - } - }; - - // Step 4: Run the core driver loop. - loop { - tokio::select! { - msg = req_recv.recv() => { - match msg { - Some(msg) => match msg { - HandlerRequest::Metrics(tx) => { - let metrics = self.disc.metrics(); - if let Err(e) = tx.send(metrics) { - warn!(target: "discovery", "Failed to send metrics: {:?}", e); - } - } - HandlerRequest::PeerCount(tx) => { - let peers = self.disc.connected_peers(); - if let Err(e) = tx.send(peers) { - warn!(target: "discovery", "Failed to send peer count: {:?}", e); - } - } - HandlerRequest::LocalEnr(tx) => { - let enr = self.disc.local_enr().clone(); - if let Err(e) = tx.send(enr.clone()) { - warn!(target: "discovery", "Failed to send local enr: {:?}", e); - } - } - HandlerRequest::AddEnr(enr) => { - let _ = self.disc.add_enr(enr); - } - HandlerRequest::RequestEnr{out, addr} => { - let enr = self.disc.request_enr(addr).await; - if let Err(e) = out.send(enr) { - warn!(target: "discovery", "Failed to send request enr: {:?}", e); - } - } - HandlerRequest::TableEnrs(tx) => { - let enrs = self.disc.table_entries_enr(); - if let Err(e) = tx.send(enrs) { - warn!(target: "discovery", "Failed to send table enrs: {:?}", e); - } - }, - HandlerRequest::TableInfos(tx) => { - let infos = self.disc.table_entries(); - if let Err(e) = tx.send(infos) { - warn!(target: "discovery", "Failed to send table infos: {:?}", e); - } - }, - HandlerRequest::BanAddrs{addrs_to_ban, ban_duration} => { - let enrs = self.disc.table_entries_enr(); - - for enr in enrs { - let Some(multi_addr) = enr_to_multiaddr(&enr) else { - continue; - }; - - if addrs_to_ban.contains(&multi_addr) { - self.disc.ban_node(&enr.node_id(), 
Some(ban_duration)); - } - } - }, - } - None => { - trace!(target: "discovery", "Receiver `None` peer enr"); - } - } - } - event = event_stream.recv() => { - let Some(event) = event else { - trace!(target: "discovery", "Received `None` event"); - continue; - }; - match event { - discv5::Event::Discovered(enr) => { - if EnrValidation::validate(&enr, chain_id).is_valid() { - debug!(target: "discovery", "Valid ENR discovered, forwarding to swarm: {:?}", enr); - kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "discovered"); - store.add_enr(enr.clone()); - let sender = enr_sender.clone(); - tokio::spawn(async move { - if let Err(e) = sender.send(enr).await { - debug!(target: "discovery", "Failed to send enr: {:?}", e); - } - }); - } - } - discv5::Event::SessionEstablished(enr, addr) => { - if EnrValidation::validate(&enr, chain_id).is_valid() { - debug!(target: "discovery", "Session established with valid ENR, forwarding to swarm. Address: {:?}, ENR: {:?}", addr, enr); - kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "session_established"); - store.add_enr(enr.clone()); - let sender = enr_sender.clone(); - tokio::spawn(async move { - if let Err(e) = sender.send(enr).await { - debug!(target: "discovery", "Failed to send enr: {:?}", e); - } - }); - } - } - discv5::Event::UnverifiableEnr { enr, .. 
} => { - if EnrValidation::validate(&enr, chain_id).is_valid() { - debug!(target: "discovery", "Valid ENR discovered, forwarding to swarm: {:?}", enr); - kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "unverifiable_enr"); - store.add_enr(enr.clone()); - let sender = enr_sender.clone(); - tokio::spawn(async move { - if let Err(e) = sender.send(enr).await { - debug!(target: "discovery", "Failed to send enr: {:?}", e); - } - }); - } - - } - _ => {} - } - } - _ = interval.tick() => { - let id = NodeId::random(); - trace!(target: "discovery", "Finding random node: {}", id); - kona_macros::inc!(gauge, crate::Metrics::FIND_NODE_REQUEST, "find_node" => "find_node"); - let fut = self.disc.find_node(id); - let enr_sender = enr_sender.clone(); - tokio::spawn(async move { - match fut.await { - Ok(nodes) => { - let enrs = nodes.into_iter().filter(|node| EnrValidation::validate(node, chain_id).is_valid()); - for enr in enrs { - _ = enr_sender.send(enr).await; - } - } - Err(err) => { - info!(target: "discovery", "Failed to find node: {:?}", err); - } - } - }); - } - _ = store_interval.tick() => { - let start = std::time::Instant::now(); - let enrs = self.disc.table_entries_enr(); - store.merge(enrs); - - if let Err(e) = store.sync() { - warn!(target: "discovery", "Failed to sync bootstore: {:?}", e); - } - - let elapsed = start.elapsed(); - debug!(target: "discovery", "Bootstore ENRs stored in {:?}", elapsed); - kona_macros::record!(histogram, crate::Metrics::ENR_STORE_TIME, "store_time", "store_time", elapsed.as_secs_f64()); - kona_macros::set!(gauge, crate::Metrics::DISCOVERY_PEER_COUNT, self.disc.connected_peers() as f64); - } - _ = removal_interval.tick() => { - if remove { - let enrs = self.disc.table_entries_enr(); - if enrs.len() > 20 { - let mut rng = rand::rng(); - let index = rand::Rng::random_range(&mut rng, 0..enrs.len()); - let enr = enrs[index].clone(); - debug!(target: "removal", "Removing random ENR: {:?}", enr); - 
self.disc.remove_node(&enr.node_id()); - } - } - } - } - } - }); - - (Discv5Handler::new(chain_id, req_sender), enr_recv) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::LocalNode; - use discv5::{ - ConfigBuilder, - enr::{CombinedKey, CombinedPublicKey}, - handler::NodeContact, - }; - use kona_genesis::{OP_MAINNET_CHAIN_ID, OP_SEPOLIA_CHAIN_ID}; - use tempfile::tempdir; - - use std::net::{IpAddr, Ipv4Addr, SocketAddr}; - - #[tokio::test] - async fn test_online_discv5_driver() { - let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { - unreachable!() - }; - - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let discovery = Discv5Driver::builder( - LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), - OP_SEPOLIA_CHAIN_ID, - ConfigBuilder::new(socket.into()).build(), - ) - .build() - .expect("Failed to build discovery service"); - let (handle, _) = discovery.start(); - assert_eq!(handle.chain_id, OP_SEPOLIA_CHAIN_ID); - } - - #[tokio::test] - async fn test_online_discv5_driver_bootstrap_testnet() { - // Use a test file to make sure bootstore - // doesn't conflict with a local bootstore. - let file = tempdir().unwrap(); - let file = file.path().join("bootstore.json"); - - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { - unreachable!() - }; - let mut discovery = Discv5Driver::builder( - LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), - OP_SEPOLIA_CHAIN_ID, - ConfigBuilder::new(socket.into()).build(), - ) - .build() - .expect("Failed to build discovery service"); - discovery.bootstore = Some(BootStoreFile::Custom(file)); - - discovery = discovery.init().await.expect("Failed to initialize discovery service"); - - // There are no ENRs for `OP_SEPOLIA_CHAIN_ID` in the bootstore. - // If an ENR is added, this check will fail. 
- Discv5Driver::bootstrap_peers( - discovery.bootstore, - discovery.bootnodes, - OP_SEPOLIA_CHAIN_ID, - &discovery.disc, - ) - .await; - assert!( - discovery.disc.table_entries_enr().len() >= 5, - "Discovery table should have at least 5 ENRs" - ); - - // It should have the same number of entries as the testnet table. - let testnet = BootNodes::testnet(); - - // Filter out testnet ENRs that are not valid. - let testnet: Vec<CombinedPublicKey> = testnet - .iter() - .filter_map(|node| { - if let BootNode::Enr(enr) = node { - // Check that the ENR is valid for the testnet. - if EnrValidation::validate(enr, OP_SEPOLIA_CHAIN_ID).is_invalid() { - return None; - } - } - let node_contact = - NodeContact::try_from_multiaddr(node.to_multiaddr().unwrap()).unwrap(); - - Some(node_contact.public_key()) - }) - .collect(); - - // There should be 8 valid ENRs for the testnet. - assert_eq!(testnet.len(), 8); - - // Those 8 ENRs should be in the discovery table. - let disc_enrs = discovery.disc.table_entries_enr(); - for public_key in testnet { - assert!( - disc_enrs.iter().any(|enr| enr.public_key() == public_key), - "Discovery table does not contain testnet ENR: {public_key:?}" - ); - } - } - - #[tokio::test] - async fn test_online_discv5_driver_bootstrap_mainnet() { - kona_cli::init_test_tracing(); - - // Use a test file to make sure bootstore - // doesn't conflict with a local bootstore. - let file = tempdir().unwrap(); - let file = file.path().join("bootstore.json"); - - // Filter out ENRs that are not valid. - let mainnet = BootNodes::mainnet(); - let mainnet: Vec<CombinedPublicKey> = mainnet - .iter() - .filter_map(|node| { - if let BootNode::Enr(enr) = node { - if EnrValidation::validate(enr, OP_MAINNET_CHAIN_ID).is_invalid() { - return None; - } - } - let node_contact = - NodeContact::try_from_multiaddr(node.to_multiaddr().unwrap()).unwrap(); - - Some(node_contact.public_key()) - }) - .collect(); - - // There should be 16 valid ENRs for the mainnet. 
- assert_eq!(mainnet.len(), 16); - - let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); - - let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { - unreachable!() - }; - - let mut discovery = Discv5Driver::builder( - LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), - OP_MAINNET_CHAIN_ID, - ConfigBuilder::new(socket.into()).build(), - ) - .build() - .expect("Failed to build discovery service"); - discovery.bootstore = Some(BootStoreFile::Custom(file)); - - discovery = discovery.init().await.expect("Failed to initialize discovery service"); - - // There are no ENRs for op mainnet in the bootstore. - // If an ENR is added, this check will fail. - Discv5Driver::bootstrap_peers( - discovery.bootstore, - discovery.bootnodes, - OP_MAINNET_CHAIN_ID, - &discovery.disc, - ) - .await; - assert!( - discovery.disc.table_entries_enr().len() >= 10, - "Discovery table should have at least 10 ENRs" - ); - - // Those ENRs should be in the mainnet bootnodes. - let disc_enrs = discovery.disc.table_entries_enr(); - for enr in disc_enrs { - assert!( - mainnet.iter().any(|pub_key| pub_key == &enr.public_key()), - "Discovery table does not contain mainnet ENR: {enr:?}" - ); - } - } -} diff --git a/kona/crates/node/disc/src/lib.rs b/kona/crates/node/disc/src/lib.rs deleted file mode 100644 index 335f76371b6..00000000000 --- a/kona/crates/node/disc/src/lib.rs +++ /dev/null @@ -1,72 +0,0 @@ -//! Discovery service for the OP Stack. -//! -//! This crate provides decentralized peer discovery capabilities using the Discv5 distributed -//! hash table (DHT) protocol, as defined in the Ethereum networking specifications. -//! -//! ## Overview -//! -//! The discovery service enables OP Stack nodes to find and connect to other network -//! participants without relying on centralized infrastructure. It maintains a local -//! view of the network through ENRs (Ethereum Node Records) and facilitates peer -//! 
connections for the gossip layer. -//! -//! ## Key Components -//! -//! - [`Discv5Driver`]: Main service driver that manages the discovery process -//! - [`Discv5Builder`]: Builder pattern for configuring discovery service parameters -//! - [`Discv5Handler`]: Handle for interacting with the discovery service -//! - [`LocalNode`]: Represents the local node's discovery information -//! -//! ## Discovery Process -//! -//! 1. **Bootstrap**: Connect to known bootstrap nodes to join the network -//! 2. **Table Population**: Discover peers through DHT queries and populate the routing table -//! 3. **Peer Maintenance**: Periodically refresh peer information and prune stale entries -//! 4. **ENR Updates**: Keep local ENR information current and propagate changes -//! -//! ## ENR Management -//! -//! ENRs (Ethereum Node Records) contain essential information about network peers: -//! - Node identity and cryptographic proof -//! - Network address and port information -//! - Protocol capabilities and version -//! - Chain-specific information (chain ID, etc.) -//! -//! ## Persistent Storage -//! -//! The service maintains a persistent bootstore that caches discovered peers across -//! restarts, reducing bootstrap time and improving network resilience. -//! -//! ## Configuration -//! -//! Key configuration parameters include: -//! - Discovery interval for random peer queries -//! - Bootstrap node list -//! - Storage location for persistent peer cache -//! 
- Network interface and port bindings - -#![doc(html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/kona-logo.png")] -#![doc(issue_tracker_base_url = "https://github.com/op-rs/kona/issues/")] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -// Logging -#[macro_use] -extern crate tracing; -// Used in tests -use kona_genesis as _; - -mod builder; -pub use builder::{Discv5Builder, LocalNode}; - -mod error; -pub use error::Discv5BuilderError; - -mod driver; -pub use driver::Discv5Driver; - -mod handler; -pub use handler::{Discv5Handler, HandlerRequest}; - -mod metrics; -pub use metrics::Metrics; diff --git a/kona/crates/node/disc/src/metrics.rs b/kona/crates/node/disc/src/metrics.rs deleted file mode 100644 index ef4ce0fcf1a..00000000000 --- a/kona/crates/node/disc/src/metrics.rs +++ /dev/null @@ -1,62 +0,0 @@ -//! Metrics for the discovery service. - -/// Container for discovery metrics. -#[derive(Debug, Clone)] -pub struct Metrics; - -impl Metrics { - /// Identifier for discv5 events. - pub const DISCOVERY_EVENT: &str = "kona_node_discovery_events"; - - /// Counter for the number of FIND_NODE requests. - pub const FIND_NODE_REQUEST: &str = "kona_node_find_node_requests"; - - /// Timer for the time taken to store ENRs in the bootstore. - pub const ENR_STORE_TIME: &str = "kona_node_enr_store_time"; - - /// Identifier for the gauge that tracks the number of peers in the discovery service. - pub const DISCOVERY_PEER_COUNT: &str = "kona_node_discovery_peer_count"; - - /// Initializes metrics for the discovery service. - /// - /// This does two things: - /// * Describes various metrics. - /// * Initializes metrics to 0 so they can be queried immediately. - #[cfg(feature = "metrics")] - pub fn init() { - Self::describe(); - Self::zero(); - } - - /// Describes metrics used in the discovery service. 
- #[cfg(feature = "metrics")] - pub fn describe() { - metrics::describe_gauge!(Self::DISCOVERY_EVENT, "Events received by the discv5 service"); - metrics::describe_histogram!( - Self::ENR_STORE_TIME, - "Observations of elapsed time to store ENRs in the on-disk bootstore" - ); - metrics::describe_gauge!( - Self::DISCOVERY_PEER_COUNT, - "Number of peers connected to the discv5 service" - ); - metrics::describe_gauge!( - Self::FIND_NODE_REQUEST, - "Requests made to find a node through the discv5 peer discovery service" - ); - } - - /// Initializes metrics to `0` so they can be queried immediately by consumers of prometheus - /// metrics. - #[cfg(feature = "metrics")] - pub fn zero() { - // Discovery Event - kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "discovered", 0); - kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "session_established", 0); - kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "unverifiable_enr", 0); - - // Peer Counts - kona_macros::set!(gauge, Self::DISCOVERY_PEER_COUNT, 0); - kona_macros::set!(gauge, Self::FIND_NODE_REQUEST, 0); - } -} diff --git a/kona/crates/node/engine/Cargo.toml b/kona/crates/node/engine/Cargo.toml deleted file mode 100644 index 20113becb8a..00000000000 --- a/kona/crates/node/engine/Cargo.toml +++ /dev/null @@ -1,72 +0,0 @@ -[package] -name = "kona-engine" -description = "An implementation of the OP Stack engine client" -version = "0.1.2" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# workspace -kona-genesis.workspace = true -kona-macros.workspace = true -kona-protocol = {workspace = true, features = ["serde", "std"]} - -# alloy -alloy-eips.workspace = true -alloy-consensus.workspace = true -alloy-json-rpc.workspace = true -alloy-network.workspace = true -alloy-transport.workspace = true -alloy-primitives.workspace = true -alloy-provider = { workspace = true, 
features = ["ipc", "reqwest", "reqwest-rustls-tls", "engine-api"] } -alloy-rpc-client.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } -alloy-transport-http = { workspace = true, features = ["reqwest", "hyper", "jwt-auth"] } - -# op-alloy -op-alloy-network.workspace = true -op-alloy-consensus.workspace = true -op-alloy-provider.workspace = true -op-alloy-rpc-types.workspace = true -op-alloy-rpc-types-engine.workspace = true - -# general -serde.workspace = true -tokio.workspace = true -tracing.workspace = true -async-trait.workspace = true -thiserror.workspace = true -url.workspace = true -tower.workspace = true -http-body-util.workspace = true -derive_more = { workspace = true, features = ["display", "deref", "from_str", "constructor"] } -serde_json.workspace = true -jsonrpsee-types.workspace = true - -# metrics -metrics = { workspace = true, optional = true } - -# rollup boost -rollup-boost.workspace = true -rollup-boost-types.workspace = true -http.workspace = true -parking_lot.workspace = true - -[dev-dependencies] -kona-registry.workspace = true -rand = {workspace = true, features = ["thread_rng"]} -arbitrary.workspace = true -op-alloy-rpc-types = {workspace = true, features = ["arbitrary", "k256"]} -metrics-exporter-prometheus.workspace = true -rstest.workspace = true - -[features] -metrics = [ "dep:metrics" ] -test-utils = [] diff --git a/kona/crates/node/engine/README.md b/kona/crates/node/engine/README.md deleted file mode 100644 index a9062e98ed2..00000000000 --- a/kona/crates/node/engine/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# `kona-engine` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-engine"><img src="https://img.shields.io/crates/v/kona-engine.svg?label=kona-engine&labelColor=2a2f35" alt="Kona 
Engine"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -An extensible implementation of the [OP Stack][op-stack] rollup node engine client. - -## Overview - -The `kona-engine` crate provides a task-based engine client for interacting with Ethereum execution layers. It implements the Engine API specification and manages the execution layer state through a priority-driven task queue system. - -## Key Components - -- **[`Engine`](crate::Engine)** - Main task queue processor that executes engine operations atomically -- **[`EngineClient`](crate::EngineClient)** - HTTP client for Engine API communication with JWT authentication -- **[`EngineState`](crate::EngineState)** - Tracks the current state of the execution layer -- **Task Types** - Specialized tasks for different engine operations: - - [`InsertTask`](crate::InsertTask) - Insert new payloads into the execution engine - - [`BuildTask`](crate::BuildTask) - Build new payloads with automatic forkchoice synchronization - - [`ConsolidateTask`](crate::ConsolidateTask) - Consolidate unsafe payloads to advance the safe chain - - [`FinalizeTask`](crate::FinalizeTask) - Finalize safe payloads on L1 confirmation - - [`SynchronizeTask`](crate::SynchronizeTask) - Internal task for execution layer forkchoice synchronization - -## Architecture - -The engine implements a task-driven architecture where forkchoice synchronization is handled automatically: - -- **Automatic Forkchoice Handling**: The [`BuildTask`](crate::BuildTask) automatically performs forkchoice updates during block building, eliminating the need for explicit forkchoice management in user code. 
-- **Internal Synchronization**: [`SynchronizeTask`](crate::SynchronizeTask) handles internal execution layer synchronization and is primarily used by other tasks rather than directly by users. -- **Priority-Based Execution**: Tasks are executed in priority order to ensure optimal sequencer performance and block processing efficiency. - -## Engine API Compatibility - -The crate supports multiple Engine API versions with automatic version selection based on the rollup configuration: - -- **Engine Forkchoice Updated**: V2, V3 -- **Engine New Payload**: V2, V3, V4 -- **Engine Get Payload**: V2, V3, V4 - -Version selection follows Optimism hardfork activation times (Bedrock, Canyon, Delta, Ecotone, Isthmus). - -## Features - -- `metrics` - Enable Prometheus metrics collection (optional) - -<!-- Hyper Links --> - -[op-stack]: https://specs.optimism.io diff --git a/kona/crates/node/engine/src/attributes.rs b/kona/crates/node/engine/src/attributes.rs deleted file mode 100644 index 5eaf24a3ecd..00000000000 --- a/kona/crates/node/engine/src/attributes.rs +++ /dev/null @@ -1,1007 +0,0 @@ -//! Contains a utility method to check if attributes match a block. - -use alloy_eips::{Decodable2718, eip1559::BaseFeeParams}; -use alloy_network::TransactionResponse; -use alloy_primitives::{Address, B256, Bytes}; -use alloy_rpc_types_eth::{Block, BlockTransactions, Withdrawals}; -use kona_genesis::RollupConfig; -use kona_protocol::OpAttributesWithParent; -use op_alloy_consensus::{ - EIP1559ParamError, OpTxEnvelope, decode_holocene_extra_data, decode_jovian_extra_data, -}; -use op_alloy_rpc_types::Transaction; - -/// Result of validating payload attributes against an execution layer block. -/// -/// Used to verify that proposed payload attributes match the actual executed block, -/// ensuring consistency between the rollup derivation process and execution layer. -/// Validation includes withdrawals, transactions, fees, and other block properties. 
-/// -/// # Examples -/// -/// ```rust,ignore -/// use kona_engine::AttributesMatch; -/// use kona_genesis::RollupConfig; -/// use kona_protocol::OpAttributesWithParent; -/// -/// let config = RollupConfig::default(); -/// let match_result = AttributesMatch::check_withdrawals(&config, &attributes, &block); -/// -/// if match_result.is_match() { -/// println!("Attributes are valid for this block"); -/// } -/// ``` -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AttributesMatch { - /// The payload attributes are consistent with the block. - Match, - /// The attributes do not match the block (contains mismatch details). - Mismatch(AttributesMismatch), -} - -impl AttributesMatch { - /// Returns true if the attributes match the block. - pub const fn is_match(&self) -> bool { - matches!(self, Self::Match) - } - - /// Returns true if the attributes do not match the block. - pub const fn is_mismatch(&self) -> bool { - matches!(self, Self::Mismatch(_)) - } - - /// Checks that withdrawals for a block and attributes match. 
- pub fn check_withdrawals( - config: &RollupConfig, - attributes: &OpAttributesWithParent, - block: &Block<Transaction>, - ) -> Self { - let attr_withdrawals = attributes.attributes().payload_attributes.withdrawals.as_ref(); - let attr_withdrawals = attr_withdrawals.map(|w| Withdrawals::new(w.to_vec())); - let block_withdrawals = block.withdrawals.as_ref(); - - if config.is_canyon_active(block.header.timestamp) { - // In canyon, the withdrawals list should be some and empty - if attr_withdrawals.is_none_or(|w| !w.is_empty()) { - return Self::Mismatch(AttributesMismatch::CanyonWithdrawalsNotEmpty); - } - if block_withdrawals.is_none_or(|w| !w.is_empty()) { - return Self::Mismatch(AttributesMismatch::CanyonWithdrawalsNotEmpty); - } - if !config.is_isthmus_active(block.header.timestamp) { - // In canyon, the withdrawals root should be set to the empty value - let empty_hash = alloy_consensus::EMPTY_ROOT_HASH; - if block.header.inner.withdrawals_root != Some(empty_hash) { - return Self::Mismatch(AttributesMismatch::CanyonNotEmptyHash); - } - } - } else { - // In bedrock, the withdrawals list should be None - if attr_withdrawals.is_some() { - return Self::Mismatch(AttributesMismatch::BedrockWithdrawals); - } - } - - if config.is_isthmus_active(block.header.timestamp) { - // In isthmus, the withdrawals root must be set - if block.header.inner.withdrawals_root.is_none() { - return Self::Mismatch(AttributesMismatch::IsthmusMissingWithdrawalsRoot); - } - } - - Self::Match - } - - /// Checks the attributes and block transaction list for consolidation. - /// We start by checking that there are the same number of transactions in both the attribute - /// payload and the block. Then we compare their contents - fn check_transactions(attributes_txs: &[Bytes], block: &Block<Transaction>) -> Self { - // Before checking the number of transactions, we have to make sure that the block - // has the right transactions format. 
We need to have access to the - // full transactions to be able to compare their contents. - let block_txs = match block.transactions { - BlockTransactions::Hashes(_) | BlockTransactions::Full(_) - if attributes_txs.is_empty() && block.transactions.is_empty() => - { - // We early return when both attributes and blocks are empty. This is for ergonomics - // because the default [`BlockTransactions`] format is - // [`BlockTransactions::Hash`], which may cause - // the [`BlockTransactions`] format check to fail right below. We may want to be a - // bit more flexible and not reject the hash format if both the - // attributes and the block are empty. - return Self::Match; - } - BlockTransactions::Uncle => { - // This can never be uncle transactions - error!( - "Invalid format for the block transactions. The `Uncle` transaction format is not relevant in that context and should not get used here. This is a bug" - ); - - return AttributesMismatch::MalformedBlockTransactions.into(); - } - BlockTransactions::Hashes(_) => { - // We can't have hash transactions with non empty blocks - error!( - "Invalid format for the block transactions. The `Hash` transaction format is not relevant in that context and should not get used here. This is a bug." - ); - - return AttributesMismatch::MalformedBlockTransactions.into(); - } - BlockTransactions::Full(ref block_txs) => block_txs, - }; - - let attributes_txs_len = attributes_txs.len(); - let block_txs_len = block_txs.len(); - - if attributes_txs_len != block_txs_len { - return AttributesMismatch::TransactionLen(attributes_txs_len, block_txs_len).into(); - } - - // Then we need to check that the content of the encoded transactions match - // Note that it is safe to zip both iterators because we checked their length - // beforehand. 
- for (attr_tx_bytes, block_tx) in attributes_txs.iter().zip(block_txs) { - trace!( - target: "engine", - ?attr_tx_bytes, - block_tx_hash = %block_tx.tx_hash(), - "Checking attributes transaction against block transaction", - ); - // Let's try to deserialize the attributes transaction - let Ok(attr_tx) = OpTxEnvelope::decode_2718(&mut &attr_tx_bytes[..]) else { - error!( - "Impossible to deserialize transaction from attributes. If we have stored these attributes it means the transactions where well formatted. This is a bug" - ); - - return AttributesMismatch::MalformedAttributesTransaction.into(); - }; - - if &attr_tx != block_tx.inner.inner.inner() { - warn!(target: "engine", ?attr_tx, ?block_tx, "Transaction mismatch in derived attributes"); - return AttributesMismatch::TransactionContent( - attr_tx.tx_hash(), - block_tx.tx_hash(), - ) - .into(); - } - } - - Self::Match - } - - /// Validates and compares EIP1559 parameters for consolidation. - fn check_eip1559( - config: &RollupConfig, - attributes: &OpAttributesWithParent, - block: &Block<Transaction>, - ) -> Self { - // We can assume that the EIP-1559 params are set iff holocene is active. - // Note here that we don't need to check for the attributes length because of type-safety. - let (ae, ad): (u128, u128) = match attributes.attributes().decode_eip_1559_params() { - None => { - // Holocene is active but the eip1559 are not set. This is a bug! - // Note: we checked the timestamp match above, so we can assume that both the - // attributes and the block have the same stamps - if config.is_holocene_active(block.header.timestamp) { - error!( - "EIP1559 parameters for attributes not set while holocene is active. This is a bug" - ); - return AttributesMismatch::MissingAttributesEIP1559.into(); - } - - // If the attributes are not specified, that means we can just early return. 
- return Self::Match; - } - Some((0, e)) if e != 0 => { - error!( - "Holocene EIP1559 params cannot have a 0 denominator unless elasticity is also 0. This is a bug" - ); - return AttributesMismatch::InvalidEIP1559ParamsCombination.into(); - } - // We need to translate (0, 0) parameters to pre-holocene protocol constants. - // Since holocene is supposed to be active, canyon should be as well. We take the canyon - // base fee params. - Some((0, 0)) => { - let BaseFeeParams { max_change_denominator, elasticity_multiplier } = - config.chain_op_config.post_canyon_params(); - - (elasticity_multiplier, max_change_denominator) - } - Some((ae, ad)) => (ae.into(), ad.into()), - }; - - let extra_data_decoded = if config.is_jovian_active(block.header.timestamp) { - decode_jovian_extra_data(&block.header.extra_data).map(|(be, bd, _)| (be, bd)) - } else if config.is_holocene_active(block.header.timestamp) { - decode_holocene_extra_data(&block.header.extra_data) - } else { - return AttributesMismatch::MissingBlockEIP1559.into(); - }; - - // We decode the extra data stemming from the block header. - let (be, bd): (u128, u128) = match extra_data_decoded { - Ok((be, bd)) => (be.into(), bd.into()), - Err(EIP1559ParamError::NoEIP1559Params) => { - error!( - "EIP1559 parameters for the block not set while holocene is active. This is a bug" - ); - return AttributesMismatch::MissingBlockEIP1559.into(); - } - Err(EIP1559ParamError::InvalidVersion(v)) => { - error!( - version = v, - "The version in the extra data EIP1559 payload is incorrect. Should be 0. This is a bug", - ); - return AttributesMismatch::InvalidExtraDataVersion.into(); - } - Err(e) => { - error!(err = ?e, "An unknown extra data decoding error occurred. 
This is a bug",); - - return AttributesMismatch::UnknownExtraDataDecodingError(e).into(); - } - }; - - // We now have to check that both parameters match - if ae != be || ad != bd { - return AttributesMismatch::EIP1559Parameters( - BaseFeeParams { max_change_denominator: ad, elasticity_multiplier: ae }, - BaseFeeParams { max_change_denominator: bd, elasticity_multiplier: be }, - ) - .into(); - } - - Self::Match - } - - /// Checks if the specified [`OpAttributesWithParent`] matches the specified [`Block`]. - /// Returns [`AttributesMatch::Match`] if they match, otherwise returns - /// [`AttributesMatch::Mismatch`]. - pub fn check( - config: &RollupConfig, - attributes: &OpAttributesWithParent, - block: &Block<Transaction>, - ) -> Self { - if attributes.parent.block_info.hash != block.header.inner.parent_hash { - return AttributesMismatch::ParentHash( - attributes.parent.block_info.hash, - block.header.inner.parent_hash, - ) - .into(); - } - - if attributes.attributes().payload_attributes.timestamp != block.header.inner.timestamp { - return AttributesMismatch::Timestamp( - attributes.attributes().payload_attributes.timestamp, - block.header.inner.timestamp, - ) - .into(); - } - - let mix_hash = block.header.inner.mix_hash; - if attributes.attributes().payload_attributes.prev_randao != mix_hash { - return AttributesMismatch::PrevRandao( - attributes.attributes().payload_attributes.prev_randao, - mix_hash, - ) - .into(); - } - - // Let's extract the list of attribute transactions - let default_vec = vec![]; - let attributes_txs = attributes - .attributes() - .transactions - .as_ref() - .map_or_else(|| &default_vec, |attrs| attrs); - - // Check transactions - if let mismatch @ Self::Mismatch(_) = Self::check_transactions(attributes_txs, block) { - return mismatch; - } - - let Some(gas_limit) = attributes.attributes().gas_limit else { - return AttributesMismatch::MissingAttributesGasLimit.into(); - }; - - if gas_limit != block.header.inner.gas_limit { - return 
AttributesMismatch::GasLimit(gas_limit, block.header.inner.gas_limit).into(); - } - - if let m @ Self::Mismatch(_) = Self::check_withdrawals(config, attributes, block) { - return m; - } - - if attributes.attributes().payload_attributes.parent_beacon_block_root != - block.header.inner.parent_beacon_block_root - { - return AttributesMismatch::ParentBeaconBlockRoot( - attributes.attributes().payload_attributes.parent_beacon_block_root, - block.header.inner.parent_beacon_block_root, - ) - .into(); - } - - if attributes.attributes().payload_attributes.suggested_fee_recipient != - block.header.inner.beneficiary - { - return AttributesMismatch::FeeRecipient( - attributes.attributes().payload_attributes.suggested_fee_recipient, - block.header.inner.beneficiary, - ) - .into(); - } - - // Check the EIP-1559 parameters in a separate helper method - if let m @ Self::Mismatch(_) = Self::check_eip1559(config, attributes, block) { - return m; - } - - Self::Match - } -} - -/// An enum over the type of mismatch between [`OpAttributesWithParent`] -/// and a [`Block`]. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AttributesMismatch { - /// The parent hash of the block does not match the parent hash of the attributes. - ParentHash(B256, B256), - /// The timestamp of the block does not match the timestamp of the attributes. - Timestamp(u64, u64), - /// The prev randao of the block does not match the prev randao of the attributes. - PrevRandao(B256, B256), - /// The block contains malformed transactions. This is a bug - the transaction format - /// should be checked before the consolidation step. - MalformedBlockTransactions, - /// There is a malformed transaction inside the attributes. This is a bug - the transaction - /// format should be checked before the consolidation step. - MalformedAttributesTransaction, - /// A mismatch in the number of transactions contained in the attributes and the block. 
- TransactionLen(usize, usize), - /// A mismatch in the content of some transactions contained in the attributes and the block. - TransactionContent(B256, B256), - /// The EIP1559 payload for the [`OpAttributesWithParent`] is missing when holocene is active. - MissingAttributesEIP1559, - /// The EIP1559 payload for the block is missing when holocene is active. - MissingBlockEIP1559, - /// The version in the extra data EIP1559 payload is incorrect. Should be 0. - InvalidExtraDataVersion, - /// An unknown extra data decoding error occurred. - UnknownExtraDataDecodingError(EIP1559ParamError), - /// Holocene EIP1559 params cannot have a 0 denominator unless elasticity is also 0 - InvalidEIP1559ParamsCombination, - /// The EIP1559 base fee parameters of the attributes and the block don't match - EIP1559Parameters(BaseFeeParams, BaseFeeParams), - /// Transactions mismatch. - Transactions(u64, u64), - /// The gas limit of the block does not match the gas limit of the attributes. - GasLimit(u64, u64), - /// The gas limit for the [`OpAttributesWithParent`] is missing. - MissingAttributesGasLimit, - /// The fee recipient of the block does not match the fee recipient of the attributes. - FeeRecipient(Address, Address), - /// A mismatch in the parent beacon block root. - ParentBeaconBlockRoot(Option<B256>, Option<B256>), - /// After the canyon hardfork, withdrawals cannot be empty. - CanyonWithdrawalsNotEmpty, - /// After the canyon hardfork, the withdrawals root must be the empty hash. - CanyonNotEmptyHash, - /// In the bedrock hardfork, the attributes must has empty withdrawals. - BedrockWithdrawals, - /// In the isthmus hardfork, the withdrawals root must be set. 
- IsthmusMissingWithdrawalsRoot, -} - -impl From<AttributesMismatch> for AttributesMatch { - fn from(mismatch: AttributesMismatch) -> Self { - Self::Mismatch(mismatch) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::AttributesMismatch::EIP1559Parameters; - use alloy_consensus::EMPTY_ROOT_HASH; - use alloy_primitives::{Bytes, FixedBytes, address, b256}; - use alloy_rpc_types_eth::BlockTransactions; - use arbitrary::{Arbitrary, Unstructured}; - use kona_protocol::{BlockInfo, L2BlockInfo}; - use kona_registry::ROLLUP_CONFIGS; - use op_alloy_consensus::encode_holocene_extra_data; - use op_alloy_rpc_types_engine::OpPayloadAttributes; - - fn default_attributes() -> OpAttributesWithParent { - OpAttributesWithParent { - attributes: OpPayloadAttributes::default(), - parent: L2BlockInfo::default(), - derived_from: Some(BlockInfo::default()), - is_last_in_span: true, - } - } - - fn default_rollup_config() -> &'static RollupConfig { - let opm = 10; - ROLLUP_CONFIGS.get(&opm).expect("default rollup config should exist") - } - - #[test] - fn test_attributes_match_parent_hash_mismatch() { - let cfg = default_rollup_config(); - let attributes = default_attributes(); - let mut block = Block::<Transaction>::default(); - block.header.inner.parent_hash = - b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"); - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::ParentHash( - attributes.parent.block_info.hash, - block.header.inner.parent_hash, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_check_timestamp() { - let cfg = default_rollup_config(); - let attributes = default_attributes(); - let mut block = Block::<Transaction>::default(); - block.header.inner.timestamp = 1234567890; - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::Timestamp( - 
attributes.attributes().payload_attributes.timestamp, - block.header.inner.timestamp, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_check_prev_randao() { - let cfg = default_rollup_config(); - let attributes = default_attributes(); - let mut block = Block::<Transaction>::default(); - block.header.inner.mix_hash = - b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"); - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::PrevRandao( - attributes.attributes().payload_attributes.prev_randao, - block.header.inner.mix_hash, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_missing_gas_limit() { - let cfg = default_rollup_config(); - let attributes = default_attributes(); - let mut block = Block::<Transaction>::default(); - block.header.inner.gas_limit = 123456; - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::MissingAttributesGasLimit.into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_check_gas_limit() { - let cfg = default_rollup_config(); - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = Some(123457); - let mut block = Block::<Transaction>::default(); - block.header.inner.gas_limit = 123456; - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::GasLimit( - attributes.attributes().gas_limit.unwrap_or_default(), - block.header.inner.gas_limit, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_check_parent_beacon_block_root() { - let cfg = default_rollup_config(); - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = 
Some(0); - attributes.attributes.payload_attributes.parent_beacon_block_root = - Some(b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef")); - let block = Block::<Transaction>::default(); - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::ParentBeaconBlockRoot( - attributes.attributes().payload_attributes.parent_beacon_block_root, - block.header.inner.parent_beacon_block_root, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match_check_fee_recipient() { - let cfg = default_rollup_config(); - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = Some(0); - let mut block = Block::<Transaction>::default(); - block.header.inner.beneficiary = address!("1234567890abcdef1234567890abcdef12345678"); - let check = AttributesMatch::check(cfg, &attributes, &block); - let expected: AttributesMatch = AttributesMismatch::FeeRecipient( - attributes.attributes().payload_attributes.suggested_fee_recipient, - block.header.inner.beneficiary, - ) - .into(); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - fn generate_txs(num_txs: usize) -> Vec<Transaction> { - // Simulate some random data - let mut data = vec![0; 1024]; - let mut rng = rand::rng(); - - (0..num_txs) - .map(|_| { - rand::Rng::fill(&mut rng, &mut data[..]); - - // Create unstructured data with the random bytes - let u = Unstructured::new(&data); - - // Generate a random instance of MyStruct - Transaction::arbitrary_take_rest(u).expect("Impossible to generate arbitrary tx") - }) - .collect() - } - - fn test_transactions_match_helper() -> (OpAttributesWithParent, Block<Transaction>) { - const NUM_TXS: usize = 10; - - let transactions = generate_txs(NUM_TXS); - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = Some(0); - attributes.attributes.transactions = Some( - transactions - .iter() - 
.map(|tx| { - use alloy_eips::Encodable2718; - let mut buf = vec![]; - tx.inner.inner.inner().encode_2718(&mut buf); - Bytes::from(buf) - }) - .collect::<Vec<_>>(), - ); - - let block = Block::<Transaction> { - transactions: BlockTransactions::Full(transactions), - ..Default::default() - }; - - (attributes, block) - } - - #[test] - fn test_attributes_match_check_transactions() { - let cfg = default_rollup_config(); - let (attributes, block) = test_transactions_match_helper(); - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - } - - #[test] - fn test_attributes_mismatch_check_transactions_len() { - let cfg = default_rollup_config(); - let (mut attributes, block) = test_transactions_match_helper(); - attributes.attributes = OpPayloadAttributes { - transactions: attributes.attributes.transactions.map(|mut txs| { - txs.pop(); - txs - }), - ..attributes.attributes - }; - - let block_txs_len = block.transactions.len(); - - let expected: AttributesMatch = - AttributesMismatch::TransactionLen(block_txs_len - 1, block_txs_len).into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_mismatch_check_transaction_content() { - let cfg = default_rollup_config(); - let (attributes, mut block) = test_transactions_match_helper(); - let BlockTransactions::Full(block_txs) = &mut block.transactions else { - unreachable!("The helper should build a full list of transactions") - }; - - let first_tx = block_txs.last().unwrap().clone(); - let first_tx_hash = first_tx.tx_hash(); - - // We set the last tx to be the same as the first transaction. - // Since the transactions are generated randomly and there are more than one transaction, - // there is a very high likelihood that any pair of transactions is distinct. 
- let last_tx = block_txs.first_mut().unwrap(); - let last_tx_hash = last_tx.tx_hash(); - *last_tx = first_tx; - - let expected: AttributesMatch = - AttributesMismatch::TransactionContent(last_tx_hash, first_tx_hash).into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - /// Checks the edge case where the attributes array is empty. - #[test] - fn test_attributes_mismatch_empty_tx_attributes() { - let cfg = default_rollup_config(); - let (mut attributes, block) = test_transactions_match_helper(); - attributes.attributes = OpPayloadAttributes { transactions: None, ..attributes.attributes }; - - let block_txs_len = block.transactions.len(); - - let expected: AttributesMatch = AttributesMismatch::TransactionLen(0, block_txs_len).into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - /// Checks the edge case where the transactions contained in the block have the wrong - /// format. - #[test] - fn test_block_transactions_wrong_format() { - let cfg = default_rollup_config(); - let (attributes, mut block) = test_transactions_match_helper(); - block.transactions = BlockTransactions::Uncle; - - let expected: AttributesMatch = AttributesMismatch::MalformedBlockTransactions.into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - /// Checks the edge case where the transactions contained in the attributes have the wrong - /// format. 
- #[test] - fn test_attributes_transactions_wrong_format() { - let cfg = default_rollup_config(); - let (mut attributes, block) = test_transactions_match_helper(); - let txs = attributes.attributes.transactions.as_mut().unwrap(); - let first_tx_bytes = txs.first_mut().unwrap(); - *first_tx_bytes = Bytes::copy_from_slice(&[0, 1, 2]); - - let expected: AttributesMatch = AttributesMismatch::MalformedAttributesTransaction.into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - assert!(check.is_mismatch()); - } - - // Test that the check pass if the transactions obtained from the attributes have the format - // `Some(vec![])`, ie an empty vector inside a `Some` option. - #[test] - fn test_attributes_and_block_transactions_empty() { - let cfg = default_rollup_config(); - let (mut attributes, mut block) = test_transactions_match_helper(); - - attributes.attributes = - OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; - - block.transactions = BlockTransactions::Full(vec![]); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - - // Edge case: if the block transactions and the payload attributes are empty, we can also - // use the hash format (this is the default value of `BlockTransactions`). - attributes.attributes = OpPayloadAttributes { transactions: None, ..attributes.attributes }; - block.transactions = BlockTransactions::Hashes(vec![]); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - } - - // Edge case: if the payload attributes has the format `Some(vec![])`, we can still - // use the hash format. 
- #[test] - fn test_attributes_and_block_transactions_empty_hash_format() { - let cfg = default_rollup_config(); - let (mut attributes, mut block) = test_transactions_match_helper(); - - attributes.attributes = - OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; - - block.transactions = BlockTransactions::Hashes(vec![]); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - } - - // Test that the check fails if the block format is incorrect and the attributes are empty - #[test] - fn test_attributes_empty_and_block_uncle() { - let cfg = default_rollup_config(); - let (mut attributes, mut block) = test_transactions_match_helper(); - - attributes.attributes = - OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; - - block.transactions = BlockTransactions::Uncle; - - let expected: AttributesMatch = AttributesMismatch::MalformedBlockTransactions.into(); - - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, expected); - } - - fn eip1559_test_setup() -> (RollupConfig, OpAttributesWithParent, Block<Transaction>) { - let mut cfg = default_rollup_config().clone(); - - // We need to activate holocene to make sure it works! We set the activation time to zero to - // make sure that it is activated by default. 
- cfg.hardforks.holocene_time = Some(0); - - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = Some(0); - // For canyon and above we need to specify the withdrawals - attributes.attributes.payload_attributes.withdrawals = Some(vec![]); - - // For canyon and above we also need to specify the withdrawal headers - let block = Block { - withdrawals: Some(Withdrawals(vec![])), - header: alloy_rpc_types_eth::Header { - inner: alloy_consensus::Header { - withdrawals_root: Some(EMPTY_ROOT_HASH), - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - (cfg, attributes, block) - } - - /// Ensures that we have to set the EIP1559 parameters for holocene and above. - #[test] - fn test_eip1559_parameters_not_specified_holocene() { - let (cfg, attributes, block) = eip1559_test_setup(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Mismatch(AttributesMismatch::MissingAttributesEIP1559)); - assert!(check.is_mismatch()); - } - - /// Ensures that we have to set the EIP1559 parameters for holocene and above. - #[test] - fn test_eip1559_parameters_specified_attributes_but_not_block() { - let (cfg, mut attributes, block) = eip1559_test_setup(); - - attributes.attributes.eip_1559_params = Some(Default::default()); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!( - check, - AttributesMatch::Mismatch(AttributesMismatch::UnknownExtraDataDecodingError( - EIP1559ParamError::InvalidExtraDataLength - )) - ); - assert!(check.is_mismatch()); - } - - /// Check that, when the eip1559 params are specified and empty, the check fails because we - /// fallback on canyon params for the attributes but not for the block (edge case). 
- #[test] - fn test_eip1559_parameters_specified_both_and_empty() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - attributes.attributes.eip_1559_params = Some(Default::default()); - block.header.extra_data = vec![0; 9].into(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!( - check, - AttributesMatch::Mismatch(EIP1559Parameters( - BaseFeeParams { max_change_denominator: 250, elasticity_multiplier: 6 }, - BaseFeeParams { max_change_denominator: 0, elasticity_multiplier: 0 } - )) - ); - assert!(check.is_mismatch()); - } - - #[test] - fn test_eip1559_parameters_empty_for_attr_only() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - attributes.attributes.eip_1559_params = Some(Default::default()); - block.header.extra_data = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 250, elasticity_multiplier: 6 }, - ) - .unwrap(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - assert!(check.is_match()); - } - - #[test] - fn test_eip1559_parameters_custom_values_match() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - let eip1559_extra_params = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, - ) - .unwrap(); - let eip1559_params: FixedBytes<8> = - eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); - - attributes.attributes.eip_1559_params = Some(eip1559_params); - block.header.extra_data = eip1559_extra_params; - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - assert!(check.is_match()); - } - - #[test] - fn test_eip1559_parameters_custom_values_mismatch() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - let eip1559_extra_params = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { 
max_change_denominator: 100, elasticity_multiplier: 2 }, - ) - .unwrap(); - - let eip1559_params: FixedBytes<8> = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 99, elasticity_multiplier: 2 }, - ) - .unwrap() - .split_off(1) - .as_ref() - .try_into() - .unwrap(); - - attributes.attributes.eip_1559_params = Some(eip1559_params); - block.header.extra_data = eip1559_extra_params; - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!( - check, - AttributesMatch::Mismatch(AttributesMismatch::EIP1559Parameters( - BaseFeeParams { max_change_denominator: 99, elasticity_multiplier: 2 }, - BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 } - )) - ); - assert!(check.is_mismatch()); - } - - /// Edge case: if the elasticity multiplier is 0, the max change denominator cannot be 0 as well - #[test] - fn test_eip1559_parameters_combination_mismatch() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - let eip1559_extra_params = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 5, elasticity_multiplier: 0 }, - ) - .unwrap(); - let eip1559_params: FixedBytes<8> = - eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); - - attributes.attributes.eip_1559_params = Some(eip1559_params); - block.header.extra_data = eip1559_extra_params; - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!( - check, - AttributesMatch::Mismatch(AttributesMismatch::InvalidEIP1559ParamsCombination) - ); - assert!(check.is_mismatch()); - } - - /// Check that the version of the extra block data must be zero. 
- #[test] - fn test_eip1559_parameters_invalid_version() { - let (cfg, mut attributes, mut block) = eip1559_test_setup(); - - let eip1559_extra_params = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, - ) - .unwrap(); - let eip1559_params: FixedBytes<8> = - eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); - - let mut raw_extra_params_bytes = eip1559_extra_params.to_vec(); - raw_extra_params_bytes[0] = 10; - - attributes.attributes.eip_1559_params = Some(eip1559_params); - block.header.extra_data = raw_extra_params_bytes.into(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Mismatch(AttributesMismatch::InvalidExtraDataVersion)); - assert!(check.is_mismatch()); - } - - /// Try to encode jovian extra data with the holocene encoding function. - #[test] - fn test_eip1559_parameters_invalid_jovian_encoding() { - let (mut cfg, mut attributes, mut block) = eip1559_test_setup(); - - cfg.hardforks.jovian_time = Some(0); - - let eip1559_extra_params = encode_holocene_extra_data( - Default::default(), - BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, - ) - .unwrap(); - let eip1559_params: FixedBytes<8> = - eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); - - let raw_extra_params_bytes = eip1559_extra_params.to_vec(); - - attributes.attributes.eip_1559_params = Some(eip1559_params); - block.header.extra_data = raw_extra_params_bytes.into(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - assert_eq!( - check, - AttributesMatch::Mismatch(AttributesMismatch::UnknownExtraDataDecodingError( - EIP1559ParamError::InvalidExtraDataLength - )) - ); - assert!(check.is_mismatch()); - } - - /// The default parameters can't overflow the u32 byte representation of the base fee params! 
- #[test] - fn test_eip1559_default_param_cant_overflow() { - let (mut cfg, mut attributes, mut block) = eip1559_test_setup(); - cfg.chain_op_config.eip1559_denominator_canyon = u64::MAX; - cfg.chain_op_config.eip1559_elasticity = u64::MAX; - - attributes.attributes.eip_1559_params = Some(Default::default()); - block.header.extra_data = vec![0; 9].into(); - - let check = AttributesMatch::check(&cfg, &attributes, &block); - - // Note that in this case we *always* have a mismatch because there isn't enough bytes in - // the default representation of the extra params to represent a u128 - assert_eq!( - check, - AttributesMatch::Mismatch(EIP1559Parameters( - BaseFeeParams { - max_change_denominator: u64::MAX as u128, - elasticity_multiplier: u64::MAX as u128 - }, - BaseFeeParams { max_change_denominator: 0, elasticity_multiplier: 0 } - )) - ); - assert!(check.is_mismatch()); - } - - #[test] - fn test_attributes_match() { - let cfg = default_rollup_config(); - let mut attributes = default_attributes(); - attributes.attributes.gas_limit = Some(0); - let block = Block::<Transaction>::default(); - let check = AttributesMatch::check(cfg, &attributes, &block); - assert_eq!(check, AttributesMatch::Match); - assert!(check.is_match()); - } -} diff --git a/kona/crates/node/engine/src/client.rs b/kona/crates/node/engine/src/client.rs deleted file mode 100644 index 2f1b49198d4..00000000000 --- a/kona/crates/node/engine/src/client.rs +++ /dev/null @@ -1,511 +0,0 @@ -//! An Engine API Client. 
- -use crate::{Metrics, RollupBoostServerArgs, RollupBoostServerError}; -use alloy_eips::{BlockId, eip1898::BlockNumberOrTag}; -use alloy_network::{Ethereum, Network}; -use alloy_primitives::{Address, B256, BlockHash, Bytes, StorageKey}; -use alloy_provider::{EthGetBlock, Provider, RootProvider, RpcWithBlock, ext::EngineApi}; -use alloy_rpc_client::RpcClient; -use alloy_rpc_types_engine::{ - ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, - ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, JwtSecret, - PayloadId, PayloadStatus, -}; -use alloy_rpc_types_eth::{Block, EIP1186AccountProofResponse}; -use alloy_transport::{RpcError, TransportErrorKind, TransportResult}; -use alloy_transport_http::{ - AuthLayer, AuthService, Http, HyperClient, - hyper_util::{ - client::legacy::{Client, connect::HttpConnector}, - rt::TokioExecutor, - }, -}; -use async_trait::async_trait; -use http::uri::InvalidUri; -use http_body_util::Full; -use kona_genesis::RollupConfig; -use kona_protocol::{FromBlockError, L2BlockInfo}; -use op_alloy_network::Optimism; -use op_alloy_provider::ext::engine::OpEngineApi; -use op_alloy_rpc_types::Transaction; -use op_alloy_rpc_types_engine::{ - OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, - OpPayloadAttributes, ProtocolVersion, -}; -use parking_lot::Mutex; -use rollup_boost::{ - EngineApiServer, Flashblocks, FlashblocksWebsocketConfig, Probes, RollupBoostServer, - RpcClientError, -}; -use rollup_boost_types::payload::PayloadSource; -use std::{ - future::Future, - net::{AddrParseError, IpAddr, SocketAddr}, - str::FromStr, - sync::Arc, - time::{Duration, Instant}, -}; -use thiserror::Error; -use tower::ServiceBuilder; -use url::Url; - -/// An error that occurred in the [`EngineClient`]. 
-#[derive(Error, Debug)] -pub enum EngineClientError { - /// An RPC error occurred - #[error("An RPC error occurred: {0}")] - RpcError(#[from] RpcError<TransportErrorKind>), - - /// An error occurred while decoding the payload - #[error("An error occurred while decoding the payload: {0}")] - BlockInfoDecodeError(#[from] FromBlockError), -} -/// A Hyper HTTP client with a JWT authentication layer. -pub type HyperAuthClient<B = Full<Bytes>> = HyperClient<B, AuthService<Client<HttpConnector, B>>>; - -/// Engine API client used to communicate with L1/L2 ELs and optional rollup-boost. -/// EngineClient trait that is very coupled to its only implementation. -/// The main reason this exists is for mocking/unit testing. -#[async_trait] -pub trait EngineClient: OpEngineApi<Optimism, Http<HyperAuthClient>> + Send + Sync { - /// Returns a reference to the inner [`RollupConfig`]. - fn cfg(&self) -> &RollupConfig; - - /// Fetches the L1 block with the provided `BlockId`. - fn get_l1_block(&self, block: BlockId) -> EthGetBlock<<Ethereum as Network>::BlockResponse>; - - /// Fetches the L2 block with the provided `BlockId`. - fn get_l2_block(&self, block: BlockId) -> EthGetBlock<<Optimism as Network>::BlockResponse>; - - /// Get the account and storage values of the specified account including the merkle proofs. - /// This call can be used to verify that the data has not been tampered with. - fn get_proof( - &self, - address: Address, - keys: Vec<StorageKey>, - ) -> RpcWithBlock<(Address, Vec<StorageKey>), EIP1186AccountProofResponse>; - - /// Sends the given payload to the execution layer client, as specified for the Paris fork. - async fn new_payload_v1(&self, payload: ExecutionPayloadV1) -> TransportResult<PayloadStatus>; - - /// Fetches the [`Block<Transaction>`] for the given [`BlockNumberOrTag`]. 
- async fn l2_block_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<Block<Transaction>>, EngineClientError>; - - /// Fetches the [L2BlockInfo] by [BlockNumberOrTag]. - async fn l2_block_info_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<L2BlockInfo>, EngineClientError>; -} - -/// An Engine API client that provides authenticated HTTP communication with an execution layer. -/// -/// The [`OpEngineClient`] handles JWT authentication and manages connections to both L1 and L2 -/// execution layers. It automatically selects the appropriate Engine API version based on the -/// rollup configuration and block timestamps. -/// -/// Engine API client used to communicate with L1/L2 ELs and optional rollup-boost. -#[derive(Clone, Debug)] -pub struct OpEngineClient<L1Provider, L2Provider> -where - L1Provider: Provider, - L2Provider: Provider<Optimism>, -{ - /// The L2 engine provider for Engine API calls. - engine: L2Provider, - /// The L1 chain provider for reading L1 data. - l1_provider: L1Provider, - /// The [`RollupConfig`] for determining Engine API versions based on hardfork activations. - cfg: Arc<RollupConfig>, - /// The rollup boost server - pub rollup_boost: Arc<RollupBoostServer>, -} - -impl<L1Provider, L2Provider> OpEngineClient<L1Provider, L2Provider> -where - L1Provider: Provider, - L2Provider: Provider<Optimism>, -{ - /// Creates a new RPC client for the given address and JWT secret. 
- pub fn rpc_client<N: Network>(addr: Url, jwt: JwtSecret) -> RootProvider<N> { - let hyper_client = Client::builder(TokioExecutor::new()).build_http::<Full<Bytes>>(); - let auth_layer = AuthLayer::new(jwt); - let service = ServiceBuilder::new().layer(auth_layer).service(hyper_client); - let layer_transport = HyperClient::with_service(service); - let http_hyper = Http::with_client(layer_transport, addr); - let rpc_client = RpcClient::new(http_hyper, false); - RootProvider::<N>::new(rpc_client) - } -} - -/// The builder for the [`OpEngineClient`]. -#[derive(Debug, Clone)] -pub struct EngineClientBuilder { - /// The builder URL. - pub builder: Url, - /// The builder JWT secret. - pub builder_jwt: JwtSecret, - /// The builder timeout. - pub builder_timeout: Duration, - /// The L2 Engine API endpoint URL. - pub l2: Url, - /// The L2 JWT secret. - pub l2_jwt: JwtSecret, - /// The L2 timeout. - pub l2_timeout: Duration, - /// The L1 RPC URL. - pub l1_rpc: Url, - /// The [`RollupConfig`] for determining Engine API versions based on hardfork activations. - pub cfg: Arc<RollupConfig>, - /// The rollup boost arguments. - pub rollup_boost: RollupBoostServerArgs, -} - -/// An error that occurred in the [`EngineClientBuilder`]. 
-#[derive(Error, Debug)] -pub enum EngineClientBuilderError { - /// An error occurred while parsing the URL - #[error("An error occurred while parsing the URL: {0}")] - UrlParseError(#[from] InvalidUri), - /// An error occurred while parsing the IP address - #[error("An error occurred while parsing the IP address: {0}")] - IpAddrParseError(#[from] AddrParseError), - /// An error occurred while creating the RPC client - #[error("An error occurred while creating the RPC client: {0}")] - RpcClientError(#[from] RpcClientError), - /// An error occurred while creating the Flashblocks service - #[error("An error occurred while creating the Flashblocks service: {0}")] - FlashblocksError(String), -} - -impl EngineClientBuilder { - /// Creates a new [`OpEngineClient`] with authenticated HTTP connections. - /// - /// Sets up JWT-authenticated connections to the Engine API endpoint through the rollup-boost - /// server along with an unauthenticated connection to the L1 chain. - /// - /// # FIXME(@theochap, `<https://github.com/op-rs/kona/issues/3053>`, `<https://github.com/op-rs/kona/issues/3054>`): - /// This method can be simplified/improved in a few ways: - /// - Unify kona's and rollup-boost's RPC client creation - /// - Removed the `dyn RollupBoostServerLike` type erasure. 
- pub fn build( - self, - ) -> Result<OpEngineClient<RootProvider, RootProvider<Optimism>>, EngineClientBuilderError> - { - let probes = Arc::new(Probes::default()); - let l2_client = rollup_boost::RpcClient::new( - http::Uri::from_str(self.l2.to_string().as_str())?, - self.l2_jwt, - self.l2_timeout.as_millis() as u64, - PayloadSource::L2, - )?; - let builder_client = rollup_boost::RpcClient::new( - http::Uri::from_str(self.builder.to_string().as_str())?, - self.builder_jwt, - self.builder_timeout.as_millis() as u64, - PayloadSource::Builder, - )?; - - let rollup_boost_server = match self.rollup_boost.flashblocks { - Some(flashblocks) => { - let inbound_url = flashblocks.flashblocks_builder_url; - let outbound_addr = SocketAddr::new( - IpAddr::from_str(&flashblocks.flashblocks_host)?, - flashblocks.flashblocks_port, - ); - - let ws_config = flashblocks.flashblocks_ws_config; - - let builder_client = Arc::new( - Flashblocks::run( - builder_client, - inbound_url, - outbound_addr, - FlashblocksWebsocketConfig { - flashblock_builder_ws_initial_reconnect_ms: ws_config - .flashblock_builder_ws_initial_reconnect_ms, - flashblock_builder_ws_max_reconnect_ms: ws_config - .flashblock_builder_ws_max_reconnect_ms, - flashblock_builder_ws_connect_timeout_ms: ws_config - .flashblock_builder_ws_connect_timeout_ms, - flashblock_builder_ws_ping_interval_ms: ws_config - .flashblock_builder_ws_ping_interval_ms, - flashblock_builder_ws_pong_timeout_ms: ws_config - .flashblock_builder_ws_pong_timeout_ms, - }, - ) - .map_err(|e| EngineClientBuilderError::FlashblocksError(e.to_string()))?, - ); - Arc::new(rollup_boost::RollupBoostServer::new( - l2_client, - builder_client, - Arc::new(Mutex::new(self.rollup_boost.initial_execution_mode)), - self.rollup_boost.block_selection_policy, - probes, - self.rollup_boost.external_state_root, - self.rollup_boost.ignore_unhealthy_builders, - )) - } - None => Arc::new(rollup_boost::RollupBoostServer::new( - l2_client, - Arc::new(builder_client), - 
Arc::new(Mutex::new(self.rollup_boost.initial_execution_mode)), - self.rollup_boost.block_selection_policy, - probes, - self.rollup_boost.external_state_root, - self.rollup_boost.ignore_unhealthy_builders, - )), - }; - - // TODO(ethereum-optimism/optimism#18656): remove this client, upstream the remaining - // EngineApiExt methods to the RollupBoostServer - let engine = OpEngineClient::<RootProvider, RootProvider<Optimism>>::rpc_client::<Optimism>( - self.l2, - self.l2_jwt, - ); - - let l1_provider = RootProvider::new_http(self.l1_rpc); - - Ok(OpEngineClient { engine, l1_provider, cfg: self.cfg, rollup_boost: rollup_boost_server }) - } -} - -#[async_trait] -impl<L1Provider, L2Provider> EngineClient for OpEngineClient<L1Provider, L2Provider> -where - L1Provider: Provider, - L2Provider: Provider<Optimism>, -{ - fn cfg(&self) -> &RollupConfig { - self.cfg.as_ref() - } - - fn get_l1_block(&self, block: BlockId) -> EthGetBlock<<Ethereum as Network>::BlockResponse> { - self.l1_provider.get_block(block) - } - - fn get_l2_block(&self, block: BlockId) -> EthGetBlock<<Optimism as Network>::BlockResponse> { - self.engine.get_block(block) - } - - fn get_proof( - &self, - address: Address, - keys: Vec<StorageKey>, - ) -> RpcWithBlock<(Address, Vec<StorageKey>), EIP1186AccountProofResponse> { - self.engine.get_proof(address, keys) - } - - async fn new_payload_v1(&self, payload: ExecutionPayloadV1) -> TransportResult<PayloadStatus> { - self.engine.new_payload_v1(payload).await - } - - async fn l2_block_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<Block<Transaction>>, EngineClientError> { - Ok(self.engine.get_block_by_number(numtag).full().await?) 
- } - - async fn l2_block_info_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<L2BlockInfo>, EngineClientError> { - let block = self.engine.get_block_by_number(numtag).full().await?; - let Some(block) = block else { - return Ok(None); - }; - Ok(Some(L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &self.cfg.genesis)?)) - } -} - -#[async_trait::async_trait] -impl<L1Provider, L2Provider> OpEngineApi<Optimism, Http<HyperAuthClient>> - for OpEngineClient<L1Provider, L2Provider> -where - L1Provider: Provider, - L2Provider: Provider<Optimism>, -{ - async fn new_payload_v2( - &self, - payload: ExecutionPayloadInputV2, - ) -> TransportResult<PayloadStatus> { - let call = <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::new_payload_v2( - &self.engine, - payload, - ); - - record_call_time(call, Metrics::NEW_PAYLOAD_METHOD).await - } - - async fn new_payload_v3( - &self, - payload: ExecutionPayloadV3, - parent_beacon_block_root: B256, - ) -> TransportResult<PayloadStatus> { - let call = self.rollup_boost.new_payload_v3(payload, vec![], parent_beacon_block_root); - - record_call_time(call, Metrics::NEW_PAYLOAD_METHOD) - .await - .map_err(|err| RollupBoostServerError::from(err).into()) - } - - async fn new_payload_v4( - &self, - payload: OpExecutionPayloadV4, - parent_beacon_block_root: B256, - ) -> TransportResult<PayloadStatus> { - let call = self.rollup_boost.new_payload_v4( - payload.clone(), - vec![], - parent_beacon_block_root, - vec![], - ); - - record_call_time(call, Metrics::NEW_PAYLOAD_METHOD) - .await - .map_err(|err| RollupBoostServerError::from(err).into()) - } - - async fn fork_choice_updated_v2( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option<OpPayloadAttributes>, - ) -> TransportResult<ForkchoiceUpdated> { - let call = - <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::fork_choice_updated_v2( - &self.engine, - fork_choice_state, - payload_attributes, - ); - - 
record_call_time(call, Metrics::FORKCHOICE_UPDATE_METHOD).await - } - - async fn fork_choice_updated_v3( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option<OpPayloadAttributes>, - ) -> TransportResult<ForkchoiceUpdated> { - let call = self.rollup_boost.fork_choice_updated_v3(fork_choice_state, payload_attributes); - - record_call_time(call, Metrics::FORKCHOICE_UPDATE_METHOD) - .await - .map_err(|err| RollupBoostServerError::from(err).into()) - } - - async fn get_payload_v2( - &self, - payload_id: PayloadId, - ) -> TransportResult<ExecutionPayloadEnvelopeV2> { - let call = <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::get_payload_v2( - &self.engine, - payload_id, - ); - - record_call_time(call, Metrics::GET_PAYLOAD_METHOD).await - } - - async fn get_payload_v3( - &self, - payload_id: PayloadId, - ) -> TransportResult<OpExecutionPayloadEnvelopeV3> { - let call = self.rollup_boost.get_payload_v3(payload_id); - - record_call_time(call, Metrics::GET_PAYLOAD_METHOD) - .await - .map_err(|err| RollupBoostServerError::from(err).into()) - } - - async fn get_payload_v4( - &self, - payload_id: PayloadId, - ) -> TransportResult<OpExecutionPayloadEnvelopeV4> { - let call = self.rollup_boost.get_payload_v4(payload_id); - - record_call_time(call, Metrics::GET_PAYLOAD_METHOD) - .await - .map_err(|err| RollupBoostServerError::from(err).into()) - } - - async fn get_payload_bodies_by_hash_v1( - &self, - block_hashes: Vec<BlockHash>, - ) -> TransportResult<ExecutionPayloadBodiesV1> { - <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::get_payload_bodies_by_hash_v1( - &self.engine, - block_hashes, - ) - .await - } - - async fn get_payload_bodies_by_range_v1( - &self, - start: u64, - count: u64, - ) -> TransportResult<ExecutionPayloadBodiesV1> { - <L2Provider as OpEngineApi< - Optimism, - Http<HyperAuthClient>, - >>::get_payload_bodies_by_range_v1(&self.engine, start, count).await - } - - async fn get_client_version_v1( - &self, - 
client_version: ClientVersionV1, - ) -> TransportResult<Vec<ClientVersionV1>> { - <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::get_client_version_v1( - &self.engine, - client_version, - ) - .await - } - - async fn signal_superchain_v1( - &self, - recommended: ProtocolVersion, - required: ProtocolVersion, - ) -> TransportResult<ProtocolVersion> { - <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::signal_superchain_v1( - &self.engine, - recommended, - required, - ) - .await - } - - async fn exchange_capabilities( - &self, - capabilities: Vec<String>, - ) -> TransportResult<Vec<String>> { - <L2Provider as OpEngineApi<Optimism, Http<HyperAuthClient>>>::exchange_capabilities( - &self.engine, - capabilities, - ) - .await - } -} - -/// Wrapper to record the time taken for a call to the engine API and log the result as a metric. -async fn record_call_time<T, Err>( - f: impl Future<Output = Result<T, Err>>, - metric_label: &'static str, -) -> Result<T, Err> { - // Await on the future and track its duration. - let start = Instant::now(); - let result = f.await?; - let duration = start.elapsed(); - - // Record the call duration. - kona_macros::record!( - histogram, - Metrics::ENGINE_METHOD_REQUEST_DURATION, - "method", - metric_label, - duration.as_secs_f64() - ); - Ok(result) -} diff --git a/kona/crates/node/engine/src/lib.rs b/kona/crates/node/engine/src/lib.rs deleted file mode 100644 index 63b564d84e2..00000000000 --- a/kona/crates/node/engine/src/lib.rs +++ /dev/null @@ -1,84 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -//! ## Architecture -//! -//! 
The engine operates as a task-driven system where operations are queued and executed atomically: -//! -//! ```text -//! ┌─────────────┐ ┌──────────────┐ ┌─────────────┐ -//! │ Engine │◄───┤ Task Queue │◄───┤ Engine │ -//! │ Client │ │ (Priority) │ │ Tasks │ -//! └─────────────┘ └──────────────┘ └─────────────┘ -//! │ │ │ -//! ▼ ▼ ▼ -//! ┌─────────────┐ ┌──────────────┐ ┌─────────────┐ -//! │ Engine API │ │ Engine State │ │ Rollup │ -//! │ (HTTP/JWT) │ │ Updates │ │ Config │ -//! └─────────────┘ └──────────────┘ └─────────────┘ -//! ``` -//! -//! ## Module Organization -//! -//! - **Task Queue** - Core engine task queue and execution logic via [`Engine`] -//! - **Client** - HTTP client for Engine API communication via [`EngineClient`] -//! - **State** - Engine state management and synchronization via [`EngineState`] -//! - **Versions** - Engine API version selection via [`EngineForkchoiceVersion`], -//! [`EngineNewPayloadVersion`], [`EngineGetPayloadVersion`] -//! - **Attributes** - Payload attribute validation via [`AttributesMatch`] -//! - **Kinds** - Engine client type identification via [`EngineKind`] -//! - **Query** - Engine query interface via [`EngineQueries`] -//! 
- **Metrics** - Optional Prometheus metrics collection via [`Metrics`] - -#[macro_use] -extern crate tracing; - -mod task_queue; -pub use task_queue::{ - BuildTask, BuildTaskError, ConsolidateInput, ConsolidateTask, ConsolidateTaskError, Engine, - EngineBuildError, EngineResetError, EngineTask, EngineTaskError, EngineTaskErrorSeverity, - EngineTaskErrors, EngineTaskExt, FinalizeTask, FinalizeTaskError, InsertTask, InsertTaskError, - SealTask, SealTaskError, SynchronizeTask, SynchronizeTaskError, -}; - -mod attributes; -pub use attributes::{AttributesMatch, AttributesMismatch}; - -mod client; -pub use client::{ - EngineClient, EngineClientBuilder, EngineClientBuilderError, EngineClientError, - HyperAuthClient, OpEngineClient, -}; - -mod rollup_boost; -pub use rollup_boost::{ - FlashblocksClientArgs, FlashblocksWebsocketConfig, RollupBoostServer, RollupBoostServerArgs, - RollupBoostServerError, -}; - -mod versions; -pub use versions::{EngineForkchoiceVersion, EngineGetPayloadVersion, EngineNewPayloadVersion}; - -mod state; -pub use state::{EngineState, EngineSyncState, EngineSyncStateUpdate}; - -mod kinds; -pub use kinds::EngineKind; - -mod query; -pub use query::{EngineQueries, EngineQueriesError, EngineQuerySender}; - -mod metrics; -pub use metrics::Metrics; - -mod sync; -pub use sync::{L2ForkchoiceState, SyncStartError, find_starting_forkchoice}; - -#[cfg(any(test, feature = "test-utils"))] -/// Utilities that are useful when creating unit tests using structs within this library. -pub mod test_utils; diff --git a/kona/crates/node/engine/src/state/core.rs b/kona/crates/node/engine/src/state/core.rs deleted file mode 100644 index 6f4de6b43b6..00000000000 --- a/kona/crates/node/engine/src/state/core.rs +++ /dev/null @@ -1,258 +0,0 @@ -//! The internal state of the engine controller. 
- -use crate::Metrics; -use alloy_rpc_types_engine::ForkchoiceState; -use kona_protocol::L2BlockInfo; -use serde::{Deserialize, Serialize}; - -/// The synchronization state of the execution layer across different safety levels. -/// -/// Tracks block progression through various stages of verification and finalization, -/// from initial unsafe blocks received via P2P to fully finalized blocks derived from -/// finalized L1 data. Each level represents increasing confidence in the block's validity. -/// -/// # Safety Levels -/// -/// The state tracks blocks at different safety levels, listed from least to most safe: -/// -/// 1. **Unsafe** - Most recent blocks from P2P network (unverified) -/// 2. **Cross-unsafe** - Unsafe blocks with cross-layer verification -/// 3. **Local-safe** - Derived from L1 data, completed span-batch -/// 4. **Safe** - Cross-verified with safe L1 dependencies -/// 5. **Finalized** - Derived from finalized L1 data only -/// -/// See the [OP Stack specifications](https://specs.optimism.io) for detailed safety definitions. -#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] -pub struct EngineSyncState { - /// Most recent block found on the P2P network (lowest safety level). - unsafe_head: L2BlockInfo, - /// Cross-verified unsafe head (equal to unsafe_head pre-interop). - cross_unsafe_head: L2BlockInfo, - /// Derived from L1 data as a completed span-batch, but not yet cross-verified. - local_safe_head: L2BlockInfo, - /// Derived from L1 data and cross-verified to have safe L1 dependencies. - safe_head: L2BlockInfo, - /// Derived from finalized L1 data with only finalized dependencies (highest safety level). - finalized_head: L2BlockInfo, -} - -impl EngineSyncState { - /// Returns the current unsafe head. - pub const fn unsafe_head(&self) -> L2BlockInfo { - self.unsafe_head - } - - /// Returns the current cross-verified unsafe head. 
- pub const fn cross_unsafe_head(&self) -> L2BlockInfo { - self.cross_unsafe_head - } - - /// Returns the current local safe head. - pub const fn local_safe_head(&self) -> L2BlockInfo { - self.local_safe_head - } - - /// Returns the current safe head. - pub const fn safe_head(&self) -> L2BlockInfo { - self.safe_head - } - - /// Returns the current finalized head. - pub const fn finalized_head(&self) -> L2BlockInfo { - self.finalized_head - } - - /// Creates a `ForkchoiceState` - /// - /// - `head_block` = `unsafe_head` - /// - `safe_block` = `safe_head` - /// - `finalized_block` = `finalized_head` - /// - /// If the block info is not yet available, the default values are used. - pub const fn create_forkchoice_state(&self) -> ForkchoiceState { - ForkchoiceState { - head_block_hash: self.unsafe_head.hash(), - safe_block_hash: self.safe_head.hash(), - finalized_block_hash: self.finalized_head.hash(), - } - } - - /// Applies the update to the provided sync state, using the current state values if the update - /// is not specified. Returns the new sync state. 
- pub fn apply_update(self, sync_state_update: EngineSyncStateUpdate) -> Self { - if let Some(unsafe_head) = sync_state_update.unsafe_head { - Self::update_block_label_metric( - Metrics::UNSAFE_BLOCK_LABEL, - unsafe_head.block_info.number, - ); - } - if let Some(cross_unsafe_head) = sync_state_update.cross_unsafe_head { - Self::update_block_label_metric( - Metrics::CROSS_UNSAFE_BLOCK_LABEL, - cross_unsafe_head.block_info.number, - ); - } - if let Some(local_safe_head) = sync_state_update.local_safe_head { - Self::update_block_label_metric( - Metrics::LOCAL_SAFE_BLOCK_LABEL, - local_safe_head.block_info.number, - ); - } - if let Some(safe_head) = sync_state_update.safe_head { - Self::update_block_label_metric(Metrics::SAFE_BLOCK_LABEL, safe_head.block_info.number); - } - if let Some(finalized_head) = sync_state_update.finalized_head { - Self::update_block_label_metric( - Metrics::FINALIZED_BLOCK_LABEL, - finalized_head.block_info.number, - ); - } - - Self { - unsafe_head: sync_state_update.unsafe_head.unwrap_or(self.unsafe_head), - cross_unsafe_head: sync_state_update - .cross_unsafe_head - .unwrap_or(self.cross_unsafe_head), - local_safe_head: sync_state_update.local_safe_head.unwrap_or(self.local_safe_head), - safe_head: sync_state_update.safe_head.unwrap_or(self.safe_head), - finalized_head: sync_state_update.finalized_head.unwrap_or(self.finalized_head), - } - } - - /// Updates a block label metric, keyed by the label. - #[inline] - fn update_block_label_metric(label: &'static str, number: u64) { - kona_macros::set!(gauge, Metrics::BLOCK_LABELS, "label", label, number as f64); - } -} - -/// Specifies how to update the sync state of the engine. 
-#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] -pub struct EngineSyncStateUpdate { - /// Most recent block found on the p2p network - pub unsafe_head: Option<L2BlockInfo>, - /// Cross-verified unsafe head, always equal to the unsafe head pre-interop - pub cross_unsafe_head: Option<L2BlockInfo>, - /// Derived from L1, and known to be a completed span-batch, - /// but not cross-verified yet. - pub local_safe_head: Option<L2BlockInfo>, - /// Derived from L1 and cross-verified to have cross-safe dependencies. - pub safe_head: Option<L2BlockInfo>, - /// Derived from finalized L1 data, - /// and cross-verified to only have finalized dependencies. - pub finalized_head: Option<L2BlockInfo>, -} - -/// The chain state viewed by the engine controller. -#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] -pub struct EngineState { - /// The sync state of the engine. - pub sync_state: EngineSyncState, - - /// Whether or not the EL has finished syncing. - pub el_sync_finished: bool, - - /// Track when the rollup node changes the forkchoice to restore previous - /// known unsafe chain. e.g. Unsafe Reorg caused by Invalid span batch. - /// This update does not retry except engine returns non-input error - /// because engine may forgot backupUnsafeHead or backupUnsafeHead is not part - /// of the chain. - pub need_fcu_call_backup_unsafe_reorg: bool, -} - -impl EngineState { - /// Returns if consolidation is needed. - /// - /// [Consolidation] is only performed by a rollup node when the unsafe head - /// is ahead of the safe head. When the two are equal, consolidation isn't - /// required and the [`crate::BuildTask`] can be used to build the block. 
- /// - /// [Consolidation]: https://specs.optimism.io/protocol/derivation.html#l1-consolidation-payload-attributes-matching - pub fn needs_consolidation(&self) -> bool { - self.sync_state.safe_head() != self.sync_state.unsafe_head() - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::Metrics; - use kona_protocol::BlockInfo; - use metrics_exporter_prometheus::PrometheusBuilder; - use rstest::rstest; - - impl EngineState { - /// Set the unsafe head. - pub fn set_unsafe_head(&mut self, unsafe_head: L2BlockInfo) { - self.sync_state.apply_update(EngineSyncStateUpdate { - unsafe_head: Some(unsafe_head), - ..Default::default() - }); - } - - /// Set the cross-verified unsafe head. - pub fn set_cross_unsafe_head(&mut self, cross_unsafe_head: L2BlockInfo) { - self.sync_state.apply_update(EngineSyncStateUpdate { - cross_unsafe_head: Some(cross_unsafe_head), - ..Default::default() - }); - } - - /// Set the local safe head. - pub fn set_local_safe_head(&mut self, local_safe_head: L2BlockInfo) { - self.sync_state.apply_update(EngineSyncStateUpdate { - local_safe_head: Some(local_safe_head), - ..Default::default() - }); - } - - /// Set the safe head. - pub fn set_safe_head(&mut self, safe_head: L2BlockInfo) { - self.sync_state.apply_update(EngineSyncStateUpdate { - safe_head: Some(safe_head), - ..Default::default() - }); - } - - /// Set the finalized head. 
- pub fn set_finalized_head(&mut self, finalized_head: L2BlockInfo) { - self.sync_state.apply_update(EngineSyncStateUpdate { - finalized_head: Some(finalized_head), - ..Default::default() - }); - } - } - - #[rstest] - #[case::set_unsafe(EngineState::set_unsafe_head, Metrics::UNSAFE_BLOCK_LABEL, 1)] - #[case::set_cross_unsafe( - EngineState::set_cross_unsafe_head, - Metrics::CROSS_UNSAFE_BLOCK_LABEL, - 2 - )] - #[case::set_local_safe(EngineState::set_local_safe_head, Metrics::LOCAL_SAFE_BLOCK_LABEL, 3)] - #[case::set_safe_head(EngineState::set_safe_head, Metrics::SAFE_BLOCK_LABEL, 4)] - #[case::set_finalized_head(EngineState::set_finalized_head, Metrics::FINALIZED_BLOCK_LABEL, 5)] - #[cfg(feature = "metrics")] - fn test_chain_label_metrics( - #[case] set_fn: impl Fn(&mut EngineState, L2BlockInfo), - #[case] label_name: &str, - #[case] number: u64, - ) { - let handle = PrometheusBuilder::new().install_recorder().unwrap(); - crate::Metrics::init(); - - let mut state = EngineState::default(); - set_fn( - &mut state, - L2BlockInfo { - block_info: BlockInfo { number, ..Default::default() }, - ..Default::default() - }, - ); - - assert!(handle.render().contains( - format!("kona_node_block_labels{{label=\"{label_name}\"}} {number}").as_str() - )); - } -} diff --git a/kona/crates/node/engine/src/sync/mod.rs b/kona/crates/node/engine/src/sync/mod.rs deleted file mode 100644 index 3d798f2146a..00000000000 --- a/kona/crates/node/engine/src/sync/mod.rs +++ /dev/null @@ -1,150 +0,0 @@ -//! Sync start algorithm for the OP Stack rollup node. - -use kona_genesis::RollupConfig; -use kona_protocol::L2BlockInfo; - -mod forkchoice; -pub use forkchoice::L2ForkchoiceState; - -mod error; -pub use error::SyncStartError; - -use tracing::info; - -use crate::EngineClient; - -/// Searches for the latest [`L2ForkchoiceState`] that we can use to start the sync process with. 
-/// -/// - The *unsafe L2 block*: This is the highest L2 block whose L1 origin is a *plausible* -/// extension of the canonical L1 chain (as known to the rollup node). -/// - The *safe L2 block*: This is the highest L2 block whose epoch's sequencing window is -/// complete within the canonical L1 chain (as known to the rollup node). -/// - The *finalized L2 block*: This is the L2 block which is known to be fully derived from -/// finalized L1 block data. -/// -/// Plausible: meaning that the blockhash of the L2 block's L1 origin -/// (as reported in the L1 Attributes deposit within the L2 block) is not canonical at another -/// height in the L1 chain, and the same holds for all its ancestors. -pub async fn find_starting_forkchoice<EngineClient_: EngineClient>( - cfg: &RollupConfig, - engine_client: &EngineClient_, -) -> Result<L2ForkchoiceState, SyncStartError> { - let mut current_fc = L2ForkchoiceState::current(cfg, engine_client).await?; - info!( - target: "sync_start", - unsafe = %current_fc.un_safe.block_info.number, - safe = %current_fc.safe.block_info.number, - finalized = %current_fc.finalized.block_info.number, - "Loaded current L2 EL forkchoice state" - ); - - // Search for the highest `unsafe` block, relative to the initial `unsafe` block's L1 origin, - loop { - let l1_origin = - engine_client.get_l1_block(current_fc.un_safe.l1_origin.hash.into()).await?; - info!( - target: "sync_start", - l1_origin = %current_fc.un_safe.l1_origin.number, - l2_unsafe = %current_fc.un_safe.block_info.number, - "Searching for L2 unsafe block with canonical L1 origin" - ); - - match l1_origin { - Some(_) => { - // Unsafe block has existing L1 origin. Continue with this head. 
- info!( - target: "sync_start", - l2_unsafe = %current_fc.un_safe.block_info.number, - "Found L2 unsafe block with canonical L1 origin" - ); - break; - } - None => { - let l2_parent_hash = current_fc.un_safe.block_info.parent_hash.into(); - let l2_parent = engine_client - .get_l2_block(l2_parent_hash) - .full() - .await? - .ok_or(SyncStartError::BlockNotFound(l2_parent_hash))?; - - current_fc.un_safe = - L2BlockInfo::from_block_and_genesis(&l2_parent.into_consensus(), &cfg.genesis)?; - } - } - } - - // Search for the highest `safe` block that's L1 origin is at least older than the sequencing - // window, relative to the L1 origin of the `unsafe` block. - let mut safe_cursor = current_fc.un_safe; - loop { - info!( - target: "sync_start", - l1_origin = %safe_cursor.l1_origin.number, - l2_safe = %safe_cursor.block_info.number, - "Searching for L2 safe block beyond sequencing window" - ); - - let is_behind_sequence_window = - current_fc.un_safe.l1_origin.number.saturating_sub(cfg.seq_window_size) > - safe_cursor.l1_origin.number; - let is_finalized = safe_cursor.block_info.hash == current_fc.finalized.block_info.hash; - let is_genesis = safe_cursor.block_info.hash == cfg.genesis.l2.hash; - if is_behind_sequence_window || is_finalized || is_genesis { - info!( - target: "sync_start", - l2_safe = %safe_cursor.block_info.number, - is_behind_sequence_window, - is_finalized, - is_genesis, - "Found suitable L2 safe block" - ); - current_fc.safe = safe_cursor; - break; - } else { - let block = engine_client - .get_l2_block(safe_cursor.block_info.parent_hash.into()) - .full() - .await? - .ok_or(SyncStartError::BlockNotFound(safe_cursor.block_info.parent_hash.into()))?; - safe_cursor = - L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &cfg.genesis)?; - } - } - - // Leave the finalized block as-is, and return the current forkchoice. 
- Ok(current_fc) -} - -#[cfg(test)] -mod test { - use alloy_provider::Network; - use alloy_rpc_types_eth::Block; - use kona_protocol::L2BlockInfo; - use kona_registry::ROLLUP_CONFIGS; - use op_alloy_network::Optimism; - - const OP_SEPOLIA_CHAIN_ID: u64 = 11155420; - const OP_SEPOLIA_GENESIS_RPC_RESPONSE: &str = "{\"hash\":\"0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d\",\"parentHash\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"sha3Uncles\":\"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347\",\"miner\":\"0x4200000000000000000000000000000000000011\",\"stateRoot\":\"0x06787a17a3ed87c339a39dbbeeb311578a0c83ed29daa2db95da62b28efce8a9\",\"transactionsRoot\":\"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421\",\"receiptsRoot\":\"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421\",\"logsBloom\":\"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\",\"difficulty\":\"0x0\",\"number\":\"0x0\",\"gasLimit\":\"0x1c9c380\",\"gasUsed\":\"0x0\",\"timestamp\":\"0x64d6dbac\",\"extraData\":\"0x424544524f434b\",\"mixHash\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"nonce\":\"0x0000000000000000\",\"baseFeePerGas\":\"0x3b9aca00\",\"size\":\"0x209\",\"uncles\":[],\"transactions\":[]}"; - - /// Sanity regression test - `alloy_rpc_types`' `Block::into_consensus` failed to saturate the - /// header of the `alloy_consensus::Header` type on an old version. 
This test covers the - /// conversion to ensure an OP genesis block's conversion to the consensus type works for - /// the sake of `L2BlockInfo::from_block_and_genesis`. - #[tokio::test] - async fn test_genesis_block_hash() { - let rollup_config = ROLLUP_CONFIGS.get(&OP_SEPOLIA_CHAIN_ID).unwrap(); - let genesis_block: Block<<Optimism as Network>::TransactionResponse> = - serde_json::from_str(OP_SEPOLIA_GENESIS_RPC_RESPONSE).unwrap(); - - let rpc_reported_hash = genesis_block.header.hash; - let consensus_block = genesis_block.into_consensus(); - - // Check that the genesis block's RPC-reported hash is equal to the manually computed hash. - assert_eq!(rpc_reported_hash, consensus_block.hash_slow()); - - // Convert to `L2BlockInfo` and check the same. - let l2_block_info = - L2BlockInfo::from_block_and_genesis(&consensus_block, &rollup_config.genesis).unwrap(); - assert_eq!(rpc_reported_hash, l2_block_info.block_info.hash); - } -} diff --git a/kona/crates/node/engine/src/task_queue/core.rs b/kona/crates/node/engine/src/task_queue/core.rs deleted file mode 100644 index 50d8f910bb6..00000000000 --- a/kona/crates/node/engine/src/task_queue/core.rs +++ /dev/null @@ -1,182 +0,0 @@ -//! The [`Engine`] is a task queue that receives and executes [`EngineTask`]s. - -use super::EngineTaskExt; -use crate::{ - EngineClient, EngineState, EngineSyncStateUpdate, EngineTask, EngineTaskError, - EngineTaskErrorSeverity, Metrics, SyncStartError, SynchronizeTask, SynchronizeTaskError, - find_starting_forkchoice, task_queue::EngineTaskErrors, -}; -use alloy_rpc_types_eth::Transaction; -use kona_genesis::{RollupConfig, SystemConfig}; -use kona_protocol::{BlockInfo, L2BlockInfo, OpBlockConversionError, to_system_config}; -use op_alloy_consensus::OpTxEnvelope; -use std::{collections::BinaryHeap, sync::Arc}; -use thiserror::Error; -use tokio::sync::watch::Sender; - -/// The [`Engine`] task queue. 
-/// -/// Tasks of a shared [`EngineTask`] variant are processed in FIFO order, providing synchronization -/// guarantees for the L2 execution layer and other actors. A priority queue, ordered by -/// [`EngineTask`]'s [`Ord`] implementation, is used to prioritize tasks executed by the -/// [`Engine::drain`] method. -/// -/// Because tasks are executed one at a time, they are considered to be atomic operations over the -/// [`EngineState`], and are given exclusive access to the engine state during execution. -/// -/// Tasks within the queue are also considered fallible. If they fail with a temporary error, -/// they are not popped from the queue, the error is returned, and they are retried on the -/// next call to [`Engine::drain`]. -#[derive(Debug)] -pub struct Engine<EngineClient_: EngineClient> { - /// The state of the engine. - state: EngineState, - /// A sender that can be used to notify the engine actor of state changes. - state_sender: Sender<EngineState>, - /// A sender that can be used to notify the engine actor of task queue length changes. - task_queue_length: Sender<usize>, - /// The task queue. - tasks: BinaryHeap<EngineTask<EngineClient_>>, -} - -impl<EngineClient_: EngineClient> Engine<EngineClient_> { - /// Creates a new [`Engine`] with an empty task queue and the passed initial [`EngineState`]. - pub fn new( - initial_state: EngineState, - state_sender: Sender<EngineState>, - task_queue_length: Sender<usize>, - ) -> Self { - Self { state: initial_state, state_sender, task_queue_length, tasks: BinaryHeap::default() } - } - - /// Returns a reference to the inner [`EngineState`]. - pub const fn state(&self) -> &EngineState { - &self.state - } - - /// Returns a receiver that can be used to listen to engine state updates. - pub fn state_subscribe(&self) -> tokio::sync::watch::Receiver<EngineState> { - self.state_sender.subscribe() - } - - /// Returns a receiver that can be used to listen to engine queue length updates. 
- pub fn queue_length_subscribe(&self) -> tokio::sync::watch::Receiver<usize> { - self.task_queue_length.subscribe() - } - - /// Enqueues a new [`EngineTask`] for execution. - /// Updates the queue length and notifies listeners of the change. - pub fn enqueue(&mut self, task: EngineTask<EngineClient_>) { - self.tasks.push(task); - self.task_queue_length.send_replace(self.tasks.len()); - } - - /// Resets the engine by finding a plausible sync starting point via - /// [`find_starting_forkchoice`]. The state will be updated to the starting point, and a - /// forkchoice update will be enqueued in order to reorg the execution layer. - pub async fn reset( - &mut self, - client: Arc<EngineClient_>, - config: Arc<RollupConfig>, - ) -> Result<(L2BlockInfo, BlockInfo, SystemConfig), EngineResetError> { - // Clear any outstanding tasks to prepare for the reset. - self.clear(); - - let mut start = find_starting_forkchoice(&config, client.as_ref()).await?; - - // Retry to synchronize the engine until we succeeds or a critical error occurs. - while let Err(err) = SynchronizeTask::new( - client.clone(), - config.clone(), - EngineSyncStateUpdate { - unsafe_head: Some(start.un_safe), - cross_unsafe_head: Some(start.un_safe), - local_safe_head: Some(start.safe), - safe_head: Some(start.safe), - finalized_head: Some(start.finalized), - }, - ) - .execute(&mut self.state) - .await - { - match err.severity() { - EngineTaskErrorSeverity::Temporary | - EngineTaskErrorSeverity::Flush | - EngineTaskErrorSeverity::Reset => { - warn!(target: "engine", ?err, "Forkchoice update failed during reset. Trying again..."); - start = find_starting_forkchoice(&config, client.as_ref()).await?; - } - EngineTaskErrorSeverity::Critical => { - return Err(EngineResetError::Forkchoice(err)); - } - } - } - - // Find the new safe head's L1 origin and SystemConfig. 
- let origin_block = start - .safe - .l1_origin - .number - .saturating_sub(config.channel_timeout(start.safe.block_info.timestamp)); - let l1_origin_info: BlockInfo = client - .get_l1_block(origin_block.into()) - .await - .map_err(SyncStartError::RpcError)? - .ok_or(SyncStartError::BlockNotFound(origin_block.into()))? - .into_consensus() - .into(); - let l2_safe_block = client - .get_l2_block(start.safe.block_info.hash.into()) - .full() - .await - .map_err(SyncStartError::RpcError)? - .ok_or(SyncStartError::BlockNotFound(origin_block.into()))? - .into_consensus() - .map_transactions(|t| <Transaction<OpTxEnvelope> as Clone>::clone(&t).into_inner()); - let system_config = to_system_config(&l2_safe_block, &config)?; - - kona_macros::inc!(counter, Metrics::ENGINE_RESET_COUNT); - - Ok((start.safe, l1_origin_info, system_config)) - } - - /// Clears the task queue. - pub fn clear(&mut self) { - self.tasks.clear(); - } - - /// Attempts to drain the queue by executing all [`EngineTask`]s in-order. If any task returns - /// an error along the way, it is not popped from the queue (in case it must be retried) and - /// the error is returned. - pub async fn drain(&mut self) -> Result<(), EngineTaskErrors> { - // Drain tasks in order of priority, halting on errors for a retry to be attempted. - while let Some(task) = self.tasks.peek() { - // Execute the task - task.execute(&mut self.state).await?; - - // Update the state and notify the engine actor. - self.state_sender.send_replace(self.state); - - // Pop the task from the queue now that it's been executed. - self.tasks.pop(); - - self.task_queue_length.send_replace(self.tasks.len()); - } - - Ok(()) - } -} - -/// An error occurred while attempting to reset the [`Engine`]. -#[derive(Debug, Error)] -pub enum EngineResetError { - /// An error that occurred while updating the forkchoice state. 
- #[error(transparent)] - Forkchoice(#[from] SynchronizeTaskError), - /// An error occurred while traversing the L1 for the sync starting point. - #[error(transparent)] - SyncStart(#[from] SyncStartError), - /// An error occurred while constructing the SystemConfig for the new safe head. - #[error(transparent)] - SystemConfigConversion(#[from] OpBlockConversionError), -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/build/error.rs b/kona/crates/node/engine/src/task_queue/tasks/build/error.rs deleted file mode 100644 index dc1c634b45f..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/build/error.rs +++ /dev/null @@ -1,80 +0,0 @@ -//! Contains error types for the [crate::SynchronizeTask]. - -use crate::{EngineTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; -use alloy_rpc_types_engine::{PayloadId, PayloadStatusEnum}; -use alloy_transport::{RpcError, TransportErrorKind}; -use thiserror::Error; -use tokio::sync::mpsc; - -/// An error that occurs during payload building within the engine. -/// -/// This error type is specific to the block building process and represents failures -/// that can occur during the automatic forkchoice update phase of [`BuildTask`]. -/// Unlike [`BuildTaskError`], which handles higher-level build orchestration errors, -/// `EngineBuildError` focuses on low-level engine API communication failures. -/// -/// ## Error Categories -/// -/// - **State Validation**: Errors related to inconsistent chain state -/// - **Engine Communication**: RPC failures during forkchoice updates -/// - **Payload Validation**: Invalid payload status responses from the execution layer -/// -/// [`BuildTask`]: crate::BuildTask -#[derive(Debug, Error)] -pub enum EngineBuildError { - /// The finalized head is ahead of the unsafe head. - #[error("Finalized head is ahead of unsafe head")] - FinalizedAheadOfUnsafe(u64, u64), - /// The forkchoice update call to the engine api failed. 
- #[error("Failed to build payload attributes in the engine. Forkchoice RPC error: {0}")] - AttributesInsertionFailed(#[from] RpcError<TransportErrorKind>), - /// The inserted payload is invalid. - #[error("The inserted payload is invalid: {0}")] - InvalidPayload(String), - /// The inserted payload status is unexpected. - #[error("The inserted payload status is unexpected: {0}")] - UnexpectedPayloadStatus(PayloadStatusEnum), - /// The payload ID is missing. - #[error("The inserted payload ID is missing")] - MissingPayloadId, - /// The engine is syncing. - #[error("The engine is syncing")] - EngineSyncing, -} - -/// An error that occurs when running the [crate::BuildTask]. -#[derive(Debug, Error)] -pub enum BuildTaskError { - /// An error occurred when building the payload attributes in the engine. - #[error("An error occurred when building the payload attributes to the engine.")] - EngineBuildError(EngineBuildError), - /// Error sending the built payload envelope. - #[error(transparent)] - MpscSend(#[from] Box<mpsc::error::SendError<PayloadId>>), -} - -impl EngineTaskError for BuildTaskError { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::EngineBuildError(EngineBuildError::FinalizedAheadOfUnsafe(_, _)) => { - EngineTaskErrorSeverity::Critical - } - Self::EngineBuildError(EngineBuildError::AttributesInsertionFailed(_)) => { - EngineTaskErrorSeverity::Temporary - } - Self::EngineBuildError(EngineBuildError::InvalidPayload(_)) => { - EngineTaskErrorSeverity::Temporary - } - Self::EngineBuildError(EngineBuildError::UnexpectedPayloadStatus(_)) => { - EngineTaskErrorSeverity::Temporary - } - Self::EngineBuildError(EngineBuildError::MissingPayloadId) => { - EngineTaskErrorSeverity::Temporary - } - Self::EngineBuildError(EngineBuildError::EngineSyncing) => { - EngineTaskErrorSeverity::Temporary - } - Self::MpscSend(_) => EngineTaskErrorSeverity::Critical, - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/build/task.rs 
b/kona/crates/node/engine/src/task_queue/tasks/build/task.rs deleted file mode 100644 index 8fc4ecda757..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/build/task.rs +++ /dev/null @@ -1,184 +0,0 @@ -//! A task for building a new block and importing it. -use super::BuildTaskError; -use crate::{ - EngineClient, EngineForkchoiceVersion, EngineState, EngineTaskExt, - state::EngineSyncStateUpdate, task_queue::tasks::build::error::EngineBuildError, -}; -use alloy_rpc_types_engine::{PayloadId, PayloadStatusEnum}; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_genesis::RollupConfig; -use kona_protocol::OpAttributesWithParent; -use std::{sync::Arc, time::Instant}; -use tokio::sync::mpsc; - -/// Task for building new blocks with automatic forkchoice synchronization. -/// -/// The [`BuildTask`] only performs the `engine_forkchoiceUpdated` call within the block building -/// workflow. It makes this call with the provided attributes to initiate block building on the -/// execution layer and, if successful, sends the new [`PayloadId`] via the configured sender. -/// -/// ## Error Handling -/// -/// The task uses [`EngineBuildError`] for build-specific failures during the forkchoice update -/// phase. -/// -/// [`EngineBuildError`]: crate::EngineBuildError -#[derive(Debug, Clone, Constructor)] -pub struct BuildTask<EngineClient_: EngineClient> { - /// The engine API client. - pub engine: Arc<EngineClient_>, - /// The [`RollupConfig`]. - pub cfg: Arc<RollupConfig>, - /// The [`OpAttributesWithParent`] to instruct the execution layer to build. - pub attributes: OpAttributesWithParent, - /// The optional sender through which [`PayloadId`] will be sent after the - /// block build has been started. - pub payload_id_tx: Option<mpsc::Sender<PayloadId>>, -} - -impl<EngineClient_: EngineClient> BuildTask<EngineClient_> { - /// Validates the provided [PayloadStatusEnum] according to the rules listed below. 
- /// - /// ## Observed [PayloadStatusEnum] Variants - /// - `VALID`: Returns Ok(()) - forkchoice update was successful - /// - `INVALID`: Returns error with validation details - /// - `SYNCING`: Returns temporary error - EL is syncing - /// - Other: Returns error for unexpected status codes - fn validate_forkchoice_status(status: PayloadStatusEnum) -> Result<(), BuildTaskError> { - match status { - PayloadStatusEnum::Valid => Ok(()), - PayloadStatusEnum::Invalid { validation_error } => { - error!(target: "engine_builder", "Forkchoice update failed: {}", validation_error); - Err(BuildTaskError::EngineBuildError(EngineBuildError::InvalidPayload( - validation_error, - ))) - } - PayloadStatusEnum::Syncing => { - warn!(target: "engine_builder", "Forkchoice update failed temporarily: EL is syncing"); - Err(BuildTaskError::EngineBuildError(EngineBuildError::EngineSyncing)) - } - PayloadStatusEnum::Accepted => { - // Other codes are never returned by `engine_forkchoiceUpdate` - Err(BuildTaskError::EngineBuildError(EngineBuildError::UnexpectedPayloadStatus( - status, - ))) - } - } - } - - /// Starts the block building process by sending an initial `engine_forkchoiceUpdate` call with - /// the payload attributes to build. - /// - /// ### Success (`VALID`) - /// If the build is successful, the [PayloadId] is returned for sealing and the successful - /// forkchoice update identifier is relayed via the stored `payload_id_tx` sender. - /// - /// ### Failure (`INVALID`) - /// If the forkchoice update fails, the [BuildTaskError]. - /// - /// ### Syncing (`SYNCING`) - /// If the EL is syncing, the payload attributes are buffered and the function returns early. - /// This is a temporary state, and the function should be called again later. - /// - /// Note: This is `pub(super)` to allow testing via the `tests` submodule. 
- pub(super) async fn start_build( - &self, - state: &EngineState, - engine_client: &EngineClient_, - attributes_envelope: OpAttributesWithParent, - ) -> Result<PayloadId, BuildTaskError> { - // Sanity check if the head is behind the finalized head. If it is, this is a critical - // error. - if state.sync_state.unsafe_head().block_info.number < - state.sync_state.finalized_head().block_info.number - { - return Err(BuildTaskError::EngineBuildError(EngineBuildError::FinalizedAheadOfUnsafe( - state.sync_state.unsafe_head().block_info.number, - state.sync_state.finalized_head().block_info.number, - ))); - } - - // When inserting a payload, we advertise the parent's unsafe head as the current unsafe - // head to build on top of. - let new_forkchoice = state - .sync_state - .apply_update(EngineSyncStateUpdate { - unsafe_head: Some(attributes_envelope.parent), - ..Default::default() - }) - .create_forkchoice_state(); - - let forkchoice_version = EngineForkchoiceVersion::from_cfg( - &self.cfg, - attributes_envelope.attributes.payload_attributes.timestamp, - ); - let update = match forkchoice_version { - EngineForkchoiceVersion::V3 => { - engine_client - .fork_choice_updated_v3(new_forkchoice, Some(attributes_envelope.attributes)) - .await - } - EngineForkchoiceVersion::V2 => { - engine_client - .fork_choice_updated_v2(new_forkchoice, Some(attributes_envelope.attributes)) - .await - } - } - .map_err(|e| { - error!(target: "engine_builder", "Forkchoice update failed: {}", e); - BuildTaskError::EngineBuildError(EngineBuildError::AttributesInsertionFailed(e)) - })?; - - Self::validate_forkchoice_status(update.payload_status.status)?; - - debug!( - target: "engine_builder", - unsafe_hash = new_forkchoice.head_block_hash.to_string(), - safe_hash = new_forkchoice.safe_block_hash.to_string(), - finalized_hash = new_forkchoice.finalized_block_hash.to_string(), - "Forkchoice update with attributes successful" - ); - - // Fetch the payload ID from the FCU. 
If no payload ID was returned, something went wrong - - // the block building job on the EL should have been initiated. - update - .payload_id - .ok_or(BuildTaskError::EngineBuildError(EngineBuildError::MissingPayloadId)) - } -} - -#[async_trait] -impl<EngineClient_: EngineClient> EngineTaskExt for BuildTask<EngineClient_> { - type Output = PayloadId; - - type Error = BuildTaskError; - - async fn execute(&self, state: &mut EngineState) -> Result<PayloadId, BuildTaskError> { - debug!( - target: "engine_builder", - txs = self.attributes.attributes().transactions.as_ref().map_or(0, |txs| txs.len()), - is_deposits = self.attributes.is_deposits_only(), - "Starting new build job" - ); - - // Start the build by sending an FCU call with the current forkchoice and the input - // payload attributes. - let fcu_start_time = Instant::now(); - let payload_id = self.start_build(state, &self.engine, self.attributes.clone()).await?; - let fcu_duration = fcu_start_time.elapsed(); - - info!( - target: "engine_builder", - fcu_duration = ?fcu_duration, - "block build started" - ); - - // If a channel was provided, send the payload ID to it. - if let Some(tx) = &self.payload_id_tx { - tx.send(payload_id).await.map_err(Box::new)?; - } - - Ok(payload_id) - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs b/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs deleted file mode 100644 index 35c103f9a63..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs +++ /dev/null @@ -1,295 +0,0 @@ -//! A task to consolidate the engine state. 
- -use crate::{ - ConsolidateTaskError, EngineClient, EngineState, EngineTaskExt, SynchronizeTask, - state::EngineSyncStateUpdate, task_queue::build_and_seal, -}; -use alloy_rpc_types_eth::Block; -use async_trait::async_trait; -use kona_genesis::RollupConfig; -use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; -use op_alloy_rpc_types::Transaction; -use std::{sync::Arc, time::Instant}; - -/// Input for consolidation - either derived attributes or safe L2 block -#[derive(Debug, Clone)] -pub enum ConsolidateInput { - /// Consolidate based on derived attributes. - Attributes(Box<OpAttributesWithParent>), - /// Derivation Delegation: consolidate based on safe L2 block info. - BlockInfo(L2BlockInfo), -} - -impl From<L2BlockInfo> for ConsolidateInput { - fn from(v: L2BlockInfo) -> Self { - Self::BlockInfo(v) - } -} - -impl From<OpAttributesWithParent> for ConsolidateInput { - fn from(v: OpAttributesWithParent) -> Self { - Self::Attributes(Box::new(v)) - } -} - -impl ConsolidateInput { - /// Returns the block number for this consolidation input. - const fn l2_block_number(&self) -> u64 { - match self { - Self::Attributes(attributes) => attributes.block_number(), - Self::BlockInfo(info) => info.block_info.number, - } - } - - /// Checks if the block is consistent with this consolidation input. - fn is_consistent_with_block(&self, cfg: &RollupConfig, block: &Block<Transaction>) -> bool { - match self { - Self::Attributes(attributes) => { - crate::AttributesMatch::check(cfg, attributes, block).is_match() - } - Self::BlockInfo(info) => block.header.hash == info.block_info.hash, - } - } - - /// Returns true if this is `Attributes` and `attributes.is_last_in_span` is true. - const fn is_attributes_last_in_span(&self) -> bool { - matches!( - self, - Self::Attributes(attributes) - if attributes.is_last_in_span - ) - } -} - -/// The [`ConsolidateTask`] attempts to consolidate the engine state -/// using the specified payload attributes or block info. 
-#[derive(Debug, Clone)] -pub struct ConsolidateTask<EngineClient_: EngineClient> { - /// The engine client. - pub client: Arc<EngineClient_>, - /// The [`RollupConfig`]. - pub cfg: Arc<RollupConfig>, - /// The input for consolidation (either attributes or block info). - pub input: ConsolidateInput, -} - -impl<EngineClient_: EngineClient> ConsolidateTask<EngineClient_> { - /// Creates a new [`ConsolidateTask`] with the specified input - pub const fn new( - client: Arc<EngineClient_>, - cfg: Arc<RollupConfig>, - input: ConsolidateInput, - ) -> Self { - Self { client, cfg, input } - } - - /// This is used when the [`ConsolidateTask`] fails to consolidate the engine state - async fn execute_build_and_seal_tasks( - &self, - state: &mut EngineState, - attributes: &OpAttributesWithParent, - ) -> Result<(), ConsolidateTaskError> { - build_and_seal(state, self.client.clone(), self.cfg.clone(), attributes.clone(), true) - .await?; - - Ok(()) - } - - /// This provides symmetric fallback behavior to with build_and_seal. - async fn reconcile_to_safe_head( - &self, - state: &mut EngineState, - safe_l2: &L2BlockInfo, - ) -> Result<(), ConsolidateTaskError> { - warn!( - target: "engine", - safe_l2 = %safe_l2, - "Apply safe head" - ); - - let fcu_start = Instant::now(); - - // We intentionally set unsafe_head and cross_unsafe_head to safe_l2 to ensure the - // engine observes a self-consistent head state. This is required to correctly handle - // reorgs (where unsafe may be ahead on a non-canonical fork) and to trigger EL sync when - // the local unsafe head lags behind the safe head. 
- SynchronizeTask::new( - Arc::clone(&self.client), - self.cfg.clone(), - EngineSyncStateUpdate { - unsafe_head: Some(*safe_l2), - cross_unsafe_head: Some(*safe_l2), - safe_head: Some(*safe_l2), - local_safe_head: Some(*safe_l2), - ..Default::default() - }, - ) - .execute(state) - .await - .map_err(|e| { - warn!(target: "engine", ?e, "Apply safe head failed"); - e - })?; - - let fcu_duration = fcu_start.elapsed(); - - info!( - target: "engine", - hash = %safe_l2.block_info.hash, - number = safe_l2.block_info.number, - fcu_duration = ?fcu_duration, - "Updated safe head via follow safe" - ); - - Ok(()) - } - - /// Handles the fallback case when the block doesn't match the input or does not exist. - async fn reconcile_unsafe_to_safe( - &self, - state: &mut EngineState, - ) -> Result<(), ConsolidateTaskError> { - match &self.input { - ConsolidateInput::Attributes(attributes) => { - self.execute_build_and_seal_tasks(state, attributes).await - } - ConsolidateInput::BlockInfo(safe_l2) => { - self.reconcile_to_safe_head(state, safe_l2).await - } - } - } - - /// Attempts consolidation on the engine state. 
- pub async fn consolidate(&self, state: &mut EngineState) -> Result<(), ConsolidateTaskError> { - let global_start = Instant::now(); - - // Fetch the unsafe L2 block - let block_num = self.input.l2_block_number(); - let fetch_start = Instant::now(); - let block = match self.client.l2_block_by_label(block_num.into()).await { - Ok(Some(block)) => block, - Ok(None) => { - warn!(target: "engine", "Received `None` block for {}", block_num); - return Err(ConsolidateTaskError::MissingUnsafeL2Block(block_num)); - } - Err(_) => { - warn!(target: "engine", "Failed to fetch unsafe l2 block for consolidation"); - return Err(ConsolidateTaskError::FailedToFetchUnsafeL2Block); - } - }; - let block_fetch_duration = fetch_start.elapsed(); - let block_hash = block.header.hash; - - if self.input.is_consistent_with_block(&self.cfg, &block) { - trace!( - target: "engine", - input = ?self.input, - block_hash = %block_hash, - "Consolidating engine state", - ); - match L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &self.cfg.genesis) { - // Only issue a forkchoice update if the attributes are the last in the span - // batch. This is an optimization to avoid sending a FCU - // call for every block in the span batch. - Ok(block_info) if !self.input.is_attributes_last_in_span() => { - let total_duration = global_start.elapsed(); - - // Apply a transient update to the safe head. 
- state.sync_state = state.sync_state.apply_update(EngineSyncStateUpdate { - safe_head: Some(block_info), - local_safe_head: Some(block_info), - ..Default::default() - }); - - info!( - target: "engine", - hash = %block_info.block_info.hash, - number = block_info.block_info.number, - ?total_duration, - ?block_fetch_duration, - "Updated safe head via L1 consolidation" - ); - - return Ok(()); - } - Ok(block_info) => { - let fcu_start = Instant::now(); - - SynchronizeTask::new( - Arc::clone(&self.client), - self.cfg.clone(), - EngineSyncStateUpdate { - safe_head: Some(block_info), - local_safe_head: Some(block_info), - ..Default::default() - }, - ) - .execute(state) - .await - .map_err(|e| { - warn!(target: "engine", ?e, "Consolidation failed"); - e - })?; - - let fcu_duration = fcu_start.elapsed(); - let total_duration = global_start.elapsed(); - - info!( - target: "engine", - hash = %block_info.block_info.hash, - number = block_info.block_info.number, - ?total_duration, - ?block_fetch_duration, - fcu_duration = ?fcu_duration, - "Updated safe head via L1 consolidation" - ); - - return Ok(()); - } - Err(e) => { - // Continue on to build the block since we failed to construct the block info. - warn!(target: "engine", ?e, "Failed to construct L2BlockInfo, proceeding to build task"); - } - } - } - - debug!( - target: "engine", - input = ?self.input, - block_hash = %block_hash, - "ConsolidateInput mismatch! 
Initiating reorg", - ); - // Handle mismatch case - called when consistency check fails - // or when L2BlockInfo construction fails in Attributes branch - self.reconcile_unsafe_to_safe(state).await - } -} - -#[async_trait] -impl<EngineClient_: EngineClient> EngineTaskExt for ConsolidateTask<EngineClient_> { - type Output = (); - - type Error = ConsolidateTaskError; - - // Behavior depends on how the safe head is provided: - // - // - `Attributes`: The safe head is advanced through the normal derivation flow, where the - // DerivationActor and EngineActor coordinate both safe and unsafe heads. In this case, we - // consolidate as long as the unsafe head has not fallen behind. - // - // - `BlockInfo`: The safe head is injected externally by the DerivationActor while delegating - // derivation, and is not coordinated with the EngineActor's safe/unsafe heads. If the - // injected safe head is ahead of the EngineActor's unsafe head, we reconcile the unsafe chain - // up to the safe head instead of consolidating. - async fn execute(&self, state: &mut EngineState) -> Result<(), ConsolidateTaskError> { - let safe_head_number = match &self.input { - ConsolidateInput::Attributes { .. } => state.sync_state.safe_head().block_info.number, - ConsolidateInput::BlockInfo(safe_block_info) => safe_block_info.block_info.number, - }; - if safe_head_number < state.sync_state.unsafe_head().block_info.number { - self.consolidate(state).await - } else { - self.reconcile_unsafe_to_safe(state).await - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs b/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs deleted file mode 100644 index 60a11b4da96..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs +++ /dev/null @@ -1,42 +0,0 @@ -//! Contains error types for the [crate::FinalizeTask]. 
- -use crate::{ - EngineTaskError, SynchronizeTaskError, task_queue::tasks::task::EngineTaskErrorSeverity, -}; -use alloy_transport::{RpcError, TransportErrorKind}; -use kona_protocol::FromBlockError; -use thiserror::Error; - -/// An error that occurs when running the [crate::FinalizeTask]. -#[derive(Debug, Error)] -pub enum FinalizeTaskError { - /// The block is not safe, and therefore cannot be finalized. - #[error("Attempted to finalize a block that is not yet safe")] - BlockNotSafe, - /// The block to finalize was not found. - #[error("The block to finalize was not found: Number {0}")] - BlockNotFound(u64), - /// An error occurred while transforming the RPC block into [`L2BlockInfo`]. - /// - /// [`L2BlockInfo`]: kona_protocol::L2BlockInfo - #[error(transparent)] - FromBlock(#[from] FromBlockError), - /// A temporary RPC failure. - #[error(transparent)] - TransportError(#[from] RpcError<TransportErrorKind>), - /// The forkchoice update call to finalize the block failed. - #[error(transparent)] - ForkchoiceUpdateFailed(#[from] SynchronizeTaskError), -} - -impl EngineTaskError for FinalizeTaskError { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::BlockNotSafe => EngineTaskErrorSeverity::Critical, - Self::BlockNotFound(_) => EngineTaskErrorSeverity::Critical, - Self::FromBlock(_) => EngineTaskErrorSeverity::Critical, - Self::TransportError(_) => EngineTaskErrorSeverity::Temporary, - Self::ForkchoiceUpdateFailed(inner) => inner.severity(), - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs b/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs deleted file mode 100644 index 8d0cfc02164..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs +++ /dev/null @@ -1,45 +0,0 @@ -//! Contains the error types for the [InsertTask]. -//! -//! 
[InsertTask]: crate::InsertTask - -use crate::{ - EngineTaskError, SynchronizeTaskError, task_queue::tasks::task::EngineTaskErrorSeverity, -}; -use alloy_rpc_types_engine::PayloadStatusEnum; -use alloy_transport::{RpcError, TransportErrorKind}; -use kona_protocol::FromBlockError; -use op_alloy_rpc_types_engine::OpPayloadError; - -/// An error that occurs when running the [InsertTask]. -/// -/// [InsertTask]: crate::InsertTask -#[derive(Debug, thiserror::Error)] -pub enum InsertTaskError { - /// Error converting a payload into a block. - #[error(transparent)] - FromBlockError(#[from] OpPayloadError), - /// Failed to insert new payload. - #[error("Failed to insert new payload: {0}")] - InsertFailed(RpcError<TransportErrorKind>), - /// Unexpected payload status - #[error("Unexpected payload status: {0}")] - UnexpectedPayloadStatus(PayloadStatusEnum), - /// Error converting the payload + chain genesis into an L2 block info. - #[error(transparent)] - L2BlockInfoConstruction(#[from] FromBlockError), - /// The forkchoice update call to consolidate the block into the engine state failed. - #[error(transparent)] - ForkchoiceUpdateFailed(#[from] SynchronizeTaskError), -} - -impl EngineTaskError for InsertTaskError { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::FromBlockError(_) => EngineTaskErrorSeverity::Critical, - Self::InsertFailed(_) => EngineTaskErrorSeverity::Temporary, - Self::UnexpectedPayloadStatus(_) => EngineTaskErrorSeverity::Temporary, - Self::L2BlockInfoConstruction(_) => EngineTaskErrorSeverity::Critical, - Self::ForkchoiceUpdateFailed(inner) => inner.severity(), - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs b/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs deleted file mode 100644 index 736fa24aea2..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs +++ /dev/null @@ -1,67 +0,0 @@ -//! Contains error types for the [crate::SynchronizeTask]. 
- -use crate::{EngineTaskError, InsertTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; -use alloy_transport::{RpcError, TransportErrorKind}; -use kona_protocol::FromBlockError; -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use thiserror::Error; -use tokio::sync::mpsc; - -/// An error that occurs when running the [crate::SealTask]. -#[derive(Debug, Error)] -pub enum SealTaskError { - /// Impossible to insert the payload into the engine. - #[error(transparent)] - PayloadInsertionFailed(#[from] Box<InsertTaskError>), - /// The get payload call to the engine api failed. - #[error(transparent)] - GetPayloadFailed(RpcError<TransportErrorKind>), - /// A deposit-only payload failed to import. - #[error("Deposit-only payload failed to import")] - DepositOnlyPayloadFailed, - /// Failed to re-attempt payload import with deposit-only payload. - #[error("Failed to re-attempt payload import with deposit-only payload")] - DepositOnlyPayloadReattemptFailed, - /// The payload is invalid, and the derivation pipeline must - /// be flushed post-holocene. - #[error("Invalid payload, must flush post-holocene")] - HoloceneInvalidFlush, - /// Failed to convert a [`OpExecutionPayload`] to a [`L2BlockInfo`]. - /// - /// [`OpExecutionPayload`]: op_alloy_rpc_types_engine::OpExecutionPayload - /// [`L2BlockInfo`]: kona_protocol::L2BlockInfo - #[error(transparent)] - FromBlock(#[from] FromBlockError), - /// Error sending the built payload envelope. - #[error(transparent)] - MpscSend( - #[from] Box<mpsc::error::SendError<Result<OpExecutionPayloadEnvelope, SealTaskError>>>, - ), - /// The clock went backwards. - #[error("The clock went backwards")] - ClockWentBackwards, - /// Unsafe head changed between build and seal. This likely means that there was some race - /// condition between the previous seal updating the unsafe head and the build attributes - /// being created. This build has been invalidated. 
- /// - /// If not propagated to the original caller for handling (i.e. there was no original caller), - /// this should not happen and is a critical error. - #[error("Unsafe head changed between build and seal")] - UnsafeHeadChangedSinceBuild, -} - -impl EngineTaskError for SealTaskError { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::PayloadInsertionFailed(inner) => inner.severity(), - Self::GetPayloadFailed(_) => EngineTaskErrorSeverity::Temporary, - Self::HoloceneInvalidFlush => EngineTaskErrorSeverity::Flush, - Self::DepositOnlyPayloadReattemptFailed => EngineTaskErrorSeverity::Critical, - Self::DepositOnlyPayloadFailed => EngineTaskErrorSeverity::Critical, - Self::FromBlock(_) => EngineTaskErrorSeverity::Critical, - Self::MpscSend(_) => EngineTaskErrorSeverity::Critical, - Self::ClockWentBackwards => EngineTaskErrorSeverity::Critical, - Self::UnsafeHeadChangedSinceBuild => EngineTaskErrorSeverity::Critical, - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs b/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs deleted file mode 100644 index 86885c0e4d1..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs +++ /dev/null @@ -1,285 +0,0 @@ -//! A task for importing a block that has already been started. -use super::SealTaskError; -use crate::{ - EngineClient, EngineGetPayloadVersion, EngineState, EngineTaskExt, InsertTask, - InsertTaskError::{self}, - task_queue::build_and_seal, -}; -use alloy_rpc_types_engine::{ExecutionPayload, PayloadId}; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_genesis::RollupConfig; -use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; -use op_alloy_rpc_types_engine::{OpExecutionPayload, OpExecutionPayloadEnvelope}; -use std::{sync::Arc, time::Instant}; -use tokio::sync::mpsc; - -/// Task for block sealing and canonicalization. 
-/// -/// The [`SealTask`] handles the following parts of the block building workflow: -/// -/// 1. **Payload Construction**: Retrieves the built payload using `engine_getPayload` -/// 2. **Block Import**: Imports the payload using [`InsertTask`] for canonicalization -/// -/// ## Error Handling -/// -/// The task delegates to [`InsertTaskError`] for payload import failures. -/// -/// [`InsertTask`]: crate::InsertTask -/// [`InsertTaskError`]: crate::InsertTaskError -#[derive(Debug, Clone, Constructor)] -pub struct SealTask<EngineClient_: EngineClient> { - /// The engine API client. - pub engine: Arc<EngineClient_>, - /// The [`RollupConfig`]. - pub cfg: Arc<RollupConfig>, - /// The [`PayloadId`] being sealed. - pub payload_id: PayloadId, - /// The [`OpAttributesWithParent`] to instruct the execution layer to build. - pub attributes: OpAttributesWithParent, - /// Whether or not the payload was derived, or created by the sequencer. - pub is_attributes_derived: bool, - /// An optional sender to convey success/failure result of the built - /// [`OpExecutionPayloadEnvelope`] after the block has been built, imported, and canonicalized - /// or the [`SealTaskError`] that occurred during processing. - pub result_tx: Option<mpsc::Sender<Result<OpExecutionPayloadEnvelope, SealTaskError>>>, -} - -impl<EngineClient_: EngineClient> SealTask<EngineClient_> { - /// Seals the execution payload in the EL, returning the execution envelope. - /// - /// ## Engine Method Selection - /// The method used to fetch the payload from the EL is determined by the payload timestamp. The - /// method used to import the payload into the engine is determined by the payload version. - /// - /// - `engine_getPayloadV2` is used for payloads with a timestamp before the Ecotone fork. - /// - `engine_getPayloadV3` is used for payloads with a timestamp after the Ecotone fork. - /// - `engine_getPayloadV4` is used for payloads with a timestamp after the Isthmus fork. 
- async fn seal_payload( - &self, - cfg: &RollupConfig, - engine: &EngineClient_, - payload_id: PayloadId, - payload_attrs: OpAttributesWithParent, - ) -> Result<OpExecutionPayloadEnvelope, SealTaskError> { - let payload_timestamp = payload_attrs.attributes().payload_attributes.timestamp; - - debug!( - target: "engine", - payload_id = payload_id.to_string(), - l2_time = payload_timestamp, - "Sealing payload" - ); - - let get_payload_version = EngineGetPayloadVersion::from_cfg(cfg, payload_timestamp); - let payload_envelope = match get_payload_version { - EngineGetPayloadVersion::V4 => { - let payload = engine.get_payload_v4(payload_id).await.map_err(|e| { - error!(target: "engine", "Payload fetch failed: {e}"); - SealTaskError::GetPayloadFailed(e) - })?; - - OpExecutionPayloadEnvelope { - parent_beacon_block_root: Some(payload.parent_beacon_block_root), - execution_payload: OpExecutionPayload::V4(payload.execution_payload), - } - } - EngineGetPayloadVersion::V3 => { - let payload = engine.get_payload_v3(payload_id).await.map_err(|e| { - error!(target: "engine", "Payload fetch failed: {e}"); - SealTaskError::GetPayloadFailed(e) - })?; - - OpExecutionPayloadEnvelope { - parent_beacon_block_root: Some(payload.parent_beacon_block_root), - execution_payload: OpExecutionPayload::V3(payload.execution_payload), - } - } - EngineGetPayloadVersion::V2 => { - let payload = engine.get_payload_v2(payload_id).await.map_err(|e| { - error!(target: "engine", "Payload fetch failed: {e}"); - SealTaskError::GetPayloadFailed(e) - })?; - - OpExecutionPayloadEnvelope { - parent_beacon_block_root: None, - execution_payload: match payload.execution_payload.into_payload() { - ExecutionPayload::V1(payload) => OpExecutionPayload::V1(payload), - ExecutionPayload::V2(payload) => OpExecutionPayload::V2(payload), - _ => unreachable!("the response should be a V1 or V2 payload"), - }, - } - } - }; - - Ok(payload_envelope) - } - - /// Inserts a payload into the engine with Holocene fallback support. 
- /// - /// This function handles: - /// 1. Executing the InsertTask to import the payload - /// 2. Handling deposits-only payload failures - /// 3. Holocene fallback via build_and_seal if needed - /// - /// Returns Ok(()) if the payload is successfully inserted, or an error if insertion fails. - async fn insert_payload( - &self, - state: &mut EngineState, - new_payload: OpExecutionPayloadEnvelope, - ) -> Result<(), SealTaskError> { - // Insert the new block into the engine. - match InsertTask::new( - Arc::clone(&self.engine), - self.cfg.clone(), - new_payload.clone(), - self.is_attributes_derived, - ) - .execute(state) - .await - { - Err(InsertTaskError::UnexpectedPayloadStatus(e)) - if self.attributes.is_deposits_only() => - { - error!(target: "engine", error = ?e, "Critical: Deposit-only payload import failed"); - return Err(SealTaskError::DepositOnlyPayloadFailed); - } - Err(InsertTaskError::UnexpectedPayloadStatus(e)) - if self.cfg.is_holocene_active( - self.attributes.attributes().payload_attributes.timestamp, - ) => - { - warn!(target: "engine", error = ?e, "Re-attempting payload import with deposits only."); - - // HOLOCENE: Re-attempt payload import with deposits only - // First build the deposits-only payload, then seal it - let deposits_only_attrs = self.attributes.as_deposits_only(); - - return match build_and_seal( - state, - self.engine.clone(), - self.cfg.clone(), - deposits_only_attrs.clone(), - self.is_attributes_derived, - ) - .await - { - Ok(_) => { - info!(target: "engine", "Successfully imported deposits-only payload"); - Err(SealTaskError::HoloceneInvalidFlush) - } - Err(_) => Err(SealTaskError::DepositOnlyPayloadReattemptFailed), - } - } - Err(e) => { - error!(target: "engine", "Payload import failed: {e}"); - return Err(Box::new(e).into()); - } - Ok(_) => { - info!(target: "engine", "Successfully imported payload") - } - } - - Ok(()) - } - - /// Seals and canonicalizes the block by fetching the payload and importing it. 
- /// - /// This function handles: - /// 1. Fetching the execution payload from the EL - /// 2. Importing the payload into the engine with Holocene fallback support - /// 3. Sending the payload to the optional channel - async fn seal_and_canonicalize_block( - &self, - state: &mut EngineState, - ) -> Result<OpExecutionPayloadEnvelope, SealTaskError> { - // Fetch the payload just inserted from the EL and import it into the engine. - let block_import_start_time = Instant::now(); - let new_payload = self - .seal_payload(&self.cfg, &self.engine, self.payload_id, self.attributes.clone()) - .await?; - - let new_block_ref = L2BlockInfo::from_payload_and_genesis( - new_payload.execution_payload.clone(), - self.attributes.attributes().payload_attributes.parent_beacon_block_root, - &self.cfg.genesis, - ) - .map_err(SealTaskError::FromBlock)?; - - // Insert the payload into the engine. - self.insert_payload(state, new_payload.clone()).await?; - - let block_import_duration = block_import_start_time.elapsed(); - - info!( - target: "engine", - l2_number = new_block_ref.block_info.number, - l2_time = new_block_ref.block_info.timestamp, - block_import_duration = ?block_import_duration, - "Built and imported new {} block", - if self.is_attributes_derived { "safe" } else { "unsafe" }, - ); - - Ok(new_payload) - } - - /// Sends the provided result via the `result_tx` sender if one exists, returning the - /// appropriate error if it does not. - /// - /// This allows the original caller to handle errors, removing that burden from the engine, - /// which may not know the caller's intent or retry preferences. If the original caller did not - /// provide a mechanism to get notified of updates, handle the error in the default manner in - /// the task queue logic. 
- async fn send_channel_result_or_get_error( - &self, - res: Result<OpExecutionPayloadEnvelope, SealTaskError>, - ) -> Result<(), SealTaskError> { - // NB: If a response channel was provided, that channel will receive success/failure info, - // and this task will always succeed. If not, task failure will be relayed to the caller. - if let Some(tx) = &self.result_tx { - tx.send(res).await.map_err(|e| SealTaskError::MpscSend(Box::new(e)))?; - } else if let Err(x) = res { - return Err(x) - } - - Ok(()) - } -} - -#[async_trait] -impl<EngineClient_: EngineClient> EngineTaskExt for SealTask<EngineClient_> { - type Output = (); - - type Error = SealTaskError; - - async fn execute(&self, state: &mut EngineState) -> Result<(), SealTaskError> { - debug!( - target: "engine", - txs = self.attributes.attributes().transactions.as_ref().map_or(0, |txs| txs.len()), - is_deposits = self.attributes.is_deposits_only(), - "Starting new seal job" - ); - - let unsafe_block_info = state.sync_state.unsafe_head().block_info; - let parent_block_info = self.attributes.parent.block_info; - - let res = if unsafe_block_info.hash != parent_block_info.hash || - unsafe_block_info.number != parent_block_info.number - { - info!( - target: "engine", - unsafe_block_info = ?unsafe_block_info, - parent_block_info = ?parent_block_info, - "Seal attributes parent does not match unsafe head, returning rebuild error" - ); - Err(SealTaskError::UnsafeHeadChangedSinceBuild) - } else { - // Seal the block and import it into the engine. - self.seal_and_canonicalize_block(state).await - }; - - self.send_channel_result_or_get_error(res).await?; - - Ok(()) - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs b/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs deleted file mode 100644 index 2b3b9712042..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs +++ /dev/null @@ -1,34 +0,0 @@ -//! 
Contains error types for the [crate::SynchronizeTask]. - -use crate::{EngineTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; -use alloy_rpc_types_engine::PayloadStatusEnum; -use alloy_transport::{RpcError, TransportErrorKind}; -use thiserror::Error; - -/// An error that occurs when running the [crate::SynchronizeTask]. -#[derive(Debug, Error)] -pub enum SynchronizeTaskError { - /// The forkchoice update call to the engine api failed. - #[error("Forkchoice update engine api call failed due to an RPC error: {0}")] - ForkchoiceUpdateFailed(RpcError<TransportErrorKind>), - /// The finalized head is behind the unsafe head. - #[error("Invalid forkchoice state: unsafe head {0} is ahead of finalized head {1}")] - FinalizedAheadOfUnsafe(u64, u64), - /// The forkchoice state is invalid. - #[error("Invalid forkchoice state")] - InvalidForkchoiceState, - /// The payload status is unexpected. - #[error("Unexpected payload status: {0}")] - UnexpectedPayloadStatus(PayloadStatusEnum), -} - -impl EngineTaskError for SynchronizeTaskError { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::FinalizedAheadOfUnsafe(_, _) => EngineTaskErrorSeverity::Critical, - Self::ForkchoiceUpdateFailed(_) => EngineTaskErrorSeverity::Temporary, - Self::UnexpectedPayloadStatus(_) => EngineTaskErrorSeverity::Temporary, - Self::InvalidForkchoiceState => EngineTaskErrorSeverity::Reset, - } - } -} diff --git a/kona/crates/node/engine/src/task_queue/tasks/task.rs b/kona/crates/node/engine/src/task_queue/tasks/task.rs deleted file mode 100644 index 5a60cc63c75..00000000000 --- a/kona/crates/node/engine/src/task_queue/tasks/task.rs +++ /dev/null @@ -1,242 +0,0 @@ -//! Tasks sent to the [`Engine`] for execution. -//! -//! 
[`Engine`]: crate::Engine - -use super::{BuildTask, ConsolidateTask, FinalizeTask, InsertTask}; -use crate::{ - BuildTaskError, ConsolidateTaskError, EngineClient, EngineState, FinalizeTaskError, - InsertTaskError, - task_queue::{SealTask, SealTaskError}, -}; -use async_trait::async_trait; -use derive_more::Display; -use std::cmp::Ordering; -use thiserror::Error; -use tokio::task::yield_now; - -/// The severity of an engine task error. -/// -/// This is used to determine how to handle the error when draining the engine task queue. -#[derive(Debug, PartialEq, Eq, Display, Clone, Copy)] -pub enum EngineTaskErrorSeverity { - /// The error is temporary and the task is retried. - #[display("temporary")] - Temporary, - /// The error is critical and is propagated to the engine actor. - #[display("critical")] - Critical, - /// The error indicates that the engine should be reset. - #[display("reset")] - Reset, - /// The error indicates that the engine should be flushed. - #[display("flush")] - Flush, -} - -/// The interface for an engine task error. -/// -/// An engine task error should have an associated severity level to specify how to handle the error -/// when draining the engine task queue. -pub trait EngineTaskError { - /// The severity of the error. - fn severity(&self) -> EngineTaskErrorSeverity; -} - -/// The interface for an engine task. -#[async_trait] -pub trait EngineTaskExt { - /// The output type of the task. - type Output; - - /// The error type of the task. - type Error: EngineTaskError; - - /// Executes the task, taking a shared lock on the engine state and `self`. - async fn execute(&self, state: &mut EngineState) -> Result<Self::Output, Self::Error>; -} - -/// An error that may occur during an [`EngineTask`]'s execution. -#[derive(Error, Debug)] -pub enum EngineTaskErrors { - /// An error that occurred while inserting a block into the engine. - #[error(transparent)] - Insert(#[from] InsertTaskError), - /// An error that occurred while building a block. 
- #[error(transparent)] - Build(#[from] BuildTaskError), - /// An error that occurred while sealing a block. - #[error(transparent)] - Seal(#[from] SealTaskError), - /// An error that occurred while consolidating the engine state. - #[error(transparent)] - Consolidate(#[from] ConsolidateTaskError), - /// An error that occurred while finalizing an L2 block. - #[error(transparent)] - Finalize(#[from] FinalizeTaskError), -} - -impl EngineTaskError for EngineTaskErrors { - fn severity(&self) -> EngineTaskErrorSeverity { - match self { - Self::Insert(inner) => inner.severity(), - Self::Build(inner) => inner.severity(), - Self::Seal(inner) => inner.severity(), - Self::Consolidate(inner) => inner.severity(), - Self::Finalize(inner) => inner.severity(), - } - } -} - -/// Tasks that may be inserted into and executed by the [`Engine`]. -/// -/// [`Engine`]: crate::Engine -#[derive(Debug, Clone)] -pub enum EngineTask<EngineClient_: EngineClient> { - /// Inserts a payload into the execution engine. - Insert(Box<InsertTask<EngineClient_>>), - /// Begins building a new block with the given attributes, producing a new payload ID. - Build(Box<BuildTask<EngineClient_>>), - /// Seals the block with the given payload ID and attributes, inserting it into the execution - /// engine. - Seal(Box<SealTask<EngineClient_>>), - /// Performs consolidation on the engine state, reverting to payload attribute processing - /// via the [`BuildTask`] if consolidation fails. - Consolidate(Box<ConsolidateTask<EngineClient_>>), - /// Finalizes an L2 block - Finalize(Box<FinalizeTask<EngineClient_>>), -} - -impl<EngineClient_: EngineClient> EngineTask<EngineClient_> { - /// Executes the task without consuming it. 
- async fn execute_inner(&self, state: &mut EngineState) -> Result<(), EngineTaskErrors> { - match self { - Self::Insert(task) => task.execute(state).await?, - Self::Seal(task) => task.execute(state).await?, - Self::Consolidate(task) => task.execute(state).await?, - Self::Finalize(task) => task.execute(state).await?, - Self::Build(task) => { - task.execute(state).await?; - } - }; - - Ok(()) - } - - const fn task_metrics_label(&self) -> &'static str { - match self { - Self::Insert(_) => crate::Metrics::INSERT_TASK_LABEL, - Self::Consolidate(_) => crate::Metrics::CONSOLIDATE_TASK_LABEL, - Self::Build(_) => crate::Metrics::BUILD_TASK_LABEL, - Self::Seal(_) => crate::Metrics::SEAL_TASK_LABEL, - Self::Finalize(_) => crate::Metrics::FINALIZE_TASK_LABEL, - } - } -} - -impl<EngineClient_: EngineClient> PartialEq for EngineTask<EngineClient_> { - fn eq(&self, other: &Self) -> bool { - matches!( - (self, other), - (Self::Insert(_), Self::Insert(_)) | - (Self::Build(_), Self::Build(_)) | - (Self::Seal(_), Self::Seal(_)) | - (Self::Consolidate(_), Self::Consolidate(_)) | - (Self::Finalize(_), Self::Finalize(_)) - ) - } -} - -impl<EngineClient_: EngineClient> Eq for EngineTask<EngineClient_> {} - -impl<EngineClient_: EngineClient> PartialOrd for EngineTask<EngineClient_> { - fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { - Some(self.cmp(other)) - } -} - -impl<EngineClient_: EngineClient> Ord for EngineTask<EngineClient_> { - fn cmp(&self, other: &Self) -> Ordering { - // Order (descending): BuildBlock -> InsertUnsafe -> Consolidate -> Finalize - // - // https://specs.optimism.io/protocol/derivation.html#forkchoice-synchronization - // - // - Block building jobs are prioritized above all other tasks, to give priority to the - // sequencer. BuildTask handles forkchoice updates automatically. - // - InsertUnsafe tasks are prioritized over Consolidate tasks, to ensure that unsafe block - // gossip is imported promptly. 
- // - Consolidate tasks are prioritized over Finalize tasks, as they advance the safe chain - // via derivation. - // - Finalize tasks have the lowest priority, as they only update finalized status. - match (self, other) { - // Same variant cases - (Self::Insert(_), Self::Insert(_)) => Ordering::Equal, - (Self::Consolidate(_), Self::Consolidate(_)) => Ordering::Equal, - (Self::Build(_), Self::Build(_)) => Ordering::Equal, - (Self::Seal(_), Self::Seal(_)) => Ordering::Equal, - (Self::Finalize(_), Self::Finalize(_)) => Ordering::Equal, - - // SealBlock tasks are prioritized over all others - (Self::Seal(_), _) => Ordering::Greater, - (_, Self::Seal(_)) => Ordering::Less, - - // BuildBlock tasks are prioritized over InsertUnsafe and Consolidate tasks - (Self::Build(_), _) => Ordering::Greater, - (_, Self::Build(_)) => Ordering::Less, - - // InsertUnsafe tasks are prioritized over Consolidate and Finalize tasks - (Self::Insert(_), _) => Ordering::Greater, - (_, Self::Insert(_)) => Ordering::Less, - - // Consolidate tasks are prioritized over Finalize tasks - (Self::Consolidate(_), _) => Ordering::Greater, - (_, Self::Consolidate(_)) => Ordering::Less, - } - } -} - -#[async_trait] -impl<EngineClient_: EngineClient> EngineTaskExt for EngineTask<EngineClient_> { - type Output = (); - - type Error = EngineTaskErrors; - - async fn execute(&self, state: &mut EngineState) -> Result<(), Self::Error> { - // Retry the task until it succeeds or a critical error occurs. - while let Err(e) = self.execute_inner(state).await { - let severity = e.severity(); - - kona_macros::inc!( - counter, - crate::Metrics::ENGINE_TASK_FAILURE, - self.task_metrics_label() => severity.to_string() - ); - - match severity { - EngineTaskErrorSeverity::Temporary => { - trace!(target: "engine", "{e}"); - - // Yield the task to allow other tasks to execute to avoid starvation. 
- yield_now().await; - - continue; - } - EngineTaskErrorSeverity::Critical => { - error!(target: "engine", "{e}"); - return Err(e); - } - EngineTaskErrorSeverity::Reset => { - warn!(target: "engine", "Engine requested derivation reset"); - return Err(e); - } - EngineTaskErrorSeverity::Flush => { - warn!(target: "engine", "Engine requested derivation flush"); - return Err(e); - } - } - } - - kona_macros::inc!(counter, crate::Metrics::ENGINE_TASK_SUCCESS, self.task_metrics_label()); - - Ok(()) - } -} diff --git a/kona/crates/node/engine/src/test_utils/attributes.rs b/kona/crates/node/engine/src/test_utils/attributes.rs deleted file mode 100644 index 09c2cbe8143..00000000000 --- a/kona/crates/node/engine/src/test_utils/attributes.rs +++ /dev/null @@ -1,111 +0,0 @@ -use alloy_eips::BlockNumHash; -use alloy_primitives::{B256, b256}; -use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; -use op_alloy_rpc_types_engine::OpPayloadAttributes; - -/// Builder for creating test OpAttributesWithParent instances with sensible defaults -#[derive(Debug)] -pub struct TestAttributesBuilder { - timestamp: u64, - prev_randao: B256, - suggested_fee_recipient: alloy_primitives::Address, - withdrawals: Option<Vec<alloy_eips::eip4895::Withdrawal>>, - parent_beacon_block_root: Option<B256>, - transactions: Option<Vec<alloy_primitives::Bytes>>, - no_tx_pool: Option<bool>, - gas_limit: Option<u64>, - eip_1559_params: Option<alloy_primitives::B64>, - min_base_fee: Option<u64>, - parent: L2BlockInfo, - derived_from: Option<BlockInfo>, - is_last_in_span: bool, -} - -impl TestAttributesBuilder { - /// Creates a new builder with default values - pub fn new() -> Self { - let parent = L2BlockInfo { - block_info: BlockInfo { - number: 0, - hash: b256!("1111111111111111111111111111111111111111111111111111111111111111"), - parent_hash: B256::ZERO, - timestamp: 1000, - }, - l1_origin: BlockNumHash::default(), - seq_num: 0, - }; - - Self { - timestamp: 2000, - prev_randao: 
b256!("2222222222222222222222222222222222222222222222222222222222222222"), - suggested_fee_recipient: alloy_primitives::Address::ZERO, - withdrawals: None, - parent_beacon_block_root: Some(B256::ZERO), - transactions: None, - no_tx_pool: Some(false), - gas_limit: Some(30_000_000), - eip_1559_params: None, - min_base_fee: None, - parent, - derived_from: None, - is_last_in_span: false, - } - } - - /// Sets the timestamp - pub const fn with_timestamp(mut self, timestamp: u64) -> Self { - self.timestamp = timestamp; - self - } - - /// Sets the parent block - pub const fn with_parent(mut self, parent: L2BlockInfo) -> Self { - self.parent = parent; - self - } - - /// Sets the transactions - #[allow(dead_code)] - pub fn with_transactions(mut self, txs: Vec<alloy_primitives::Bytes>) -> Self { - self.transactions = Some(txs); - self - } - - /// Sets the gas limit - #[allow(dead_code)] - pub const fn with_gas_limit(mut self, gas_limit: u64) -> Self { - self.gas_limit = Some(gas_limit); - self - } - - /// Builds the OpAttributesWithParent - pub fn build(self) -> OpAttributesWithParent { - let attributes = OpPayloadAttributes { - payload_attributes: alloy_rpc_types_engine::PayloadAttributes { - timestamp: self.timestamp, - prev_randao: self.prev_randao, - suggested_fee_recipient: self.suggested_fee_recipient, - withdrawals: self.withdrawals, - parent_beacon_block_root: self.parent_beacon_block_root, - }, - transactions: self.transactions, - no_tx_pool: self.no_tx_pool, - gas_limit: self.gas_limit, - eip_1559_params: self.eip_1559_params, - min_base_fee: self.min_base_fee, - }; - - OpAttributesWithParent::new( - attributes, - self.parent, - self.derived_from, - self.is_last_in_span, - ) - } -} - -impl Default for TestAttributesBuilder { - fn default() -> Self { - Self::new() - } -} diff --git a/kona/crates/node/engine/src/test_utils/engine_client.rs b/kona/crates/node/engine/src/test_utils/engine_client.rs deleted file mode 100644 index 5fedc431b56..00000000000 --- 
a/kona/crates/node/engine/src/test_utils/engine_client.rs +++ /dev/null @@ -1,803 +0,0 @@ -//! Mock implementations for testing engine client functionality. - -use crate::{EngineClient, HyperAuthClient}; -use alloy_eips::{BlockId, eip1898::BlockNumberOrTag}; -use alloy_network::{Ethereum, Network}; -use alloy_primitives::{Address, B256, BlockHash, StorageKey}; -use alloy_provider::{EthGetBlock, ProviderCall, RpcWithBlock}; -use alloy_rpc_types_engine::{ - ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, - ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, - PayloadStatus, -}; -use alloy_rpc_types_eth::{Block, EIP1186AccountProofResponse, Transaction as EthTransaction}; -use alloy_transport::{TransportError, TransportErrorKind, TransportResult}; -use alloy_transport_http::Http; -use async_trait::async_trait; -use kona_genesis::RollupConfig; -use kona_protocol::L2BlockInfo; -use op_alloy_network::Optimism; -use op_alloy_provider::ext::engine::OpEngineApi; -use op_alloy_rpc_types::Transaction as OpTransaction; -use op_alloy_rpc_types_engine::{ - OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, - OpPayloadAttributes, ProtocolVersion, -}; -use std::{collections::HashMap, sync::Arc}; -use tokio::sync::RwLock; - -use crate::EngineClientError; - -/// Builder for creating test MockEngineClient instances with sensible defaults -pub fn test_engine_client_builder() -> MockEngineClientBuilder { - MockEngineClientBuilder::new().with_config(Arc::new(RollupConfig::default())) -} - -/// Mock storage for engine client responses. -/// -/// Each API method has version-specific storage to allow tests to verify -/// which specific version was called and return different responses per version. -#[derive(Debug, Clone, Default)] -pub struct MockEngineStorage { - /// Storage for block responses by tag. 
- pub l2_blocks_by_label: HashMap<BlockNumberOrTag, Block<OpTransaction>>, - /// Storage for block info responses by tag. - pub block_info_by_tag: HashMap<BlockNumberOrTag, L2BlockInfo>, - - // Version-specific new_payload responses - /// Storage for new_payload_v1 responses. - pub new_payload_v1_response: Option<PayloadStatus>, - /// Storage for new_payload_v2 responses. - pub new_payload_v2_response: Option<PayloadStatus>, - /// Storage for new_payload_v3 responses. - pub new_payload_v3_response: Option<PayloadStatus>, - /// Storage for new_payload_v4 responses. - pub new_payload_v4_response: Option<PayloadStatus>, - - // Version-specific fork_choice_updated responses - /// Storage for fork_choice_updated_v2 responses. - pub fork_choice_updated_v2_response: Option<ForkchoiceUpdated>, - /// Storage for fork_choice_updated_v3 responses. - pub fork_choice_updated_v3_response: Option<ForkchoiceUpdated>, - - // Version-specific get_payload responses - /// Storage for execution payload envelope v2 responses. - pub execution_payload_v2: Option<ExecutionPayloadEnvelopeV2>, - /// Storage for OP execution payload envelope v3 responses. - pub execution_payload_v3: Option<OpExecutionPayloadEnvelopeV3>, - /// Storage for OP execution payload envelope v4 responses. - pub execution_payload_v4: Option<OpExecutionPayloadEnvelopeV4>, - - // Version-specific get_payload_bodies responses - /// Storage for get_payload_bodies_by_hash_v1 responses. - pub get_payload_bodies_by_hash_v1_response: Option<ExecutionPayloadBodiesV1>, - /// Storage for get_payload_bodies_by_range_v1 responses. - pub get_payload_bodies_by_range_v1_response: Option<ExecutionPayloadBodiesV1>, - - // Non-versioned responses - /// Storage for client version responses. - pub client_versions: Option<Vec<ClientVersionV1>>, - /// Storage for protocol version responses. - pub protocol_version: Option<ProtocolVersion>, - /// Storage for capabilities responses. 
- pub capabilities: Option<Vec<String>>, - - // Storage for get_l1_block, get_l2_block, and get_proof - /// Storage for L1 blocks by stringified BlockId. - /// L1 blocks use standard Ethereum transactions. - pub l1_blocks_by_id: HashMap<String, Block<EthTransaction>>, - /// Storage for L2 blocks by stringified BlockId. - /// L2 blocks use OP Stack transactions. - pub l2_blocks_by_id: HashMap<String, Block<OpTransaction>>, - /// Storage for proofs by (address, stringified BlockId) key. - pub proofs_by_address: HashMap<(Address, String), EIP1186AccountProofResponse>, -} - -/// Builder for constructing a [`MockEngineClient`] with pre-configured responses. -/// -/// This builder allows you to set up mock responses before creating the client, -/// making it easier to write concise tests. -/// -/// # Example -/// -/// ```rust,ignore -/// use kona_engine::test_utils::{MockEngineClient}; -/// use alloy_rpc_types_engine::{PayloadStatus, PayloadStatusEnum}; -/// use std::sync::Arc; -/// -/// let mock = MockEngineClient::builder() -/// .with_config(Arc::new(RollupConfig::default())) -/// .with_payload_status(PayloadStatus { -/// status: PayloadStatusEnum::Valid, -/// latest_valid_hash: Some(B256::ZERO), -/// }) -/// .build(); -/// ``` -#[derive(Debug)] -pub struct MockEngineClientBuilder { - cfg: Option<Arc<RollupConfig>>, - storage: MockEngineStorage, -} - -impl MockEngineClientBuilder { - /// Creates a new builder with default values. - pub fn new() -> Self { - Self { cfg: None, storage: MockEngineStorage::default() } - } - - /// Sets the rollup configuration. - pub fn with_config(mut self, cfg: Arc<RollupConfig>) -> Self { - self.cfg = Some(cfg); - self - } - - /// Sets a block response for a specific tag. - pub fn with_l2_block_by_label( - mut self, - tag: BlockNumberOrTag, - block: Block<OpTransaction>, - ) -> Self { - self.storage.l2_blocks_by_label.insert(tag, block); - self - } - - /// Sets a block info response for a specific tag. 
- pub fn with_block_info_by_tag(mut self, tag: BlockNumberOrTag, info: L2BlockInfo) -> Self { - self.storage.block_info_by_tag.insert(tag, info); - self - } - - /// Sets the new_payload_v1 response. - pub fn with_new_payload_v1_response(mut self, status: PayloadStatus) -> Self { - self.storage.new_payload_v1_response = Some(status); - self - } - - /// Sets the new_payload_v2 response. - pub fn with_new_payload_v2_response(mut self, status: PayloadStatus) -> Self { - self.storage.new_payload_v2_response = Some(status); - self - } - - /// Sets the new_payload_v3 response. - pub fn with_new_payload_v3_response(mut self, status: PayloadStatus) -> Self { - self.storage.new_payload_v3_response = Some(status); - self - } - - /// Sets the new_payload_v4 response. - pub fn with_new_payload_v4_response(mut self, status: PayloadStatus) -> Self { - self.storage.new_payload_v4_response = Some(status); - self - } - - /// Sets the fork_choice_updated_v2 response. - pub fn with_fork_choice_updated_v2_response(mut self, response: ForkchoiceUpdated) -> Self { - self.storage.fork_choice_updated_v2_response = Some(response); - self - } - - /// Sets the fork_choice_updated_v3 response. - pub fn with_fork_choice_updated_v3_response(mut self, response: ForkchoiceUpdated) -> Self { - self.storage.fork_choice_updated_v3_response = Some(response); - self - } - - /// Sets the execution payload v2 response. - pub fn with_execution_payload_v2(mut self, payload: ExecutionPayloadEnvelopeV2) -> Self { - self.storage.execution_payload_v2 = Some(payload); - self - } - - /// Sets the execution payload v3 response. - pub fn with_execution_payload_v3(mut self, payload: OpExecutionPayloadEnvelopeV3) -> Self { - self.storage.execution_payload_v3 = Some(payload); - self - } - - /// Sets the execution payload v4 response. 
- pub fn with_execution_payload_v4(mut self, payload: OpExecutionPayloadEnvelopeV4) -> Self { - self.storage.execution_payload_v4 = Some(payload); - self - } - - /// Sets the get_payload_bodies_by_hash_v1 response. - pub fn with_payload_bodies_by_hash_response( - mut self, - bodies: ExecutionPayloadBodiesV1, - ) -> Self { - self.storage.get_payload_bodies_by_hash_v1_response = Some(bodies); - self - } - - /// Sets the get_payload_bodies_by_range_v1 response. - pub fn with_payload_bodies_by_range_response( - mut self, - bodies: ExecutionPayloadBodiesV1, - ) -> Self { - self.storage.get_payload_bodies_by_range_v1_response = Some(bodies); - self - } - - /// Sets the client versions response. - pub fn with_client_versions(mut self, versions: Vec<ClientVersionV1>) -> Self { - self.storage.client_versions = Some(versions); - self - } - - /// Sets the protocol version response. - pub const fn with_protocol_version(mut self, version: ProtocolVersion) -> Self { - self.storage.protocol_version = Some(version); - self - } - - /// Sets the capabilities response. - pub fn with_capabilities(mut self, capabilities: Vec<String>) -> Self { - self.storage.capabilities = Some(capabilities); - self - } - - /// Sets an L1 block response for a specific BlockId. - pub fn with_l1_block(mut self, block_id: BlockId, block: Block<EthTransaction>) -> Self { - let key = block_id_to_key(&block_id); - self.storage.l1_blocks_by_id.insert(key, block); - self - } - - /// Sets an L2 block response for a specific BlockId. - pub fn with_l2_block(mut self, block_id: BlockId, block: Block<OpTransaction>) -> Self { - let key = block_id_to_key(&block_id); - self.storage.l2_blocks_by_id.insert(key, block); - self - } - - /// Sets a proof response for a specific address and BlockId. 
- pub fn with_proof( - mut self, - address: Address, - block_id: BlockId, - proof: EIP1186AccountProofResponse, - ) -> Self { - let key = block_id_to_key(&block_id); - self.storage.proofs_by_address.insert((address, key), proof); - self - } - - /// Builds the [`MockEngineClient`] with the configured values. - /// - /// # Panics - /// - /// Panics if any required fields (cfg) are not set. - pub fn build(self) -> MockEngineClient { - let cfg = self.cfg.expect("cfg must be set"); - - MockEngineClient { cfg, storage: Arc::new(RwLock::new(self.storage)) } - } -} - -impl Default for MockEngineClientBuilder { - fn default() -> Self { - Self::new() - } -} - -/// Mock implementation of the EngineClient trait for testing. -/// -/// This mock allows tests to configure expected responses for all EngineClient -/// and OpEngineApi methods. All responses are stored in a shared [`MockEngineStorage`] -/// protected by an RwLock for thread-safe access. -#[derive(Debug, Clone)] -pub struct MockEngineClient { - /// The rollup configuration. - cfg: Arc<RollupConfig>, - /// Shared storage for mock responses. - storage: Arc<RwLock<MockEngineStorage>>, -} - -impl MockEngineClient { - /// Creates a new mock engine client with the given config. - pub fn new(cfg: Arc<RollupConfig>) -> Self { - Self { cfg, storage: Arc::new(RwLock::new(MockEngineStorage::default())) } - } - - /// Creates a builder for constructing a mock engine client. - pub fn builder() -> MockEngineClientBuilder { - MockEngineClientBuilder::new() - } - - /// Returns a reference to the mock storage for configuring responses. - pub fn storage(&self) -> Arc<RwLock<MockEngineStorage>> { - Arc::clone(&self.storage) - } - - /// Sets a block response for a specific tag. - pub async fn set_l2_block_by_label(&self, tag: BlockNumberOrTag, block: Block<OpTransaction>) { - self.storage.write().await.l2_blocks_by_label.insert(tag, block); - } - - /// Sets a block info response for a specific tag. 
- pub async fn set_block_info_by_tag(&self, tag: BlockNumberOrTag, info: L2BlockInfo) { - self.storage.write().await.block_info_by_tag.insert(tag, info); - } - - /// Sets the new_payload_v1 response. - pub async fn set_new_payload_v1_response(&self, status: PayloadStatus) { - self.storage.write().await.new_payload_v1_response = Some(status); - } - - /// Sets the new_payload_v2 response. - pub async fn set_new_payload_v2_response(&self, status: PayloadStatus) { - self.storage.write().await.new_payload_v2_response = Some(status); - } - - /// Sets the new_payload_v3 response. - pub async fn set_new_payload_v3_response(&self, status: PayloadStatus) { - self.storage.write().await.new_payload_v3_response = Some(status); - } - - /// Sets the new_payload_v4 response. - pub async fn set_new_payload_v4_response(&self, status: PayloadStatus) { - self.storage.write().await.new_payload_v4_response = Some(status); - } - - /// Sets the fork_choice_updated_v2 response. - pub async fn set_fork_choice_updated_v2_response(&self, response: ForkchoiceUpdated) { - self.storage.write().await.fork_choice_updated_v2_response = Some(response); - } - - /// Sets the fork_choice_updated_v3 response. - pub async fn set_fork_choice_updated_v3_response(&self, response: ForkchoiceUpdated) { - self.storage.write().await.fork_choice_updated_v3_response = Some(response); - } - - /// Sets the execution payload v2 response. - pub async fn set_execution_payload_v2(&self, payload: ExecutionPayloadEnvelopeV2) { - self.storage.write().await.execution_payload_v2 = Some(payload); - } - - /// Sets the execution payload v3 response. - pub async fn set_execution_payload_v3(&self, payload: OpExecutionPayloadEnvelopeV3) { - self.storage.write().await.execution_payload_v3 = Some(payload); - } - - /// Sets the execution payload v4 response. 
- pub async fn set_execution_payload_v4(&self, payload: OpExecutionPayloadEnvelopeV4) { - self.storage.write().await.execution_payload_v4 = Some(payload); - } - - /// Sets the get_payload_bodies_by_hash_v1 response. - pub async fn set_payload_bodies_by_hash_response(&self, bodies: ExecutionPayloadBodiesV1) { - self.storage.write().await.get_payload_bodies_by_hash_v1_response = Some(bodies); - } - - /// Sets the get_payload_bodies_by_range_v1 response. - pub async fn set_payload_bodies_by_range_response(&self, bodies: ExecutionPayloadBodiesV1) { - self.storage.write().await.get_payload_bodies_by_range_v1_response = Some(bodies); - } - - /// Sets the client versions response. - pub async fn set_client_versions(&self, versions: Vec<ClientVersionV1>) { - self.storage.write().await.client_versions = Some(versions); - } - - /// Sets the protocol version response. - pub async fn set_protocol_version(&self, version: ProtocolVersion) { - self.storage.write().await.protocol_version = Some(version); - } - - /// Sets the capabilities response. - pub async fn set_capabilities(&self, capabilities: Vec<String>) { - self.storage.write().await.capabilities = Some(capabilities); - } - - /// Sets an L1 block response for a specific BlockId. - pub async fn set_l1_block(&self, block_id: BlockId, block: Block<EthTransaction>) { - let key = block_id_to_key(&block_id); - self.storage.write().await.l1_blocks_by_id.insert(key, block); - } - - /// Sets an L2 block response for a specific BlockId. - pub async fn set_l2_block(&self, block_id: BlockId, block: Block<OpTransaction>) { - let key = block_id_to_key(&block_id); - self.storage.write().await.l2_blocks_by_id.insert(key, block); - } - - /// Sets a proof response for a specific address and BlockId. 
- pub async fn set_proof( - &self, - address: Address, - block_id: BlockId, - proof: EIP1186AccountProofResponse, - ) { - let key = block_id_to_key(&block_id); - self.storage.write().await.proofs_by_address.insert((address, key), proof); - } -} - -#[async_trait] -impl EngineClient for MockEngineClient { - fn cfg(&self) -> &RollupConfig { - self.cfg.as_ref() - } - - fn get_l1_block(&self, block: BlockId) -> EthGetBlock<<Ethereum as Network>::BlockResponse> { - let storage = Arc::clone(&self.storage); - let block_key = block_id_to_key(&block); - - EthGetBlock::new_provider( - block, - Box::new(move |_kind| { - let storage = Arc::clone(&storage); - let block_key = block_key.clone(); - - ProviderCall::BoxedFuture(Box::pin(async move { - let storage_guard = storage.read().await; - Ok(storage_guard.l1_blocks_by_id.get(&block_key).cloned()) - })) - }), - ) - } - - fn get_l2_block(&self, block: BlockId) -> EthGetBlock<<Optimism as Network>::BlockResponse> { - let storage = Arc::clone(&self.storage); - let block_key = block_id_to_key(&block); - - EthGetBlock::new_provider( - block, - Box::new(move |_kind| { - let storage = Arc::clone(&storage); - let block_key = block_key.clone(); - - ProviderCall::BoxedFuture(Box::pin(async move { - let storage_guard = storage.read().await; - Ok(storage_guard.l2_blocks_by_id.get(&block_key).cloned()) - })) - }), - ) - } - - fn get_proof( - &self, - address: Address, - _keys: Vec<StorageKey>, - ) -> RpcWithBlock<(Address, Vec<StorageKey>), EIP1186AccountProofResponse> { - let storage = Arc::clone(&self.storage); - - RpcWithBlock::new_provider(move |block_id| { - let storage = Arc::clone(&storage); - let block_key = block_id_to_key(&block_id); - let address = address; - - ProviderCall::BoxedFuture(Box::pin(async move { - let storage_guard = storage.read().await; - storage_guard.proofs_by_address.get(&(address, block_key)).cloned().ok_or_else( - || { - TransportError::from(TransportErrorKind::custom_str( - "No proof configured for this 
address and block. \ - Use with_proof() or set_proof() to set a response.", - )) - }, - ) - })) - }) - } - - async fn new_payload_v1(&self, _payload: ExecutionPayloadV1) -> TransportResult<PayloadStatus> { - let storage = self.storage.read().await; - storage.new_payload_v1_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "new_payload_v1 was called but no v1 response configured. \ - Use with_new_payload_v1_response() or set_new_payload_v1_response() to set a response." - )) - }) - } - - async fn l2_block_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<Block<OpTransaction>>, EngineClientError> { - let storage = self.storage.read().await; - Ok(storage.l2_blocks_by_label.get(&numtag).cloned()) - } - - async fn l2_block_info_by_label( - &self, - numtag: BlockNumberOrTag, - ) -> Result<Option<L2BlockInfo>, EngineClientError> { - let storage = self.storage.read().await; - Ok(storage.block_info_by_tag.get(&numtag).cloned()) - } -} - -#[async_trait] -impl OpEngineApi<Optimism, Http<HyperAuthClient>> for MockEngineClient { - async fn new_payload_v2( - &self, - _payload: ExecutionPayloadInputV2, - ) -> TransportResult<PayloadStatus> { - let storage = self.storage.read().await; - storage.new_payload_v2_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "new_payload_v2 was called but no v2 response configured. \ - Use with_new_payload_v2_response() or set_new_payload_v2_response() to set a response." - )) - }) - } - - async fn new_payload_v3( - &self, - _payload: ExecutionPayloadV3, - _parent_beacon_block_root: B256, - ) -> TransportResult<PayloadStatus> { - let storage = self.storage.read().await; - storage.new_payload_v3_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "new_payload_v3 was called but no v3 response configured. \ - Use with_new_payload_v3_response() or set_new_payload_v3_response() to set a response." 
- )) - }) - } - - async fn new_payload_v4( - &self, - _payload: OpExecutionPayloadV4, - _parent_beacon_block_root: B256, - ) -> TransportResult<PayloadStatus> { - let storage = self.storage.read().await; - storage.new_payload_v4_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "new_payload_v4 was called but no v4 response configured. \ - Use with_new_payload_v4_response() or set_new_payload_v4_response() to set a response." - )) - }) - } - - async fn fork_choice_updated_v2( - &self, - _fork_choice_state: ForkchoiceState, - _payload_attributes: Option<OpPayloadAttributes>, - ) -> TransportResult<ForkchoiceUpdated> { - let storage = self.storage.read().await; - storage.fork_choice_updated_v2_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "fork_choice_updated_v2 was called but no v2 response configured. \ - Use with_fork_choice_updated_v2_response() or set_fork_choice_updated_v2_response() to set a response." - )) - }) - } - - async fn fork_choice_updated_v3( - &self, - _fork_choice_state: ForkchoiceState, - _payload_attributes: Option<OpPayloadAttributes>, - ) -> TransportResult<ForkchoiceUpdated> { - let storage = self.storage.read().await; - storage.fork_choice_updated_v3_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "fork_choice_updated_v3 was called but no v3 response configured. \ - Use with_fork_choice_updated_v3_response() or set_fork_choice_updated_v3_response() to set a response." 
- )) - }) - } - - async fn get_payload_v2( - &self, - _payload_id: PayloadId, - ) -> TransportResult<ExecutionPayloadEnvelopeV2> { - let storage = self.storage.read().await; - storage.execution_payload_v2.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "No execution payload v2 set in mock", - )) - }) - } - - async fn get_payload_v3( - &self, - _payload_id: PayloadId, - ) -> TransportResult<OpExecutionPayloadEnvelopeV3> { - let storage = self.storage.read().await; - storage.execution_payload_v3.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "No execution payload v3 set in mock", - )) - }) - } - - async fn get_payload_v4( - &self, - _payload_id: PayloadId, - ) -> TransportResult<OpExecutionPayloadEnvelopeV4> { - let storage = self.storage.read().await; - storage.execution_payload_v4.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "No execution payload v4 set in mock", - )) - }) - } - - async fn get_payload_bodies_by_hash_v1( - &self, - _block_hashes: Vec<BlockHash>, - ) -> TransportResult<ExecutionPayloadBodiesV1> { - let storage = self.storage.read().await; - storage.get_payload_bodies_by_hash_v1_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "get_payload_bodies_by_hash_v1 was called but no response configured. \ - Use with_payload_bodies_by_hash_response() or set_payload_bodies_by_hash_response() to set a response." - )) - }) - } - - async fn get_payload_bodies_by_range_v1( - &self, - _start: u64, - _count: u64, - ) -> TransportResult<ExecutionPayloadBodiesV1> { - let storage = self.storage.read().await; - storage.get_payload_bodies_by_range_v1_response.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str( - "get_payload_bodies_by_range_v1 was called but no response configured. \ - Use with_payload_bodies_by_range_response() or set_payload_bodies_by_range_response() to set a response." 
- )) - }) - } - - async fn get_client_version_v1( - &self, - _client_version: ClientVersionV1, - ) -> TransportResult<Vec<ClientVersionV1>> { - let storage = self.storage.read().await; - storage.client_versions.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str("No client versions set in mock")) - }) - } - - async fn signal_superchain_v1( - &self, - _recommended: ProtocolVersion, - _required: ProtocolVersion, - ) -> TransportResult<ProtocolVersion> { - let storage = self.storage.read().await; - storage.protocol_version.ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str("No protocol version set in mock")) - }) - } - - async fn exchange_capabilities( - &self, - _capabilities: Vec<String>, - ) -> TransportResult<Vec<String>> { - let storage = self.storage.read().await; - storage.capabilities.clone().ok_or_else(|| { - TransportError::from(TransportErrorKind::custom_str("No capabilities set in mock")) - }) - } -} - -/// Helper function to convert BlockId to a string key for HashMap storage. -/// This is necessary because BlockId doesn't implement Hash. 
-fn block_id_to_key(block_id: &BlockId) -> String { - match block_id { - BlockId::Hash(hash) => format!("hash:{}", hash.block_hash), - BlockId::Number(num) => format!("number:{num}"), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_rpc_types_engine::PayloadStatusEnum; - - #[tokio::test] - async fn test_mock_engine_client_creation() { - let cfg = Arc::new(RollupConfig::default()); - - let mock = MockEngineClient::new(cfg.clone()); - - // Verify the config was set correctly - assert_eq!(mock.cfg().block_time, cfg.block_time); - } - - #[tokio::test] - async fn test_mock_payload_status() { - let cfg = Arc::new(RollupConfig::default()); - - let mock = MockEngineClient::new(cfg); - - let status = - PayloadStatus { status: PayloadStatusEnum::Valid, latest_valid_hash: Some(B256::ZERO) }; - - mock.set_new_payload_v2_response(status.clone()).await; - - // Create a minimal ExecutionPayloadInputV2 for testing - use alloy_primitives::{Bytes, U256}; - use alloy_rpc_types_engine::ExecutionPayloadV1; - let payload = ExecutionPayloadInputV2 { - execution_payload: ExecutionPayloadV1 { - parent_hash: B256::ZERO, - fee_recipient: Default::default(), - state_root: B256::ZERO, - receipts_root: B256::ZERO, - logs_bloom: Default::default(), - prev_randao: B256::ZERO, - block_number: 0, - gas_limit: 0, - gas_used: 0, - timestamp: 0, - extra_data: Bytes::new(), - base_fee_per_gas: U256::ZERO, - block_hash: B256::ZERO, - transactions: vec![], - }, - withdrawals: None, - }; - - let result = mock.new_payload_v2(payload).await.unwrap(); - - assert_eq!(result.status, status.status); - } - - #[tokio::test] - async fn test_mock_forkchoice_updated() { - let cfg = Arc::new(RollupConfig::default()); - - let mock = MockEngineClient::new(cfg); - - let fcu = ForkchoiceUpdated { - payload_status: PayloadStatus { - status: PayloadStatusEnum::Valid, - latest_valid_hash: Some(B256::ZERO), - }, - payload_id: None, - }; - - mock.set_fork_choice_updated_v2_response(fcu.clone()).await; - - let 
result = mock.fork_choice_updated_v2(ForkchoiceState::default(), None).await.unwrap(); - - assert_eq!(result.payload_status.status, fcu.payload_status.status); - } - - #[tokio::test] - async fn test_builder_pattern() { - let cfg = Arc::new(RollupConfig::default()); - let status = - PayloadStatus { status: PayloadStatusEnum::Valid, latest_valid_hash: Some(B256::ZERO) }; - - let mock = MockEngineClient::builder() - .with_config(cfg.clone()) - .with_new_payload_v2_response(status.clone()) - .build(); - - // Verify the config was set - assert_eq!(mock.cfg().block_time, cfg.block_time); - - // Create a minimal ExecutionPayloadInputV2 for testing - use alloy_primitives::{Bytes, U256}; - use alloy_rpc_types_engine::ExecutionPayloadV1; - let payload = ExecutionPayloadInputV2 { - execution_payload: ExecutionPayloadV1 { - parent_hash: B256::ZERO, - fee_recipient: Default::default(), - state_root: B256::ZERO, - receipts_root: B256::ZERO, - logs_bloom: Default::default(), - prev_randao: B256::ZERO, - block_number: 0, - gas_limit: 0, - gas_used: 0, - timestamp: 0, - extra_data: Bytes::new(), - base_fee_per_gas: U256::ZERO, - block_hash: B256::ZERO, - transactions: vec![], - }, - withdrawals: None, - }; - - // Verify the pre-configured response is returned - let result = mock.new_payload_v2(payload).await.unwrap(); - assert_eq!(result.status, status.status); - } -} diff --git a/kona/crates/node/engine/src/test_utils/provider.rs b/kona/crates/node/engine/src/test_utils/provider.rs deleted file mode 100644 index d7d4adc51ac..00000000000 --- a/kona/crates/node/engine/src/test_utils/provider.rs +++ /dev/null @@ -1,32 +0,0 @@ -use alloy_network::Ethereum; -use alloy_provider::Provider; -use async_trait::async_trait; -use op_alloy_network::Optimism; - -/// Mock L1 Provider that implements the Provider trait for testing. -/// -/// This is a minimal no-op provider that satisfies the trait bounds required -/// by [`MockEngineClient`]. All provider methods return empty/default values. 
-#[derive(Debug, Clone)] -pub struct MockL1Provider; - -#[async_trait] -impl Provider<Ethereum> for MockL1Provider { - fn root(&self) -> &alloy_provider::RootProvider<Ethereum> { - unimplemented!("MockL1Provider does not support root()") - } -} - -/// Mock L2 Provider that implements the Provider trait for Optimism network. -/// -/// This is a minimal no-op provider that satisfies the trait bounds required -/// by [`MockEngineClient`]. All provider methods return empty/default values. -#[derive(Debug, Clone)] -pub struct MockL2Provider; - -#[async_trait] -impl Provider<Optimism> for MockL2Provider { - fn root(&self) -> &alloy_provider::RootProvider<Optimism> { - unimplemented!("MockL2Provider does not support root()") - } -} diff --git a/kona/crates/node/gossip/src/builder.rs b/kona/crates/node/gossip/src/builder.rs deleted file mode 100644 index e77c333be82..00000000000 --- a/kona/crates/node/gossip/src/builder.rs +++ /dev/null @@ -1,221 +0,0 @@ -//! A builder for the [`GossipDriver`]. - -use alloy_primitives::Address; -use kona_genesis::RollupConfig; -use kona_peers::{PeerMonitoring, PeerScoreLevel}; -use libp2p::{ - Multiaddr, StreamProtocol, SwarmBuilder, gossipsub::Config, identity::Keypair, - noise::Config as NoiseConfig, tcp::Config as TcpConfig, yamux::Config as YamuxConfig, -}; -use std::time::Duration; -use tokio::sync::watch::{self}; - -use crate::{Behaviour, BlockHandler, GaterConfig, GossipDriver, GossipDriverBuilderError}; - -/// A builder for the [`GossipDriver`]. -#[derive(Debug)] -pub struct GossipDriverBuilder { - /// The [`RollupConfig`] for the network. - rollup_config: RollupConfig, - /// The [`Keypair`] for the node. - keypair: Keypair, - /// The [`Multiaddr`] for the gossip driver to listen on. - gossip_addr: Multiaddr, - /// Unsafe block signer [`Address`]. - signer: Address, - /// The idle connection timeout as a [`Duration`]. - timeout: Option<Duration>, - /// Sets the [`PeerScoreLevel`] for the [`Behaviour`]. 
- scoring: Option<PeerScoreLevel>, - /// The [`Config`] for the [`Behaviour`]. - config: Option<Config>, - /// If set, the gossip layer will monitor peer scores and ban peers that are below a given - /// threshold. - peer_monitoring: Option<PeerMonitoring>, - /// The configuration for the connection gater. - gater_config: Option<GaterConfig>, - /// Topic scoring. Disabled by default. - topic_scoring: bool, -} - -impl GossipDriverBuilder { - /// Creates a new [`GossipDriverBuilder`]. - pub const fn new( - rollup_config: RollupConfig, - signer: Address, - gossip_addr: Multiaddr, - keypair: Keypair, - ) -> Self { - Self { - timeout: None, - keypair, - gossip_addr, - signer, - scoring: None, - config: None, - peer_monitoring: None, - gater_config: None, - rollup_config, - topic_scoring: false, - } - } - - /// Sets the configuration for the connection gater. - pub const fn with_gater_config(mut self, config: GaterConfig) -> Self { - self.gater_config = Some(config); - self - } - - /// Sets the [`RollupConfig`] for the network. - /// This is used to determine the topic to publish to. - pub fn with_rollup_config(mut self, rollup_config: RollupConfig) -> Self { - self.rollup_config = rollup_config; - self - } - - /// Sets topic scoring. - /// This is disabled by default. - pub const fn with_topic_scoring(mut self, topic_scoring: bool) -> Self { - self.topic_scoring = topic_scoring; - self - } - - /// Sets the [`PeerScoreLevel`] for the [`Behaviour`]. - pub const fn with_peer_scoring(mut self, level: PeerScoreLevel) -> Self { - self.scoring = Some(level); - self - } - - /// Sets the [`PeerMonitoring`] configuration for the gossip driver. - pub const fn with_peer_monitoring(mut self, peer_monitoring: Option<PeerMonitoring>) -> Self { - self.peer_monitoring = peer_monitoring; - self - } - - /// Sets the unsafe block signer [`Address`]. 
- pub const fn with_unsafe_block_signer_receiver(mut self, signer: Address) -> Self { - self.signer = signer; - self - } - - /// Sets the [`Keypair`] for the node. - pub fn with_keypair(mut self, keypair: Keypair) -> Self { - self.keypair = keypair; - self - } - - /// Sets the swarm's idle connection timeout. - pub const fn with_timeout(mut self, timeout: Duration) -> Self { - self.timeout = Some(timeout); - self - } - - /// Sets the [`Multiaddr`] for the gossip driver to listen on. - pub fn with_address(mut self, addr: Multiaddr) -> Self { - self.gossip_addr = addr; - self - } - - /// Sets the [`Config`] for the [`Behaviour`]. - pub fn with_config(mut self, config: Config) -> Self { - self.config = Some(config); - self - } - - /// Builds the [`GossipDriver`]. - pub fn build( - mut self, - ) -> Result< - (GossipDriver<crate::ConnectionGater>, watch::Sender<Address>), - GossipDriverBuilderError, - > { - // Extract builder arguments - let timeout = self.timeout.take().unwrap_or(Duration::from_secs(60)); - let keypair = self.keypair; - let addr = self.gossip_addr; - let signer_recv = self.signer; - let rollup_config = self.rollup_config; - let l2_chain_id = rollup_config.l2_chain_id; - let block_time = rollup_config.block_time; - - let (signer_tx, signer_rx) = watch::channel(signer_recv); - - // Block Handler setup - let handler = BlockHandler::new(rollup_config, signer_rx); - - // Construct the gossip behaviour - let config = self.config.unwrap_or(crate::default_config()); - info!( - target: "gossip", - "CONFIG: [Mesh D: {}] [Mesh L: {}] [Mesh H: {}] [Gossip Lazy: {}] [Flood Publish: {}]", - config.mesh_n(), - config.mesh_n_low(), - config.mesh_n_high(), - config.gossip_lazy(), - config.flood_publish() - ); - info!( - target: "gossip", - "CONFIG: [Heartbeat: {}] [Floodsub: {}] [Validation: {:?}] [Max Transmit: {} bytes]", - config.heartbeat_interval().as_secs(), - config.support_floodsub(), - config.validation_mode(), - config.max_transmit_size() - ); - let mut 
behaviour = Behaviour::new(keypair.public(), config, &[Box::new(handler.clone())])?; - - // If peer scoring is configured, set it on the behaviour. - match self.scoring { - None => info!(target: "scoring", "Peer scoring not enabled"), - Some(PeerScoreLevel::Off) => { - info!(target: "scoring", level = ?PeerScoreLevel::Off, "Peer scoring explicitly disabled") - } - Some(level) => { - use crate::handler::Handler; - let params = level - .to_params(handler.topics(), self.topic_scoring, block_time) - .unwrap_or_default(); - match behaviour.gossipsub.with_peer_score(params, PeerScoreLevel::thresholds()) { - Ok(_) => debug!(target: "scoring", "Peer scoring enabled successfully"), - Err(e) => warn!(target: "scoring", "Peer scoring failed: {}", e), - } - } - } - - // Let's setup the sync request/response protocol stream. - let mut sync_handler = behaviour.sync_req_resp.new_control(); - - let protocol = format!("/opstack/req/payload_by_number/{l2_chain_id}/0/"); - let sync_protocol_name = StreamProtocol::try_from_owned(protocol) - .map_err(|_| GossipDriverBuilderError::SetupSyncReqRespError)?; - let sync_protocol = sync_handler - .accept(sync_protocol_name) - .map_err(|_| GossipDriverBuilderError::SyncReqRespAlreadyAccepted)?; - - // Build the swarm with DNS+TCP transport. - // Note: with_dns() must be called after with_tcp() to wrap TCP with DNS resolution. - debug!(target: "gossip", "Building Swarm with Peer ID: {}", keypair.public().to_peer_id()); - let swarm = SwarmBuilder::with_existing_identity(keypair) - .with_tokio() - .with_tcp( - TcpConfig::default().nodelay(true), - |i: &Keypair| { - debug!(target: "gossip", "Noise Config Peer ID: {}", i.public().to_peer_id()); - NoiseConfig::new(i) - }, - YamuxConfig::default, - ) - .map_err(|_| GossipDriverBuilderError::TcpError)? - .with_dns() - .map_err(|_| GossipDriverBuilderError::TcpError)? - .with_behaviour(|_| behaviour) - .map_err(|_| GossipDriverBuilderError::WithBehaviourError)? 
- .with_swarm_config(|c| c.with_idle_connection_timeout(timeout)) - .build(); - - let gater_config = self.gater_config.take().unwrap_or_default(); - let gate = crate::ConnectionGater::new(gater_config); - - Ok((GossipDriver::new(swarm, addr, handler, sync_handler, sync_protocol, gate), signer_tx)) - } -} diff --git a/kona/crates/node/gossip/src/config.rs b/kona/crates/node/gossip/src/config.rs deleted file mode 100644 index 0193d9d8f00..00000000000 --- a/kona/crates/node/gossip/src/config.rs +++ /dev/null @@ -1,164 +0,0 @@ -//! Gossipsub Config - -use lazy_static::lazy_static; -use libp2p::gossipsub::{Config, ConfigBuilder, Message, MessageId}; -use openssl::sha::sha256; -use snap::raw::Decoder; -use std::time::Duration; - -//////////////////////////////////////////////////////////////////////////////////////////////// -// GossipSub Constants -//////////////////////////////////////////////////////////////////////////////////////////////// - -/// The maximum gossip size. -/// Limits the total size of gossip RPC containers as well as decompressed individual messages. -pub const MAX_GOSSIP_SIZE: usize = 10 * (1 << 20); - -/// The minimum gossip size. -/// Used to make sure that there is at least some data to validate the signature against. -pub const MIN_GOSSIP_SIZE: usize = 66; - -/// The maximum outbound queue. -pub const MAX_OUTBOUND_QUEUE: usize = 256; - -/// The maximum validate queue. -pub const MAX_VALIDATE_QUEUE: usize = 256; - -/// The global validate throttle. -pub const GLOBAL_VALIDATE_THROTTLE: usize = 512; - -/// The default mesh D. -pub const DEFAULT_MESH_D: usize = 8; - -/// The default mesh D low. -pub const DEFAULT_MESH_DLO: usize = 6; - -/// The default mesh D high. -pub const DEFAULT_MESH_DHI: usize = 12; - -/// The default mesh D lazy. 
-pub const DEFAULT_MESH_DLAZY: usize = 6; - -//////////////////////////////////////////////////////////////////////////////////////////////// -// Duration Constants -//////////////////////////////////////////////////////////////////////////////////////////////// - -lazy_static! { - /// The gossip heartbeat. - pub static ref GOSSIP_HEARTBEAT: Duration = Duration::from_millis(500); - - /// The seen messages TTL. - /// Limits the duration that message IDs are remembered for gossip deduplication purposes. - pub static ref SEEN_MESSAGES_TTL: Duration = 130 * *GOSSIP_HEARTBEAT; - - /// The peer score inspect frequency. - /// The frequency at which peer scores are inspected. - pub static ref PEER_SCORE_INSPECT_FREQUENCY: Duration = 15 * Duration::from_secs(1); -} - -//////////////////////////////////////////////////////////////////////////////////////////////// -// Config Building -//////////////////////////////////////////////////////////////////////////////////////////////// - -/// Builds the default gossipsub configuration. -/// -/// Notable defaults: -/// - flood_publish: false (call `.flood_publish(true)` on the [ConfigBuilder] to enable) -/// - backoff_slack: 1 -/// - heart beat interval: 1 second -/// - peer exchange is disabled -/// - maximum byte size for gossip messages: 2048 bytes -/// -/// # Returns -/// -/// A [`ConfigBuilder`] with the default gossipsub configuration already set. -/// Call `.build()` on the returned builder to get the final [libp2p::gossipsub::Config]. 
-pub fn default_config_builder() -> ConfigBuilder { - let mut builder = ConfigBuilder::default(); - builder - .mesh_n(DEFAULT_MESH_D) - .mesh_n_low(DEFAULT_MESH_DLO) - .mesh_n_high(DEFAULT_MESH_DHI) - .gossip_lazy(DEFAULT_MESH_DLAZY) - .heartbeat_interval(*GOSSIP_HEARTBEAT) - .fanout_ttl(Duration::from_secs(60)) - .history_length(12) - .history_gossip(3) - .flood_publish(false) - .support_floodsub() - .max_transmit_size(MAX_GOSSIP_SIZE) - .duplicate_cache_time(Duration::from_secs(120)) - .validation_mode(libp2p::gossipsub::ValidationMode::None) - .validate_messages() - .message_id_fn(compute_message_id); - - builder -} - -/// Returns the default [Config] for gossipsub. -pub fn default_config() -> Config { - default_config_builder().build().expect("default gossipsub config must be valid") -} - -/// Computes the [MessageId] of a `gossipsub` message. -fn compute_message_id(msg: &Message) -> MessageId { - let mut decoder = Decoder::new(); - let id = decoder.decompress_vec(&msg.data).map_or_else( - |_| { - warn!(target: "cfg", "Failed to decompress message, using invalid snappy"); - let domain_invalid_snappy: Vec<u8> = vec![0x0, 0x0, 0x0, 0x0]; - sha256([domain_invalid_snappy.as_slice(), msg.data.as_slice()].concat().as_slice()) - [..20] - .to_vec() - }, - |data| { - let domain_valid_snappy: Vec<u8> = vec![0x1, 0x0, 0x0, 0x0]; - sha256([domain_valid_snappy.as_slice(), data.as_slice()].concat().as_slice())[..20] - .to_vec() - }, - ); - - MessageId(id) -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_constructs_default_config() { - let cfg = default_config(); - assert_eq!(cfg.mesh_n(), DEFAULT_MESH_D); - assert_eq!(cfg.mesh_n_low(), DEFAULT_MESH_DLO); - assert_eq!(cfg.mesh_n_high(), DEFAULT_MESH_DHI); - } - - #[test] - fn test_compute_message_id_invalid_snappy() { - let msg = Message { - source: None, - data: vec![1, 2, 3, 4, 5], - sequence_number: None, - topic: libp2p::gossipsub::TopicHash::from_raw("test"), - }; - - let id = 
compute_message_id(&msg); - let hashed = sha256(&[&[0x0, 0x0, 0x0, 0x0], [1, 2, 3, 4, 5].as_slice()].concat()); - assert_eq!(id.0, hashed[..20].to_vec()); - } - - #[test] - fn test_compute_message_id_valid_snappy() { - let compressed = snap::raw::Encoder::new().compress_vec(&[1, 2, 3, 4, 5]).unwrap(); - let msg = Message { - source: None, - data: compressed, - sequence_number: None, - topic: libp2p::gossipsub::TopicHash::from_raw("test"), - }; - - let id = compute_message_id(&msg); - let hashed = sha256(&[&[0x1, 0x0, 0x0, 0x0], [1, 2, 3, 4, 5].as_slice()].concat()); - assert_eq!(id.0, hashed[..20].to_vec()); - } -} diff --git a/kona/crates/node/gossip/src/driver.rs b/kona/crates/node/gossip/src/driver.rs deleted file mode 100644 index db1e9f90e80..00000000000 --- a/kona/crates/node/gossip/src/driver.rs +++ /dev/null @@ -1,492 +0,0 @@ -//! Consensus-layer gossipsub driver for Optimism. - -use alloy_primitives::{Address, hex}; -use derive_more::Debug; -use discv5::Enr; -use futures::{AsyncReadExt, AsyncWriteExt, stream::StreamExt}; -use kona_genesis::RollupConfig; -use kona_peers::{EnrValidation, PeerMonitoring, enr_to_multiaddr}; -use libp2p::{ - Multiaddr, PeerId, Swarm, TransportError, - gossipsub::{IdentTopic, MessageId}, - swarm::SwarmEvent, -}; -use libp2p_identity::Keypair; -use libp2p_stream::IncomingStreams; -use op_alloy_rpc_types_engine::OpNetworkPayloadEnvelope; -use std::{ - collections::HashMap, - sync::Arc, - time::{Duration, Instant}, -}; -use tokio::sync::Mutex; - -use crate::{ - Behaviour, BlockHandler, ConnectionGate, ConnectionGater, Event, GossipDriverBuilder, Handler, - PublishError, -}; - -/// A driver for a [`Swarm`] instance. -/// -/// Connects the swarm to the given [`Multiaddr`] -/// and handles events using the [`BlockHandler`]. -#[derive(Debug)] -pub struct GossipDriver<G: ConnectionGate> { - /// The [`Swarm`] instance. - #[debug(skip)] - pub swarm: Swarm<Behaviour>, - /// A [`Multiaddr`] to listen on. 
- pub addr: Multiaddr, - /// The [`BlockHandler`]. - pub handler: BlockHandler, - /// A [`libp2p_stream::Control`] instance. Can be used to control the sync request/response - #[debug(skip)] - pub sync_handler: libp2p_stream::Control, - /// The inbound streams for the sync request/response protocol. - /// - /// This is an option to allow to take the underlying value when the gossip driver gets - /// activated. - /// - /// TODO(op-rs/kona#2141): remove the sync-req-resp protocol once the `op-node` phases it out. - #[debug(skip)] - pub sync_protocol: Option<IncomingStreams>, - /// A mapping from [`PeerId`] to [`Multiaddr`]. - pub peerstore: HashMap<PeerId, libp2p::identify::Info>, - /// If set, the gossip layer will monitor peer scores and ban peers that are below a given - /// threshold. - pub peer_monitoring: Option<PeerMonitoring>, - /// Tracks connection start time for peers - pub peer_connection_start: HashMap<PeerId, Instant>, - /// The connection gate. - pub connection_gate: G, - /// Tracks ping times for peers. - pub ping: Arc<Mutex<HashMap<PeerId, Duration>>>, -} - -impl<G> GossipDriver<G> -where - G: ConnectionGate, -{ - /// Returns the [`GossipDriverBuilder`] that can be used to construct the [`GossipDriver`]. - pub const fn builder( - rollup_config: RollupConfig, - signer: Address, - gossip_addr: Multiaddr, - keypair: Keypair, - ) -> GossipDriverBuilder { - GossipDriverBuilder::new(rollup_config, signer, gossip_addr, keypair) - } - - /// Creates a new [`GossipDriver`] instance. 
- pub fn new( - swarm: Swarm<Behaviour>, - addr: Multiaddr, - handler: BlockHandler, - sync_handler: libp2p_stream::Control, - sync_protocol: IncomingStreams, - gate: G, - ) -> Self { - Self { - swarm, - addr, - handler, - peerstore: Default::default(), - peer_monitoring: None, - peer_connection_start: Default::default(), - sync_handler, - sync_protocol: Some(sync_protocol), - connection_gate: gate, - ping: Arc::new(Mutex::new(Default::default())), - } - } - - /// Publishes an unsafe block to gossip. - /// - /// ## Arguments - /// - /// * `topic_selector` - A function that selects the topic for the block. This is expected to be - /// a closure that takes the [`BlockHandler`] and returns the [`IdentTopic`] for the block. - /// * `payload` - The payload to be published. - /// - /// ## Returns - /// - /// Returns the [`MessageId`] of the published message or a [`PublishError`] - /// if the message could not be published. - pub fn publish( - &mut self, - selector: impl FnOnce(&BlockHandler) -> IdentTopic, - payload: Option<OpNetworkPayloadEnvelope>, - ) -> Result<Option<MessageId>, PublishError> { - let Some(payload) = payload else { - return Ok(None); - }; - let topic = selector(&self.handler); - let topic_hash = topic.hash(); - let data = self.handler.encode(topic, payload)?; - let id = self.swarm.behaviour_mut().gossipsub.publish(topic_hash, data)?; - kona_macros::inc!(gauge, crate::Metrics::UNSAFE_BLOCK_PUBLISHED); - Ok(Some(id)) - } - - /// Handles the sync request/response protocol. - /// - /// This is a mock handler that supports the `payload_by_number` protocol. - /// It always returns: not found (1), version (0). `<https://specs.optimism.io/protocol/rollup-node-p2p.html#payload_by_number>` - /// - /// ## Note - /// - /// This is used to ensure op-nodes are not penalizing kona-nodes for not supporting it. - /// This feature is being deprecated by the op-node team. Once it is fully removed from the - /// op-node's implementation we will remove this handler. 
- pub(super) fn sync_protocol_handler(&mut self) { - let Some(mut sync_protocol) = self.sync_protocol.take() else { - return; - }; - - // Spawn a new task to handle the sync request/response protocol. - tokio::spawn(async move { - loop { - let Some((peer_id, mut inbound_stream)) = sync_protocol.next().await else { - warn!(target: "gossip", "The sync protocol stream has ended"); - return; - }; - - info!(target: "gossip", "Received a sync request from {peer_id}, spawning a new task to handle it"); - - tokio::spawn(async move { - let mut buffer = Vec::new(); - let Ok(bytes_received) = inbound_stream.read_to_end(&mut buffer).await else { - error!(target: "gossip", "Failed to read the sync request from {peer_id}"); - return; - }; - - debug!(target: "gossip", bytes_received = bytes_received, peer_id = ?peer_id, payload = ?buffer, "Received inbound sync request"); - - // We return: not found (1), version (0). `<https://specs.optimism.io/protocol/rollup-node-p2p.html#payload_by_number>` - // Response format: <response> = <res><version><payload> - // No payload is returned. - const OUTPUT: [u8; 2] = hex!("0100"); - - // We only write that we're not supporting the sync request. - if let Err(e) = inbound_stream.write_all(&OUTPUT).await { - error!(target: "gossip", err = ?e, "Failed to write the sync response to {peer_id}"); - return; - }; - - debug!(target: "gossip", bytes_sent = OUTPUT.len(), peer_id = ?peer_id, "Sent outbound sync response"); - }); - } - }); - } - - /// Starts the libp2p Swarm. - /// - /// - Starts the sync request/response protocol handler. - /// - Tells the swarm to listen on the given [`Multiaddr`]. - /// - /// Waits for the swarm to start listen before returning and connecting to peers. - pub async fn start(&mut self) -> Result<Multiaddr, TransportError<std::io::Error>> { - // Start the sync request/response protocol handler. 
- self.sync_protocol_handler(); - - match self.swarm.listen_on(self.addr.clone()) { - Ok(id) => loop { - if let SwarmEvent::NewListenAddr { address, listener_id } = - self.swarm.select_next_some().await - { - if id == listener_id { - info!(target: "gossip", "Swarm now listening on: {address}"); - - self.addr = address.clone(); - - return Ok(address); - } - } - }, - Err(err) => { - error!(target: "gossip", "Fail to listen on {}: {err}", self.addr); - Err(err) - } - } - } - - /// Returns the local peer id. - pub fn local_peer_id(&self) -> &libp2p::PeerId { - self.swarm.local_peer_id() - } - - /// Returns a mutable reference to the Swarm's behaviour. - pub fn behaviour_mut(&mut self) -> &mut Behaviour { - self.swarm.behaviour_mut() - } - - /// Attempts to select the next event from the Swarm. - pub async fn next(&mut self) -> Option<SwarmEvent<Event>> { - self.swarm.next().await - } - - /// Returns the number of connected peers. - pub fn connected_peers(&self) -> usize { - self.swarm.connected_peers().count() - } - - /// Dials the given [`Enr`]. - pub fn dial(&mut self, enr: Enr) { - let validation = EnrValidation::validate(&enr, self.handler.rollup_config.l2_chain_id.id()); - if validation.is_invalid() { - trace!(target: "gossip", "Invalid OP Stack ENR for chain id {}: {}", self.handler.rollup_config.l2_chain_id.id(), validation); - return; - } - let Some(multiaddr) = enr_to_multiaddr(&enr) else { - debug!(target: "gossip", "Failed to extract tcp socket from enr: {:?}", enr); - kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "invalid_enr"); - return; - }; - self.dial_multiaddr(multiaddr); - } - - /// Dials the given [`Multiaddr`]. - pub fn dial_multiaddr(&mut self, addr: Multiaddr) { - // Check if we're allowed to dial the address. - if let Err(dial_error) = self.connection_gate.can_dial(&addr) { - debug!(target: "gossip", ?dial_error, "unable to dial peer"); - return; - } - - // Extract the peer ID from the address. 
- let Some(peer_id) = ConnectionGater::peer_id_from_addr(&addr) else { - warn!(target: "gossip", peer=?addr, "Failed to extract PeerId from Multiaddr"); - return; - }; - - if self.swarm.connected_peers().any(|p| p == &peer_id) { - debug!(target: "gossip", peer=?addr, "Already connected to peer, not dialing"); - kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "already_connected", "peer" => peer_id.to_string()); - return; - } - - // Let the gate know we are dialing the address. - // Note: libp2p-dns will automatically resolve DNS multiaddrs at the transport layer. - self.connection_gate.dialing(&addr); - - // Dial - match self.swarm.dial(addr.clone()) { - Ok(_) => { - trace!(target: "gossip", peer=?addr, "Dialed peer"); - self.connection_gate.dialed(&addr); - kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER, "peer" => peer_id.to_string()); - } - Err(e) => { - error!(target: "gossip", "Failed to connect to peer: {:?}", e); - self.connection_gate.remove_dial(&peer_id); - kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "connection_error", "error" => e.to_string(), "peer" => peer_id.to_string()); - } - } - } - - fn handle_gossip_event(&mut self, event: Event) -> Option<OpNetworkPayloadEnvelope> { - match event { - Event::Gossipsub(e) => return self.handle_gossipsub_event(*e), - Event::Ping(libp2p::ping::Event { peer, result, .. }) => { - trace!(target: "gossip", ?peer, ?result, "Ping received"); - - // If the peer is connected to gossip, record the connection duration. - if let Some(start_time) = self.peer_connection_start.get(&peer) { - let _ping_duration = start_time.elapsed(); - kona_macros::record!( - histogram, - crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, - _ping_duration.as_secs_f64() - ); - } - - // Record the peer score in the metrics if available. 
- if let Some(_peer_score) = self.behaviour_mut().gossipsub.peer_score(&peer) { - kona_macros::record!( - histogram, - crate::Metrics::PEER_SCORES, - "peer", - peer.to_string(), - _peer_score - ); - } - - let pings = Arc::clone(&self.ping); - tokio::spawn(async move { - if let Ok(time) = result { - pings.lock().await.insert(peer, time); - } - }); - } - Event::Identify(e) => self.handle_identify_event(*e), - // Don't do anything with stream events as this should be unreachable code. - Event::Stream => { - error!(target: "gossip", "Stream events should not be emitted!"); - } - }; - - None - } - - fn handle_identify_event(&mut self, event: libp2p::identify::Event) { - match event { - libp2p::identify::Event::Received { connection_id, peer_id, info } => { - debug!(target: "gossip", ?connection_id, ?peer_id, ?info, "Received identify info from peer"); - self.peerstore.insert(peer_id, info); - } - libp2p::identify::Event::Sent { connection_id, peer_id } => { - debug!(target: "gossip", ?connection_id, ?peer_id, "Sent identify info to peer"); - } - libp2p::identify::Event::Pushed { connection_id, peer_id, info } => { - debug!(target: "gossip", ?connection_id, ?peer_id, ?info, "Pushed identify info to peer"); - } - libp2p::identify::Event::Error { connection_id, peer_id, error } => { - error!(target: "gossip", ?connection_id, ?peer_id, ?error, "Error raised while attempting to identify remote"); - } - } - } - - /// Handles a [`libp2p::gossipsub::Event`]. 
- fn handle_gossipsub_event( - &mut self, - event: libp2p::gossipsub::Event, - ) -> Option<OpNetworkPayloadEnvelope> { - match event { - libp2p::gossipsub::Event::Message { - propagation_source: src, - message_id: id, - message, - } => { - trace!(target: "gossip", "Received message with topic: {}", message.topic); - kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "message", "topic" => message.topic.to_string()); - if self.handler.topics().contains(&message.topic) { - let (status, payload) = self.handler.handle(message); - _ = self - .swarm - .behaviour_mut() - .gossipsub - .report_message_validation_result(&id, &src, status); - return payload; - } - } - libp2p::gossipsub::Event::Subscribed { peer_id, topic } => { - trace!(target: "gossip", "Peer: {:?} subscribed to topic: {:?}", peer_id, topic); - kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "subscribed", "topic" => topic.to_string()); - } - libp2p::gossipsub::Event::Unsubscribed { peer_id, topic } => { - trace!(target: "gossip", "Peer: {:?} unsubscribed from topic: {:?}", peer_id, topic); - kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "unsubscribed", "topic" => topic.to_string()); - } - libp2p::gossipsub::Event::SlowPeer { peer_id, .. } => { - trace!(target: "gossip", "Slow peer: {:?}", peer_id); - kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "slow_peer", "peer" => peer_id.to_string()); - } - libp2p::gossipsub::Event::GossipsubNotSupported { peer_id } => { - trace!(target: "gossip", "Peer: {:?} does not support gossipsub", peer_id); - kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "not_supported", "peer" => peer_id.to_string()); - } - } - None - } - - /// Handles the [`SwarmEvent<Event>`]. 
- pub fn handle_event(&mut self, event: SwarmEvent<Event>) -> Option<OpNetworkPayloadEnvelope> { - match event { - SwarmEvent::Behaviour(behavior_event) => { - return self.handle_gossip_event(behavior_event); - } - SwarmEvent::ConnectionEstablished { peer_id, .. } => { - let peer_count = self.swarm.connected_peers().count(); - info!(target: "gossip", "Connection established: {:?} | Peer Count: {}", peer_id, peer_count); - kona_macros::inc!( - gauge, - crate::Metrics::GOSSIPSUB_CONNECTION, - "type" => "connected", - "peer" => peer_id.to_string(), - ); - kona_macros::set!(gauge, crate::Metrics::GOSSIP_PEER_COUNT, peer_count as f64); - - self.peer_connection_start.insert(peer_id, Instant::now()); - } - SwarmEvent::OutgoingConnectionError { peer_id: _peer_id, error, .. } => { - debug!(target: "gossip", "Outgoing connection error: {:?}", error); - // Remove the peer from current_dials so it can be dialed again - if let Some(peer_id) = _peer_id { - self.connection_gate.remove_dial(&peer_id); - } - kona_macros::inc!( - gauge, - crate::Metrics::GOSSIPSUB_CONNECTION, - "type" => "outgoing_error", - "peer" => _peer_id.map(|p| p.to_string()).unwrap_or_default() - ); - } - SwarmEvent::IncomingConnectionError { - error, connection_id: _connection_id, .. - } => { - debug!(target: "gossip", "Incoming connection error: {:?}", error); - kona_macros::inc!( - gauge, - crate::Metrics::GOSSIPSUB_CONNECTION, - "type" => "incoming_error", - "connection_id" => _connection_id.to_string() - ); - } - SwarmEvent::ConnectionClosed { peer_id, cause, .. } => { - let peer_count = self.swarm.connected_peers().count(); - warn!(target: "gossip", ?peer_id, ?cause, peer_count, "Connection closed"); - kona_macros::inc!( - gauge, - crate::Metrics::GOSSIPSUB_CONNECTION, - "type" => "closed", - "peer" => peer_id.to_string() - ); - kona_macros::set!(gauge, crate::Metrics::GOSSIP_PEER_COUNT, peer_count as f64); - - // Record the total connection duration. 
- if let Some(start_time) = self.peer_connection_start.remove(&peer_id) { - let _peer_duration = start_time.elapsed(); - kona_macros::record!( - histogram, - crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, - _peer_duration.as_secs_f64() - ); - } - - // Record the peer score in the metrics if available. - if let Some(_peer_score) = self.behaviour_mut().gossipsub.peer_score(&peer_id) { - kona_macros::record!( - histogram, - crate::Metrics::PEER_SCORES, - "peer", - peer_id.to_string(), - _peer_score - ); - } - - let pings = Arc::clone(&self.ping); - tokio::spawn(async move { - pings.lock().await.remove(&peer_id); - }); - - // If the connection was initiated by us, remove the peer from the current dials - // set so that we can dial it again. - self.connection_gate.remove_dial(&peer_id); - } - SwarmEvent::NewListenAddr { listener_id, address } => { - debug!(target: "gossip", reporter_id = ?listener_id, new_address = ?address, "New listen address"); - } - SwarmEvent::Dialing { peer_id, connection_id } => { - debug!(target: "gossip", ?peer_id, ?connection_id, "Dialing peer"); - } - SwarmEvent::NewExternalAddrOfPeer { peer_id, address } => { - debug!(target: "gossip", ?peer_id, ?address, "New external address of peer"); - } - _ => { - debug!(target: "gossip", ?event, "Ignoring non-behaviour in event handler"); - } - }; - - None - } -} diff --git a/kona/crates/node/gossip/src/error.rs b/kona/crates/node/gossip/src/error.rs deleted file mode 100644 index 36b165af7bd..00000000000 --- a/kona/crates/node/gossip/src/error.rs +++ /dev/null @@ -1,116 +0,0 @@ -//! Error types for the gossip networking module. - -use crate::BehaviourError; -use derive_more::From; -use libp2p::{Multiaddr, PeerId}; -use std::net::IpAddr; -use thiserror::Error; - -/// Error encountered when publishing a payload to the gossip network. -/// -/// Represents failures in the payload publishing pipeline, including -/// network-level publishing errors and payload encoding issues. 
-#[derive(Debug, Error)] -pub enum PublishError { - /// Failed to publish the payload via GossipSub protocol. - /// - /// This can occur due to network connectivity issues, mesh topology - /// problems, or protocol-level errors in the libp2p stack. - #[error("Failed to publish payload: {0}")] - PublishError(#[from] libp2p::gossipsub::PublishError), - - /// Failed to encode the payload before publishing. - /// - /// Indicates an issue with serializing the payload data structure - /// into the binary format expected by the network protocol. - #[error("Failed to encode payload: {0}")] - EncodeError(#[from] HandlerEncodeError), -} - -/// Error encountered when encoding payloads in the block handler. -/// -/// Represents failures in the payload serialization process, typically -/// occurring when converting OP Stack data structures to network format. -#[derive(Debug, Error)] -pub enum HandlerEncodeError { - /// Failed to encode the OP Stack payload envelope. - /// - /// This error indicates issues with serializing the OP Stack network payload - /// structure, which contains the consensus data being gossiped. - #[error("Failed to encode payload: {0}")] - PayloadEncodeError(#[from] op_alloy_rpc_types_engine::PayloadEnvelopeEncodeError), - - /// Attempted to publish to an unknown or unsubscribed topic. - /// - /// This error occurs when trying to publish to a GossipSub topic that - /// is not recognized or that the node is not subscribed to. - #[error("Unknown topic: {0}")] - UnknownTopic(libp2p::gossipsub::TopicHash), -} - -/// An error type for the [`crate::GossipDriverBuilder`]. -#[derive(Debug, Clone, PartialEq, Eq, From, Error)] -pub enum GossipDriverBuilderError { - /// A TCP error. - #[error("TCP error")] - TcpError, - /// An error when setting the behaviour on the swarm builder. - #[error("error setting behaviour on swarm builder")] - WithBehaviourError, - /// An error when building the gossip behaviour. 
- #[error("error building gossip behaviour")] - BehaviourError(BehaviourError), - /// An error when setting up the sync request/response protocol. - #[error("error setting up sync request/response protocol")] - SetupSyncReqRespError, - /// The sync request/response protocol has already been accepted. - #[error("sync request/response protocol already accepted")] - SyncReqRespAlreadyAccepted, -} - -/// An error type representing reasons why a peer cannot be dialed. -#[derive(Debug, Clone, Error)] -pub enum DialError { - /// Failed to extract PeerId from Multiaddr. - #[error("Failed to extract PeerId from Multiaddr: {addr}")] - InvalidMultiaddr { - /// The multiaddress that failed to be parsed or does not contain a valid PeerId component - addr: Multiaddr, - }, - /// Already dialing this peer. - #[error("Already dialing peer: {peer_id}")] - AlreadyDialing { - /// The PeerId of the peer that is already being dialed - peer_id: PeerId, - }, - /// Dial threshold reached for this peer. - #[error("Dial threshold reached for peer: {addr}")] - ThresholdReached { - /// The multiaddress of the peer that has reached the maximum dial attempts - addr: Multiaddr, - }, - /// Peer is blocked. - #[error("Peer is blocked: {peer_id}")] - PeerBlocked { - /// The PeerId of the peer that is on the blocklist - peer_id: PeerId, - }, - /// Failed to extract IP address from Multiaddr. - #[error("Failed to extract IP address from Multiaddr: {addr}")] - InvalidIpAddress { - /// The multiaddress that does not contain a valid IP address component - addr: Multiaddr, - }, - /// IP address is blocked. - #[error("IP address is blocked: {ip}")] - AddressBlocked { - /// The IP address that is on the blocklist - ip: IpAddr, - }, - /// IP address is in a blocked subnet. 
- #[error("IP address {ip} is in a blocked subnet")] - SubnetBlocked { - /// The IP address that belongs to a blocked subnet range - ip: IpAddr, - }, -} diff --git a/kona/crates/node/gossip/src/event.rs b/kona/crates/node/gossip/src/event.rs deleted file mode 100644 index c8e0c877bf3..00000000000 --- a/kona/crates/node/gossip/src/event.rs +++ /dev/null @@ -1,108 +0,0 @@ -//! Event Handling Module. - -use libp2p::{gossipsub, identify, ping}; - -/// High-level events emitted by the gossip networking system. -/// -/// This enum wraps the various low-level libp2p events into a unified -/// event type that can be handled by the application layer. Events are -/// generated by the underlying libp2p protocols and bubble up through -/// the networking stack. -#[derive(Debug)] -pub enum Event { - /// Network connectivity check event from the ping protocol. - /// - /// Used to verify peer connectivity and measure round-trip times. - #[allow(dead_code)] - Ping(ping::Event), - - /// GossipSub mesh networking event. - /// - /// Includes message reception, peer subscription changes, and mesh - /// topology updates. This is the primary event type for consensus - /// layer networking. - Gossipsub(Box<gossipsub::Event>), - - /// Peer identification protocol event. - /// - /// Contains information about peer capabilities, supported protocols, - /// and network identity. Used for protocol negotiation and compatibility - /// checking. - Identify(Box<identify::Event>), - - /// Stream protocol event for request-response communication. - /// - /// Handles direct peer-to-peer communication outside of the gossip mesh, - /// typically used for block synchronization requests. 
- Stream, -} - -impl From<ping::Event> for Event { - /// Converts [ping::Event] to [Event] - fn from(value: ping::Event) -> Self { - Self::Ping(value) - } -} - -impl From<gossipsub::Event> for Event { - /// Converts [gossipsub::Event] to [Event] - fn from(value: gossipsub::Event) -> Self { - Self::Gossipsub(Box::new(value)) - } -} - -impl From<identify::Event> for Event { - /// Converts [identify::Event] to [Event] - fn from(value: identify::Event) -> Self { - Self::Identify(Box::new(value)) - } -} - -impl From<()> for Event { - /// Converts () to [Event] - fn from(_value: ()) -> Self { - Self::Stream - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_event_conversion() { - let gossipsub_event = libp2p::gossipsub::Event::Message { - propagation_source: libp2p::PeerId::random(), - message_id: libp2p::gossipsub::MessageId(vec![]), - message: libp2p::gossipsub::Message { - source: None, - data: vec![], - sequence_number: None, - topic: libp2p::gossipsub::TopicHash::from_raw("test"), - }, - }; - let event = Event::from(gossipsub_event); - match event { - Event::Gossipsub(e) => { - if !matches!(*e, libp2p::gossipsub::Event::Message { .. }) { - panic!("Event conversion failed"); - } - } - _ => panic!("Event conversion failed"), - } - } - - #[test] - fn test_event_conversion_ping() { - let ping_event = ping::Event { - peer: libp2p::PeerId::random(), - connection: libp2p::swarm::ConnectionId::new_unchecked(0), - result: Ok(core::time::Duration::from_secs(1)), - }; - let event = Event::from(ping_event); - match event { - Event::Ping(_) => {} - _ => panic!("Event conversion failed"), - } - } -} diff --git a/kona/crates/node/gossip/src/lib.rs b/kona/crates/node/gossip/src/lib.rs deleted file mode 100644 index 79a695e8c4b..00000000000 --- a/kona/crates/node/gossip/src/lib.rs +++ /dev/null @@ -1,76 +0,0 @@ -//! Gossip protocol implementation for the OP Stack. -//! -//! 
This crate provides a comprehensive gossip networking implementation for the OP Stack, -//! including GossipSub-based consensus layer networking, RPC interfaces for network -//! administration, and metrics collection. -//! -//! ## Key Components -//! -//! - [`GossipDriver`]: Main driver managing the libp2p swarm and event handling -//! - [`Behaviour`]: Custom libp2p behavior combining GossipSub, Ping, and Identify -//! - [`BlockHandler`]: Validates and processes incoming block payloads -//! - [`ConnectionGater`]: Sophisticated connection management and rate limiting -//! - [`P2pRpcRequest`]: RPC interface for network administration -//! - [`Metrics`]: Metrics collection for monitoring and observability - -#![doc(html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/kona-logo.png")] -#![doc(issue_tracker_base_url = "https://github.com/op-rs/kona/issues/")] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -#[macro_use] -extern crate tracing; -// Used in tests -#[allow(unused_extern_crates)] -extern crate alloy_rlp; - -mod metrics; -pub use metrics::Metrics; - -mod rpc; -pub use rpc::{ - Connectedness, Direction, GossipScores, P2pRpcRequest, PeerCount, PeerDump, PeerInfo, - PeerScores, PeerStats, ReqRespScores, TopicScores, -}; - -mod behaviour; -pub use behaviour::{Behaviour, BehaviourError}; - -mod config; -pub use config::{ - DEFAULT_MESH_D, DEFAULT_MESH_DHI, DEFAULT_MESH_DLAZY, DEFAULT_MESH_DLO, - GLOBAL_VALIDATE_THROTTLE, GOSSIP_HEARTBEAT, MAX_GOSSIP_SIZE, MAX_OUTBOUND_QUEUE, - MAX_VALIDATE_QUEUE, MIN_GOSSIP_SIZE, PEER_SCORE_INSPECT_FREQUENCY, SEEN_MESSAGES_TTL, - default_config, default_config_builder, -}; - -mod gate; -pub use gate::ConnectionGate; // trait - -mod gater; -pub use gater::{ - ConnectionGater, // implementation - DialInfo, - GaterConfig, -}; - -mod builder; -pub use builder::GossipDriverBuilder; - -mod error; -pub use error::{DialError, GossipDriverBuilderError, 
HandlerEncodeError, PublishError}; - -mod event; -pub use event::Event; - -mod handler; -pub use handler::{BlockHandler, Handler}; - -mod driver; -pub use driver::GossipDriver; - -mod block_validity; -pub use block_validity::BlockInvalidError; - -#[cfg(test)] -pub(crate) use block_validity::tests::*; diff --git a/kona/crates/node/gossip/src/rpc/request.rs b/kona/crates/node/gossip/src/rpc/request.rs deleted file mode 100644 index 5bd36c4e7fa..00000000000 --- a/kona/crates/node/gossip/src/rpc/request.rs +++ /dev/null @@ -1,662 +0,0 @@ -//! Contains the p2p RPC request type. - -use std::{net::IpAddr, num::TryFromIntError, sync::Arc}; - -use crate::{GossipDriver, GossipScores}; -use alloy_primitives::map::{HashMap, HashSet}; -use discv5::{ - enr::{NodeId, k256::ecdsa}, - multiaddr::Protocol, -}; -use ipnet::IpNet; -use kona_disc::Discv5Handler; -use kona_peers::OpStackEnr; -use libp2p::{Multiaddr, PeerId, gossipsub::TopicHash}; -use tokio::sync::oneshot::Sender; - -use super::{ - PeerDump, PeerStats, - types::{Connectedness, Direction, PeerInfo, PeerScores}, -}; -use crate::ConnectionGate; - -/// A p2p RPC Request. -#[derive(Debug)] -pub enum P2pRpcRequest { - /// Returns [`PeerInfo`] for the p2p network. - PeerInfo(Sender<PeerInfo>), - /// Dumps the node's discovery table from the [`kona_disc::Discv5Driver`]. - DiscoveryTable(Sender<Vec<String>>), - /// Returns the current peer count for both the - /// - Discovery Service ([`kona_disc::Discv5Driver`]) - /// - Gossip Service ([`crate::GossipDriver`]) - PeerCount(Sender<(Option<usize>, usize)>), - /// Returns a [`PeerDump`] containing detailed information about connected peers. - /// If `connected` is true, only returns connected peers. - Peers { - /// The output channel to send the [`PeerDump`] to. - out: Sender<PeerDump>, - /// Whether to only return connected peers. - connected: bool, - }, - /// Request to block a peer by its [`PeerId`]. - BlockPeer { - /// The [`PeerId`] of the peer to block. 
- id: PeerId, - }, - /// Request to unblock a peer by its [`PeerId`]. - UnblockPeer { - /// The [`PeerId`] of the peer to unblock. - id: PeerId, - }, - /// Request to list all blocked peers. - ListBlockedPeers(Sender<Vec<PeerId>>), - /// Request to block a given IP Address. - BlockAddr { - /// The IP address to block. - address: IpAddr, - }, - /// Request to unblock a given IP Address. - UnblockAddr { - /// The IP address to unblock. - address: IpAddr, - }, - /// Request to list all blocked IP Addresses. - ListBlockedAddrs(Sender<Vec<IpAddr>>), - /// Request to block a given Subnet. - BlockSubnet { - /// The Subnet to block. - address: IpNet, - }, - /// Request to unblock a given Subnet. - UnblockSubnet { - /// The Subnet to unblock. - address: IpNet, - }, - - /// Request to connect to a given peer. - ConnectPeer { - /// The [`Multiaddr`] of the peer to connect to. - address: Multiaddr, - }, - /// Request to disconnect the specified peer. - DisconnectPeer { - /// The peer id to disconnect. - peer_id: PeerId, - }, - /// Protects a given peer from disconnection. - ProtectPeer { - /// The id of the peer. - peer_id: PeerId, - }, - /// Unprotects a given peer. - UnprotectPeer { - /// The id of the peer. - peer_id: PeerId, - }, - /// Request to list all blocked Subnets. - ListBlockedSubnets(Sender<Vec<IpNet>>), - /// Returns the current peer stats for both the - /// - Discovery Service ([`kona_disc::Discv5Driver`]) - /// - Gossip Service ([`crate::GossipDriver`]) - /// - /// This information can be used to briefly monitor the current state of the p2p network for a - /// given peer. - PeerStats(Sender<PeerStats>), -} - -impl P2pRpcRequest { - /// Handles the peer count request. 
- pub fn handle<G: ConnectionGate>(self, gossip: &mut GossipDriver<G>, disc: &Discv5Handler) { - match self { - Self::PeerCount(s) => Self::handle_peer_count(s, gossip, disc), - Self::DiscoveryTable(s) => Self::handle_discovery_table(s, disc), - Self::PeerInfo(s) => Self::handle_peer_info(s, gossip, disc), - Self::Peers { out, connected } => Self::handle_peers(out, connected, gossip, disc), - Self::DisconnectPeer { peer_id } => Self::disconnect_peer(peer_id, gossip), - Self::PeerStats(s) => Self::handle_peer_stats(s, gossip, disc), - Self::ConnectPeer { address } => Self::connect_peer(address, gossip), - Self::BlockPeer { id } => Self::block_peer(id, gossip), - Self::UnblockPeer { id } => Self::unblock_peer(id, gossip), - Self::ListBlockedPeers(s) => Self::list_blocked_peers(s, gossip), - Self::BlockAddr { address } => Self::block_addr(address, gossip), - Self::UnblockAddr { address } => Self::unblock_addr(address, gossip), - Self::ListBlockedAddrs(s) => Self::list_blocked_addrs(s, gossip), - Self::ProtectPeer { peer_id } => Self::protect_peer(peer_id, gossip), - Self::UnprotectPeer { peer_id } => Self::unprotect_peer(peer_id, gossip), - Self::BlockSubnet { address } => Self::block_subnet(address, gossip), - Self::UnblockSubnet { address } => Self::unblock_subnet(address, gossip), - Self::ListBlockedSubnets(s) => Self::list_blocked_subnets(s, gossip), - } - } - - fn protect_peer<G: ConnectionGate>(id: PeerId, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.protect_peer(id); - } - - fn unprotect_peer<G: ConnectionGate>(id: PeerId, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.unprotect_peer(id); - } - - fn block_addr<G: ConnectionGate>(address: IpAddr, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.block_addr(address); - } - - fn unblock_addr<G: ConnectionGate>(address: IpAddr, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.unblock_addr(address); - } - - fn list_blocked_addrs<G: ConnectionGate>(s: Sender<Vec<IpAddr>>, gossip: 
&GossipDriver<G>) { - let blocked_addrs = gossip.connection_gate.list_blocked_addrs(); - if let Err(e) = s.send(blocked_addrs) { - warn!(target: "p2p::rpc", "Failed to send blocked addresses through response channel: {:?}", e); - } - } - - fn block_peer<G: ConnectionGate>(id: PeerId, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.block_peer(&id); - gossip.swarm.behaviour_mut().gossipsub.blacklist_peer(&id); - } - - fn unblock_peer<G: ConnectionGate>(id: PeerId, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.unblock_peer(&id); - gossip.swarm.behaviour_mut().gossipsub.remove_blacklisted_peer(&id); - } - - fn list_blocked_peers<G: ConnectionGate>(s: Sender<Vec<PeerId>>, gossip: &GossipDriver<G>) { - let blocked_peers = gossip.connection_gate.list_blocked_peers(); - if let Err(e) = s.send(blocked_peers) { - warn!(target: "p2p::rpc", "Failed to send blocked peers through response channel: {:?}", e); - } - } - - fn block_subnet<G: ConnectionGate>(address: IpNet, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.block_subnet(address); - } - - fn unblock_subnet<G: ConnectionGate>(address: IpNet, gossip: &mut GossipDriver<G>) { - gossip.connection_gate.unblock_subnet(address); - } - - fn connect_peer<G: ConnectionGate>(address: Multiaddr, gossip: &mut GossipDriver<G>) { - gossip.dial_multiaddr(address) - } - - fn disconnect_peer<G: ConnectionGate>(peer_id: PeerId, gossip: &mut GossipDriver<G>) { - if let Err(e) = gossip.swarm.disconnect_peer_id(peer_id) { - warn!(target: "p2p::rpc", "Failed to disconnect peer {}: {:?}", peer_id, e); - } else { - info!(target: "p2p::rpc", "Disconnected peer {}", peer_id); - // Record the duration of the peer connection. 
- if let Some(start_time) = gossip.peer_connection_start.remove(&peer_id) { - let _peer_duration = start_time.elapsed(); - kona_macros::record!( - histogram, - crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, - _peer_duration.as_secs_f64() - ); - } - } - } - - fn list_blocked_subnets<G: ConnectionGate>(s: Sender<Vec<IpNet>>, gossip: &GossipDriver<G>) { - let blocked_subnets = gossip.connection_gate.list_blocked_subnets(); - if let Err(e) = s.send(blocked_subnets) { - warn!(target: "p2p::rpc", "Failed to send blocked subnets through response channel: {:?}", e); - } - } - - fn handle_discovery_table(sender: Sender<Vec<String>>, disc: &Discv5Handler) { - let enrs = disc.table_enrs(); - tokio::spawn(async move { - let dt = match enrs.await { - Ok(dt) => dt.into_iter().map(|e| e.to_string()).collect(), - - Err(e) => { - warn!(target: "p2p_rpc", "Failed to receive peer count: {:?}", e); - return; - } - }; - - if let Err(e) = sender.send(dt) { - warn!(target: "p2p_rpc", "Failed to send peer count through response channel: {:?}", e); - } - }); - } - - fn handle_peers<G: ConnectionGate>( - sender: Sender<PeerDump>, - connected: bool, - gossip: &GossipDriver<G>, - disc: &Discv5Handler, - ) { - let Ok(total_connected) = gossip.swarm.network_info().num_peers().try_into() else { - error!(target: "p2p::rpc", "Failed to get total connected peers. The number of connected peers is too large and overflows u32."); - return; - }; - - let peer_ids: Vec<PeerId> = if connected { - gossip.swarm.connected_peers().cloned().collect() - } else { - gossip.peerstore.keys().cloned().collect() - }; - - // Get the set of actually connected peers from the swarm for accurate connectedness - // reporting. - let actually_connected: HashSet<PeerId> = gossip.swarm.connected_peers().cloned().collect(); - - // Get connection gate information. 
- let banned_subnets = gossip.connection_gate.list_blocked_subnets(); - let banned_ips = gossip.connection_gate.list_blocked_addrs(); - let banned_peers = gossip.connection_gate.list_blocked_peers(); - let protected_peers = gossip.connection_gate.list_protected_peers(); - - // For each peer id, determine connectedness based on actual swarm connection state. - // This fixes the issue where the connection gate's internal state could be stale, - // especially for inbound connections or after connections close. - let connectedness = peer_ids - .iter() - .copied() - .map(|id| { - if actually_connected.contains(&id) { - (id, Connectedness::Connected) - } else if banned_peers.contains(&id) { - (id, Connectedness::CannotConnect) - } else { - (id, Connectedness::NotConnected) - } - }) - .collect::<HashMap<PeerId, Connectedness>>(); - - // Clone the ping map - let pings = Arc::clone(&gossip.ping); - - #[derive(Default)] - struct PeerMetadata { - protocols: Option<Vec<String>>, - addresses: Vec<String>, - user_agent: String, - protocol_version: String, - score: f64, - } - - // Build a map of peer ids to their supported protocols and addresses. 
- let mut peer_metadata: HashMap<PeerId, PeerMetadata> = gossip - .peerstore - .iter() - .map(|(id, info)| { - let protocols = if info.protocols.is_empty() { - None - } else { - Some( - info.protocols - .iter() - .map(|protocol| protocol.to_string()) - .collect::<Vec<String>>(), - ) - }; - let addresses = info - .listen_addrs - .iter() - .map(|addr| { - let mut addr = addr.clone(); - addr.push(Protocol::P2p(*id)); - addr.to_string() - }) - .collect::<Vec<String>>(); - - let score = gossip.swarm.behaviour().gossipsub.peer_score(id).unwrap_or_default(); - - ( - *id, - PeerMetadata { - protocols, - addresses, - user_agent: info.agent_version.clone(), - protocol_version: info.protocol_version.clone(), - score, - }, - ) - }) - .collect(); - - // We consider that kona-nodes are gossiping blocks if their peers are subscribed to any of - // the blocks topics. - // This is the same heuristic as the one used in the op-node (`<https://github.com/ethereum-optimism/optimism/blob/6a8b2349c29c2a14f948fcb8aefb90526130acec/op-node/p2p/rpc_server.go#L179-L183>`). - let peer_gossip_info = gossip - .swarm - .behaviour() - .gossipsub - .all_peers() - .filter_map(|(peer_id, topics)| { - let supported_topics = HashSet::from([ - gossip.handler.blocks_v1_topic.hash(), - gossip.handler.blocks_v2_topic.hash(), - gossip.handler.blocks_v3_topic.hash(), - gossip.handler.blocks_v4_topic.hash(), - ]); - - if topics.iter().any(|topic| supported_topics.contains(topic)) { - Some(*peer_id) - } else { - None - } - }) - .collect::<HashSet<_>>(); - - let disc_table_infos = disc.table_infos(); - - tokio::spawn(async move { - let Ok(table_infos) = disc_table_infos.await else { - error!(target: "p2p::rpc", "Failed to get table infos. 
The connection to the gossip driver is closed."); - return; - }; - - let pings = { pings.lock().await.clone() }; - - let node_to_peer_id: HashMap<NodeId, PeerId> = peer_ids.into_iter().filter_map(|id| - { - let Ok(pubkey) = libp2p_identity::PublicKey::try_decode_protobuf(&id.to_bytes()[2..]) else { - error!(target: "p2p::rpc", peer_id = ?id, "Failed to decode public key from peer id. This is a bug as all the peer ids should be decodable (because they come from secp256k1 public keys)."); - return None; - }; - - let key = - match pubkey.try_into_secp256k1().map_err(|err| err.to_string()).and_then( - |key| ecdsa::VerifyingKey::from_sec1_bytes(key.to_bytes().as_slice()).map_err(|err| err.to_string()) - ) { Ok(key) => key, - Err(err) => { - error!(target: "p2p::rpc", peer_id = ?id, err = ?err, "Failed to convert public key to secp256k1 public key. This is a bug."); - return None; - }}; - let node_id = NodeId::from(key); - Some((node_id, id)) - } - ).collect(); - - // Filter out peers that are not in the gossip network. - let node_to_table_infos = table_infos - .into_iter() - .filter(|(id, _, _)| node_to_peer_id.contains_key(id)) - .map(|(id, enr, status)| (id, (enr, status))) - .collect::<HashMap<_, _>>(); - - // Build the peer info map. 
- let infos: HashMap<String, PeerInfo> = node_to_peer_id - .iter() - .map(|(id, peer_id)| { - let (maybe_enr, maybe_status) = node_to_table_infos.get(id).cloned().unzip(); - - let opstack_enr = - maybe_enr.clone().and_then(|enr| OpStackEnr::try_from(&enr).ok()); - - let direction = maybe_status - .map(|status| { - if status.is_incoming() { - Direction::Inbound - } else { - Direction::Outbound - } - }) - .unwrap_or_default(); - - let PeerMetadata { protocols, addresses, user_agent, protocol_version, score } = - peer_metadata.remove(peer_id).unwrap_or_default(); - - let peer_connectedness = - connectedness.get(peer_id).copied().unwrap_or(Connectedness::NotConnected); - - let latency = pings.get(peer_id).map(|d| d.as_secs()).unwrap_or(0); - - let node_id = format!("{:?}", &id); - ( - peer_id.to_string(), - PeerInfo { - peer_id: peer_id.to_string(), - node_id, - user_agent, - protocol_version, - enr: maybe_enr.map(|enr| enr.to_string()), - addresses, - protocols, - connectedness: peer_connectedness, - direction, - // Note: we use the chain id from the ENR if it exists, otherwise we - // use 0 to be consistent with op-node's behavior (`<https://github.com/ethereum-optimism/optimism/blob/6a8b2349c29c2a14f948fcb8aefb90526130acec/op-service/apis/p2p.go#L55>`). - chain_id: opstack_enr.map(|enr| enr.chain_id).unwrap_or(0), - gossip_blocks: peer_gossip_info.contains(peer_id), - protected: protected_peers.contains(peer_id), - latency, - peer_scores: PeerScores { - gossip: GossipScores { - total: score, - // Note(@theochap): we don't compute the topic scores - // because we don't - // `rust-libp2p` doesn't expose that information to the - // user-facing API. 
- // See `<https://github.com/libp2p/rust-libp2p/issues/6058>` - blocks: Default::default(), - // Note(@theochap): We can't compute the ip colocation - // factor because - // `rust-libp2p` doesn't expose that information to the - // user-facing API - // See `<https://github.com/libp2p/rust-libp2p/issues/6058>` - ip_colocation_factor: Default::default(), - // Note(@theochap): We can't compute the behavioral penalty - // because - // `rust-libp2p` doesn't expose that information to the - // user-facing API - // See `<https://github.com/libp2p/rust-libp2p/issues/6058>` - behavioral_penalty: Default::default(), - }, - // We only support a shim implementation for the req/resp - // protocol so we're not - // computing scores for it. - req_resp: Default::default(), - }, - }, - ) - }) - .collect(); - - if let Err(e) = sender.send(PeerDump { - total_connected, - peers: infos, - banned_peers: banned_peers.into_iter().map(|p| p.to_string()).collect(), - banned_ips, - banned_subnets, - }) { - warn!(target: "p2p::rpc", "Failed to send peer info through response channel: {:?}", e); - } - }); - } - - /// Handles a peer info request by spawning a task. 
- fn handle_peer_info<G: ConnectionGate>( - sender: Sender<PeerInfo>, - gossip: &GossipDriver<G>, - disc: &Discv5Handler, - ) { - let peer_id = *gossip.local_peer_id(); - let chain_id = disc.chain_id; - let local_enr = disc.local_enr(); - let mut addresses = gossip - .swarm - .listeners() - .map(|a| { - let mut addr = a.clone(); - addr.push(Protocol::P2p(peer_id)); - addr.to_string() - }) - .collect::<Vec<String>>(); - - addresses.append( - &mut gossip.swarm.external_addresses().map(|a| a.to_string()).collect::<Vec<String>>(), - ); - - tokio::spawn(async move { - let enr = match local_enr.await { - Ok(enr) => enr, - Err(e) => { - warn!(target: "p2p::rpc", "Failed to receive local ENR: {:?}", e); - return; - } - }; - - // Note: we need to use `Debug` impl here because the `Display` impl of - // `NodeId` strips some part of the hex string and replaces it with "...". - let node_id = format!("{:?}", &enr.node_id()); - - // We need to add the local multiaddr to the list of known addresses. - let peer_info = PeerInfo { - peer_id: peer_id.to_string(), - node_id, - user_agent: "kona".to_string(), - protocol_version: String::new(), - enr: Some(enr.to_string()), - addresses, - protocols: Some(vec![ - "/ipfs/id/push/1.0.0".to_string(), - "/meshsub/1.1.0".to_string(), - "/ipfs/ping/1.0.0".to_string(), - "/meshsub/1.2.0".to_string(), - "/ipfs/id/1.0.0".to_string(), - format!("/opstack/req/payload_by_number/{chain_id}/0/"), - "/meshsub/1.0.0".to_string(), - "/floodsub/1.0.0".to_string(), - ]), - connectedness: Connectedness::Connected, - direction: Direction::Inbound, - protected: false, - chain_id, - latency: 0, - gossip_blocks: true, - peer_scores: PeerScores::default(), - }; - if let Err(e) = sender.send(peer_info) { - warn!(target: "p2p_rpc", "Failed to send peer info through response channel: {:?}", e); - } - }); - } - - fn handle_peer_stats<G: ConnectionGate>( - sender: Sender<PeerStats>, - gossip: &GossipDriver<G>, - disc: &Discv5Handler, - ) { - let peers_known = 
gossip.peerstore.len(); - let gossip_network_info = gossip.swarm.network_info(); - let table_info = disc.peer_count(); - - let banned_peers = gossip.connection_gate.list_blocked_peers().len(); - - let topics = gossip.swarm.behaviour().gossipsub.topics().collect::<HashSet<_>>(); - - let topics = topics - .into_iter() - .map(|hash| { - ( - hash.clone(), - gossip - .swarm - .behaviour() - .gossipsub - .all_peers() - .filter(|(_, topics)| topics.contains(&hash)) - .count(), - ) - }) - .collect::<HashMap<_, _>>(); - - let v1_topic_hash = gossip.handler.blocks_v1_topic.hash(); - let v2_topic_hash = gossip.handler.blocks_v2_topic.hash(); - let v3_topic_hash = gossip.handler.blocks_v3_topic.hash(); - let v4_topic_hash = gossip.handler.blocks_v4_topic.hash(); - - tokio::spawn(async move { - let Ok(table) = table_info.await else { - error!(target: "p2p::rpc", "failed to get discovery table size. The sender has been dropped. The discv5 service may not be running anymore."); - return; - }; - - let Ok(table) = table.try_into() else { - error!(target: "p2p::rpc", "failed to get discovery table size. Integer overflow. Please ensure that the number of peers in the discovery table fits in a u32."); - return; - }; - - let Ok(connected) = gossip_network_info.num_peers().try_into() else { - error!(target: "p2p::rpc", "failed to get number of connected peers. Integer overflow. Please ensure that the number of connected peers fits in a u32."); - return; - }; - - let Ok(known) = peers_known.try_into() else { - error!(target: "p2p::rpc", "failed to get number of known peers. Integer overflow. 
Please ensure that the number of known peers fits in a u32."); - return; - }; - - // Given a topic hash, this method: - // - gets the number of peers in the mesh for that topic - // - returns an error if the number of peers in the mesh overflows a u32 - // - returns 0 if there are no peers in the mesh for that topic - let get_topic = |topic: &TopicHash| { - Ok::<u32, TryFromIntError>( - topics - .get(topic) - .cloned() - .map(|v| v.try_into()) - .transpose()? - .unwrap_or_default(), - ) - }; - - let Ok(block_topics) = vec![ - get_topic(&v1_topic_hash), - get_topic(&v2_topic_hash), - get_topic(&v3_topic_hash), - get_topic(&v4_topic_hash), - ] - .into_iter() - .collect::<Result<Vec<_>, _>>() else { - error!(target: "p2p::rpc", "failed to get blocks topic. Some topic count overflowed. Make sure that the number of peers for a given topic fits in a u32."); - return; - }; - - let stats = PeerStats { - connected, - table, - blocks_topic: block_topics[0], - blocks_topic_v2: block_topics[1], - blocks_topic_v3: block_topics[2], - blocks_topic_v4: block_topics[3], - banned: banned_peers as u32, - known, - }; - - if let Err(e) = sender.send(stats) { - warn!(target: "p2p_rpc", "Failed to send peer stats through response channel: {:?}", e); - }; - }); - } - - /// Handles a peer count request by spawning a task. 
- fn handle_peer_count<G: ConnectionGate>( - sender: Sender<(Option<usize>, usize)>, - gossip: &GossipDriver<G>, - disc: &Discv5Handler, - ) { - let pc_req = disc.peer_count(); - let gossip_pc = gossip.connected_peers(); - tokio::spawn(async move { - let pc = match pc_req.await { - Ok(pc) => Some(pc), - Err(e) => { - warn!(target: "p2p_rpc", "Failed to receive peer count: {:?}", e); - None - } - }; - if let Err(e) = sender.send((pc, gossip_pc)) { - warn!(target: "p2p_rpc", "Failed to send peer count through response channel: {:?}", e); - } - }); - } -} diff --git a/kona/crates/node/gossip/src/rpc/types.rs b/kona/crates/node/gossip/src/rpc/types.rs deleted file mode 100644 index 48cddaf09f6..00000000000 --- a/kona/crates/node/gossip/src/rpc/types.rs +++ /dev/null @@ -1,428 +0,0 @@ -//! The types used in the p2p RPC API. - -use core::net::IpAddr; -use derive_more::Display; - -use alloy_primitives::{ChainId, map::HashMap}; - -/// The peer info. -/// -/// <https://github.com/ethereum-optimism/optimism/blob/develop/op-node/p2p/rpc_api.go#L15> -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PeerInfo { - /// The peer id. - #[serde(rename = "peerID")] - pub peer_id: String, - /// The node id. - #[serde(rename = "nodeID")] - pub node_id: String, - /// The user agent. - pub user_agent: String, - /// The protocol version. - pub protocol_version: String, - /// The enr for the peer. - /// If the peer is not in the discovery table, this will not be set. - #[serde(rename = "ENR")] - #[serde(skip_serializing_if = "Option::is_none")] - pub enr: Option<String>, - /// The peer addresses. 
- pub addresses: Vec<String>, - /// Peer supported protocols - pub protocols: Option<Vec<String>>, - /// 0: "`NotConnected`", - /// 1: "Connected", - /// 2: "`CanConnect`" (gracefully disconnected) - /// 3: "`CannotConnect`" (tried but failed) - pub connectedness: Connectedness, - /// 0: "Unknown", - /// 1: "Inbound" (if the peer contacted us) - /// 2: "Outbound" (if we connected to them) - pub direction: Direction, - /// Whether the peer is protected. - pub protected: bool, - /// The chain id. - #[serde(rename = "chainID")] - pub chain_id: ChainId, - /// The peer latency in nanoseconds - pub latency: u64, - /// Whether the peer gossips - pub gossip_blocks: bool, - /// The peer scores. - #[serde(rename = "scores")] - pub peer_scores: PeerScores, -} - -/// GossipSub topic-specific scoring metrics. -/// -/// Tracks peer performance within specific gossip topics, used by the -/// GossipSub protocol to maintain mesh quality and route messages efficiently. -/// These scores influence peer selection for the gossip mesh topology. -/// -/// Reference: <https://github.com/ethereum-optimism/optimism/blob/8dd17a7b114a7c25505cd2e15ce4e3d0f7e3f7c1/op-node/p2p/store/iface.go#L13> -#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct TopicScores { - /// Duration the peer has participated in the topic mesh. - /// - /// Longer participation indicates stability and commitment to the topic, - /// contributing positively to the peer's mesh score. - pub time_in_mesh: f64, - - /// Count of first-time message deliveries from this peer. - /// - /// Measures how often this peer is the first to deliver new messages, - /// indicating their connectivity and responsiveness to the network. - pub first_message_deliveries: f64, - - /// Count of messages delivered while in the mesh topology. - /// - /// Tracks consistent message forwarding behavior while the peer is - /// an active participant in the mesh structure. 
- pub mesh_message_deliveries: f64, - - /// Count of invalid or malicious messages from this peer. - /// - /// Penalizes peers that send invalid, duplicate, or malformed messages, - /// helping maintain network health and preventing spam. - pub invalid_message_deliveries: f64, -} - -/// Comprehensive GossipSub scoring metrics for peer quality assessment. -/// -/// Aggregates various scoring factors used by the GossipSub protocol to -/// evaluate peer quality and determine mesh topology. Higher scores indicate -/// more reliable and well-behaved peers. -/// -/// Reference: <https://github.com/ethereum-optimism/optimism/blob/8dd17a7b114a7c25505cd2e15ce4e3d0f7e3f7c1/op-node/p2p/store/iface.go#L20C6-L20C18> -#[derive(Debug, Default, Clone, Copy, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct GossipScores { - /// Aggregate score across all scoring dimensions. - /// - /// The final computed score that determines this peer's overall - /// reputation in the gossip network. - pub total: f64, - - /// Block-specific topic scores for consensus messages. - /// - /// Tracks peer behavior specifically for block gossip, which is - /// the primary message type in OP Stack networks. - pub blocks: TopicScores, - - /// Penalty for IP address colocation with other peers. - /// - /// Reduces scores for peers sharing IP addresses to prevent - /// eclipse attacks and improve network decentralization. - #[serde(rename = "IPColocationFactor")] - pub ip_colocation_factor: f64, - - /// Penalty for problematic behavior patterns. - /// - /// Applied to peers exhibiting suspicious or harmful behavior - /// that doesn't fit other specific scoring categories. - pub behavioral_penalty: f64, -} - -/// Request-response protocol scoring metrics. -/// -/// Tracks peer performance in direct request-response interactions outside -/// of the gossip mesh, such as block synchronization requests. 
-/// -/// Reference: <https://github.com/ethereum-optimism/optimism/blob/8dd17a7b114a7c25505cd2e15ce4e3d0f7e3f7c1/op-node/p2p/store/iface.go#L31C1-L35C2> -#[derive(Debug, Default, Clone, Copy, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct ReqRespScores { - /// Number of valid responses provided by this peer. - /// - /// Counts successful request-response exchanges where the peer - /// provided correct and timely responses to queries. - pub valid_responses: f64, - - /// Number of error responses or failed requests. - /// - /// Tracks cases where the peer returned errors, timeouts, or - /// otherwise failed to properly respond to requests. - pub error_responses: f64, - /// Number of rejected payloads. - pub rejected_payloads: f64, -} - -/// Peer Scores -/// -/// <https://github.com/ethereum-optimism/optimism/blob/8dd17a7b114a7c25505cd2e15ce4e3d0f7e3f7c1/op-node/p2p/store/iface.go#L81> -#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PeerScores { - /// The gossip scores - pub gossip: GossipScores, - /// The request-response scores. - pub req_resp: ReqRespScores, -} - -/// Peer count data. -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PeerCount { - /// The total number of connected peers to the discovery service. - pub connected_discovery: Option<usize>, - /// The total number of connected peers to the gossip service. - pub connected_gossip: usize, -} - -/// A raw peer dump. 
-/// -/// <https://github.com/ethereum-optimism/optimism/blob/40750a58e7a4a6f06370d18dfe6c6eab309012d9/op-node/p2p/rpc_api.go#L36> -#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PeerDump { - /// The total number of connected peers - pub total_connected: u32, - /// A map from peer id to peer info - pub peers: HashMap<String, PeerInfo>, - /// A list of banned peers. - pub banned_peers: Vec<String>, - /// A list of banned ip addresses. - #[serde(rename = "bannedIPS")] - pub banned_ips: Vec<IpAddr>, - /// The banned subnets - pub banned_subnets: Vec<ipnet::IpNet>, -} - -/// Peer stats. -/// -/// <https://github.com/ethereum-optimism/optimism/blob/develop/op-node/p2p/rpc_server.go#L203> -#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PeerStats { - /// The number of connections - pub connected: u32, - /// The table. - pub table: u32, - /// The blocks topic. - #[serde(rename = "blocksTopic")] - pub blocks_topic: u32, - /// The blocks v2 topic. - #[serde(rename = "blocksTopicV2")] - pub blocks_topic_v2: u32, - /// The blocks v3 topic. - #[serde(rename = "blocksTopicV3")] - pub blocks_topic_v3: u32, - /// The blocks v4 topic. - #[serde(rename = "blocksTopicV4")] - pub blocks_topic_v4: u32, - /// The banned count. - pub banned: u32, - /// The known count. - pub known: u32, -} - -/// Represents the connectivity state of a peer in a network, indicating the reachability and -/// interaction status of a node with its peers. -#[derive( - Clone, - Debug, - Display, - PartialEq, - Copy, - Default, - // We need to use `serde_repr` to serialize the enum as an integer to match the `op-node` API. - serde_repr::Serialize_repr, - serde_repr::Deserialize_repr, -)] -#[repr(u8)] -pub enum Connectedness { - /// No current connection to the peer, and no recent history of a successful connection. 
- #[default] - #[display("Not Connected")] - NotConnected = 0, - - /// An active, open connection to the peer exists. - #[display("Connected")] - Connected = 1, - - /// Connection to the peer is possible but not currently established; usually implies a past - /// successful connection. - #[display("Can Connect")] - CanConnect = 2, - - /// Recent attempts to connect to the peer failed, indicating potential issues in reachability - /// or peer status. - #[display("Cannot Connect")] - CannotConnect = 3, - - /// Connection to the peer is limited; may not have full capabilities. - #[display("Limited")] - Limited = 4, -} - -impl From<u8> for Connectedness { - fn from(value: u8) -> Self { - match value { - 0 => Self::NotConnected, - 1 => Self::Connected, - 2 => Self::CanConnect, - 3 => Self::CannotConnect, - 4 => Self::Limited, - _ => Self::NotConnected, - } - } -} -/// Direction represents the direction of a connection. -#[derive(Debug, Clone, Display, Copy, PartialEq, Eq, Default)] -pub enum Direction { - /// Unknown is the default direction when the direction is not specified. - #[default] - #[display("Unknown")] - Unknown = 0, - /// Inbound is for when the remote peer initiated the connection. - #[display("Inbound")] - Inbound = 1, - /// Outbound is for when the local peer initiated the connection. 
- #[display("Outbound")] - Outbound = 2, -} - -impl serde::Serialize for Direction { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> - where - S: serde::Serializer, - { - serializer.serialize_u8(*self as u8) - } -} - -impl<'de> serde::Deserialize<'de> for Direction { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - let value = u8::deserialize(deserializer)?; - match value { - 0 => Ok(Self::Unknown), - 1 => Ok(Self::Inbound), - 2 => Ok(Self::Outbound), - _ => Err(serde::de::Error::invalid_value( - serde::de::Unexpected::Unsigned(value as u64), - &"a value between 0 and 2", - )), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_connectedness_from_u8() { - assert_eq!(Connectedness::from(0), Connectedness::NotConnected); - assert_eq!(Connectedness::from(1), Connectedness::Connected); - assert_eq!(Connectedness::from(2), Connectedness::CanConnect); - assert_eq!(Connectedness::from(3), Connectedness::CannotConnect); - assert_eq!(Connectedness::from(4), Connectedness::Limited); - assert_eq!(Connectedness::from(5), Connectedness::NotConnected); - } - - #[test] - fn test_direction_display() { - assert_eq!(Direction::Unknown.to_string(), "Unknown"); - assert_eq!(Direction::Inbound.to_string(), "Inbound"); - assert_eq!(Direction::Outbound.to_string(), "Outbound"); - } - - #[test] - fn test_direction_serialization() { - assert_eq!( - serde_json::to_string(&Direction::Unknown).unwrap(), - "0", - "Serialization failed for Direction::Unknown" - ); - assert_eq!( - serde_json::to_string(&Direction::Inbound).unwrap(), - "1", - "Serialization failed for Direction::Inbound" - ); - assert_eq!( - serde_json::to_string(&Direction::Outbound).unwrap(), - "2", - "Serialization failed for Direction::Outbound" - ); - } - - #[test] - fn test_direction_deserialization() { - let unknown: Direction = serde_json::from_str("0").unwrap(); - let inbound: Direction = 
serde_json::from_str("1").unwrap(); - let outbound: Direction = serde_json::from_str("2").unwrap(); - - assert_eq!(unknown, Direction::Unknown, "Deserialization mismatch for Direction::Unknown"); - assert_eq!(inbound, Direction::Inbound, "Deserialization mismatch for Direction::Inbound"); - assert_eq!( - outbound, - Direction::Outbound, - "Deserialization mismatch for Direction::Outbound" - ); - } - - #[test] - fn test_peer_info_connectedness_serialization() { - let peer_info = PeerInfo { - peer_id: String::from("peer123"), - node_id: String::from("node123"), - user_agent: String::from("MyUserAgent"), - protocol_version: String::from("v1"), - enr: Some(String::from("enr123")), - addresses: [String::from("127.0.0.1")].to_vec(), - protocols: Some([String::from("eth"), String::from("p2p")].to_vec()), - connectedness: Connectedness::Connected, - direction: Direction::Outbound, - protected: true, - chain_id: 1, - latency: 100, - gossip_blocks: true, - peer_scores: PeerScores { - gossip: GossipScores { - total: 1.0, - blocks: TopicScores { - time_in_mesh: 10.0, - first_message_deliveries: 5.0, - mesh_message_deliveries: 2.0, - invalid_message_deliveries: 0.0, - }, - ip_colocation_factor: 0.5, - behavioral_penalty: 0.1, - }, - req_resp: ReqRespScores { - valid_responses: 10.0, - error_responses: 1.0, - rejected_payloads: 0.0, - }, - }, - }; - - let serialized = serde_json::to_string(&peer_info).expect("Serialization failed"); - - let deserialized: PeerInfo = - serde_json::from_str(&serialized).expect("Deserialization failed"); - - assert_eq!(peer_info.peer_id, deserialized.peer_id); - assert_eq!(peer_info.node_id, deserialized.node_id); - assert_eq!(peer_info.user_agent, deserialized.user_agent); - assert_eq!(peer_info.protocol_version, deserialized.protocol_version); - assert_eq!(peer_info.enr, deserialized.enr); - assert_eq!(peer_info.addresses, deserialized.addresses); - assert_eq!(peer_info.protocols, deserialized.protocols); - assert_eq!(peer_info.connectedness, 
deserialized.connectedness); - assert_eq!(peer_info.direction, deserialized.direction); - assert_eq!(peer_info.protected, deserialized.protected); - assert_eq!(peer_info.chain_id, deserialized.chain_id); - assert_eq!(peer_info.latency, deserialized.latency); - assert_eq!(peer_info.gossip_blocks, deserialized.gossip_blocks); - assert_eq!(peer_info.peer_scores.gossip.total, deserialized.peer_scores.gossip.total); - assert_eq!( - peer_info.peer_scores.req_resp.valid_responses, - deserialized.peer_scores.req_resp.valid_responses - ); - } -} diff --git a/kona/crates/node/peers/README.md b/kona/crates/node/peers/README.md deleted file mode 100644 index e6ab644ad5e..00000000000 --- a/kona/crates/node/peers/README.md +++ /dev/null @@ -1,42 +0,0 @@ -# `kona-peers` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-peers"><img src="https://img.shields.io/crates/v/kona-peers.svg?label=kona-peers&labelColor=2a2f35" alt="Kona Peers"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -Networking Utilities ported from reth. - -Much of this module is ported from -<https://github.com/paradigmxyz/reth/blob/0e087ae1c35502f0b8d128c64e4c57269af20c0e/crates/net/peers/src/lib.rs>. - -This module manages and converts Ethereum network entities such as node records, peer IDs, and -Ethereum Node Records (ENRs) - -## Node Record Overview - -Ethereum uses different types of "node records" to represent peers on the network. - -The simplest way to identify a peer is by public key. 
This is the `PeerId` type, which usually -represents a peer's secp256k1 public key. - -A more complete representation of a peer is the `NodeRecord` type, which includes the peer's -IP address, the ports where it is reachable (TCP and UDP), and the peer's public key. This is -what is returned from discovery v4 queries. - -The most comprehensive node record type is the Ethereum Node Record (`discv5::Enr`), which is -a signed, versioned record that includes the information from a `NodeRecord` along with -additional metadata. This is the data structure returned from discovery v5 queries. - -When we need to deserialize an identifier that could be any of these three types (`PeerId`, -`NodeRecord`, and `discv5::Enr`), we use the `AnyNode` type, which is an enum over the -three types. `AnyNode` is used in reth's `admin_addTrustedPeer` RPC method. - -In short, the types are as follows: -- `PeerId`: A simple public key identifier. -- `NodeRecord`: A more complete representation of a peer, including IP address and ports. -- `discv5::Enr`: An Ethereum Node Record, which is a signed, versioned record that includes - additional metadata. Useful when interacting with discovery v5, or when custom metadata is - required. -- `AnyNode`: An enum over `PeerId`, `NodeRecord`, and `discv5::Enr`, useful in - deserialization when the type of the node record is not known. 
\ No newline at end of file diff --git a/kona/crates/node/peers/src/lib.rs b/kona/crates/node/peers/src/lib.rs deleted file mode 100644 index 46f96edd39b..00000000000 --- a/kona/crates/node/peers/src/lib.rs +++ /dev/null @@ -1,45 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -#[macro_use] -extern crate tracing; - -/// Alias for a peer identifier. -/// -/// This is the most primitive secp256k1 public key identifier for a given peer. -pub type PeerId = alloy_primitives::B512; - -mod nodes; -pub use nodes::{BootNodes, OP_RAW_BOOTNODES, OP_RAW_TESTNET_BOOTNODES}; - -mod store; -pub use store::{BootStore, BootStoreFile}; - -mod score; -pub use score::PeerScoreLevel; - -mod enr; -pub use enr::{EnrValidation, OpStackEnr, OpStackEnrError}; - -mod any; -pub use any::{AnyNode, DialOptsError}; - -mod boot; -pub use boot::BootNode; - -mod record; -pub use record::{NodeRecord, NodeRecordParseError}; - -mod utils; -pub use utils::{ - PeerIdConversionError, enr_to_multiaddr, local_id_to_p2p_id, peer_id_to_secp256k1_pubkey, -}; - -mod monitoring; -pub use monitoring::PeerMonitoring; diff --git a/kona/crates/node/peers/src/utils.rs b/kona/crates/node/peers/src/utils.rs deleted file mode 100644 index c76d523ede3..00000000000 --- a/kona/crates/node/peers/src/utils.rs +++ /dev/null @@ -1,205 +0,0 @@ -//! Utilities to translate types. - -use discv5::{ - Enr, - enr::{CombinedPublicKey, EnrPublicKey}, - multiaddr::Protocol, -}; -use libp2p::Multiaddr; - -use super::PeerId; - -/// Converts an [`Enr`] into a [`Multiaddr`]. 
-pub fn enr_to_multiaddr(enr: &Enr) -> Option<Multiaddr> { - let mut addr = if let Some(socket) = enr.tcp4_socket() { - let mut addr = Multiaddr::from(*socket.ip()); - addr.push(Protocol::Tcp(socket.port())); - addr - } else if let Some(socket) = enr.tcp6_socket() { - let mut addr = Multiaddr::from(*socket.ip()); - addr.push(Protocol::Tcp(socket.port())); - addr - } else { - return None; - }; - - let CombinedPublicKey::Secp256k1(pub_key) = enr.public_key() else { - return None; - }; - - let pub_key = libp2p_identity::secp256k1::PublicKey::try_from_bytes(&pub_key.encode()).ok()?; - let pub_key = libp2p_identity::PublicKey::from(pub_key); - - addr.push(Protocol::P2p(libp2p::PeerId::from_public_key(&pub_key))); - - Some(addr) -} - -/// Converts an uncompressed [`PeerId`] to a [`secp256k1::PublicKey`] by prepending the [`PeerId`] -/// bytes with the `SECP256K1_TAG_PUBKEY_UNCOMPRESSED` tag. -pub fn peer_id_to_secp256k1_pubkey(id: PeerId) -> Result<secp256k1::PublicKey, secp256k1::Error> { - /// Tags the public key as uncompressed. - /// - /// See: <https://github.com/bitcoin-core/secp256k1/blob/master/include/secp256k1.h#L211> - const SECP256K1_TAG_PUBKEY_UNCOMPRESSED: u8 = 4; - - let mut full_pubkey = [0u8; secp256k1::constants::UNCOMPRESSED_PUBLIC_KEY_SIZE]; - full_pubkey[0] = SECP256K1_TAG_PUBKEY_UNCOMPRESSED; - full_pubkey[1..].copy_from_slice(id.as_slice()); - secp256k1::PublicKey::from_slice(&full_pubkey) -} - -/// An error that can occur when converting a [`PeerId`] to a [`libp2p::PeerId`]. -#[derive(Debug, thiserror::Error)] -pub enum PeerIdConversionError { - /// The peer id is not valid and cannot be converted to a secp256k1 public key. - #[error("Invalid peer id: {0}")] - InvalidPeerId(secp256k1::Error), - /// The secp256k1 public key cannot be converted to a libp2p peer id. This is a bug. - #[error("Invalid conversion from secp256k1 public key to libp2p peer id: {0}. 
This is a bug.")] - InvalidPublicKey(#[from] discv5::libp2p_identity::DecodingError), -} - -/// Converts an uncoded [`PeerId`] to a [`libp2p::PeerId`]. These two types represent the same -/// underlying concept (secp256k1 public key) but using different encodings (the local [`PeerId`] is -/// the uncompressed representation of the public key, while the "p2plib" [`libp2p::PeerId`] is a -/// more complex representation, involving protobuf encoding and bitcoin encoding, defined here: <https://github.com/libp2p/specs/blob/master/peer-ids/peer-ids.md>). -pub fn local_id_to_p2p_id(peer_id: PeerId) -> Result<libp2p::PeerId, PeerIdConversionError> { - // The libp2p library works with compressed public keys. - let encoded_pk_bytes = peer_id_to_secp256k1_pubkey(peer_id) - .map_err(PeerIdConversionError::InvalidPeerId)? - .serialize(); - let pk: discv5::libp2p_identity::PublicKey = - discv5::libp2p_identity::secp256k1::PublicKey::try_from_bytes(&encoded_pk_bytes)?.into(); - - Ok(pk.to_peer_id()) -} - -#[cfg(test)] -mod tests { - use std::net::{Ipv4Addr, Ipv6Addr}; - - use super::*; - use crate::PeerId; - use alloy_primitives::hex::FromHex; - use discv5::enr::{CombinedKey, Enr, EnrKey}; - - #[test] - fn test_resolve_multiaddr() { - let ip = Ipv4Addr::new(132, 145, 16, 10); - let tcp_port = 9000; - let udp_port = 9001; - let private_key = CombinedKey::generate_secp256k1(); - - let public_key = private_key.public().encode(); - let public_key = - libp2p_identity::secp256k1::PublicKey::try_from_bytes(&public_key).unwrap(); - let peer_id = libp2p::PeerId::from_public_key(&public_key.into()); - - let enr = Enr::builder().ip4(ip).tcp4(tcp_port).udp4(udp_port).build(&private_key).unwrap(); - - let multiaddr = enr_to_multiaddr(&enr).unwrap(); - - let mut received_ip = None; - let mut received_tcp_port = None; - let mut received_p2p_id = None; - - for protocol in multiaddr.iter() { - match protocol { - Protocol::Ip4(ip) => { - received_ip = Some(ip); - } - Protocol::Tcp(port) => { - 
received_tcp_port = Some(port); - } - Protocol::P2p(id) => { - received_p2p_id = Some(id); - } - _ => { - panic!("Unexpected protocol: {protocol:?}"); - } - } - } - assert_eq!(received_ip, Some(ip)); - assert_eq!(received_tcp_port, Some(tcp_port)); - assert_eq!(received_p2p_id, Some(peer_id)); - } - - #[test] - fn test_resolve_multiaddr_ipv6() { - let ip = Ipv6Addr::new(0x2001, 0xdb8, 0x0a, 0x11, 0x1e, 0x8a, 0x2e, 0x3a); - let tcp_port = 9000; - let udp_port = 9001; - let private_key = CombinedKey::generate_secp256k1(); - - let public_key = private_key.public().encode(); - let public_key = - libp2p_identity::secp256k1::PublicKey::try_from_bytes(&public_key).unwrap(); - let peer_id = libp2p::PeerId::from_public_key(&public_key.into()); - - let enr = Enr::builder().ip6(ip).tcp6(tcp_port).udp6(udp_port).build(&private_key).unwrap(); - - let multiaddr = enr_to_multiaddr(&enr).unwrap(); - - let mut received_ip = None; - let mut received_tcp_port = None; - let mut received_p2p_id = None; - - for protocol in multiaddr.iter() { - match protocol { - Protocol::Ip6(ip) => { - received_ip = Some(ip); - } - Protocol::Tcp(port) => { - received_tcp_port = Some(port); - } - Protocol::P2p(id) => { - received_p2p_id = Some(id); - } - _ => { - panic!("Unexpected protocol: {protocol:?}"); - } - } - } - assert_eq!(received_ip, Some(ip)); - assert_eq!(received_tcp_port, Some(tcp_port)); - assert_eq!(received_p2p_id, Some(peer_id)); - } - - #[test] - fn test_convert_local_peer_id_to_multi_peer_id() { - let p2p_keypair = discv5::libp2p_identity::secp256k1::Keypair::generate(); - let uncompressed = p2p_keypair.public().to_bytes_uncompressed(); - let local_peer_id = PeerId::from_slice(&uncompressed[1..]); - - // We need to convert the local peer id (uncompressed secp256k1 public key) to a libp2p - // peer id (protocol buffer encoded public key). 
- let peer_id = local_id_to_p2p_id(local_peer_id).unwrap(); - - let p2p_public_key: discv5::libp2p_identity::PublicKey = - p2p_keypair.public().clone().into(); - - assert_eq!(peer_id, p2p_public_key.to_peer_id()); - } - - #[test] - fn test_hardcoded_peer_id() { - const PUB_KEY_STR: &str = "548f715f3fc388a7c917ba644a2f16270f1ede48a5d88a4d14ea287cc916068363f3092e39936f1a3e7885198bef0e5af951f1d7b1041ce8ba4010917777e71f"; - let pub_key = PeerId::from_hex(PUB_KEY_STR).unwrap(); - - // We need to convert the local peer id (uncompressed secp256k1 public key) to a libp2p - // peer id (protocol buffer encoded public key). - let peer_id = local_id_to_p2p_id(pub_key).unwrap(); - - let uncompressed_pub_key = peer_id_to_secp256k1_pubkey(pub_key).unwrap(); - - let p2p_public_key: discv5::libp2p_identity::PublicKey = - discv5::libp2p_identity::secp256k1::PublicKey::try_from_bytes( - &uncompressed_pub_key.serialize(), - ) - .unwrap() - .into(); - - assert_eq!(peer_id, p2p_public_key.to_peer_id()); - } -} diff --git a/kona/crates/node/rpc/README.md b/kona/crates/node/rpc/README.md deleted file mode 100644 index bb754a83043..00000000000 --- a/kona/crates/node/rpc/README.md +++ /dev/null @@ -1,9 +0,0 @@ -## `kona-rpc` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-rpc"><img src="https://img.shields.io/crates/v/kona-rpc.svg" alt="kona-rpc crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - - -Low-level Optimism JSON-RPC server and client implementations. 
diff --git a/kona/crates/node/rpc/src/client.rs b/kona/crates/node/rpc/src/client.rs deleted file mode 100644 index c3aa834a798..00000000000 --- a/kona/crates/node/rpc/src/client.rs +++ /dev/null @@ -1,100 +0,0 @@ -use alloy_eips::BlockNumberOrTag; -use alloy_primitives::B256; -use async_trait::async_trait; -use jsonrpsee::core::RpcResult; -use kona_engine::EngineState; -use kona_genesis::RollupConfig; -use kona_protocol::{L2BlockInfo, OutputRoot}; -use rollup_boost::{GetExecutionModeResponse, SetExecutionModeRequest, SetExecutionModeResponse}; -use std::fmt::Debug; -use thiserror::Error; -use tokio::sync::watch; - -/// Client trait wrapping RPC implementation for the EngineActor. -#[async_trait] -pub trait EngineRpcClient: Debug + Send + Sync + Clone { - /// Request the current [`RollupConfig`]. - async fn get_config(&self) -> RpcResult<RollupConfig>; - /// Request the current [`EngineState`] snapshot. - async fn get_state(&self) -> RpcResult<EngineState>; - /// Request the L2 output root for a specific [`BlockNumberOrTag`]. - /// - /// Returns a tuple of [`L2BlockInfo`], [`OutputRoot`], and [`EngineState`] at the requested - /// block. - async fn output_at_block( - &self, - block: BlockNumberOrTag, - ) -> RpcResult<(L2BlockInfo, OutputRoot, EngineState)>; - /// Development API: Get the current number of pending tasks in the queue. - async fn dev_get_task_queue_length(&self) -> RpcResult<usize>; - /// Development API: Subscribes to engine queue length updates managed by the returned - /// [`watch::Receiver`]. - async fn dev_subscribe_to_engine_queue_length(&self) -> RpcResult<watch::Receiver<usize>>; - /// Development API: Subscribes to engine state updates managed by the returned - /// [`watch::Receiver`]. - async fn dev_subscribe_to_engine_state(&self) -> RpcResult<watch::Receiver<EngineState>>; -} - -/// Client trait wrapping RPC implementation for the rollup boost admin endpoints. 
-#[async_trait] -pub trait RollupBoostAdminClient: Send + Sync + Debug { - /// Sets the execution mode for the rollup boost server. - async fn set_execution_mode( - &self, - request: SetExecutionModeRequest, - ) -> RpcResult<SetExecutionModeResponse>; - - /// Gets the current execution mode from the rollup boost server. - async fn get_execution_mode(&self) -> RpcResult<GetExecutionModeResponse>; -} - -/// Client trait wrapping RPC implementation for the Sequencer admin endpoints. -#[async_trait] -pub trait SequencerAdminAPIClient: Send + Sync + Debug { - /// Check if the sequencer is active. - async fn is_sequencer_active(&self) -> Result<bool, SequencerAdminAPIError>; - - /// Check if the conductor is enabled. - async fn is_conductor_enabled(&self) -> Result<bool, SequencerAdminAPIError>; - - /// Check if in recovery mode. - async fn is_recovery_mode(&self) -> Result<bool, SequencerAdminAPIError>; - - /// Start the sequencer. - async fn start_sequencer(&self) -> Result<(), SequencerAdminAPIError>; - - /// Stop the sequencer. - async fn stop_sequencer(&self) -> Result<B256, SequencerAdminAPIError>; - - /// Set recovery mode. - async fn set_recovery_mode(&self, mode: bool) -> Result<(), SequencerAdminAPIError>; - - /// Override the leader. - async fn override_leader(&self) -> Result<(), SequencerAdminAPIError>; - - /// Reset the derivation pipeline. - async fn reset_derivation_pipeline(&self) -> Result<(), SequencerAdminAPIError>; -} - -/// Errors that can occur when using the sequencer admin API. -#[derive(Debug, Error)] -pub enum SequencerAdminAPIError { - /// Error sending request. - #[error("Error sending request: {0}.")] - RequestError(String), - - /// Error receiving response. - /// Note: this error message is not future-proof, in that it may not be a safe assumption that - /// communication is channel-based. If/when that changes the enum will likely need to be updated - /// to take a parameter, so we can change it then. 
- #[error("Error receiving response: response channel closed.")] - ResponseError, - - /// Sequencer stopped successfully, followed by some error. - #[error("Sequencer stopped successfully, followed by error: {0}.")] - ErrorAfterSequencerWasStopped(String), - - /// Error overriding leader. - #[error("Error overriding leader: {0}.")] - LeaderOverrideError(String), -} diff --git a/kona/crates/node/rpc/src/config.rs b/kona/crates/node/rpc/src/config.rs deleted file mode 100644 index 056e393f54e..00000000000 --- a/kona/crates/node/rpc/src/config.rs +++ /dev/null @@ -1,48 +0,0 @@ -//! Contains the RPC Configuration. - -use std::{net::SocketAddr, path::PathBuf}; - -/// The RPC configuration. -#[derive(Debug, Clone)] -pub struct RpcBuilder { - /// Prevent the rpc server from being restarted. - pub no_restart: bool, - /// The RPC socket address. - pub socket: SocketAddr, - /// Enable the admin API. - pub enable_admin: bool, - /// File path used to persist state changes made via the admin API so they persist across - /// restarts. - pub admin_persistence: Option<PathBuf>, - /// Enable the websocket rpc server - pub ws_enabled: bool, - /// Enable development RPC endpoints - pub dev_enabled: bool, -} - -impl RpcBuilder { - /// Returns whether WebSocket RPC endpoint is enabled - pub const fn ws_enabled(&self) -> bool { - self.ws_enabled - } - - /// Returns whether development RPC endpoints are enabled - pub const fn dev_enabled(&self) -> bool { - self.dev_enabled - } - - /// Returns the socket address of the [`RpcBuilder`]. - pub const fn socket(&self) -> SocketAddr { - self.socket - } - - /// Returns the number of times the RPC server will attempt to restart if it stops. - pub const fn restart_count(&self) -> u32 { - if self.no_restart { 0 } else { 3 } - } - - /// Sets the given [`SocketAddr`] on the [`RpcBuilder`]. 
- pub fn set_addr(self, addr: SocketAddr) -> Self { - Self { socket: addr, ..self } - } -} diff --git a/kona/crates/node/rpc/src/lib.rs b/kona/crates/node/rpc/src/lib.rs deleted file mode 100644 index 0e9ccd58141..00000000000 --- a/kona/crates/node/rpc/src/lib.rs +++ /dev/null @@ -1,59 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -#[macro_use] -extern crate tracing; - -mod admin; -pub use admin::{AdminRpc, NetworkAdminQuery, RollupBoostAdminQuery}; - -mod client; -pub use client::{ - EngineRpcClient, RollupBoostAdminClient, SequencerAdminAPIClient, SequencerAdminAPIError, -}; - -mod config; -pub use config::RpcBuilder; - -mod net; -pub use net::P2pRpc; - -mod p2p; - -mod response; -pub use response::SafeHeadResponse; - -mod output; -pub use output::OutputResponse; - -mod dev; -pub use dev::DevEngineRpc; - -mod jsonrpsee; -pub use jsonrpsee::{ - AdminApiServer, DevEngineApiServer, HealthzApiServer, MinerApiExtServer, OpAdminApiServer, - OpP2PApiServer, RollupBoostHealthzApiServer, RollupNodeApiServer, WsServer, -}; - -#[cfg(feature = "client")] -pub use jsonrpsee::RollupNodeApiClient; - -mod rollup; -pub use rollup::RollupRpc; - -mod l1_watcher; -pub use l1_watcher::{L1State, L1WatcherQueries, L1WatcherQuerySender}; - -mod ws; -pub use ws::WsRPC; - -mod health; -pub use health::{ - HealthzResponse, HealthzRpc, RollupBoostHealth, RollupBoostHealthQuery, - RollupBoostHealthzResponse, -}; diff --git a/kona/crates/node/rpc/src/net.rs b/kona/crates/node/rpc/src/net.rs deleted file mode 100644 index 7732b80820b..00000000000 --- a/kona/crates/node/rpc/src/net.rs +++ /dev/null @@ -1,22 +0,0 @@ -//! 
Network types - -use kona_gossip::P2pRpcRequest; - -/// A type alias for the sender of a [`P2pRpcRequest`]. -type P2pReqSender = tokio::sync::mpsc::Sender<P2pRpcRequest>; - -/// P2pRpc -/// -/// This is a server implementation of [`crate::OpP2PApiServer`]. -#[derive(Debug)] -pub struct P2pRpc { - /// The channel to send [`P2pRpcRequest`]s. - pub sender: P2pReqSender, -} - -impl P2pRpc { - /// Constructs a new [`P2pRpc`] given a sender channel. - pub const fn new(sender: P2pReqSender) -> Self { - Self { sender } - } -} diff --git a/kona/crates/node/rpc/src/rollup.rs b/kona/crates/node/rpc/src/rollup.rs deleted file mode 100644 index 42ef0e70909..00000000000 --- a/kona/crates/node/rpc/src/rollup.rs +++ /dev/null @@ -1,132 +0,0 @@ -//! Implements the rollup client rpc endpoints. These endpoints serve data about the rollup state. -//! -//! Implemented in the op-node in <https://github.com/ethereum-optimism/optimism/blob/174e55f0a1e73b49b80a561fd3fedd4fea5770c6/op-service/sources/rollupclient.go#L16> - -use alloy_eips::BlockNumberOrTag; -use async_trait::async_trait; -use jsonrpsee::{ - core::RpcResult, - types::{ErrorCode, ErrorObject}, -}; -use kona_engine::EngineState; -use kona_genesis::RollupConfig; -use kona_protocol::SyncStatus; -use std::fmt::Debug; - -use crate::{ - EngineRpcClient, L1State, L1WatcherQueries, OutputResponse, RollupNodeApiServer, - SafeHeadResponse, l1_watcher::L1WatcherQuerySender, -}; - -/// RollupRpc -/// -/// This is a server implementation of [`crate::RollupNodeApiServer`]. -#[derive(Debug)] -pub struct RollupRpc<EngineRpcClient_> { - /// The channel to send [`kona_engine::EngineQueries`]s. - pub engine_client: EngineRpcClient_, - /// The channel to send [`crate::L1WatcherQueries`]s. - pub l1_watcher_sender: L1WatcherQuerySender, -} - -impl<EngineRpcClient_: EngineRpcClient> RollupRpc<EngineRpcClient_> { - /// The identifier for the Metric that tracks rollup RPC calls. 
- pub const RPC_IDENT: &'static str = "rollup_rpc"; - - /// Constructs a new [`RollupRpc`] given a sender channel. - pub const fn new( - engine_client: EngineRpcClient_, - l1_watcher_sender: L1WatcherQuerySender, - ) -> Self { - Self { engine_client, l1_watcher_sender } - } - - // Important note: we zero-out the fields that can't be derived yet to follow op-node's - // behaviour. - fn sync_status_from_actor_queries( - l1_sync_status: L1State, - l2_sync_status: EngineState, - ) -> SyncStatus { - SyncStatus { - current_l1: l1_sync_status.current_l1.unwrap_or_default(), - current_l1_finalized: l1_sync_status.current_l1_finalized.unwrap_or_default(), - head_l1: l1_sync_status.head_l1.unwrap_or_default(), - safe_l1: l1_sync_status.safe_l1.unwrap_or_default(), - finalized_l1: l1_sync_status.finalized_l1.unwrap_or_default(), - unsafe_l2: l2_sync_status.sync_state.unsafe_head(), - cross_unsafe_l2: l2_sync_status.sync_state.cross_unsafe_head(), - local_safe_l2: l2_sync_status.sync_state.local_safe_head(), - safe_l2: l2_sync_status.sync_state.safe_head(), - finalized_l2: l2_sync_status.sync_state.finalized_head(), - } - } -} - -#[async_trait] -impl<EngineRpcClient_: EngineRpcClient + 'static> RollupNodeApiServer - for RollupRpc<EngineRpcClient_> -{ - async fn op_output_at_block(&self, block_num: BlockNumberOrTag) -> RpcResult<OutputResponse> { - kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_outputAtBlock"); - - let (l1_sync_status_send, l1_sync_status_recv) = tokio::sync::oneshot::channel(); - - let ((l2_block_info, output_root, l2_sync_status), l1_sync_status) = - tokio::try_join!(self.engine_client.output_at_block(block_num), async { - self.l1_watcher_sender - .send(L1WatcherQueries::L1State(l1_sync_status_send)) - .await - .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; - - l1_sync_status_recv.await.map_err(|_| ErrorObject::from(ErrorCode::InternalError)) - })?; - - let sync_status = Self::sync_status_from_actor_queries(l1_sync_status, 
l2_sync_status); - - Ok(OutputResponse::from_v0(output_root, sync_status, l2_block_info)) - } - - /// This RPC endpoint is not supported. It is not necessary to track the safe head for every L1 - /// block post-interop anymore so we can remove this method from the rpc interface. - async fn op_safe_head_at_l1_block( - &self, - _block_num: BlockNumberOrTag, - ) -> RpcResult<SafeHeadResponse> { - kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_safeHeadAtL1Block"); - return Err(ErrorObject::from(ErrorCode::MethodNotFound)); - } - - async fn op_sync_status(&self) -> RpcResult<SyncStatus> { - kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_syncStatus"); - - let (l1_sync_status_send, l1_sync_status_recv) = tokio::sync::oneshot::channel(); - - let (l1_sync_status, l2_sync_status) = tokio::try_join!( - async { - self.l1_watcher_sender - .send(L1WatcherQueries::L1State(l1_sync_status_send)) - .await - .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; - l1_sync_status_recv.await.map_err(|_| ErrorObject::from(ErrorCode::InternalError)) - }, - self.engine_client.get_state() - ) - .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; - - return Ok(Self::sync_status_from_actor_queries(l1_sync_status, l2_sync_status)); - } - - async fn op_rollup_config(&self) -> RpcResult<RollupConfig> { - kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_rollupConfig"); - - self.engine_client.get_config().await - } - - async fn op_version(&self) -> RpcResult<String> { - kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_version"); - - const RPC_VERSION: &str = env!("CARGO_PKG_VERSION"); - - return Ok(RPC_VERSION.to_string()); - } -} diff --git a/kona/crates/node/service/Cargo.toml b/kona/crates/node/service/Cargo.toml deleted file mode 100644 index 52f03fab5be..00000000000 --- a/kona/crates/node/service/Cargo.toml +++ /dev/null @@ -1,97 +0,0 @@ -[package] -name = "kona-node-service" -description = "An implementation of the OP Stack consensus node 
service" -version = "0.1.3" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# workspace -kona-gossip.workspace = true -kona-disc.workspace = true -kona-engine.workspace = true -kona-sources.workspace = true -kona-genesis.workspace = true -kona-derive.workspace = true -kona-protocol.workspace = true -kona-providers-alloy.workspace = true -kona-rpc = { workspace = true, features = ["client"] } -kona-peers.workspace = true -kona-macros.workspace = true - -# rollup-boost -rollup-boost.workspace = true - -# alloy -alloy-chains.workspace = true -alloy-signer.workspace = true -alloy-signer-local.workspace = true -alloy-primitives.workspace = true -alloy-rpc-client.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } -alloy-provider = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls"] } -alloy-eips.workspace = true -alloy-transport.workspace = true -alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls", "jwt-auth"] } - -# op-alloy -op-alloy-network.workspace = true -op-alloy-rpc-types-engine = { workspace = true, features = ["std"] } -op-alloy-provider.workspace = true - -# general -url.workspace = true -libp2p.workspace = true -libp2p-stream.workspace = true -discv5.workspace = true -futures.workspace = true -tracing.workspace = true -thiserror.workspace = true -tokio-util.workspace = true -async-trait.workspace = true -async-stream.workspace = true -tokio-stream.workspace = true -strum = { workspace = true, features = ["derive"] } -backon.workspace = true -derive_more = { workspace = true, features = ["debug", "eq"] } -jsonrpsee = { workspace = true, features = ["server", "http-client"] } -tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } 
-tower.workspace = true -http-body-util.workspace = true - -# metrics -metrics = { workspace = true, optional = true } - -[dev-dependencies] -rstest.workspace = true -arbitrary.workspace = true -rand.workspace = true -anyhow.workspace = true -backon.workspace = true -http = "1" -mockall.workspace = true -alloy-primitives = { workspace = true, features = ["k256"] } -alloy-rpc-types-engine = { workspace = true, features = ["arbitrary"] } -alloy-consensus = { workspace = true, features = ["arbitrary"] } -op-alloy-consensus = { workspace = true, features = ["arbitrary", "k256"] } -kona-derive = {workspace = true, features = ["test-utils"]} - -[features] -default = [] -metrics = [ - "dep:metrics", - "kona-derive/metrics", - "kona-disc/metrics", - "kona-engine/metrics", - "kona-gossip/metrics", - "kona-providers-alloy/metrics", - "kona-rpc/metrics", - "libp2p/metrics", -] diff --git a/kona/crates/node/service/README.md b/kona/crates/node/service/README.md deleted file mode 100644 index 2effabfac57..00000000000 --- a/kona/crates/node/service/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# `kona-node-service` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-node-service"><img src="https://img.shields.io/crates/v/kona-node-service.svg" alt="kona-node-service crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -An implementation of the OP Stack [RollupNode][rn-spec] service. 
- -[rn-spec]: https://specs.optimism.io/protocol/rollup-node.html diff --git a/kona/crates/node/service/src/actors/derivation/actor.rs b/kona/crates/node/service/src/actors/derivation/actor.rs deleted file mode 100644 index 77275dc5205..00000000000 --- a/kona/crates/node/service/src/actors/derivation/actor.rs +++ /dev/null @@ -1,341 +0,0 @@ -//! [NodeActor] implementation for the derivation sub-routine. - -use crate::{ - CancellableContext, DerivationActorRequest, DerivationEngineClient, DerivationState, - DerivationStateMachine, DerivationStateTransitionError, DerivationStateUpdate, Metrics, - NodeActor, actors::derivation::L2Finalizer, -}; -use async_trait::async_trait; -use kona_derive::{ - ActivationSignal, Pipeline, PipelineError, PipelineErrorKind, ResetError, ResetSignal, Signal, - SignalReceiver, StepResult, -}; -use kona_protocol::OpAttributesWithParent; -use thiserror::Error; -use tokio::{select, sync::mpsc}; -use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; - -/// The [NodeActor] for the derivation sub-routine. -/// -/// This actor is responsible for receiving messages from [NodeActor]s and stepping the -/// derivation pipeline forward to produce new payload attributes. The actor then sends the payload -/// to the [NodeActor] responsible for the execution sub-routine. -#[derive(Debug)] -pub struct DerivationActor<DerivationEngineClient_, PipelineSignalReceiver> -where - DerivationEngineClient_: DerivationEngineClient, - PipelineSignalReceiver: Pipeline + SignalReceiver, -{ - /// The cancellation token, shared between all tasks. - cancellation_token: CancellationToken, - /// The channel on which all inbound requests are received by the [`DerivationActor`]. - inbound_request_rx: mpsc::Receiver<DerivationActorRequest>, - /// The Engine client used to interact with the engine. - engine_client: DerivationEngineClient_, - - /// The derivation pipeline. 
- pipeline: PipelineSignalReceiver, - /// The state machine controlling when derivation can occur. - derivation_state_machine: DerivationStateMachine, - /// The [`L2Finalizer`] tracks derived L2 blocks awaiting finalization. - pub(crate) finalizer: L2Finalizer, -} - -impl<DerivationEngineClient_, PipelineSignalReceiver> CancellableContext - for DerivationActor<DerivationEngineClient_, PipelineSignalReceiver> -where - DerivationEngineClient_: DerivationEngineClient, - PipelineSignalReceiver: Pipeline + SignalReceiver + Send + Sync, -{ - fn cancelled(&self) -> WaitForCancellationFuture<'_> { - self.cancellation_token.cancelled() - } -} - -impl<DerivationEngineClient_, PipelineSignalReceiver> - DerivationActor<DerivationEngineClient_, PipelineSignalReceiver> -where - DerivationEngineClient_: DerivationEngineClient, - PipelineSignalReceiver: Pipeline + SignalReceiver, -{ - /// Creates a new instance of the [DerivationActor]. - pub fn new( - engine_client: DerivationEngineClient_, - cancellation_token: CancellationToken, - inbound_request_rx: mpsc::Receiver<DerivationActorRequest>, - pipeline: PipelineSignalReceiver, - ) -> Self { - Self { - cancellation_token, - pipeline, - inbound_request_rx, - engine_client, - derivation_state_machine: DerivationStateMachine::default(), - finalizer: L2Finalizer::default(), - } - } - - /// Handles a [`Signal`] received over the derivation signal receiver channel. - async fn signal(&mut self, signal: Signal) { - if let Signal::Reset(ResetSignal { l1_origin, .. }) = signal { - kona_macros::set!(counter, Metrics::DERIVATION_L1_ORIGIN, l1_origin.number); - // Clear the finalization queue on reset. 
- self.finalizer.clear(); - } - - match self.pipeline.signal(signal).await { - Ok(_) => info!(target: "derivation", ?signal, "[SIGNAL] Executed Successfully"), - Err(e) => { - error!(target: "derivation", ?e, ?signal, "Failed to signal derivation pipeline") - } - } - } - - /// Attempts to step the derivation pipeline forward as much as possible in order to produce the - /// next safe payload. - async fn produce_next_attributes(&mut self) -> Result<OpAttributesWithParent, DerivationError> { - // As we start the safe head at the disputed block's parent, we step the pipeline until the - // first attributes are produced. All batches at and before the safe head will be - // dropped, so the first payload will always be the disputed one. - loop { - match self.pipeline.step(self.derivation_state_machine.last_confirmed_safe_head()).await - { - StepResult::PreparedAttributes => { /* continue; attributes will be sent off. */ } - StepResult::AdvancedOrigin => { - let origin = - self.pipeline.origin().ok_or(PipelineError::MissingOrigin.crit())?.number; - - kona_macros::set!(counter, Metrics::DERIVATION_L1_ORIGIN, origin); - debug!(target: "derivation", l1_block = origin, "Advanced L1 origin"); - } - StepResult::OriginAdvanceErr(e) | StepResult::StepFailed(e) => { - match e { - PipelineErrorKind::Temporary(e) => { - // NotEnoughData is transient, and doesn't imply we need to wait for - // more data. We can continue stepping until we receive an Eof. - if matches!(e, PipelineError::NotEnoughData) { - continue; - } - - debug!( - target: "derivation", - "Exhausted data source for now; Yielding until the chain has extended." 
- ); - return Err(DerivationError::Yield); - } - PipelineErrorKind::Reset(e) => { - warn!(target: "derivation", "Derivation pipeline is being reset: {e}"); - - let system_config = self - .pipeline - .system_config_by_number( - self.derivation_state_machine - .last_confirmed_safe_head() - .block_info - .number, - ) - .await?; - - if matches!(e, ResetError::HoloceneActivation) { - let l1_origin = self - .pipeline - .origin() - .ok_or(PipelineError::MissingOrigin.crit())?; - - self.pipeline - .signal( - ActivationSignal { - l2_safe_head: self - .derivation_state_machine - .last_confirmed_safe_head(), - l1_origin, - system_config: Some(system_config), - } - .signal(), - ) - .await?; - } else { - if let ResetError::ReorgDetected(expected, new) = e { - warn!( - target: "derivation", - "L1 reorg detected! Expected: {expected} | New: {new}" - ); - - kona_macros::inc!(counter, Metrics::L1_REORG_COUNT); - } - // send the `reset` signal to the engine actor only when interop is - // not active. - if !self.pipeline.rollup_config().is_interop_active( - self.derivation_state_machine - .last_confirmed_safe_head() - .block_info - .timestamp, - ) { - self.engine_client.reset_engine_forkchoice().await.map_err(|e| { - error!(target: "derivation", ?e, "Failed to send reset request"); - DerivationError::Sender(Box::new(e)) - })?; - } - self.derivation_state_machine - .update(&DerivationStateUpdate::SignalNeeded)?; - return Err(DerivationError::Yield); - } - } - PipelineErrorKind::Critical(_) => { - error!(target: "derivation", "Critical derivation error: {e}"); - kona_macros::inc!(counter, Metrics::DERIVATION_CRITICAL_ERROR); - return Err(e.into()); - } - } - } - } - - // If there are any new attributes, send them to the execution actor. 
- if let Some(attrs) = self.pipeline.next() { - return Ok(attrs); - } - } - } - - async fn handle_derivation_actor_request( - &mut self, - request_type: DerivationActorRequest, - ) -> Result<(), DerivationError> { - match request_type { - DerivationActorRequest::ProcessEngineSignalRequest(signal) => { - self.signal(*signal).await; - self.derivation_state_machine.update(&DerivationStateUpdate::SignalProcessed)?; - } - DerivationActorRequest::ProcessFinalizedL1Block(finalized_l1_block) => { - // Attempt to finalize the block. If successful, notify engine. - if let Some(l2_block_number) = self.finalizer.try_finalize_next(*finalized_l1_block) - { - self.engine_client - .send_finalized_l2_block(l2_block_number) - .await - .map_err(|e| DerivationError::Sender(Box::new(e)))?; - } - } - DerivationActorRequest::ProcessL1HeadUpdateRequest(l1_head) => { - info!(target: "derivation", l1_head = ?*l1_head, "Processing l1 head update"); - - self.derivation_state_machine.update(&DerivationStateUpdate::L1DataReceived)?; - - self.attempt_derivation().await?; - } - DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(safe_head) => { - info!(target: "derivation", safe_head = ?*safe_head, "Received safe head from engine."); - self.derivation_state_machine - .update(&DerivationStateUpdate::NewAttributesConfirmed(safe_head))?; - - self.attempt_derivation().await?; - } - DerivationActorRequest::ProcessEngineSyncCompletionRequest(safe_head) => { - info!(target: "derivation", "Engine finished syncing, starting derivation."); - self.derivation_state_machine - .update(&DerivationStateUpdate::ELSyncCompleted(safe_head))?; - - self.attempt_derivation().await?; - } - } - - Ok(()) - } - - /// Attempts to process the next payload attributes. 
- async fn attempt_derivation(&mut self) -> Result<(), DerivationError> { - if self.derivation_state_machine.current_state() != DerivationState::Deriving { - info!(target: "derivation", derivation_state=?self.derivation_state_machine, "Skipping derivation."); - return Ok(()); - } - - info!(target: "derivation", derivation_state=?self.derivation_state_machine, "Attempting derivation."); - - // Advance the pipeline as much as possible, new data may be available or there still may be - // payloads in the attributes queue. - let payload_attributes = match self.produce_next_attributes().await { - Ok(attrs) => attrs, - Err(DerivationError::Yield) => { - info!(target: "derivation", "Yielding derivation until more data is available."); - self.derivation_state_machine.update(&DerivationStateUpdate::MoreDataNeeded)?; - return Ok(()); - } - Err(e) => { - return Err(e); - } - }; - trace!(target: "derivation", ?payload_attributes, "Produced payload attributes."); - - self.derivation_state_machine.update(&DerivationStateUpdate::NewAttributesDerived( - Box::new(payload_attributes.clone()), - ))?; - - // Enqueue the payload attributes for finalization tracking. - self.finalizer.enqueue_for_finalization(&payload_attributes); - - // Send payload attributes out for processing. - self.engine_client - .send_safe_l2_signal(payload_attributes.into()) - .await - .map_err(|e| DerivationError::Sender(Box::new(e)))?; - - Ok(()) - } -} - -#[async_trait] -impl<DerivationEngineClient_, PipelineSignalReceiver> NodeActor - for DerivationActor<DerivationEngineClient_, PipelineSignalReceiver> -where - DerivationEngineClient_: DerivationEngineClient + 'static, - PipelineSignalReceiver: Pipeline + SignalReceiver + Send + Sync + 'static, -{ - type Error = DerivationError; - type StartData = (); - - async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { - info!(target: "derivation", "Starting derivation"); - loop { - select! 
{ - biased; - - _ = self.cancellation_token.cancelled() => { - info!( - target: "derivation", - "Received shutdown signal. Exiting derivation task." - ); - return Ok(()); - } - req = self.inbound_request_rx.recv() => { - let Some(request_type) = req else { - error!(target: "derivation", "DerivationActor inbound request receiver closed unexpectedly"); - self.cancellation_token.cancel(); - return Err(DerivationError::RequestReceiveFailed); - }; - - self.handle_derivation_actor_request(request_type).await?; - } - } - } - } -} - -/// An error from the [DerivationActor]. -#[derive(Error, Debug)] -pub enum DerivationError { - /// An error originating from the derivation pipeline. - #[error(transparent)] - Pipeline(#[from] PipelineErrorKind), - /// Waiting for more data to be available. - #[error("Waiting for more data to be available")] - Yield, - /// An error originating from the broadcast sender. - #[error("Failed to send event to broadcast sender: {0}")] - Sender(Box<dyn std::error::Error>), - /// Failed to receive inbound request - #[error("Failed to receive inbound request")] - RequestReceiveFailed, - /// An invalid state transition occurred. 
- #[error(transparent)] - StateTransitionError(#[from] DerivationStateTransitionError), -} diff --git a/kona/crates/node/service/src/actors/derivation/delegated/actor.rs b/kona/crates/node/service/src/actors/derivation/delegated/actor.rs deleted file mode 100644 index 0e1c30a073e..00000000000 --- a/kona/crates/node/service/src/actors/derivation/delegated/actor.rs +++ /dev/null @@ -1,256 +0,0 @@ -use crate::{ - CancellableContext, DerivationActorRequest, DerivationEngineClient, NodeActor, - actors::derivation::{DerivationDelegateClient, DerivationError}, -}; -use alloy_primitives::BlockHash; -use async_trait::async_trait; -use kona_protocol::{L2BlockInfo, SyncStatus}; -use kona_providers_alloy::AlloyChainProvider; -use thiserror::Error; -use tokio::{select, sync::mpsc, time}; -use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; - -/// The [NodeActor] for the delegate derivation sub-routine. -/// -/// This actor is responsible for receiving messages from [NodeActor]s and polls -/// an external derivation delegation provider for derivation state. It validates -/// the canonicality of the L1 information associated with delegated derivation -/// results against the canonical L1 chain before forwarding updates. -/// -/// Once validated, the actor sends the derived safe and finalized L2 info -/// to the [NodeActor] responsible for the execution sub-routine. -#[derive(Debug)] -pub struct DelegateDerivationActor<DerivationEngineClient_> -where - DerivationEngineClient_: DerivationEngineClient, -{ - /// The cancellation token, shared between all tasks. - cancellation_token: CancellationToken, - /// The channel on which all inbound requests are received by the [`DelegateDerivationActor`]. - inbound_request_rx: mpsc::Receiver<DerivationActorRequest>, - /// The Engine client used to interact with the engine. - engine_client: DerivationEngineClient_, - - /// Derivation delegate provider. 
- derivation_delegate_provider: DerivationDelegateClient, - /// L1 provider for validating L1 info for derivation delegation. - l1_provider: AlloyChainProvider, - - /// The engine's L2 safe head, according to updates from the Engine. - engine_l2_safe_head: L2BlockInfo, - /// Whether the engine sync has completed. This will only ever go from false -> true. - has_engine_sync_completed: bool, -} - -impl<DerivationEngineClient_> CancellableContext - for DelegateDerivationActor<DerivationEngineClient_> -where - DerivationEngineClient_: DerivationEngineClient, -{ - fn cancelled(&self) -> WaitForCancellationFuture<'_> { - self.cancellation_token.cancelled() - } -} - -impl<DerivationEngineClient_> DelegateDerivationActor<DerivationEngineClient_> -where - DerivationEngineClient_: DerivationEngineClient, -{ - /// Creates a new instance of the [DelegateDerivationActor]. - pub fn new( - engine_client: DerivationEngineClient_, - cancellation_token: CancellationToken, - inbound_request_rx: mpsc::Receiver<DerivationActorRequest>, - derivation_delegate_provider: DerivationDelegateClient, - l1_provider: AlloyChainProvider, - ) -> Self { - Self { - cancellation_token, - inbound_request_rx, - engine_client, - derivation_delegate_provider, - l1_provider, - engine_l2_safe_head: L2BlockInfo::default(), - has_engine_sync_completed: false, - } - } -} - -#[async_trait] -impl<DerivationEngineClient_> NodeActor for DelegateDerivationActor<DerivationEngineClient_> -where - DerivationEngineClient_: DerivationEngineClient + 'static, -{ - type Error = DerivationError; - type StartData = (); - - async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { - self.start_delegate_derivation().await - } -} - -impl<DerivationEngineClient_> DelegateDerivationActor<DerivationEngineClient_> -where - DerivationEngineClient_: DerivationEngineClient + 'static, -{ - /// Hardcoded poll interval for Derivation Delegation - const DERIVATION_DELEGATE_POLL_INTERVAL: std::time::Duration = - 
std::time::Duration::from_secs(4); - - /// Validates a single L1 block height and hash against the canonical L1 chain. - async fn validate_l1_block( - &mut self, - context: &str, - l1_block_number: u64, - expected_hash: BlockHash, - ) -> Result<(), DerivationDelegationError> { - use kona_derive::ChainProvider; - - let block = self - .l1_provider - .block_info_by_number(l1_block_number) - .await - .map_err(|e| DerivationDelegationError::L1Provider(e.to_string()))?; - - if block.hash != expected_hash { - return Err(DerivationDelegationError::L1ValidationFailed { - context: context.to_string(), - number: l1_block_number, - expected: expected_hash, - actual: block.hash, - }); - } - - Ok(()) - } - - /// Verifies that the L1 info reported by the derivation delegate - /// are consistent with canonical L1 chain. - async fn validate_sync_status(&mut self, v: &SyncStatus) -> bool { - let checks = [ - ("L1 Origin of Safe L2", v.safe_l2.l1_origin.number, v.safe_l2.l1_origin.hash), - ( - "L1 Origin of Finalized L2", - v.finalized_l2.l1_origin.number, - v.finalized_l2.l1_origin.hash, - ), - ("Current L1", v.current_l1.number, v.current_l1.hash), - ]; - for (context, number, hash) in checks { - if let Err(err) = self.validate_l1_block(context, number, hash).await { - warn!( - target: "derivation", - context = context, - error = %err, - "L1 inconsistency detected at sync status from delegate" - ); - return false; - } - } - true - } - - /// Fetches, validates, and applies sync status from the derivation delegate. 
- async fn fetch_and_apply_delegate_safe_head(&mut self) -> Result<(), DerivationError> { - let sync_status = match self.derivation_delegate_provider.fetch_sync_status().await { - Ok(status) => status, - Err(_) => { - warn!(target: "derivation", "Failed to fetch sync status from delegate"); - return Ok(()); - } - }; - - if !self.validate_sync_status(&sync_status).await { - // Validation failures here are expected to be transient, so we skip processing - // this sync status and continue delegating derivation instead of treating it as - // fatal. - return Ok(()); - } - - self.engine_client - .send_safe_l2_signal(sync_status.safe_l2.into()) - .await - .map_err(|e| DerivationError::Sender(Box::new(e)))?; - - self.engine_client - .send_finalized_l2_block(sync_status.finalized_l2.block_info.number) - .await - .map_err(|e| DerivationError::Sender(Box::new(e)))?; - - debug!( - target: "derivation", - safe_l2 = ?sync_status.safe_l2, - finalized_l2 = ?sync_status.finalized_l2, - "Processed sync status from delegate" - ); - - Ok(()) - } - - async fn start_delegate_derivation(mut self) -> Result<(), DerivationError> { - info!(target: "derivation", "Starting derivation with delegation"); - let mut delegated_derivation_ticker = - time::interval(Self::DERIVATION_DELEGATE_POLL_INTERVAL); - delegated_derivation_ticker.set_missed_tick_behavior(time::MissedTickBehavior::Skip); - loop { - select! { - biased; - - _ = self.cancellation_token.cancelled() => { - info!( - target: "derivation", - "Received shutdown signal. Exiting derivation task." 
- ); - return Ok(()); - } - req = self.inbound_request_rx.recv() => { - let Some(request_type) = req else { - error!(target: "derivation", "DerivationActor inbound request receiver closed unexpectedly"); - self.cancellation_token.cancel(); - return Err(DerivationError::RequestReceiveFailed); - }; - - self.handle_derivation_delegation_actor_request(request_type).await?; - } - _ = delegated_derivation_ticker.tick(), - if self.has_engine_sync_completed => { - self.fetch_and_apply_delegate_safe_head().await?; - } - } - } - } - - async fn handle_derivation_delegation_actor_request( - &mut self, - request_type: DerivationActorRequest, - ) -> Result<(), DerivationError> { - match request_type { - DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(safe_head) => { - debug!(target: "derivation", safe_head = ?*safe_head, "Received safe head from engine."); - self.engine_l2_safe_head = *safe_head; - } - DerivationActorRequest::ProcessEngineSyncCompletionRequest(safe_head) => { - info!(target: "derivation", "Engine finished syncing, starting derivation."); - self.engine_l2_safe_head = *safe_head; - self.has_engine_sync_completed = true; - } - DerivationActorRequest::ProcessEngineSignalRequest(_) | - DerivationActorRequest::ProcessFinalizedL1Block(_) | - DerivationActorRequest::ProcessL1HeadUpdateRequest(_) => { - debug!(target: "derivation", "Ignoring request while derivation delegation: {:?}", request_type); - } - } - Ok(()) - } -} - -#[derive(Error, Debug)] -enum DerivationDelegationError { - /// The L1 provider returned an error (network, RPC, etc.) - #[error("L1 provider error: {0}")] - L1Provider(String), - - /// The hash provided by the derivation delegation does not match the canonical chain. 
- #[error("L1 inconsistency in {context} at block {number}: expected {expected}, got {actual}")] - L1ValidationFailed { context: String, number: u64, expected: BlockHash, actual: BlockHash }, -} diff --git a/kona/crates/node/service/src/actors/derivation/engine_client.rs b/kona/crates/node/service/src/actors/derivation/engine_client.rs deleted file mode 100644 index d27546eba1e..00000000000 --- a/kona/crates/node/service/src/actors/derivation/engine_client.rs +++ /dev/null @@ -1,76 +0,0 @@ -use crate::{EngineActorRequest, EngineClientError, EngineClientResult, ResetRequest}; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_engine::ConsolidateInput; -use std::fmt::Debug; -use tokio::sync::mpsc; - -/// Client to use to interact with the engine. -#[cfg_attr(test, mockall::automock(type SafeL2Signal = OpAttributesWithParent;))] -#[async_trait] -pub trait DerivationEngineClient: Debug + Send + Sync { - /// Resets the engine's forkchoice. - async fn reset_engine_forkchoice(&self) -> EngineClientResult<()>; - - /// Sends a request to finalize the L2 block at the provided block number. - /// Note: This does not wait for the engine to process it. - async fn send_finalized_l2_block(&self, block_number: u64) -> EngineClientResult<()>; - - /// Sends a consolidation signal to the engine. - /// - /// This is the unified entry point for all consolidation-related inputs, - /// including derived attributes and safe L2 block information, as represented - /// by [`ConsolidateInput`]. - /// - /// Note: This does not wait for the engine to process it. - async fn send_safe_l2_signal(&self, signal: ConsolidateInput) -> EngineClientResult<()>; -} - -/// Client to use to send messages to the Engine Actor's inbound channel. -#[derive(Constructor, Debug)] -pub struct QueuedDerivationEngineClient { - /// A channel to use to send the [`EngineActorRequest`]s to the EngineActor. 
- pub engine_actor_request_tx: mpsc::Sender<EngineActorRequest>, -} - -#[async_trait] -impl DerivationEngineClient for QueuedDerivationEngineClient { - async fn reset_engine_forkchoice(&self) -> EngineClientResult<()> { - let (result_tx, mut result_rx) = mpsc::channel(1); - - info!(target: "derivation", "Sending reset request to engine."); - self.engine_actor_request_tx - .send(EngineActorRequest::ResetRequest(Box::new(ResetRequest { result_tx }))) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; - - result_rx - .recv() - .await - .inspect(|_| info!(target: "derivation", "Engine reset successfully.")) - .ok_or_else(|| { - error!(target: "derivation_engine_client", "Failed to receive built payload"); - EngineClientError::ResponseError("response channel closed.".to_string()) - })? - } - - async fn send_finalized_l2_block(&self, block_number: u64) -> EngineClientResult<()> { - trace!(target: "derivation", block_number, "Sending finalized L2 block number to engine."); - self.engine_actor_request_tx - .send(EngineActorRequest::ProcessFinalizedL2BlockNumberRequest(Box::new(block_number))) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; - - Ok(()) - } - - async fn send_safe_l2_signal(&self, signal: ConsolidateInput) -> EngineClientResult<()> { - trace!(target: "derivation", ?signal, "Sending safe L2 signal info to engine."); - self.engine_actor_request_tx - .send(EngineActorRequest::ProcessSafeL2SignalRequest(signal)) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; - - Ok(()) - } -} diff --git a/kona/crates/node/service/src/actors/derivation/request.rs b/kona/crates/node/service/src/actors/derivation/request.rs deleted file mode 100644 index 3ee764324e6..00000000000 --- a/kona/crates/node/service/src/actors/derivation/request.rs +++ /dev/null @@ -1,36 +0,0 @@ -use kona_derive::Signal; -use kona_protocol::{BlockInfo, 
L2BlockInfo}; -use thiserror::Error; - -/// The result of an Engine client call. -pub type DerivationClientResult<T> = Result<T, DerivationClientError>; - -/// Error making requests to the [`crate::DerivationActor`]. -#[derive(Debug, Error)] -pub enum DerivationClientError { - /// Error making a request to the [`crate::DerivationActor`]. The request never made it there. - #[error("Error making a request to the derivation actor: {0}.")] - RequestError(String), - - /// Error receiving response from the [`crate::DerivationActor`]. - /// This means the request may or may not have succeeded. - #[error("Error receiving response from the derivation actor: {0}..")] - ResponseError(String), -} - -/// Inbound requests that the [`crate::DerivationActor`] can process. -#[derive(Debug)] -pub enum DerivationActorRequest { - /// Request to process the fact that Engine sync has completed, along with the current safe - /// head. - ProcessEngineSyncCompletionRequest(Box<L2BlockInfo>), - /// Request to process the provided L2 engine safe head update. - ProcessEngineSafeHeadUpdateRequest(Box<L2BlockInfo>), - /// A request containing a [`Signal`] to the derivation pipeline. - /// This allows the Engine to send the DerivationActor signals (e.g. to Flush or Reset). - ProcessEngineSignalRequest(Box<Signal>), - /// A request to process the provided finalized L1 [`BlockInfo`]. - ProcessFinalizedL1Block(Box<BlockInfo>), - /// Request to process the provided L1 head block update. 
- ProcessL1HeadUpdateRequest(Box<BlockInfo>), -} diff --git a/kona/crates/node/service/src/actors/engine/client.rs b/kona/crates/node/service/src/actors/engine/client.rs deleted file mode 100644 index a4af93ff22e..00000000000 --- a/kona/crates/node/service/src/actors/engine/client.rs +++ /dev/null @@ -1,77 +0,0 @@ -use crate::{DerivationActorRequest, DerivationClientError, DerivationClientResult}; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_derive::Signal; -use kona_protocol::L2BlockInfo; -use std::fmt::Debug; -use tokio::sync::mpsc; - -/// Client to use to interact with the [`crate::DerivationActor`]. -#[cfg_attr(test, mockall::automock)] -#[async_trait] -pub trait EngineDerivationClient: Debug + Send + Sync { - /// Notifies the [`crate::DerivationActor`] that engine syncing has completed. - /// Note: Does not wait for the derivation client to process this message. - async fn notify_sync_completed(&self, safe_head: L2BlockInfo) -> DerivationClientResult<()>; - - /// Sends the new engine safe_head to the [`crate::DerivationActor`]. - /// Note: Does not wait for the derivation client to process this message. - async fn send_new_engine_safe_head(&self, safe_head: L2BlockInfo) - -> DerivationClientResult<()>; - - /// Sends the [`crate::DerivationActor`] the provided [`Signal`]. - /// Note: Does not wait for the derivation client to process this message. - async fn send_signal(&self, signal: Signal) -> DerivationClientResult<()>; -} - -/// Client to use to send messages to the [`crate::DerivationActor`]'s inbound channel. -#[derive(Constructor, Debug)] -pub struct QueuedEngineDerivationClient { - /// A channel to use to send the [`DerivationActorRequest`]s to the [`crate::DerivationActor`]. 
- pub derivation_actor_request_tx: mpsc::Sender<DerivationActorRequest>, -} - -#[async_trait] -impl EngineDerivationClient for QueuedEngineDerivationClient { - async fn notify_sync_completed(&self, safe_head: L2BlockInfo) -> DerivationClientResult<()> { - info!(target: "engine", "Sending sync completed to derivation actor"); - - self.derivation_actor_request_tx - .send(DerivationActorRequest::ProcessEngineSyncCompletionRequest(Box::new(safe_head))) - .await - .map_err(|_| { - DerivationClientError::RequestError("request channel closed.".to_string()) - })?; - - Ok(()) - } - - async fn send_new_engine_safe_head( - &self, - safe_head: L2BlockInfo, - ) -> DerivationClientResult<()> { - info!(target: "engine", safe_head = ?safe_head, "Sending new safe head to derivation actor"); - - self.derivation_actor_request_tx - .send(DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(Box::new(safe_head))) - .await - .map_err(|_| { - DerivationClientError::RequestError("request channel closed.".to_string()) - })?; - - Ok(()) - } - - async fn send_signal(&self, signal: Signal) -> DerivationClientResult<()> { - info!(target: "engine", signal = ?signal, "Sending signal to derivation actor"); - - self.derivation_actor_request_tx - .send(DerivationActorRequest::ProcessEngineSignalRequest(Box::new(signal))) - .await - .map_err(|_| { - DerivationClientError::RequestError("request channel closed.".to_string()) - })?; - - Ok(()) - } -} diff --git a/kona/crates/node/service/src/actors/engine/request.rs b/kona/crates/node/service/src/actors/engine/request.rs deleted file mode 100644 index ecca9ee1c6c..00000000000 --- a/kona/crates/node/service/src/actors/engine/request.rs +++ /dev/null @@ -1,97 +0,0 @@ -use alloy_rpc_types_engine::PayloadId; -use kona_engine::{BuildTaskError, ConsolidateInput, EngineQueries, SealTaskError}; -use kona_protocol::OpAttributesWithParent; -use kona_rpc::{RollupBoostAdminQuery, RollupBoostHealthQuery}; -use 
op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use thiserror::Error; -use tokio::sync::mpsc; - -/// The result of an Engine client call. -pub type EngineClientResult<T> = Result<T, EngineClientError>; - -/// Error making requests to the BlockEngine. -#[derive(Debug, Error)] -pub enum EngineClientError { - /// Error making a request to the engine. The request never made it there. - #[error("Error making a request to the engine: {0}.")] - RequestError(String), - - /// Error receiving response from the engine. - /// This means the request may or may not have succeeded. - #[error("Error receiving response from the engine: {0}.")] - ResponseError(String), - - /// An error occurred starting to build a block. - #[error(transparent)] - StartBuildError(#[from] BuildTaskError), - - /// An error occurred sealing a block. - #[error(transparent)] - SealError(#[from] SealTaskError), - - /// An error occurred performing the reset. - #[error("An error occurred performing the reset: {0}.")] - ResetForkchoiceError(String), -} - -/// Inbound requests that the [`crate::EngineActor`] can process. -#[derive(Debug)] -pub enum EngineActorRequest { - /// Request to build. - BuildRequest(Box<BuildRequest>), - /// Request to consolidate using a safe L2 signal from attributes or delegated safe-block - /// derivation - ProcessSafeL2SignalRequest(ConsolidateInput), - /// Request to finalize the L2 block at the provided block number. - ProcessFinalizedL2BlockNumberRequest(Box<u64>), - /// Request to insert the provided unsafe block. - ProcessUnsafeL2BlockRequest(Box<OpExecutionPayloadEnvelope>), - /// Request to reset engine forkchoice. - ResetRequest(Box<ResetRequest>), - /// Request for the engine to process the provided RPC request. - RpcRequest(Box<EngineRpcRequest>), - /// Request to seal the block with the provided details. - SealRequest(Box<SealRequest>), -} - -/// RPC Request for the engine to handle. -#[derive(Debug)] -pub enum EngineRpcRequest { - /// Engine RPC query. 
- EngineQuery(Box<EngineQueries>), - /// Rollup boost admin request. - RollupBoostAdminRequest(Box<RollupBoostAdminQuery>), - /// Rollup boost health request. - RollupBoostHealthRequest(Box<RollupBoostHealthQuery>), -} - -/// A request to build a payload. -/// Contains the attributes to build and a channel to send back the resulting `PayloadId`. -#[derive(Debug)] -pub struct BuildRequest { - /// The [`OpAttributesWithParent`] from which the block build should be started. - pub attributes: OpAttributesWithParent, - /// The channel on which the result, successful or not, will be sent. - pub result_tx: mpsc::Sender<PayloadId>, -} - -/// A request to reset the engine forkchoice. -/// Optionally contains a channel to send back the response if the caller would like to know that -/// the request was successfully processed. -#[derive(Debug)] -pub struct ResetRequest { - /// response will be sent to this channel, if `Some`. - pub result_tx: mpsc::Sender<EngineClientResult<()>>, -} - -/// A request to seal and canonicalize a payload. -/// Contains the `PayloadId`, attributes, and a channel to send back the result. -#[derive(Debug)] -pub struct SealRequest { - /// The `PayloadId` to seal and canonicalize. - pub payload_id: PayloadId, - /// The attributes necessary for the seal operation. - pub attributes: OpAttributesWithParent, - /// The channel on which the result, successful or not, will be sent. - pub result_tx: mpsc::Sender<Result<OpExecutionPayloadEnvelope, SealTaskError>>, -} diff --git a/kona/crates/node/service/src/actors/mod.rs b/kona/crates/node/service/src/actors/mod.rs deleted file mode 100644 index eb63a0b280c..00000000000 --- a/kona/crates/node/service/src/actors/mod.rs +++ /dev/null @@ -1,56 +0,0 @@ -//! [NodeActor] services for the node. -//! -//! 
[NodeActor]: super::NodeActor - -mod traits; -pub use traits::{CancellableContext, NodeActor}; - -mod engine; -pub use engine::{ - BuildRequest, EngineActor, EngineActorRequest, EngineClientError, EngineClientResult, - EngineConfig, EngineDerivationClient, EngineError, EngineProcessingRequest, EngineProcessor, - EngineRequestReceiver, EngineRpcProcessor, EngineRpcRequest, EngineRpcRequestReceiver, - QueuedEngineDerivationClient, ResetRequest, SealRequest, -}; - -mod rpc; -pub use rpc::{ - QueuedEngineRpcClient, QueuedSequencerAdminAPIClient, RollupBoostAdminApiClient, - RollupBoostHealthRpcClient, RpcActor, RpcActorError, RpcContext, -}; - -mod derivation; -pub use derivation::{ - DelegateDerivationActor, DerivationActor, DerivationActorRequest, DerivationClientError, - DerivationClientResult, DerivationDelegateClient, DerivationDelegateClientError, - DerivationEngineClient, DerivationError, DerivationState, DerivationStateMachine, - DerivationStateTransitionError, DerivationStateUpdate, QueuedDerivationEngineClient, -}; - -mod l1_watcher; -pub use l1_watcher::{ - BlockStream, L1WatcherActor, L1WatcherActorError, L1WatcherDerivationClient, - QueuedL1WatcherDerivationClient, -}; - -mod network; -pub use network::{ - NetworkActor, NetworkActorError, NetworkBuilder, NetworkBuilderError, NetworkConfig, - NetworkDriver, NetworkDriverError, NetworkEngineClient, NetworkHandler, NetworkInboundData, - QueuedNetworkEngineClient, QueuedUnsafePayloadGossipClient, UnsafePayloadGossipClient, - UnsafePayloadGossipClientError, -}; - -mod sequencer; - -pub use sequencer::{ - Conductor, ConductorClient, ConductorError, DelayedL1OriginSelectorProvider, L1OriginSelector, - L1OriginSelectorError, L1OriginSelectorProvider, OriginSelector, QueuedSequencerEngineClient, - SequencerActor, SequencerActorError, SequencerAdminQuery, SequencerConfig, - SequencerEngineClient, -}; - -#[cfg(test)] -pub use network::MockUnsafePayloadGossipClient; -#[cfg(test)] -pub use sequencer::{MockConductor, 
MockOriginSelector, MockSequencerEngineClient}; diff --git a/kona/crates/node/service/src/actors/network/actor.rs b/kona/crates/node/service/src/actors/network/actor.rs deleted file mode 100644 index 58156f12555..00000000000 --- a/kona/crates/node/service/src/actors/network/actor.rs +++ /dev/null @@ -1,332 +0,0 @@ -use alloy_primitives::Address; -use async_trait::async_trait; -use kona_gossip::P2pRpcRequest; -use kona_rpc::NetworkAdminQuery; -use kona_sources::BlockSignerError; -use libp2p::TransportError; -use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelope, OpNetworkPayloadEnvelope}; -use thiserror::Error; -use tokio::{self, select, sync::mpsc}; -use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; - -use crate::{ - CancellableContext, NetworkEngineClient, NodeActor, - actors::network::{ - builder::NetworkBuilder, driver::NetworkDriverError, error::NetworkBuilderError, - }, -}; - -/// The network actor handles two core networking components of the rollup node: -/// - *discovery*: Peer discovery over UDP using discv5. -/// - *gossip*: Block gossip over TCP using libp2p. -/// -/// The network actor itself is a light wrapper around the [`NetworkBuilder`]. -/// -/// ## Example -/// -/// ```rust,ignore -/// use kona_gossip::NetworkDriver; -/// use std::net::{IpAddr, Ipv4Addr, SocketAddr}; -/// -/// let chain_id = 10; -/// let signer = Address::random(); -/// let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 9099); -/// -/// // Construct the `Network` using the builder. -/// // let mut driver = Network::builder() -/// // .with_unsafe_block_signer(signer) -/// // .with_chain_id(chain_id) -/// // .with_gossip_addr(socket) -/// // .build() -/// // .unwrap(); -/// -/// // Construct the `NetworkActor` with the [`Network`]. 
-/// // let actor = NetworkActor::new(driver); -/// ``` -#[derive(Debug)] -pub struct NetworkActor<NetworkEngineClient_: NetworkEngineClient> { - /// Network driver - pub(super) builder: NetworkBuilder, - /// The cancellation token, shared between all tasks. - pub(super) cancellation_token: CancellationToken, - /// A channel to receive the unsafe block signer address. - pub(super) signer: mpsc::Receiver<Address>, - /// Handler for p2p RPC Requests. - pub(super) p2p_rpc: mpsc::Receiver<P2pRpcRequest>, - /// A channel to receive admin rpc requests. - pub(super) admin_rpc: mpsc::Receiver<NetworkAdminQuery>, - /// A channel to receive unsafe blocks and send them through the gossip layer. - pub(super) publish_rx: mpsc::Receiver<OpExecutionPayloadEnvelope>, - /// A channel to use to interact with the engine actor. - pub(super) engine_client: NetworkEngineClient_, -} - -/// The inbound data for the network actor. -#[derive(Debug)] -pub struct NetworkInboundData { - /// A channel to send the unsafe block signer address to the network actor. - pub signer: mpsc::Sender<Address>, - /// Handler for p2p RPC Requests sent to the network actor. - pub p2p_rpc: mpsc::Sender<P2pRpcRequest>, - /// Handler for admin RPC Requests. - pub admin_rpc: mpsc::Sender<NetworkAdminQuery>, - /// A channel to send unsafe blocks to the network actor. - /// This channel should only be used by the sequencer actor/admin RPC api to forward their - /// newly produced unsafe blocks to the network actor. 
- pub gossip_payload_tx: mpsc::Sender<OpExecutionPayloadEnvelope>, -} - -impl<NetworkEngineClient_: NetworkEngineClient> NetworkActor<NetworkEngineClient_> { - /// Constructs a new [`NetworkActor`] given the [`NetworkBuilder`] - pub fn new( - engine_client: NetworkEngineClient_, - cancellation_token: CancellationToken, - driver: NetworkBuilder, - ) -> (NetworkInboundData, Self) { - let (signer_tx, signer_rx) = mpsc::channel(16); - let (rpc_tx, rpc_rx) = mpsc::channel(1024); - let (admin_rpc_tx, admin_rpc_rx) = mpsc::channel(1024); - let (publish_tx, publish_rx) = tokio::sync::mpsc::channel(256); - let actor = Self { - builder: driver, - cancellation_token, - signer: signer_rx, - p2p_rpc: rpc_rx, - admin_rpc: admin_rpc_rx, - publish_rx, - engine_client, - }; - let outbound_data = NetworkInboundData { - signer: signer_tx, - p2p_rpc: rpc_tx, - admin_rpc: admin_rpc_tx, - gossip_payload_tx: publish_tx, - }; - (outbound_data, actor) - } -} - -impl<E: NetworkEngineClient> CancellableContext for NetworkActor<E> { - fn cancelled(&self) -> WaitForCancellationFuture<'_> { - self.cancellation_token.cancelled() - } -} - -/// An error from the network actor. -#[derive(Debug, Error)] -pub enum NetworkActorError { - /// Network builder error. - #[error(transparent)] - NetworkBuilder(#[from] NetworkBuilderError), - /// Network driver error. - #[error(transparent)] - NetworkDriver(#[from] NetworkDriverError), - /// Driver startup failed. - #[error(transparent)] - DriverStartup(#[from] TransportError<std::io::Error>), - /// The network driver was missing its unsafe block receiver. - #[error("Missing unsafe block receiver in network driver")] - MissingUnsafeBlockReceiver, - /// The network driver was missing its unsafe block signer sender. - #[error("Missing unsafe block signer in network driver")] - MissingUnsafeBlockSigner, - /// Channel closed unexpectedly. - #[error("Channel closed unexpectedly")] - ChannelClosed, - /// Failed to sign the payload. 
- #[error("Failed to sign the payload: {0}")] - FailedToSignPayload(#[from] BlockSignerError), -} - -#[async_trait] -impl<NetworkEngineClient_: NetworkEngineClient + 'static> NodeActor - for NetworkActor<NetworkEngineClient_> -{ - type Error = NetworkActorError; - type StartData = (); - - async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { - let mut handler = self.builder.build()?.start().await?; - - // New unsafe block channel. - let (unsafe_block_tx, mut unsafe_block_rx) = tokio::sync::mpsc::unbounded_channel(); - - loop { - select! { - _ = self.cancellation_token.cancelled() => { - info!( - target: "network", - "Received shutdown signal. Exiting network task." - ); - return Ok(()); - } - block = unsafe_block_rx.recv() => { - let Some(block) = block else { - error!(target: "node::p2p", "The unsafe block receiver channel has closed"); - return Err(NetworkActorError::ChannelClosed); - }; - - if self.engine_client.send_unsafe_block(block).await.is_err() { - warn!(target: "network", "Failed to forward unsafe block to engine"); - return Err(NetworkActorError::ChannelClosed); - } - } - signer = self.signer.recv() => { - let Some(signer) = signer else { - warn!( - target: "network", - "Found no unsafe block signer on receive" - ); - return Err(NetworkActorError::ChannelClosed); - }; - if handler.unsafe_block_signer_sender.send(signer).is_err() { - warn!( - target: "network", - "Failed to send unsafe block signer to network handler", - ); - } - } - Some(block) = self.publish_rx.recv(), if !self.publish_rx.is_closed() => { - let timestamp = block.execution_payload.timestamp(); - let selector = |handler: &kona_gossip::BlockHandler| { - handler.topic(timestamp) - }; - let Some(signer) = handler.signer.as_ref() else { - warn!(target: "net", "No local signer available to sign the payload"); - continue; - }; - - let chain_id = handler.discovery.chain_id; - - let sender_address = *handler.unsafe_block_signer_sender.borrow(); - - let payload_hash = 
block.payload_hash(); - let signature = signer.sign_block(payload_hash, chain_id, sender_address).await?; - - let payload = OpNetworkPayloadEnvelope { - payload: block.execution_payload, - parent_beacon_block_root: block.parent_beacon_block_root, - signature, - payload_hash, - }; - - match handler.gossip.publish(selector, Some(payload)) { - Ok(id) => info!("Published unsafe payload | {:?}", id), - Err(e) => warn!("Failed to publish unsafe payload: {:?}", e), - } - } - event = handler.gossip.next() => { - let Some(event) = event else { - error!(target: "node::p2p", "The gossip swarm stream has ended"); - return Err(NetworkActorError::ChannelClosed); - }; - - if let Some(payload) = handler.gossip.handle_event(event) { - if unsafe_block_tx.send(payload.into()).is_err() { - warn!(target: "node::p2p", "Failed to send unsafe block to network handler"); - } - } - }, - enr = handler.enr_receiver.recv() => { - let Some(enr) = enr else { - error!(target: "node::p2p", "The enr receiver channel has closed"); - return Err(NetworkActorError::ChannelClosed); - }; - handler.gossip.dial(enr); - }, - _ = handler.peer_score_inspector.tick(), if handler.gossip.peer_monitoring.as_ref().is_some() => { - handler.handle_peer_monitoring().await; - }, - Some(NetworkAdminQuery::PostUnsafePayload { payload }) = self.admin_rpc.recv(), if !self.admin_rpc.is_closed() => { - debug!(target: "node::p2p", "Broadcasting unsafe payload from admin api"); - if unsafe_block_tx.send(payload).is_err() { - warn!(target: "node::p2p", "Failed to send unsafe block to network handler"); - } - }, - Some(req) = self.p2p_rpc.recv(), if !self.p2p_rpc.is_closed() => { - req.handle(&mut handler.gossip, &handler.discovery); - }, - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::B256; - use alloy_rpc_types_engine::{ExecutionPayloadV1, ExecutionPayloadV3}; - use alloy_signer::SignerSync; - use alloy_signer_local::PrivateKeySigner; - use arbitrary::Arbitrary; - use 
op_alloy_rpc_types_engine::OpExecutionPayload; - use rand::Rng; - - #[test] - fn test_payload_signature_roundtrip_v1() { - let mut bytes = [0u8; 4096]; - rand::rng().fill(bytes.as_mut_slice()); - - let pubkey = PrivateKeySigner::random(); - let expected_address = pubkey.address(); - const CHAIN_ID: u64 = 1337; - - let block = OpExecutionPayloadEnvelope { - execution_payload: OpExecutionPayload::V1( - ExecutionPayloadV1::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), - ), - parent_beacon_block_root: None, - }; - - let payload_hash = block.payload_hash(); - let signature = pubkey.sign_hash_sync(&payload_hash.signature_message(CHAIN_ID)).unwrap(); - let payload = OpNetworkPayloadEnvelope { - payload: block.execution_payload, - parent_beacon_block_root: block.parent_beacon_block_root, - signature, - payload_hash, - }; - let encoded_payload = payload.encode_v1().unwrap(); - - let decoded_payload = OpNetworkPayloadEnvelope::decode_v1(&encoded_payload).unwrap(); - - let msg = decoded_payload.payload_hash.signature_message(CHAIN_ID); - let msg_signer = decoded_payload.signature.recover_address_from_prehash(&msg).unwrap(); - - assert_eq!(expected_address, msg_signer); - } - - #[test] - fn test_payload_signature_roundtrip_v3() { - let mut bytes = [0u8; 4096]; - rand::rng().fill(bytes.as_mut_slice()); - - let pubkey = PrivateKeySigner::random(); - let expected_address = pubkey.address(); - const CHAIN_ID: u64 = 1337; - - let block = OpExecutionPayloadEnvelope { - execution_payload: OpExecutionPayload::V3( - ExecutionPayloadV3::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), - ), - parent_beacon_block_root: Some(B256::random()), - }; - - let payload_hash = block.payload_hash(); - let signature = pubkey.sign_hash_sync(&payload_hash.signature_message(CHAIN_ID)).unwrap(); - let payload = OpNetworkPayloadEnvelope { - payload: block.execution_payload, - parent_beacon_block_root: block.parent_beacon_block_root, - signature, - payload_hash, - }; - let 
encoded_payload = payload.encode_v3().unwrap(); - - let decoded_payload = OpNetworkPayloadEnvelope::decode_v3(&encoded_payload).unwrap(); - - let msg = decoded_payload.payload_hash.signature_message(CHAIN_ID); - let msg_signer = decoded_payload.signature.recover_address_from_prehash(&msg).unwrap(); - - assert_eq!(expected_address, msg_signer); - } -} diff --git a/kona/crates/node/service/src/actors/network/engine_client.rs b/kona/crates/node/service/src/actors/network/engine_client.rs deleted file mode 100644 index 226b93063fc..00000000000 --- a/kona/crates/node/service/src/actors/network/engine_client.rs +++ /dev/null @@ -1,33 +0,0 @@ -use crate::{EngineActorRequest, EngineClientError, EngineClientResult}; -use async_trait::async_trait; -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use std::fmt::Debug; -use tokio::sync::mpsc; - -/// Client used to interact with the Engine. -#[cfg_attr(test, mockall::automock)] -#[async_trait] -pub trait NetworkEngineClient: Debug + Send + Sync { - /// Note: a successful response does not mean the block was successfully inserted. - /// This function just sends the message to the engine. It does not wait for a response. - async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()>; -} - -/// Client to use to send unsafe blocks to the Engine's inbound channel. -#[derive(Debug)] -pub struct QueuedNetworkEngineClient { - /// A channel to use to send the EngineActor requests. 
- pub engine_actor_request_tx: mpsc::Sender<EngineActorRequest>, -} - -#[async_trait] -impl NetworkEngineClient for QueuedNetworkEngineClient { - async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { - trace!(target: "network", ?block, "Sending unsafe block to engine."); - Ok(self - .engine_actor_request_tx - .send(EngineActorRequest::ProcessUnsafeL2BlockRequest(Box::new(block))) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?) - } -} diff --git a/kona/crates/node/service/src/actors/sequencer/actor.rs b/kona/crates/node/service/src/actors/sequencer/actor.rs deleted file mode 100644 index 59b4b8efd7c..00000000000 --- a/kona/crates/node/service/src/actors/sequencer/actor.rs +++ /dev/null @@ -1,525 +0,0 @@ -//! The [`SequencerActor`]. - -use crate::{ - CancellableContext, NodeActor, SequencerAdminQuery, UnsafePayloadGossipClient, - actors::{ - SequencerEngineClient, - engine::EngineClientError, - sequencer::{ - conductor::Conductor, - error::SequencerActorError, - metrics::{ - update_attributes_build_duration_metrics, update_block_build_duration_metrics, - update_conductor_commitment_duration_metrics, update_seal_duration_metrics, - update_total_transactions_sequenced, - }, - origin_selector::OriginSelector, - }, - }, -}; -use alloy_rpc_types_engine::PayloadId; -use async_trait::async_trait; -use kona_derive::{AttributesBuilder, PipelineErrorKind}; -use kona_engine::{InsertTaskError, SealTaskError, SynchronizeTaskError}; -use kona_genesis::RollupConfig; -use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; -use op_alloy_rpc_types_engine::OpPayloadAttributes; -use std::{ - sync::Arc, - time::{Duration, Instant, SystemTime, UNIX_EPOCH}, -}; -use tokio::{select, sync::mpsc}; -use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; - -/// The handle to a block that has been started but not sealed. 
-#[derive(Debug)] -pub(super) struct UnsealedPayloadHandle { - /// The [`PayloadId`] of the unsealed payload. - pub payload_id: PayloadId, - /// The [`OpAttributesWithParent`] used to start block building. - pub attributes_with_parent: OpAttributesWithParent, -} - -/// The return payload of the `seal_last_and_start_next` function. This allows the sequencer -/// to make an informed decision about when to seal and build the next block. -#[derive(Debug)] -struct SealLastStartNextResult { - /// The [`UnsealedPayloadHandle`] that was built. - pub unsealed_payload_handle: Option<UnsealedPayloadHandle>, - /// How long it took to execute the seal operation. - pub seal_duration: Duration, -} - -/// The [`SequencerActor`] is responsible for building L2 blocks on top of the current unsafe head -/// and scheduling them to be signed and gossipped by the P2P layer, extending the L2 chain with new -/// blocks. -#[derive(Debug)] -pub struct SequencerActor< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, -> where - AttributesBuilder_: AttributesBuilder, - Conductor_: Conductor, - OriginSelector_: OriginSelector, - SequencerEngineClient_: SequencerEngineClient, - UnsafePayloadGossipClient_: UnsafePayloadGossipClient, -{ - /// Receiver for admin API requests. - pub admin_api_rx: mpsc::Receiver<SequencerAdminQuery>, - /// The attributes builder used for block building. - pub attributes_builder: AttributesBuilder_, - /// The cancellation token, shared between all tasks. - pub cancellation_token: CancellationToken, - /// The optional conductor RPC client. - pub conductor: Option<Conductor_>, - /// The struct used to interact with the engine. - pub engine_client: SequencerEngineClient_, - /// Whether the sequencer is active. - pub is_active: bool, - /// Whether the sequencer is in recovery mode. - pub in_recovery_mode: bool, - /// The struct used to determine the next L1 origin. 
- pub origin_selector: OriginSelector_, - /// The rollup configuration. - pub rollup_config: Arc<RollupConfig>, - /// A client to asynchronously sign and gossip built payloads to the network actor. - pub unsafe_payload_gossip_client: UnsafePayloadGossipClient_, -} - -impl< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, -> - SequencerActor< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, - > -where - AttributesBuilder_: AttributesBuilder, - Conductor_: Conductor, - OriginSelector_: OriginSelector, - SequencerEngineClient_: SequencerEngineClient, - UnsafePayloadGossipClient_: UnsafePayloadGossipClient, -{ - /// Seals and commits the last pending block, if one exists and starts the build job for the - /// next L2 block, on top of the current unsafe head. - /// - /// If a new block was started, it will return the associated [`UnsealedPayloadHandle`] so - /// that it may be sealed and committed in a future call to this function. - async fn seal_last_and_start_next( - &mut self, - payload_to_seal: Option<&UnsealedPayloadHandle>, - ) -> Result<SealLastStartNextResult, SequencerActorError> { - let seal_duration = match payload_to_seal { - Some(to_seal) => { - let seal_start = Instant::now(); - self.seal_and_commit_payload_if_applicable(to_seal).await?; - seal_start.elapsed() - } - None => Duration::default(), - }; - - let unsealed_payload_handle = self.build_unsealed_payload().await?; - - Ok(SealLastStartNextResult { unsealed_payload_handle, seal_duration }) - } - - /// Sends a seal request to seal the provided [`UnsealedPayloadHandle`], committing and - /// gossiping the resulting block, if one is built. 
- async fn seal_and_commit_payload_if_applicable( - &mut self, - unsealed_payload_handle: &UnsealedPayloadHandle, - ) -> Result<(), SequencerActorError> { - let seal_request_start = Instant::now(); - - // Send the seal request to the engine to seal the unsealed block. - let payload = self - .engine_client - .seal_and_canonicalize_block( - unsealed_payload_handle.payload_id, - unsealed_payload_handle.attributes_with_parent.clone(), - ) - .await?; - - update_seal_duration_metrics(seal_request_start.elapsed()); - - let payload_transaction_count = - unsealed_payload_handle.attributes_with_parent.count_transactions(); - update_total_transactions_sequenced(payload_transaction_count); - - // If the conductor is available, commit the payload to it. - if let Some(conductor) = &self.conductor { - let _conductor_commitment_start = Instant::now(); - if let Err(err) = conductor.commit_unsafe_payload(&payload).await { - error!(target: "sequencer", ?err, "Failed to commit unsafe payload to conductor"); - } - - update_conductor_commitment_duration_metrics(_conductor_commitment_start.elapsed()); - } - - self.unsafe_payload_gossip_client - .schedule_execution_payload_gossip(payload) - .await - .map_err(Into::into) - } - - /// Starts building an L2 block by creating and populating payload attributes referencing the - /// correct L1 origin block and sending them to the block engine. - pub(super) async fn build_unsealed_payload( - &mut self, - ) -> Result<Option<UnsealedPayloadHandle>, SequencerActorError> { - let unsafe_head = self.engine_client.get_unsafe_head().await?; - - let Some(l1_origin) = self.get_next_payload_l1_origin(unsafe_head).await? else { - // Temporary error - retry on next tick. - return Ok(None); - }; - - info!( - target: "sequencer", - parent_num = unsafe_head.block_info.number, - l1_origin_num = l1_origin.number, - "Started sequencing new block" - ); - - // Build the payload attributes for the next block. 
- let attributes_build_start = Instant::now(); - - let Some(attributes_with_parent) = self.build_attributes(unsafe_head, l1_origin).await? - else { - // Temporary error or reset - retry on next tick. - return Ok(None); - }; - - update_attributes_build_duration_metrics(attributes_build_start.elapsed()); - - // Send the built attributes to the engine to be built. - let build_request_start = Instant::now(); - - let payload_id = - self.engine_client.start_build_block(attributes_with_parent.clone()).await?; - - update_block_build_duration_metrics(build_request_start.elapsed()); - - Ok(Some(UnsealedPayloadHandle { payload_id, attributes_with_parent })) - } - - /// Determines and validates the L1 origin block for the provided L2 unsafe head. - /// Returns `Ok(None)` for temporary errors that should be retried. - async fn get_next_payload_l1_origin( - &mut self, - unsafe_head: L2BlockInfo, - ) -> Result<Option<BlockInfo>, SequencerActorError> { - let l1_origin = match self - .origin_selector - .next_l1_origin(unsafe_head, self.in_recovery_mode) - .await - { - Ok(l1_origin) => l1_origin, - Err(err) => { - warn!( - target: "sequencer", - ?err, - "Temporary error occurred while selecting next L1 origin. Re-attempting on next tick." - ); - return Ok(None); - } - }; - - if unsafe_head.l1_origin.hash != l1_origin.parent_hash && - unsafe_head.l1_origin.hash != l1_origin.hash - { - warn!( - target: "sequencer", - l1_origin = ?l1_origin, - unsafe_head_hash = %unsafe_head.l1_origin.hash, - unsafe_head_l1_origin = ?unsafe_head.l1_origin, - "Cannot build new L2 block on inconsistent L1 origin, resetting engine" - ); - self.engine_client.reset_engine_forkchoice().await?; - return Ok(None); - } - Ok(Some(l1_origin)) - } - - /// Builds the OpAttributesWithParent for the next block to build. If None is returned, it - /// indicates that no attributes could be built at this time but future attempts may be made. 
- async fn build_attributes( - &mut self, - unsafe_head: L2BlockInfo, - l1_origin: BlockInfo, - ) -> Result<Option<OpAttributesWithParent>, SequencerActorError> { - let mut attributes = match self - .attributes_builder - .prepare_payload_attributes(unsafe_head, l1_origin.id()) - .await - { - Ok(attrs) => attrs, - Err(PipelineErrorKind::Temporary(_)) => { - // Temporary error - retry on next tick. - return Ok(None); - } - Err(PipelineErrorKind::Reset(_)) => { - if let Err(err) = self.engine_client.reset_engine_forkchoice().await { - error!(target: "sequencer", ?err, "Failed to reset engine"); - return Err(SequencerActorError::ChannelClosed); - } - - warn!( - target: "sequencer", - "Resetting engine due to pipeline error while preparing payload attributes" - ); - return Ok(None); - } - Err(err @ PipelineErrorKind::Critical(_)) => { - error!(target: "sequencer", ?err, "Failed to prepare payload attributes"); - return Err(err.into()); - } - }; - - attributes.no_tx_pool = Some(!self.should_use_tx_pool(l1_origin, &attributes)); - - let attrs_with_parent = OpAttributesWithParent::new(attributes, unsafe_head, None, false); - Ok(Some(attrs_with_parent)) - } - - /// Determines, for the provided L1 origin block and payload attributes being constructed, if - /// transaction pool transactions should be enabled. - fn should_use_tx_pool(&self, l1_origin: BlockInfo, attributes: &OpPayloadAttributes) -> bool { - if self.in_recovery_mode { - warn!(target: "sequencer", "Sequencer is in recovery mode, producing empty block"); - return false; - } - - // If the next L2 block is beyond the sequencer drift threshold, we must produce an empty - // block. - if attributes.payload_attributes.timestamp > - l1_origin.timestamp + self.rollup_config.max_sequencer_drift(l1_origin.timestamp) - { - return false; - } - - // Do not include transactions in the first Ecotone block. 
- if self.rollup_config.is_first_ecotone_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing ecotone upgrade block"); - return false; - } - - // Do not include transactions in the first Fjord block. - if self.rollup_config.is_first_fjord_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing fjord upgrade block"); - return false; - } - - // Do not include transactions in the first Granite block. - if self.rollup_config.is_first_granite_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing granite upgrade block"); - return false; - } - - // Do not include transactions in the first Holocene block. - if self.rollup_config.is_first_holocene_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing holocene upgrade block"); - return false; - } - - // Do not include transactions in the first Isthmus block. - if self.rollup_config.is_first_isthmus_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing isthmus upgrade block"); - return false; - } - - // Do not include transactions in the first Jovian block. - // See: `<https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/derivation.md#activation-block-rules>` - if self.rollup_config.is_first_jovian_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing jovian upgrade block"); - return false; - } - - // Do not include transactions in the first Interop block. - if self.rollup_config.is_first_interop_block(attributes.payload_attributes.timestamp) { - info!(target: "sequencer", "Sequencing interop upgrade block"); - return false; - } - - // Transaction pool transactions are enabled if none of the reasons to disable are satisfied - // above. - true - } - - /// Schedules the initial engine reset request and waits for the unsafe head to be updated. 
- async fn schedule_initial_reset(&mut self) -> Result<(), SequencerActorError> { - // Reset the engine, in order to initialize the engine state. - // NB: this call waits for confirmation that the reset succeeded and we can proceed with - // post-reset logic. - self.engine_client.reset_engine_forkchoice().await.map_err(|err| { - error!(target: "sequencer", ?err, "Failed to send reset request to engine"); - err.into() - }) - } -} - -#[async_trait] -impl< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, -> NodeActor - for SequencerActor< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, - > -where - AttributesBuilder_: AttributesBuilder + Sync + 'static, - Conductor_: Conductor + Sync + 'static, - OriginSelector_: OriginSelector + Sync + 'static, - SequencerEngineClient_: SequencerEngineClient + Sync + 'static, - UnsafePayloadGossipClient_: UnsafePayloadGossipClient + Sync + 'static, -{ - type Error = SequencerActorError; - type StartData = (); - - async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { - let mut build_ticker = - tokio::time::interval(Duration::from_secs(self.rollup_config.block_time)); - - self.update_metrics(); - - // Reset the engine state prior to beginning block building. - self.schedule_initial_reset().await?; - - let mut next_payload_to_seal: Option<UnsealedPayloadHandle> = None; - let mut last_seal_duration = Duration::from_secs(0); - loop { - select! { - // We are using a biased select here to ensure that the admin queries are given priority over the block building task. - // This is important to limit the occurrence of race conditions where a stopped query is received when a sequencer is building a new block. - biased; - _ = self.cancellation_token.cancelled() => { - info!( - target: "sequencer", - "Received shutdown signal. Exiting sequencer task." 
- ); - return Ok(()); - } - Some(query) = self.admin_api_rx.recv() => { - let active_before = self.is_active; - - self.handle_admin_query(query).await; - - // immediately attempt to build a block if the sequencer was just started - if !active_before && self.is_active { - build_ticker.reset_immediately(); - } - } - // The sequencer must be active to build new blocks. - _ = build_ticker.tick(), if self.is_active => { - - match self.seal_last_and_start_next(next_payload_to_seal.as_ref()).await { - Ok(res) => { - next_payload_to_seal = res.unsealed_payload_handle; - last_seal_duration = res.seal_duration; - }, - Err(SequencerActorError::EngineError(EngineClientError::SealError(err))) => { - if is_seal_task_err_fatal(&err) { - error!(target: "sequencer", err=?err, "Critical seal task error occurred"); - self.cancellation_token.cancel(); - return Err(SequencerActorError::EngineError(EngineClientError::SealError(err))); - } else { - next_payload_to_seal = None; - } - }, - Err(other_err) => { - error!(target: "sequencer", err = ?other_err, "Unexpected error building or sealing payload"); - self.cancellation_token.cancel(); - return Err(other_err); - } - } - - if let Some(ref payload) = next_payload_to_seal { - let next_block_seconds = payload.attributes_with_parent.parent().block_info.timestamp.saturating_add(self.rollup_config.block_time); - // next block time is last + block_time - time it takes to seal. 
- let next_block_time = UNIX_EPOCH + Duration::from_secs(next_block_seconds) - last_seal_duration; - match next_block_time.duration_since(SystemTime::now()) { - Ok(duration) => build_ticker.reset_after(duration), - Err(_) => build_ticker.reset_immediately(), - }; - } else { - build_ticker.reset_immediately(); - } - } - } - } - } -} - -impl< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, -> CancellableContext - for SequencerActor< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, - > -where - AttributesBuilder_: AttributesBuilder, - Conductor_: Conductor, - OriginSelector_: OriginSelector, - SequencerEngineClient_: SequencerEngineClient, - UnsafePayloadGossipClient_: UnsafePayloadGossipClient, -{ - fn cancelled(&self) -> WaitForCancellationFuture<'_> { - self.cancellation_token.cancelled() - } -} - -// Determines whether the provided [`SealTaskError`] is fatal for the sequencer. -// -// NB: We could use `err.severity()`, but that gives EngineActor control over this classification. -// `SequencerActor` may have different interpretations of severity, and it is not clear when making -// a change in that area of the codebase that it will affect this area. When a new task error is -// added, this approach guarantees compilation will fail until it is handled here. 
-fn is_seal_task_err_fatal(err: &SealTaskError) -> bool { - match err { - SealTaskError::PayloadInsertionFailed(insert_err) => match &**insert_err { - InsertTaskError::ForkchoiceUpdateFailed(synchronize_error) => match synchronize_error { - SynchronizeTaskError::FinalizedAheadOfUnsafe(_, _) => true, - SynchronizeTaskError::ForkchoiceUpdateFailed(_) => false, - SynchronizeTaskError::InvalidForkchoiceState => false, - SynchronizeTaskError::UnexpectedPayloadStatus(_) => false, - }, - InsertTaskError::FromBlockError(_) => true, - InsertTaskError::InsertFailed(_) => false, - InsertTaskError::UnexpectedPayloadStatus(_) => false, - InsertTaskError::L2BlockInfoConstruction(_) => true, - }, - SealTaskError::GetPayloadFailed(_) => false, - SealTaskError::DepositOnlyPayloadFailed => true, - SealTaskError::DepositOnlyPayloadReattemptFailed => true, - SealTaskError::HoloceneInvalidFlush => false, - SealTaskError::FromBlock(_) => true, - SealTaskError::MpscSend(_) => true, - SealTaskError::ClockWentBackwards => true, - SealTaskError::UnsafeHeadChangedSinceBuild => false, - } -} diff --git a/kona/crates/node/service/src/actors/sequencer/engine_client.rs b/kona/crates/node/service/src/actors/sequencer/engine_client.rs deleted file mode 100644 index 4b49aad6b00..00000000000 --- a/kona/crates/node/service/src/actors/sequencer/engine_client.rs +++ /dev/null @@ -1,139 +0,0 @@ -use crate::{ - EngineClientError, EngineClientResult, - actors::engine::{BuildRequest, EngineActorRequest, ResetRequest, SealRequest}, -}; -use alloy_rpc_types_engine::PayloadId; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use std::fmt::Debug; -use tokio::sync::{mpsc, watch}; - -/// Trait to be used by the Sequencer to interact with the engine, abstracting communication -/// mechanism. 
-#[cfg_attr(test, mockall::automock)] -#[async_trait] -pub trait SequencerEngineClient: Debug + Send + Sync { - /// Resets the engine's forkchoice, awaiting confirmation that it succeeded or returning the - /// error in performing the reset. - async fn reset_engine_forkchoice(&self) -> EngineClientResult<()>; - - /// Starts building a block with the provided attributes. - /// - /// Returns a `PayloadId` that can be used to seal the block later. - async fn start_build_block( - &self, - attributes: OpAttributesWithParent, - ) -> EngineClientResult<PayloadId>; - - /// Seals and canonicalizes a previously started block. - /// - /// Takes a `PayloadId` from a previous `start_build_block` call and returns - /// the finalized execution payload envelope. - async fn seal_and_canonicalize_block( - &self, - payload_id: PayloadId, - attributes: OpAttributesWithParent, - ) -> EngineClientResult<OpExecutionPayloadEnvelope>; - - /// Returns the current unsafe head [`L2BlockInfo`]. - async fn get_unsafe_head(&self) -> EngineClientResult<L2BlockInfo>; -} - -/// Queue-based implementation of the [`SequencerEngineClient`] trait. This handles all -/// channel-based communication. -#[derive(Constructor, Debug)] -pub struct QueuedSequencerEngineClient { - /// A channel to use to send the EngineActor requests. - pub engine_actor_request_tx: mpsc::Sender<EngineActorRequest>, - /// A channel to receive the latest unsafe head [`L2BlockInfo`]. 
- pub unsafe_head_rx: watch::Receiver<L2BlockInfo>, -} - -#[async_trait] -impl SequencerEngineClient for QueuedSequencerEngineClient { - async fn get_unsafe_head(&self) -> EngineClientResult<L2BlockInfo> { - Ok(*self.unsafe_head_rx.borrow()) - } - - async fn reset_engine_forkchoice(&self) -> EngineClientResult<()> { - let (result_tx, mut result_rx) = mpsc::channel(1); - - info!(target: "sequencer", "Sending reset request to engine."); - self.engine_actor_request_tx - .send(EngineActorRequest::ResetRequest(Box::new(ResetRequest { result_tx }))) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; - - result_rx - .recv() - .await - .inspect(|_| info!(target: "sequencer", "Engine reset successfully.")) - .ok_or_else(|| { - error!(target: "block_engine", "Failed to receive built payload"); - EngineClientError::ResponseError("response channel closed.".to_string()) - })? - } - - async fn start_build_block( - &self, - attributes: OpAttributesWithParent, - ) -> EngineClientResult<PayloadId> { - let (payload_id_tx, mut payload_id_rx) = mpsc::channel(1); - - trace!(target: "sequencer", "Sending start build request to engine."); - if self - .engine_actor_request_tx - .send(EngineActorRequest::BuildRequest(Box::new(BuildRequest { - attributes, - result_tx: payload_id_tx, - }))) - .await - .is_err() - { - return Err(EngineClientError::RequestError("request channel closed.".to_string())); - } - - payload_id_rx.recv() - .await - .inspect(|payload_id| trace!(target: "sequencer", ?payload_id, "Start build request successfully.")) - .ok_or_else(|| { - error!(target: "block_engine", "Failed to receive payload for initiated block build"); - EngineClientError::ResponseError("response channel closed.".to_string()) - }) - } - - async fn seal_and_canonicalize_block( - &self, - payload_id: PayloadId, - attributes: OpAttributesWithParent, - ) -> EngineClientResult<OpExecutionPayloadEnvelope> { - let (result_tx, mut result_rx) = mpsc::channel(1); 
- - trace!(target: "sequencer", ?attributes, "Sending seal request to engine."); - self.engine_actor_request_tx - .send(EngineActorRequest::SealRequest(Box::new(SealRequest { - payload_id, - attributes, - result_tx, - }))) - .await - .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; - - match result_rx.recv().await { - Some(Ok(payload)) => { - trace!(target: "sequencer", ?payload, "Seal succeeded."); - Ok(payload) - } - Some(Err(err)) => { - info!(target: "sequencer", ?err, "Seal failed."); - Err(EngineClientError::SealError(err)) - } - None => { - error!(target: "block_engine", "Failed to receive built payload"); - Err(EngineClientError::ResponseError("response channel closed.".to_string())) - } - } - } -} diff --git a/kona/crates/node/service/src/actors/sequencer/metrics.rs b/kona/crates/node/service/src/actors/sequencer/metrics.rs deleted file mode 100644 index 49fc0db02d5..00000000000 --- a/kona/crates/node/service/src/actors/sequencer/metrics.rs +++ /dev/null @@ -1,77 +0,0 @@ -use std::time::Duration; - -use crate::{ - Conductor, OriginSelector, SequencerActor, SequencerEngineClient, UnsafePayloadGossipClient, -}; -use kona_derive::AttributesBuilder; - -/// SequencerActor metrics-related method implementations. -impl< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, -> - SequencerActor< - AttributesBuilder_, - Conductor_, - OriginSelector_, - SequencerEngineClient_, - UnsafePayloadGossipClient_, - > -where - AttributesBuilder_: AttributesBuilder, - Conductor_: Conductor, - OriginSelector_: OriginSelector, - SequencerEngineClient_: SequencerEngineClient, - UnsafePayloadGossipClient_: UnsafePayloadGossipClient, -{ - /// Updates the metrics for the sequencer actor. - pub(super) fn update_metrics(&self) { - // no-op if disabled. 
- #[cfg(feature = "metrics")] - { - let state_flags: [(&str, String); 2] = [ - ("active", self.is_active.to_string()), - ("recovery", self.in_recovery_mode.to_string()), - ]; - - let gauge = metrics::gauge!(crate::Metrics::SEQUENCER_STATE, &state_flags); - gauge.set(1); - } - } -} - -#[inline] -pub(super) fn update_attributes_build_duration_metrics(duration: Duration) { - // Log the attributes build duration, if metrics are enabled. - kona_macros::set!(gauge, crate::Metrics::SEQUENCER_ATTRIBUTES_BUILDER_DURATION, duration); -} - -#[inline] -pub(super) fn update_conductor_commitment_duration_metrics(duration: Duration) { - kona_macros::set!(gauge, crate::Metrics::SEQUENCER_CONDUCTOR_COMMITMENT_DURATION, duration); -} - -#[inline] -pub(super) fn update_block_build_duration_metrics(duration: Duration) { - kona_macros::set!( - gauge, - crate::Metrics::SEQUENCER_BLOCK_BUILDING_START_TASK_DURATION, - duration - ); -} - -#[inline] -pub(super) fn update_seal_duration_metrics(duration: Duration) { - // Log the block building seal task duration, if metrics are enabled. - kona_macros::set!(gauge, crate::Metrics::SEQUENCER_BLOCK_BUILDING_SEAL_TASK_DURATION, duration); -} - -#[inline] -pub(super) fn update_total_transactions_sequenced(transaction_count: u64) { - #[cfg(feature = "metrics")] - metrics::counter!(crate::Metrics::SEQUENCER_TOTAL_TRANSACTIONS_SEQUENCED) - .increment(transaction_count); -} diff --git a/kona/crates/node/service/src/actors/traits.rs b/kona/crates/node/service/src/actors/traits.rs deleted file mode 100644 index 38fdf47d126..00000000000 --- a/kona/crates/node/service/src/actors/traits.rs +++ /dev/null @@ -1,28 +0,0 @@ -//! [NodeActor] trait. - -use async_trait::async_trait; -use tokio_util::sync::WaitForCancellationFuture; - -/// The communication context used by the actor. -pub trait CancellableContext: Send { - /// Returns a future that resolves when the actor is cancelled. 
- fn cancelled(&self) -> WaitForCancellationFuture<'_>; -} - -/// The [NodeActor] is an actor-like service for the node. -/// -/// Actors may: -/// - Handle incoming messages. -/// - Perform background tasks. -/// - Emit new events for other actors to process. -#[async_trait] -pub trait NodeActor: Send + 'static { - /// The error type for the actor. - type Error: std::fmt::Debug; - /// The type necessary to pass to the start function. - /// This is the result of - type StartData: Sized; - - /// Starts the actor. - async fn start(self, start_context: Self::StartData) -> Result<(), Self::Error>; -} diff --git a/kona/crates/node/service/src/lib.rs b/kona/crates/node/service/src/lib.rs deleted file mode 100644 index bc73912f786..00000000000 --- a/kona/crates/node/service/src/lib.rs +++ /dev/null @@ -1,46 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -#[macro_use] -extern crate tracing; - -mod service; -pub use service::{ - DerivationDelegateConfig, InteropMode, L1Config, L1ConfigBuilder, NodeMode, RollupNode, - RollupNodeBuilder, -}; - -mod actors; -pub use actors::{ - BlockStream, BuildRequest, CancellableContext, Conductor, ConductorClient, ConductorError, - DelayedL1OriginSelectorProvider, DelegateDerivationActor, DerivationActor, - DerivationActorRequest, DerivationClientError, DerivationClientResult, - DerivationDelegateClient, DerivationDelegateClientError, DerivationEngineClient, - DerivationError, DerivationState, DerivationStateMachine, DerivationStateTransitionError, - DerivationStateUpdate, EngineActor, EngineActorRequest, EngineClientError, EngineClientResult, - EngineConfig, EngineDerivationClient, EngineError, EngineProcessingRequest, 
EngineProcessor, - EngineRequestReceiver, EngineRpcProcessor, EngineRpcRequest, EngineRpcRequestReceiver, - L1OriginSelector, L1OriginSelectorError, L1OriginSelectorProvider, L1WatcherActor, - L1WatcherActorError, L1WatcherDerivationClient, NetworkActor, NetworkActorError, - NetworkBuilder, NetworkBuilderError, NetworkConfig, NetworkDriver, NetworkDriverError, - NetworkEngineClient, NetworkHandler, NetworkInboundData, NodeActor, OriginSelector, - QueuedDerivationEngineClient, QueuedEngineDerivationClient, QueuedEngineRpcClient, - QueuedL1WatcherDerivationClient, QueuedNetworkEngineClient, QueuedSequencerAdminAPIClient, - QueuedSequencerEngineClient, QueuedUnsafePayloadGossipClient, ResetRequest, - RollupBoostAdminApiClient, RollupBoostHealthRpcClient, RpcActor, RpcActorError, RpcContext, - SealRequest, SequencerActor, SequencerActorError, SequencerAdminQuery, SequencerConfig, - SequencerEngineClient, UnsafePayloadGossipClient, UnsafePayloadGossipClientError, -}; - -mod metrics; -pub use metrics::Metrics; - -#[cfg(test)] -pub use actors::{ - MockConductor, MockOriginSelector, MockSequencerEngineClient, MockUnsafePayloadGossipClient, -}; diff --git a/kona/crates/node/service/src/service/node.rs b/kona/crates/node/service/src/service/node.rs deleted file mode 100644 index 3690858eaeb..00000000000 --- a/kona/crates/node/service/src/service/node.rs +++ /dev/null @@ -1,425 +0,0 @@ -//! Contains the [`RollupNode`] implementation. 
-use crate::{ - ConductorClient, DelayedL1OriginSelectorProvider, DelegateDerivationActor, DerivationActor, - DerivationDelegateClient, DerivationError, EngineActor, EngineActorRequest, EngineConfig, - EngineProcessor, EngineRpcProcessor, InteropMode, L1OriginSelector, L1WatcherActor, - NetworkActor, NetworkBuilder, NetworkConfig, NodeActor, NodeMode, QueuedDerivationEngineClient, - QueuedEngineDerivationClient, QueuedEngineRpcClient, QueuedL1WatcherDerivationClient, - QueuedNetworkEngineClient, QueuedSequencerAdminAPIClient, QueuedSequencerEngineClient, - RollupBoostAdminApiClient, RollupBoostHealthRpcClient, RpcActor, RpcContext, SequencerActor, - SequencerConfig, - actors::{BlockStream, NetworkInboundData, QueuedUnsafePayloadGossipClient}, -}; -use alloy_eips::BlockNumberOrTag; -use alloy_provider::RootProvider; -use kona_derive::StatefulAttributesBuilder; -use kona_engine::{Engine, EngineState, OpEngineClient}; -use kona_genesis::{L1ChainConfig, RollupConfig}; -use kona_protocol::L2BlockInfo; -use kona_providers_alloy::{ - AlloyChainProvider, AlloyL2ChainProvider, OnlineBeaconClient, OnlineBlobProvider, - OnlinePipeline, -}; -use kona_rpc::RpcBuilder; -use op_alloy_network::Optimism; -use std::{ops::Not as _, sync::Arc, time::Duration}; -use tokio::sync::{mpsc, watch}; -use tokio_util::sync::CancellationToken; - -const DERIVATION_PROVIDER_CACHE_SIZE: usize = 1024; -const HEAD_STREAM_POLL_INTERVAL: u64 = 4; -const FINALIZED_STREAM_POLL_INTERVAL: u64 = 60; - -/// The configuration for the L1 chain. -#[derive(Debug, Clone)] -pub struct L1Config { - /// The L1 chain configuration. - pub chain_config: Arc<L1ChainConfig>, - /// Whether to trust the L1 RPC. - pub trust_rpc: bool, - /// The L1 beacon client. - pub beacon_client: OnlineBeaconClient, - /// The L1 engine provider. - pub engine_provider: RootProvider, -} - -/// The standard implementation of the [RollupNode] service, using the governance approved OP Stack -/// configuration of components. 
-#[derive(Debug)] -pub struct RollupNode { - /// The rollup configuration. - pub(crate) config: Arc<RollupConfig>, - /// The L1 configuration. - pub(crate) l1_config: L1Config, - /// The interop mode for the node. - pub(crate) interop_mode: InteropMode, - /// The L2 EL provider. - pub(crate) l2_provider: RootProvider<Optimism>, - /// Whether to trust the L2 RPC. - pub(crate) l2_trust_rpc: bool, - /// The [`EngineConfig`] for the node. - pub(crate) engine_config: EngineConfig, - /// The [`RpcBuilder`] for the node. - pub(crate) rpc_builder: Option<RpcBuilder>, - /// The P2P [`NetworkConfig`] for the node. - pub(crate) p2p_config: NetworkConfig, - /// The [`SequencerConfig`] for the node. - pub(crate) sequencer_config: SequencerConfig, - /// Optional derivation delegate provider. - pub(crate) derivation_delegate_provider: Option<DerivationDelegateClient>, -} - -/// A RollupNode-level derivation actor wrapper. -/// -/// This type selects the concrete derivation actor implementation -/// based on RollupNode configuration. -/// -/// It is not intended to be generic or reusable outside the -/// RollupNode wiring logic. -enum ConfiguredDerivationActor { - Delegate(Box<DelegateDerivationActor<QueuedDerivationEngineClient>>), - Normal(Box<DerivationActor<QueuedDerivationEngineClient, OnlinePipeline>>), -} - -#[async_trait::async_trait] -impl NodeActor for ConfiguredDerivationActor -where - DelegateDerivationActor<QueuedDerivationEngineClient>: - NodeActor<StartData = (), Error = DerivationError>, - DerivationActor<QueuedDerivationEngineClient, OnlinePipeline>: - NodeActor<StartData = (), Error = DerivationError>, -{ - type StartData = (); - type Error = DerivationError; - - async fn start(self, ctx: ()) -> Result<(), Self::Error> { - match self { - Self::Delegate(a) => a.start(ctx).await, - Self::Normal(a) => a.start(ctx).await, - } - } -} - -impl RollupNode { - /// The mode of operation for the node. 
- const fn mode(&self) -> NodeMode { - self.engine_config.mode - } - - /// Creates a network builder for the node. - fn network_builder(&self) -> NetworkBuilder { - NetworkBuilder::from(self.p2p_config.clone()) - } - - /// Returns an engine builder for the node. - fn engine_config(&self) -> EngineConfig { - self.engine_config.clone() - } - - /// Returns an rpc builder for the node. - fn rpc_builder(&self) -> Option<RpcBuilder> { - self.rpc_builder.clone() - } - - /// Returns the sequencer builder for the node. - fn create_attributes_builder( - &self, - ) -> StatefulAttributesBuilder<AlloyChainProvider, AlloyL2ChainProvider> { - let l1_derivation_provider = AlloyChainProvider::new_with_trust( - self.l1_config.engine_provider.clone(), - DERIVATION_PROVIDER_CACHE_SIZE, - self.l1_config.trust_rpc, - ); - let l2_derivation_provider = AlloyL2ChainProvider::new_with_trust( - self.l2_provider.clone(), - self.config.clone(), - DERIVATION_PROVIDER_CACHE_SIZE, - self.l2_trust_rpc, - ); - - StatefulAttributesBuilder::new( - self.config.clone(), - self.l1_config.chain_config.clone(), - l2_derivation_provider, - l1_derivation_provider, - ) - } - - async fn create_pipeline(&self) -> OnlinePipeline { - // Create the caching L1/L2 EL providers for derivation. 
- let l1_derivation_provider = AlloyChainProvider::new_with_trust( - self.l1_config.engine_provider.clone(), - DERIVATION_PROVIDER_CACHE_SIZE, - self.l1_config.trust_rpc, - ); - let l2_derivation_provider = AlloyL2ChainProvider::new_with_trust( - self.l2_provider.clone(), - self.config.clone(), - DERIVATION_PROVIDER_CACHE_SIZE, - self.l2_trust_rpc, - ); - - match self.interop_mode { - InteropMode::Polled => OnlinePipeline::new_polled( - self.config.clone(), - self.l1_config.chain_config.clone(), - OnlineBlobProvider::init(self.l1_config.beacon_client.clone()).await, - l1_derivation_provider, - l2_derivation_provider, - ), - InteropMode::Indexed => OnlinePipeline::new_indexed( - self.config.clone(), - self.l1_config.chain_config.clone(), - OnlineBlobProvider::init(self.l1_config.beacon_client.clone()).await, - l1_derivation_provider, - l2_derivation_provider, - ), - } - } - - /// Helper function to assemble the [`EngineActor`] since there are many structs created that - /// are not relevant to other actors or logic. - /// Note: ignoring complex type warning. This type only pertains to this function, so it is - /// better to have the full type here than have to piece it together from multiple type defs. 
- #[allow(clippy::type_complexity)] - fn create_engine_actor( - &self, - cancellation_token: CancellationToken, - engine_request_rx: mpsc::Receiver<EngineActorRequest>, - derivation_client: QueuedEngineDerivationClient, - unsafe_head_tx: watch::Sender<L2BlockInfo>, - ) -> Result< - EngineActor< - EngineProcessor< - OpEngineClient<RootProvider, RootProvider<Optimism>>, - QueuedEngineDerivationClient, - >, - EngineRpcProcessor<OpEngineClient<RootProvider, RootProvider<Optimism>>>, - >, - String, - > { - let engine_state = EngineState::default(); - let (engine_state_tx, engine_state_rx) = watch::channel(engine_state); - let (engine_queue_length_tx, engine_queue_length_rx) = watch::channel(0); - let engine = Engine::new(engine_state, engine_state_tx, engine_queue_length_tx); - - let engine_client = Arc::new(self.engine_config().build_engine_client().map_err(|e| { - error!(target: "service", error = ?e, "engine client build failed"); - format!("Engine client build failed: {e:?}") - })?); - - let engine_processor = EngineProcessor::new( - engine_client.clone(), - self.config.clone(), - derivation_client, - engine, - if self.mode().is_sequencer() { Some(unsafe_head_tx) } else { None }, - ); - - let engine_rpc_processor = EngineRpcProcessor::new( - engine_client.clone(), - engine_client.rollup_boost.clone(), - self.config.clone(), - engine_state_rx, - engine_queue_length_rx, - ); - - Ok(EngineActor::new( - cancellation_token, - engine_request_rx, - engine_processor, - engine_rpc_processor, - )) - } - - /// Starts the rollup node service. - /// - /// The rollup node, in validator mode, listens to two sources of information to sync the L2 - /// chain: - /// - /// 1. The data availability layer, with a watcher that listens for new updates. L2 inputs (L2 - /// transaction batches + deposits) are then derived from the DA layer. - /// 2. The L2 sequencer, which produces unsafe L2 blocks and sends them to the network over p2p - /// gossip. 
- /// - /// From these two sources, the node imports `unsafe` blocks from the L2 sequencer, `safe` - /// blocks from the L2 derivation pipeline into the L2 execution layer via the Engine API, - /// and finalizes `safe` blocks that it has derived when L1 finalized block updates are - /// received. - /// - /// In sequencer mode, the node is responsible for producing unsafe L2 blocks and sending them - /// to the network over p2p gossip. The node also listens for L1 finalized block updates and - /// finalizes `safe` blocks that it has derived when L1 finalized block updates are - /// received. - pub async fn start(&self) -> Result<(), String> { - // Create a global cancellation token for graceful shutdown of tasks. - let cancellation = CancellationToken::new(); - - let (derivation_actor_request_tx, derivation_actor_request_rx) = mpsc::channel(1024); - - let (engine_actor_request_tx, engine_actor_request_rx) = mpsc::channel(1024); - let (unsafe_head_tx, unsafe_head_rx) = watch::channel(L2BlockInfo::default()); - - let engine_actor = self.create_engine_actor( - cancellation.clone(), - engine_actor_request_rx, - QueuedEngineDerivationClient::new(derivation_actor_request_tx.clone()), - unsafe_head_tx, - )?; - - // Select the concrete derivation actor implementation based on - // RollupNode configuration. 
- let derivation: ConfiguredDerivationActor = if let Some(provider) = - self.derivation_delegate_provider.clone() - { - // L1 Provider for sanity checking Derivation Delegation - let l1_provider = AlloyChainProvider::new( - self.l1_config.engine_provider.clone(), - DERIVATION_PROVIDER_CACHE_SIZE, - ); - ConfiguredDerivationActor::Delegate(Box::new(DelegateDerivationActor::<_>::new( - QueuedDerivationEngineClient { - engine_actor_request_tx: engine_actor_request_tx.clone(), - }, - cancellation.clone(), - derivation_actor_request_rx, - provider, - l1_provider, - ))) - } else { - ConfiguredDerivationActor::Normal(Box::new(DerivationActor::<_, OnlinePipeline>::new( - QueuedDerivationEngineClient { - engine_actor_request_tx: engine_actor_request_tx.clone(), - }, - cancellation.clone(), - derivation_actor_request_rx, - self.create_pipeline().await, - ))) - }; - - // Create the p2p actor. - let ( - NetworkInboundData { - signer, - p2p_rpc: network_rpc, - gossip_payload_tx, - admin_rpc: net_admin_rpc, - }, - network, - ) = NetworkActor::new( - QueuedNetworkEngineClient { engine_actor_request_tx: engine_actor_request_tx.clone() }, - cancellation.clone(), - self.network_builder(), - ); - - let (l1_head_updates_tx, l1_head_updates_rx) = watch::channel(None); - let delayed_l1_provider = DelayedL1OriginSelectorProvider::new( - self.l1_config.engine_provider.clone(), - l1_head_updates_rx, - self.sequencer_config.l1_conf_delay, - ); - - let delayed_origin_selector = - L1OriginSelector::new(self.config.clone(), delayed_l1_provider); - - // Conditionally add conductor if configured - let conductor = - self.sequencer_config.conductor_rpc_url.clone().map(ConductorClient::new_http); - - // Create the L1 Watcher actor - - // A channel to send queries about the state of L1. 
- let (l1_query_tx, l1_query_rx) = mpsc::channel(1024); - - let head_stream = BlockStream::new_as_stream( - self.l1_config.engine_provider.clone(), - BlockNumberOrTag::Latest, - Duration::from_secs(HEAD_STREAM_POLL_INTERVAL), - )?; - let finalized_stream = BlockStream::new_as_stream( - self.l1_config.engine_provider.clone(), - BlockNumberOrTag::Finalized, - Duration::from_secs(FINALIZED_STREAM_POLL_INTERVAL), - )?; - - // Create the [`L1WatcherActor`]. Previously known as the DA watcher actor. - let l1_watcher = L1WatcherActor::new( - self.config.clone(), - self.l1_config.engine_provider.clone(), - l1_query_rx, - l1_head_updates_tx.clone(), - QueuedL1WatcherDerivationClient { derivation_actor_request_tx }, - signer, - cancellation.clone(), - head_stream, - finalized_stream, - ); - - // Create the sequencer if needed - let (sequencer_actor, sequencer_admin_client) = if self.mode().is_sequencer() { - let sequencer_engine_client = QueuedSequencerEngineClient { - engine_actor_request_tx: engine_actor_request_tx.clone(), - unsafe_head_rx, - }; - - // Create the admin API channel - let (sequencer_admin_api_tx, sequencer_admin_api_rx) = mpsc::channel(1024); - let queued_gossip_client = - QueuedUnsafePayloadGossipClient::new(gossip_payload_tx.clone()); - - ( - Some(SequencerActor { - admin_api_rx: sequencer_admin_api_rx, - attributes_builder: self.create_attributes_builder(), - cancellation_token: cancellation.clone(), - conductor, - engine_client: sequencer_engine_client, - is_active: self.sequencer_config.sequencer_stopped.not(), - in_recovery_mode: self.sequencer_config.sequencer_recovery_mode, - origin_selector: delayed_origin_selector, - rollup_config: self.config.clone(), - unsafe_payload_gossip_client: queued_gossip_client, - }), - Some(QueuedSequencerAdminAPIClient::new(sequencer_admin_api_tx)), - ) - } else { - (None, None) - }; - - // Create the RPC server actor. 
- let rpc = self.rpc_builder().map(|b| { - RpcActor::new( - b, - QueuedEngineRpcClient::new(engine_actor_request_tx.clone()), - RollupBoostAdminApiClient { - engine_actor_request_tx: engine_actor_request_tx.clone(), - }, - RollupBoostHealthRpcClient { - engine_actor_request_tx: engine_actor_request_tx.clone(), - }, - sequencer_admin_client, - ) - }); - - crate::service::spawn_and_wait!( - cancellation, - actors = [ - rpc.map(|r| ( - r, - RpcContext { - cancellation: cancellation.clone(), - p2p_network: network_rpc, - network_admin: net_admin_rpc, - l1_watcher_queries: l1_query_tx, - } - )), - sequencer_actor.map(|s| (s, ())), - Some((network, ())), - Some((l1_watcher, ())), - Some((derivation, ())), - Some((engine_actor, ())), - ] - ); - Ok(()) - } -} diff --git a/kona/crates/node/service/src/service/util.rs b/kona/crates/node/service/src/service/util.rs deleted file mode 100644 index d9d568595a1..00000000000 --- a/kona/crates/node/service/src/service/util.rs +++ /dev/null @@ -1,103 +0,0 @@ -//! Utilities for the rollup node service, internal to the crate. - -/// Spawns a set of parallel actors in a [JoinSet], and cancels all actors if any of them fail. The -/// type of the error in the [NodeActor]s is erased to avoid having to specify a common error type -/// between actors. -/// -/// Actors are passed in as optional arguments, in case a given actor is not needed. -/// -/// This macro also handles OS shutdown signals (SIGTERM, SIGINT) and triggers graceful shutdown -/// when received. -/// -/// [JoinSet]: tokio::task::JoinSet -/// [NodeActor]: crate::NodeActor -macro_rules! spawn_and_wait { - ($cancellation:expr, actors = [$($actor:expr$(,)?)*]) => { - let mut task_handles = tokio::task::JoinSet::new(); - - // Check if the actor is present, and spawn it if it is. 
- $( - if let Some((actor, context)) = $actor { - let cancellation = $cancellation.clone(); - task_handles.spawn(async move { - // This guard ensures that the cancellation token is cancelled when the actor is - // dropped. This ensures that the actor is properly shut down. - // Note the underscore prefix: this is to signal that we don't use the guard anywhere, but - // *the compiler shouldn't optimize it away*. - // Note that using a simple `_` would not work here because it gets optimized away in - // release mode. - let _guard = cancellation.drop_guard(); - - if let Err(e) = actor.start(context).await { - return Err(format!("{e:?}")); - } - Ok(()) - }); - } - )* - - // Create the shutdown signal future - let shutdown = $crate::service::shutdown_signal(); - tokio::pin!(shutdown); - - loop { - tokio::select! { - _ = &mut shutdown => { - tracing::info!(target: "rollup_node", "Received shutdown signal, initiating graceful shutdown..."); - $cancellation.cancel(); - break; - } - result = task_handles.join_next() => { - match result { - Some(Ok(Ok(()))) => { /* Actor completed successfully */ } - Some(Ok(Err(e))) => { - tracing::error!(target: "rollup_node", "Critical error in sub-routine: {e}"); - // Cancel all tasks and gracefully shutdown. - $cancellation.cancel(); - return Err(e); - } - Some(Err(e)) => { - let error_msg = format!("Task join error: {e}"); - // Log the error and cancel all tasks. - tracing::error!(target: "rollup_node", "Task join error: {e}"); - // Cancel all tasks and gracefully shutdown. - $cancellation.cancel(); - return Err(error_msg); - } - None => break, // All tasks completed - } - } - } - } - }; -} - -// Export the `spawn_and_wait` macro for use in other modules. 
-pub(crate) use spawn_and_wait; - -/// Listens for OS shutdown signals (SIGTERM, SIGINT) -pub(crate) async fn shutdown_signal() { - let ctrl_c = async { - tokio::signal::ctrl_c().await.expect("failed to install Ctrl+C handler"); - }; - - #[cfg(unix)] - let terminate = async { - tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) - .expect("failed to install SIGTERM handler") - .recv() - .await; - }; - - #[cfg(not(unix))] - let terminate = std::future::pending::<()>(); - - tokio::select! { - _ = ctrl_c => { - tracing::info!(target: "rollup_node", "Received SIGINT (Ctrl+C)"); - }, - _ = terminate => { - tracing::info!(target: "rollup_node", "Received SIGTERM"); - }, - } -} diff --git a/kona/crates/node/service/tests/actors/network/mocks/builder.rs b/kona/crates/node/service/tests/actors/network/mocks/builder.rs deleted file mode 100644 index 618bfe5139f..00000000000 --- a/kona/crates/node/service/tests/actors/network/mocks/builder.rs +++ /dev/null @@ -1,128 +0,0 @@ -use std::net::{IpAddr, Ipv4Addr}; - -use alloy_chains::Chain; -use alloy_signer::k256; -use discv5::{ConfigBuilder, Enr, ListenConfig}; - -use crate::actors::network::TestNetwork; -use alloy_primitives::Address; -use async_trait::async_trait; -use kona_disc::LocalNode; -use kona_genesis::RollupConfig; -use kona_node_service::{ - EngineClientResult, NetworkActor, NetworkBuilder, NetworkEngineClient, NodeActor, -}; -use kona_peers::BootNode; -use kona_sources::BlockSigner; -use libp2p::{Multiaddr, identity::Keypair, multiaddr::Protocol}; -use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; -use rand::RngCore; -use tokio::sync::mpsc; -use tokio_util::sync::CancellationToken; -use tracing::error; - -pub(crate) struct TestNetworkBuilder { - chain_id: u64, - unsafe_block_signer: Address, - custom_keypair: Option<Keypair>, -} - -impl TestNetworkBuilder { - fn rollup_config(&self) -> RollupConfig { - RollupConfig { l2_chain_id: Chain::from_id(self.chain_id), ..Default::default() } - } 
- - pub(crate) fn new() -> Self { - let chain_id = rand::rng().next_u64(); - - Self { chain_id, unsafe_block_signer: Address::ZERO, custom_keypair: None } - } - - /// Sets a sequencer keypair for the network. - /// The next network built will be the sequencer's network. This will set the unsafe block - /// signer to the sequencer's address and the custom keypair to the sequencer's keypair. - /// This amounts to calling [`Self::with_unsafe_block_signer`] and [`Self::with_custom_keypair`] - /// sequentially. - pub(crate) fn set_sequencer(mut self) -> Self { - let sequencer_keypair = Keypair::generate_secp256k1(); - let secp256k1_key = sequencer_keypair.clone().try_into_secp256k1() - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. This is a bug since we only support secp256k1 keys: {e}")).unwrap() - .secret().to_bytes(); - let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. This is a bug since we only support secp256k1 keys: {e}")).unwrap(); - - self.custom_keypair = Some(sequencer_keypair); - self.unsafe_block_signer = Address::from_private_key(&local_node_key); - - self - } - - /// Minimal network configuration. - /// Only allows loopback addresses in the discovery table. - pub(crate) fn build(&mut self, bootnodes: Vec<Enr>) -> TestNetwork { - let keypair = self.custom_keypair.take().unwrap_or(Keypair::generate_secp256k1()); - - let secp256k1_key = keypair.clone().try_into_secp256k1() - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. This is a bug since we only support secp256k1 keys: {e}")).unwrap() - .secret().to_bytes(); - let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) - .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. 
This is a bug since we only support secp256k1 keys: {e}")).unwrap(); - - let node_addr = IpAddr::V4(Ipv4Addr::UNSPECIFIED); - - let discovery_config = ConfigBuilder::new(ListenConfig::from_ip(node_addr, 0)) - // Only allow loopback addresses. - .table_filter(|enr| { - let Some(ip) = enr.ip4() else { - return false; - }; - - ip.is_loopback() - }) - .build(); - - let mut gossip_multiaddr = Multiaddr::from(node_addr); - gossip_multiaddr.push(Protocol::Tcp(0)); - - // Create a new network actor. No external connections - let builder = NetworkBuilder::new( - // Create a new rollup config. We don't need to specify any of the fields. - self.rollup_config(), - self.unsafe_block_signer, - gossip_multiaddr, - keypair, - LocalNode::new(local_node_key.clone(), node_addr, 0, 0), - discovery_config, - Some(BlockSigner::Local(local_node_key.into())), - ) - .with_bootnodes(bootnodes.into_iter().map(Into::into).collect::<Vec<BootNode>>().into()); - - let (blocks_tx, blocks_rx) = mpsc::channel(1024); - let (inbound_data, actor) = NetworkActor::new( - ForwardingNetworkEngineClient { blocks_tx }, - CancellationToken::new(), - builder, - ); - - let handle = tokio::spawn(async move { actor.start(()).await }); - - TestNetwork { inbound_data, blocks_rx, handle } - } -} - -#[derive(Debug)] -struct ForwardingNetworkEngineClient { - blocks_tx: mpsc::Sender<OpExecutionPayloadEnvelope>, -} - -#[async_trait] -impl NetworkEngineClient for ForwardingNetworkEngineClient { - async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { - let _ = self - .blocks_tx - .send(block) - .await - .inspect_err(|e| error!(target: "net", "Failed to send block: {:?}", e)); - Ok(()) - } -} diff --git a/kona/crates/node/service/tests/actors/network/p2p.rs b/kona/crates/node/service/tests/actors/network/p2p.rs deleted file mode 100644 index 75e9a944818..00000000000 --- a/kona/crates/node/service/tests/actors/network/p2p.rs +++ /dev/null @@ -1,44 +0,0 @@ -use 
crate::actors::network::mocks::builder::TestNetworkBuilder; - -#[tokio::test(flavor = "multi_thread")] -async fn test_p2p_network_conn() -> anyhow::Result<()> { - let mut builder = TestNetworkBuilder::new(); - let network_1 = builder.build(vec![]); - let enr_1 = network_1.peer_enr().await?; - - let network_2 = builder.build(vec![enr_1]); - - network_2.is_connected_to_with_retries(&network_1).await?; - - network_1.is_connected_to_with_retries(&network_2).await?; - - Ok(()) -} - -#[tokio::test(flavor = "multi_thread")] -async fn test_large_network_conn() -> anyhow::Result<()> { - const NETWORKS: usize = 10; - - let mut builder = TestNetworkBuilder::new(); - - let (mut networks, mut bootnodes) = (vec![], vec![]); - - for _ in 0..NETWORKS { - let network = builder.build(bootnodes.clone()); - let enr = network.peer_enr().await?; - networks.push(network); - bootnodes.push(enr); - } - - for network in networks.iter() { - for other_network in networks.iter() { - if network.peer_id().await? == other_network.peer_id().await? { - continue; - } - - network.is_connected_to_with_retries(other_network).await?; - } - } - - Ok(()) -} diff --git a/kona/crates/node/service/tests/actors/network/sequencer.rs b/kona/crates/node/service/tests/actors/network/sequencer.rs deleted file mode 100644 index b864c9b5a56..00000000000 --- a/kona/crates/node/service/tests/actors/network/sequencer.rs +++ /dev/null @@ -1,78 +0,0 @@ -use crate::actors::{ - generator::{block_builder::PayloadVersion, seed::SEED_GENERATOR_BUILDER}, - network::mocks::builder::TestNetworkBuilder, -}; - -/// Test that we can properly gossip blocks to the sequencer. 
-#[tokio::test(flavor = "multi_thread")] -async fn test_sequencer_network_conn() -> anyhow::Result<()> { - let mut builder = TestNetworkBuilder::new().set_sequencer(); - - let sequencer_network = builder.build(vec![]); - let enr_1 = sequencer_network.peer_enr().await?; - - let mut validator_network = builder.build(vec![enr_1]); - - sequencer_network.is_connected_to_with_retries(&validator_network).await?; - - validator_network.is_connected_to_with_retries(&sequencer_network).await?; - - let mut seed_generator = SEED_GENERATOR_BUILDER.next_generator(); - - let envelope = seed_generator.random_valid_payload(PayloadVersion::V1)?; - - sequencer_network.inbound_data.gossip_payload_tx.send(envelope.clone()).await?; - - let block = - validator_network.blocks_rx.recv().await.ok_or(anyhow::anyhow!("No block received"))?; - - assert_eq!(block.parent_beacon_block_root, envelope.parent_beacon_block_root); - assert_eq!(block.execution_payload, envelope.execution_payload); - - Ok(()) -} - -/// Test that the network can properly propagate blocks to all connected peers. -/// -/// We are setting up a linear network topology, and we check that the block propagates to every -/// block of the network. -#[tokio::test(flavor = "multi_thread")] -async fn test_sequencer_network_propagation() -> anyhow::Result<()> { - const NETWORKS: usize = 10; - - let mut builder = TestNetworkBuilder::new().set_sequencer(); - - let sequencer_network = builder.build(vec![]); - let mut previous_enrs = vec![sequencer_network.peer_enr().await?]; - - let mut validator_networks = Vec::new(); - - for _ in 0..NETWORKS { - let network = builder.build(previous_enrs.clone()); - - previous_enrs.push(network.peer_enr().await?); - validator_networks.push(network); - } - - // Check that all networks are connected to the sequencer. - for network in validator_networks.iter() { - network.is_connected_to_with_retries(&sequencer_network).await?; - } - - // Send a block to the sequencer. 
- let mut seed_generator = SEED_GENERATOR_BUILDER.next_generator(); - - let envelope = seed_generator.random_valid_payload(PayloadVersion::V1)?; - - sequencer_network.inbound_data.gossip_payload_tx.send(envelope.clone()).await?; - - // Check that the block propagates to all networks. - for network in validator_networks.iter_mut() { - let block = network.blocks_rx.recv().await.ok_or(anyhow::anyhow!("No block received"))?; - - assert_eq!(block.parent_beacon_block_root, envelope.parent_beacon_block_root); - assert_eq!(block.execution_payload, envelope.execution_payload); - } - - Ok(()) -} diff --git a/kona/crates/node/sources/Cargo.toml b/kona/crates/node/sources/Cargo.toml deleted file mode 100644 index 5bf381e40ff..00000000000 --- a/kona/crates/node/sources/Cargo.toml +++ /dev/null @@ -1,51 +0,0 @@ -[package] -name = "kona-sources" -version = "0.1.2" -description = "Data source types and utilities for the kona-node" - -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -authors.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace - -# Alloy -alloy-transport.workspace = true -alloy-primitives.workspace = true -alloy-rpc-client.workspace = true -alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls"] } - -alloy-signer.workspace = true -alloy-signer-local.workspace = true - -# OP Alloy -op-alloy-rpc-types-engine.workspace = true - -# Misc -tracing.workspace = true -thiserror.workspace = true -derive_more.workspace = true - -# HTTP client and TLS for remote signer -reqwest = { workspace = true, features = ["json", "rustls-tls", "stream"] } -url.workspace = true -serde.workspace = true -serde_json.workspace = true -rustls.workspace = true -tokio = { workspace = true, features = ["full"] } -notify.workspace = true - -[features] -default = [] - -[dev-dependencies] -tokio.workspace = 
true -serde_json.workspace = true diff --git a/kona/crates/node/sources/README.md b/kona/crates/node/sources/README.md deleted file mode 100644 index 34be87f8fa8..00000000000 --- a/kona/crates/node/sources/README.md +++ /dev/null @@ -1,8 +0,0 @@ -## `kona-sources` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-sources"><img src="https://img.shields.io/crates/v/kona-sources.svg" alt="kona-sources crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -Data source types and utilities for the kona-node. diff --git a/kona/crates/node/sources/src/lib.rs b/kona/crates/node/sources/src/lib.rs deleted file mode 100644 index 4dac9628015..00000000000 --- a/kona/crates/node/sources/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod signer; -pub use signer::{ - BlockSigner, BlockSignerError, BlockSignerHandler, BlockSignerStartError, CertificateError, - ClientCert, RemoteSigner, RemoteSignerError, RemoteSignerHandler, RemoteSignerStartError, -}; diff --git a/kona/crates/node/sources/src/signer/remote/client.rs b/kona/crates/node/sources/src/signer/remote/client.rs deleted file mode 100644 index 
0bc8db6676b..00000000000 --- a/kona/crates/node/sources/src/signer/remote/client.rs +++ /dev/null @@ -1,117 +0,0 @@ -use alloy_primitives::Address; -use alloy_rpc_client::ClientBuilder; -use alloy_transport_http::Http; -use reqwest::header::HeaderMap; -use std::sync::Arc; -use thiserror::Error; -use tokio::sync::RwLock; -use url::Url; - -use crate::{ - RemoteSignerHandler, - signer::remote::cert::{CertificateError, ClientCert}, -}; - -/// Configuration for the remote signer client -/// -/// This configuration supports various TLS/certificate scenarios: -/// -/// 1. **Basic HTTPS**: Only `endpoint` and `address` are required. -/// 2. **Custom CA**: Provide `ca_cert` to verify servers with custom/self-signed certificates. -/// 3. **Mutual TLS (mTLS)**: Provide both `client_cert` and `client_key` for client authentication. -/// 4. **Full mTLS with custom CA**: Combine all certificate options for maximum security. -/// -/// Certificate formats supported: -/// - PEM format for all certificates and keys -/// - Certificates should be provided as file paths. -/// -/// By default, the process will watch for changes in the client certificate files and reload the -/// client automatically. -#[derive(Debug, Clone)] -pub struct RemoteSigner { - /// The URL of the remote signer endpoint - pub endpoint: Url, - /// The address of the signer. - pub address: Address, - /// Optional client certificate for mTLS (PEM format) - pub client_cert: Option<ClientCert>, - /// Optional CA certificate for server verification (PEM format) - pub ca_cert: Option<std::path::PathBuf>, - /// Headers to pass to the remote signer. - pub headers: HeaderMap, -} - -/// Errors that can occur when starting a remote signer. 
-#[derive(Debug, Error)] -pub enum RemoteSignerStartError { - /// Failed to ping signer - #[error("Failed to ping signer: {0}")] - Ping(alloy_transport::TransportError), - /// HTTP client build error - #[error("HTTP client build error: {0}")] - HTTPClientBuild(#[from] reqwest::Error), - /// Invalid certificate error - #[error("Invalid certificate: {0}")] - Certificate(#[from] CertificateError), - /// Certificate watcher error - #[error("Certificate watcher error: {0}")] - CertificateWatcher(#[from] notify::Error), -} - -impl RemoteSigner { - /// Creates a new remote signer with the given configuration - /// - /// If client certificates are configured, this will automatically start a certificate watcher - /// that monitors the certificate files for changes. When certificates are updated (e.g., by - /// cert-manager in Kubernetes), the TLS client will be automatically reloaded with the new - /// certificates without requiring a restart. - /// - /// # Certificate Watching - /// - /// The certificate watcher monitors: - /// - Client certificate file (if mTLS is configured) - /// - Client private key file (if mTLS is configured) - /// - CA certificate file (if custom CA is configured) - /// - /// When any of these files are modified, the watcher will: - /// 1. Log the certificate change event - /// 2. Reload the certificate files from disk - /// 3. Rebuild the HTTP client with the new TLS configuration - /// 4. Replace the existing client atomically - /// - /// This enables zero-downtime certificate rotation in production environments. 
- pub async fn start(self) -> Result<RemoteSignerHandler, RemoteSignerStartError> { - let http_client = self.build_http_client()?; - let transport = Http::with_client(http_client, self.endpoint.clone()); - let client = ClientBuilder::default().transport(transport, true); - - // Try to ping the signer to check if it's reachable - let version: String = - client.request("health_status", ()).await.map_err(RemoteSignerStartError::Ping)?; - - tracing::info!(target: "signer", version, "Connected to op-signer server"); - - let client = Arc::new(RwLock::new(client)); - - // Start certificate watcher if client certificates are configured - let watcher_handle = self.start_certificate_watcher(client.clone()).await?; - - Ok(RemoteSignerHandler { client, watcher_handle, address: self.address }) - } - - /// Builds an HTTP client with certificate handling for the remote signer - pub(super) fn build_http_client(&self) -> Result<reqwest::Client, RemoteSignerStartError> { - let mut client_builder = reqwest::Client::builder(); - - // Configure TLS if certificates are provided - if self.client_cert.is_some() || self.ca_cert.is_some() { - let tls_config = self.build_tls_config()?; - client_builder = client_builder.use_preconfigured_tls(tls_config); - } - - // Set headers - client_builder = client_builder.default_headers(self.headers.clone()); - - client_builder.build().map_err(RemoteSignerStartError::HTTPClientBuild) - } -} diff --git a/kona/crates/proof/driver/src/core.rs b/kona/crates/proof/driver/src/core.rs deleted file mode 100644 index b5b315205e2..00000000000 --- a/kona/crates/proof/driver/src/core.rs +++ /dev/null @@ -1,333 +0,0 @@ -//! The driver of the kona derivation pipeline. 
- -use crate::{DriverError, DriverPipeline, DriverResult, Executor, PipelineCursor, TipCursor}; -use alloc::{sync::Arc, vec::Vec}; -use alloy_consensus::BlockBody; -use alloy_primitives::{B256, Bytes}; -use alloy_rlp::Decodable; -use core::fmt::Debug; -use kona_derive::{Pipeline, PipelineError, PipelineErrorKind, Signal, SignalReceiver}; -use kona_executor::BlockBuildingOutcome; -use kona_genesis::RollupConfig; -use kona_protocol::L2BlockInfo; -use op_alloy_consensus::{OpBlock, OpTxEnvelope, OpTxType}; -use spin::RwLock; - -/// The Rollup Driver entrypoint. -/// -/// The [`Driver`] is the main coordination component for the rollup derivation and execution -/// process. It manages the interaction between the derivation pipeline and block executor -/// to produce verified L2 blocks from L1 data. -/// -/// ## Architecture -/// The driver operates with three main components: -/// - **Pipeline**: Derives L2 block attributes from L1 data -/// - **Executor**: Builds and executes L2 blocks from attributes -/// - **Cursor**: Tracks the current state of derivation progress -/// -/// ## Usage Pattern -/// ```text -/// 1. Initialize driver with cursor, executor, and pipeline -/// 2. Call wait_for_executor() to ensure readiness -/// 3. Call advance_to_target() to derive blocks up to target -/// 4. Driver coordinates pipeline stepping and block execution -/// 5. Updates cursor with progress and maintains safe head artifacts -/// ``` -/// -/// ## Error Handling -/// The driver handles various error scenarios: -/// - Pipeline derivation failures (temporary, reset, critical) -/// - Block execution failures (with Holocene deposit-only retry) -/// - L1 data exhaustion (graceful halt) -/// - Interop mode considerations -#[derive(Debug)] -pub struct Driver<E, DP, P> -where - E: Executor + Send + Sync + Debug, - DP: DriverPipeline<P> + Send + Sync + Debug, - P: Pipeline + SignalReceiver + Send + Sync + Debug, -{ - /// Marker for the pipeline type parameter. 
- /// - /// This phantom data ensures type safety while allowing the driver - /// to work with different pipeline implementations. - _marker: core::marker::PhantomData<P>, - /// Cursor tracking the current L2 derivation state and safe head. - /// - /// The cursor maintains the current position in the derivation process, - /// including the L2 safe head, output root, and L1 origin. It's wrapped - /// in an `Arc<RwLock<_>>` for thread-safe shared access. - pub cursor: Arc<RwLock<PipelineCursor>>, - /// The block executor responsible for building and executing L2 blocks. - /// - /// The executor takes payload attributes from the pipeline and produces - /// complete blocks with execution results and state changes. - pub executor: E, - /// The derivation pipeline that produces block attributes from L1 data. - /// - /// The pipeline abstracts the complex derivation logic and provides - /// a high-level interface for producing sequential block attributes. - pub pipeline: DP, - /// Cached execution artifacts and transactions from the most recent safe head. - /// - /// This cache contains the [`BlockBuildingOutcome`] and raw transaction data - /// from the last successfully executed block. It's used for efficiency and - /// debugging purposes. `None` when no block has been executed yet. - pub safe_head_artifacts: Option<(BlockBuildingOutcome, Vec<Bytes>)>, -} - -impl<E, DP, P> Driver<E, DP, P> -where - E: Executor + Send + Sync + Debug, - DP: DriverPipeline<P> + Send + Sync + Debug, - P: Pipeline + SignalReceiver + Send + Sync + Debug, -{ - /// Creates a new [`Driver`] instance. - /// - /// Initializes the driver with the provided cursor, executor, and pipeline components. - /// The driver starts with no cached safe head artifacts. 
- /// - /// # Arguments - /// * `cursor` - Shared cursor for tracking derivation state - /// * `executor` - Block executor for building and executing L2 blocks - /// * `pipeline` - Derivation pipeline for producing block attributes - /// - /// # Returns - /// A new [`Driver`] instance ready for operation after calling [`Self::wait_for_executor`]. - /// - /// # Usage - /// ```rust,ignore - /// let driver = Driver::new(cursor, executor, pipeline); - /// driver.wait_for_executor().await; - /// let result = driver.advance_to_target(&config, Some(target_block)).await; - /// ``` - pub const fn new(cursor: Arc<RwLock<PipelineCursor>>, executor: E, pipeline: DP) -> Self { - Self { - _marker: core::marker::PhantomData, - cursor, - executor, - pipeline, - safe_head_artifacts: None, - } - } - - /// Waits until the executor is ready for block processing. - /// - /// This method blocks until the underlying executor has completed any necessary - /// initialization or synchronization required before it can begin processing - /// payload attributes and executing blocks. - /// - /// # Usage - /// Must be called after creating the driver and before calling [`Self::advance_to_target`]. - /// This ensures the executor is in a valid state for block execution. - /// - /// # Example - /// ```rust,ignore - /// let mut driver = Driver::new(cursor, executor, pipeline); - /// driver.wait_for_executor().await; // Required before derivation - /// ``` - pub async fn wait_for_executor(&mut self) { - self.executor.wait_until_ready().await; - } - - /// Advances the derivation pipeline to the target block number. - /// - /// This is the main driver method that coordinates the derivation pipeline and block - /// executor to produce L2 blocks up to the specified target. It handles the complete - /// lifecycle of block derivation including pipeline stepping, block execution, error - /// recovery, and state updates. 
- /// - /// # Arguments - /// * `cfg` - The rollup configuration containing chain parameters and activation heights - /// * `target` - Optional target block number. If `None`, derives indefinitely until data source - /// is exhausted or an error occurs - /// - /// # Returns - /// * `Ok((l2_safe_head, output_root))` - Tuple containing the final [`L2BlockInfo`] and output - /// root hash when target is reached or derivation completes - /// * `Err(DriverError)` - Various error conditions that prevent further derivation - /// - /// # Errors - /// This method can fail with several error types: - /// - /// ## Pipeline Errors - /// - **EndOfSource (Critical)**: L1 data source exhausted - /// - In interop mode: Returns error immediately for caller handling - /// - In normal mode: Adjusts target to current safe head and halts gracefully - /// - **Temporary**: Insufficient data, automatically retried - /// - **Reset**: Reorg detected, pipeline reset and derivation continues - /// - **Other Critical**: Fatal pipeline errors that stop derivation - /// - /// ## Execution Errors - /// - **Pre-Holocene**: Block execution failures cause block to be discarded - /// - **Holocene+**: Failed blocks are retried as deposit-only blocks - /// - Strips non-deposit transactions and flushes invalidated channel - /// - If deposit-only block also fails, returns critical error - /// - /// ## Other Errors - /// - **MissingOrigin**: Pipeline origin not available when expected - /// - **BlockConversion**: Failed to convert block format - /// - **RLP**: Failed to decode transaction data - /// - /// # Behavior Details - /// - /// ## Main Loop - /// The method operates in a continuous loop: - /// 1. Check if target block number reached (if specified) - /// 2. Produce payload attributes from pipeline - /// 3. Execute payload with executor - /// 4. Handle execution failures with retry logic - /// 5. Construct complete block and update cursor - /// 6. 
Cache artifacts and continue - /// - /// ## Target Handling - /// - If `target` is `Some(n)`: Stops when safe head reaches block `n` - /// - If `target` is `None`: Continues until data exhausted or critical error - /// - Target can be dynamically adjusted if data source is exhausted - /// - /// ## State Updates - /// Each successful block updates: - /// - Pipeline cursor with new L1 origin and L2 safe head - /// - Executor safe head for next block building - /// - Cached artifacts for the most recent block - /// - Output root computation for verification - /// - /// # Usage Pattern - /// ```rust,ignore - /// // Derive to specific block - /// let (safe_head, output_root) = driver - /// .advance_to_target(&rollup_config, Some(100)) - /// .await?; - /// - /// // Derive until data exhausted - /// let (final_head, output_root) = driver - /// .advance_to_target(&rollup_config, None) - /// .await?; - /// ``` - /// - /// # Panics - /// This method does not explicitly panic, but may propagate panics from: - /// - RwLock poisoning (if another thread panicked while holding the cursor lock) - /// - Executor or pipeline implementation panics - /// - Arithmetic overflow in block number operations (highly unlikely) - pub async fn advance_to_target( - &mut self, - cfg: &RollupConfig, - mut target: Option<u64>, - ) -> DriverResult<(L2BlockInfo, B256), E::Error> { - loop { - // Check if we have reached the target block number. 
- let pipeline_cursor = self.cursor.read(); - let tip_cursor = pipeline_cursor.tip(); - if let Some(tb) = target { - if tip_cursor.l2_safe_head.block_info.number >= tb { - info!(target: "client", "Derivation complete, reached L2 safe head."); - return Ok((tip_cursor.l2_safe_head, tip_cursor.l2_safe_head_output_root)); - } - } - - let mut attributes = match self.pipeline.produce_payload(tip_cursor.l2_safe_head).await - { - Ok(attrs) => attrs.take_inner(), - Err(PipelineErrorKind::Critical(PipelineError::EndOfSource)) => { - warn!(target: "client", "Exhausted data source; Halting derivation and using current safe head."); - - // Adjust the target block number to the current safe head, as no more blocks - // can be produced. - if target.is_some() { - target = Some(tip_cursor.l2_safe_head.block_info.number); - }; - - // If we are in interop mode, this error must be handled by the caller. - // Otherwise, we continue the loop to halt derivation on the next iteration. - if cfg.is_interop_active(self.cursor.read().l2_safe_head().block_info.number) { - return Err(PipelineError::EndOfSource.crit().into()); - } else { - continue; - } - } - Err(e) => { - error!(target: "client", "Failed to produce payload: {:?}", e); - return Err(DriverError::Pipeline(e)); - } - }; - - self.executor.update_safe_head(tip_cursor.l2_safe_head_header.clone()); - let outcome = match self.executor.execute_payload(attributes.clone()).await { - Ok(outcome) => outcome, - Err(e) => { - error!(target: "client", "Failed to execute L2 block: {}", e); - - if cfg.is_holocene_active(attributes.payload_attributes.timestamp) { - // Retry with a deposit-only block. - warn!(target: "client", "Flushing current channel and retrying deposit only block"); - - // Flush the current batch and channel - if a block was replaced with a - // deposit-only block due to execution failure, the - // batch and channel it is contained in is forwards - // invalidated. 
- self.pipeline.signal(Signal::FlushChannel).await?; - - // Strip out all transactions that are not deposits. - attributes.transactions = attributes.transactions.map(|txs| { - txs.into_iter() - .filter(|tx| !tx.is_empty() && tx[0] == OpTxType::Deposit as u8) - .collect::<Vec<_>>() - }); - - // Retry the execution. - self.executor.update_safe_head(tip_cursor.l2_safe_head_header.clone()); - match self.executor.execute_payload(attributes.clone()).await { - Ok(header) => header, - Err(e) => { - error!( - target: "client", - "Critical - Failed to execute deposit-only block: {e}", - ); - return Err(DriverError::Executor(e)); - } - } - } else { - // Pre-Holocene, discard the block if execution fails. - continue; - } - } - }; - - // Construct the block. - let block = OpBlock { - header: outcome.header.inner().clone(), - body: BlockBody { - transactions: attributes - .transactions - .as_ref() - .unwrap_or(&Vec::new()) - .iter() - .map(|tx| OpTxEnvelope::decode(&mut tx.as_ref()).map_err(DriverError::Rlp)) - .collect::<DriverResult<Vec<OpTxEnvelope>, E::Error>>()?, - ommers: Vec::new(), - withdrawals: None, - }, - }; - - // Get the pipeline origin and update the tip cursor. - let origin = self.pipeline.origin().ok_or(PipelineError::MissingOrigin.crit())?; - let l2_info = L2BlockInfo::from_block_and_genesis( - &block, - &self.pipeline.rollup_config().genesis, - )?; - let tip_cursor = TipCursor::new( - l2_info, - outcome.header.clone(), - self.executor.compute_output_root().map_err(DriverError::Executor)?, - ); - - // Advance the derivation pipeline cursor - drop(pipeline_cursor); - self.cursor.write().advance(origin, tip_cursor); - - // Update the latest safe head artifacts. 
- self.safe_head_artifacts = Some((outcome, attributes.transactions.unwrap_or_default())); - } - } -} diff --git a/kona/crates/proof/driver/src/lib.rs b/kona/crates/proof/driver/src/lib.rs deleted file mode 100644 index f83e3410a52..00000000000 --- a/kona/crates/proof/driver/src/lib.rs +++ /dev/null @@ -1,31 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(test), no_std)] - -extern crate alloc; - -#[macro_use] -extern crate tracing; - -mod errors; -pub use errors::{DriverError, DriverResult}; - -mod pipeline; -pub use pipeline::DriverPipeline; - -mod executor; -pub use executor::Executor; - -mod core; -pub use core::Driver; - -mod cursor; -pub use cursor::PipelineCursor; - -mod tip; -pub use tip::TipCursor; diff --git a/kona/crates/proof/driver/src/pipeline.rs b/kona/crates/proof/driver/src/pipeline.rs deleted file mode 100644 index a13b15eac0a..00000000000 --- a/kona/crates/proof/driver/src/pipeline.rs +++ /dev/null @@ -1,167 +0,0 @@ -//! Abstracts the derivation pipeline from the driver. -//! -//! This module provides the [`DriverPipeline`] trait which serves as a high-level -//! abstraction for the driver's derivation pipeline. The pipeline is responsible -//! for deriving L2 blocks from L1 data and producing payload attributes for execution. - -use alloc::boxed::Box; -use async_trait::async_trait; -use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; - -use kona_derive::{ - ActivationSignal, Pipeline, PipelineError, PipelineErrorKind, ResetError, ResetSignal, - SignalReceiver, StepResult, -}; - -/// High-level abstraction for the driver's derivation pipeline. 
-/// -/// The [`DriverPipeline`] trait extends the base [`Pipeline`] functionality with -/// driver-specific operations needed for block production. It handles the complex -/// logic of stepping through derivation stages, managing resets and reorgs, and -/// producing payload attributes for block building. -/// -/// ## Key Responsibilities -/// - Stepping through derivation pipeline stages -/// - Handling L1 origin advancement -/// - Managing pipeline resets due to reorgs or activation signals -/// - Producing payload attributes for disputed blocks -/// - Caching and cache invalidation -/// -/// ## Error Handling -/// The pipeline can encounter several types of errors: -/// - **Temporary**: Retryable errors (e.g., missing data) -/// - **Reset**: Errors requiring pipeline reset (e.g., reorgs, activations) -/// - **Critical**: Fatal errors that stop derivation -#[async_trait] -pub trait DriverPipeline<P>: Pipeline + SignalReceiver -where - P: Pipeline + SignalReceiver, -{ - /// Flushes any cached data due to a reorganization. - /// - /// This method clears internal caches that may contain stale data - /// when a reorganization is detected on the L1 chain. It ensures - /// that subsequent derivation operations work with fresh data. - /// - /// # Usage - /// Called automatically when a reorg is detected during pipeline - /// stepping, but can also be called manually if needed. - fn flush(&mut self); - - /// Produces payload attributes for the next block after the given L2 safe head. - /// - /// This method advances the derivation pipeline to produce the next set of - /// [`OpAttributesWithParent`] that can be used for block building. It handles - /// the complex stepping logic including error recovery, resets, and reorgs. 
- /// - /// # Arguments - /// * `l2_safe_head` - The current L2 safe head block info to build upon - /// - /// # Returns - /// * `Ok(OpAttributesWithParent)` - Successfully produced payload attributes - /// * `Err(PipelineErrorKind)` - Pipeline error preventing payload production - /// - /// # Errors - /// This method can fail with various error types: - /// - **Temporary errors**: Insufficient data, retries automatically - /// - **Reset errors**: Reorg detected or activation needed, triggers pipeline reset - /// - **Critical errors**: Fatal issues that require external intervention - /// - /// # Behavior - /// The method operates in a loop, continuously stepping the pipeline until: - /// 1. Payload attributes are successfully produced - /// 2. A critical error occurs - /// 3. The pipeline signals completion - /// - /// ## Reset Handling - /// When reset errors occur: - /// - **Reorg detected**: Flushes cache and resets to safe head - /// - **Holocene activation**: Sends activation signal - /// - **Other resets**: Standard reset to safe head with system config - /// - /// ## Step Results - /// The pipeline can return different step results: - /// - **PreparedAttributes**: Attributes ready for the next block - /// - **AdvancedOrigin**: L1 origin moved forward - /// - **OriginAdvanceErr/StepFailed**: Various error conditions - async fn produce_payload( - &mut self, - l2_safe_head: L2BlockInfo, - ) -> Result<OpAttributesWithParent, PipelineErrorKind> { - // As we start the safe head at the disputed block's parent, we step the pipeline until the - // first attributes are produced. All batches at and before the safe head will be - // dropped, so the first payload will always be the disputed one. 
- loop { - match self.step(l2_safe_head).await { - StepResult::PreparedAttributes => { - info!(target: "client_derivation_driver", "Stepped derivation pipeline") - } - StepResult::AdvancedOrigin => { - info!( - target: "client_derivation_driver", - l1_block_number = self.origin().map(|o| o.number).ok_or(PipelineError::MissingOrigin.crit())?, - "Advanced origin" - ) - } - StepResult::OriginAdvanceErr(e) | StepResult::StepFailed(e) => { - // Break the loop unless the error signifies that there is not enough data to - // complete the current step. In this case, we retry the step to see if other - // stages can make progress. - match e { - PipelineErrorKind::Temporary(_) => { - trace!(target: "client_derivation_driver", "Failed to step derivation pipeline temporarily: {:?}", e); - continue; - } - PipelineErrorKind::Reset(e) => { - warn!(target: "client_derivation_driver", "Failed to step derivation pipeline due to reset: {:?}", e); - let system_config = self - .system_config_by_number(l2_safe_head.block_info.number) - .await?; - - if matches!(e, ResetError::HoloceneActivation) { - let l1_origin = - self.origin().ok_or(PipelineError::MissingOrigin.crit())?; - self.signal( - ActivationSignal { - l2_safe_head, - l1_origin, - system_config: Some(system_config), - } - .signal(), - ) - .await?; - } else { - // Flushes cache if a reorg is detected. - if matches!(e, ResetError::ReorgDetected(_, _)) { - self.flush(); - } - - // Reset the pipeline to the initial L2 safe head and L1 origin, - // and try again. 
- let l1_origin = - self.origin().ok_or(PipelineError::MissingOrigin.crit())?; - self.signal( - ResetSignal { - l2_safe_head, - l1_origin, - system_config: Some(system_config), - } - .signal(), - ) - .await?; - } - } - PipelineErrorKind::Critical(_) => { - warn!(target: "client_derivation_driver", "Failed to step derivation pipeline: {:?}", e); - return Err(e); - } - } - } - } - - if let Some(attrs) = self.next() { - return Ok(attrs); - } - } - } -} diff --git a/kona/crates/proof/executor/README.md b/kona/crates/proof/executor/README.md deleted file mode 100644 index 0f3fbbc128a..00000000000 --- a/kona/crates/proof/executor/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-executor` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-executor"><img src="https://img.shields.io/crates/v/kona-executor.svg?label=kona-executor&labelColor=2a2f35" alt="Kona Stateless Executor"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -A `no_std` implementation of a stateless block executor for the OP stack, backed by [`kona-mpt`](../mpt)'s `TrieDB`. diff --git a/kona/crates/proof/executor/src/builder/core.rs b/kona/crates/proof/executor/src/builder/core.rs deleted file mode 100644 index 407c82a7c54..00000000000 --- a/kona/crates/proof/executor/src/builder/core.rs +++ /dev/null @@ -1,333 +0,0 @@ -//! Stateless OP Stack L2 block builder implementation. -//! -//! The [StatelessL2Builder] provides a complete block building and execution engine -//! for OP Stack L2 chains that operates in a stateless manner, pulling required state -//! 
data from a [TrieDB] during execution rather than maintaining full state. - -use crate::{ExecutorError, ExecutorResult, TrieDB, TrieDBError, TrieDBProvider}; -use alloc::{string::ToString, vec::Vec}; -use alloy_consensus::{Header, Sealed, crypto::RecoveryError}; -use alloy_evm::{ - EvmFactory, FromRecoveredTx, FromTxWithEncoded, - block::{BlockExecutionResult, BlockExecutor, BlockExecutorFactory}, -}; -use alloy_op_evm::{ - OpBlockExecutionCtx, OpBlockExecutorFactory, - block::{OpAlloyReceiptBuilder, OpTxEnv}, -}; -use core::fmt::Debug; -use kona_genesis::RollupConfig; -use kona_mpt::TrieHinter; -use op_alloy_consensus::{OpReceiptEnvelope, OpTxEnvelope}; -use op_alloy_rpc_types_engine::OpPayloadAttributes; -use op_revm::OpSpecId; -use revm::{ - context::BlockEnv, - database::{State, states::bundle_state::BundleRetention}, -}; - -/// Stateless OP Stack L2 block builder that derives state from trie proofs during execution. -/// -/// The [`StatelessL2Builder`] is a specialized block execution engine designed for fault proof -/// systems and stateless verification. Instead of maintaining full L2 state, it dynamically -/// retrieves required state data from a [`TrieDB`] backed by Merkle proofs and witnesses. -/// -/// # Architecture -/// -/// The builder operates in a stateless manner by: -/// 1. **Trie Database**: Uses [`TrieDB`] to access state via Merkle proofs -/// 2. **EVM Factory**: Creates execution environments with proof-backed state -/// 3. **Block Executor**: Executes transactions using witness-provided state -/// 4. 
**Receipt Generation**: Produces execution receipts and state commitments -/// -/// # Stateless Execution Model -/// -/// Traditional execution engines maintain full state databases, but the stateless model: -/// - Receives state witnesses containing only required data -/// - Verifies state access against Merkle proofs -/// - Executes transactions without persistent state storage -/// - Produces verifiable execution results and state commitments -/// -/// # Use Cases -/// -/// ## Fault Proof Systems -/// - Enables dispute resolution without full state replication -/// - Provides verifiable execution results for challenge games -/// - Supports optimistic rollup fraud proof generation -/// -/// ## Stateless Verification -/// - Allows third parties to verify L2 blocks without full state -/// - Enables light clients to validate L2 execution -/// - Supports decentralized verification networks -/// -/// # Performance Characteristics -/// -/// - **Memory**: Lower memory usage than stateful execution (no full state) -/// - **I/O**: Higher I/O for proof verification and witness access -/// - **CPU**: Additional overhead for cryptographic proof verification -/// - **Determinism**: Guaranteed deterministic execution results -/// -/// # Type Parameters -/// -/// * `P` - Trie database provider implementing [`TrieDBProvider`] -/// * `H` - Trie hinter implementing [`TrieHinter`] for state access optimization -/// * `Evm` - EVM factory implementing [`EvmFactory`] for execution environment creation -#[derive(Debug)] -pub struct StatelessL2Builder<'a, P, H, Evm> -where - P: TrieDBProvider, - H: TrieHinter, - Evm: EvmFactory, -{ - /// The rollup configuration containing chain parameters and activation heights. - /// - /// Provides access to network-specific parameters including gas limits, - /// hard fork activation heights, and system addresses needed for proper - /// L2 block execution and validation. 
- pub(crate) config: &'a RollupConfig, - /// The trie database providing stateless access to L2 state via Merkle proofs. - /// - /// The [`TrieDB`] serves as the primary interface for state access during - /// execution, resolving account and storage queries using witness data - /// and cryptographic proofs rather than a traditional state database. - pub(crate) trie_db: TrieDB<P, H>, - /// The block executor factory for creating OP Stack execution environments. - /// - /// This factory creates specialized OP Stack execution environments that - /// understand OP-specific transaction types, system calls, and state - /// management required for proper L2 block execution. - pub(crate) factory: OpBlockExecutorFactory<OpAlloyReceiptBuilder, RollupConfig, Evm>, -} - -impl<'a, P, H, Evm> StatelessL2Builder<'a, P, H, Evm> -where - P: TrieDBProvider + Debug, - H: TrieHinter + Debug, - Evm: EvmFactory<Spec = OpSpecId, BlockEnv = BlockEnv> + 'static, - <Evm as EvmFactory>::Tx: - FromTxWithEncoded<OpTxEnvelope> + FromRecoveredTx<OpTxEnvelope> + OpTxEnv, -{ - /// Creates a new stateless L2 block builder instance. - /// - /// Initializes the builder with the necessary components for stateless block execution - /// including the trie database, execution factory, and rollup configuration. 
- /// - /// # Arguments - /// * `config` - Rollup configuration with chain parameters and activation heights - /// * `evm_factory` - EVM factory for creating execution environments - /// * `provider` - Trie database provider for state access - /// * `hinter` - Trie hinter for optimizing state access patterns - /// * `parent_header` - Sealed header of the parent block to build upon - /// - /// # Returns - /// A new [`StatelessL2Builder`] ready for block building operations - /// - /// # Usage - /// ```rust,ignore - /// let builder = StatelessL2Builder::new( - /// &rollup_config, - /// evm_factory, - /// trie_provider, - /// trie_hinter, - /// parent_header, - /// ); - /// ``` - pub fn new( - config: &'a RollupConfig, - evm_factory: Evm, - provider: P, - hinter: H, - parent_header: Sealed<Header>, - ) -> Self { - let trie_db = TrieDB::new(parent_header, provider, hinter); - let factory = OpBlockExecutorFactory::new( - OpAlloyReceiptBuilder::default(), - config.clone(), - evm_factory, - ); - Self { config, trie_db, factory } - } - - /// Builds and executes a new L2 block using the provided payload attributes. - /// - /// This method performs the complete block building and execution process in a stateless - /// manner, dynamically retrieving required state data via the trie database and producing - /// a fully executed block with receipts and state commitments. 
- /// - /// # Arguments - /// * `attrs` - Payload attributes containing transactions and block metadata - /// - /// # Returns - /// * `Ok(BlockBuildingOutcome)` - Successfully built and executed block with receipts - /// * `Err(ExecutorError)` - Block building or execution failure - /// - /// # Errors - /// This method can fail due to various conditions: - /// - /// ## Input Validation Errors - /// - [`ExecutorError::MissingGasLimit`]: Gas limit not provided in attributes - /// - [`ExecutorError::MissingTransactions`]: Transaction list not provided - /// - [`ExecutorError::MissingEIP1559Params`]: Required fee parameters missing (post-Holocene) - /// - [`ExecutorError::MissingParentBeaconBlockRoot`]: Beacon root missing (post-Dencun) - /// - /// ## Execution Errors - /// - [`ExecutorError::BlockGasLimitExceeded`]: Cumulative gas exceeds block limit - /// - [`ExecutorError::UnsupportedTransactionType`]: Unknown transaction type encountered - /// - [`ExecutorError::ExecutionError`]: EVM-level execution failures - /// - /// ## State Access Errors - /// - [`ExecutorError::TrieDBError`]: State tree access or proof verification failures - /// - Missing account data in witness - /// - Invalid Merkle proofs - /// - /// ## Data Integrity Errors - /// - [`ExecutorError::Recovery`]: Transaction signature recovery failures - /// - [`ExecutorError::RLPError`]: Data encoding/decoding errors - /// - /// # Block Building Process - /// - /// The block building process follows these steps: - /// - /// 1. **Environment Setup**: Configure EVM environment with proper gas settings - /// 2. **Witness Hinting**: Send payload witness hints to optimize state access - /// 3. **Transaction Execution**: Execute each transaction in order with state updates - /// 4. **Receipt Generation**: Generate execution receipts for all transactions - /// 5. **State Commitment**: Compute final state roots and output commitments - /// 6. 
**Block Assembly**: Assemble complete block with header and execution results - /// - /// # Stateless Execution Details - /// - /// Unlike traditional execution engines, this builder: - /// - Resolves state access via Merkle proofs instead of database lookups - /// - Validates all state access against cryptographic witnesses - /// - Produces deterministic results independent of execution environment - /// - Enables verification without full state replication - /// - /// # Performance Considerations - /// - /// - State access latency depends on proof verification overhead - /// - Memory usage scales with witness size rather than full state - /// - CPU overhead from cryptographic proof verification - /// - I/O patterns optimized through trie hinter guidance - pub fn build_block( - &mut self, - attrs: OpPayloadAttributes, - ) -> ExecutorResult<BlockBuildingOutcome> { - // Step 1. Set up the execution environment. - let (base_fee_params, min_base_fee) = Self::active_base_fee_params( - self.config, - self.trie_db.parent_block_header(), - attrs.payload_attributes.timestamp, - )?; - let evm_env = self.evm_env( - self.config.spec_id(attrs.payload_attributes.timestamp), - self.trie_db.parent_block_header(), - &attrs, - &base_fee_params, - min_base_fee, - )?; - let block_env = evm_env.block_env().clone(); - let parent_hash = self.trie_db.parent_block_header().seal(); - - // Attempt to send a payload witness hint to the host. This hint instructs the host to - // populate its preimage store with the preimages required to statelessly execute - // this payload. This feature is experimental, so if the hint fails, we continue - // without it and fall back on on-demand preimage fetching for execution. 
- self.trie_db - .hinter - .hint_execution_witness(parent_hash, &attrs) - .map_err(|e| TrieDBError::Provider(e.to_string()))?; - - info!( - target: "block_builder", - block_number = %block_env.number, - block_timestamp = %block_env.timestamp, - block_gas_limit = block_env.gas_limit, - transactions = attrs.transactions.as_ref().map_or(0, |txs| txs.len()), - "Beginning block building." - ); - - // Step 2. Create the executor, using the trie database. - let mut state = State::builder() - .with_database(&mut self.trie_db) - .with_bundle_update() - .without_state_clear() - .build(); - let evm = self.factory.evm_factory().create_evm(&mut state, evm_env); - let ctx = OpBlockExecutionCtx { - parent_hash, - parent_beacon_block_root: attrs.payload_attributes.parent_beacon_block_root, - // This field is unused for individual block building jobs. - extra_data: Default::default(), - }; - let executor = self.factory.create_executor(evm, ctx); - - // Step 3. Execute the block containing the transactions within the payload attributes. - let transactions = attrs - .recovered_transactions_with_encoded() - .collect::<Result<Vec<_>, RecoveryError>>() - .map_err(ExecutorError::Recovery)?; - let ex_result = executor.execute_block(transactions.iter())?; - - info!( - target: "block_builder", - gas_used = ex_result.gas_used, - gas_limit = block_env.gas_limit, - "Finished block building. Beginning sealing job." - ); - - // Step 4. Merge state transitions and seal the block. - state.merge_transitions(BundleRetention::Reverts); - let bundle = state.take_bundle(); - let header = self.seal_block(&attrs, parent_hash, &block_env, &ex_result, bundle)?; - - info!( - target: "block_builder", - number = header.number, - hash = ?header.seal(), - state_root = ?header.state_root, - transactions_root = ?header.transactions_root, - receipts_root = ?header.receipts_root, - "Sealed new block", - ); - - // Update the parent block hash in the state database, preparing for the next block. 
- self.trie_db.set_parent_block_header(header.clone()); - Ok((header, ex_result).into()) - } -} - -/// The outcome of a block building operation, returning the sealed block [`Header`] and the -/// [`BlockExecutionResult`]. -#[derive(Debug, Clone)] -pub struct BlockBuildingOutcome { - /// The block header. - pub header: Sealed<Header>, - /// The block execution result. - pub execution_result: BlockExecutionResult<OpReceiptEnvelope>, -} - -impl From<(Sealed<Header>, BlockExecutionResult<OpReceiptEnvelope>)> for BlockBuildingOutcome { - fn from( - (header, execution_result): (Sealed<Header>, BlockExecutionResult<OpReceiptEnvelope>), - ) -> Self { - Self { header, execution_result } - } -} - -#[cfg(test)] -mod test { - use crate::test_utils::run_test_fixture; - use rstest::rstest; - use std::path::PathBuf; - - #[rstest] - #[tokio::test] - async fn test_statelessly_execute_block( - #[base_dir = "./testdata"] - #[files("*.tar.gz")] - path: PathBuf, - ) { - run_test_fixture(path).await; - } -} diff --git a/kona/crates/proof/executor/src/builder/env.rs b/kona/crates/proof/executor/src/builder/env.rs deleted file mode 100644 index 7ecfe57598c..00000000000 --- a/kona/crates/proof/executor/src/builder/env.rs +++ /dev/null @@ -1,167 +0,0 @@ -//! Environment utility functions for [StatelessL2Builder]. 
- -use super::StatelessL2Builder; -use crate::{ - ExecutorError, ExecutorResult, TrieDBProvider, - util::{ - decode_holocene_eip_1559_params_block_header, decode_jovian_eip_1559_params_block_header, - }, -}; -use alloy_consensus::{BlockHeader, Header}; -use alloy_eips::{calc_next_block_base_fee, eip1559::BaseFeeParams, eip7840::BlobParams}; -use alloy_evm::{EvmEnv, EvmFactory}; -use alloy_primitives::U256; -use kona_genesis::RollupConfig; -use kona_mpt::TrieHinter; -use op_alloy_rpc_types_engine::OpPayloadAttributes; -use op_revm::OpSpecId; -use revm::{ - context::{BlockEnv, CfgEnv}, - context_interface::block::BlobExcessGasAndPrice, - primitives::eip4844::{ - BLOB_BASE_FEE_UPDATE_FRACTION_CANCUN, BLOB_BASE_FEE_UPDATE_FRACTION_PRAGUE, - }, -}; - -impl<P, H, Evm> StatelessL2Builder<'_, P, H, Evm> -where - P: TrieDBProvider, - H: TrieHinter, - Evm: EvmFactory, -{ - /// Returns the active [`EvmEnv`] for the executor. - pub(crate) fn evm_env( - &self, - spec_id: OpSpecId, - parent_header: &Header, - payload_attrs: &OpPayloadAttributes, - base_fee_params: &BaseFeeParams, - min_base_fee: u64, - ) -> ExecutorResult<EvmEnv<OpSpecId>> { - let block_env = self.prepare_block_env( - spec_id, - parent_header, - payload_attrs, - base_fee_params, - min_base_fee, - )?; - let cfg_env = self.evm_cfg_env(payload_attrs.payload_attributes.timestamp); - Ok(EvmEnv::new(cfg_env, block_env)) - } - - /// Returns the active [CfgEnv] for the executor. - pub(crate) fn evm_cfg_env(&self, timestamp: u64) -> CfgEnv<OpSpecId> { - CfgEnv::new() - .with_chain_id(self.config.l2_chain_id.id()) - .with_spec(self.config.spec_id(timestamp)) - } - - fn next_block_base_fee( - &self, - params: BaseFeeParams, - parent: &Header, - min_base_fee: u64, - ) -> Option<u64> { - if !self.config.is_jovian_active(parent.timestamp()) { - return parent.next_block_base_fee(params); - } - - // Starting from Jovian, we use the maximum of the gas used and the blob gas used to - // calculate the next base fee. 
- let gas_used = if parent.blob_gas_used().unwrap_or_default() > parent.gas_used() { - parent.blob_gas_used().unwrap_or_default() - } else { - parent.gas_used() - }; - - let mut next_block_base_fee = calc_next_block_base_fee( - gas_used, - parent.gas_limit(), - parent.base_fee_per_gas().unwrap_or_default(), - params, - ); - - // If the next block base fee is less than the min base fee, set it to the min base fee. - // # Note - // Before Jovian activation, the min-base-fee is 0 so this is a no-op. - if next_block_base_fee < min_base_fee { - next_block_base_fee = min_base_fee; - } - - Some(next_block_base_fee) - } - - /// Prepares a [BlockEnv] with the given [OpPayloadAttributes]. - pub(crate) fn prepare_block_env( - &self, - spec_id: OpSpecId, - parent_header: &Header, - payload_attrs: &OpPayloadAttributes, - base_fee_params: &BaseFeeParams, - min_base_fee: u64, - ) -> ExecutorResult<BlockEnv> { - let (params, fraction) = if spec_id.is_enabled_in(OpSpecId::ISTHMUS) { - (Some(BlobParams::prague()), BLOB_BASE_FEE_UPDATE_FRACTION_PRAGUE) - } else if spec_id.is_enabled_in(OpSpecId::ECOTONE) { - (Some(BlobParams::cancun()), BLOB_BASE_FEE_UPDATE_FRACTION_CANCUN) - } else { - (None, 0) - }; - - let blob_excess_gas_and_price = parent_header - .maybe_next_block_excess_blob_gas(params) - .or_else(|| spec_id.is_enabled_in(OpSpecId::ECOTONE).then_some(0)) - .map(|excess| BlobExcessGasAndPrice::new(excess, fraction)); - - let next_block_base_fee = self - .next_block_base_fee(*base_fee_params, parent_header, min_base_fee) - .unwrap_or_default(); - - Ok(BlockEnv { - number: U256::from(parent_header.number + 1), - beneficiary: payload_attrs.payload_attributes.suggested_fee_recipient, - timestamp: U256::from(payload_attrs.payload_attributes.timestamp), - gas_limit: payload_attrs.gas_limit.ok_or(ExecutorError::MissingGasLimit)?, - basefee: next_block_base_fee, - prevrandao: Some(payload_attrs.payload_attributes.prev_randao), - blob_excess_gas_and_price, - ..Default::default() - }) - 
} - - /// Returns the active base fee parameters for the parent header. - /// Returns the min-base-fee as the second element of the tuple. - /// - /// ## Note - /// Before Jovian activation, the min-base-fee is 0. - pub(crate) fn active_base_fee_params( - config: &RollupConfig, - parent_header: &Header, - payload_timestamp: u64, - ) -> ExecutorResult<(BaseFeeParams, u64)> { - match config { - // After Holocene activation, the base fee parameters are stored in the - // `extraData` field of the parent header. If Holocene wasn't active in the - // parent block, the default base fee parameters are used. - _ if config.is_jovian_active(parent_header.timestamp) => { - decode_jovian_eip_1559_params_block_header(parent_header) - } - _ if config.is_holocene_active(parent_header.timestamp) => { - decode_holocene_eip_1559_params_block_header(parent_header) - .map(|base_fee_params| (base_fee_params, 0)) - } - // If the next payload attribute timestamp is past canyon activation, - // use the canyon base fee params from the rollup config. - _ if config.is_canyon_active(payload_timestamp) => { - // If the payload attribute timestamp is past canyon activation, - // use the canyon base fee params from the rollup config. - Ok((config.chain_op_config.post_canyon_params(), 0)) - } - _ => { - // If the next payload attribute timestamp is prior to canyon activation, - // use the default base fee params from the rollup config. - Ok((config.chain_op_config.pre_canyon_params(), 0)) - } - } - } -} diff --git a/kona/crates/proof/executor/src/db/mod.rs b/kona/crates/proof/executor/src/db/mod.rs deleted file mode 100644 index 5141b8aa1d2..00000000000 --- a/kona/crates/proof/executor/src/db/mod.rs +++ /dev/null @@ -1,477 +0,0 @@ -//! This module contains an implementation of an in-memory Trie DB for [`revm`], that allows for -//! incremental updates through fetching node preimages on the fly during execution. 
- -use crate::errors::{TrieDBError, TrieDBResult}; -use alloc::{string::ToString, vec::Vec}; -use alloy_consensus::{EMPTY_ROOT_HASH, Header, Sealed}; -use alloy_primitives::{Address, B256, U256, keccak256}; -use alloy_rlp::{Decodable, Encodable}; -use alloy_trie::TrieAccount; -use kona_mpt::{Nibbles, TrieHinter, TrieNode, TrieNodeError}; -use revm::{ - Database, - database::{BundleState, states::StorageSlot}, - primitives::{BLOCK_HASH_HISTORY, HashMap}, - state::{AccountInfo, Bytecode}, -}; - -mod traits; -pub use traits::{NoopTrieDBProvider, TrieDBProvider}; - -/// A Trie DB that caches open state in-memory. -/// -/// When accounts that don't already exist within the cached [`TrieNode`] are queried, the database -/// fetches the preimages of the trie nodes on the path to the account using the `PreimageFetcher` -/// (`F` generic). This allows for data to be fetched in a verifiable manner given an initial -/// trusted state root as it is needed during execution. -/// -/// The [`TrieDB`] is intended to be wrapped by a [`State`], which is then used by [`revm`] to -/// capture state transitions during block execution. -/// -/// **Behavior**: -/// - When an account is queried and the trie path has not already been opened by [Self::basic], we -/// fall through to the `PreimageFetcher` to fetch the preimages of the trie nodes on the path to -/// the account. After it has been fetched, the path will be cached until the next call to -/// [Self::state_root]. -/// - When querying for the code hash of an account, the [`TrieDBProvider`] is consulted to fetch -/// the code hash of the account. -/// - When a [`BundleState`] changeset is committed to the parent [`State`] database, the changes -/// are first applied to the [`State`]'s cache, then the trie hash is recomputed with -/// [Self::state_root]. 
-/// - When the block hash of a block number is needed via [Self::block_hash], the -/// `HeaderByHashFetcher` is consulted to walk back to the desired block number by revealing the -/// parent hash of block headers until the desired block number is reached, up to a maximum of -/// [BLOCK_HASH_HISTORY] blocks back relative to the current parent block hash. -/// -/// **Example Construction**: -/// ```rust -/// use alloy_consensus::{Header, Sealable}; -/// use alloy_evm::{EvmEnv, EvmFactory, block::BlockExecutorFactory}; -/// use alloy_op_evm::{ -/// OpBlockExecutionCtx, OpBlockExecutorFactory, OpEvmFactory, block::OpAlloyReceiptBuilder, -/// }; -/// use alloy_op_hardforks::OpChainHardforks; -/// use alloy_primitives::{B256, Bytes}; -/// use kona_executor::{NoopTrieDBProvider, TrieDB}; -/// use kona_mpt::NoopTrieHinter; -/// use revm::database::{State, states::bundle_state::BundleRetention}; -/// -/// let mock_parent_block_header = Header::default(); -/// let trie_db = -/// TrieDB::new(mock_parent_block_header.seal_slow(), NoopTrieDBProvider, NoopTrieHinter); -/// let executor_factory = OpBlockExecutorFactory::new( -/// OpAlloyReceiptBuilder::default(), -/// OpChainHardforks::op_mainnet(), -/// OpEvmFactory::default(), -/// ); -/// let mut state = State::builder().with_database(trie_db).with_bundle_update().build(); -/// let evm = executor_factory.evm_factory().create_evm(&mut state, EvmEnv::default()); -/// let executor = executor_factory.create_executor(evm, OpBlockExecutionCtx::default()); -/// -/// // Execute your block's transactions... -/// drop(executor); -/// -/// state.merge_transitions(BundleRetention::Reverts); -/// let bundle = state.take_bundle(); -/// let state_root = state.database.state_root(&bundle).expect("Failed to compute state root"); -/// ``` -/// -/// [`State`]: revm::database::State -#[derive(Debug, Clone)] -pub struct TrieDB<F, H> -where - F: TrieDBProvider, - H: TrieHinter, -{ - /// The [`TrieNode`] representation of the root node. 
- root_node: TrieNode, - /// Storage roots of accounts within the trie. - storage_roots: HashMap<Address, TrieNode>, - /// The parent block hash of the current block. - parent_block_header: Sealed<Header>, - /// The [`TrieDBProvider`] - pub fetcher: F, - /// The [`TrieHinter`] - pub hinter: H, -} - -impl<F, H> TrieDB<F, H> -where - F: TrieDBProvider, - H: TrieHinter, -{ - /// Creates a new [TrieDB] with the given root node. - pub fn new(parent_block_header: Sealed<Header>, fetcher: F, hinter: H) -> Self { - Self { - root_node: TrieNode::new_blinded(parent_block_header.state_root), - storage_roots: Default::default(), - parent_block_header, - fetcher, - hinter, - } - } - - /// Consumes `Self` and takes the current state root of the trie DB. - pub fn take_root_node(self) -> TrieNode { - self.root_node - } - - /// Returns a shared reference to the root [TrieNode] of the trie DB. - pub const fn root(&self) -> &TrieNode { - &self.root_node - } - - /// Returns the mapping of [Address]es to storage roots. - pub const fn storage_roots(&self) -> &HashMap<Address, TrieNode> { - &self.storage_roots - } - - /// Returns a reference to the current parent block header of the trie DB. - pub const fn parent_block_header(&self) -> &Sealed<Header> { - &self.parent_block_header - } - - /// Sets the parent block header of the trie DB. Should be called after a block has been - /// executed and the Header has been created. - /// - /// ## Takes - /// - `parent_block_header`: The parent block header of the current block. - pub fn set_parent_block_header(&mut self, parent_block_header: Sealed<Header>) { - self.parent_block_header = parent_block_header; - } - - /// Applies a [BundleState] changeset to the [TrieNode] and recomputes the state root hash. - /// - /// ## Takes - /// - `bundle`: The [BundleState] changeset to apply to the trie DB. - /// - /// ## Returns - /// - `Ok(B256)`: The new state root hash of the trie DB. - /// - `Err(_)`: If the state root hash could not be computed. 
- pub fn state_root(&mut self, bundle: &BundleState) -> TrieDBResult<B256> { - debug!(target: "client_executor", "Recomputing state root"); - - // Update the accounts in the trie with the changeset. - self.update_accounts(bundle)?; - - // Recompute the root hash of the trie. - let root = self.root_node.blind(); - - debug!( - target: "client_executor", - "Recomputed state root: {root}", - ); - - // Extract the new state root from the root node. - Ok(root) - } - - /// Fetches the [TrieAccount] of an account from the trie DB. - /// - /// ## Takes - /// - `address`: The address of the account. - /// - /// ## Returns - /// - `Ok(Some(TrieAccount))`: The [TrieAccount] of the account. - /// - `Ok(None)`: If the account does not exist in the trie. - /// - `Err(_)`: If the account could not be fetched. - pub fn get_trie_account( - &mut self, - address: &Address, - block_number: u64, - ) -> TrieDBResult<Option<TrieAccount>> { - // Send a hint to the host to fetch the account proof. - self.hinter - .hint_account_proof(*address, block_number) - .map_err(|e| TrieDBError::Provider(e.to_string()))?; - - // Fetch the account from the trie. - let hashed_address_nibbles = Nibbles::unpack(keccak256(address.as_slice())); - let Some(trie_account_rlp) = self.root_node.open(&hashed_address_nibbles, &self.fetcher)? - else { - return Ok(None); - }; - - // Decode the trie account from the RLP bytes. - TrieAccount::decode(&mut trie_account_rlp.as_ref()) - .map_err(TrieNodeError::RLPError) - .map_err(Into::into) - .map(Some) - } - - /// Modifies the accounts in the storage trie with the given [BundleState] changeset. - /// - /// ## Takes - /// - `bundle`: The [BundleState] changeset to apply to the trie DB. - /// - /// ## Returns - /// - `Ok(())` if the accounts were successfully updated. - /// - `Err(_)` if the accounts could not be updated. 
- fn update_accounts(&mut self, bundle: &BundleState) -> TrieDBResult<()> { - // Sort the storage keys prior to applying the changeset, to ensure that the order of - // application is deterministic between runs. - let mut sorted_state = - bundle.state().iter().map(|(k, v)| (k, keccak256(*k), v)).collect::<Vec<_>>(); - sorted_state.sort_by_key(|(_, hashed_addr, _)| *hashed_addr); - - for (address, hashed_address, bundle_account) in sorted_state { - if bundle_account.status.is_not_modified() { - continue; - } - - // Compute the path to the account in the trie. - let account_path = Nibbles::unpack(hashed_address.as_slice()); - - // If the account was destroyed, delete it from the trie. - if bundle_account.was_destroyed() { - self.root_node.delete(&account_path, &self.fetcher, &self.hinter)?; - self.storage_roots.remove(address); - continue; - } - - let account_info = - bundle_account.account_info().ok_or(TrieDBError::MissingAccountInfo)?; - - let mut trie_account = TrieAccount { - balance: account_info.balance, - nonce: account_info.nonce, - code_hash: account_info.code_hash, - ..Default::default() - }; - - // Update the account's storage root - let acc_storage_root = self - .storage_roots - .entry(*address) - .or_insert_with(|| TrieNode::new_blinded(EMPTY_ROOT_HASH)); - - // Sort the hashed storage keys prior to applying the changeset, to ensure that the - // order of application is deterministic between runs. - let mut sorted_storage = bundle_account - .storage - .iter() - .map(|(k, v)| (keccak256(k.to_be_bytes::<32>()), v)) - .collect::<Vec<_>>(); - sorted_storage.sort_by_key(|(slot, _)| *slot); - - sorted_storage.into_iter().try_for_each(|(hashed_key, value)| { - Self::change_storage( - acc_storage_root, - hashed_key, - value, - &self.fetcher, - &self.hinter, - ) - })?; - - // Recompute the account storage root. - let root = acc_storage_root.blind(); - trie_account.storage_root = root; - - // RLP encode the trie account for insertion. 
- let mut account_buf = Vec::with_capacity(trie_account.length()); - trie_account.encode(&mut account_buf); - - // Insert or update the account in the trie. - self.root_node.insert(&account_path, account_buf.into(), &self.fetcher)?; - } - - Ok(()) - } - - /// Modifies a storage slot of an account in the Merkle Patricia Trie. - /// - /// ## Takes - /// - `storage_root`: The storage root of the account. - /// - `hashed_key`: The hashed storage slot key. - /// - `value`: The new value of the storage slot. - /// - `fetcher`: The trie node fetcher. - /// - `hinter`: The trie hinter. - /// - /// ## Returns - /// - `Ok(())` if the storage slot was successfully modified. - /// - `Err(_)` if the storage slot could not be modified. - fn change_storage( - storage_root: &mut TrieNode, - hashed_key: B256, - value: &StorageSlot, - fetcher: &F, - hinter: &H, - ) -> TrieDBResult<()> { - if !value.is_changed() { - return Ok(()); - } - - // RLP encode the storage slot value. - let mut rlp_buf = Vec::with_capacity(value.present_value.length()); - value.present_value.encode(&mut rlp_buf); - - // Insert or update the storage slot in the trie. - let hashed_slot_key = Nibbles::unpack(hashed_key.as_slice()); - if value.present_value.is_zero() { - // If the storage slot is being set to zero, prune it from the trie. - storage_root.delete(&hashed_slot_key, fetcher, hinter)?; - } else { - // Otherwise, update the storage slot. - storage_root.insert(&hashed_slot_key, rlp_buf.into(), fetcher)?; - } - - Ok(()) - } -} - -impl<F, H> Database for TrieDB<F, H> -where - F: TrieDBProvider, - H: TrieHinter, -{ - type Error = TrieDBError; - - fn basic(&mut self, address: Address) -> Result<Option<AccountInfo>, Self::Error> { - // Fetch the account from the trie. - let Some(trie_account) = - self.get_trie_account(&address, self.parent_block_header.number)? - else { - // If the account does not exist in the trie, return `Ok(None)`. 
- return Ok(None); - }; - - // Insert the account's storage root into the cache. - self.storage_roots.insert(address, TrieNode::new_blinded(trie_account.storage_root)); - - // Return a partial DB account. The storage and code are not loaded out-right, and are - // loaded optimistically in the `Database` + `DatabaseRef` trait implementations. - Ok(Some(AccountInfo { - balance: trie_account.balance, - nonce: trie_account.nonce, - code_hash: trie_account.code_hash, - code: None, - })) - } - - fn code_by_hash(&mut self, code_hash: B256) -> Result<Bytecode, Self::Error> { - self.fetcher - .bytecode_by_hash(code_hash) - .map(Bytecode::new_raw) - .map_err(|e| TrieDBError::Provider(e.to_string())) - } - - fn storage(&mut self, address: Address, index: U256) -> Result<U256, Self::Error> { - // Send a hint to the host to fetch the storage proof. - self.hinter - .hint_storage_proof(address, index, self.parent_block_header.number) - .map_err(|e| TrieDBError::Provider(e.to_string()))?; - - // Fetch the account's storage root from the cache. If storage is being accessed, the - // account should have been loaded into the cache by the `basic` method. If the account was - // non-existing, the storage root will not be present. - match self.storage_roots.get_mut(&address) { - None => { - // If the storage root for the account does not exist, return zero. - Ok(U256::ZERO) - } - Some(storage_root) => { - // Fetch the storage slot from the trie. - let hashed_slot_key = keccak256(index.to_be_bytes::<32>().as_slice()); - match storage_root.open(&Nibbles::unpack(hashed_slot_key), &self.fetcher)? { - Some(slot_value) => { - // Decode the storage slot value. - let int_slot = U256::decode(&mut slot_value.as_ref()) - .map_err(TrieNodeError::RLPError)?; - Ok(int_slot) - } - None => { - // If the storage slot does not exist, return zero. 
- Ok(U256::ZERO) - } - } - } - } - } - - fn block_hash(&mut self, block_number: u64) -> Result<B256, Self::Error> { - // Copy the current header - let mut header = self.parent_block_header.inner().clone(); - - // Check if the block number is in range. If not, we can fail early. - if block_number > header.number || - header.number.saturating_sub(block_number) > BLOCK_HASH_HISTORY - { - return Ok(B256::default()); - } - - // Walk back the block headers to the desired block number. - while header.number > block_number { - header = self - .fetcher - .header_by_hash(header.parent_hash) - .map_err(|e| TrieDBError::Provider(e.to_string()))?; - } - - Ok(header.hash_slow()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::Sealable; - use alloy_primitives::b256; - use kona_mpt::NoopTrieHinter; - - fn new_test_db() -> TrieDB<NoopTrieDBProvider, NoopTrieHinter> { - TrieDB::new(Header::default().seal_slow(), NoopTrieDBProvider, NoopTrieHinter) - } - - #[test] - fn test_trie_db_take_root_node() { - let db = new_test_db(); - let root_node = db.take_root_node(); - assert_eq!(root_node.blind(), EMPTY_ROOT_HASH); - } - - #[test] - fn test_trie_db_root_node_ref() { - let db = new_test_db(); - let root_node = db.root(); - assert_eq!(root_node.blind(), EMPTY_ROOT_HASH); - } - - #[test] - fn test_trie_db_storage_roots() { - let db = new_test_db(); - let storage_roots = db.storage_roots(); - assert!(storage_roots.is_empty()); - } - - #[test] - fn test_block_hash_above_range() { - let mut db = new_test_db(); - db.parent_block_header = Header { number: 10, ..Default::default() }.seal_slow(); - let block_number = 11; - let block_hash = db.block_hash(block_number).unwrap(); - assert_eq!(block_hash, B256::default()); - } - - #[test] - fn test_block_hash_below_range() { - let mut db = new_test_db(); - db.parent_block_header = - Header { number: BLOCK_HASH_HISTORY + 10, ..Default::default() }.seal_slow(); - let block_number = 0; - let block_hash = 
db.block_hash(block_number).unwrap(); - assert_eq!(block_hash, B256::default()); - } - - #[test] - fn test_block_hash_provider_missing_hash() { - let mut db = new_test_db(); - db.parent_block_header = Header { number: 10, ..Default::default() }.seal_slow(); - let block_number = 5; - let block_hash = db.block_hash(block_number).unwrap(); - assert_eq!( - block_hash, - b256!("78dec18c6d7da925bbe773c315653cdc70f6444ed6c1de9ac30bdb36cff74c3b") - ); - } -} diff --git a/kona/crates/proof/executor/src/db/traits.rs b/kona/crates/proof/executor/src/db/traits.rs deleted file mode 100644 index 99368547c8e..00000000000 --- a/kona/crates/proof/executor/src/db/traits.rs +++ /dev/null @@ -1,57 +0,0 @@ -//! Contains the [TrieDBProvider] trait for fetching EVM bytecode hash preimages as well as [Header] -//! preimages. - -use alloc::string::String; -use alloy_consensus::Header; -use alloy_primitives::{B256, Bytes}; -use kona_mpt::{TrieNode, TrieProvider}; - -/// The [TrieDBProvider] trait defines the synchronous interface for fetching EVM bytecode hash -/// preimages as well as [Header] preimages. -pub trait TrieDBProvider: TrieProvider { - /// Fetches the preimage of the bytecode hash provided. - /// - /// ## Takes - /// - `hash`: The hash of the bytecode. - /// - /// ## Returns - /// - Ok(Bytes): The bytecode of the contract. - /// - Err(Self::Error): If the bytecode hash could not be fetched. - /// - /// [TrieDB]: crate::TrieDB - fn bytecode_by_hash(&self, code_hash: B256) -> Result<Bytes, Self::Error>; - - /// Fetches the preimage of [Header] hash provided. - /// - /// ## Takes - /// - `hash`: The hash of the RLP-encoded [Header]. - /// - /// ## Returns - /// - Ok(Bytes): The [Header]. - /// - Err(Self::Error): If the [Header] could not be fetched. - /// - /// [TrieDB]: crate::TrieDB - fn header_by_hash(&self, hash: B256) -> Result<Header, Self::Error>; -} - -/// The default, no-op implementation of the [TrieDBProvider] trait, used for testing. 
-#[derive(Debug, Clone, Copy)] -pub struct NoopTrieDBProvider; - -impl TrieProvider for NoopTrieDBProvider { - type Error = String; - - fn trie_node_by_hash(&self, _key: B256) -> Result<TrieNode, Self::Error> { - Ok(TrieNode::Empty) - } -} - -impl TrieDBProvider for NoopTrieDBProvider { - fn bytecode_by_hash(&self, _code_hash: B256) -> Result<Bytes, Self::Error> { - Ok(Bytes::default()) - } - - fn header_by_hash(&self, _hash: B256) -> Result<Header, Self::Error> { - Ok(Header::default()) - } -} diff --git a/kona/crates/proof/executor/src/lib.rs b/kona/crates/proof/executor/src/lib.rs deleted file mode 100644 index d9221974593..00000000000 --- a/kona/crates/proof/executor/src/lib.rs +++ /dev/null @@ -1,29 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(any(test, feature = "test-utils")), no_std)] - -extern crate alloc; - -#[macro_use] -extern crate tracing; - -mod db; -pub use db::{NoopTrieDBProvider, TrieDB, TrieDBProvider}; - -mod builder; -pub use builder::{BlockBuildingOutcome, StatelessL2Builder, compute_receipts_root}; - -mod errors; -pub use errors::{ - Eip1559ValidationError, ExecutorError, ExecutorResult, TrieDBError, TrieDBResult, -}; - -pub(crate) mod util; - -#[cfg(any(test, feature = "test-utils"))] -pub mod test_utils; diff --git a/kona/crates/proof/executor/src/test_utils.rs b/kona/crates/proof/executor/src/test_utils.rs deleted file mode 100644 index 0669392ab5a..00000000000 --- a/kona/crates/proof/executor/src/test_utils.rs +++ /dev/null @@ -1,372 +0,0 @@ -//! Test utilities for the executor. 
- -use crate::{StatelessL2Builder, TrieDBProvider}; -use alloy_consensus::Header; -use alloy_op_evm::OpEvmFactory; -use alloy_primitives::{B256, Bytes, Sealable}; -use alloy_provider::{Provider, RootProvider, network::primitives::BlockTransactions}; -use alloy_rlp::Decodable; -use alloy_rpc_client::RpcClient; -use alloy_rpc_types_engine::PayloadAttributes; -use alloy_transport_http::{Client, Http}; -use kona_genesis::RollupConfig; -use kona_mpt::{NoopTrieHinter, TrieNode, TrieProvider}; -use kona_registry::ROLLUP_CONFIGS; -use op_alloy_rpc_types_engine::OpPayloadAttributes; -use rocksdb::{DB, Options}; -use serde::{Deserialize, Serialize}; -use std::{path::PathBuf, sync::Arc}; -use tokio::{fs, runtime::Handle, sync::Mutex}; - -/// Executes a [ExecutorTestFixture] stored at the passed `fixture_path` and asserts that the -/// produced block hash matches the expected block hash. -pub async fn run_test_fixture(fixture_path: PathBuf) { - // First, untar the fixture. - let fixture_dir = tempfile::tempdir().expect("Failed to create temporary directory"); - tokio::process::Command::new("tar") - .arg("-xvf") - .arg(fixture_path.as_path()) - .arg("-C") - .arg(fixture_dir.path()) - .arg("--strip-components=1") - .output() - .await - .expect("Failed to untar fixture"); - - let mut options = Options::default(); - options.set_compression_type(rocksdb::DBCompressionType::Snappy); - options.create_if_missing(true); - let kv_store = DB::open(&options, fixture_dir.path().join("kv")) - .unwrap_or_else(|e| panic!("Failed to open database at {fixture_dir:?}: {e}")); - let provider = DiskTrieNodeProvider::new(kv_store); - let fixture: ExecutorTestFixture = - serde_json::from_slice(&fs::read(fixture_dir.path().join("fixture.json")).await.unwrap()) - .expect("Failed to deserialize fixture"); - - let mut executor = StatelessL2Builder::new( - &fixture.rollup_config, - OpEvmFactory::default(), - provider, - NoopTrieHinter, - fixture.parent_header.seal_slow(), - ); - - let outcome = 
executor.build_block(fixture.executing_payload).unwrap(); - - assert_eq!( - outcome.header.hash(), - fixture.expected_block_hash, - "Produced header does not match the expected header" - ); -} - -/// The test fixture format for the [`StatelessL2Builder`]. -#[derive(Debug, Serialize, Deserialize)] -pub struct ExecutorTestFixture { - /// The rollup configuration for the executing chain. - pub rollup_config: RollupConfig, - /// The parent block header. - pub parent_header: Header, - /// The executing payload attributes. - pub executing_payload: OpPayloadAttributes, - /// The expected block hash - pub expected_block_hash: B256, -} - -/// A test fixture creator for the [`StatelessL2Builder`]. -#[derive(Debug)] -pub struct ExecutorTestFixtureCreator { - /// The RPC provider for the L2 execution layer. - pub provider: RootProvider, - /// The block number to create the test fixture for. - pub block_number: u64, - /// The key value store for the test fixture. - pub kv_store: Arc<Mutex<rocksdb::DB>>, - /// The data directory for the test fixture. - pub data_dir: PathBuf, -} - -impl ExecutorTestFixtureCreator { - /// Creates a new [`ExecutorTestFixtureCreator`] with the given parameters. 
- pub fn new(provider_url: &str, block_number: u64, base_fixture_directory: PathBuf) -> Self { - let base = base_fixture_directory.join(format!("block-{block_number}")); - - let url = provider_url.parse().expect("Invalid provider URL"); - let http = Http::<Client>::new(url); - let provider = RootProvider::new(RpcClient::new(http, false)); - - let mut options = Options::default(); - options.set_compression_type(rocksdb::DBCompressionType::Snappy); - options.create_if_missing(true); - let db = DB::open(&options, base.join("kv").as_path()) - .unwrap_or_else(|e| panic!("Failed to open database at {base:?}: {e}")); - - Self { provider, block_number, kv_store: Arc::new(Mutex::new(db)), data_dir: base } - } -} - -impl ExecutorTestFixtureCreator { - /// Create a static test fixture with the configuration provided. - pub async fn create_static_fixture(self) { - let chain_id = self.provider.get_chain_id().await.expect("Failed to get chain ID"); - let rollup_config = ROLLUP_CONFIGS.get(&chain_id).expect("Rollup config not found"); - - let executing_block = self - .provider - .get_block_by_number(self.block_number.into()) - .await - .expect("Failed to get parent block") - .expect("Block not found"); - let parent_block = self - .provider - .get_block_by_number((self.block_number - 1).into()) - .await - .expect("Failed to get parent block") - .expect("Block not found"); - - let executing_header = executing_block.header; - let parent_header = parent_block.header.inner.seal_slow(); - - let encoded_executing_transactions = match executing_block.transactions { - BlockTransactions::Hashes(transactions) => { - let mut encoded_transactions = Vec::with_capacity(transactions.len()); - for tx_hash in transactions { - let tx = self - .provider - .client() - .request::<&[B256; 1], Bytes>("debug_getRawTransaction", &[tx_hash]) - .await - .expect("Block not found"); - encoded_transactions.push(tx); - } - encoded_transactions - } - _ => panic!("Only BlockTransactions::Hashes are supported."), 
- }; - - let payload_attrs = OpPayloadAttributes { - payload_attributes: PayloadAttributes { - timestamp: executing_header.timestamp, - parent_beacon_block_root: executing_header.parent_beacon_block_root, - prev_randao: executing_header.mix_hash, - withdrawals: Default::default(), - suggested_fee_recipient: executing_header.beneficiary, - }, - gas_limit: Some(executing_header.gas_limit), - transactions: Some(encoded_executing_transactions), - no_tx_pool: None, - eip_1559_params: rollup_config.is_holocene_active(executing_header.timestamp).then( - || { - executing_header.extra_data[1..9] - .try_into() - .expect("Invalid header format for Holocene") - }, - ), - min_base_fee: rollup_config.is_jovian_active(executing_header.timestamp).then(|| { - // The min base fee is the bytes 9-17 of the extra data. - executing_header.extra_data[9..17] - .try_into() - .map(u64::from_be_bytes) - .expect("Invalid header format for Jovian") - }), - }; - - let fixture_path = self.data_dir.join("fixture.json"); - let fixture = ExecutorTestFixture { - rollup_config: rollup_config.clone(), - parent_header: parent_header.inner().clone(), - executing_payload: payload_attrs.clone(), - expected_block_hash: executing_header.hash_slow(), - }; - - let mut executor = StatelessL2Builder::new( - rollup_config, - OpEvmFactory::default(), - self, - NoopTrieHinter, - parent_header, - ); - let outcome = executor.build_block(payload_attrs).expect("Failed to execute block"); - - assert_eq!( - outcome.header.inner(), - &executing_header.inner, - "Produced header does not match the expected header" - ); - fs::write(fixture_path.as_path(), serde_json::to_vec(&fixture).unwrap()).await.unwrap(); - - // Tar the fixture. 
- let data_dir = fixture_path.parent().unwrap(); - tokio::process::Command::new("tar") - .arg("-czf") - .arg(data_dir.with_extension("tar.gz").file_name().unwrap()) - .arg(data_dir.file_name().unwrap()) - .current_dir(data_dir.parent().unwrap()) - .output() - .await - .expect("Failed to tar fixture"); - - // Remove the leftover directory. - fs::remove_dir_all(data_dir).await.expect("Failed to remove temporary directory"); - } -} - -impl TrieProvider for ExecutorTestFixtureCreator { - type Error = TestTrieNodeProviderError; - - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, Self::Error> { - // Fetch the preimage from the L2 chain provider. - let preimage: Bytes = tokio::task::block_in_place(move || { - Handle::current().block_on(async { - let preimage: Bytes = self - .provider - .client() - .request("debug_dbGet", &[key]) - .await - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?; - - self.kv_store - .lock() - .await - .put(key, preimage.clone()) - .map_err(|_| TestTrieNodeProviderError::KVStore)?; - - Ok(preimage) - }) - })?; - - // Decode the preimage into a trie node. - TrieNode::decode(&mut preimage.as_ref()).map_err(TestTrieNodeProviderError::Rlp) - } -} - -impl TrieDBProvider for ExecutorTestFixtureCreator { - fn bytecode_by_hash(&self, hash: B256) -> Result<Bytes, Self::Error> { - // geth hashdb scheme code hash key prefix - const CODE_PREFIX: u8 = b'c'; - - // Fetch the preimage from the L2 chain provider. - let preimage: Bytes = tokio::task::block_in_place(move || { - Handle::current().block_on(async { - // Attempt to fetch the code from the L2 chain provider. - let code_hash = [&[CODE_PREFIX], hash.as_slice()].concat(); - let code = self - .provider - .client() - .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_hash.into()]) - .await; - - // Check if the first attempt to fetch the code failed. If it did, try fetching the - // code hash preimage without the geth hashdb scheme prefix. 
- let code = match code { - Ok(code) => code, - Err(_) => self - .provider - .client() - .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) - .await - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?, - }; - - self.kv_store - .lock() - .await - .put(hash, code.clone()) - .map_err(|_| TestTrieNodeProviderError::KVStore)?; - - Ok(code) - }) - })?; - - Ok(preimage) - } - - fn header_by_hash(&self, hash: B256) -> Result<Header, Self::Error> { - let encoded_header: Bytes = tokio::task::block_in_place(move || { - Handle::current().block_on(async { - let preimage: Bytes = self - .provider - .client() - .request("debug_getRawHeader", &[hash]) - .await - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?; - - self.kv_store - .lock() - .await - .put(hash, preimage.clone()) - .map_err(|_| TestTrieNodeProviderError::KVStore)?; - - Ok(preimage) - }) - })?; - - // Decode the Header. - Header::decode(&mut encoded_header.as_ref()).map_err(TestTrieNodeProviderError::Rlp) - } -} - -/// A simple [`TrieDBProvider`] that reads data from a disk-based key-value store. -#[derive(Debug)] -pub struct DiskTrieNodeProvider { - kv_store: DB, -} - -impl DiskTrieNodeProvider { - /// Creates a new [`DiskTrieNodeProvider`] with the given [`rocksdb`] K/V store. - pub const fn new(kv_store: DB) -> Self { - Self { kv_store } - } -} - -impl TrieProvider for DiskTrieNodeProvider { - type Error = TestTrieNodeProviderError; - - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, Self::Error> { - TrieNode::decode( - &mut self - .kv_store - .get(key) - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? - .ok_or(TestTrieNodeProviderError::PreimageNotFound)? - .as_slice(), - ) - .map_err(TestTrieNodeProviderError::Rlp) - } -} - -impl TrieDBProvider for DiskTrieNodeProvider { - fn bytecode_by_hash(&self, code_hash: B256) -> Result<Bytes, Self::Error> { - self.kv_store - .get(code_hash) - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? 
- .map(Bytes::from) - .ok_or(TestTrieNodeProviderError::PreimageNotFound) - } - - fn header_by_hash(&self, hash: B256) -> Result<Header, Self::Error> { - Header::decode( - &mut self - .kv_store - .get(hash) - .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? - .ok_or(TestTrieNodeProviderError::PreimageNotFound)? - .as_slice(), - ) - .map_err(TestTrieNodeProviderError::Rlp) - } -} - -/// An error type for the [`DiskTrieNodeProvider`] and [`ExecutorTestFixtureCreator`]. -#[derive(Debug, thiserror::Error)] -pub enum TestTrieNodeProviderError { - /// The preimage was not found in the key-value store. - #[error("Preimage not found")] - PreimageNotFound, - /// Failed to decode the RLP-encoded data. - #[error("Failed to decode RLP: {0}")] - Rlp(alloy_rlp::Error), - /// Failed to write back to the key-value store. - #[error("Failed to write back to key value store")] - KVStore, -} diff --git a/kona/crates/proof/executor/src/util.rs b/kona/crates/proof/executor/src/util.rs deleted file mode 100644 index a0e86ad4298..00000000000 --- a/kona/crates/proof/executor/src/util.rs +++ /dev/null @@ -1,223 +0,0 @@ -//! Contains utilities for the L2 executor. - -use crate::{Eip1559ValidationError, ExecutorError, ExecutorResult}; -use alloy_consensus::{BlockHeader, Header}; -use alloy_eips::eip1559::BaseFeeParams; -use alloy_primitives::Bytes; -use kona_genesis::RollupConfig; -use op_alloy_consensus::{ - EIP1559ParamError, decode_holocene_extra_data, decode_jovian_extra_data, - encode_holocene_extra_data, encode_jovian_extra_data, -}; -use op_alloy_rpc_types_engine::OpPayloadAttributes; - -/// Parse Holocene [Header] extra data from the block header. -/// -/// ## Takes -/// - `extra_data`: The extra data field of the [Header]. -/// -/// ## Returns -/// - `Ok(BaseFeeParams)`: The EIP-1559 parameters. -/// - `Err(ExecutorError::InvalidExtraData)`: If the extra data is invalid. 
-pub(crate) fn decode_holocene_eip_1559_params_block_header( - header: &Header, -) -> ExecutorResult<BaseFeeParams> { - let (elasticity, denominator) = decode_holocene_extra_data(header.extra_data())?; - - // Check for potential division by zero. - // In the block header, the denominator is always non-zero. - // <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/holocene/exec-engine.md#eip-1559-parameters-in-block-header> - if denominator == 0 { - return Err(ExecutorError::InvalidExtraData(Eip1559ValidationError::ZeroDenominator)); - } - - Ok(BaseFeeParams { - elasticity_multiplier: elasticity.into(), - max_change_denominator: denominator.into(), - }) -} - -pub(crate) fn decode_jovian_eip_1559_params_block_header( - header: &Header, -) -> ExecutorResult<(BaseFeeParams, u64)> { - let (elasticity, denominator, min_base_fee) = decode_jovian_extra_data(header.extra_data())?; - - // Check for potential division by zero. - // In the block header, the denominator is always non-zero. - // <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/holocene/exec-engine.md#eip-1559-parameters-in-block-header> - if denominator == 0 { - return Err(ExecutorError::InvalidExtraData(Eip1559ValidationError::ZeroDenominator)); - } - - Ok(( - BaseFeeParams { - elasticity_multiplier: elasticity.into(), - max_change_denominator: denominator.into(), - }, - min_base_fee, - )) -} - -/// Encode Holocene [Header] extra data. -/// -/// ## Takes -/// - `config`: The [RollupConfig] for the chain. -/// - `attributes`: The [OpPayloadAttributes] for the block. -/// -/// ## Returns -/// - `Ok(data)`: The encoded extra data. -/// - `Err(ExecutorError::MissingEIP1559Params)`: If the EIP-1559 parameters are missing. 
-pub(crate) fn encode_holocene_eip_1559_params( - config: &RollupConfig, - attributes: &OpPayloadAttributes, -) -> ExecutorResult<Bytes> { - Ok(encode_holocene_extra_data( - attributes.eip_1559_params.ok_or(ExecutorError::MissingEIP1559Params)?, - config.chain_op_config.post_canyon_params(), - )?) -} - -/// Encode Jovian [Header] extra data. -/// -/// ## Takes -/// - `config`: The [RollupConfig] for the chain. -/// - `attributes`: The [OpPayloadAttributes] for the block. -/// -/// ## Returns -/// - `Ok(data)`: The encoded extra data. -/// - `Err(ExecutorError::MissingEIP1559Params)`: If the EIP-1559 parameters are missing. -pub(crate) fn encode_jovian_eip_1559_params( - config: &RollupConfig, - attributes: &OpPayloadAttributes, -) -> ExecutorResult<Bytes> { - Ok(encode_jovian_extra_data( - attributes.eip_1559_params.ok_or(ExecutorError::MissingEIP1559Params)?, - config.chain_op_config.post_canyon_params(), - attributes.min_base_fee.ok_or(ExecutorError::InvalidExtraData( - Eip1559ValidationError::Decode(EIP1559ParamError::MinBaseFeeNotSet), - ))?, - )?) 
-} - -#[cfg(test)] -mod test { - use super::decode_holocene_eip_1559_params_block_header; - use crate::util::{ - decode_jovian_eip_1559_params_block_header, encode_holocene_eip_1559_params, - }; - use alloy_consensus::Header; - use alloy_primitives::{B64, b64, bytes}; - use alloy_rpc_types_engine::PayloadAttributes; - use kona_genesis::{BaseFeeConfig, RollupConfig}; - use op_alloy_rpc_types_engine::OpPayloadAttributes; - - fn mock_payload(eip_1559_params: Option<B64>) -> OpPayloadAttributes { - OpPayloadAttributes { - payload_attributes: PayloadAttributes { - timestamp: 0, - prev_randao: Default::default(), - suggested_fee_recipient: Default::default(), - withdrawals: Default::default(), - parent_beacon_block_root: Default::default(), - }, - transactions: None, - no_tx_pool: None, - gas_limit: None, - eip_1559_params, - min_base_fee: None, - } - } - - #[test] - fn test_decode_holocene_eip_1559_params() { - let params = bytes!("00BEEFBABE0BADC0DE"); - let mock_header = Header { extra_data: params, ..Default::default() }; - let params = decode_holocene_eip_1559_params_block_header(&mock_header).unwrap(); - - assert_eq!(params.elasticity_multiplier, 0x0BAD_C0DE); - assert_eq!(params.max_change_denominator, 0xBEEF_BABE); - } - - #[test] - fn test_decode_jovian_eip_1559_params() { - let params = bytes!("01BEEFBABE0BADC0DE00000000DEADBEEF"); - let mock_header = Header { extra_data: params, ..Default::default() }; - let (params, base_fee) = decode_jovian_eip_1559_params_block_header(&mock_header).unwrap(); - - assert_eq!(params.elasticity_multiplier, 0x0BAD_C0DE); - assert_eq!(params.max_change_denominator, 0xBEEF_BABE); - assert_eq!(base_fee, 0xDEAD_BEEF); - } - - #[test] - fn test_decode_holocene_eip_1559_params_invalid_version() { - let params = bytes!("01BEEFBABE0BADC0DE"); - let mock_header = Header { extra_data: params, ..Default::default() }; - assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); - } - - #[test] - fn 
test_decode_holocene_eip_1559_params_invalid_denominator() { - let params = bytes!("00000000000BADC0DE"); - let mock_header = Header { extra_data: params, ..Default::default() }; - assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); - } - - #[test] - fn test_decode_holocene_eip_1559_params_invalid_length() { - let params = bytes!("00"); - let mock_header = Header { extra_data: params, ..Default::default() }; - assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); - } - - #[test] - fn test_encode_holocene_eip_1559_params_missing() { - let cfg = RollupConfig { - chain_op_config: BaseFeeConfig { - eip1559_denominator: 50, - eip1559_elasticity: 64, - eip1559_denominator_canyon: 250, - }, - ..Default::default() - }; - let attrs = mock_payload(None); - - assert!(encode_holocene_eip_1559_params(&cfg, &attrs).is_err()); - } - - #[test] - fn test_encode_holocene_eip_1559_params_default() { - let cfg = RollupConfig { - chain_op_config: BaseFeeConfig { - eip1559_denominator: 50, - eip1559_elasticity: 64, - eip1559_denominator_canyon: 250, - }, - ..Default::default() - }; - let attrs = mock_payload(Some(B64::ZERO)); - - assert_eq!( - encode_holocene_eip_1559_params(&cfg, &attrs).unwrap(), - bytes!("00000000fa00000040") - ); - } - - #[test] - fn test_encode_holocene_eip_1559_params() { - let cfg = RollupConfig { - chain_op_config: BaseFeeConfig { - eip1559_denominator: 50, - eip1559_elasticity: 64, - eip1559_denominator_canyon: 250, - }, - ..Default::default() - }; - let attrs = mock_payload(Some(b64!("0000004000000060"))); - - assert_eq!( - encode_holocene_eip_1559_params(&cfg, &attrs).unwrap(), - bytes!("000000004000000060") - ); - } -} diff --git a/kona/crates/proof/mpt/Cargo.toml b/kona/crates/proof/mpt/Cargo.toml deleted file mode 100644 index 757b54af4de..00000000000 --- a/kona/crates/proof/mpt/Cargo.toml +++ /dev/null @@ -1,53 +0,0 @@ -[package] -name = "kona-mpt" -description = "Utilities for interacting with and 
iterating through a merkle patricia trie" -version = "0.3.0" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# General -thiserror.workspace = true -serde = { workspace = true, optional = true, features = ["derive", "alloc"] } - -# Revm + Alloy -alloy-rlp.workspace = true -alloy-trie.workspace = true -alloy-primitives = { workspace = true, features = ["rlp"] } - -# Op-alloy -op-alloy-rpc-types-engine.workspace = true - -[dev-dependencies] -# Alloy -alloy-provider = { workspace = true, features = ["reqwest"] } -alloy-consensus.workspace = true -alloy-transport-http.workspace = true -alloy-rpc-types = { workspace = true, features = ["eth", "debug"] } - -# General -rand.workspace = true -reqwest.workspace = true -proptest.workspace = true -tokio = { workspace = true, features = ["full"] } -criterion = { workspace = true, features = ["html_reports"] } -pprof = { workspace = true, features = ["criterion", "flamegraph", "frame-pointer"] } - -[features] -default = [ "serde" ] -serde = [ - "alloy-primitives/serde", - "alloy-trie/serde", - "dep:serde", - "op-alloy-rpc-types-engine/serde", -] - -[[bench]] -name = "trie_node" -harness = false diff --git a/kona/crates/proof/mpt/README.md b/kona/crates/proof/mpt/README.md deleted file mode 100644 index 98aa19a2b2d..00000000000 --- a/kona/crates/proof/mpt/README.md +++ /dev/null @@ -1,18 +0,0 @@ -# `kona-mpt` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-mpt"><img src="https://img.shields.io/crates/v/kona-mpt.svg?label=kona-mpt&labelColor=2a2f35" alt="Kona MPT"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" 
alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -A recursive, in-memory implementation of Ethereum's hexary Merkle Patricia Trie (MPT), supporting: -- Retrieval -- Insertion -- Deletion -- Root Computation - - Trie Node RLP Encoding - -This implementation is intended to serve as a backend for a stateless executor of Ethereum blocks, like -the one in the [`kona-executor`](../executor) crate. Starting with a trie root, the `TrieNode` can be -unravelled to access, insert, or delete values. These operations are all backed by the `TrieProvider`, -which enables fetching the preimages of hashed trie nodes. diff --git a/kona/crates/proof/mpt/src/errors.rs b/kona/crates/proof/mpt/src/errors.rs deleted file mode 100644 index 53c66d54b85..00000000000 --- a/kona/crates/proof/mpt/src/errors.rs +++ /dev/null @@ -1,45 +0,0 @@ -//! Errors for the `kona-derive` crate. - -use alloc::string::String; -use thiserror::Error; - -/// A [Result] type alias where the error is [TrieNodeError]. -pub type TrieNodeResult<T> = Result<T, TrieNodeError>; - -/// An error type for [TrieNode] operations. -/// -/// [TrieNode]: crate::TrieNode -#[derive(Error, Debug, PartialEq, Eq)] -pub enum TrieNodeError { - /// Invalid trie node type encountered. - #[error("Invalid trie node type encountered")] - InvalidNodeType, - /// The path was too short to index. - #[error("Path too short")] - PathTooShort, - /// Failed to decode trie node. - #[error("Failed to decode trie node: {0}")] - RLPError(alloy_rlp::Error), - /// Key does not exist in trie. - #[error("Key does not exist in trie.")] - KeyNotFound, - /// Trie node is not a leaf node. - #[error("Trie provider error: {0}")] - Provider(String), -} - -/// A [Result] type alias where the error is [OrderedListWalkerError]. 
-pub type OrderedListWalkerResult<T> = Result<T, OrderedListWalkerError>; - -/// An error type for [OrderedListWalker] operations. -/// -/// [OrderedListWalker]: crate::OrderedListWalker -#[derive(Error, Debug, PartialEq, Eq)] -pub enum OrderedListWalkerError { - /// Iterator has already been hydrated, and cannot be re-hydrated until it is exhausted. - #[error("Iterator has already been hydrated, and cannot be re-hydrated until it is exhausted")] - AlreadyHydrated, - /// Trie node error. - #[error("{0}")] - TrieNode(#[from] TrieNodeError), -} diff --git a/kona/crates/proof/mpt/src/lib.rs b/kona/crates/proof/mpt/src/lib.rs deleted file mode 100644 index 61552e733c8..00000000000 --- a/kona/crates/proof/mpt/src/lib.rs +++ /dev/null @@ -1,34 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(test), no_std)] - -extern crate alloc; - -mod errors; -pub use errors::{OrderedListWalkerError, OrderedListWalkerResult, TrieNodeError, TrieNodeResult}; - -mod traits; -pub use traits::{TrieHinter, TrieProvider}; - -mod node; -pub use node::TrieNode; - -mod list_walker; -pub use list_walker::OrderedListWalker; - -mod noop; -pub use noop::{NoopTrieHinter, NoopTrieProvider}; - -mod util; -pub use util::ordered_trie_with_encoder; - -// Re-export [alloy_trie::Nibbles]. -pub use alloy_trie::Nibbles; - -#[cfg(test)] -mod test_util; diff --git a/kona/crates/proof/mpt/src/node.rs b/kona/crates/proof/mpt/src/node.rs deleted file mode 100644 index 298e122326c..00000000000 --- a/kona/crates/proof/mpt/src/node.rs +++ /dev/null @@ -1,858 +0,0 @@ -//! This module contains the [TrieNode] type, which represents a node within a standard Merkle -//! Patricia Trie. 
- -use crate::{ - TrieHinter, TrieNodeError, TrieProvider, - errors::TrieNodeResult, - util::{rlp_list_element_length, unpack_path_to_nibbles}, -}; -use alloc::{boxed::Box, string::ToString, vec, vec::Vec}; -use alloy_primitives::{B256, Bytes, keccak256}; -use alloy_rlp::{Buf, Decodable, EMPTY_STRING_CODE, Encodable, Header, length_of_length}; -use alloy_trie::{EMPTY_ROOT_HASH, Nibbles}; - -/// The length of the branch list when RLP encoded -const BRANCH_LIST_LENGTH: usize = 17; - -/// The length of a leaf or extension node's RLP encoded list -const LEAF_OR_EXTENSION_LIST_LENGTH: usize = 2; - -/// The number of nibbles traversed in a branch node. -const BRANCH_NODE_NIBBLES: usize = 1; - -/// Prefix for even-nibbled extension node paths. -const PREFIX_EXTENSION_EVEN: u8 = 0; - -/// Prefix for odd-nibbled extension node paths. -const PREFIX_EXTENSION_ODD: u8 = 1; - -/// Prefix for even-nibbled leaf node paths. -const PREFIX_LEAF_EVEN: u8 = 2; - -/// Prefix for odd-nibbled leaf node paths. -const PREFIX_LEAF_ODD: u8 = 3; - -/// Nibble bit width. -const NIBBLE_WIDTH: usize = 4; - -/// A [TrieNode] is a node within a standard Ethereum Merkle Patricia Trie. In this implementation, -/// keys are expected to be fixed-size nibble sequences, and values are arbitrary byte sequences. -/// -/// The [TrieNode] has several variants: -/// - [TrieNode::Empty] represents an empty node. -/// - [TrieNode::Blinded] represents a node that has been blinded by a commitment. -/// - [TrieNode::Leaf] represents a 2-item node with the encoding `rlp([encoded_path, value])`. -/// - [TrieNode::Extension] represents a 2-item pointer node with the encoding `rlp([encoded_path, -/// key])`. -/// - [TrieNode::Branch] represents a node that refers to up to 16 child nodes with the encoding -/// `rlp([ v0, ..., v15, value ])`. -/// -/// In the Ethereum Merkle Patricia Trie, nodes longer than an encoded 32 byte string (33 total -/// bytes) are blinded with [keccak256] hashes. 
When a node is "opened", it is replaced with the -/// [TrieNode] that is decoded from to the preimage of the hash. -/// -/// The [alloy_rlp::Encodable] and [alloy_rlp::Decodable] traits are implemented for [TrieNode], -/// allowing for RLP encoding and decoding of the types for storage and retrieval. The -/// implementation of these traits will implicitly blind nodes that are longer than 32 bytes in -/// length when encoding. When decoding, the implementation will leave blinded nodes in place. -/// -/// ## SAFETY -/// As this implementation only supports uniform key sizes, the [TrieNode] data structure will fail -/// to behave correctly if confronted with keys of varying lengths. Namely, this is because it does -/// not support the `value` field in branch nodes, just like the Ethereum Merkle Patricia Trie. -#[derive(Debug, Clone, Eq, PartialEq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum TrieNode { - /// An empty [TrieNode] is represented as an [EMPTY_STRING_CODE] (0x80). - Empty, - /// A blinded node is a node that has been blinded by a [keccak256] commitment. - Blinded { - /// The commitment that blinds the node. - commitment: B256, - }, - /// A leaf node is a 2-item node with the encoding `rlp([encoded_path, value])` - Leaf { - /// The key of the leaf node - prefix: Nibbles, - /// The value of the leaf node - value: Bytes, - }, - /// An extension node is a 2-item pointer node with the encoding `rlp([encoded_path, key])` - Extension { - /// The path prefix of the extension - prefix: Nibbles, - /// The pointer to the child node - node: Box<TrieNode>, - }, - /// A branch node refers to up to 16 child nodes with the encoding - /// `rlp([ v0, ..., v15, value ])` - Branch { - /// The 16 child nodes and value of the branch. - stack: Vec<TrieNode>, - }, -} - -impl TrieNode { - /// Creates a new [TrieNode::Blinded] node. 
- /// - /// ## Takes - /// - `commitment` - The commitment that blinds the node - /// - /// ## Returns - /// - `Self` - The new blinded [TrieNode]. - pub const fn new_blinded(commitment: B256) -> Self { - Self::Blinded { commitment } - } - - /// Blinds the [TrieNode].. Alternatively, if the [TrieNode] is a [TrieNode::Blinded] node - /// already, its commitment is returned directly. - pub fn blind(&self) -> B256 { - match self { - Self::Blinded { commitment } => *commitment, - Self::Empty => EMPTY_ROOT_HASH, - _ => { - let mut rlp_buf = Vec::with_capacity(self.length()); - self.encode(&mut rlp_buf); - keccak256(rlp_buf) - } - } - } - - /// Unblinds the [TrieNode] if it is a [TrieNode::Blinded] node. - pub fn unblind<F: TrieProvider>(&mut self, fetcher: &F) -> TrieNodeResult<()> { - if let Self::Blinded { commitment } = self { - if *commitment == EMPTY_ROOT_HASH { - // If the commitment is the empty root hash, the node is empty, and we don't need to - // reach out to the fetcher. - *self = Self::Empty; - } else { - *self = fetcher - .trie_node_by_hash(*commitment) - .map_err(|e| TrieNodeError::Provider(e.to_string()))?; - } - } - Ok(()) - } - - /// Walks down the trie to a leaf value with the given key, if it exists. Preimages for blinded - /// nodes along the path are fetched using the `fetcher` function, and persisted in the inner - /// [TrieNode] elements. - /// - /// ## Takes - /// - `self` - The root trie node - /// - `path` - The nibbles representation of the path to the leaf node - /// - `fetcher` - The preimage fetcher for intermediate blinded nodes - /// - /// ## Returns - /// - `Err(_)` - Could not retrieve the node with the given key from the trie. - /// - `Ok(None)` - The node with the given key does not exist in the trie. 
- /// - `Ok(Some(_))` - The value of the node - pub fn open<'a, F: TrieProvider>( - &'a mut self, - path: &Nibbles, - fetcher: &F, - ) -> TrieNodeResult<Option<&'a mut Bytes>> { - match self { - Self::Branch { stack } => { - let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; - stack - .get_mut(branch_nibble) - .map(|node| node.open(&path.slice(BRANCH_NODE_NIBBLES..), fetcher)) - .unwrap_or(Ok(None)) - } - Self::Leaf { prefix, value } => Ok((path == prefix).then_some(value)), - Self::Extension { prefix, node } => { - if path.slice(..prefix.len()) == *prefix { - // Follow extension branch - node.unblind(fetcher)?; - node.open(&path.slice(prefix.len()..), fetcher) - } else { - Ok(None) - } - } - Self::Blinded { .. } => { - self.unblind(fetcher)?; - self.open(path, fetcher) - } - Self::Empty => Ok(None), - } - } - - /// Inserts a [TrieNode] at the given path into the trie rooted at Self. - /// - /// ## Takes - /// - `self` - The root trie node - /// - `path` - The nibbles representation of the path to the leaf node - /// - `node` - The node to insert at the given path - /// - `fetcher` - The preimage fetcher for intermediate blinded nodes - /// - /// ## Returns - /// - `Err(_)` - Could not insert the node at the given path in the trie. - /// - `Ok(())` - The node was successfully inserted at the given path. - pub fn insert<F: TrieProvider>( - &mut self, - path: &Nibbles, - value: Bytes, - fetcher: &F, - ) -> TrieNodeResult<()> { - match self { - Self::Empty => { - // If the trie node is null, insert the leaf node at the current path. - *self = Self::Leaf { prefix: *path, value }; - Ok(()) - } - Self::Leaf { prefix, value: leaf_value } => { - let shared_extension_nibbles = path.common_prefix_length(prefix); - - // If all nibbles are shared, update the leaf node with the new value. 
- if path == prefix { - *self = Self::Leaf { prefix: *prefix, value }; - return Ok(()); - } - - // Create a branch node stack containing the leaf node and the new value. - let mut stack = vec![Self::Empty; BRANCH_LIST_LENGTH]; - - // Insert the shortened extension into the branch stack. - let extension_nibble = - prefix.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? - as usize; - stack[extension_nibble] = Self::Leaf { - prefix: prefix.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), - value: leaf_value.clone(), - }; - - // Insert the new value into the branch stack. - let branch_nibble_new = - path.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? as usize; - stack[branch_nibble_new] = Self::Leaf { - prefix: path.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), - value, - }; - - // Replace the leaf node with the branch if no nibbles are shared, else create an - // extension. - if shared_extension_nibbles == 0 { - *self = Self::Branch { stack }; - } else { - let raw_ext_nibbles = path.slice(..shared_extension_nibbles); - *self = Self::Extension { - prefix: raw_ext_nibbles, - node: Box::new(Self::Branch { stack }), - }; - } - Ok(()) - } - Self::Extension { prefix, node } => { - let shared_extension_nibbles = path.common_prefix_length(prefix); - if shared_extension_nibbles == prefix.len() { - node.insert(&path.slice(shared_extension_nibbles..), value, fetcher)?; - return Ok(()); - } - - // Create a branch node stack containing the leaf node and the new value. - let mut stack = vec![Self::Empty; BRANCH_LIST_LENGTH]; - - // Insert the shortened extension into the branch stack. - let extension_nibble = - prefix.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? 
- as usize; - let new_prefix = prefix.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..); - stack[extension_nibble] = if new_prefix.is_empty() { - // In the case that the extension node no longer has a prefix, insert the node - // verbatim into the branch. - node.as_ref().clone() - } else { - Self::Extension { prefix: new_prefix, node: node.clone() } - }; - - // Insert the new value into the branch stack. - let branch_nibble_new = - path.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? as usize; - stack[branch_nibble_new] = Self::Leaf { - prefix: path.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), - value, - }; - - // Replace the extension node with the branch if no nibbles are shared, else create - // an extension. - if shared_extension_nibbles == 0 { - *self = Self::Branch { stack }; - } else { - let extension = path.slice(..shared_extension_nibbles); - *self = Self::Extension { - prefix: extension, - node: Box::new(Self::Branch { stack }), - }; - } - Ok(()) - } - Self::Branch { stack } => { - // Follow the branch node to the next node in the path. - let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; - stack[branch_nibble].insert(&path.slice(BRANCH_NODE_NIBBLES..), value, fetcher) - } - Self::Blinded { .. } => { - // If a blinded node is approached, reveal the node and continue the insertion - // recursion. - self.unblind(fetcher)?; - self.insert(path, value, fetcher) - } - } - } - - /// Deletes a node in the trie at the given path. - /// - /// ## Takes - /// - `self` - The root trie node - /// - `path` - The nibbles representation of the path to the leaf node - /// - /// ## Returns - /// - `Err(_)` - Could not delete the node at the given path in the trie. - /// - `Ok(())` - The node was successfully deleted at the given path. 
- pub fn delete<F: TrieProvider, H: TrieHinter>( - &mut self, - path: &Nibbles, - fetcher: &F, - hinter: &H, - ) -> TrieNodeResult<()> { - match self { - Self::Empty => Err(TrieNodeError::KeyNotFound), - Self::Leaf { prefix, .. } => { - if path == prefix { - *self = Self::Empty; - Ok(()) - } else { - Err(TrieNodeError::KeyNotFound) - } - } - Self::Extension { prefix, node } => { - let shared_nibbles = path.common_prefix_length(prefix); - if shared_nibbles < prefix.len() { - return Err(TrieNodeError::KeyNotFound); - } else if shared_nibbles == path.len() { - *self = Self::Empty; - return Ok(()); - } - - node.delete(&path.slice(prefix.len()..), fetcher, hinter)?; - - // Simplify extension if possible after the deletion - self.collapse_if_possible(fetcher, hinter) - } - Self::Branch { stack } => { - let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; - stack[branch_nibble].delete(&path.slice(BRANCH_NODE_NIBBLES..), fetcher, hinter)?; - - // Simplify the branch if possible after the deletion - self.collapse_if_possible(fetcher, hinter) - } - Self::Blinded { .. } => { - self.unblind(fetcher)?; - self.delete(path, fetcher, hinter) - } - } - } - - /// If applicable, collapses `self` into a more compact form. - /// - /// ## Takes - /// - `self` - The root trie node - /// - /// ## Returns - /// - `Ok(())` - The node was successfully collapsed - /// - `Err(_)` - Could not collapse the node - fn collapse_if_possible<F: TrieProvider, H: TrieHinter>( - &mut self, - fetcher: &F, - hinter: &H, - ) -> TrieNodeResult<()> { - match self { - Self::Extension { prefix, node } => match node.as_mut() { - Self::Extension { prefix: child_prefix, node: child_node } => { - // Double extensions are collapsed into a single extension. 
- let new_prefix = Nibbles::from_nibbles_unchecked( - [prefix.to_vec(), child_prefix.to_vec()].concat(), - ); - *self = Self::Extension { prefix: new_prefix, node: child_node.clone() }; - } - Self::Leaf { prefix: child_prefix, value: child_value } => { - // If the child node is a leaf, convert the extension into a leaf with the full - // path. - let new_prefix = Nibbles::from_nibbles_unchecked( - [prefix.to_vec(), child_prefix.to_vec()].concat(), - ); - *self = Self::Leaf { prefix: new_prefix, value: child_value.clone() }; - } - Self::Empty => { - // If the child node is empty, convert the extension into an empty node. - *self = Self::Empty; - } - _ => { - // If the child is a (blinded?) branch then no need for collapse - // because deletion did not collapse the (blinded?) branch - } - }, - Self::Branch { stack } => { - // Count non-empty children - let mut non_empty_children = stack - .iter_mut() - .enumerate() - .filter(|(_, node)| !matches!(node, Self::Empty)) - .collect::<Vec<_>>(); - - if non_empty_children.len() == 1 { - let (index, non_empty_node) = &mut non_empty_children[0]; - - // If only one non-empty child and no value, convert to extension or leaf - match non_empty_node { - Self::Leaf { prefix, value } => { - let new_prefix = Nibbles::from_nibbles_unchecked( - [&[*index as u8], prefix.to_vec().as_slice()].concat(), - ); - *self = Self::Leaf { prefix: new_prefix, value: value.clone() }; - } - Self::Extension { prefix, node } => { - let new_prefix = Nibbles::from_nibbles_unchecked( - [&[*index as u8], prefix.to_vec().as_slice()].concat(), - ); - *self = Self::Extension { prefix: new_prefix, node: node.clone() }; - } - Self::Branch { .. 
} => { - *self = Self::Extension { - prefix: Nibbles::from_nibbles_unchecked([*index as u8]), - node: Box::new(non_empty_node.clone()), - }; - } - Self::Blinded { commitment } => { - // In this special case, we need to send a hint to fetch the preimage of - // the blinded node, since it is outside of the paths that have been - // traversed so far. - hinter - .hint_trie_node(*commitment) - .map_err(|e| TrieNodeError::Provider(e.to_string()))?; - - non_empty_node.unblind(fetcher)?; - self.collapse_if_possible(fetcher, hinter)?; - } - _ => {} - }; - } - } - _ => {} - } - Ok(()) - } - - /// Attempts to convert a `path` and `value` into a [TrieNode], if they correspond to a - /// [TrieNode::Leaf] or [TrieNode::Extension]. - /// - /// **Note:** This function assumes that the passed reader has already consumed the RLP header - /// of the [TrieNode::Leaf] or [TrieNode::Extension] node. - fn try_decode_leaf_or_extension_payload(buf: &mut &[u8]) -> TrieNodeResult<Self> { - // Decode the path and value of the leaf or extension node. - let path = Bytes::decode(buf).map_err(TrieNodeError::RLPError)?; - let first_nibble = path[0] >> NIBBLE_WIDTH; - let first = match first_nibble { - PREFIX_EXTENSION_ODD | PREFIX_LEAF_ODD => Some(path[0] & 0x0F), - PREFIX_EXTENSION_EVEN | PREFIX_LEAF_EVEN => None, - _ => return Err(TrieNodeError::InvalidNodeType), - }; - - // Check the high-order nibble of the path to determine the type of node. 
- match first_nibble { - PREFIX_EXTENSION_EVEN | PREFIX_EXTENSION_ODD => { - // Extension node - let extension_node_value = Self::decode(buf).map_err(TrieNodeError::RLPError)?; - Ok(Self::Extension { - prefix: unpack_path_to_nibbles(first, path[1..].as_ref()), - node: Box::new(extension_node_value), - }) - } - PREFIX_LEAF_EVEN | PREFIX_LEAF_ODD => { - // Leaf node - let value = Bytes::decode(buf).map_err(TrieNodeError::RLPError)?; - Ok(Self::Leaf { prefix: unpack_path_to_nibbles(first, path[1..].as_ref()), value }) - } - _ => Err(TrieNodeError::InvalidNodeType), - } - } - - /// Returns the RLP payload length of the [TrieNode]. - pub(crate) fn payload_length(&self) -> usize { - match self { - Self::Empty => 0, - Self::Blinded { commitment } => commitment.len(), - Self::Leaf { prefix, value } => { - let mut encoded_key_len = prefix.len() / 2 + 1; - if encoded_key_len != 1 { - encoded_key_len += length_of_length(encoded_key_len); - } - encoded_key_len + value.length() - } - Self::Extension { prefix, node } => { - let mut encoded_key_len = prefix.len() / 2 + 1; - if encoded_key_len != 1 { - encoded_key_len += length_of_length(encoded_key_len); - } - encoded_key_len + node.blinded_length() - } - Self::Branch { stack } => { - // In branch nodes, if an element is longer than an encoded 32 byte string, it is - // blinded. Assuming we have an open trie node, we must re-hash the - // elements that are longer than an encoded 32 byte string - // in length. - stack.iter().fold(0, |mut acc, node| { - acc += node.blinded_length(); - acc - }) - } - } - } - - /// Returns the encoded length of the trie node, blinding it if it is longer than an encoded - /// [B256] string in length. - /// - /// ## Returns - /// - `usize` - The encoded length of the value, blinded if the raw encoded length is longer - /// than a [B256]. 
- fn blinded_length(&self) -> usize { - let encoded_len = self.length(); - if encoded_len >= B256::ZERO.len() { B256::ZERO.length() } else { encoded_len } - } -} - -impl Encodable for TrieNode { - fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { - let payload_length = self.payload_length(); - match self { - Self::Empty => out.put_u8(EMPTY_STRING_CODE), - Self::Blinded { commitment } => commitment.encode(out), - Self::Leaf { prefix, value } => { - // Encode the leaf node's header and key-value pair. - Header { list: true, payload_length }.encode(out); - alloy_trie::nodes::encode_path_leaf(prefix, true).as_slice().encode(out); - value.encode(out); - } - Self::Extension { prefix, node } => { - // Encode the extension node's header, prefix, and pointer node. - Header { list: true, payload_length }.encode(out); - alloy_trie::nodes::encode_path_leaf(prefix, false).as_slice().encode(out); - if node.length() >= B256::ZERO.len() { - let hash = node.blind(); - hash.encode(out); - } else { - node.encode(out); - } - } - Self::Branch { stack } => { - // In branch nodes, if an element is longer than 32 bytes in length, it is blinded. - // Assuming we have an open trie node, we must re-hash the elements - // that are longer than 32 bytes in length. - Header { list: true, payload_length }.encode(out); - stack.iter().for_each(|node| { - if node.length() >= B256::ZERO.len() { - let hash = node.blind(); - hash.encode(out); - } else { - node.encode(out); - } - }); - } - } - } - - fn length(&self) -> usize { - match self { - Self::Empty => 1, - Self::Blinded { commitment } => commitment.length(), - Self::Leaf { .. } => { - let payload_length = self.payload_length(); - Header { list: true, payload_length }.length() + payload_length - } - Self::Extension { .. } => { - let payload_length = self.payload_length(); - Header { list: true, payload_length }.length() + payload_length - } - Self::Branch { .. 
} => { - let payload_length = self.payload_length(); - Header { list: true, payload_length }.length() + payload_length - } - } - } -} - -impl Decodable for TrieNode { - /// Attempts to decode the [TrieNode]. - fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> { - // Peek at the header to determine the type of Trie node we're currently decoding. - let header = Header::decode(&mut (**buf).as_ref())?; - - if header.list { - // Peek at the RLP stream to determine the number of elements in the list. - let list_length = rlp_list_element_length(&mut (**buf).as_ref())?; - - match list_length { - BRANCH_LIST_LENGTH => { - let list = Vec::<Self>::decode(buf)?; - Ok(Self::Branch { stack: list }) - } - LEAF_OR_EXTENSION_LIST_LENGTH => { - // Advance the buffer to the start of the list payload. - buf.advance(header.length()); - // Decode the leaf or extension node's raw payload. - Self::try_decode_leaf_or_extension_payload(buf) - .map_err(|_| alloy_rlp::Error::UnexpectedList) - } - _ => Err(alloy_rlp::Error::UnexpectedLength), - } - } else { - match header.payload_length { - 0 => { - buf.advance(header.length()); - Ok(Self::Empty) - } - 32 => { - let commitment = B256::decode(buf)?; - Ok(Self::new_blinded(commitment)) - } - _ => Err(alloy_rlp::Error::UnexpectedLength), - } - } - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::{ - NoopTrieHinter, NoopTrieProvider, TrieNode, ordered_trie_with_encoder, - test_util::TrieNodeProvider, - }; - use alloc::{collections::BTreeMap, vec, vec::Vec}; - use alloy_primitives::{b256, bytes, hex, keccak256}; - use alloy_rlp::{Decodable, EMPTY_STRING_CODE, Encodable}; - use alloy_trie::{HashBuilder, Nibbles}; - use rand::prelude::IteratorRandom; - - #[test] - fn test_empty_blinded() { - let trie_node = TrieNode::Empty; - assert_eq!(trie_node.blind(), EMPTY_ROOT_HASH); - } - - #[test] - fn test_decode_branch() { - const BRANCH_RLP: [u8; 83] = hex!( - 
"f851a0eb08a66a94882454bec899d3e82952dcc918ba4b35a09a84acd98019aef4345080808080808080a05d87a81d9bbf5aee61a6bfeab3a5643347e2c751b36789d988a5b6b163d496518080808080808080" - ); - let expected = TrieNode::Branch { - stack: vec![ - TrieNode::new_blinded(b256!( - "eb08a66a94882454bec899d3e82952dcc918ba4b35a09a84acd98019aef43450" - )), - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::new_blinded(b256!( - "5d87a81d9bbf5aee61a6bfeab3a5643347e2c751b36789d988a5b6b163d49651" - )), - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - ], - }; - - let mut rlp_buf = Vec::with_capacity(expected.length()); - expected.encode(&mut rlp_buf); - assert_eq!(rlp_buf.len(), BRANCH_RLP.len()); - assert_eq!(expected.length(), BRANCH_RLP.len()); - - assert_eq!(expected, TrieNode::decode(&mut BRANCH_RLP.as_slice()).unwrap()); - assert_eq!(rlp_buf.as_slice(), &BRANCH_RLP[..]); - } - - #[test] - fn test_encode_decode_extension_open_short() { - const EXTENSION_RLP: [u8; 19] = hex!("d28300646fcd308b8a74657374207468726565"); - - let opened = TrieNode::Leaf { - prefix: Nibbles::from_nibbles([0x00]), - value: bytes!("8a74657374207468726565"), - }; - let expected = - TrieNode::Extension { prefix: Nibbles::unpack(bytes!("646f")), node: Box::new(opened) }; - - let mut rlp_buf = Vec::with_capacity(expected.length()); - expected.encode(&mut rlp_buf); - - assert_eq!(expected, TrieNode::decode(&mut EXTENSION_RLP.as_slice()).unwrap()); - } - - #[test] - fn test_encode_decode_extension_blinded_long() { - const EXTENSION_RLP: [u8; 38] = - hex!("e58300646fa0f3fe8b3c5b21d3e52860f1e4a5825a6100bb341069c1e88f4ebf6bd98de0c190"); - let mut rlp_buf = Vec::new(); - - let opened = - TrieNode::Leaf { prefix: Nibbles::from_nibbles([0x00]), value: [0xFF; 64].into() }; - opened.encode(&mut rlp_buf); - let blinded = 
TrieNode::new_blinded(keccak256(&rlp_buf)); - - rlp_buf.clear(); - let opened_extension = - TrieNode::Extension { prefix: Nibbles::unpack(bytes!("646f")), node: Box::new(opened) }; - opened_extension.encode(&mut rlp_buf); - - let expected = TrieNode::Extension { - prefix: Nibbles::unpack(bytes!("646f")), - node: Box::new(blinded), - }; - assert_eq!(expected, TrieNode::decode(&mut EXTENSION_RLP.as_slice()).unwrap()); - } - - #[test] - fn test_decode_leaf() { - const LEAF_RLP: [u8; 11] = hex!("ca8320646f8576657262FF"); - let expected = - TrieNode::Leaf { prefix: Nibbles::unpack(bytes!("646f")), value: bytes!("76657262FF") }; - assert_eq!(expected, TrieNode::decode(&mut LEAF_RLP.as_slice()).unwrap()); - } - - #[test] - fn test_retrieve_from_trie_simple() { - const VALUES: [&str; 5] = ["yeah", "dog", ", ", "laminar", "flow"]; - - let mut trie = ordered_trie_with_encoder(&VALUES, |v, buf| { - let mut encoded_value = Vec::with_capacity(v.length()); - v.encode(&mut encoded_value); - TrieNode::new_blinded(keccak256(encoded_value)).encode(buf); - }); - let root = trie.root(); - - let preimages = trie.take_proof_nodes().into_inner().into_iter().fold( - BTreeMap::default(), - |mut acc, (_, value)| { - acc.insert(keccak256(value.as_ref()), value); - acc - }, - ); - let fetcher = TrieNodeProvider::new(preimages); - - let mut root_node = fetcher.trie_node_by_hash(root).unwrap(); - for (i, value) in VALUES.iter().enumerate() { - let path_nibbles = Nibbles::unpack([if i == 0 { EMPTY_STRING_CODE } else { i as u8 }]); - let v = root_node.open(&path_nibbles, &fetcher).unwrap().unwrap(); - - let mut encoded_value = Vec::with_capacity(value.length()); - value.encode(&mut encoded_value); - let mut encoded_node = Vec::new(); - TrieNode::new_blinded(keccak256(&encoded_value)).encode(&mut encoded_node); - - assert_eq!(v, encoded_node.as_slice()); - } - - let commitment = root_node.blind(); - assert_eq!(commitment, root); - } - - #[test] - fn test_insert_static() { - let mut node = 
TrieNode::Empty; - let noop_fetcher = NoopTrieProvider; - node.insert(&Nibbles::unpack(hex!("012345")), bytes!("01"), &noop_fetcher).unwrap(); - node.insert(&Nibbles::unpack(hex!("012346")), bytes!("02"), &noop_fetcher).unwrap(); - - let expected = TrieNode::Extension { - prefix: Nibbles::from_nibbles([0, 1, 2, 3, 4]), - node: Box::new(TrieNode::Branch { - stack: vec![ - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Leaf { prefix: Nibbles::default(), value: bytes!("01") }, - TrieNode::Leaf { prefix: Nibbles::default(), value: bytes!("02") }, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - TrieNode::Empty, - ], - }), - }; - - assert_eq!(node, expected); - } - - proptest::proptest! { - /// Differential test for inserting an arbitrary number of keys into an empty `TrieNode` / `HashBuilder`. - #[test] - fn diff_hash_builder_insert(mut keys in proptest::collection::vec(proptest::prelude::any::<[u8; 32]>(), 1..4096)) { - // Ensure the keys are sorted; `HashBuilder` expects sorted keys.` - keys.sort(); - - let mut hb = HashBuilder::default(); - let mut node = TrieNode::Empty; - - for key in keys { - hb.add_leaf(Nibbles::unpack(key), key.as_ref()); - node.insert(&Nibbles::unpack(key), key.into(), &NoopTrieProvider).unwrap(); - } - - assert_eq!(node.blind(), hb.root()); - } - - /// Differential test for deleting an arbitrary number of keys from a `TrieNode` / `HashBuilder`. 
- #[test] - fn diff_hash_builder_delete(mut keys in proptest::collection::vec(proptest::prelude::any::<[u8; 32]>(), 1..4096)) { - // Ensure the keys are sorted; `HashBuilder` expects sorted keys.` - keys.sort(); - - let mut hb = HashBuilder::default(); - let mut node = TrieNode::Empty; - - let mut rng = rand::rng(); - let deleted_keys = - keys.clone().into_iter().choose_multiple(&mut rng, 5.min(keys.len())); - - // Insert the keys into the `HashBuilder` and `TrieNode`. - for key in keys { - // Don't add any keys that are to be deleted from the trie node to the `HashBuilder`. - if !deleted_keys.contains(&key) { - hb.add_leaf(Nibbles::unpack(key), key.as_ref()); - } - node.insert(&Nibbles::unpack(key), key.into(), &NoopTrieProvider).unwrap(); - } - - // Delete the keys that were randomly selected from the trie node. - for deleted_key in deleted_keys { - node.delete(&Nibbles::unpack(deleted_key), &NoopTrieProvider, &NoopTrieHinter) - .unwrap(); - } - - // Blind manually, since the single node remaining may be a leaf or empty node, and always must be blinded. - let mut rlp_buf = Vec::with_capacity(node.length()); - node.encode(&mut rlp_buf); - let trie_root = keccak256(rlp_buf); - - assert_eq!(trie_root, hb.root()); - } - } -} diff --git a/kona/crates/proof/mpt/src/traits.rs b/kona/crates/proof/mpt/src/traits.rs deleted file mode 100644 index acf33421551..00000000000 --- a/kona/crates/proof/mpt/src/traits.rs +++ /dev/null @@ -1,84 +0,0 @@ -//! Contains the [TrieProvider] trait for fetching trie node preimages, contract bytecode, and -//! headers. - -use crate::TrieNode; -use alloy_primitives::{Address, B256, U256}; -use core::fmt::Display; -use op_alloy_rpc_types_engine::OpPayloadAttributes; - -/// The [TrieProvider] trait defines the synchronous interface for fetching trie node preimages. -pub trait TrieProvider { - /// The error type for fetching trie node preimages. - type Error: Display; - - /// Fetches the preimage for the given trie node hash. 
- /// - /// ## Takes - /// - `key`: The key of the trie node to fetch. - /// - /// ## Returns - /// - Ok(TrieNode): The trie node preimage. - /// - Err(Self::Error): If the trie node preimage could not be fetched. - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, Self::Error>; -} - -/// The [TrieHinter] trait defines the synchronous interface for hinting the host to fetch trie -/// node preimages. -pub trait TrieHinter { - /// The error type for hinting trie node preimages. - type Error: Display; - - /// Hints the host to fetch the trie node preimage by hash. - /// - /// ## Takes - /// - `hash`: The hash of the trie node to hint. - /// - /// ## Returns - /// - Ok(()): If the hint was successful. - fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error>; - - /// Hints the host to fetch the trie node preimages on the path to the given address. - /// - /// ## Takes - /// - `address` - The address of the contract whose trie node preimages are to be fetched. - /// - `block_number` - The block number at which the trie node preimages are to be fetched. - /// - /// ## Returns - /// - Ok(()): If the hint was successful. - /// - Err(Self::Error): If the hint was unsuccessful. - fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error>; - - /// Hints the host to fetch the trie node preimages on the path to the storage slot within the - /// given account's storage trie. - /// - /// ## Takes - /// - `address` - The address of the contract whose trie node preimages are to be fetched. - /// - `slot` - The storage slot whose trie node preimages are to be fetched. - /// - `block_number` - The block number at which the trie node preimages are to be fetched. - /// - /// ## Returns - /// - Ok(()): If the hint was successful. - /// - Err(Self::Error): If the hint was unsuccessful. 
- fn hint_storage_proof( - &self, - address: Address, - slot: U256, - block_number: u64, - ) -> Result<(), Self::Error>; - - /// Hints the host to fetch the execution witness for the [OpPayloadAttributes] applied on top - /// of the parent block's state. - /// - /// ## Takes - /// - `parent_hash` - The hash of the parent block. - /// - `op_payload_attributes` - The attributes of the operation payload. - /// - /// ## Returns - /// - Ok(()): If the hint was successful. - /// - Err(Self::Error): If the hint was unsuccessful. - fn hint_execution_witness( - &self, - parent_hash: B256, - op_payload_attributes: &OpPayloadAttributes, - ) -> Result<(), Self::Error>; -} diff --git a/kona/crates/proof/mpt/src/util.rs b/kona/crates/proof/mpt/src/util.rs deleted file mode 100644 index 23fd1f8bc11..00000000000 --- a/kona/crates/proof/mpt/src/util.rs +++ /dev/null @@ -1,90 +0,0 @@ -//! Utilities for `kona-mpt` - -use alloc::vec::Vec; -use alloy_rlp::{Buf, BufMut, Encodable, Header}; -use alloy_trie::{HashBuilder, Nibbles, proof::ProofRetainer}; - -/// Compute a trie root of the collection of items with a custom encoder. 
-pub fn ordered_trie_with_encoder<T, F>(items: &[T], mut encode: F) -> HashBuilder -where - F: FnMut(&T, &mut dyn BufMut), -{ - let mut index_buffer = Vec::new(); - let mut value_buffer = Vec::new(); - let items_len = items.len(); - - // Store preimages for all intermediates - let path_nibbles = (0..items_len) - .map(|i| { - let index = adjust_index_for_rlp(i, items_len); - index_buffer.clear(); - index.encode(&mut index_buffer); - Nibbles::unpack(&index_buffer) - }) - .collect::<Vec<_>>(); - - let mut hb = HashBuilder::default().with_proof_retainer(ProofRetainer::new(path_nibbles)); - for i in 0..items_len { - let index = adjust_index_for_rlp(i, items_len); - - index_buffer.clear(); - index.encode(&mut index_buffer); - - value_buffer.clear(); - encode(&items[index], &mut value_buffer); - - hb.add_leaf(Nibbles::unpack(&index_buffer), &value_buffer); - } - - hb -} - -/// Adjust the index of an item for rlp encoding. -pub(crate) const fn adjust_index_for_rlp(i: usize, len: usize) -> usize { - if i > 0x7f { - i - } else if i == 0x7f || i + 1 == len { - 0 - } else { - i + 1 - } -} - -/// Walks through a RLP list's elements and returns the total number of elements in the list. -/// Returns [alloy_rlp::Error::UnexpectedString] if the RLP stream is not a list. -/// -/// ## Takes -/// - `buf` - The RLP stream to walk through -/// -/// ## Returns -/// - `Ok(usize)` - The total number of elements in the list -/// - `Err(_)` - The RLP stream is not a list -pub(crate) fn rlp_list_element_length(buf: &mut &[u8]) -> alloy_rlp::Result<usize> { - let header = Header::decode(buf)?; - if !header.list { - return Err(alloy_rlp::Error::UnexpectedString); - } - let len_after_consume = buf.len() - header.payload_length; - - let mut list_element_length = 0; - while buf.len() > len_after_consume { - let header = Header::decode(buf)?; - buf.advance(header.payload_length); - list_element_length += 1; - } - Ok(list_element_length) -} - -/// Unpack node path to nibbles. 
-/// -/// ## Takes -/// - `first` - first nibble of the path if it is odd. Must be <= 0x0F, or will create invalid -/// nibbles. -/// - `rest` - rest of the nibbles packed -/// -/// ## Returns -/// - `Nibbles` - unpacked nibbles -pub(crate) fn unpack_path_to_nibbles(first: Option<u8>, rest: &[u8]) -> Nibbles { - let rest = Nibbles::unpack(rest); - Nibbles::from_iter_unchecked(first.into_iter().chain(rest.to_vec())) -} diff --git a/kona/crates/proof/preimage/README.md b/kona/crates/proof/preimage/README.md deleted file mode 100644 index 1b2710ac4d8..00000000000 --- a/kona/crates/proof/preimage/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# `kona-preimage` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-preimage"><img src="https://img.shields.io/crates/v/kona-preimage.svg?label=kona-preimage&labelColor=2a2f35" alt="Kona Preimage ABI client"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -This crate offers a high-level API over the [`Preimage Oracle`][preimage-abi-spec]. It is `no_std` compatible to be used in -`client` programs, and the `host` handles are `async` colored to allow for the `host` programs to reach out to external -data sources to populate the `Preimage Oracle`. - -[preimage-abi-spec]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle diff --git a/kona/crates/proof/preimage/src/errors.rs b/kona/crates/proof/preimage/src/errors.rs deleted file mode 100644 index 16b5aae033e..00000000000 --- a/kona/crates/proof/preimage/src/errors.rs +++ /dev/null @@ -1,53 +0,0 @@ -//! 
Errors for the `kona-preimage` crate. - -use alloc::string::String; -use thiserror::Error; - -/// A [PreimageOracleError] is an enum that differentiates pipe-related errors from other errors -/// in the [PreimageOracleServer] and [HintReaderServer] implementations. -/// -/// [PreimageOracleServer]: crate::PreimageOracleServer -/// [HintReaderServer]: crate::HintReaderServer -#[derive(Error, Debug)] -pub enum PreimageOracleError { - /// The pipe has been broken. - #[error(transparent)] - IOError(#[from] ChannelError), - /// The preimage key is invalid. - #[error("Invalid preimage key.")] - InvalidPreimageKey, - /// Key not found. - #[error("Key not found.")] - KeyNotFound, - /// Timeout while waiting for preimage. - #[error("Timeout while waiting for preimage.")] - Timeout, - /// Buffer length mismatch. - #[error("Buffer length mismatch. Expected {0}, got {1}.")] - BufferLengthMismatch(usize, usize), - /// Failed to parse hint. - #[error("Failed to parse hint: {0}")] - HintParseFailed(String), - /// Other errors. - #[error("Error in preimage server: {0}")] - Other(String), -} - -/// A [Result] type for the [PreimageOracleError] enum. -pub type PreimageOracleResult<T> = Result<T, PreimageOracleError>; - -/// A [ChannelError] is an enum that describes the error cases of a [Channel] trait implementation. -/// -/// [Channel]: crate::Channel -#[derive(Error, Debug)] -pub enum ChannelError { - /// The channel is closed. - #[error("Channel is closed.")] - Closed, - /// Unexpected EOF. - #[error("Unexpected EOF in channel read operation.")] - UnexpectedEOF, -} - -/// A [Result] type for the [ChannelError] enum. 
-pub type ChannelResult<T> = Result<T, ChannelError>; diff --git a/kona/crates/proof/preimage/src/hint.rs b/kona/crates/proof/preimage/src/hint.rs deleted file mode 100644 index 227d1dacdd7..00000000000 --- a/kona/crates/proof/preimage/src/hint.rs +++ /dev/null @@ -1,223 +0,0 @@ -use crate::{ - Channel, HintReaderServer, - errors::{PreimageOracleError, PreimageOracleResult}, - traits::{HintRouter, HintWriterClient}, -}; -use alloc::{boxed::Box, format, string::String, vec}; -use async_trait::async_trait; - -/// A [HintWriter] is a high-level interface to the hint channel. It provides a way to write hints -/// to the host. -#[derive(Debug, Clone, Copy)] -pub struct HintWriter<C> { - channel: C, -} - -impl<C> HintWriter<C> { - /// Create a new [HintWriter] from a [Channel]. - pub const fn new(channel: C) -> Self { - Self { channel } - } -} - -#[async_trait] -impl<C> HintWriterClient for HintWriter<C> -where - C: Channel + Send + Sync, -{ - /// Write a hint to the host. This will overwrite any existing hint in the channel, and block - /// until all data has been written. - async fn write(&self, hint: &str) -> PreimageOracleResult<()> { - trace!(target: "hint_writer", "Writing hint \"{hint}\""); - - // Form the hint into a byte buffer. The format is a 4-byte big-endian length prefix - // followed by the hint string. - self.channel.write(u32::to_be_bytes(hint.len() as u32).as_ref()).await?; - self.channel.write(hint.as_bytes()).await?; - - trace!(target: "hint_writer", "Successfully wrote hint"); - - // Read the hint acknowledgement from the host. - let mut hint_ack = [0u8; 1]; - self.channel.read_exact(&mut hint_ack).await?; - - trace!(target: "hint_writer", "Received hint acknowledgement"); - - Ok(()) - } -} - -/// A [HintReader] is a router for hints sent by the [HintWriter] from the client program. It -/// provides a way for the host to prepare preimages for reading. 
-#[derive(Debug, Clone, Copy)] -pub struct HintReader<C> { - channel: C, -} - -impl<C> HintReader<C> -where - C: Channel, -{ - /// Create a new [HintReader] from a [Channel]. - pub const fn new(channel: C) -> Self { - Self { channel } - } -} - -#[async_trait] -impl<C> HintReaderServer for HintReader<C> -where - C: Channel + Send + Sync, -{ - async fn next_hint<R>(&self, hint_router: &R) -> PreimageOracleResult<()> - where - R: HintRouter + Send + Sync, - { - // Read the length of the raw hint payload. - let mut len_buf = [0u8; 4]; - self.channel.read_exact(&mut len_buf).await?; - let len = u32::from_be_bytes(len_buf); - - // Read the raw hint payload. - let mut raw_payload = vec![0u8; len as usize]; - self.channel.read_exact(raw_payload.as_mut_slice()).await?; - let payload = match String::from_utf8(raw_payload) { - Ok(p) => p, - Err(e) => { - // Write back on error to prevent blocking the client. - self.channel.write(&[0x00]).await?; - - return Err(PreimageOracleError::Other(format!( - "Failed to decode hint payload: {e}" - ))); - } - }; - - trace!(target: "hint_reader", "Successfully read hint: \"{payload}\""); - - // Route the hint - if let Err(e) = hint_router.route_hint(payload).await { - // Write back on error to prevent blocking the client. - self.channel.write(&[0x00]).await?; - - error!(target: "hint_reader", "Failed to route hint: {e}"); - return Err(e); - } - - // Write back an acknowledgement to the client to unblock their process. 
- self.channel.write(&[0x00]).await?; - - trace!(target: "hint_reader", "Successfully routed and acknowledged hint"); - - Ok(()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::native_channel::BidirectionalChannel; - use alloc::{sync::Arc, vec::Vec}; - use tokio::sync::Mutex; - - struct TestRouter { - incoming_hints: Arc<Mutex<Vec<String>>>, - } - - #[async_trait] - impl HintRouter for TestRouter { - async fn route_hint(&self, hint: String) -> PreimageOracleResult<()> { - self.incoming_hints.lock().await.push(hint); - Ok(()) - } - } - - struct TestFailRouter; - - #[async_trait] - impl HintRouter for TestFailRouter { - async fn route_hint(&self, _hint: String) -> PreimageOracleResult<()> { - Err(PreimageOracleError::KeyNotFound) - } - } - - #[tokio::test(flavor = "multi_thread", worker_threads = 2)] - async fn test_unblock_on_bad_utf8() { - let mock_data = [0xf0, 0x90, 0x28, 0xbc]; - - let hint_channel = BidirectionalChannel::new().unwrap(); - - let client = tokio::task::spawn(async move { - let hint_writer = HintWriter::new(hint_channel.client); - - #[allow(invalid_from_utf8_unchecked)] - hint_writer.write(unsafe { alloc::str::from_utf8_unchecked(&mock_data) }).await - }); - let host = tokio::task::spawn(async move { - let router = TestRouter { incoming_hints: Default::default() }; - - let hint_reader = HintReader::new(hint_channel.host); - hint_reader.next_hint(&router).await - }); - - let (c, h) = tokio::join!(client, host); - c.unwrap().unwrap(); - assert!(h.unwrap().is_err_and(|e| { - let PreimageOracleError::Other(e) = e else { - return false; - }; - e.contains("Failed to decode hint payload") - })); - } - - #[tokio::test(flavor = "multi_thread", worker_threads = 2)] - async fn test_unblock_on_fetch_failure() { - const MOCK_DATA: &str = "test-hint 0xfacade"; - - let hint_channel = BidirectionalChannel::new().unwrap(); - - let client = tokio::task::spawn(async move { - let hint_writer = HintWriter::new(hint_channel.client); - - 
hint_writer.write(MOCK_DATA).await - }); - let host = tokio::task::spawn(async move { - let hint_reader = HintReader::new(hint_channel.host); - hint_reader.next_hint(&TestFailRouter).await - }); - - let (c, h) = tokio::join!(client, host); - c.unwrap().unwrap(); - assert!(h.unwrap().is_err_and(|e| matches!(e, PreimageOracleError::KeyNotFound))); - } - - #[tokio::test(flavor = "multi_thread", worker_threads = 2)] - async fn test_hint_client_and_host() { - const MOCK_DATA: &str = "test-hint 0xfacade"; - - let incoming_hints = Arc::new(Mutex::new(Vec::new())); - let hint_channel = BidirectionalChannel::new().unwrap(); - - let client = tokio::task::spawn(async move { - let hint_writer = HintWriter::new(hint_channel.client); - - hint_writer.write(MOCK_DATA).await - }); - let host = tokio::task::spawn({ - let incoming_hints_ref = Arc::clone(&incoming_hints); - async move { - let router = TestRouter { incoming_hints: incoming_hints_ref }; - - let hint_reader = HintReader::new(hint_channel.host); - hint_reader.next_hint(&router).await.unwrap(); - } - }); - - let _ = tokio::join!(client, host); - let mut hints = incoming_hints.lock().await; - - assert_eq!(hints.len(), 1); - let h = hints.remove(0); - assert_eq!(h, MOCK_DATA); - } -} diff --git a/kona/crates/proof/preimage/src/lib.rs b/kona/crates/proof/preimage/src/lib.rs deleted file mode 100644 index f9327992ec2..00000000000 --- a/kona/crates/proof/preimage/src/lib.rs +++ /dev/null @@ -1,35 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -#[macro_use] -extern crate tracing; - -pub mod errors; - -mod key; -pub use key::{PreimageKey, 
PreimageKeyType}; - -mod oracle; -pub use oracle::{OracleReader, OracleServer}; - -mod hint; -pub use hint::{HintReader, HintWriter}; - -mod traits; -pub use traits::{ - Channel, CommsClient, HintReaderServer, HintRouter, HintWriterClient, PreimageFetcher, - PreimageOracleClient, PreimageOracleServer, PreimageServerBackend, -}; - -#[cfg(feature = "std")] -mod native_channel; -#[cfg(feature = "std")] -pub use native_channel::{BidirectionalChannel, NativeChannel}; diff --git a/kona/crates/proof/preimage/src/traits.rs b/kona/crates/proof/preimage/src/traits.rs deleted file mode 100644 index affa8fe974b..00000000000 --- a/kona/crates/proof/preimage/src/traits.rs +++ /dev/null @@ -1,144 +0,0 @@ -use crate::{ - PreimageKey, - errors::{ChannelResult, PreimageOracleResult}, -}; -use alloc::{boxed::Box, string::String, vec::Vec}; -use async_trait::async_trait; - -/// A [PreimageOracleClient] is a high-level interface to read data from the host, keyed by a -/// [PreimageKey]. -#[async_trait] -pub trait PreimageOracleClient { - /// Get the data corresponding to the currently set key from the host. Return the data in a new - /// heap allocated `Vec<u8>` - /// - /// # Returns - /// - `Ok(Vec<u8>)` if the data was successfully fetched from the host. - /// - `Err(_)` if the data could not be fetched from the host. - async fn get(&self, key: PreimageKey) -> PreimageOracleResult<Vec<u8>>; - - /// Get the data corresponding to the currently set key from the host. Writes the data into the - /// provided buffer. - /// - /// # Returns - /// - `Ok(())` if the data was successfully written into the buffer. - /// - `Err(_)` if the data could not be written into the buffer. - async fn get_exact(&self, key: PreimageKey, buf: &mut [u8]) -> PreimageOracleResult<()>; -} - -/// A [HintWriterClient] is a high-level interface to the hint pipe. It provides a way to write -/// hints to the host. -#[async_trait] -pub trait HintWriterClient { - /// Write a hint to the host. 
This will overwrite any existing hint in the pipe, and block until - /// all data has been written. - /// - /// # Returns - /// - `Ok(())` if the hint was successfully written to the host. - /// - `Err(_)` if the hint could not be written to the host. - async fn write(&self, hint: &str) -> PreimageOracleResult<()>; -} - -/// A [CommsClient] is a trait that combines the [PreimageOracleClient] and [HintWriterClient] -pub trait CommsClient: PreimageOracleClient + Clone + HintWriterClient {} - -// Implement the super trait for any type that satisfies the bounds -impl<T: PreimageOracleClient + Clone + HintWriterClient> CommsClient for T {} - -/// A [PreimageOracleServer] is a high-level interface to accept read requests from the client and -/// write the preimage data to the client pipe. -#[async_trait] -pub trait PreimageOracleServer { - /// Get the next preimage request and return the response to the client. - /// - /// # Returns - /// - `Ok(())` if the data was successfully written into the client pipe. - /// - `Err(_)` if the data could not be written to the client. - async fn next_preimage_request<F>(&self, get_preimage: &F) -> PreimageOracleResult<()> - where - F: PreimageFetcher + Send + Sync; -} - -/// A [HintReaderServer] is a high-level interface to read preimage hints from the -/// [HintWriterClient] and prepare them for consumption by the client program. -#[async_trait] -pub trait HintReaderServer { - /// Get the next hint request and return the acknowledgement to the client. - /// - /// # Returns - /// - `Ok(())` if the hint was received and the client was notified of the host's - /// acknowledgement. - /// - `Err(_)` if the hint was not received correctly. - async fn next_hint<R>(&self, route_hint: &R) -> PreimageOracleResult<()> - where - R: HintRouter + Send + Sync; -} - -/// A [HintRouter] is a high-level interface to route hints to the appropriate handler. -#[async_trait] -pub trait HintRouter { - /// Routes a hint to the appropriate handler. 
- /// - /// # Arguments - /// - `hint`: The hint to route. - /// - /// # Returns - /// - `Ok(())` if the hint was successfully routed. - /// - `Err(_)` if the hint could not be routed. - async fn route_hint(&self, hint: String) -> PreimageOracleResult<()>; -} - -/// A [PreimageFetcher] is a high-level interface to fetch preimages during preimage requests. -#[async_trait] -pub trait PreimageFetcher { - /// Get the preimage corresponding to the given key. - /// - /// # Arguments - /// - `key`: The key to fetch the preimage for. - /// - /// # Returns - /// - `Ok(Vec<u8>)` if the preimage was successfully fetched. - /// - `Err(_)` if the preimage could not be fetched. - async fn get_preimage(&self, key: PreimageKey) -> PreimageOracleResult<Vec<u8>>; -} - -/// A [PreimageServerBackend] is a trait that combines the [PreimageFetcher] and [HintRouter] -/// traits. -pub trait PreimageServerBackend: PreimageFetcher + HintRouter {} - -// Implement the super trait for any type that satisfies the bounds -impl<T: PreimageFetcher + HintRouter> PreimageServerBackend for T {} - -/// A [Channel] is a high-level interface to read and write data to a counterparty. -#[async_trait] -pub trait Channel { - /// Asynchronously read data from the channel into the provided buffer. - /// - /// # Arguments - /// - `buf`: The buffer to read data into. - /// - /// # Returns - /// - `Ok(usize)`: The number of bytes read. - /// - `Err(_)` if the data could not be read. - async fn read(&self, buf: &mut [u8]) -> ChannelResult<usize>; - - /// Asynchronously read exactly `buf.len()` bytes into `buf` from the channel. - /// - /// # Arguments - /// - `buf`: The buffer to read data into. - /// - /// # Returns - /// - `Ok(())` if the data was successfully read. - /// - `Err(_)` if the data could not be read. - async fn read_exact(&self, buf: &mut [u8]) -> ChannelResult<usize>; - - /// Asynchronously write the provided buffer to the channel. 
- /// - /// # Arguments - /// - `buf`: The buffer to write to the host. - /// - /// # Returns - /// - `Ok(usize)`: The number of bytes written. - /// - `Err(_)` if the data could not be written. - async fn write(&self, buf: &[u8]) -> ChannelResult<usize>; -} diff --git a/kona/crates/proof/proof-interop/Cargo.toml b/kona/crates/proof/proof-interop/Cargo.toml deleted file mode 100644 index 2d7fdf808d5..00000000000 --- a/kona/crates/proof/proof-interop/Cargo.toml +++ /dev/null @@ -1,72 +0,0 @@ -[package] -name = "kona-proof-interop" -description = "OP Stack Proof SDK with Interop support" -version = "0.2.0" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-preimage.workspace = true -kona-interop = { workspace = true, features = ["serde"] } -kona-proof.workspace = true -kona-mpt.workspace = true -kona-executor.workspace = true -kona-registry.workspace = true -kona-genesis = { workspace = true, features = ["serde"] } -kona-protocol.workspace = true - -# Alloy -alloy-rlp.workspace = true -alloy-eips.workspace = true -alloy-primitives.workspace = true -alloy-consensus.workspace = true -alloy-rpc-types-engine.workspace = true -alloy-evm = { workspace = true, features = ["op"] } - -# OP Alloy -op-alloy-consensus.workspace = true -op-alloy-rpc-types-engine.workspace = true -alloy-op-evm.workspace = true - -# revm -revm.workspace = true -op-revm.workspace = true - -# General -serde.workspace = true -tracing.workspace = true -serde_json.workspace = true -async-trait.workspace = true -spin.workspace = true -thiserror.workspace = true - -# Arbitrary -arbitrary = { version = "1.4", features = ["derive"], optional = true } - -[dev-dependencies] -alloy-primitives = { workspace = true, features = ["rlp", "arbitrary"] } -kona-interop = { workspace = true, features = ["arbitrary"] } -arbitrary = { version = "1.4", features = ["derive"] } 
-rand.workspace = true - -[features] -arbitrary = [ - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "alloy-rpc-types-engine/arbitrary", - "dep:arbitrary", - "kona-genesis/arbitrary", - "kona-interop/arbitrary", - "kona-protocol/arbitrary", - "op-alloy-consensus/arbitrary", - "op-alloy-rpc-types-engine/arbitrary", - "revm/arbitrary", -] diff --git a/kona/crates/proof/proof-interop/README.md b/kona/crates/proof/proof-interop/README.md deleted file mode 100644 index 4933aabbc35..00000000000 --- a/kona/crates/proof/proof-interop/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-proof-interop` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-proof-interop"><img src="https://img.shields.io/crates/v/kona-proof-interop.svg?label=kona-proof-interop&labelColor=2a2f35" alt="Kona Proof SDK"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -`kona-proof-interop` is an OP Stack state transition proof SDK, with interop support, built on top of [`kona-proof`](../proof/) diff --git a/kona/crates/proof/proof-interop/src/hint.rs b/kona/crates/proof/proof-interop/src/hint.rs deleted file mode 100644 index 1b02b4c282c..00000000000 --- a/kona/crates/proof/proof-interop/src/hint.rs +++ /dev/null @@ -1,193 +0,0 @@ -//! This module contains the [HintType] enum. - -use alloc::{string::ToString, vec::Vec}; -use core::{fmt::Display, str::FromStr}; -use kona_proof::{Hint, errors::HintParsingError}; - -/// The [HintType] enum is used to specify the type of hint that was received. 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum HintType { - /// A hint that specifies the block header of a layer 1 block. - L1BlockHeader, - /// A hint that specifies the transactions of a layer 1 block. - L1Transactions, - /// A hint that specifies the state node of a layer 1 block. - L1Receipts, - /// A hint that specifies a blob in the layer 1 beacon chain. - L1Blob, - /// A hint that specifies a precompile call on layer 1. - L1Precompile, - /// A hint that specifies the block header of a layer 2 block. - L2BlockHeader, - /// A hint that specifies the transactions of a layer 2 block. - L2Transactions, - /// A hint that specifies the receipts of a layer 2 block. - L2Receipts, - /// A hint that specifies the code of a contract on layer 2. - L2Code, - /// A hint that specifies the preimage of the agreed upon pre-state claim. - AgreedPreState, - /// A hint that specifies the preimage of an L2 output root within the agreed upon pre-state, - /// by chain ID. - L2OutputRoot, - /// A hint that specifies the state node in the L2 state trie. - L2StateNode, - /// A hint that specifies the proof on the path to an account in the L2 state trie. - L2AccountProof, - /// A hint that specifies the proof on the path to a storage slot in an account within in the - /// L2 state trie. - L2AccountStorageProof, - /// A hint that specifies loading the payload witness for an optimistic block. - L2BlockData, - /// A hint that specifies bulk storage of all the code, state and keys generated by an - /// execution witness. - L2PayloadWitness, -} - -impl HintType { - /// Creates a new [Hint] from `self` and the specified data. The data passed will be - /// concatenated into a single byte array before being stored in the resulting [Hint]. 
- pub fn with_data(self, data: &[&[u8]]) -> Hint<Self> { - let total_len = data.iter().map(|d| d.len()).sum(); - let hint_data = data.iter().fold(Vec::with_capacity(total_len), |mut acc, d| { - acc.extend_from_slice(d); - acc - }); - Hint::new(self, hint_data) - } -} - -impl FromStr for HintType { - type Err = HintParsingError; - - fn from_str(value: &str) -> Result<Self, Self::Err> { - match value { - "l1-block-header" => Ok(Self::L1BlockHeader), - "l1-transactions" => Ok(Self::L1Transactions), - "l1-receipts" => Ok(Self::L1Receipts), - "l1-blob" => Ok(Self::L1Blob), - "l1-precompile" => Ok(Self::L1Precompile), - "l2-block-header" => Ok(Self::L2BlockHeader), - "l2-transactions" => Ok(Self::L2Transactions), - "l2-receipts" => Ok(Self::L2Receipts), - "l2-code" => Ok(Self::L2Code), - "agreed-pre-state" => Ok(Self::AgreedPreState), - "l2-output-root" => Ok(Self::L2OutputRoot), - "l2-state-node" => Ok(Self::L2StateNode), - "l2-account-proof" => Ok(Self::L2AccountProof), - "l2-account-storage-proof" => Ok(Self::L2AccountStorageProof), - "l2-block-data" => Ok(Self::L2BlockData), - "l2-payload-witness" => Ok(Self::L2PayloadWitness), - _ => Err(HintParsingError(value.to_string())), - } - } -} - -impl From<HintType> for &str { - fn from(value: HintType) -> Self { - match value { - HintType::L1BlockHeader => "l1-block-header", - HintType::L1Transactions => "l1-transactions", - HintType::L1Receipts => "l1-receipts", - HintType::L1Blob => "l1-blob", - HintType::L1Precompile => "l1-precompile", - HintType::L2BlockHeader => "l2-block-header", - HintType::L2Transactions => "l2-transactions", - HintType::L2Receipts => "l2-receipts", - HintType::L2Code => "l2-code", - HintType::AgreedPreState => "agreed-pre-state", - HintType::L2OutputRoot => "l2-output-root", - HintType::L2StateNode => "l2-state-node", - HintType::L2AccountProof => "l2-account-proof", - HintType::L2AccountStorageProof => "l2-account-storage-proof", - HintType::L2BlockData => "l2-block-data", - 
HintType::L2PayloadWitness => "l2-payload-witness", - } - } -} - -impl Display for HintType { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let s: &str = (*self).into(); - write!(f, "{s}") - } -} - -mod test { - #[test] - fn test_hint_type_from_str() { - use super::HintType; - use crate::alloc::string::ToString; - use core::str::FromStr; - use kona_proof::errors::HintParsingError; - - assert_eq!(HintType::from_str("l1-block-header").unwrap(), HintType::L1BlockHeader); - assert_eq!(HintType::from_str("l1-transactions").unwrap(), HintType::L1Transactions); - assert_eq!(HintType::from_str("l1-receipts").unwrap(), HintType::L1Receipts); - assert_eq!(HintType::from_str("l1-blob").unwrap(), HintType::L1Blob); - assert_eq!(HintType::from_str("l1-precompile").unwrap(), HintType::L1Precompile); - assert_eq!(HintType::from_str("l2-block-header").unwrap(), HintType::L2BlockHeader); - assert_eq!(HintType::from_str("l2-block-data").unwrap(), HintType::L2BlockData); - assert_eq!(HintType::from_str("l2-transactions").unwrap(), HintType::L2Transactions); - assert_eq!(HintType::from_str("l2-receipts").unwrap(), HintType::L2Receipts); - assert_eq!(HintType::from_str("l2-code").unwrap(), HintType::L2Code); - assert_eq!(HintType::from_str("agreed-pre-state").unwrap(), HintType::AgreedPreState); - assert_eq!(HintType::from_str("l2-output-root").unwrap(), HintType::L2OutputRoot); - assert_eq!(HintType::from_str("l2-account-proof").unwrap(), HintType::L2AccountProof); - assert_eq!( - HintType::from_str("l2-account-storage-proof").unwrap(), - HintType::L2AccountStorageProof - ); - assert_eq!(HintType::from_str("l2-block-data").unwrap(), HintType::L2BlockData); - assert_eq!(HintType::from_str("l2-payload-witness").unwrap(), HintType::L2PayloadWitness); - match HintType::from_str("invalid") { - Ok(_) => { - panic!("expected error"); - } - Err(parsing_err) => { - let HintParsingError(str) = parsing_err; - assert_eq!(str, "invalid".to_string()); - } - } - } - - 
#[test] - fn test_hint_type_to_str() { - use super::HintType; - - assert_eq!(<&str>::from(HintType::L1BlockHeader), "l1-block-header"); - assert_eq!(<&str>::from(HintType::L1Transactions), "l1-transactions"); - assert_eq!(<&str>::from(HintType::L1Receipts), "l1-receipts"); - assert_eq!(<&str>::from(HintType::L1Blob), "l1-blob"); - assert_eq!(<&str>::from(HintType::L1Precompile), "l1-precompile"); - assert_eq!(<&str>::from(HintType::L2BlockHeader), "l2-block-header"); - assert_eq!(<&str>::from(HintType::L2Transactions), "l2-transactions"); - assert_eq!(<&str>::from(HintType::L2Receipts), "l2-receipts"); - assert_eq!(<&str>::from(HintType::L2Code), "l2-code"); - assert_eq!(<&str>::from(HintType::AgreedPreState), "agreed-pre-state"); - assert_eq!(<&str>::from(HintType::L2OutputRoot), "l2-output-root"); - assert_eq!(<&str>::from(HintType::L2StateNode), "l2-state-node"); - assert_eq!(<&str>::from(HintType::L2AccountProof), "l2-account-proof"); - assert_eq!(<&str>::from(HintType::L2AccountStorageProof), "l2-account-storage-proof"); - assert_eq!(<&str>::from(HintType::L2BlockData), "l2-block-data"); - assert_eq!(<&str>::from(HintType::L2PayloadWitness), "l2-payload-witness"); - } - - #[test] - fn test_hint_with_data() { - use super::HintType; - use alloy_primitives::Bytes; - - let hint_data: &[u8] = &[1, 2]; - let l1_block_header = HintType::L1BlockHeader.with_data(&[hint_data]); - assert_eq!(l1_block_header.data, Bytes::from(hint_data)); - } - - #[test] - fn test_hint_fmt() { - use super::HintType; - use alloc::format; - - assert_eq!(format!("{}", HintType::L1BlockHeader), "l1-block-header"); - } -} diff --git a/kona/crates/proof/proof-interop/src/lib.rs b/kona/crates/proof/proof-interop/src/lib.rs deleted file mode 100644 index 593b89097c0..00000000000 --- a/kona/crates/proof/proof-interop/src/lib.rs +++ /dev/null @@ -1,28 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - 
html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "arbitrary"), no_std)] - -extern crate alloc; - -mod pre_state; -pub use pre_state::{ - INVALID_TRANSITION, INVALID_TRANSITION_HASH, OptimisticBlock, PreState, - TRANSITION_STATE_MAX_STEPS, TransitionState, -}; - -mod hint; -pub use hint::HintType; - -mod provider; -pub use provider::OracleInteropProvider; - -pub mod boot; -pub use boot::BootInfo; - -mod consolidation; -pub use consolidation::{ConsolidationError, SuperchainConsolidator}; diff --git a/kona/crates/proof/proof-interop/src/provider.rs b/kona/crates/proof/proof-interop/src/provider.rs deleted file mode 100644 index ada827d2a0d..00000000000 --- a/kona/crates/proof/proof-interop/src/provider.rs +++ /dev/null @@ -1,270 +0,0 @@ -//! [InteropProvider] trait implementation using a [CommsClient] data source. - -use crate::{BootInfo, HintType}; -use alloc::{boxed::Box, string::ToString, sync::Arc, vec::Vec}; -use alloy_consensus::{Header, Sealed}; -use alloy_eips::eip2718::Decodable2718; -use alloy_primitives::{Address, B256}; -use alloy_rlp::Decodable; -use async_trait::async_trait; -use kona_interop::InteropProvider; -use kona_mpt::{OrderedListWalker, TrieHinter, TrieNode, TrieProvider}; -use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType, errors::PreimageOracleError}; -use kona_proof::{eip_2935_history_lookup, errors::OracleProviderError}; -use kona_registry::HashMap; -use op_alloy_consensus::OpReceiptEnvelope; -use spin::RwLock; - -/// A [CommsClient] backed [InteropProvider] implementation. -#[derive(Debug, Clone)] -pub struct OracleInteropProvider<C> { - /// The oracle client. - oracle: Arc<C>, - /// The [BootInfo] for the current program execution. - boot: BootInfo, - /// The local safe head block header cache. 
- local_safe_heads: HashMap<u64, Sealed<Header>>, - /// The chain ID for the current call context. Used to declare the chain ID for the trie hints. - chain_id: Arc<RwLock<Option<u64>>>, -} - -impl<C> OracleInteropProvider<C> -where - C: CommsClient + Send + Sync, -{ - /// Creates a new [OracleInteropProvider] with the given oracle client and [BootInfo]. - pub fn new( - oracle: Arc<C>, - boot: BootInfo, - local_safe_headers: HashMap<u64, Sealed<Header>>, - ) -> Self { - Self { - oracle, - boot, - local_safe_heads: local_safe_headers, - chain_id: Arc::new(RwLock::new(None)), - } - } - - /// Returns a reference to the local safe heads map. - pub const fn local_safe_heads(&self) -> &HashMap<u64, Sealed<Header>> { - &self.local_safe_heads - } - - /// Replaces a local safe head with the given header. - pub fn replace_local_safe_head(&mut self, chain_id: u64, header: Sealed<Header>) { - self.local_safe_heads.insert(chain_id, header); - } - - /// Fetch the [Header] for the block with the given hash. - pub async fn header_by_hash( - &self, - chain_id: u64, - block_hash: B256, - ) -> Result<Header, <Self as InteropProvider>::Error> { - HintType::L2BlockHeader - .with_data(&[block_hash.as_slice(), chain_id.to_be_bytes().as_ref()]) - .send(self.oracle.as_ref()) - .await?; - - let header_rlp = self - .oracle - .get(PreimageKey::new(*block_hash, PreimageKeyType::Keccak256)) - .await - .map_err(OracleProviderError::Preimage)?; - - Header::decode(&mut header_rlp.as_ref()).map_err(OracleProviderError::Rlp) - } - - /// Fetch the [OpReceiptEnvelope]s for the block with the given hash. - async fn derive_receipts( - &self, - chain_id: u64, - block_hash: B256, - header: &Header, - ) -> Result<Vec<OpReceiptEnvelope>, <Self as InteropProvider>::Error> { - // Send a hint for the block's receipts, and walk through the receipts trie in the header to - // verify them. 
- HintType::L2Receipts - .with_data(&[block_hash.as_ref(), chain_id.to_be_bytes().as_slice()]) - .send(self.oracle.as_ref()) - .await?; - let trie_walker = OrderedListWalker::try_new_hydrated(header.receipts_root, self) - .map_err(OracleProviderError::TrieWalker)?; - - // Decode the receipts within the receipts trie. - let receipts = trie_walker - .into_iter() - .map(|(_, rlp)| { - let envelope = OpReceiptEnvelope::decode_2718(&mut rlp.as_ref())?; - Ok(envelope) - }) - .collect::<Result<Vec<_>, _>>() - .map_err(OracleProviderError::Rlp)?; - - Ok(receipts) - } -} - -#[async_trait] -impl<C> InteropProvider for OracleInteropProvider<C> -where - C: CommsClient + Send + Sync, -{ - type Error = OracleProviderError; - - /// Fetch a [Header] by its number. - async fn header_by_number(&self, chain_id: u64, number: u64) -> Result<Header, Self::Error> { - let Some(mut header) = - self.local_safe_heads.get(&chain_id).cloned().map(|h| h.into_inner()) - else { - return Err(PreimageOracleError::Other("Missing local safe header".to_string()).into()); - }; - - // Check if the block number is in range. If not, we can fail early. - if number > header.number { - return Err(OracleProviderError::BlockNumberPastHead(number, header.number)); - } - - // Set the chain ID for the trie hints, and explicitly drop the lock. - let mut chain_id_lock = self.chain_id.write(); - *chain_id_lock = Some(chain_id); - drop(chain_id_lock); - - // Walk back the block headers to the desired block number. - let rollup_config = self.boot.rollup_config(chain_id).ok_or_else(|| { - PreimageOracleError::Other("Missing rollup config for chain ID".to_string()) - })?; - let mut linear_fallback = false; - - while header.number > number { - if rollup_config.is_isthmus_active(header.timestamp) && !linear_fallback { - // If Isthmus is active, the EIP-2935 contract is used to perform leaping lookbacks - // through consulting the ring buffer within the contract. 
If this - // lookup fails for any reason, we fall back to linear walk back. - let block_hash = match eip_2935_history_lookup(&header, 0, self, self).await { - Ok(hash) => hash, - Err(_) => { - // If the EIP-2935 lookup fails for any reason, attempt fallback to linear - // walk back. - linear_fallback = true; - continue; - } - }; - - header = self.header_by_hash(chain_id, block_hash).await?; - } else { - // Walk back the block headers one-by-one until the desired block number is reached. - header = self.header_by_hash(chain_id, header.parent_hash).await?; - } - } - - Ok(header) - } - - /// Fetch all receipts for a given block by number. - async fn receipts_by_number( - &self, - chain_id: u64, - number: u64, - ) -> Result<Vec<OpReceiptEnvelope>, Self::Error> { - let header = self.header_by_number(chain_id, number).await?; - self.derive_receipts(chain_id, header.hash_slow(), &header).await - } - - /// Fetch all receipts for a given block by hash. - async fn receipts_by_hash( - &self, - chain_id: u64, - block_hash: B256, - ) -> Result<Vec<OpReceiptEnvelope>, Self::Error> { - let header = self.header_by_hash(chain_id, block_hash).await?; - self.derive_receipts(chain_id, block_hash, &header).await - } -} - -impl<C> TrieProvider for OracleInteropProvider<C> -where - C: CommsClient + Send + Sync + Clone, -{ - type Error = OracleProviderError; - - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, Self::Error> { - kona_proof::block_on(async move { - let trie_node_rlp = self - .oracle - .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) - .await - .map_err(OracleProviderError::Preimage)?; - TrieNode::decode(&mut trie_node_rlp.as_ref()).map_err(OracleProviderError::Rlp) - }) - } -} - -impl<C: CommsClient> TrieHinter for OracleInteropProvider<C> { - type Error = OracleProviderError; - - fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error> { - kona_proof::block_on(async move { - HintType::L2StateNode - .with_data(&[hash.as_slice()]) - .with_data( - 
self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), - ) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error> { - kona_proof::block_on(async move { - HintType::L2AccountProof - .with_data(&[block_number.to_be_bytes().as_ref(), address.as_slice()]) - .with_data( - self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), - ) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_storage_proof( - &self, - address: alloy_primitives::Address, - slot: alloy_primitives::U256, - block_number: u64, - ) -> Result<(), Self::Error> { - kona_proof::block_on(async move { - HintType::L2AccountStorageProof - .with_data(&[ - block_number.to_be_bytes().as_ref(), - address.as_slice(), - slot.to_be_bytes::<32>().as_ref(), - ]) - .with_data( - self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), - ) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_execution_witness( - &self, - parent_hash: B256, - op_payload_attributes: &op_alloy_rpc_types_engine::OpPayloadAttributes, - ) -> Result<(), Self::Error> { - kona_proof::block_on(async move { - let encoded_attributes = - serde_json::to_vec(op_payload_attributes).map_err(OracleProviderError::Serde)?; - - HintType::L2PayloadWitness - .with_data(&[parent_hash.as_slice(), &encoded_attributes]) - .with_data( - self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), - ) - .send(self.oracle.as_ref()) - .await - }) - } -} diff --git a/kona/crates/proof/proof/README.md b/kona/crates/proof/proof/README.md deleted file mode 100644 index e9aec8c680d..00000000000 --- a/kona/crates/proof/proof/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-proof` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a 
href="https://crates.io/crates/kona-proof"><img src="https://img.shields.io/crates/v/kona-proof.svg?label=kona-proof&labelColor=2a2f35" alt="Kona Proof SDK"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -`kona-proof` is an OP Stack state transition proof SDK. diff --git a/kona/crates/proof/proof/src/hint.rs b/kona/crates/proof/proof/src/hint.rs deleted file mode 100644 index 6291a037466..00000000000 --- a/kona/crates/proof/proof/src/hint.rs +++ /dev/null @@ -1,177 +0,0 @@ -//! This module contains the [HintType] enum. - -use crate::errors::{HintParsingError, OracleProviderError}; -use alloc::{ - string::{String, ToString}, - vec::Vec, -}; -use alloy_primitives::{Bytes, hex}; -use core::{fmt::Display, str::FromStr}; -use kona_preimage::HintWriterClient; - -/// A [Hint] is parsed in the format `<hint_type> <hint_data>`, where `<hint_type>` is a string that -/// represents the type of hint, and `<hint_data>` is the data associated with the hint (bytes -/// encoded as hex UTF-8). -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub struct Hint<HT> { - /// The type of hint. - pub ty: HT, - /// The data associated with the hint. - pub data: Bytes, -} - -impl<HT> Hint<HT> -where - HT: Display, -{ - /// Creates a new [Hint] with the specified type and data. - pub fn new<T: Into<Bytes>>(ty: HT, data: T) -> Self { - Self { ty, data: data.into() } - } - - /// Splits the [Hint] into its components. - pub fn split(self) -> (HT, Bytes) { - (self.ty, self.data) - } - - /// Appends more data to [Hint::data]. - pub fn with_data<T: AsRef<[u8]>>(self, data: T) -> Self { - // No-op if the data is empty. 
- if data.as_ref().is_empty() { - return self; - } - - let mut hint_data = Vec::with_capacity(self.data.len() + data.as_ref().len()); - hint_data.extend_from_slice(self.data.as_ref()); - hint_data.extend_from_slice(data.as_ref()); - - Self { data: hint_data.into(), ..self } - } - - /// Sends the hint to the passed [HintWriterClient]. - pub async fn send<T: HintWriterClient>(&self, comms: &T) -> Result<(), OracleProviderError> { - comms.write(&self.encode()).await.map_err(OracleProviderError::Preimage) - } - - /// Encodes the hint as a string. - pub fn encode(&self) -> String { - alloc::format!("{} {}", self.ty, self.data) - } -} - -impl<HT> FromStr for Hint<HT> -where - HT: FromStr<Err = HintParsingError>, -{ - type Err = HintParsingError; - - fn from_str(s: &str) -> Result<Self, Self::Err> { - let mut parts = s.split(' ').collect::<Vec<_>>(); - - if parts.len() != 2 { - return Err(HintParsingError(alloc::format!("Invalid hint format: {s}"))); - } - - let hint_type = parts.remove(0).parse::<HT>()?; - let hint_data = - hex::decode(parts.remove(0)).map_err(|e| HintParsingError(e.to_string()))?.into(); - - Ok(Self { ty: hint_type, data: hint_data }) - } -} - -/// The [HintType] enum is used to specify the type of hint that was received. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub enum HintType { - /// A hint that specifies the block header of a layer 1 block. - L1BlockHeader, - /// A hint that specifies the transactions of a layer 1 block. - L1Transactions, - /// A hint that specifies the state node of a layer 1 block. - L1Receipts, - /// A hint that specifies a blob in the layer 1 beacon chain. - L1Blob, - /// A hint that specifies a precompile call on layer 1. - L1Precompile, - /// A hint that specifies the block header of a layer 2 block. - L2BlockHeader, - /// A hint that specifies the transactions of a layer 2 block. - L2Transactions, - /// A hint that specifies the code of a contract on layer 2. 
- L2Code, - /// A hint that specifies the preimage of the starting L2 output root on layer 2. - StartingL2Output, - /// A hint that specifies the state node in the L2 state trie. - L2StateNode, - /// A hint that specifies the proof on the path to an account in the L2 state trie. - L2AccountProof, - /// A hint that specifies the proof on the path to a storage slot in an account within in the - /// L2 state trie. - L2AccountStorageProof, - /// A hint that specifies bulk storage of all the code, state and keys generated by an - /// execution witness. - L2PayloadWitness, -} - -impl HintType { - /// Creates a new [Hint] from `self` and the specified data. The data passed will be - /// concatenated into a single byte array before being stored in the resulting [Hint]. - pub fn with_data(self, data: &[&[u8]]) -> Hint<Self> { - let total_len = data.iter().map(|d| d.len()).sum(); - let hint_data = data.iter().fold(Vec::with_capacity(total_len), |mut acc, d| { - acc.extend_from_slice(d); - acc - }); - Hint::new(self, hint_data) - } -} - -impl FromStr for HintType { - type Err = HintParsingError; - - fn from_str(value: &str) -> Result<Self, Self::Err> { - match value { - "l1-block-header" => Ok(Self::L1BlockHeader), - "l1-transactions" => Ok(Self::L1Transactions), - "l1-receipts" => Ok(Self::L1Receipts), - "l1-blob" => Ok(Self::L1Blob), - "l1-precompile" => Ok(Self::L1Precompile), - "l2-block-header" => Ok(Self::L2BlockHeader), - "l2-transactions" => Ok(Self::L2Transactions), - "l2-code" => Ok(Self::L2Code), - "starting-l2-output" => Ok(Self::StartingL2Output), - "l2-state-node" => Ok(Self::L2StateNode), - "l2-account-proof" => Ok(Self::L2AccountProof), - "l2-account-storage-proof" => Ok(Self::L2AccountStorageProof), - "l2-payload-witness" => Ok(Self::L2PayloadWitness), - _ => Err(HintParsingError(value.to_string())), - } - } -} - -impl From<HintType> for &str { - fn from(value: HintType) -> Self { - match value { - HintType::L1BlockHeader => "l1-block-header", - 
HintType::L1Transactions => "l1-transactions", - HintType::L1Receipts => "l1-receipts", - HintType::L1Blob => "l1-blob", - HintType::L1Precompile => "l1-precompile", - HintType::L2BlockHeader => "l2-block-header", - HintType::L2Transactions => "l2-transactions", - HintType::L2Code => "l2-code", - HintType::StartingL2Output => "starting-l2-output", - HintType::L2StateNode => "l2-state-node", - HintType::L2AccountProof => "l2-account-proof", - HintType::L2AccountStorageProof => "l2-account-storage-proof", - HintType::L2PayloadWitness => "l2-payload-witness", - } - } -} - -impl Display for HintType { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let s: &str = (*self).into(); - write!(f, "{s}") - } -} diff --git a/kona/crates/proof/proof/src/l1/blob_provider.rs b/kona/crates/proof/proof/src/l1/blob_provider.rs deleted file mode 100644 index 434652831b7..00000000000 --- a/kona/crates/proof/proof/src/l1/blob_provider.rs +++ /dev/null @@ -1,214 +0,0 @@ -//! Contains the concrete implementation of the [BlobProvider] trait for the client program. - -use crate::{HintType, errors::OracleProviderError}; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; -use alloy_consensus::Blob; -use alloy_eips::eip4844::{FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}; -use alloy_primitives::keccak256; -use ark_bls12_381::Fr; -use ark_ff::{AdditiveGroup, BigInteger, BigInteger256, Field, PrimeField}; -use async_trait::async_trait; -use core::str::FromStr; -use kona_derive::BlobProvider; -use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; -use kona_protocol::BlockInfo; -use spin::Lazy; - -/// An oracle-backed blob provider. -#[derive(Debug, Clone)] -pub struct OracleBlobProvider<T: CommsClient> { - oracle: Arc<T>, -} - -impl<T: CommsClient> OracleBlobProvider<T> { - /// Constructs a new `OracleBlobProvider`. - pub const fn new(oracle: Arc<T>) -> Self { - Self { oracle } - } - - /// Retrieves a blob from the oracle. 
- /// - /// ## Takes - /// - `block_ref`: The block reference. - /// - `blob_hash`: The blob hash. - /// - /// ## Returns - /// - `Ok(blob)`: The blob. - /// - `Err(e)`: The blob could not be retrieved. - async fn get_blob( - &self, - block_ref: &BlockInfo, - blob_hash: &IndexedBlobHash, - ) -> Result<Blob, OracleProviderError> { - let mut blob_req_meta = [0u8; 48]; - blob_req_meta[0..32].copy_from_slice(blob_hash.hash.as_ref()); - blob_req_meta[32..40].copy_from_slice((blob_hash.index).to_be_bytes().as_ref()); - blob_req_meta[40..48].copy_from_slice(block_ref.timestamp.to_be_bytes().as_ref()); - - // Send a hint for the blob commitment and field elements. - HintType::L1Blob.with_data(&[blob_req_meta.as_ref()]).send(self.oracle.as_ref()).await?; - - // Fetch the blob commitment. - let mut commitment = [0u8; 48]; - self.oracle - .get_exact(PreimageKey::new(*blob_hash.hash, PreimageKeyType::Sha256), &mut commitment) - .await - .map_err(OracleProviderError::Preimage)?; - - // Reconstruct the blob from the 4096 field elements. - let mut blob = Blob::default(); - let mut field_element_key = [0u8; 80]; - field_element_key[..48].copy_from_slice(commitment.as_ref()); - for i in 0..FIELD_ELEMENTS_PER_BLOB { - field_element_key[48..] 
- .copy_from_slice(ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref()); - - let mut field_element = [0u8; 32]; - self.oracle - .get_exact( - PreimageKey::new(*keccak256(field_element_key), PreimageKeyType::Blob), - &mut field_element, - ) - .await - .map_err(OracleProviderError::Preimage)?; - blob[(i as usize) << 5..(i as usize + 1) << 5].copy_from_slice(field_element.as_ref()); - } - - tracing::info!( - target: "client_blob_oracle", - index = blob_hash.index, - hash = ?blob_hash.hash, - "Retrieved blob" - ); - - Ok(blob) - } -} - -#[async_trait] -impl<T: CommsClient + Sync + Send> BlobProvider for OracleBlobProvider<T> { - type Error = OracleProviderError; - - async fn get_and_validate_blobs( - &mut self, - block_ref: &BlockInfo, - blob_hashes: &[IndexedBlobHash], - ) -> Result<Vec<Box<Blob>>, Self::Error> { - let mut blobs = Vec::with_capacity(blob_hashes.len()); - for hash in blob_hashes { - blobs.push(Box::new(self.get_blob(block_ref, hash).await?)); - } - Ok(blobs) - } -} - -/// The 4096th bit-reversed roots of unity used in EIP-4844 as predefined evaluation points. -/// -/// See `generate_roots_of_unity` for details on how these roots of unity are generated. -pub static ROOTS_OF_UNITY: Lazy<[Fr; FIELD_ELEMENTS_PER_BLOB as usize]> = - Lazy::new(generate_roots_of_unity); - -/// Generates the 4096th bit-reversed roots of unity used in EIP-4844 as predefined evaluation -/// points. To compute the field element at index i in a blob, the blob polynomial is evaluated at -/// the i'th root of unity. 
Based on go-kzg-4844: <https://github.com/crate-crypto/go-kzg-4844/blob/8bcf6163d3987313a3194595cf1f33fd45d7301a/internal/kzg/domain.go#L44-L98> -/// Also, see the consensus specs: -/// - compute_roots_of_unity <https://github.com/ethereum/consensus-specs/blob/bf09edef17e2900258f7e37631e9452941c26e86/specs/deneb/polynomial-commitments.md#compute_roots_of_unity> -/// - bit-reversal permutation: <https://github.com/ethereum/consensus-specs/blob/bf09edef17e2900258f7e37631e9452941c26e86/specs/deneb/polynomial-commitments.md#bit-reversal-permutation> -fn generate_roots_of_unity() -> [Fr; FIELD_ELEMENTS_PER_BLOB as usize] { - const MAX_ORDER_ROOT: u64 = 32; - - let mut roots_of_unity = [Fr::ZERO; FIELD_ELEMENTS_PER_BLOB as usize]; - - // Generator of the largest 2-adic subgroup of order 2^32. - let root_of_unity = Fr::new( - BigInteger256::from_str( - "10238227357739495823651030575849232062558860180284477541189508159991286009131", - ) - .expect("Failed to initialize root of unity"), - ); - - // Find generator subgroup of order x. - // This can be constructed by powering a generator of the largest 2-adic subgroup of order 2^32 - // by an exponent of (2^32)/x, provided x is <= 2^32. - let log_x = FIELD_ELEMENTS_PER_BLOB.trailing_zeros() as u64; - let expo = 1u64 << (MAX_ORDER_ROOT - log_x); - - // Generator has order x now - let generator = root_of_unity.pow([expo]); - - // Compute all relevant roots of unity, i.e. 
the multiplicative subgroup of size x - let mut current = Fr::ONE; - (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { - roots_of_unity[i as usize] = current; - current *= generator; - }); - - let shift_correction = 64 - FIELD_ELEMENTS_PER_BLOB.trailing_zeros(); - (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { - // Find index irev, such that i and irev get swapped - let irev = i.reverse_bits() >> shift_correction; - if irev > i { - roots_of_unity.swap(i as usize, irev as usize); - } - }); - - roots_of_unity -} - -#[cfg(test)] -mod test { - use super::ROOTS_OF_UNITY; - use alloy_eips::eip4844::{FIELD_ELEMENTS_PER_BLOB, env_settings::EnvKzgSettings}; - use ark_ff::{BigInteger, PrimeField}; - use c_kzg::{BYTES_PER_BLOB, Blob, Bytes32, Bytes48}; - use rand::Rng; - use rayon::iter::{IntoParallelIterator, ParallelIterator}; - - #[test] - fn test_roots_of_unity() { - // Initiate the default Ethereum KZG settings. - let kzg = EnvKzgSettings::default(); - - // Create a blob with random data - let mut bytes = [0u8; BYTES_PER_BLOB]; - rand::rng().fill(bytes.as_mut_slice()); - - // Ensure the blob is valid by keeping each field element within range. 
- (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { - bytes[(i as usize) << 5] = 0; - }); - - let blob = Blob::new(bytes); - let blob_commitment = { - let raw = kzg.get().blob_to_kzg_commitment(&blob).unwrap(); - Bytes48::new(raw.as_slice().try_into().unwrap()) - }; - - // Validate each field element in the blob - (0..FIELD_ELEMENTS_PER_BLOB).into_par_iter().for_each(|i| { - let field_element = { - let mut fe = [0u8; 32]; - fe.copy_from_slice(&blob[(i as usize) << 5..(i as usize + 1) << 5]); - Bytes32::new(fe) - }; - - let z_bytes = Bytes32::new(ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().try_into().unwrap()); - let (proof, fe) = kzg.get().compute_kzg_proof(&blob, &z_bytes).unwrap(); - - // Ensure the field element matches the expected value - assert_eq!( - fe.as_slice(), - field_element.as_slice(), - "Field element {i} does not match the expected value. Expected: {field_element:?}, Got: {fe:?}" - ); - - // Ensure the proof can be verified - let proof_bytes = Bytes48::new(proof.as_slice().try_into().unwrap()); - let is_valid = kzg.get().verify_kzg_proof(&blob_commitment, &z_bytes, &field_element, &proof_bytes).unwrap(); - assert!( - is_valid, - "KZG proof verification failed for field element {i}. Commitment: {blob_commitment:?}, Z: {z_bytes:?}, Field Element: {field_element:?}, Proof: {proof_bytes:?}" - ); - }); - } -} diff --git a/kona/crates/proof/proof/src/l1/chain_provider.rs b/kona/crates/proof/proof/src/l1/chain_provider.rs deleted file mode 100644 index 32cffd9365f..00000000000 --- a/kona/crates/proof/proof/src/l1/chain_provider.rs +++ /dev/null @@ -1,141 +0,0 @@ -//! Contains the concrete implementation of the [ChainProvider] trait for the proof. 
- -use crate::{HintType, errors::OracleProviderError}; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; -use alloy_consensus::{Header, Receipt, ReceiptEnvelope, TxEnvelope}; -use alloy_eips::eip2718::Decodable2718; -use alloy_primitives::B256; -use alloy_rlp::Decodable; -use async_trait::async_trait; -use kona_derive::ChainProvider; -use kona_mpt::{OrderedListWalker, TrieNode, TrieProvider}; -use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; -use kona_protocol::BlockInfo; - -/// The oracle-backed L1 chain provider for the client program. -#[derive(Debug, Clone)] -pub struct OracleL1ChainProvider<T: CommsClient> { - /// The L1 head hash. - pub l1_head: B256, - /// The preimage oracle client. - pub oracle: Arc<T>, -} - -impl<T: CommsClient> OracleL1ChainProvider<T> { - /// Creates a new [OracleL1ChainProvider] with the given boot information and oracle client. - pub const fn new(l1_head: B256, oracle: Arc<T>) -> Self { - Self { l1_head, oracle } - } -} - -#[async_trait] -impl<T: CommsClient + Sync + Send> ChainProvider for OracleL1ChainProvider<T> { - type Error = OracleProviderError; - - async fn header_by_hash(&mut self, hash: B256) -> Result<Header, Self::Error> { - // Fetch the header RLP from the oracle. - HintType::L1BlockHeader.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; - let header_rlp = self.oracle.get(PreimageKey::new_keccak256(*hash)).await?; - - // Decode the header RLP into a Header. - Header::decode(&mut header_rlp.as_slice()).map_err(OracleProviderError::Rlp) - } - - async fn block_info_by_number(&mut self, block_number: u64) -> Result<BlockInfo, Self::Error> { - // Fetch the starting block header. - let mut header = self.header_by_hash(self.l1_head).await?; - - // Check if the block number is in range. If not, we can fail early. - if block_number > header.number { - return Err(OracleProviderError::BlockNumberPastHead(block_number, header.number)); - } - - // Walk back the block headers to the desired block number. 
- while header.number > block_number { - header = self.header_by_hash(header.parent_hash).await?; - } - - Ok(BlockInfo { - hash: header.hash_slow(), - number: header.number, - parent_hash: header.parent_hash, - timestamp: header.timestamp, - }) - } - - async fn receipts_by_hash(&mut self, hash: B256) -> Result<Vec<Receipt>, Self::Error> { - // Fetch the block header to find the receipts root. - let header = self.header_by_hash(hash).await?; - - // Send a hint for the block's receipts, and walk through the receipts trie in the header to - // verify them. - HintType::L1Receipts.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; - let trie_walker = OrderedListWalker::try_new_hydrated(header.receipts_root, self) - .map_err(OracleProviderError::TrieWalker)?; - - // Decode the receipts within the receipts trie. - let receipts = trie_walker - .into_iter() - .map(|(_, rlp)| { - let envelope = ReceiptEnvelope::decode_2718(&mut rlp.as_ref())?; - Ok(envelope.as_receipt().expect("Infallible").clone()) - }) - .collect::<Result<Vec<_>, _>>() - .map_err(OracleProviderError::Rlp)?; - - Ok(receipts) - } - - async fn block_info_and_transactions_by_hash( - &mut self, - hash: B256, - ) -> Result<(BlockInfo, Vec<TxEnvelope>), Self::Error> { - // Fetch the block header to construct the block info. - let header = self.header_by_hash(hash).await?; - let block_info = BlockInfo { - hash, - number: header.number, - parent_hash: header.parent_hash, - timestamp: header.timestamp, - }; - - // Send a hint for the block's transactions, and walk through the transactions trie in the - // header to verify them. - HintType::L1Transactions.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; - let trie_walker = OrderedListWalker::try_new_hydrated(header.transactions_root, self) - .map_err(OracleProviderError::TrieWalker)?; - - // Decode the transactions within the transactions trie. 
- let transactions = trie_walker - .into_iter() - .map(|(_, rlp)| { - // note: not short-handed for error type coercion w/ `?`. - let rlp = TxEnvelope::decode_2718(&mut rlp.as_ref())?; - Ok(rlp) - }) - .collect::<Result<Vec<_>, _>>() - .map_err(OracleProviderError::Rlp)?; - - Ok((block_info, transactions)) - } -} - -impl<T: CommsClient> TrieProvider for OracleL1ChainProvider<T> { - type Error = OracleProviderError; - - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, Self::Error> { - // On L1, trie node preimages are stored as keccak preimage types in the oracle. We assume - // that a hint for these preimages has already been sent, prior to this call. - crate::block_on(async move { - TrieNode::decode( - &mut self - .oracle - .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) - .await - .map_err(OracleProviderError::Preimage)? - .as_ref(), - ) - .map_err(OracleProviderError::Rlp) - }) - } -} diff --git a/kona/crates/proof/proof/src/l1/pipeline.rs b/kona/crates/proof/proof/src/l1/pipeline.rs deleted file mode 100644 index a27f7e2262e..00000000000 --- a/kona/crates/proof/proof/src/l1/pipeline.rs +++ /dev/null @@ -1,183 +0,0 @@ -//! Contains an oracle-backed pipeline. - -use crate::FlushableCache; -use alloc::{boxed::Box, sync::Arc}; -use async_trait::async_trait; -use core::fmt::Debug; -use kona_derive::{ - ChainProvider, DataAvailabilityProvider, DerivationPipeline, L2ChainProvider, OriginProvider, - Pipeline, PipelineBuilder, PipelineErrorKind, PipelineResult, PolledAttributesQueueStage, - ResetSignal, Signal, SignalReceiver, StatefulAttributesBuilder, StepResult, -}; -use kona_driver::{DriverPipeline, PipelineCursor}; -use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; -use kona_preimage::CommsClient; -use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; -use spin::RwLock; - -/// An oracle-backed derivation pipeline. 
-pub type ProviderDerivationPipeline<L1, L2, DA> = DerivationPipeline< - PolledAttributesQueueStage<DA, L1, L2, ProviderAttributesBuilder<L1, L2>>, - L2, ->; - -/// An oracle-backed payload attributes builder for the `AttributesQueue` stage of the derivation -/// pipeline. -pub type ProviderAttributesBuilder<L1, L2> = StatefulAttributesBuilder<L1, L2>; - -/// An oracle-backed derivation pipeline. -#[derive(Debug)] -pub struct OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// The internal derivation pipeline. - pub pipeline: ProviderDerivationPipeline<L1, L2, DA>, - /// The caching oracle. - pub caching_oracle: Arc<O>, -} - -impl<O, L1, L2, DA> OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// Constructs a new oracle-backed derivation pipeline. - pub async fn new( - cfg: Arc<RollupConfig>, - l1_cfg: Arc<L1ChainConfig>, - sync_start: Arc<RwLock<PipelineCursor>>, - caching_oracle: Arc<O>, - da_provider: DA, - chain_provider: L1, - mut l2_chain_provider: L2, - ) -> PipelineResult<Self> { - let attributes = StatefulAttributesBuilder::new( - cfg.clone(), - l1_cfg, - l2_chain_provider.clone(), - chain_provider.clone(), - ); - - let cfg_for_reset = cfg.clone(); - - let mut pipeline = PipelineBuilder::new() - .rollup_config(cfg) - .dap_source(da_provider) - .l2_chain_provider(l2_chain_provider.clone()) - .chain_provider(chain_provider) - .builder(attributes) - .origin(sync_start.read().origin()) - .build_polled(); - - // Reset the pipeline to populate the initial system configuration in L1 Traversal. 
- let l2_safe_head = *sync_start.read().l2_safe_head(); - pipeline - .signal( - ResetSignal { - l2_safe_head, - l1_origin: sync_start.read().origin(), - system_config: l2_chain_provider - .system_config_by_number(l2_safe_head.block_info.number, cfg_for_reset) - .await - .ok(), - } - .signal(), - ) - .await?; - - Ok(Self { pipeline, caching_oracle }) - } -} - -impl<O, L1, L2, DA> DriverPipeline<ProviderDerivationPipeline<L1, L2, DA>> - for OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// Flushes the cache on re-org. - fn flush(&mut self) { - self.caching_oracle.flush(); - } -} - -#[async_trait] -impl<O, L1, L2, DA> SignalReceiver for OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// Receives a signal from the driver. - async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { - self.pipeline.signal(signal).await - } -} - -impl<O, L1, L2, DA> OriginProvider for OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// Returns the optional L1 [BlockInfo] origin. 
- fn origin(&self) -> Option<BlockInfo> { - self.pipeline.origin() - } -} - -impl<O, L1, L2, DA> Iterator for OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - type Item = OpAttributesWithParent; - - fn next(&mut self) -> Option<Self::Item> { - self.pipeline.next() - } -} - -#[async_trait] -impl<O, L1, L2, DA> Pipeline for OraclePipeline<O, L1, L2, DA> -where - O: CommsClient + FlushableCache + Send + Sync + Debug, - L1: ChainProvider + Send + Sync + Debug + Clone, - L2: L2ChainProvider + Send + Sync + Debug + Clone, - DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, -{ - /// Peeks at the next [OpAttributesWithParent] from the pipeline. - fn peek(&self) -> Option<&OpAttributesWithParent> { - self.pipeline.peek() - } - - /// Attempts to progress the pipeline. - async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { - self.pipeline.step(cursor).await - } - - /// Returns the rollup config. - fn rollup_config(&self) -> &RollupConfig { - self.pipeline.rollup_config() - } - - /// Returns the [SystemConfig] by L2 number. - async fn system_config_by_number( - &mut self, - number: u64, - ) -> Result<SystemConfig, PipelineErrorKind> { - self.pipeline.system_config_by_number(number).await - } -} diff --git a/kona/crates/proof/proof/src/l2/chain_provider.rs b/kona/crates/proof/proof/src/l2/chain_provider.rs deleted file mode 100644 index 75336173777..00000000000 --- a/kona/crates/proof/proof/src/l2/chain_provider.rs +++ /dev/null @@ -1,281 +0,0 @@ -//! Contains the concrete implementation of the [L2ChainProvider] trait for the client program. 
- -use crate::{HintType, eip2935::eip_2935_history_lookup, errors::OracleProviderError}; -use alloc::{boxed::Box, sync::Arc, vec::Vec}; -use alloy_consensus::{BlockBody, Header}; -use alloy_eips::eip2718::Decodable2718; -use alloy_primitives::{Address, B256, Bytes}; -use alloy_rlp::Decodable; -use async_trait::async_trait; -use kona_derive::L2ChainProvider; -use kona_driver::PipelineCursor; -use kona_executor::TrieDBProvider; -use kona_genesis::{RollupConfig, SystemConfig}; -use kona_mpt::{OrderedListWalker, TrieHinter, TrieNode, TrieProvider}; -use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; -use kona_protocol::{BatchValidationProvider, L2BlockInfo, to_system_config}; -use op_alloy_consensus::{OpBlock, OpTxEnvelope}; -use spin::RwLock; - -/// The oracle-backed L2 chain provider for the client program. -#[derive(Debug, Clone)] -pub struct OracleL2ChainProvider<T: CommsClient> { - /// The L2 safe head block hash. - l2_head: B256, - /// The rollup configuration. - rollup_config: Arc<RollupConfig>, - /// The preimage oracle client. - oracle: Arc<T>, - /// The derivation pipeline cursor - cursor: Option<Arc<RwLock<PipelineCursor>>>, - /// The L2 chain ID to use for the provider's hints. - chain_id: Option<u64>, -} - -impl<T: CommsClient> OracleL2ChainProvider<T> { - /// Creates a new [OracleL2ChainProvider] with the given boot information and oracle client. - pub const fn new(l2_head: B256, rollup_config: Arc<RollupConfig>, oracle: Arc<T>) -> Self { - Self { l2_head, rollup_config, oracle, cursor: None, chain_id: None } - } - - /// Sets the L2 chain ID to use for the provider's hints. 
- pub const fn set_chain_id(&mut self, chain_id: Option<u64>) { - self.chain_id = chain_id; - } - - /// Updates the derivation pipeline cursor - pub fn set_cursor(&mut self, cursor: Arc<RwLock<PipelineCursor>>) { - self.cursor = Some(cursor); - } - - /// Fetches the latest known safe head block hash according to the derivation pipeline cursor - /// or uses the initial l2_head value if no cursor is set. - pub async fn l2_safe_head(&self) -> Result<B256, OracleProviderError> { - self.cursor - .as_ref() - .map_or(Ok(self.l2_head), |cursor| Ok(cursor.read().l2_safe_head().block_info.hash)) - } -} - -impl<T: CommsClient> OracleL2ChainProvider<T> { - /// Returns a [Header] corresponding to the given L2 block number, by walking back from the - /// L2 safe head. - async fn header_by_number(&mut self, block_number: u64) -> Result<Header, OracleProviderError> { - // Fetch the starting block header. - let mut header = self.header_by_hash(self.l2_safe_head().await?)?; - - // Check if the block number is in range. If not, we can fail early. - if block_number > header.number { - return Err(OracleProviderError::BlockNumberPastHead(block_number, header.number)); - } - - let mut linear_fallback = false; - while header.number > block_number { - if self.rollup_config.is_isthmus_active(header.timestamp) && !linear_fallback { - // If Isthmus is active, the EIP-2935 contract is used to perform leaping lookbacks - // through consulting the ring buffer within the contract. If this - // lookup fails for any reason, we fall back to linear walk back. - let block_hash = - match eip_2935_history_lookup(&header, block_number, self, self).await { - Ok(hash) => hash, - Err(_) => { - // If the EIP-2935 lookup fails for any reason, attempt fallback to - // linear walk back. - linear_fallback = true; - continue; - } - }; - - header = self.header_by_hash(block_hash)?; - } else { - // Walk back the block headers one-by-one until the desired block number is reached. 
- header = self.header_by_hash(header.parent_hash)?; - } - } - - Ok(header) - } -} - -#[async_trait] -impl<T: CommsClient + Send + Sync> BatchValidationProvider for OracleL2ChainProvider<T> { - type Error = OracleProviderError; - - async fn l2_block_info_by_number(&mut self, number: u64) -> Result<L2BlockInfo, Self::Error> { - // Get the block at the given number. - let block = self.block_by_number(number).await?; - - // Construct the system config from the payload. - L2BlockInfo::from_block_and_genesis(&block, &self.rollup_config.genesis) - .map_err(OracleProviderError::BlockInfo) - } - - async fn block_by_number(&mut self, number: u64) -> Result<OpBlock, Self::Error> { - // Fetch the header for the given block number. - let header @ Header { transactions_root, timestamp, .. } = - self.header_by_number(number).await?; - let header_hash = header.hash_slow(); - - // Fetch the transactions in the block. - HintType::L2Transactions - .with_data(&[header_hash.as_ref()]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await?; - let trie_walker = OrderedListWalker::try_new_hydrated(transactions_root, self) - .map_err(OracleProviderError::TrieWalker)?; - - // Decode the transactions within the transactions trie. 
- let transactions = trie_walker - .into_iter() - .map(|(_, rlp)| { - let res = OpTxEnvelope::decode_2718(&mut rlp.as_ref())?; - Ok(res) - }) - .collect::<Result<Vec<_>, _>>() - .map_err(OracleProviderError::Rlp)?; - - let optimism_block = OpBlock { - header, - body: BlockBody { - transactions, - ommers: Vec::new(), - withdrawals: self - .rollup_config - .is_canyon_active(timestamp) - .then(|| alloy_eips::eip4895::Withdrawals::new(Vec::new())), - }, - }; - Ok(optimism_block) - } -} - -#[async_trait] -impl<T: CommsClient + Send + Sync> L2ChainProvider for OracleL2ChainProvider<T> { - type Error = OracleProviderError; - - async fn system_config_by_number( - &mut self, - number: u64, - rollup_config: Arc<RollupConfig>, - ) -> Result<SystemConfig, <Self as L2ChainProvider>::Error> { - // Get the block at the given number. - let block = self.block_by_number(number).await?; - - // Construct the system config from the payload. - to_system_config(&block, rollup_config.as_ref()) - .map_err(OracleProviderError::OpBlockConversion) - } -} - -impl<T: CommsClient> TrieProvider for OracleL2ChainProvider<T> { - type Error = OracleProviderError; - - fn trie_node_by_hash(&self, key: B256) -> Result<TrieNode, OracleProviderError> { - // On L2, trie node preimages are stored as keccak preimage types in the oracle. We assume - // that a hint for these preimages has already been sent, prior to this call. - crate::block_on(async move { - TrieNode::decode( - &mut self - .oracle - .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) - .await - .map_err(OracleProviderError::Preimage)? - .as_ref(), - ) - .map_err(OracleProviderError::Rlp) - }) - } -} - -impl<T: CommsClient> TrieDBProvider for OracleL2ChainProvider<T> { - fn bytecode_by_hash(&self, hash: B256) -> Result<Bytes, OracleProviderError> { - // Fetch the bytecode preimage from the caching oracle. 
- crate::block_on(async move { - HintType::L2Code - .with_data(&[hash.as_slice()]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await?; - self.oracle - .get(PreimageKey::new_keccak256(*hash)) - .await - .map(Into::into) - .map_err(OracleProviderError::Preimage) - }) - } - - fn header_by_hash(&self, hash: B256) -> Result<Header, OracleProviderError> { - // Fetch the header from the caching oracle. - crate::block_on(async move { - HintType::L2BlockHeader - .with_data(&[hash.as_slice()]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await?; - let header_bytes = self.oracle.get(PreimageKey::new_keccak256(*hash)).await?; - - Header::decode(&mut header_bytes.as_slice()).map_err(OracleProviderError::Rlp) - }) - } -} - -impl<T: CommsClient> TrieHinter for OracleL2ChainProvider<T> { - type Error = OracleProviderError; - - fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error> { - crate::block_on(async move { - HintType::L2StateNode - .with_data(&[hash.as_slice()]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error> { - crate::block_on(async move { - HintType::L2AccountProof - .with_data(&[block_number.to_be_bytes().as_ref(), address.as_slice()]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_storage_proof( - &self, - address: alloy_primitives::Address, - slot: alloy_primitives::U256, - block_number: u64, - ) -> Result<(), Self::Error> { - crate::block_on(async move { - HintType::L2AccountStorageProof - .with_data(&[ - block_number.to_be_bytes().as_ref(), - address.as_slice(), - slot.to_be_bytes::<32>().as_ref(), - ]) - 
.with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await - }) - } - - fn hint_execution_witness( - &self, - parent_hash: B256, - op_payload_attributes: &op_alloy_rpc_types_engine::OpPayloadAttributes, - ) -> Result<(), Self::Error> { - crate::block_on(async move { - let encoded_attributes = - serde_json::to_vec(op_payload_attributes).map_err(OracleProviderError::Serde)?; - - HintType::L2PayloadWitness - .with_data(&[parent_hash.as_slice(), &encoded_attributes]) - .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) - .send(self.oracle.as_ref()) - .await - }) - } -} diff --git a/kona/crates/proof/proof/src/lib.rs b/kona/crates/proof/proof/src/lib.rs deleted file mode 100644 index bfb7eda7eca..00000000000 --- a/kona/crates/proof/proof/src/lib.rs +++ /dev/null @@ -1,38 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![no_std] - -extern crate alloc; - -#[macro_use] -extern crate tracing; - -pub mod l1; - -pub mod l2; - -pub mod sync; - -pub mod errors; - -pub mod executor; - -mod hint; -pub use hint::{Hint, HintType}; - -pub mod boot; -pub use boot::BootInfo; - -mod caching_oracle; -pub use caching_oracle::{CachingOracle, FlushableCache}; - -mod blocking_runtime; -pub use blocking_runtime::block_on; - -mod eip2935; -pub use eip2935::eip_2935_history_lookup; diff --git a/kona/crates/proof/std-fpvm-proc/Cargo.toml b/kona/crates/proof/std-fpvm-proc/Cargo.toml deleted file mode 100644 index 533b386087d..00000000000 --- a/kona/crates/proof/std-fpvm-proc/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -[package] -name = "kona-std-fpvm-proc" -description = "Proc macro entry point 
for `kona-std-fpvm` targeted programs." -version = "0.2.0" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lib] -proc-macro = true - -[dependencies] -# General -cfg-if.workspace = true - -# Workspace -kona-std-fpvm.workspace = true - -# Proc Macros -quote = "1.0" -proc-macro2 = "1.0" -syn = { version = "2.0", features = ["full"] } - -[package.metadata.cargo-udeps.ignore] -normal = ["kona-std-fpvm"] diff --git a/kona/crates/proof/std-fpvm/README.md b/kona/crates/proof/std-fpvm/README.md deleted file mode 100644 index 6eb566d9bc9..00000000000 --- a/kona/crates/proof/std-fpvm/README.md +++ /dev/null @@ -1,10 +0,0 @@ -# `kona-std-fpvm` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-std-fpvm"><img src="https://img.shields.io/crates/v/kona-std-fpvm.svg?label=kona-std-fpvm&labelColor=2a2f35" alt="Kona Proof SDK"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -Platform specific [Fault Proof VM][g-fault-proof-vm] kernel APIs. - -[g-fault-proof-vm]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-vm diff --git a/kona/crates/proof/std-fpvm/src/channel.rs b/kona/crates/proof/std-fpvm/src/channel.rs deleted file mode 100644 index 7d0abc91a11..00000000000 --- a/kona/crates/proof/std-fpvm/src/channel.rs +++ /dev/null @@ -1,158 +0,0 @@ -//! This module contains a rudimentary channel between two file descriptors, using [crate::io] -//! for reading and writing from the file descriptors. 
- -use crate::{FileDescriptor, io}; -use alloc::boxed::Box; -use async_trait::async_trait; -use core::{ - cell::RefCell, - cmp::Ordering, - future::Future, - pin::Pin, - task::{Context, Poll}, -}; -use kona_preimage::{ - Channel, - errors::{ChannelError, ChannelResult}, -}; - -/// [FileChannel] is a handle for one end of a bidirectional channel. -#[derive(Debug, Clone, Copy)] -pub struct FileChannel { - /// File descriptor to read from - read_handle: FileDescriptor, - /// File descriptor to write to - write_handle: FileDescriptor, -} - -impl FileChannel { - /// Create a new [FileChannel] from two file descriptors. - pub const fn new(read_handle: FileDescriptor, write_handle: FileDescriptor) -> Self { - Self { read_handle, write_handle } - } - - /// Returns a copy of the [FileDescriptor] used for the read end of the channel. - pub const fn read_handle(&self) -> FileDescriptor { - self.read_handle - } - - /// Returns a copy of the [FileDescriptor] used for the write end of the channel. - pub const fn write_handle(&self) -> FileDescriptor { - self.write_handle - } -} - -#[async_trait] -impl Channel for FileChannel { - async fn read(&self, buf: &mut [u8]) -> ChannelResult<usize> { - io::read(self.read_handle, buf).map_err(|_| ChannelError::Closed) - } - - async fn read_exact(&self, buf: &mut [u8]) -> ChannelResult<usize> { - ReadFuture::new(*self, buf).await.map_err(|_| ChannelError::Closed) - } - - async fn write(&self, buf: &[u8]) -> ChannelResult<usize> { - WriteFuture::new(*self, buf).await.map_err(|_| ChannelError::Closed) - } -} - -/// A future that reads from a channel, returning [Poll::Ready] when the buffer is full. -struct ReadFuture<'a> { - /// The channel to read from - channel: FileChannel, - /// The buffer to read into - buf: RefCell<&'a mut [u8]>, - /// The number of bytes read so far - read: usize, -} - -impl<'a> ReadFuture<'a> { - /// Create a new [ReadFuture] from a channel and a buffer. 
- #[allow(clippy::missing_const_for_fn)] - fn new(channel: FileChannel, buf: &'a mut [u8]) -> Self { - Self { channel, buf: RefCell::new(buf), read: 0 } - } -} - -impl Future for ReadFuture<'_> { - type Output = ChannelResult<usize>; - - fn poll(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Self::Output> { - let mut buf = self.buf.borrow_mut(); - let buf_len = buf.len(); - let chunk_read = io::read(self.channel.read_handle, &mut buf[self.read..]) - .map_err(|_| ChannelError::Closed)?; - - // Drop the borrow on self. - drop(buf); - - self.read += chunk_read; - - match self.read.cmp(&buf_len) { - Ordering::Greater | Ordering::Equal => Poll::Ready(Ok(self.read)), - Ordering::Less => { - // Register the current task to be woken up when it can make progress - ctx.waker().wake_by_ref(); - Poll::Pending - } - } - } -} - -/// A future that writes to a channel, returning [Poll::Ready] when the full buffer has been -/// written. -struct WriteFuture<'a> { - /// The channel to write to - channel: FileChannel, - /// The buffer to write - buf: &'a [u8], - /// The number of bytes written so far - written: usize, -} - -impl<'a> WriteFuture<'a> { - /// Create a new [WriteFuture] from a channel and a buffer. 
- const fn new(channel: FileChannel, buf: &'a [u8]) -> Self { - Self { channel, buf, written: 0 } - } -} - -impl Future for WriteFuture<'_> { - type Output = ChannelResult<usize>; - - fn poll(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Self::Output> { - match io::write(self.channel.write_handle(), &self.buf[self.written..]) { - Ok(n) => { - self.written += n; - - match self.written.cmp(&self.buf.len()) { - Ordering::Equal | Ordering::Greater => { - // Finished writing - Poll::Ready(Ok(self.written)) - } - Ordering::Less => { - // Register the current task to be woken up when it can make progress - ctx.waker().wake_by_ref(); - Poll::Pending - } - } - } - Err(_) => Poll::Ready(Err(ChannelError::Closed)), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_get_read_handle() { - let read_handle = FileDescriptor::StdIn; - let write_handle = FileDescriptor::StdOut; - let chan = FileChannel::new(read_handle, write_handle); - let ref_read_handle = chan.read_handle(); - assert_eq!(read_handle, ref_read_handle); - } -} diff --git a/kona/crates/proof/std-fpvm/src/errors.rs b/kona/crates/proof/std-fpvm/src/errors.rs deleted file mode 100644 index a8fbec07c74..00000000000 --- a/kona/crates/proof/std-fpvm/src/errors.rs +++ /dev/null @@ -1,11 +0,0 @@ -//! Errors for the `kona-std-fpvm` crate. - -use thiserror::Error; - -/// An error that can occur when reading from or writing to a file descriptor. -#[derive(Error, Debug, PartialEq, Eq)] -#[error("IO error (errno: {_0})")] -pub struct IOError(pub i32); - -/// A [Result] type for the [IOError]. -pub type IOResult<T> = Result<T, IOError>; diff --git a/kona/crates/proof/std-fpvm/src/io.rs b/kona/crates/proof/std-fpvm/src/io.rs deleted file mode 100644 index 011b9ad7b86..00000000000 --- a/kona/crates/proof/std-fpvm/src/io.rs +++ /dev/null @@ -1,93 +0,0 @@ -//! This module contains the `ClientIO` struct, which is a system call interface for the kernel. 
- -use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult}; -use cfg_if::cfg_if; - -cfg_if! { - if #[cfg(target_arch = "mips64")] { - #[doc = "Concrete implementation of the [BasicKernelInterface] trait for the `MIPS64r2` target architecture."] - pub(crate) type ClientIO = crate::mips64::io::Mips64IO; - } else if #[cfg(target_arch = "riscv64")] { - #[doc = "Concrete implementation of the [BasicKernelInterface] trait for the `riscv64` target architecture."] - pub(crate) type ClientIO = crate::riscv64::io::RiscV64IO; - } else { - use std::{fs::File, os::fd::FromRawFd, io::{Read, Write}}; - use crate::errors::IOError; - - #[doc = "Native implementation of the [BasicKernelInterface] trait."] - pub(crate) struct NativeClientIO; - - impl BasicKernelInterface for NativeClientIO { - fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> { - unsafe { - let mut file = File::from_raw_fd(fd as i32); - file.write_all(buf).map_err(|_| IOError(-9))?; - std::mem::forget(file); - Ok(buf.len()) - } - } - - fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> { - unsafe { - let mut file = File::from_raw_fd(fd as i32); - file.read_exact(buf).map_err(|_| IOError(-9))?; - std::mem::forget(file); - Ok(buf.len()) - } - } - - fn mmap(_size: usize) -> IOResult<usize> { - unimplemented!("mmap is unimplemented for the native target; The default global allocator is favored."); - } - - fn exit(code: usize) -> ! { - std::process::exit(code as i32) - } - } - - #[doc = "Native implementation of the [BasicKernelInterface] trait."] - pub(crate) type ClientIO = NativeClientIO; - } -} - -/// Print the passed string to the standard output [FileDescriptor]. -/// -/// # Panics -/// Panics if the write operation fails. -#[inline] -pub fn print(s: &str) { - ClientIO::write(FileDescriptor::StdOut, s.as_bytes()).expect("Error writing to stdout."); -} - -/// Print the passed string to the standard error [FileDescriptor]. -/// -/// # Panics -/// Panics if the write operation fails. 
-#[inline] -pub fn print_err(s: &str) { - ClientIO::write(FileDescriptor::StdErr, s.as_bytes()).expect("Error writing to stderr."); -} - -/// Write the passed buffer to the given [FileDescriptor]. -#[inline] -pub fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> { - ClientIO::write(fd, buf) -} - -/// Write the passed buffer to the given [FileDescriptor]. -#[inline] -pub fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> { - ClientIO::read(fd, buf) -} - -/// Map new memory of block size `size`. Returns the new heap pointer. -#[inline] -pub fn mmap(size: usize) -> IOResult<usize> { - ClientIO::mmap(size) -} - -/// Exit the process with the given exit code. -#[inline] -pub fn exit(code: usize) -> ! { - ClientIO::exit(code) -} diff --git a/kona/crates/proof/std-fpvm/src/lib.rs b/kona/crates/proof/std-fpvm/src/lib.rs deleted file mode 100644 index 6c7c549025e..00000000000 --- a/kona/crates/proof/std-fpvm/src/lib.rs +++ /dev/null @@ -1,37 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(target_arch = "mips64", feature(asm_experimental_arch))] -#![cfg_attr(any(target_arch = "mips64", target_arch = "riscv64"), no_std)] - -extern crate alloc; - -pub mod errors; - -pub mod io; - -#[cfg(feature = "tracing")] -pub mod tracing; - -pub mod malloc; - -mod traits; -pub use traits::BasicKernelInterface; - -mod types; -pub use types::FileDescriptor; - -mod channel; -pub use channel::FileChannel; - -pub(crate) mod linux; - -#[cfg(target_arch = "mips64")] -pub(crate) mod mips64; - -#[cfg(target_arch = "riscv64")] -pub(crate) mod riscv64; diff --git a/kona/crates/proof/std-fpvm/src/mips64/io.rs b/kona/crates/proof/std-fpvm/src/mips64/io.rs 
deleted file mode 100644 index 9f6495593b1..00000000000 --- a/kona/crates/proof/std-fpvm/src/mips64/io.rs +++ /dev/null @@ -1,68 +0,0 @@ -use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult, mips64::syscall}; - -/// Concrete implementation of the [BasicKernelInterface] trait for the `MIPS64r2` target -/// architecture. Exposes a safe interface for performing IO operations within the kernel. -#[derive(Debug)] -pub(crate) struct Mips64IO; - -/// Relevant system call numbers for the `MIPS64r2` target architecture. -/// -/// See [Cannon System Call Specification](https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#syscalls) -/// -/// **Note**: This is not an exhaustive list of system calls available to the `client` program, -/// only the ones necessary for the [BasicKernelInterface] trait implementation. If an extension -/// trait for the [BasicKernelInterface] trait is created for the `Cannon` kernel, this list should -/// be extended accordingly. -#[repr(usize)] -pub(crate) enum SyscallNumber { - /// Sets the Exited and ExitCode states to true and $a0 respectively. - Exit = 5205, - /// Similar behavior as Linux/MIPS with support for unaligned reads. - Read = 5000, - /// Similar behavior as Linux/MIPS with support for unaligned writes. - Write = 5001, - /// Similar behavior as Linux/MIPS for mapping memory on the host machine. Only accepts 2 - /// arguments for cannon. 
- Mmap = 5009, -} - -impl BasicKernelInterface for Mips64IO { - fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> { - unsafe { - crate::linux::from_ret(syscall::syscall3( - SyscallNumber::Write as usize, - fd.into(), - buf.as_ptr() as usize, - buf.len(), - )) - } - } - - fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> { - unsafe { - crate::linux::from_ret(syscall::syscall3( - SyscallNumber::Read as usize, - fd.into(), - buf.as_ptr() as usize, - buf.len(), - )) - } - } - - fn mmap(size: usize) -> IOResult<usize> { - unsafe { - crate::linux::from_ret(syscall::syscall2( - SyscallNumber::Mmap as usize, - 0usize, // anonymous map - size, - )) - } - } - - fn exit(code: usize) -> ! { - unsafe { - let _ = syscall::syscall1(SyscallNumber::Exit as usize, code); - panic!("exit syscall returned unexpectedly with code: {}", code) - } - } -} diff --git a/kona/crates/proof/std-fpvm/src/mips64/mod.rs b/kona/crates/proof/std-fpvm/src/mips64/mod.rs deleted file mode 100644 index f4f2e06d2ca..00000000000 --- a/kona/crates/proof/std-fpvm/src/mips64/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -//! This module contains raw syscall bindings for the `MIPS64r2` target architecture, as well as a -//! high-level implementation of the [crate::BasicKernelInterface] trait for the `Cannon` kernel. - -pub(crate) mod io; -mod syscall; diff --git a/kona/crates/proof/std-fpvm/src/riscv64/io.rs b/kona/crates/proof/std-fpvm/src/riscv64/io.rs deleted file mode 100644 index e680d0d50d3..00000000000 --- a/kona/crates/proof/std-fpvm/src/riscv64/io.rs +++ /dev/null @@ -1,71 +0,0 @@ -use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult, riscv64::syscall}; - -/// Concrete implementation of the [`KernelIO`] trait for the `riscv64` target architecture. -#[derive(Debug)] -pub(crate) struct RiscV64IO; - -/// Relevant system call numbers for the `riscv64` target architecture. 
-/// -/// See https://jborza.com/post/2021-05-11-riscv-linux-syscalls/ -/// -/// **Note**: This is not an exhaustive list of system calls available to the `client` program, -/// only the ones necessary for the [BasicKernelInterface] trait implementation. If an extension -/// trait for the [BasicKernelInterface] trait is created for the linux kernel, this list -/// should be extended accordingly. -#[repr(usize)] -pub(crate) enum SyscallNumber { - /// Sets the Exited and ExitCode states to true and $a0 respectively. - Exit = 93, - /// Similar behavior as Linux with support for unaligned reads. - Read = 63, - /// Similar behavior as Linux with support for unaligned writes. - Write = 64, - /// Similar behavior as Linux for mapping memory on the host machine. - Mmap = 222, -} - -impl BasicKernelInterface for RiscV64IO { - fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> { - unsafe { - crate::linux::from_ret(syscall::syscall3( - SyscallNumber::Write as usize, - fd.into(), - buf.as_ptr() as usize, - buf.len(), - )) - } - } - - fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> { - unsafe { - crate::linux::from_ret(syscall::syscall3( - SyscallNumber::Read as usize, - fd.into(), - buf.as_ptr() as usize, - buf.len(), - )) - } - } - - fn mmap(size: usize) -> IOResult<usize> { - // https://github.com/ethereum-optimism/asterisc/blob/master/rvgo/fast/vm.go#L360-L398 - unsafe { - crate::linux::from_ret(syscall::syscall6( - SyscallNumber::Mmap as usize, - 0usize, // address hint - 0 for anonymous maps - size, // block size - 0usize, // prot, ignored. - 0x20, // flags - set MAP_ANONYMOUS - u64::MAX as usize, // fd = -1, anonymous memory maps only. - 0usize, // offset - ignored, anonymous memory maps only. - )) - } - } - - fn exit(code: usize) -> ! 
{ - unsafe { - let _ = syscall::syscall1(SyscallNumber::Exit as usize, code); - panic!() - } - } -} diff --git a/kona/crates/proof/std-fpvm/src/riscv64/mod.rs b/kona/crates/proof/std-fpvm/src/riscv64/mod.rs deleted file mode 100644 index f90970ea7cc..00000000000 --- a/kona/crates/proof/std-fpvm/src/riscv64/mod.rs +++ /dev/null @@ -1,5 +0,0 @@ -//! This module contains raw syscall bindings for the `riscv64imac` target architecture, as well as -//! a high-level implementation of the [crate::BasicKernelInterface] trait for the kernel. - -pub(crate) mod io; -mod syscall; diff --git a/kona/crates/proof/std-fpvm/src/tracing.rs b/kona/crates/proof/std-fpvm/src/tracing.rs deleted file mode 100644 index 4ceb4f3308b..00000000000 --- a/kona/crates/proof/std-fpvm/src/tracing.rs +++ /dev/null @@ -1,97 +0,0 @@ -//! This module contains - -use crate::io; -use alloc::{ - format, - string::{String, ToString}, - vec::Vec, -}; -use tracing::{ - Event, Level, Metadata, Subscriber, - field::{Field, Visit}, - span::{Attributes, Id, Record}, -}; - -/// Custom [Subscriber] implementation that uses [crate::io] to write log entries to -/// [crate::FileDescriptor::StdOut]. -#[derive(Debug, Clone)] -pub struct FpvmTracingSubscriber { - min_level: Level, -} - -impl FpvmTracingSubscriber { - /// Create a new [FpvmTracingSubscriber] with the specified minimum log level. - pub const fn new(min_level: Level) -> Self { - Self { min_level } - } -} - -impl Subscriber for FpvmTracingSubscriber { - fn enabled(&self, _metadata: &Metadata<'_>) -> bool { - true - } - - fn new_span(&self, _span: &Attributes<'_>) -> Id { - Id::from_u64(1) - } - - fn record(&self, _span: &Id, _values: &Record<'_>) {} - - fn record_follows_from(&self, _span: &Id, _follows: &Id) {} - - fn event(&self, event: &Event<'_>) { - let metadata = event.metadata(); - // Comparisons for the [Level] type are inverted. See the [Level] documentation for more - // information. 
- if *metadata.level() > self.min_level { - return; - } - - let mut visitor = FieldVisitor::new(); - event.record(&mut visitor); - - let formatted_message = if visitor.fields.is_empty() { - visitor.message - } else if visitor.message.is_empty() { - visitor.fields.join(", ") - } else { - format!("{} {}", visitor.message, visitor.fields.join(", ")) - }; - - io::print(&format!("[{}] {}: {}", metadata.level(), metadata.target(), formatted_message)); - } - - fn enter(&self, _span: &Id) {} - - fn exit(&self, _span: &Id) {} -} - -/// Custom [`Visit`] implementation to extract log field values. -struct FieldVisitor { - message: String, - fields: Vec<String>, -} - -impl FieldVisitor { - const fn new() -> Self { - Self { message: String::new(), fields: Vec::new() } - } -} - -impl Visit for FieldVisitor { - fn record_debug(&mut self, field: &Field, value: &dyn core::fmt::Debug) { - if field.name() == "message" { - self.message = format!("{value:?}"); - } else { - self.fields.push(format!("{}={:?}", field.name(), value)); - } - } - - fn record_str(&mut self, field: &Field, value: &str) { - if field.name() == "message" { - self.message = value.to_string(); - } else { - self.fields.push(format!("{}={}", field.name(), value)); - } - } -} diff --git a/kona/crates/protocol/derive/README.md b/kona/crates/protocol/derive/README.md deleted file mode 100644 index bf6b19f50e3..00000000000 --- a/kona/crates/protocol/derive/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# `kona-derive` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-derive"><img src="https://img.shields.io/crates/v/kona-derive.svg?label=kona-derive&labelColor=2a2f35" alt="Kona Derive"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" 
alt="License"></a> -<a href="https://app.codecov.io/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -A `no_std` compatible implementation of the OP Stack's [derivation pipeline][derive]. - -[derive]: (https://specs.optimism.io/protocol/derivation.html#l2-chain-derivation-specification). - -## Usage - -The intended way of working with `kona-derive` is to use the [`DerivationPipeline`][dp] which implements the [`Pipeline`][p] trait. To create an instance of the [`DerivationPipeline`][dp], it's recommended to use the [`PipelineBuilder`][pb] as follows. - -```rust,ignore -use std::sync::Arc; -use kona_genesis::RollupConfig; -use kona_derive::EthereumDataSource; -use kona_derive::PipelineBuilder; -use kona_derive::StatefulAttributesBuilder; - -let chain_provider = todo!(); -let l2_chain_provider = todo!(); -let blob_provider = todo!(); -let l1_origin = todo!(); - -let cfg = Arc::new(RollupConfig::default()); -let attributes = StatefulAttributesBuilder::new( - cfg.clone(), - l2_chain_provider.clone(), - chain_provider.clone(), -); -let dap = EthereumDataSource::new( - chain_provider.clone(), - blob_provider, - cfg.as_ref() -); - -// Construct a new derivation pipeline. -let pipeline = PipelineBuilder::new() - .rollup_config(cfg) - .dap_source(dap) - .l2_chain_provider(l2_chain_provider) - .chain_provider(chain_provider) - .builder(attributes) - .origin(l1_origin) - .build(); -``` - -[p]: ./src/traits/pipeline.rs -[pb]: ./src/pipeline/builder.rs -[dp]: ./src/pipeline/core.rs - -## Features - -The most up-to-date feature list will be available on the [docs.rs `Feature Flags` tab][ff] of the `kona-derive` crate. - -Some features include the following. -- `serde`: Serialization and Deserialization support for `kona-derive` types. -- `test-utils`: Test utilities for downstream libraries. - -By default, `kona-derive` enables the `serde` feature. 
- -[ap]: https://docs.rs/crate/alloy-providers/latest -[ff]: https://docs.rs/crate/kona-derive/latest/features diff --git a/kona/crates/protocol/derive/src/attributes/mod.rs b/kona/crates/protocol/derive/src/attributes/mod.rs deleted file mode 100644 index 5691685f99a..00000000000 --- a/kona/crates/protocol/derive/src/attributes/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -//! Module containing the [AttributesBuilder] trait implementations. -//! -//! [AttributesBuilder]: crate::traits::AttributesBuilder - -mod stateful; -pub use stateful::StatefulAttributesBuilder; diff --git a/kona/crates/protocol/derive/src/errors/pipeline.rs b/kona/crates/protocol/derive/src/errors/pipeline.rs deleted file mode 100644 index 8253a695c95..00000000000 --- a/kona/crates/protocol/derive/src/errors/pipeline.rs +++ /dev/null @@ -1,439 +0,0 @@ -//! This module contains derivation errors thrown within the pipeline. - -use crate::BuilderError; -use alloc::string::String; -use alloy_primitives::B256; -use kona_genesis::SystemConfigUpdateError; -use kona_protocol::{DepositError, SpanBatchError}; -use thiserror::Error; - -/// [crate::ensure] is a short-hand for bubbling up errors in the case of a condition not being met. -#[macro_export] -macro_rules! ensure { - ($cond:expr, $err:expr) => { - if !($cond) { - return Err($err); - } - }; -} - -/// A top-level severity filter for [`PipelineError`] that categorizes errors by handling strategy. -/// -/// The [`PipelineErrorKind`] wrapper provides a severity classification system that enables -/// sophisticated error handling in the derivation pipeline. 
Different error types require -/// different response strategies: -/// -/// - **Temporary**: Retry-able errors that may resolve with more data -/// - **Critical**: Fatal errors that require external intervention -/// - **Reset**: Errors that require pipeline state reset but allow continued operation -/// -/// # Error Handling Strategy -/// ```text -/// Temporary -> Retry operation, may succeed with more data -/// Critical -> Stop derivation, external intervention required -/// Reset -> Reset pipeline state, continue with clean slate -/// ``` -/// -/// # Usage in Pipeline -/// Error kinds are used by pipeline stages to determine appropriate error handling: -/// - Temporary errors trigger retries in the main derivation loop -/// - Critical errors halt derivation and bubble up to the caller -/// - Reset errors trigger pipeline resets with appropriate recovery logic -#[derive(Error, Debug, PartialEq, Eq)] -pub enum PipelineErrorKind { - /// A temporary error that may resolve with additional data or time. - /// - /// Temporary errors indicate transient conditions such as insufficient data, - /// network timeouts, or resource unavailability. These errors suggest that - /// retrying the operation may succeed once the underlying condition resolves. - /// - /// # Examples - /// - Not enough L1 data available yet - /// - Network communication timeouts - /// - Insufficient channel data for frame assembly - /// - /// # Handling - /// The pipeline typically retries temporary errors in a loop, waiting for - /// conditions to improve or for additional data to become available. - #[error("Temporary error: {0}")] - Temporary(#[source] PipelineError), - /// A critical error that requires external intervention to resolve. - /// - /// Critical errors indicate fundamental issues that cannot be resolved through - /// retries or pipeline resets. These errors require external intervention such - /// as updated L1 data, configuration changes, or system fixes. 
- /// - /// # Examples - /// - Data source completely exhausted - /// - Fundamental configuration errors - /// - Irrecoverable data corruption - /// - /// # Handling - /// Critical errors halt the derivation process and are returned to the caller - /// for external resolution. The pipeline cannot continue without intervention. - #[error("Critical error: {0}")] - Critical(#[source] PipelineError), - /// A reset error that requires pipeline state reset but allows continued operation. - /// - /// Reset errors indicate conditions that invalidate the current pipeline state - /// but can be resolved by resetting to a known good state and continuing - /// derivation. These typically occur due to chain reorganizations or state - /// inconsistencies. - /// - /// # Examples - /// - L1 chain reorganization detected - /// - Block hash mismatches indicating reorg - /// - Hard fork activation requiring state reset - /// - /// # Handling - /// Reset errors trigger pipeline state cleanup and reset to a safe state, - /// after which derivation can continue with fresh state. - #[error("Pipeline reset: {0}")] - Reset(#[from] ResetError), -} - -/// An error encountered during derivation pipeline processing. -/// -/// [`PipelineError`] represents specific error conditions that can occur during the -/// various stages of L2 block derivation from L1 data. Each error variant provides -/// detailed context about the failure mode and suggests appropriate recovery strategies. 
-/// -/// # Error Categories -/// -/// ## Data Availability Errors -/// - [`Self::Eof`]: No more data available from source -/// - [`Self::NotEnoughData`]: Insufficient data for current operation -/// - [`Self::MissingL1Data`]: Required L1 data not available -/// - [`Self::EndOfSource`]: Data source completely exhausted -/// -/// ## Stage-Specific Errors -/// - [`Self::ChannelProviderEmpty`]: No channels available for processing -/// - [`Self::ChannelReaderEmpty`]: Channel reader has no data -/// - [`Self::BatchQueueEmpty`]: No batches available for processing -/// -/// ## Validation Errors -/// - [`Self::InvalidBatchType`]: Unsupported or malformed batch type -/// - [`Self::InvalidBatchValidity`]: Batch failed validation checks -/// - [`Self::BadEncoding`]: Data decoding/encoding failures -/// -/// ## System Errors -/// - [`Self::SystemConfigUpdate`]: System configuration update failures -/// - [`Self::AttributesBuilder`]: Block attribute construction failures -/// - [`Self::Provider`]: External provider communication failures -#[derive(Error, Debug, PartialEq, Eq)] -pub enum PipelineError { - /// End of file: no more data available from the channel bank. - /// - /// This error indicates that the channel bank has been completely drained - /// and no additional frame data is available for processing. It typically - /// occurs at the end of a derivation sequence when all available L1 data - /// has been consumed. - /// - /// # Recovery - /// Usually indicates completion of derivation for available data. May - /// require waiting for new L1 blocks to provide additional frame data. - #[error("EOF")] - Eof, - /// Insufficient data available to complete the current processing stage. - /// - /// This error indicates that the current operation requires more data than - /// is currently available, but additional data may become available in the - /// future. It suggests that retrying the operation later may succeed. 
- /// - /// # Common Scenarios - /// - Partial frame received, waiting for completion - /// - Channel assembly requires more frames - /// - Batch construction needs additional channel data - /// - /// # Recovery - /// Retry the operation after more L1 data becomes available or after - /// waiting for network propagation delays. - #[error("Not enough data")] - NotEnoughData, - /// No channels are available in the [`ChannelProvider`]. - /// - /// This error occurs when the channel provider stage has no assembled - /// channels ready for reading. It typically indicates that frame assembly - /// is still in progress or that no valid channels have been constructed - /// from available L1 data. - /// - /// [`ChannelProvider`]: crate::stages::ChannelProvider - #[error("The channel provider is empty")] - ChannelProviderEmpty, - /// The channel has already been fully processed by the [`ChannelAssembler`] stage. - /// - /// This error indicates an attempt to reprocess a channel that has already - /// been assembled and consumed. It suggests a logic error in channel tracking - /// or an attempt to double-process the same channel data. - /// - /// [`ChannelAssembler`]: crate::stages::ChannelAssembler - #[error("Channel already built")] - ChannelAlreadyBuilt, - /// Failed to locate the requested channel in the [`ChannelProvider`]. - /// - /// This error occurs when attempting to access a specific channel that - /// is not available in the channel provider's cache or storage. It may - /// indicate a channel ID mismatch or premature channel eviction. - /// - /// [`ChannelProvider`]: crate::stages::ChannelProvider - #[error("Channel not found in channel provider")] - ChannelNotFound, - /// No channel data returned by the [`ChannelReader`] stage. - /// - /// This error indicates that the channel reader stage has no channels - /// available for reading. It typically occurs when all channels have - /// been consumed or when no valid channels have been assembled yet. 
- /// - /// [`ChannelReader`]: crate::stages::ChannelReader - #[error("The channel reader has no channel available")] - ChannelReaderEmpty, - /// The [`BatchQueue`] contains no batches ready for processing. - /// - /// This error occurs when the batch queue stage has no assembled batches - /// available for attribute generation. It indicates that batch assembly - /// is still in progress or that no valid batches have been constructed. - /// - /// [`BatchQueue`]: crate::stages::BatchQueue - #[error("The batch queue has no batches available")] - BatchQueueEmpty, - /// Required L1 origin information is missing from the previous pipeline stage. - /// - /// This error indicates a pipeline stage dependency violation where a stage - /// expects L1 origin information that wasn't provided by the preceding stage. - /// It suggests a configuration or sequencing issue in the pipeline setup. - #[error("Missing L1 origin from previous stage")] - MissingOrigin, - /// Required L1 data is missing from the [`L1Retrieval`] stage. - /// - /// This error occurs when the L1 retrieval stage cannot provide the - /// requested L1 block data, transactions, or receipts. It may indicate - /// network issues, data availability problems, or L1 node synchronization lag. - /// - /// [`L1Retrieval`]: crate::stages::L1Retrieval - #[error("L1 Retrieval missing data")] - MissingL1Data, - /// Invalid or unsupported batch type encountered during processing. - /// - /// This error occurs when a pipeline stage receives a batch type that - /// it cannot process or that violates the expected batch format. It - /// indicates either malformed L1 data or unsupported batch versions. - #[error("Invalid batch type passed to stage")] - InvalidBatchType, - /// Batch failed validation checks during processing. - /// - /// This error indicates that a batch contains invalid data that fails - /// validation rules such as timestamp constraints, parent hash checks, - /// or format requirements. 
It suggests potentially malicious or corrupted L1 data. - #[error("Invalid batch validity")] - InvalidBatchValidity, - /// [`SystemConfig`] update operation failed. - /// - /// This error occurs when attempting to update the system configuration - /// fails due to invalid parameters, version mismatches, or other - /// configuration-related issues. - /// - /// [`SystemConfig`]: kona_genesis::SystemConfig - #[error("Error updating system config: {0}")] - SystemConfigUpdate(SystemConfigUpdateError), - /// Block attributes construction failed with detailed error information. - /// - /// This error wraps [`BuilderError`] variants that occur during the - /// construction of block attributes from batch data. It indicates issues - /// with attribute validation, formatting, or consistency checks. - #[error("Attributes builder error: {0}")] - AttributesBuilder(#[from] BuilderError), - /// Data encoding or decoding operation failed. - /// - /// This error wraps [`PipelineEncodingError`] variants that occur during - /// serialization or deserialization of pipeline data structures. It - /// indicates malformed input data or encoding format violations. - #[error("Decode error: {0}")] - BadEncoding(#[from] PipelineEncodingError), - /// The data source has been completely exhausted and cannot provide more data. - /// - /// This error indicates that the underlying L1 data source has reached - /// its end and no additional data will become available. It typically - /// occurs when derivation has caught up to the L1 chain head. - #[error("Data source exhausted")] - EndOfSource, - /// External provider communication or operation failed. - /// - /// This error wraps failures from external data providers such as L1 - /// nodes, blob providers, or other data sources. It includes network - /// failures, API errors, and provider-specific issues. - #[error("Provider error: {0}")] - Provider(String), - /// The pipeline received an unsupported signal type. 
- /// - /// This error occurs when a pipeline stage receives a signal that it - /// cannot process or that is not supported in the current configuration. - /// It indicates a protocol version mismatch or configuration issue. - #[error("Unsupported signal")] - UnsupportedSignal, -} - -impl PipelineError { - /// Wraps this [`PipelineError`] as a [PipelineErrorKind::Critical]. - /// - /// Critical errors indicate fundamental issues that cannot be resolved through - /// retries or pipeline resets. They require external intervention to resolve. - /// - /// # Usage - /// Use this method when an error condition is unrecoverable and requires - /// halting the derivation process for external intervention. - /// - /// # Example - /// ```rust,ignore - /// if data_source_corrupted { - /// return Err(PipelineError::Provider("corrupted data".to_string()).crit()); - /// } - /// ``` - pub const fn crit(self) -> PipelineErrorKind { - PipelineErrorKind::Critical(self) - } - - /// Wraps this [`PipelineError`] as a [PipelineErrorKind::Temporary]. - /// - /// Temporary errors indicate transient conditions that may resolve with - /// additional data, time, or retries. The pipeline can attempt to recover - /// by retrying the operation. - /// - /// # Usage - /// Use this method when an error condition might resolve if the operation - /// is retried, particularly for data availability or network issues. - /// - /// # Example - /// ```rust,ignore - /// if insufficient_data { - /// return Err(PipelineError::NotEnoughData.temp()); - /// } - /// ``` - pub const fn temp(self) -> PipelineErrorKind { - PipelineErrorKind::Temporary(self) - } -} - -/// A reset error -#[derive(Error, Clone, Debug, Eq, PartialEq)] -pub enum ResetError { - /// The batch has a bad parent hash. - /// The first argument is the expected parent hash, and the second argument is the actual - /// parent hash. 
- #[error("Bad parent hash: expected {0}, got {1}")] - BadParentHash(B256, B256), - /// The batch has a bad timestamp. - /// The first argument is the expected timestamp, and the second argument is the actual - /// timestamp. - #[error("Bad timestamp: expected {0}, got {1}")] - BadTimestamp(u64, u64), - /// L1 origin mismatch. - #[error("L1 origin mismatch. Expected {0:?}, got {1:?}")] - L1OriginMismatch(u64, u64), - /// The stage detected a block reorg. - /// The first argument is the expected block hash. - /// The second argument is the parent_hash of the next l1 origin block. - #[error("L1 reorg detected: expected {0}, got {1}")] - ReorgDetected(B256, B256), - /// Attributes builder error variant, with [`BuilderError`]. - #[error("Attributes builder error: {0}")] - AttributesBuilder(#[from] BuilderError), - /// A Holocene activation temporary error. - #[error("Holocene activation reset")] - HoloceneActivation, - /// The next l1 block provided to the managed traversal stage is not the expected one. - #[error("Next L1 block hash mismatch: expected {0}, got {1}")] - NextL1BlockHashMismatch(B256, B256), -} - -impl ResetError { - /// Wrap [`ResetError`] as a [PipelineErrorKind::Reset]. - pub const fn reset(self) -> PipelineErrorKind { - PipelineErrorKind::Reset(self) - } -} - -/// A decoding error. -#[derive(Error, Debug, PartialEq, Eq)] -pub enum PipelineEncodingError { - /// The buffer is empty. - #[error("Empty buffer")] - EmptyBuffer, - /// Deposit decoding error. - #[error("Error decoding deposit: {0}")] - DepositError(#[from] DepositError), - /// Alloy RLP Encoding Error. - #[error("RLP error: {0}")] - AlloyRlpError(alloy_rlp::Error), - /// Span Batch Error. 
- #[error("{0}")] - SpanBatchError(#[from] SpanBatchError), -} - -#[cfg(test)] -mod tests { - use super::*; - use core::error::Error; - - #[test] - fn test_pipeline_error_kind_source() { - let err = PipelineErrorKind::Temporary(PipelineError::Eof); - assert!(err.source().is_some()); - - let err = PipelineErrorKind::Critical(PipelineError::Eof); - assert!(err.source().is_some()); - - let err = PipelineErrorKind::Reset(ResetError::BadParentHash( - Default::default(), - Default::default(), - )); - assert!(err.source().is_some()); - } - - #[test] - fn test_pipeline_error_source() { - let err = PipelineError::AttributesBuilder(BuilderError::BlockMismatch( - Default::default(), - Default::default(), - )); - assert!(err.source().is_some()); - - let encoding_err = PipelineEncodingError::EmptyBuffer; - let err: PipelineError = encoding_err.into(); - assert!(err.source().is_some()); - - let err = PipelineError::Eof; - assert!(err.source().is_none()); - } - - #[test] - fn test_pipeline_encoding_error_source() { - let err = PipelineEncodingError::DepositError(DepositError::UnexpectedTopicsLen(0)); - assert!(err.source().is_some()); - - let err = SpanBatchError::TooBigSpanBatchSize; - let err: PipelineEncodingError = err.into(); - assert!(err.source().is_some()); - - let err = PipelineEncodingError::EmptyBuffer; - assert!(err.source().is_none()); - } - - #[test] - fn test_reset_error_kinds() { - let reset_errors = [ - ResetError::BadParentHash(Default::default(), Default::default()), - ResetError::BadTimestamp(0, 0), - ResetError::L1OriginMismatch(0, 0), - ResetError::ReorgDetected(Default::default(), Default::default()), - ResetError::AttributesBuilder(BuilderError::BlockMismatch( - Default::default(), - Default::default(), - )), - ResetError::HoloceneActivation, - ]; - for error in reset_errors.into_iter() { - let expected = PipelineErrorKind::Reset(error.clone()); - assert_eq!(error.reset(), expected); - } - } -} diff --git a/kona/crates/protocol/derive/src/lib.rs 
b/kona/crates/protocol/derive/src/lib.rs deleted file mode 100644 index 004a6569405..00000000000 --- a/kona/crates/protocol/derive/src/lib.rs +++ /dev/null @@ -1,56 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "metrics"), no_std)] - -extern crate alloc; - -#[macro_use] -extern crate tracing; - -mod attributes; -pub use attributes::StatefulAttributesBuilder; - -mod errors; -pub use errors::{ - BatchDecompressionError, BlobDecodingError, BlobProviderError, BuilderError, - PipelineEncodingError, PipelineError, PipelineErrorKind, ResetError, -}; - -mod pipeline; -pub use pipeline::{ - AttributesQueueStage, BatchProviderStage, BatchStreamStage, ChannelProviderStage, - ChannelReaderStage, DerivationPipeline, FrameQueueStage, IndexedAttributesQueueStage, - L1RetrievalStage, PipelineBuilder, PolledAttributesQueueStage, -}; - -mod sources; -pub use sources::{BlobData, BlobSource, CalldataSource, EthereumDataSource}; - -mod stages; -pub use stages::{ - AttributesQueue, BatchProvider, BatchQueue, BatchStream, BatchStreamProvider, BatchValidator, - ChannelAssembler, ChannelBank, ChannelProvider, ChannelReader, ChannelReaderProvider, - FrameQueue, FrameQueueProvider, IndexedTraversal, L1Retrieval, L1RetrievalProvider, - NextBatchProvider, NextFrameProvider, PollingTraversal, TraversalStage, -}; - -mod traits; -pub use traits::{ - AttributesBuilder, AttributesProvider, BatchValidationProviderDerive, BlobProvider, - ChainProvider, DataAvailabilityProvider, L2ChainProvider, NextAttributes, OriginAdvancer, - OriginProvider, Pipeline, ResetProvider, SignalReceiver, -}; - -mod types; -pub use types::{ActivationSignal, PipelineResult, 
ResetSignal, Signal, StepResult}; - -mod metrics; -pub use metrics::Metrics; - -#[cfg(any(test, feature = "test-utils"))] -pub mod test_utils; diff --git a/kona/crates/protocol/derive/src/pipeline/core.rs b/kona/crates/protocol/derive/src/pipeline/core.rs deleted file mode 100644 index 6071a07d312..00000000000 --- a/kona/crates/protocol/derive/src/pipeline/core.rs +++ /dev/null @@ -1,368 +0,0 @@ -//! Contains the core derivation pipeline. - -use crate::{ - ActivationSignal, L2ChainProvider, NextAttributes, OriginAdvancer, OriginProvider, Pipeline, - PipelineError, PipelineErrorKind, PipelineResult, ResetSignal, Signal, SignalReceiver, - StepResult, -}; -use alloc::{boxed::Box, collections::VecDeque, sync::Arc}; -use async_trait::async_trait; -use core::fmt::Debug; -use kona_genesis::{RollupConfig, SystemConfig}; -use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; - -/// The derivation pipeline is responsible for deriving L2 inputs from L1 data. -#[derive(Debug)] -pub struct DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, - P: L2ChainProvider + Send + Sync + Debug, -{ - /// A handle to the next attributes. - pub attributes: S, - /// Reset provider for the pipeline. - /// A list of prepared [`OpAttributesWithParent`] to be used by the derivation pipeline - /// consumer. - pub prepared: VecDeque<OpAttributesWithParent>, - /// The rollup config. - pub rollup_config: Arc<RollupConfig>, - /// The L2 Chain Provider used to fetch the system config on reset. - pub l2_chain_provider: P, -} - -impl<S, P> DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, - P: L2ChainProvider + Send + Sync + Debug, -{ - /// Creates a new instance of the [`DerivationPipeline`]. 
- pub const fn new( - attributes: S, - rollup_config: Arc<RollupConfig>, - l2_chain_provider: P, - ) -> Self { - Self { attributes, prepared: VecDeque::new(), rollup_config, l2_chain_provider } - } -} - -impl<S, P> OriginProvider for DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, - P: L2ChainProvider + Send + Sync + Debug, -{ - fn origin(&self) -> Option<BlockInfo> { - self.attributes.origin() - } -} - -impl<S, P> Iterator for DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, - P: L2ChainProvider + Send + Sync + Debug, -{ - type Item = OpAttributesWithParent; - - fn next(&mut self) -> Option<Self::Item> { - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_PAYLOAD_ATTRIBUTES_BUFFER, - self.prepared.len().saturating_sub(1) as f64 - ); - self.prepared.pop_front() - } -} - -#[async_trait] -impl<S, P> SignalReceiver for DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, - P: L2ChainProvider + Send + Sync + Debug, -{ - /// Signals the pipeline by calling the [`SignalReceiver::signal`] method. - /// - /// During a [`Signal::Reset`], each stage is recursively called from the top-level - /// [crate::stages::AttributesQueue] to the bottom [crate::PollingTraversal] - /// with a head-recursion pattern. This effectively clears the internal state - /// of each stage in the pipeline from bottom on up. - /// - /// [`Signal::Activation`] does a similar thing to the reset, with different - /// holocene-specific reset rules. - /// - /// ### Parameters - /// - /// The `signal` is contains the signal variant with any necessary parameters. - async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { - match signal { - mut s @ Signal::Reset(ResetSignal { l2_safe_head, .. }) | - mut s @ Signal::Activation(ActivationSignal { l2_safe_head, .. 
}) => { - let system_config = self - .l2_chain_provider - .system_config_by_number( - l2_safe_head.block_info.number, - Arc::clone(&self.rollup_config), - ) - .await - .map_err(Into::into)?; - s = s.with_system_config(system_config); - match self.attributes.signal(s).await { - Ok(()) => trace!(target: "pipeline", "Stages reset"), - Err(err) => { - if let PipelineErrorKind::Temporary(PipelineError::Eof) = err { - trace!(target: "pipeline", "Stages reset with EOF"); - } else { - error!(target: "pipeline", "Stage reset errored: {:?}", err); - return Err(err); - } - } - } - } - Signal::FlushChannel => { - self.attributes.signal(signal).await?; - } - Signal::ProvideBlock(_) => { - self.attributes.signal(signal).await?; - } - } - kona_macros::inc!( - gauge, - crate::metrics::Metrics::PIPELINE_SIGNALS, - "type" => signal.to_string(), - ); - Ok(()) - } -} - -#[async_trait] -impl<S, P> Pipeline for DerivationPipeline<S, P> -where - S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, - P: L2ChainProvider + Send + Sync + Debug, -{ - /// Peeks at the next prepared [`OpAttributesWithParent`] from the pipeline. - fn peek(&self) -> Option<&OpAttributesWithParent> { - self.prepared.front() - } - - /// Returns the rollup config. - fn rollup_config(&self) -> &RollupConfig { - &self.rollup_config - } - - /// Returns the [`SystemConfig`] by L2 number. - async fn system_config_by_number( - &mut self, - number: u64, - ) -> Result<SystemConfig, PipelineErrorKind> { - self.l2_chain_provider - .system_config_by_number(number, self.rollup_config.clone()) - .await - .map_err(Into::into) - } - - /// Attempts to progress the pipeline. - /// - /// ## Returns - /// - /// A [PipelineError::Eof] is returned if the pipeline is blocked by waiting for new L1 data. - /// Any other error is critical and the derivation pipeline should be reset. - /// An error is expected when the underlying source closes. 
- /// - /// When [DerivationPipeline::step] returns [Ok(())], it should be called again, to continue the - /// derivation process. - /// - /// [`PipelineError`]: crate::errors::PipelineError - async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { - kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_STEPS); - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_STEP_BLOCK, - cursor.block_info.number as f64 - ); - match self.attributes.next_attributes(cursor).await { - Ok(a) => { - trace!(target: "pipeline", "Prepared L2 attributes: {:?}", a); - kona_macros::inc!( - gauge, - crate::metrics::Metrics::PIPELINE_PAYLOAD_ATTRIBUTES_BUFFER - ); - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_LATEST_PAYLOAD_TX_COUNT, - a.attributes.transactions.as_ref().map_or(0.0, |txs| txs.len() as f64) - ); - if !a.is_last_in_span { - kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_DERIVED_SPAN_SIZE); - } else { - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_DERIVED_SPAN_SIZE, - 0 - ); - } - self.prepared.push_back(a); - kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_PREPARED_ATTRIBUTES); - StepResult::PreparedAttributes - } - Err(err) => match err { - PipelineErrorKind::Temporary(PipelineError::Eof) => { - trace!(target: "pipeline", "Pipeline advancing origin"); - if let Err(e) = self.attributes.advance_origin().await { - return StepResult::OriginAdvanceErr(e); - } - StepResult::AdvancedOrigin - } - PipelineErrorKind::Temporary(_) => { - trace!(target: "pipeline", "Attributes queue step failed due to temporary error: {:?}", err); - StepResult::StepFailed(err) - } - _ => { - warn!(target: "pipeline", "Attributes queue step failed: {:?}", err); - StepResult::StepFailed(err) - } - }, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{DerivationPipeline, test_utils::*}; - use alloc::{string::ToString, sync::Arc}; - use alloy_rpc_types_engine::PayloadAttributes; - use 
kona_genesis::{RollupConfig, SystemConfig}; - use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; - use op_alloy_rpc_types_engine::OpPayloadAttributes; - - fn default_test_payload_attributes() -> OpAttributesWithParent { - OpAttributesWithParent { - attributes: OpPayloadAttributes { - payload_attributes: PayloadAttributes { - timestamp: 0, - prev_randao: Default::default(), - suggested_fee_recipient: Default::default(), - withdrawals: None, - parent_beacon_block_root: None, - }, - transactions: None, - no_tx_pool: None, - gas_limit: None, - eip_1559_params: None, - min_base_fee: None, - }, - parent: Default::default(), - derived_from: Default::default(), - is_last_in_span: false, - } - } - - #[test] - fn test_pipeline_next_attributes_empty() { - let mut pipeline = new_test_pipeline(); - let result = pipeline.next(); - assert_eq!(result, None); - } - - #[test] - fn test_pipeline_next_attributes_with_peek() { - let mut pipeline = new_test_pipeline(); - let expected = default_test_payload_attributes(); - pipeline.prepared.push_back(expected.clone()); - - let result = pipeline.peek(); - assert_eq!(result, Some(&expected)); - - let result = pipeline.next(); - assert_eq!(result, Some(expected)); - } - - #[tokio::test] - async fn test_derivation_pipeline_missing_block() { - let mut pipeline = new_test_pipeline(); - let cursor = L2BlockInfo::default(); - let result = pipeline.step(cursor).await; - assert_eq!( - result, - StepResult::OriginAdvanceErr( - PipelineError::Provider("Block not found".to_string()).temp() - ) - ); - } - - #[tokio::test] - async fn test_derivation_pipeline_prepared_attributes() { - let rollup_config = Arc::new(RollupConfig::default()); - let l2_chain_provider = TestL2ChainProvider::default(); - let expected = default_test_payload_attributes(); - let attributes = TestNextAttributes { next_attributes: Some(expected) }; - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Step on the pipeline and 
expect the result. - let cursor = L2BlockInfo::default(); - let result = pipeline.step(cursor).await; - assert_eq!(result, StepResult::PreparedAttributes); - } - - #[tokio::test] - async fn test_derivation_pipeline_advance_origin() { - let rollup_config = Arc::new(RollupConfig::default()); - let l2_chain_provider = TestL2ChainProvider::default(); - let attributes = TestNextAttributes::default(); - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Step on the pipeline and expect the result. - let cursor = L2BlockInfo::default(); - let result = pipeline.step(cursor).await; - assert_eq!(result, StepResult::AdvancedOrigin); - } - - #[tokio::test] - async fn test_derivation_pipeline_signal_activation() { - let rollup_config = Arc::new(RollupConfig::default()); - let mut l2_chain_provider = TestL2ChainProvider::default(); - l2_chain_provider.system_configs.insert(0, SystemConfig::default()); - let attributes = TestNextAttributes::default(); - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Signal the pipeline to reset. - let result = pipeline.signal(ActivationSignal::default().signal()).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_derivation_pipeline_flush_channel() { - let rollup_config = Arc::new(RollupConfig::default()); - let l2_chain_provider = TestL2ChainProvider::default(); - let attributes = TestNextAttributes::default(); - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Signal the pipeline to reset. 
- let result = pipeline.signal(Signal::FlushChannel).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_derivation_pipeline_signal_reset_missing_sys_config() { - let rollup_config = Arc::new(RollupConfig::default()); - let l2_chain_provider = TestL2ChainProvider::default(); - let attributes = TestNextAttributes::default(); - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Signal the pipeline to reset. - let result = pipeline.signal(ResetSignal::default().signal()).await.unwrap_err(); - assert_eq!(result, PipelineError::Provider("System config not found".to_string()).temp()); - } - - #[tokio::test] - async fn test_derivation_pipeline_signal_reset_ok() { - let rollup_config = Arc::new(RollupConfig::default()); - let mut l2_chain_provider = TestL2ChainProvider::default(); - l2_chain_provider.system_configs.insert(0, SystemConfig::default()); - let attributes = TestNextAttributes::default(); - let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); - - // Signal the pipeline to reset. - let result = pipeline.signal(ResetSignal::default().signal()).await; - assert!(result.is_ok()); - } -} diff --git a/kona/crates/protocol/derive/src/sources/blobs.rs b/kona/crates/protocol/derive/src/sources/blobs.rs deleted file mode 100644 index f258beac5e4..00000000000 --- a/kona/crates/protocol/derive/src/sources/blobs.rs +++ /dev/null @@ -1,355 +0,0 @@ -//! Blob Data Source - -use crate::{ - BlobData, BlobProvider, BlobProviderError, ChainProvider, DataAvailabilityProvider, - PipelineError, PipelineResult, -}; -use alloc::{boxed::Box, string::ToString, vec::Vec}; -use alloy_consensus::{ - Transaction, TxEip4844Variant, TxEnvelope, TxType, transaction::SignerRecoverable, -}; -use alloy_eips::eip4844::IndexedBlobHash; -use alloy_primitives::{Address, Bytes}; -use async_trait::async_trait; -use kona_protocol::BlockInfo; - -/// A data iterator that reads from a blob. 
-#[derive(Debug, Clone)] -pub struct BlobSource<F, B> -where - F: ChainProvider + Send, - B: BlobProvider + Send, -{ - /// Chain provider. - pub chain_provider: F, - /// Fetches blobs. - pub blob_fetcher: B, - /// The address of the batcher contract. - pub batcher_address: Address, - /// Data. - pub data: Vec<BlobData>, - /// Whether the source is open. - pub open: bool, -} - -impl<F, B> BlobSource<F, B> -where - F: ChainProvider + Send, - B: BlobProvider + Send, -{ - /// Creates a new blob source. - pub const fn new(chain_provider: F, blob_fetcher: B, batcher_address: Address) -> Self { - Self { chain_provider, blob_fetcher, batcher_address, data: Vec::new(), open: false } - } - - fn extract_blob_data( - &self, - txs: Vec<TxEnvelope>, - batcher_address: Address, - ) -> (Vec<BlobData>, Vec<IndexedBlobHash>) { - let mut index: u64 = 0; - let mut data = Vec::new(); - let mut hashes = Vec::new(); - for tx in txs { - let (tx_kind, calldata, blob_hashes) = match &tx { - TxEnvelope::Legacy(tx) => (tx.tx().to(), tx.tx().input.clone(), None), - TxEnvelope::Eip2930(tx) => (tx.tx().to(), tx.tx().input.clone(), None), - TxEnvelope::Eip1559(tx) => (tx.tx().to(), tx.tx().input.clone(), None), - TxEnvelope::Eip4844(blob_tx_wrapper) => match blob_tx_wrapper.tx() { - TxEip4844Variant::TxEip4844(tx) => { - (tx.to(), tx.input.clone(), Some(tx.blob_versioned_hashes.clone())) - } - TxEip4844Variant::TxEip4844WithSidecar(tx) => { - let tx = tx.tx(); - (tx.to(), tx.input.clone(), Some(tx.blob_versioned_hashes.clone())) - } - }, - _ => continue, - }; - let Some(to) = tx_kind else { continue }; - - if to != self.batcher_address { - index += blob_hashes.map_or(0, |h| h.len() as u64); - continue; - } - if tx.recover_signer().unwrap_or_default() != batcher_address { - index += blob_hashes.map_or(0, |h| h.len() as u64); - continue; - } - if tx.tx_type() != TxType::Eip4844 { - let blob_data = BlobData { data: None, calldata: Some(calldata.to_vec().into()) }; - data.push(blob_data); - continue; 
- } - if !calldata.is_empty() { - let hash = match &tx { - TxEnvelope::Legacy(tx) => Some(tx.hash()), - TxEnvelope::Eip2930(tx) => Some(tx.hash()), - TxEnvelope::Eip1559(tx) => Some(tx.hash()), - TxEnvelope::Eip4844(blob_tx_wrapper) => Some(blob_tx_wrapper.hash()), - _ => None, - }; - warn!(target: "blob_source", "Blob tx has calldata, which will be ignored: {hash:?}"); - } - let blob_hashes = if let Some(b) = blob_hashes { - b - } else { - continue; - }; - for hash in blob_hashes { - let indexed = IndexedBlobHash { hash, index }; - hashes.push(indexed); - data.push(BlobData::default()); - index += 1; - } - } - #[cfg(feature = "metrics")] - metrics::gauge!( - crate::metrics::Metrics::PIPELINE_DATA_AVAILABILITY_PROVIDER, - "source" => "blobs", - ) - .increment(data.len() as f64); - (data, hashes) - } - - /// Loads blob data into the source if it is not open. - async fn load_blobs( - &mut self, - block_ref: &BlockInfo, - batcher_address: Address, - ) -> Result<(), BlobProviderError> { - if self.open { - return Ok(()); - } - - let info = self - .chain_provider - .block_info_and_transactions_by_hash(block_ref.hash) - .await - .map_err(|e| BlobProviderError::Backend(e.to_string()))?; - - let (mut data, blob_hashes) = self.extract_blob_data(info.1, batcher_address); - - // If there are no hashes, set the calldata and return. - if blob_hashes.is_empty() { - self.open = true; - self.data = data; - return Ok(()); - } - - let blobs = - self.blob_fetcher.get_and_validate_blobs(block_ref, &blob_hashes).await.map_err( - |e| { - warn!(target: "blob_source", "Failed to fetch blobs: {e}"); - BlobProviderError::Backend(e.to_string()) - }, - )?; - - // Fill the blob pointers. 
- let mut blob_index = 0; - for blob in data.iter_mut() { - match blob.fill(&blobs, blob_index) { - Ok(should_increment) => { - if should_increment { - blob_index += 1; - } - } - Err(e) => { - return Err(e.into()); - } - } - } - - self.open = true; - self.data = data; - Ok(()) - } - - /// Extracts the next data from the source. - fn next_data(&mut self) -> PipelineResult<BlobData> { - if self.data.is_empty() { - return Err(PipelineError::Eof.temp()); - } - - Ok(self.data.remove(0)) - } -} - -#[async_trait] -impl<F, B> DataAvailabilityProvider for BlobSource<F, B> -where - F: ChainProvider + Sync + Send, - B: BlobProvider + Sync + Send, -{ - type Item = Bytes; - - async fn next( - &mut self, - block_ref: &BlockInfo, - batcher_address: Address, - ) -> PipelineResult<Self::Item> { - self.load_blobs(block_ref, batcher_address).await?; - - let next_data = self.next_data()?; - if let Some(c) = next_data.calldata { - return Ok(c); - } - - // Decode the blob data to raw bytes. - // Otherwise, ignore blob and recurse next. 
- match next_data.decode() { - Ok(d) => Ok(d), - Err(_) => { - warn!(target: "blob_source", "Failed to decode blob data, skipping"); - self.next(block_ref, batcher_address).await - } - } - } - - fn clear(&mut self) { - self.data.clear(); - self.open = false; - } -} - -#[cfg(test)] -pub(crate) mod tests { - use super::*; - use crate::{ - errors::PipelineErrorKind, - test_utils::{TestBlobProvider, TestChainProvider}, - }; - use alloc::vec; - use alloy_rlp::Decodable; - - pub(crate) fn default_test_blob_source() -> BlobSource<TestChainProvider, TestBlobProvider> { - let chain_provider = TestChainProvider::default(); - let blob_fetcher = TestBlobProvider::default(); - let batcher_address = Address::default(); - BlobSource::new(chain_provider, blob_fetcher, batcher_address) - } - - pub(crate) fn valid_blob_txs() -> Vec<TxEnvelope> { - // https://sepolia.etherscan.io/getRawTx?tx=0x9a22ccb0029bc8b0ddd073be1a1d923b7ae2b2ea52100bae0db4424f9107e9c0 - let raw_tx = alloy_primitives::hex::decode("0x03f9011d83aa36a7820fa28477359400852e90edd0008252089411e9ca82a3a762b4b5bd264d4173a242e7a770648080c08504a817c800f8a5a0012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921aa00152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4a0013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7a001148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1a0011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e654901a0c8de4cced43169f9aa3d36506363b2d2c44f6c49fc1fd91ea114c86f3757077ea01e11fdd0d1934eda0492606ee0bb80a7bf8f35cc5f86ec60fe5031ba48bfd544").unwrap(); - let eip4844 = TxEnvelope::decode(&mut raw_tx.as_slice()).unwrap(); - vec![eip4844] - } - - #[tokio::test] - async fn test_load_blobs_open() { - let mut source = default_test_blob_source(); - source.open = true; - assert!(source.load_blobs(&BlockInfo::default(), Address::ZERO).await.is_ok()); - } - - #[tokio::test] - async fn test_load_blobs_chain_provider_err() { - let mut source = 
default_test_blob_source(); - assert!(matches!( - source.load_blobs(&BlockInfo::default(), Address::ZERO).await, - Err(BlobProviderError::Backend(_)) - )); - } - - #[tokio::test] - async fn test_load_blobs_chain_provider_empty_txs() { - let mut source = default_test_blob_source(); - let block_info = BlockInfo::default(); - source.chain_provider.insert_block_with_transactions(0, block_info, Vec::new()); - assert!(!source.open); // Source is not open by default. - assert!(source.load_blobs(&BlockInfo::default(), Address::ZERO).await.is_ok()); - assert!(source.data.is_empty()); - assert!(source.open); - } - - #[tokio::test] - async fn test_load_blobs_chain_provider_4844_txs_blob_fetch_error() { - let mut source = default_test_blob_source(); - let block_info = BlockInfo::default(); - let batcher_address = - alloy_primitives::address!("A83C816D4f9b2783761a22BA6FADB0eB0606D7B2"); - source.batcher_address = - alloy_primitives::address!("11E9CA82A3a762b4B5bd264d4173a242e7a77064"); - let txs = valid_blob_txs(); - source.blob_fetcher.should_error = true; - source.chain_provider.insert_block_with_transactions(1, block_info, txs); - assert!(matches!( - source.load_blobs(&BlockInfo::default(), batcher_address).await, - Err(BlobProviderError::Backend(_)) - )); - } - - #[tokio::test] - async fn test_load_blobs_chain_provider_4844_txs_succeeds() { - use alloy_consensus::Blob; - - let mut source = default_test_blob_source(); - let block_info = BlockInfo::default(); - let batcher_address = - alloy_primitives::address!("A83C816D4f9b2783761a22BA6FADB0eB0606D7B2"); - source.batcher_address = - alloy_primitives::address!("11E9CA82A3a762b4B5bd264d4173a242e7a77064"); - let txs = valid_blob_txs(); - source.chain_provider.insert_block_with_transactions(1, block_info, txs); - let hashes = [ - alloy_primitives::b256!( - "012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921a" - ), - alloy_primitives::b256!( - "0152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4" - ), - 
alloy_primitives::b256!( - "013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7" - ), - alloy_primitives::b256!( - "01148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1" - ), - alloy_primitives::b256!( - "011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e6549" - ), - ]; - for hash in hashes { - source.blob_fetcher.insert_blob(hash, Blob::with_last_byte(1u8)); - } - source.load_blobs(&BlockInfo::default(), batcher_address).await.unwrap(); - assert!(source.open); - assert!(!source.data.is_empty()); - } - - #[tokio::test] - async fn test_open_empty_data_eof() { - let mut source = default_test_blob_source(); - source.open = true; - - let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); - assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Eof))); - } - - #[tokio::test] - async fn test_open_calldata() { - let mut source = default_test_blob_source(); - source.open = true; - source.data.push(BlobData { data: None, calldata: Some(Bytes::default()) }); - - let data = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap(); - assert_eq!(data, Bytes::default()); - } - - #[tokio::test] - async fn test_open_blob_data_decode_missing_data() { - let mut source = default_test_blob_source(); - source.open = true; - source.data.push(BlobData { data: Some(Bytes::from(&[1; 32])), calldata: None }); - - let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); - assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Eof))); - } - - #[tokio::test] - async fn test_blob_source_pipeline_error() { - let mut source = default_test_blob_source(); - let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); - assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Provider(_)))); - } -} diff --git a/kona/crates/protocol/derive/src/sources/mod.rs b/kona/crates/protocol/derive/src/sources/mod.rs deleted file mode 100644 index d7a5e5f1455..00000000000 
--- a/kona/crates/protocol/derive/src/sources/mod.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! The data source module. -//! -//! Data sources are data providers for the kona derivation pipeline. -//! They implement the [DataAvailabilityProvider] trait, providing a way -//! to iterate over data for a given (L2) [BlockInfo]. -//! -//! [DataAvailabilityProvider]: crate::traits::DataAvailabilityProvider -//! [BlockInfo]: kona_protocol::BlockInfo - -mod blob_data; -pub use blob_data::BlobData; - -mod ethereum; -pub use ethereum::EthereumDataSource; - -mod blobs; -pub use blobs::BlobSource; - -mod calldata; -pub use calldata::CalldataSource; diff --git a/kona/crates/protocol/derive/src/stages/batch/mod.rs b/kona/crates/protocol/derive/src/stages/batch/mod.rs deleted file mode 100644 index dbc05265094..00000000000 --- a/kona/crates/protocol/derive/src/stages/batch/mod.rs +++ /dev/null @@ -1,58 +0,0 @@ -//! Contains stages pertaining to the processing of [Batch]es. -//! -//! Sitting after the [ChannelReader] stage, the [`BatchStream`] and [`BatchProvider`] stages are -//! responsible for validating and ordering the [Batch]es. The [`BatchStream`] stage is -//! responsible for streaming [SingleBatch]es from [SpanBatch]es, while the [`BatchProvider`] -//! stage is responsible for ordering and validating the [Batch]es for the [AttributesQueue] -//! stage. -//! -//! [Batch]: kona_protocol::Batch -//! [SingleBatch]: kona_protocol::SingleBatch -//! [SpanBatch]: kona_protocol::SpanBatch -//! [ChannelReader]: crate::stages::channel::ChannelReader -//! 
[AttributesQueue]: crate::stages::attributes_queue::AttributesQueue - -use crate::types::PipelineResult; -use alloc::boxed::Box; -use async_trait::async_trait; -use kona_protocol::{Batch, BlockInfo, L2BlockInfo}; - -mod batch_stream; -pub use batch_stream::{BatchStream, BatchStreamProvider}; - -mod batch_queue; -pub use batch_queue::BatchQueue; - -mod batch_validator; -pub use batch_validator::BatchValidator; - -mod batch_provider; -pub use batch_provider::BatchProvider; - -/// Provides [`Batch`]es for the [`BatchQueue`] and [`BatchValidator`] stages. -#[async_trait] -pub trait NextBatchProvider { - /// Returns the next [`Batch`] in the [`ChannelReader`] stage, if the stage is not complete. - /// This function can only be called once while the stage is in progress, and will return - /// [`None`] on subsequent calls unless the stage is reset or complete. If the stage is - /// complete and the batch has been consumed, an [PipelineError::Eof] error is returned. - /// - /// [`ChannelReader`]: crate::stages::ChannelReader - /// [PipelineError::Eof]: crate::errors::PipelineError::Eof - async fn next_batch( - &mut self, - parent: L2BlockInfo, - l1_origins: &[BlockInfo], - ) -> PipelineResult<Batch>; - - /// Returns the number of [`SingleBatch`]es that are currently buffered in the [`BatchStream`] - /// from a [`SpanBatch`]. - /// - /// [`SpanBatch`]: kona_protocol::SpanBatch - /// [`SingleBatch`]: kona_protocol::SingleBatch - fn span_buffer_size(&self) -> usize; - - /// Allows the stage to flush the buffer in the [crate::stages::BatchStream] - /// if an invalid single batch is found. Pre-holocene hardfork, this will be a no-op. - fn flush(&mut self); -} diff --git a/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs b/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs deleted file mode 100644 index 688f9468a8c..00000000000 --- a/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs +++ /dev/null @@ -1,364 +0,0 @@ -//! 
This module contains the [ChannelProvider] stage. - -use super::{ChannelAssembler, ChannelBank, ChannelReaderProvider, NextFrameProvider}; -use crate::{ - errors::PipelineError, - traits::{OriginAdvancer, OriginProvider, SignalReceiver}, - types::{PipelineResult, Signal}, -}; -use alloc::{boxed::Box, sync::Arc}; -use alloy_primitives::Bytes; -use async_trait::async_trait; -use core::fmt::Debug; -use kona_genesis::RollupConfig; -use kona_protocol::BlockInfo; - -/// The [`ChannelProvider`] stage is a mux between the [`ChannelBank`] and [`ChannelAssembler`] -/// stages. -/// -/// Rules: -/// When Holocene is not active, the [`ChannelBank`] is used. -/// When Holocene is active, the [`ChannelAssembler`] is used. -#[derive(Debug)] -pub struct ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - /// The rollup configuration. - pub cfg: Arc<RollupConfig>, - /// The previous stage of the derivation pipeline. - /// - /// If this is set to [`None`], the multiplexer has been activated and the active stage - /// owns the previous stage. - /// - /// Must be [`None`] if `channel_bank` or `channel_assembler` is [`Some`]. - pub prev: Option<P>, - /// The channel bank stage of the provider. - /// - /// Must be [`None`] if `prev` or `channel_assembler` is [`Some`]. - pub channel_bank: Option<ChannelBank<P>>, - /// The channel assembler stage of the provider. - /// - /// Must be [`None`] if `prev` or `channel_bank` is [`Some`]. - pub channel_assembler: Option<ChannelAssembler<P>>, -} - -impl<P> ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - /// Creates a new [`ChannelProvider`] with the given configuration and previous stage. - pub const fn new(cfg: Arc<RollupConfig>, prev: P) -> Self { - Self { cfg, prev: Some(prev), channel_bank: None, channel_assembler: None } - } - - /// Attempts to update the active stage of the mux. 
- pub(crate) fn attempt_update(&mut self) -> PipelineResult<()> { - let origin = self.origin().ok_or(PipelineError::MissingOrigin.crit())?; - if let Some(prev) = self.prev.take() { - // On the first call to `attempt_update`, we need to determine the active stage to - // initialize the mux with. - if self.cfg.is_holocene_active(origin.timestamp) { - self.channel_assembler = Some(ChannelAssembler::new(self.cfg.clone(), prev)); - } else { - self.channel_bank = Some(ChannelBank::new(self.cfg.clone(), prev)); - } - } else if self.channel_bank.is_some() && self.cfg.is_holocene_active(origin.timestamp) { - // If the channel bank is active and Holocene is also active, transition to the channel - // assembler. - let channel_bank = self.channel_bank.take().expect("Must have channel bank"); - self.channel_assembler = - Some(ChannelAssembler::new(self.cfg.clone(), channel_bank.prev)); - } else if self.channel_assembler.is_some() && !self.cfg.is_holocene_active(origin.timestamp) - { - // If the channel assembler is active, and Holocene is not active, it indicates an L1 - // reorg around Holocene activation. Transition back to the channel bank - // until Holocene re-activates. 
- let channel_assembler = - self.channel_assembler.take().expect("Must have channel assembler"); - self.channel_bank = Some(ChannelBank::new(self.cfg.clone(), channel_assembler.prev)); - } - Ok(()) - } -} - -#[async_trait] -impl<P> OriginAdvancer for ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, -{ - async fn advance_origin(&mut self) -> PipelineResult<()> { - self.attempt_update()?; - - if let Some(channel_assembler) = self.channel_assembler.as_mut() { - channel_assembler.advance_origin().await - } else if let Some(channel_bank) = self.channel_bank.as_mut() { - channel_bank.advance_origin().await - } else { - Err(PipelineError::NotEnoughData.temp()) - } - } -} - -impl<P> OriginProvider for ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - fn origin(&self) -> Option<BlockInfo> { - self.channel_assembler.as_ref().map_or_else( - || { - self.channel_bank.as_ref().map_or_else( - || self.prev.as_ref().and_then(|prev| prev.origin()), - |channel_bank| channel_bank.origin(), - ) - }, - |channel_assembler| channel_assembler.origin(), - ) - } -} - -#[async_trait] -impl<P> SignalReceiver for ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, -{ - async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { - self.attempt_update()?; - - if let Some(channel_assembler) = self.channel_assembler.as_mut() { - channel_assembler.signal(signal).await - } else if let Some(channel_bank) = self.channel_bank.as_mut() { - channel_bank.signal(signal).await - } else { - Err(PipelineError::NotEnoughData.temp()) - } - } -} - -#[async_trait] -impl<P> ChannelReaderProvider for ChannelProvider<P> -where - P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, -{ - async fn next_data(&mut self) -> PipelineResult<Option<Bytes>> { - self.attempt_update()?; - - if 
let Some(channel_assembler) = self.channel_assembler.as_mut() { - channel_assembler.next_data().await - } else if let Some(channel_bank) = self.channel_bank.as_mut() { - channel_bank.next_data().await - } else { - Err(PipelineError::NotEnoughData.temp()) - } - } -} - -#[cfg(test)] -mod test { - use crate::{ - ChannelProvider, ChannelReaderProvider, OriginProvider, PipelineError, ResetSignal, - SignalReceiver, test_utils::TestNextFrameProvider, - }; - use alloc::{sync::Arc, vec}; - use kona_genesis::{HardForkConfig, RollupConfig}; - use kona_protocol::BlockInfo; - - #[test] - fn test_channel_provider_assembler_active() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - assert!(channel_provider.attempt_update().is_ok()); - assert!(channel_provider.prev.is_none()); - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_some()); - } - - #[test] - fn test_channel_provider_bank_active() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig::default()); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - assert!(channel_provider.attempt_update().is_ok()); - assert!(channel_provider.prev.is_none()); - assert!(channel_provider.channel_bank.is_some()); - assert!(channel_provider.channel_assembler.is_none()); - } - - #[test] - fn test_channel_provider_retain_current_bank() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig::default()); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - // Assert the multiplexer hasn't been initialized. 
- assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_none()); - assert!(channel_provider.prev.is_some()); - - // Load in the active stage. - channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_some()); - assert!(channel_provider.channel_assembler.is_none()); - assert!(channel_provider.prev.is_none()); - // Ensure the active stage is retained on the second call. - channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_some()); - assert!(channel_provider.channel_assembler.is_none()); - assert!(channel_provider.prev.is_none()); - } - - #[test] - fn test_channel_provider_retain_current_assembler() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - // Assert the multiplexer hasn't been initialized. - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_none()); - assert!(channel_provider.prev.is_some()); - - // Load in the active stage. - channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_some()); - assert!(channel_provider.prev.is_none()); - // Ensure the active stage is retained on the second call. 
- channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_some()); - assert!(channel_provider.prev.is_none()); - } - - #[test] - fn test_channel_provider_transition_stage() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(2), ..Default::default() }, - ..Default::default() - }); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - channel_provider.attempt_update().unwrap(); - - // Update the L1 origin to Holocene activation. - let Some(ref mut stage) = channel_provider.channel_bank else { - panic!("Expected ChannelBank"); - }; - stage.prev.block_info = Some(BlockInfo { number: 1, timestamp: 2, ..Default::default() }); - - // Transition to the ChannelAssembler stage. - channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_some()); - - assert_eq!(channel_provider.origin().unwrap().number, 1); - } - - #[test] - fn test_channel_provider_transition_stage_backwards() { - let provider = TestNextFrameProvider::new(vec![]); - let cfg = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(2), ..Default::default() }, - ..Default::default() - }); - let mut channel_provider = ChannelProvider::new(cfg, provider); - - channel_provider.attempt_update().unwrap(); - - // Update the L1 origin to Holocene activation. - let Some(ref mut stage) = channel_provider.channel_bank else { - panic!("Expected ChannelBank"); - }; - stage.prev.block_info = Some(BlockInfo { number: 1, timestamp: 2, ..Default::default() }); - - // Transition to the ChannelAssembler stage. 
- channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_none()); - assert!(channel_provider.channel_assembler.is_some()); - - // Update the L1 origin to before Holocene activation, to simulate a re-org. - let Some(ref mut stage) = channel_provider.channel_assembler else { - panic!("Expected ChannelAssembler"); - }; - stage.prev.block_info = Some(BlockInfo::default()); - - channel_provider.attempt_update().unwrap(); - assert!(channel_provider.channel_bank.is_some()); - assert!(channel_provider.channel_assembler.is_none()); - } - - #[tokio::test] - async fn test_channel_provider_reset_bank() { - let frames = [ - crate::frame!(0xFF, 0, vec![0xDD; 50], false), - crate::frame!(0xFF, 1, vec![0xDD; 50], true), - ]; - let provider = TestNextFrameProvider::new(frames.into_iter().rev().map(Ok).collect()); - let cfg = Arc::new(RollupConfig::default()); - let mut channel_provider = ChannelProvider::new(cfg.clone(), provider); - - // Load in the first frame. - assert_eq!( - channel_provider.next_data().await.unwrap_err(), - PipelineError::NotEnoughData.temp() - ); - let Some(channel_bank) = channel_provider.channel_bank.as_mut() else { - panic!("Expected ChannelBank"); - }; - // Ensure a channel is in the queue. - assert!(channel_bank.channel_queue.len() == 1); - - // Reset the channel provider. - channel_provider.signal(ResetSignal::default().signal()).await.unwrap(); - - // Ensure the channel queue is empty after reset. 
- let Some(channel_bank) = channel_provider.channel_bank.as_mut() else { - panic!("Expected ChannelBank"); - }; - assert!(channel_bank.channel_queue.is_empty()); - } - - #[tokio::test] - async fn test_channel_provider_reset_assembler() { - let frames = [ - crate::frame!(0xFF, 0, vec![0xDD; 50], false), - crate::frame!(0xFF, 1, vec![0xDD; 50], true), - ]; - let provider = TestNextFrameProvider::new(frames.into_iter().rev().map(Ok).collect()); - let cfg = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }); - let mut channel_provider = ChannelProvider::new(cfg.clone(), provider); - - // Load in the first frame. - assert_eq!( - channel_provider.next_data().await.unwrap_err(), - PipelineError::NotEnoughData.temp() - ); - let Some(channel_assembler) = channel_provider.channel_assembler.as_mut() else { - panic!("Expected ChannelAssembler"); - }; - // Ensure a channel is being built. - assert!(channel_assembler.channel.is_some()); - - // Reset the channel provider. - channel_provider.signal(ResetSignal::default().signal()).await.unwrap(); - - // Ensure the channel assembler is empty after reset. - let Some(channel_assembler) = channel_provider.channel_assembler.as_mut() else { - panic!("Expected ChannelAssembler"); - }; - assert!(channel_assembler.channel.is_none()); - } -} diff --git a/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs b/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs deleted file mode 100644 index 48aa0d293ba..00000000000 --- a/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs +++ /dev/null @@ -1,294 +0,0 @@ -//! This module contains the `ChannelReader` struct. 
- -use crate::{ - BatchStreamProvider, OriginAdvancer, OriginProvider, PipelineError, PipelineResult, Signal, - SignalReceiver, -}; -use alloc::{boxed::Box, sync::Arc}; -use alloy_primitives::Bytes; -use async_trait::async_trait; -use core::fmt::Debug; -use kona_genesis::{ - MAX_RLP_BYTES_PER_CHANNEL_BEDROCK, MAX_RLP_BYTES_PER_CHANNEL_FJORD, RollupConfig, -}; -use kona_protocol::{Batch, BatchReader, BlockInfo}; -use tracing::{debug, warn}; - -/// The [`ChannelReader`] provider trait. -#[async_trait] -pub trait ChannelReaderProvider { - /// Pulls the next piece of data from the channel bank. Note that it attempts to pull data out - /// of the channel bank prior to loading data in (unlike most other stages). This is to - /// ensure maintain consistency around channel bank pruning which depends upon the order - /// of operations. - async fn next_data(&mut self) -> PipelineResult<Option<Bytes>>; -} - -/// [`ChannelReader`] is a stateful stage that reads [`Batch`]es from `Channel`s. -/// -/// The [`ChannelReader`] pulls `Channel`s from the channel bank as raw data -/// and pipes it into a `BatchReader`. Since the raw data is compressed, -/// the `BatchReader` first decompresses the data using the first bytes as -/// a compression algorithm identifier. -/// -/// Once the data is decompressed, it is decoded into a `Batch` and passed -/// to the next stage in the pipeline. -#[derive(Debug)] -pub struct ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - /// The previous stage of the derivation pipeline. - pub prev: P, - /// The batch reader. - pub next_batch: Option<BatchReader>, - /// The rollup configuration. - pub cfg: Arc<RollupConfig>, -} - -impl<P> ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - /// Create a new [`ChannelReader`] stage. 
- pub const fn new(prev: P, cfg: Arc<RollupConfig>) -> Self { - Self { prev, next_batch: None, cfg } - } - - /// Creates the batch reader from available channel data. - async fn set_batch_reader(&mut self) -> PipelineResult<()> { - if self.next_batch.is_none() { - let channel = - self.prev.next_data().await?.ok_or(PipelineError::ChannelReaderEmpty.temp())?; - - let origin = self.prev.origin().ok_or(PipelineError::MissingOrigin.crit())?; - let max_rlp_bytes_per_channel = if self.cfg.is_fjord_active(origin.timestamp) { - MAX_RLP_BYTES_PER_CHANNEL_FJORD - } else { - MAX_RLP_BYTES_PER_CHANNEL_BEDROCK - }; - - self.next_batch = - Some(BatchReader::new(&channel[..], max_rlp_bytes_per_channel as usize)); - kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 1); - } - Ok(()) - } - - /// Forces the read to continue with the next channel, resetting any - /// decoding / decompression state to a fresh start. - pub fn next_channel(&mut self) { - self.next_batch = None; - kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 0); - } -} - -#[async_trait] -impl<P> OriginAdvancer for ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, -{ - async fn advance_origin(&mut self) -> PipelineResult<()> { - self.prev.advance_origin().await - } -} - -#[async_trait] -impl<P> BatchStreamProvider for ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, -{ - /// This method is called by the BatchStream if an invalid span batch is found. - /// In the case of an invalid span batch, the associated channel must be flushed. - /// - /// See: <https://specs.optimism.io/protocol/holocene/derivation.html#span-batches> - /// - /// SAFETY: Only called post-holocene activation. 
- fn flush(&mut self) { - debug!(target: "channel_reader", "[POST-HOLOCENE] Flushing channel"); - self.next_channel(); - } - - async fn next_batch(&mut self) -> PipelineResult<Batch> { - if let Err(e) = self.set_batch_reader().await { - debug!(target: "channel_reader", "Failed to set batch reader: {:?}", e); - self.next_channel(); - return Err(e); - } - - // SAFETY: The batch reader must be set above. - let next_batch = self.next_batch.as_mut().expect("Batch reader must be set"); - match next_batch.decompress() { - Ok(()) => { - // Record the decompressed size and type. - let size = next_batch.decompressed.len() as f64; - let ty = if next_batch.brotli_used { - BatchReader::CHANNEL_VERSION_BROTLI - } else { - BatchReader::ZLIB_DEFLATE_COMPRESSION_METHOD - }; - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_LATEST_DECOMPRESSED_BATCH_SIZE, - size - ); - kona_macros::set!( - gauge, - crate::metrics::Metrics::PIPELINE_LATEST_DECOMPRESSED_BATCH_TYPE, - ty as f64 - ); - } - Err(err) => { - debug!(target: "channel_reader", ?err, "Failed to decompress batch"); - self.next_channel(); - return Err(PipelineError::NotEnoughData.temp()); - } - } - - // Read the next batch from the reader's decompressed data - match next_batch.next_batch(self.cfg.as_ref()).ok_or(PipelineError::NotEnoughData.temp()) { - Ok(batch) => { - kona_macros::inc!( - gauge, - crate::metrics::Metrics::PIPELINE_READ_BATCHES, - "type" => batch.to_string(), - ); - Ok(batch) - } - Err(e) => { - self.next_channel(); - Err(e) - } - } - } -} - -impl<P> OriginProvider for ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, -{ - fn origin(&self) -> Option<BlockInfo> { - self.prev.origin() - } -} - -#[async_trait] -impl<P> SignalReceiver for ChannelReader<P> -where - P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug + Send, -{ - async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { - match signal 
{ - Signal::FlushChannel => { - // Drop the current in-progress channel. - warn!(target: "channel_reader", "Flushed channel"); - self.next_batch = None; - kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 0); - } - s => { - self.prev.signal(s).await?; - self.next_channel(); - } - } - Ok(()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::{ - errors::PipelineErrorKind, test_utils::TestChannelReaderProvider, types::ResetSignal, - }; - use alloc::vec; - use kona_genesis::HardForkConfig; - - fn new_compressed_batch_data() -> Bytes { - let file_contents = - alloc::string::String::from_utf8_lossy(include_bytes!("../../../testdata/batch.hex")); - let file_contents = &(&*file_contents)[..file_contents.len() - 1]; - let data = alloy_primitives::hex::decode(file_contents).unwrap(); - data.into() - } - - #[tokio::test] - async fn test_flush_channel_reader() { - let mock = TestChannelReaderProvider::new(vec![Ok(Some(new_compressed_batch_data()))]); - let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); - reader.next_batch = Some(BatchReader::new( - new_compressed_batch_data(), - MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize, - )); - reader.signal(Signal::FlushChannel).await.unwrap(); - assert!(reader.next_batch.is_none()); - } - - #[tokio::test] - async fn test_reset_channel_reader() { - let mock = TestChannelReaderProvider::new(vec![Ok(None)]); - let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); - reader.next_batch = Some(BatchReader::new( - vec![0x00, 0x01, 0x02], - MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize, - )); - assert!(!reader.prev.reset); - reader.signal(ResetSignal::default().signal()).await.unwrap(); - assert!(reader.next_batch.is_none()); - assert!(reader.prev.reset); - } - - #[tokio::test] - async fn test_next_batch_batch_reader_set_fails() { - let mock = TestChannelReaderProvider::new(vec![Err(PipelineError::Eof.temp())]); - let mut reader = ChannelReader::new(mock, 
Arc::new(RollupConfig::default())); - assert_eq!(reader.next_batch().await, Err(PipelineError::Eof.temp())); - assert!(reader.next_batch.is_none()); - } - - #[tokio::test] - async fn test_next_batch_batch_reader_no_data() { - let mock = TestChannelReaderProvider::new(vec![Ok(None)]); - let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); - assert!(matches!( - reader.next_batch().await.unwrap_err(), - PipelineErrorKind::Temporary(PipelineError::ChannelReaderEmpty) - )); - assert!(reader.next_batch.is_none()); - } - - #[tokio::test] - async fn test_next_batch_batch_reader_not_enough_data() { - let mut first = new_compressed_batch_data(); - let second = first.split_to(first.len() / 2); - let mock = TestChannelReaderProvider::new(vec![Ok(Some(first)), Ok(Some(second))]); - let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); - assert_eq!(reader.next_batch().await, Err(PipelineError::NotEnoughData.temp())); - assert!(reader.next_batch.is_none()); - } - - #[tokio::test] - async fn test_next_batch_succeeds() { - let raw = new_compressed_batch_data(); - let mock = TestChannelReaderProvider::new(vec![Ok(Some(raw))]); - let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); - let res = reader.next_batch().await.unwrap(); - matches!(res, Batch::Span(_)); - assert!(reader.next_batch.is_some()); - } - - #[tokio::test] - async fn test_flush_post_holocene() { - let raw = new_compressed_batch_data(); - let config = Arc::new(RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }); - let mock = TestChannelReaderProvider::new(vec![Ok(Some(raw))]); - let mut reader = ChannelReader::new(mock, config); - let res = reader.next_batch().await.unwrap(); - matches!(res, Batch::Span(_)); - assert!(reader.next_batch.is_some()); - reader.flush(); - assert!(reader.next_batch.is_none()); - } -} diff --git a/kona/crates/protocol/derive/src/stages/channel/mod.rs 
b/kona/crates/protocol/derive/src/stages/channel/mod.rs deleted file mode 100644 index 85518fd5481..00000000000 --- a/kona/crates/protocol/derive/src/stages/channel/mod.rs +++ /dev/null @@ -1,38 +0,0 @@ -//! Stages pertaining to the reading and decoding of channels. -//! -//! Sitting after the [FrameQueue] stage, the [ChannelBank] and [ChannelAssembler] stages are -//! responsible for reading and decoding the [Frame]s into [Channel]s. The [ChannelReader] stage -//! is responsible for decoding the [Channel]s into [Batch]es, forwarding the [Batch]es to the -//! [BatchQueue] stage. -//! -//! [Frame]: kona_protocol::Frame -//! [Channel]: kona_protocol::Channel -//! [Batch]: kona_protocol::Batch -//! [FrameQueue]: crate::stages::FrameQueue -//! [BatchQueue]: crate::stages::BatchQueue - -use crate::types::PipelineResult; -use alloc::boxed::Box; -use async_trait::async_trait; -use kona_protocol::Frame; - -pub(crate) mod channel_provider; -pub use channel_provider::ChannelProvider; - -pub(crate) mod channel_bank; -pub use channel_bank::ChannelBank; - -pub(crate) mod channel_assembler; -pub use channel_assembler::ChannelAssembler; - -pub(crate) mod channel_reader; -pub use channel_reader::{ChannelReader, ChannelReaderProvider}; - -/// Provides frames for the [`ChannelBank`] and [`ChannelAssembler`] stages. -#[async_trait] -pub trait NextFrameProvider { - /// Retrieves the next [`Frame`] from the [`FrameQueue`] stage. - /// - /// [`FrameQueue`]: crate::stages::FrameQueue - async fn next_frame(&mut self) -> PipelineResult<Frame>; -} diff --git a/kona/crates/protocol/derive/src/test_utils/blob_provider.rs b/kona/crates/protocol/derive/src/test_utils/blob_provider.rs deleted file mode 100644 index df3e2d9f049..00000000000 --- a/kona/crates/protocol/derive/src/test_utils/blob_provider.rs +++ /dev/null @@ -1,51 +0,0 @@ -//! An implementation of the [BlobProvider] trait for tests. 
- -use crate::{BlobProvider, errors::BlobProviderError}; -use alloc::{boxed::Box, vec::Vec}; -use alloy_eips::eip4844::{Blob, IndexedBlobHash}; -use alloy_primitives::{B256, map::HashMap}; -use async_trait::async_trait; -use kona_protocol::BlockInfo; - -/// A mock blob provider for testing. -#[derive(Debug, Clone, Default)] -pub struct TestBlobProvider { - /// Maps block hashes to blob data. - pub blobs: HashMap<B256, Blob>, - /// whether the blob provider should return an error. - pub should_error: bool, -} - -impl TestBlobProvider { - /// Insert a blob into the mock blob provider. - pub fn insert_blob(&mut self, hash: B256, blob: Blob) { - self.blobs.insert(hash, blob); - } - - /// Clears blobs from the mock blob provider. - pub fn clear(&mut self) { - self.blobs.clear(); - } -} - -#[async_trait] -impl BlobProvider for TestBlobProvider { - type Error = BlobProviderError; - - async fn get_and_validate_blobs( - &mut self, - _block_ref: &BlockInfo, - blob_hashes: &[IndexedBlobHash], - ) -> Result<Vec<Box<Blob>>, Self::Error> { - if self.should_error { - return Err(BlobProviderError::SlotDerivation); - } - let mut blobs = Vec::new(); - for blob_hash in blob_hashes { - if let Some(data) = self.blobs.get(&blob_hash.hash) { - blobs.push(Box::new(*data)); - } - } - Ok(blobs) - } -} diff --git a/kona/crates/protocol/derive/src/test_utils/channel_provider.rs b/kona/crates/protocol/derive/src/test_utils/channel_provider.rs deleted file mode 100644 index 74211a1e5a7..00000000000 --- a/kona/crates/protocol/derive/src/test_utils/channel_provider.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Mock testing utilities for the [ChannelBank] stage. -//! -//! 
[ChannelBank]: crate::stages::ChannelBank - -use crate::{ - errors::PipelineError, - stages::NextFrameProvider, - traits::{OriginAdvancer, OriginProvider, SignalReceiver}, - types::{PipelineResult, Signal}, -}; -use alloc::{boxed::Box, vec::Vec}; -use async_trait::async_trait; -use kona_protocol::{BlockInfo, Frame}; - -/// A mock [`NextFrameProvider`] for testing the [`ChannelBank`] stage. -/// -/// [`ChannelBank`]: crate::stages::ChannelBank -#[derive(Debug, Default)] -pub struct TestNextFrameProvider { - /// The data to return. - pub data: Vec<PipelineResult<Frame>>, - /// The block info - pub block_info: Option<BlockInfo>, - /// Tracks if the channel bank provider has been reset. - pub reset: bool, -} - -impl TestNextFrameProvider { - /// Creates a new [`TestNextFrameProvider`] with the given data. - pub fn new(data: Vec<PipelineResult<Frame>>) -> Self { - Self { data, block_info: Some(BlockInfo::default()), reset: false } - } -} - -impl OriginProvider for TestNextFrameProvider { - fn origin(&self) -> Option<BlockInfo> { - self.block_info - } -} - -#[async_trait] -impl OriginAdvancer for TestNextFrameProvider { - async fn advance_origin(&mut self) -> PipelineResult<()> { - self.block_info = self.block_info.map(|mut bi| { - bi.number += 1; - bi - }); - Ok(()) - } -} - -#[async_trait] -impl NextFrameProvider for TestNextFrameProvider { - async fn next_frame(&mut self) -> PipelineResult<Frame> { - self.data.pop().unwrap_or(Err(PipelineError::Eof.temp())) - } -} - -#[async_trait] -impl SignalReceiver for TestNextFrameProvider { - async fn signal(&mut self, _: Signal) -> PipelineResult<()> { - self.reset = true; - Ok(()) - } -} diff --git a/kona/crates/protocol/derive/src/test_utils/channel_reader.rs b/kona/crates/protocol/derive/src/test_utils/channel_reader.rs deleted file mode 100644 index 5c3f23d1afb..00000000000 --- a/kona/crates/protocol/derive/src/test_utils/channel_reader.rs +++ /dev/null @@ -1,60 +0,0 @@ -//! Test utilities for the [ChannelReader] stage. 
-//! -//! [ChannelReader]: crate::stages::ChannelReader - -use crate::{ - ChannelReaderProvider, OriginAdvancer, OriginProvider, PipelineError, PipelineResult, Signal, - SignalReceiver, -}; -use alloc::{boxed::Box, vec::Vec}; -use alloy_primitives::Bytes; -use async_trait::async_trait; -use kona_protocol::BlockInfo; - -/// A mock [`ChannelReaderProvider`] for testing the [`ChannelReader`] stage. -/// -/// [`ChannelReader`]: crate::stages::ChannelReader -#[derive(Debug, Default)] -pub struct TestChannelReaderProvider { - /// The data to return. - pub data: Vec<PipelineResult<Option<Bytes>>>, - /// The origin block info - pub block_info: Option<BlockInfo>, - /// Tracks if the channel reader provider has been reset. - pub reset: bool, -} - -impl TestChannelReaderProvider { - /// Creates a new [`TestChannelReaderProvider`] with the given data. - pub fn new(data: Vec<PipelineResult<Option<Bytes>>>) -> Self { - Self { data, block_info: Some(BlockInfo::default()), reset: false } - } -} - -impl OriginProvider for TestChannelReaderProvider { - fn origin(&self) -> Option<BlockInfo> { - self.block_info - } -} - -#[async_trait] -impl OriginAdvancer for TestChannelReaderProvider { - async fn advance_origin(&mut self) -> PipelineResult<()> { - Ok(()) - } -} - -#[async_trait] -impl ChannelReaderProvider for TestChannelReaderProvider { - async fn next_data(&mut self) -> PipelineResult<Option<Bytes>> { - self.data.pop().unwrap_or(Err(PipelineError::Eof.temp())) - } -} - -#[async_trait] -impl SignalReceiver for TestChannelReaderProvider { - async fn signal(&mut self, _: Signal) -> PipelineResult<()> { - self.reset = true; - Ok(()) - } -} diff --git a/kona/crates/protocol/derive/src/test_utils/tracing.rs b/kona/crates/protocol/derive/src/test_utils/tracing.rs deleted file mode 100644 index dec02084105..00000000000 --- a/kona/crates/protocol/derive/src/test_utils/tracing.rs +++ /dev/null @@ -1,57 +0,0 @@ -//! 
This module contains a subscriber layer for `tracing-subscriber` that collects traces and their -//! log levels. - -use alloc::{format, string::String, sync::Arc, vec::Vec}; -use spin::Mutex; -use tracing::{Event, Level, Subscriber}; -use tracing_subscriber::{Layer, layer::Context}; - -/// The storage for the collected traces. -#[derive(Debug, Default, Clone)] -pub struct TraceStorage(pub Arc<Mutex<Vec<(Level, String)>>>); - -impl TraceStorage { - /// Returns the items in the storage that match the specified level. - pub fn get_by_level(&self, level: Level) -> Vec<String> { - self.0 - .lock() - .iter() - .filter_map(|(l, message)| if *l == level { Some(message.clone()) } else { None }) - .collect() - } - - /// Locks the storage and returns the items. - pub fn lock(&self) -> spin::MutexGuard<'_, Vec<(Level, String)>> { - self.0.lock() - } - - /// Returns if the storage is empty. - pub fn is_empty(&self) -> bool { - self.0.lock().is_empty() - } -} - -/// A subscriber layer that collects traces and their log levels. -#[derive(Debug, Default)] -pub struct CollectingLayer { - /// The storage for the collected traces. - pub storage: TraceStorage, -} - -impl CollectingLayer { - /// Creates a new collecting layer with the specified storage. 
- pub const fn new(storage: TraceStorage) -> Self { - Self { storage } - } -} - -impl<S: Subscriber> Layer<S> for CollectingLayer { - fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { - let metadata = event.metadata(); - let level = *metadata.level(); - let message = format!("{event:?}"); - - let mut storage = self.storage.0.lock(); - storage.push((level, message)); - } -} diff --git a/kona/crates/protocol/genesis/Cargo.toml b/kona/crates/protocol/genesis/Cargo.toml deleted file mode 100644 index c41f2e7db4e..00000000000 --- a/kona/crates/protocol/genesis/Cargo.toml +++ /dev/null @@ -1,88 +0,0 @@ -[package] -name = "kona-genesis" -version = "0.4.5" -description = "Optimism genesis types" - -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Alloy -alloy-sol-types.workspace = true -alloy-primitives.workspace = true -alloy-consensus.workspace = true -alloy-eips.workspace = true -alloy-hardforks.workspace = true -alloy-op-hardforks.workspace = true -alloy-chains.workspace = true -alloy-genesis.workspace = true - -# Misc -thiserror.workspace = true -derive_more = { workspace = true, features = ["from", "try_from"] } - -# `revm` feature -op-revm = { workspace = true, optional = true } - -# `arbitrary` feature -arbitrary = { workspace = true, features = ["derive"], optional = true } - -# `serde` feature -serde = { workspace = true, optional = true } -serde_repr = { workspace = true, optional = true } - -# `tabled` feature -tabled = { workspace = true, features = ["derive"], optional = true } - -[dev-dependencies] -toml = { workspace = true, features = ["parse", "serde"] } -rand = { workspace = true, features = ["thread_rng"] } -serde_json.workspace = true -arbitrary = { workspace = true, features = ["derive"] } -alloy-primitives = { workspace = true, features = ["rand", 
"arbitrary"] } - -[features] -default = [] -revm = [ "dep:op-revm" ] -tabled = [ "dep:tabled", "std" ] -std = [ - "alloy-chains/std", - "alloy-consensus/std", - "alloy-eips/std", - "alloy-genesis/std", - "alloy-primitives/std", - "alloy-sol-types/std", - "derive_more/std", - "op-revm?/std", - "serde?/std", - "thiserror/std", -] -arbitrary = [ - "alloy-chains/arbitrary", - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "alloy-primitives/rand", - "alloy-sol-types/arbitrary", - "dep:arbitrary", - "std", -] -serde = [ - "alloy-chains/serde", - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-hardforks/serde", - "alloy-op-hardforks/serde", - "alloy-primitives/serde", - "dep:serde", - "dep:serde_repr", - "op-revm?/serde", -] diff --git a/kona/crates/protocol/genesis/README.md b/kona/crates/protocol/genesis/README.md deleted file mode 100644 index 82f84e83f8a..00000000000 --- a/kona/crates/protocol/genesis/README.md +++ /dev/null @@ -1,30 +0,0 @@ -## `kona-genesis` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-genesis"><img src="https://img.shields.io/crates/v/kona-genesis.svg" alt="kona-genesis crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - - -Genesis types for Optimism. - -### Usage - -_By default, `kona-genesis` enables both `std` and `serde` features._ - -If you're working in a `no_std` environment (like [`kona`][kona]), disable default features like so. 
- -```toml -[dependencies] -kona-genesis = { version = "x.y.z", default-features = false, features = ["serde"] } -``` - -#### Rollup Config - -`kona-genesis` exports a `RollupConfig`, the primary genesis type for Optimism Consensus. - - -<!-- Links --> - -[alloy-genesis]: https://github.com/alloy-rs -[kona]: https://github.com/op-rs/kona/blob/main/Cargo.toml#L137 diff --git a/kona/crates/protocol/genesis/src/chain/config.rs b/kona/crates/protocol/genesis/src/chain/config.rs deleted file mode 100644 index 29c4e30b271..00000000000 --- a/kona/crates/protocol/genesis/src/chain/config.rs +++ /dev/null @@ -1,357 +0,0 @@ -//! Contains the chain config type. - -use alloc::string::String; -use alloy_chains::Chain; -use alloy_eips::eip1559::BaseFeeParams; -use alloy_primitives::Address; - -use crate::{ - AddressList, AltDAConfig, BaseFeeConfig, ChainGenesis, GRANITE_CHANNEL_TIMEOUT, HardForkConfig, - Roles, RollupConfig, SuperchainLevel, base_fee_params, base_fee_params_canyon, - params::base_fee_config, rollup::DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, -}; - -/// L1 chain configuration from the `alloy-genesis` crate. -pub type L1ChainConfig = alloy_genesis::ChainConfig; - -/// Defines core blockchain settings per block. -/// -/// Tailors unique settings for each network based on -/// its genesis block and superchain configuration. -/// -/// This struct bridges the interface between the [`ChainConfig`][ccr] -/// defined in the [`superchain-registry`][scr] and the [`ChainConfig`][ccg] -/// defined in [`op-geth`][opg]. 
-/// -/// [opg]: https://github.com/ethereum-optimism/op-geth -/// [scr]: https://github.com/ethereum-optimism/superchain-registry -/// [ccg]: https://github.com/ethereum-optimism/op-geth/blob/optimism/params/config.go#L342 -/// [ccr]: https://github.com/ethereum-optimism/superchain-registry/blob/main/ops/internal/config/superchain.go#L70 -#[derive(Debug, Clone, Default, Eq, PartialEq)] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct ChainConfig { - /// Chain name (e.g. "Base") - #[cfg_attr(feature = "serde", serde(rename = "Name", alias = "name"))] - pub name: String, - /// L1 chain ID - #[cfg_attr(feature = "serde", serde(skip))] - pub l1_chain_id: u64, - /// Chain public RPC endpoint - #[cfg_attr(feature = "serde", serde(rename = "PublicRPC", alias = "public_rpc"))] - pub public_rpc: String, - /// Chain sequencer RPC endpoint - #[cfg_attr(feature = "serde", serde(rename = "SequencerRPC", alias = "sequencer_rpc"))] - pub sequencer_rpc: String, - /// Chain explorer HTTP endpoint - #[cfg_attr(feature = "serde", serde(rename = "Explorer", alias = "explorer"))] - pub explorer: String, - /// Level of integration with the superchain. - #[cfg_attr(feature = "serde", serde(rename = "SuperchainLevel", alias = "superchain_level"))] - pub superchain_level: SuperchainLevel, - /// Whether the chain is governed by optimism. - #[cfg_attr( - feature = "serde", - serde(rename = "GovernedByOptimism", alias = "governed_by_optimism") - )] - #[cfg_attr(feature = "serde", serde(default))] - pub governed_by_optimism: bool, - /// Time of when a given chain is opted in to the Superchain. - /// If set, hardforks times after the superchain time - /// will be inherited from the superchain-wide config. - #[cfg_attr(feature = "serde", serde(rename = "SuperchainTime", alias = "superchain_time"))] - pub superchain_time: Option<u64>, - /// Data availability type. 
- #[cfg_attr( - feature = "serde", - serde(rename = "DataAvailabilityType", alias = "data_availability_type") - )] - pub data_availability_type: String, - /// Chain ID - #[cfg_attr(feature = "serde", serde(rename = "l2_chain_id", alias = "chain_id"))] - pub chain_id: u64, - /// Chain-specific batch inbox address - #[cfg_attr( - feature = "serde", - serde(rename = "batch_inbox_address", alias = "batch_inbox_addr") - )] - #[cfg_attr(feature = "serde", serde(default))] - pub batch_inbox_addr: Address, - /// The block time in seconds. - #[cfg_attr(feature = "serde", serde(rename = "block_time"))] - pub block_time: u64, - /// The sequencer window size in seconds. - #[cfg_attr(feature = "serde", serde(rename = "seq_window_size"))] - pub seq_window_size: u64, - /// The maximum sequencer drift in seconds. - #[cfg_attr(feature = "serde", serde(rename = "max_sequencer_drift"))] - pub max_sequencer_drift: u64, - /// Gas paying token metadata. Not consumed by downstream OPStack components. - #[cfg_attr(feature = "serde", serde(rename = "GasPayingToken", alias = "gas_paying_token"))] - pub gas_paying_token: Option<Address>, - /// Hardfork Config. These values may override the superchain-wide defaults. - #[cfg_attr(feature = "serde", serde(rename = "hardfork_configuration", alias = "hardforks"))] - pub hardfork_config: HardForkConfig, - /// Optimism configuration - #[cfg_attr(feature = "serde", serde(rename = "optimism"))] - pub optimism: Option<BaseFeeConfig>, - /// Alternative DA configuration - #[cfg_attr(feature = "serde", serde(rename = "alt_da"))] - pub alt_da: Option<AltDAConfig>, - /// Chain-specific genesis information - pub genesis: ChainGenesis, - /// Roles - #[cfg_attr(feature = "serde", serde(rename = "Roles", alias = "roles"))] - pub roles: Option<Roles>, - /// Addresses - #[cfg_attr(feature = "serde", serde(rename = "Addresses", alias = "addresses"))] - pub addresses: Option<AddressList>, -} - -impl ChainConfig { - /// Returns the base fee params for the chain. 
- pub fn base_fee_params(&self) -> BaseFeeParams { - self.optimism - .as_ref() - .map(|op| op.pre_canyon_params()) - .unwrap_or_else(|| base_fee_params(self.chain_id)) - } - - /// Returns the canyon base fee params for the chain. - pub fn canyon_base_fee_params(&self) -> BaseFeeParams { - self.optimism - .as_ref() - .map(|op| op.post_canyon_params()) - .unwrap_or_else(|| base_fee_params_canyon(self.chain_id)) - } - - /// Returns the base fee config for the chain. - pub fn base_fee_config(&self) -> BaseFeeConfig { - self.optimism.as_ref().map(|op| *op).unwrap_or_else(|| base_fee_config(self.chain_id)) - } - - /// Loads the rollup config for the OP-Stack chain given the chain config and address list. - #[deprecated(since = "0.2.1", note = "please use `as_rollup_config` instead")] - pub fn load_op_stack_rollup_config(&self) -> RollupConfig { - self.as_rollup_config() - } - - /// Loads the rollup config for the OP-Stack chain given the chain config and address list. - pub fn as_rollup_config(&self) -> RollupConfig { - RollupConfig { - genesis: self.genesis, - l1_chain_id: self.l1_chain_id, - l2_chain_id: Chain::from(self.chain_id), - block_time: self.block_time, - seq_window_size: self.seq_window_size, - max_sequencer_drift: self.max_sequencer_drift, - hardforks: self.hardfork_config, - batch_inbox_address: self.batch_inbox_addr, - deposit_contract_address: self - .addresses - .as_ref() - .and_then(|a| a.optimism_portal_proxy) - .unwrap_or_default(), - l1_system_config_address: self - .addresses - .as_ref() - .and_then(|a| a.system_config_proxy) - .unwrap_or_default(), - protocol_versions_address: self - .addresses - .as_ref() - .and_then(|a| a.address_manager) - .unwrap_or_default(), - superchain_config_address: None, - blobs_enabled_l1_timestamp: None, - da_challenge_address: self - .alt_da - .as_ref() - .and_then(|alt_da| alt_da.da_challenge_address), - - // The below chain parameters can be different per OP-Stack chain, - // but since none of the superchain chains 
differ, it's not represented in the - // superchain-registry yet. This restriction on superchain-chains may change in the - // future. Test/Alt configurations can still load custom rollup-configs when - // necessary. - channel_timeout: 300, - granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, - interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, - chain_op_config: self.base_fee_config(), - alt_da_config: self.alt_da.clone(), - } - } -} - -#[cfg(test)] -#[cfg(feature = "serde")] -mod tests { - use super::*; - - #[test] - fn test_chain_config_json() { - let raw: &str = r#" - { - "Name": "Base", - "PublicRPC": "https://mainnet.base.org", - "SequencerRPC": "https://mainnet-sequencer.base.org", - "Explorer": "https://explorer.base.org", - "SuperchainLevel": 1, - "GovernedByOptimism": false, - "SuperchainTime": 0, - "DataAvailabilityType": "eth-da", - "l2_chain_id": 8453, - "batch_inbox_address": "0xff00000000000000000000000000000000008453", - "block_time": 2, - "seq_window_size": 3600, - "max_sequencer_drift": 600, - "GasPayingToken": null, - "hardfork_configuration": { - "canyon_time": 1704992401, - "delta_time": 1708560000, - "ecotone_time": 1710374401, - "fjord_time": 1720627201, - "granite_time": 1726070401, - "holocene_time": 1736445601 - }, - "optimism": { - "eip1559Elasticity": 6, - "eip1559Denominator": 50, - "eip1559DenominatorCanyon": 250 - }, - "alt_da": null, - "genesis": { - "l1": { - "number": 17481768, - "hash": "0x5c13d307623a926cd31415036c8b7fa14572f9dac64528e857a470511fc30771" - }, - "l2": { - "number": 0, - "hash": "0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd" - }, - "l2_time": 1686789347, - "system_config": { - "batcherAddress": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9", - "overhead": "0xbc", - "scalar": "0xa6fe0", - "gasLimit": 30000000 - } - }, - "Roles": { - "SystemConfigOwner": "0x14536667cd30e52c0b458baaccb9fada7046e056", - "ProxyAdminOwner": "0x7bb41c3008b3f03fe483b28b8db90e19cf07595c", - "Guardian": 
"0x09f7150d8c019bef34450d6920f6b3608cefdaf2", - "Challenger": "0x6f8c5ba3f59ea3e76300e3becdc231d656017824", - "Proposer": "0x642229f238fb9de03374be34b0ed8d9de80752c5", - "UnsafeBlockSigner": "0xaf6e19be0f9ce7f8afd49a1824851023a8249e8a", - "BatchSubmitter": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9" - }, - "Addresses": { - "AddressManager": "0x8efb6b5c4767b09dc9aa6af4eaa89f749522bae2", - "L1CrossDomainMessengerProxy": "0x866e82a600a1414e583f7f13623f1ac5d58b0afa", - "L1Erc721BridgeProxy": "0x608d94945a64503e642e6370ec598e519a2c1e53", - "L1StandardBridgeProxy": "0x3154cf16ccdb4c6d922629664174b904d80f2c35", - "L2OutputOracleProxy": "0x56315b90c40730925ec5485cf004d835058518a0", - "OptimismMintableErc20FactoryProxy": "0x05cc379ebd9b30bba19c6fa282ab29218ec61d84", - "OptimismPortalProxy": "0x49048044d57e1c92a77f79988d21fa8faf74e97e", - "SystemConfigProxy": "0x73a79fab69143498ed3712e519a88a918e1f4072", - "ProxyAdmin": "0x0475cbcaebd9ce8afa5025828d5b98dfb67e059e", - "AnchorStateRegistryProxy": "0xdb9091e48b1c42992a1213e6916184f9ebdbfedf", - "DelayedWethProxy": "0xa2f2ac6f5af72e494a227d79db20473cf7a1ffe8", - "DisputeGameFactoryProxy": "0x43edb88c4b80fdd2adff2412a7bebf9df42cb40e", - "FaultDisputeGame": "0xcd3c0194db74c23807d4b90a5181e1b28cf7007c", - "Mips": "0x16e83ce5ce29bf90ad9da06d2fe6a15d5f344ce4", - "PermissionedDisputeGame": "0x19009debf8954b610f207d5925eede827805986e", - "PreimageOracle": "0x9c065e11870b891d214bc2da7ef1f9ddfa1be277" - } - } - "#; - - let deserialized: ChainConfig = serde_json::from_str(raw).unwrap(); - assert_eq!(deserialized.name, "Base"); - } - - #[test] - fn test_chain_config_unknown_field_json() { - let raw: &str = r#" - { - "Name": "Base", - "PublicRPC": "https://mainnet.base.org", - "SequencerRPC": "https://mainnet-sequencer.base.org", - "Explorer": "https://explorer.base.org", - "SuperchainLevel": 1, - "GovernedByOptimism": false, - "SuperchainTime": 0, - "DataAvailabilityType": "eth-da", - "l2_chain_id": 8453, - "batch_inbox_address": 
"0xff00000000000000000000000000000000008453", - "block_time": 2, - "seq_window_size": 3600, - "max_sequencer_drift": 600, - "GasPayingToken": null, - "hardfork_configuration": { - "canyon_time": 1704992401, - "delta_time": 1708560000, - "ecotone_time": 1710374401, - "fjord_time": 1720627201, - "granite_time": 1726070401, - "holocene_time": 1736445601 - }, - "optimism": { - "eip1559Elasticity": "0x6", - "eip1559Denominator": "0x32", - "eip1559DenominatorCanyon": "0xfa" - }, - "alt_da": null, - "genesis": { - "l1": { - "number": 17481768, - "hash": "0x5c13d307623a926cd31415036c8b7fa14572f9dac64528e857a470511fc30771" - }, - "l2": { - "number": 0, - "hash": "0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd" - }, - "l2_time": 1686789347, - "system_config": { - "batcherAddress": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9", - "overhead": "0xbc", - "scalar": "0xa6fe0", - "gasLimit": 30000000 - } - }, - "Roles": { - "SystemConfigOwner": "0x14536667cd30e52c0b458baaccb9fada7046e056", - "ProxyAdminOwner": "0x7bb41c3008b3f03fe483b28b8db90e19cf07595c", - "Guardian": "0x09f7150d8c019bef34450d6920f6b3608cefdaf2", - "Challenger": "0x6f8c5ba3f59ea3e76300e3becdc231d656017824", - "Proposer": "0x642229f238fb9de03374be34b0ed8d9de80752c5", - "UnsafeBlockSigner": "0xaf6e19be0f9ce7f8afd49a1824851023a8249e8a", - "BatchSubmitter": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9" - }, - "Addresses": { - "AddressManager": "0x8efb6b5c4767b09dc9aa6af4eaa89f749522bae2", - "L1CrossDomainMessengerProxy": "0x866e82a600a1414e583f7f13623f1ac5d58b0afa", - "L1Erc721BridgeProxy": "0x608d94945a64503e642e6370ec598e519a2c1e53", - "L1StandardBridgeProxy": "0x3154cf16ccdb4c6d922629664174b904d80f2c35", - "L2OutputOracleProxy": "0x56315b90c40730925ec5485cf004d835058518a0", - "OptimismMintableErc20FactoryProxy": "0x05cc379ebd9b30bba19c6fa282ab29218ec61d84", - "OptimismPortalProxy": "0x49048044d57e1c92a77f79988d21fa8faf74e97e", - "SystemConfigProxy": "0x73a79fab69143498ed3712e519a88a918e1f4072", - 
"ProxyAdmin": "0x0475cbcaebd9ce8afa5025828d5b98dfb67e059e", - "AnchorStateRegistryProxy": "0xdb9091e48b1c42992a1213e6916184f9ebdbfedf", - "DelayedWethProxy": "0xa2f2ac6f5af72e494a227d79db20473cf7a1ffe8", - "DisputeGameFactoryProxy": "0x43edb88c4b80fdd2adff2412a7bebf9df42cb40e", - "FaultDisputeGame": "0xcd3c0194db74c23807d4b90a5181e1b28cf7007c", - "Mips": "0x16e83ce5ce29bf90ad9da06d2fe6a15d5f344ce4", - "PermissionedDisputeGame": "0x19009debf8954b610f207d5925eede827805986e", - "PreimageOracle": "0x9c065e11870b891d214bc2da7ef1f9ddfa1be277" - }, - "unknown_field": "unknown" - } - "#; - - let err = serde_json::from_str::<ChainConfig>(raw).unwrap_err(); - assert_eq!(err.classify(), serde_json::error::Category::Data); - } -} diff --git a/kona/crates/protocol/genesis/src/lib.rs b/kona/crates/protocol/genesis/src/lib.rs deleted file mode 100644 index ad10131f195..00000000000 --- a/kona/crates/protocol/genesis/src/lib.rs +++ /dev/null @@ -1,66 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -mod params; -pub use params::{ - BASE_MAINNET_BASE_FEE_CONFIG, BASE_MAINNET_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, - BASE_MAINNET_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, - BASE_MAINNET_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, BASE_SEPOLIA_BASE_FEE_CONFIG, - BASE_SEPOLIA_BASE_FEE_PARAMS, BASE_SEPOLIA_BASE_FEE_PARAMS_CANYON, - BASE_SEPOLIA_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, - BASE_SEPOLIA_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, - BASE_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, BaseFeeConfig, OP_MAINNET_BASE_FEE_CONFIG, - OP_MAINNET_BASE_FEE_PARAMS, 
OP_MAINNET_BASE_FEE_PARAMS_CANYON, - OP_MAINNET_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, - OP_MAINNET_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, - OP_MAINNET_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, OP_SEPOLIA_BASE_FEE_CONFIG, - OP_SEPOLIA_BASE_FEE_PARAMS, OP_SEPOLIA_BASE_FEE_PARAMS_CANYON, - OP_SEPOLIA_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, - OP_SEPOLIA_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, - OP_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, base_fee_config, base_fee_params, - base_fee_params_canyon, -}; - -mod superchain; -pub use superchain::{ - Chain, ChainList, FaultProofs, Superchain, SuperchainConfig, SuperchainL1Info, SuperchainLevel, - SuperchainParent, Superchains, -}; - -mod updates; -pub use updates::{ - BatcherUpdate, DaFootprintGasScalarUpdate, Eip1559Update, GasConfigUpdate, GasLimitUpdate, - MinBaseFeeUpdate, OperatorFeeUpdate, UnsafeBlockSignerUpdate, -}; - -mod system; -pub use system::{ - BatcherUpdateError, CONFIG_UPDATE_EVENT_VERSION_0, CONFIG_UPDATE_TOPIC, - DaFootprintGasScalarUpdateError, EIP1559UpdateError, GasConfigUpdateError, GasLimitUpdateError, - LogProcessingError, MinBaseFeeUpdateError, OperatorFeeUpdateError, SystemConfig, - SystemConfigLog, SystemConfigUpdate, SystemConfigUpdateError, SystemConfigUpdateKind, - UnsafeBlockSignerUpdateError, -}; - -mod chain; -pub use chain::{ - AddressList, AltDAConfig, BASE_MAINNET_CHAIN_ID, BASE_SEPOLIA_CHAIN_ID, ChainConfig, - HardForkConfig, L1ChainConfig, OP_MAINNET_CHAIN_ID, OP_SEPOLIA_CHAIN_ID, Roles, -}; - -mod genesis; -pub use genesis::ChainGenesis; - -mod rollup; -pub use rollup::{ - DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, FJORD_MAX_SEQUENCER_DRIFT, GRANITE_CHANNEL_TIMEOUT, - MAX_RLP_BYTES_PER_CHANNEL_BEDROCK, MAX_RLP_BYTES_PER_CHANNEL_FJORD, RollupConfig, -}; diff --git a/kona/crates/protocol/genesis/src/rollup.rs b/kona/crates/protocol/genesis/src/rollup.rs deleted file mode 100644 index a6048d51e5d..00000000000 --- 
a/kona/crates/protocol/genesis/src/rollup.rs +++ /dev/null @@ -1,950 +0,0 @@ -//! Rollup Config Types - -use crate::{AltDAConfig, BaseFeeConfig, ChainGenesis, HardForkConfig, OP_MAINNET_BASE_FEE_CONFIG}; -use alloy_chains::Chain; -use alloy_hardforks::{EthereumHardfork, EthereumHardforks, ForkCondition}; -use alloy_op_hardforks::{OpHardfork, OpHardforks}; -use alloy_primitives::Address; - -/// The max rlp bytes per channel for the Bedrock hardfork. -pub const MAX_RLP_BYTES_PER_CHANNEL_BEDROCK: u64 = 10_000_000; - -/// The max rlp bytes per channel for the Fjord hardfork. -pub const MAX_RLP_BYTES_PER_CHANNEL_FJORD: u64 = 100_000_000; - -/// The max sequencer drift when the Fjord hardfork is active. -pub const FJORD_MAX_SEQUENCER_DRIFT: u64 = 1800; - -/// The channel timeout once the Granite hardfork is active. -pub const GRANITE_CHANNEL_TIMEOUT: u64 = 50; - -/// The default interop message expiry window. (1 hour, in seconds) -pub const DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW: u64 = 60 * 60; - -#[cfg(feature = "serde")] -const fn default_granite_channel_timeout() -> u64 { - GRANITE_CHANNEL_TIMEOUT -} - -#[cfg(feature = "serde")] -const fn default_interop_message_expiry_window() -> u64 { - DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW -} - -/// The Rollup configuration. -#[derive(Debug, Clone, Eq, PartialEq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(deny_unknown_fields))] -pub struct RollupConfig { - /// The genesis state of the rollup. - pub genesis: ChainGenesis, - /// The block time of the L2, in seconds. - pub block_time: u64, - /// Sequencer batches may not be more than MaxSequencerDrift seconds after - /// the L1 timestamp of the sequencing window end. - /// - /// Note: When L1 has many 1 second consecutive blocks, and L2 grows at fixed 2 seconds, - /// the L2 time may still grow beyond this difference. - /// - /// Note: After the Fjord hardfork, this value becomes a constant of `1800`. 
- pub max_sequencer_drift: u64, - /// The sequencer window size. - pub seq_window_size: u64, - /// Number of L1 blocks between when a channel can be opened and when it can be closed. - pub channel_timeout: u64, - /// The channel timeout after the Granite hardfork. - #[cfg_attr(feature = "serde", serde(default = "default_granite_channel_timeout"))] - pub granite_channel_timeout: u64, - /// The L1 chain ID - pub l1_chain_id: u64, - /// The L2 chain ID - pub l2_chain_id: Chain, - /// Hardfork timestamps. - #[cfg_attr(feature = "serde", serde(flatten))] - pub hardforks: HardForkConfig, - /// `batch_inbox_address` is the L1 address that batches are sent to. - pub batch_inbox_address: Address, - /// `deposit_contract_address` is the L1 address that deposits are sent to. - pub deposit_contract_address: Address, - /// `l1_system_config_address` is the L1 address that the system config is stored at. - pub l1_system_config_address: Address, - /// `protocol_versions_address` is the L1 address that the protocol versions are stored at. - pub protocol_versions_address: Address, - /// The superchain config address. - #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] - pub superchain_config_address: Option<Address>, - /// `blobs_enabled_l1_timestamp` is the timestamp to start reading blobs as a batch data - /// source. Optional. - #[cfg_attr( - feature = "serde", - serde(rename = "blobs_data", skip_serializing_if = "Option::is_none") - )] - pub blobs_enabled_l1_timestamp: Option<u64>, - /// `da_challenge_address` is the L1 address that the data availability challenge contract is - /// stored at. - #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] - pub da_challenge_address: Option<Address>, - /// `interop_message_expiry_window` is the maximum time (in seconds) that an initiating message - /// can be referenced on a remote chain before it expires. 
- #[cfg_attr(feature = "serde", serde(default = "default_interop_message_expiry_window"))] - pub interop_message_expiry_window: u64, - /// `alt_da_config` is the chain-specific DA config for the rollup. - #[cfg_attr(feature = "serde", serde(rename = "alt_da"))] - pub alt_da_config: Option<AltDAConfig>, - /// `chain_op_config` is the chain-specific EIP1559 config for the rollup. - #[cfg_attr(feature = "serde", serde(default = "BaseFeeConfig::optimism"))] - pub chain_op_config: BaseFeeConfig, -} - -#[cfg(feature = "arbitrary")] -impl<'a> arbitrary::Arbitrary<'a> for RollupConfig { - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> { - use crate::{ - BASE_SEPOLIA_BASE_FEE_CONFIG, OP_MAINNET_BASE_FEE_CONFIG, OP_SEPOLIA_BASE_FEE_CONFIG, - }; - let chain_op_config = match u32::arbitrary(u)? % 3 { - 0 => OP_MAINNET_BASE_FEE_CONFIG, - 1 => OP_SEPOLIA_BASE_FEE_CONFIG, - _ => BASE_SEPOLIA_BASE_FEE_CONFIG, - }; - - Ok(Self { - genesis: ChainGenesis::arbitrary(u)?, - block_time: u.arbitrary()?, - max_sequencer_drift: u.arbitrary()?, - seq_window_size: u.arbitrary()?, - channel_timeout: u.arbitrary()?, - granite_channel_timeout: u.arbitrary()?, - l1_chain_id: u.arbitrary()?, - l2_chain_id: u.arbitrary()?, - hardforks: HardForkConfig::arbitrary(u)?, - batch_inbox_address: Address::arbitrary(u)?, - deposit_contract_address: Address::arbitrary(u)?, - l1_system_config_address: Address::arbitrary(u)?, - protocol_versions_address: Address::arbitrary(u)?, - superchain_config_address: Option::<Address>::arbitrary(u)?, - blobs_enabled_l1_timestamp: Option::<u64>::arbitrary(u)?, - da_challenge_address: Option::<Address>::arbitrary(u)?, - interop_message_expiry_window: u.arbitrary()?, - chain_op_config, - alt_da_config: Option::<AltDAConfig>::arbitrary(u)?, - }) - } -} - -// Need to manually implement Default because [`BaseFeeParams`] has no Default impl. 
-impl Default for RollupConfig { - fn default() -> Self { - Self { - genesis: ChainGenesis::default(), - block_time: 0, - max_sequencer_drift: 0, - seq_window_size: 0, - channel_timeout: 0, - granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, - l1_chain_id: 0, - l2_chain_id: Chain::from_id(0), - hardforks: HardForkConfig::default(), - batch_inbox_address: Address::ZERO, - deposit_contract_address: Address::ZERO, - l1_system_config_address: Address::ZERO, - protocol_versions_address: Address::ZERO, - superchain_config_address: None, - blobs_enabled_l1_timestamp: None, - da_challenge_address: None, - interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, - alt_da_config: None, - chain_op_config: OP_MAINNET_BASE_FEE_CONFIG, - } - } -} - -#[cfg(feature = "revm")] -impl RollupConfig { - /// Returns the active [`op_revm::OpSpecId`] for the executor. - /// - /// ## Takes - /// - `timestamp`: The timestamp of the executing block. - /// - /// ## Returns - /// The active [`op_revm::OpSpecId`] for the executor. - pub fn spec_id(&self, timestamp: u64) -> op_revm::OpSpecId { - if self.is_interop_active(timestamp) { - op_revm::OpSpecId::INTEROP - } else if self.is_jovian_active(timestamp) { - op_revm::OpSpecId::JOVIAN - } else if self.is_isthmus_active(timestamp) { - op_revm::OpSpecId::ISTHMUS - } else if self.is_holocene_active(timestamp) { - op_revm::OpSpecId::HOLOCENE - } else if self.is_fjord_active(timestamp) { - op_revm::OpSpecId::FJORD - } else if self.is_ecotone_active(timestamp) { - op_revm::OpSpecId::ECOTONE - } else if self.is_canyon_active(timestamp) { - op_revm::OpSpecId::CANYON - } else if self.is_regolith_active(timestamp) { - op_revm::OpSpecId::REGOLITH - } else { - op_revm::OpSpecId::BEDROCK - } - } -} - -impl RollupConfig { - /// Returns true if Regolith is active at the given timestamp. 
- pub fn is_regolith_active(&self, timestamp: u64) -> bool { - self.hardforks.regolith_time.is_some_and(|t| timestamp >= t) || - self.is_canyon_active(timestamp) - } - - /// Returns true if the timestamp marks the first Regolith block. - pub fn is_first_regolith_block(&self, timestamp: u64) -> bool { - self.is_regolith_active(timestamp) && - !self.is_regolith_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Canyon is active at the given timestamp. - pub fn is_canyon_active(&self, timestamp: u64) -> bool { - self.hardforks.canyon_time.is_some_and(|t| timestamp >= t) || - self.is_delta_active(timestamp) - } - - /// Returns true if the timestamp marks the first Canyon block. - pub fn is_first_canyon_block(&self, timestamp: u64) -> bool { - self.is_canyon_active(timestamp) && - !self.is_canyon_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Delta is active at the given timestamp. - pub fn is_delta_active(&self, timestamp: u64) -> bool { - self.hardforks.delta_time.is_some_and(|t| timestamp >= t) || - self.is_ecotone_active(timestamp) - } - - /// Returns true if the timestamp marks the first Delta block. - pub fn is_first_delta_block(&self, timestamp: u64) -> bool { - self.is_delta_active(timestamp) && - !self.is_delta_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Ecotone is active at the given timestamp. - pub fn is_ecotone_active(&self, timestamp: u64) -> bool { - self.hardforks.ecotone_time.is_some_and(|t| timestamp >= t) || - self.is_fjord_active(timestamp) - } - - /// Returns true if the timestamp marks the first Ecotone block. - pub fn is_first_ecotone_block(&self, timestamp: u64) -> bool { - self.is_ecotone_active(timestamp) && - !self.is_ecotone_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Fjord is active at the given timestamp. 
- pub fn is_fjord_active(&self, timestamp: u64) -> bool { - self.hardforks.fjord_time.is_some_and(|t| timestamp >= t) || - self.is_granite_active(timestamp) - } - - /// Returns true if the timestamp marks the first Fjord block. - pub fn is_first_fjord_block(&self, timestamp: u64) -> bool { - self.is_fjord_active(timestamp) && - !self.is_fjord_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Granite is active at the given timestamp. - pub fn is_granite_active(&self, timestamp: u64) -> bool { - self.hardforks.granite_time.is_some_and(|t| timestamp >= t) || - self.is_holocene_active(timestamp) - } - - /// Returns true if the timestamp marks the first Granite block. - pub fn is_first_granite_block(&self, timestamp: u64) -> bool { - self.is_granite_active(timestamp) && - !self.is_granite_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Holocene is active at the given timestamp. - pub fn is_holocene_active(&self, timestamp: u64) -> bool { - self.hardforks.holocene_time.is_some_and(|t| timestamp >= t) || - self.is_isthmus_active(timestamp) - } - - /// Returns true if the timestamp marks the first Holocene block. - pub fn is_first_holocene_block(&self, timestamp: u64) -> bool { - self.is_holocene_active(timestamp) && - !self.is_holocene_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if the pectra blob schedule is active at the given timestamp. - pub fn is_pectra_blob_schedule_active(&self, timestamp: u64) -> bool { - self.hardforks.pectra_blob_schedule_time.is_some_and(|t| timestamp >= t) - } - - /// Returns true if the timestamp marks the first pectra blob schedule block. - pub fn is_first_pectra_blob_schedule_block(&self, timestamp: u64) -> bool { - self.is_pectra_blob_schedule_active(timestamp) && - !self.is_pectra_blob_schedule_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Isthmus is active at the given timestamp. 
- pub fn is_isthmus_active(&self, timestamp: u64) -> bool { - self.hardforks.isthmus_time.is_some_and(|t| timestamp >= t) || - self.is_jovian_active(timestamp) - } - - /// Returns true if the timestamp marks the first Isthmus block. - pub fn is_first_isthmus_block(&self, timestamp: u64) -> bool { - self.is_isthmus_active(timestamp) && - !self.is_isthmus_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Jovian is active at the given timestamp. - pub fn is_jovian_active(&self, timestamp: u64) -> bool { - self.hardforks.jovian_time.is_some_and(|t| timestamp >= t) || - self.is_interop_active(timestamp) - } - - /// Returns true if the timestamp marks the first Jovian block. - pub fn is_first_jovian_block(&self, timestamp: u64) -> bool { - self.is_jovian_active(timestamp) && - !self.is_jovian_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if Interop is active at the given timestamp. - pub fn is_interop_active(&self, timestamp: u64) -> bool { - self.hardforks.interop_time.is_some_and(|t| timestamp >= t) - } - - /// Returns true if the timestamp marks the first Interop block. - pub fn is_first_interop_block(&self, timestamp: u64) -> bool { - self.is_interop_active(timestamp) && - !self.is_interop_active(timestamp.saturating_sub(self.block_time)) - } - - /// Returns true if a DA Challenge proxy Address is provided in the rollup config and the - /// address is not zero. - pub fn is_alt_da_enabled(&self) -> bool { - self.da_challenge_address.is_some_and(|addr| !addr.is_zero()) - } - - /// Returns the max sequencer drift for the given timestamp. - pub fn max_sequencer_drift(&self, timestamp: u64) -> u64 { - if self.is_fjord_active(timestamp) { - FJORD_MAX_SEQUENCER_DRIFT - } else { - self.max_sequencer_drift - } - } - - /// Returns the max rlp bytes per channel for the given timestamp. 
- pub fn max_rlp_bytes_per_channel(&self, timestamp: u64) -> u64 { - if self.is_fjord_active(timestamp) { - MAX_RLP_BYTES_PER_CHANNEL_FJORD - } else { - MAX_RLP_BYTES_PER_CHANNEL_BEDROCK - } - } - - /// Returns the channel timeout for the given timestamp. - pub fn channel_timeout(&self, timestamp: u64) -> u64 { - if self.is_granite_active(timestamp) { - self.granite_channel_timeout - } else { - self.channel_timeout - } - } - - /// Returns the [HardForkConfig] using [RollupConfig] timestamps. - #[deprecated(since = "0.1.0", note = "Use the `hardforks` field instead.")] - pub const fn hardfork_config(&self) -> HardForkConfig { - self.hardforks - } - - /// Computes a block number from a timestamp, relative to the L2 genesis time and the block - /// time. - /// - /// This function assumes that the timestamp is aligned with the block time, and uses floor - /// division in its computation. - pub const fn block_number_from_timestamp(&self, timestamp: u64) -> u64 { - timestamp.saturating_sub(self.genesis.l2_time).saturating_div(self.block_time) - } - - /// Checks the scalar value in Ecotone. - pub fn check_ecotone_l1_system_config_scalar(scalar: [u8; 32]) -> Result<(), &'static str> { - let version_byte = scalar[0]; - match version_byte { - 0 => { - if scalar[1..28] != [0; 27] { - return Err("Bedrock scalar padding not empty"); - } - Ok(()) - } - 1 => { - if scalar[1..24] != [0; 23] { - return Err("Invalid version 1 scalar padding"); - } - Ok(()) - } - _ => { - // ignore the event if it's an unknown scalar format - Err("Unrecognized scalar version") - } - } - } -} - -impl EthereumHardforks for RollupConfig { - fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { - if fork <= EthereumHardfork::Berlin { - // We assume that OP chains were launched with all forks before Berlin activated. - ForkCondition::Block(0) - } else if fork <= EthereumHardfork::Paris { - // Bedrock activates all hardforks up to Paris. 
- self.op_fork_activation(OpHardfork::Bedrock) - } else if fork <= EthereumHardfork::Shanghai { - // Canyon activates Shanghai hardfork. - self.op_fork_activation(OpHardfork::Canyon) - } else if fork <= EthereumHardfork::Cancun { - // Ecotone activates Cancun hardfork. - self.op_fork_activation(OpHardfork::Ecotone) - } else if fork <= EthereumHardfork::Prague { - // Isthmus activates Prague hardfork. - self.op_fork_activation(OpHardfork::Isthmus) - } else { - ForkCondition::Never - } - } -} - -impl OpHardforks for RollupConfig { - fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { - match fork { - OpHardfork::Bedrock => ForkCondition::Block(0), - OpHardfork::Regolith => self - .hardforks - .regolith_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Canyon)), - OpHardfork::Canyon => self - .hardforks - .canyon_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Ecotone)), - OpHardfork::Ecotone => self - .hardforks - .ecotone_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Fjord)), - OpHardfork::Fjord => self - .hardforks - .fjord_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Granite)), - OpHardfork::Granite => self - .hardforks - .granite_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Holocene)), - OpHardfork::Holocene => self - .hardforks - .holocene_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Isthmus)), - OpHardfork::Isthmus => self - .hardforks - .isthmus_time - .map(ForkCondition::Timestamp) - .unwrap_or(self.op_fork_activation(OpHardfork::Jovian)), - OpHardfork::Jovian => self - .hardforks - .jovian_time - .map(ForkCondition::Timestamp) - .unwrap_or(ForkCondition::Never), - OpHardfork::Interop => self - .hardforks - .interop_time - .map(ForkCondition::Timestamp) - .unwrap_or(ForkCondition::Never), - _ => 
ForkCondition::Never, - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - #[cfg(feature = "serde")] - use alloy_eips::BlockNumHash; - use alloy_primitives::address; - #[cfg(feature = "serde")] - use alloy_primitives::{U256, b256}; - - #[test] - #[cfg(feature = "arbitrary")] - fn test_arbitrary_rollup_config() { - use arbitrary::Arbitrary; - use rand::Rng; - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - RollupConfig::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); - } - - #[test] - #[cfg(feature = "revm")] - fn test_revm_spec_id() { - // By default, the spec ID should be BEDROCK. - let mut config = RollupConfig { - hardforks: HardForkConfig { regolith_time: Some(10), ..Default::default() }, - ..Default::default() - }; - assert_eq!(config.spec_id(0), op_revm::OpSpecId::BEDROCK); - assert_eq!(config.spec_id(10), op_revm::OpSpecId::REGOLITH); - config.hardforks.canyon_time = Some(20); - assert_eq!(config.spec_id(20), op_revm::OpSpecId::CANYON); - config.hardforks.ecotone_time = Some(30); - assert_eq!(config.spec_id(30), op_revm::OpSpecId::ECOTONE); - config.hardforks.fjord_time = Some(40); - assert_eq!(config.spec_id(40), op_revm::OpSpecId::FJORD); - config.hardforks.holocene_time = Some(50); - assert_eq!(config.spec_id(50), op_revm::OpSpecId::HOLOCENE); - config.hardforks.isthmus_time = Some(60); - assert_eq!(config.spec_id(60), op_revm::OpSpecId::ISTHMUS); - } - - #[test] - fn test_regolith_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_regolith_active(0)); - config.hardforks.regolith_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(!config.is_regolith_active(9)); - } - - #[test] - fn test_canyon_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_canyon_active(0)); - config.hardforks.canyon_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(!config.is_canyon_active(9)); - } - - #[test] - fn 
test_delta_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_delta_active(0)); - config.hardforks.delta_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(!config.is_delta_active(9)); - } - - #[test] - fn test_ecotone_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_ecotone_active(0)); - config.hardforks.ecotone_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(!config.is_ecotone_active(9)); - } - - #[test] - fn test_fjord_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_fjord_active(0)); - config.hardforks.fjord_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(!config.is_fjord_active(9)); - } - - #[test] - fn test_granite_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_granite_active(0)); - config.hardforks.granite_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(config.is_granite_active(10)); - assert!(!config.is_granite_active(9)); - } - - #[test] - fn test_holocene_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_holocene_active(0)); - config.hardforks.holocene_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(config.is_granite_active(10)); - 
assert!(config.is_holocene_active(10)); - assert!(!config.is_holocene_active(9)); - } - - #[test] - fn test_pectra_blob_schedule_active() { - let mut config = RollupConfig::default(); - config.hardforks.pectra_blob_schedule_time = Some(10); - // Pectra blob schedule is a unique fork, not included in the hierarchical ordering. Its - // activation does not imply the activation of any other forks. - assert!(!config.is_regolith_active(10)); - assert!(!config.is_canyon_active(10)); - assert!(!config.is_delta_active(10)); - assert!(!config.is_ecotone_active(10)); - assert!(!config.is_fjord_active(10)); - assert!(!config.is_granite_active(10)); - assert!(!config.is_holocene_active(0)); - assert!(config.is_pectra_blob_schedule_active(10)); - assert!(!config.is_pectra_blob_schedule_active(9)); - } - - #[test] - fn test_isthmus_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_isthmus_active(0)); - config.hardforks.isthmus_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(config.is_granite_active(10)); - assert!(config.is_holocene_active(10)); - assert!(!config.is_pectra_blob_schedule_active(10)); - assert!(config.is_isthmus_active(10)); - assert!(!config.is_isthmus_active(9)); - } - - #[test] - fn test_jovian_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_interop_active(0)); - config.hardforks.jovian_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(config.is_granite_active(10)); - assert!(config.is_holocene_active(10)); - assert!(!config.is_pectra_blob_schedule_active(10)); - assert!(config.is_isthmus_active(10)); - assert!(config.is_jovian_active(10)); - 
assert!(!config.is_jovian_active(9)); - } - - #[test] - fn test_interop_active() { - let mut config = RollupConfig::default(); - assert!(!config.is_interop_active(0)); - config.hardforks.interop_time = Some(10); - assert!(config.is_regolith_active(10)); - assert!(config.is_canyon_active(10)); - assert!(config.is_delta_active(10)); - assert!(config.is_ecotone_active(10)); - assert!(config.is_fjord_active(10)); - assert!(config.is_granite_active(10)); - assert!(config.is_holocene_active(10)); - assert!(!config.is_pectra_blob_schedule_active(10)); - assert!(config.is_isthmus_active(10)); - assert!(config.is_interop_active(10)); - assert!(!config.is_interop_active(9)); - } - - #[test] - fn test_is_first_fork_block() { - let cfg = RollupConfig { - hardforks: HardForkConfig { - regolith_time: Some(10), - canyon_time: Some(20), - delta_time: Some(30), - ecotone_time: Some(40), - fjord_time: Some(50), - granite_time: Some(60), - holocene_time: Some(70), - pectra_blob_schedule_time: Some(80), - isthmus_time: Some(90), - jovian_time: Some(100), - interop_time: Some(110), - }, - block_time: 2, - ..Default::default() - }; - - // Regolith - assert!(!cfg.is_first_regolith_block(8)); - assert!(cfg.is_first_regolith_block(10)); - assert!(!cfg.is_first_regolith_block(12)); - - // Canyon - assert!(!cfg.is_first_canyon_block(18)); - assert!(cfg.is_first_canyon_block(20)); - assert!(!cfg.is_first_canyon_block(22)); - - // Delta - assert!(!cfg.is_first_delta_block(28)); - assert!(cfg.is_first_delta_block(30)); - assert!(!cfg.is_first_delta_block(32)); - - // Ecotone - assert!(!cfg.is_first_ecotone_block(38)); - assert!(cfg.is_first_ecotone_block(40)); - assert!(!cfg.is_first_ecotone_block(42)); - - // Fjord - assert!(!cfg.is_first_fjord_block(48)); - assert!(cfg.is_first_fjord_block(50)); - assert!(!cfg.is_first_fjord_block(52)); - - // Granite - assert!(!cfg.is_first_granite_block(58)); - assert!(cfg.is_first_granite_block(60)); - assert!(!cfg.is_first_granite_block(62)); - - // 
Holocene - assert!(!cfg.is_first_holocene_block(68)); - assert!(cfg.is_first_holocene_block(70)); - assert!(!cfg.is_first_holocene_block(72)); - - // Pectra blob schedule - assert!(!cfg.is_first_pectra_blob_schedule_block(78)); - assert!(cfg.is_first_pectra_blob_schedule_block(80)); - assert!(!cfg.is_first_pectra_blob_schedule_block(82)); - - // Isthmus - assert!(!cfg.is_first_isthmus_block(88)); - assert!(cfg.is_first_isthmus_block(90)); - assert!(!cfg.is_first_isthmus_block(92)); - - // Jovian - assert!(!cfg.is_first_jovian_block(98)); - assert!(cfg.is_first_jovian_block(100)); - assert!(!cfg.is_first_jovian_block(102)); - - // Interop - assert!(!cfg.is_first_interop_block(108)); - assert!(cfg.is_first_interop_block(110)); - assert!(!cfg.is_first_interop_block(112)); - } - - #[test] - fn test_alt_da_enabled() { - let mut config = RollupConfig::default(); - assert!(!config.is_alt_da_enabled()); - config.da_challenge_address = Some(Address::ZERO); - assert!(!config.is_alt_da_enabled()); - config.da_challenge_address = Some(address!("0000000000000000000000000000000000000001")); - assert!(config.is_alt_da_enabled()); - } - - #[test] - fn test_granite_channel_timeout() { - let mut config = RollupConfig { - channel_timeout: 100, - hardforks: HardForkConfig { granite_time: Some(10), ..Default::default() }, - ..Default::default() - }; - assert_eq!(config.channel_timeout(0), 100); - assert_eq!(config.channel_timeout(10), GRANITE_CHANNEL_TIMEOUT); - config.hardforks.granite_time = None; - assert_eq!(config.channel_timeout(10), 100); - } - - #[test] - fn test_max_sequencer_drift() { - let mut config = RollupConfig { max_sequencer_drift: 100, ..Default::default() }; - assert_eq!(config.max_sequencer_drift(0), 100); - config.hardforks.fjord_time = Some(10); - assert_eq!(config.max_sequencer_drift(0), 100); - assert_eq!(config.max_sequencer_drift(10), FJORD_MAX_SEQUENCER_DRIFT); - } - - #[test] - #[cfg(feature = "serde")] - fn test_deserialize_reference_rollup_config() { - use 
crate::{OP_MAINNET_BASE_FEE_CONFIG, SystemConfig}; - - let raw: &str = r#" - { - "genesis": { - "l1": { - "hash": "0x481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54", - "number": 10 - }, - "l2": { - "hash": "0x88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631", - "number": 0 - }, - "l2_time": 1725557164, - "system_config": { - "batcherAddr": "0xc81f87a644b41e49b3221f41251f15c6cb00ce03", - "overhead": "0x0000000000000000000000000000000000000000000000000000000000000000", - "scalar": "0x00000000000000000000000000000000000000000000000000000000000f4240", - "gasLimit": 30000000, - "baseFeeScalar": 1234, - "blobBaseFeeScalar": 5678, - "eip1559Denominator": 10, - "eip1559Elasticity": 20, - "operatorFeeScalar": 30, - "operatorFeeConstant": 40, - "minBaseFee": 50, - "daFootprintGasScalar": 10 - } - }, - "block_time": 2, - "max_sequencer_drift": 600, - "seq_window_size": 3600, - "channel_timeout": 300, - "l1_chain_id": 3151908, - "l2_chain_id": 1337, - "regolith_time": 0, - "canyon_time": 0, - "delta_time": 0, - "ecotone_time": 0, - "fjord_time": 0, - "batch_inbox_address": "0xff00000000000000000000000000000000042069", - "deposit_contract_address": "0x08073dc48dde578137b8af042bcbc1c2491f1eb2", - "l1_system_config_address": "0x94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710", - "protocol_versions_address": "0x0000000000000000000000000000000000000000", - "chain_op_config": { - "eip1559Elasticity": 6, - "eip1559Denominator": 50, - "eip1559DenominatorCanyon": 250 - }, - "alt_da": null - } - "#; - - let expected = RollupConfig { - genesis: ChainGenesis { - l1: BlockNumHash { - hash: b256!("481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54"), - number: 10, - }, - l2: BlockNumHash { - hash: b256!("88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631"), - number: 0, - }, - l2_time: 1725557164, - system_config: Some(SystemConfig { - batcher_address: address!("c81f87a644b41e49b3221f41251f15c6cb00ce03"), - overhead: U256::ZERO, - 
scalar: U256::from(0xf4240), - gas_limit: 30_000_000, - base_fee_scalar: Some(1234), - blob_base_fee_scalar: Some(5678), - eip1559_denominator: Some(10), - eip1559_elasticity: Some(20), - operator_fee_scalar: Some(30), - operator_fee_constant: Some(40), - min_base_fee: Some(50), - da_footprint_gas_scalar: Some(10), - }), - }, - block_time: 2, - max_sequencer_drift: 600, - seq_window_size: 3600, - channel_timeout: 300, - granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, - l1_chain_id: 3151908, - l2_chain_id: Chain::from_id(1337), - hardforks: HardForkConfig { - regolith_time: Some(0), - canyon_time: Some(0), - delta_time: Some(0), - ecotone_time: Some(0), - fjord_time: Some(0), - ..Default::default() - }, - batch_inbox_address: address!("ff00000000000000000000000000000000042069"), - deposit_contract_address: address!("08073dc48dde578137b8af042bcbc1c2491f1eb2"), - l1_system_config_address: address!("94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710"), - protocol_versions_address: Address::ZERO, - superchain_config_address: None, - blobs_enabled_l1_timestamp: None, - da_challenge_address: None, - interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, - chain_op_config: OP_MAINNET_BASE_FEE_CONFIG, - alt_da_config: None, - }; - - let deserialized: RollupConfig = serde_json::from_str(raw).unwrap(); - assert_eq!(deserialized, expected); - } - - #[test] - fn test_rollup_config_unknown_field() { - let raw: &str = r#" - { - "genesis": { - "l1": { - "hash": "0x481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54", - "number": 10 - }, - "l2": { - "hash": "0x88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631", - "number": 0 - }, - "l2_time": 1725557164, - "system_config": { - "batcherAddr": "0xc81f87a644b41e49b3221f41251f15c6cb00ce03", - "overhead": "0x0000000000000000000000000000000000000000000000000000000000000000", - "scalar": "0x00000000000000000000000000000000000000000000000000000000000f4240", - "gasLimit": 30000000 - } - }, - "block_time": 
2, - "max_sequencer_drift": 600, - "seq_window_size": 3600, - "channel_timeout": 300, - "l1_chain_id": 3151908, - "l2_chain_id": 1337, - "regolith_time": 0, - "canyon_time": 0, - "delta_time": 0, - "ecotone_time": 0, - "fjord_time": 0, - "batch_inbox_address": "0xff00000000000000000000000000000000042069", - "deposit_contract_address": "0x08073dc48dde578137b8af042bcbc1c2491f1eb2", - "l1_system_config_address": "0x94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710", - "protocol_versions_address": "0x0000000000000000000000000000000000000000", - "chain_op_config": { - "eip1559_elasticity": 100, - "eip1559_denominator": 100, - "eip1559_denominator_canyon": 100 - }, - "unknown_field": "unknown" - } - "#; - - let err = serde_json::from_str::<RollupConfig>(raw).unwrap_err(); - assert_eq!(err.classify(), serde_json::error::Category::Data); - } - - #[test] - fn test_compute_block_number_from_time() { - let cfg = RollupConfig { - genesis: ChainGenesis { l2_time: 10, ..Default::default() }, - block_time: 2, - ..Default::default() - }; - - assert_eq!(cfg.block_number_from_timestamp(20), 5); - assert_eq!(cfg.block_number_from_timestamp(30), 10); - } -} diff --git a/kona/crates/protocol/genesis/src/system/config.rs b/kona/crates/protocol/genesis/src/system/config.rs deleted file mode 100644 index bbf94cb9559..00000000000 --- a/kona/crates/protocol/genesis/src/system/config.rs +++ /dev/null @@ -1,584 +0,0 @@ -//! Contains the [`SystemConfig`] type. - -use crate::{ - CONFIG_UPDATE_TOPIC, RollupConfig, SystemConfigLog, SystemConfigUpdateError, - SystemConfigUpdateKind, -}; -use alloy_consensus::{Eip658Value, Receipt}; -use alloy_primitives::{Address, B64, Log, U256}; - -/// System configuration. 
-#[derive(Debug, Copy, Clone, Default, Hash, Eq, PartialEq)] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[cfg_attr(feature = "serde", derive(serde::Serialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -#[cfg_attr(feature = "serde", serde(deny_unknown_fields))] -pub struct SystemConfig { - /// Batcher address - #[cfg_attr(feature = "serde", serde(rename = "batcherAddr"))] - pub batcher_address: Address, - /// Fee overhead value - #[cfg_attr(feature = "serde", serde(serialize_with = "serialize_u256_full"))] - pub overhead: U256, - /// Fee scalar value - #[cfg_attr(feature = "serde", serde(serialize_with = "serialize_u256_full"))] - pub scalar: U256, - /// Gas limit value - pub gas_limit: u64, - /// Base fee scalar value - pub base_fee_scalar: Option<u64>, - /// Blob base fee scalar value - pub blob_base_fee_scalar: Option<u64>, - /// EIP-1559 denominator - pub eip1559_denominator: Option<u32>, - /// EIP-1559 elasticity - pub eip1559_elasticity: Option<u32>, - /// The operator fee scalar (isthmus hardfork) - pub operator_fee_scalar: Option<u32>, - /// The operator fee constant (isthmus hardfork) - pub operator_fee_constant: Option<u64>, - /// Min base fee (jovian hardfork) - /// Note: according to the [spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/system-config.md#initialization), as long as the MinBaseFee is not - /// explicitly set, the default value (`0`) will be systematically applied. - pub min_base_fee: Option<u64>, - /// DA footprint gas scalar (Jovian hardfork) - /// Note: according to the [spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/system-config.md#initialization), as long as the DAFootprintGasScalar is not - /// explicitly set, the default value (`400`) will be systematically applied. - pub da_footprint_gas_scalar: Option<u16>, -} - -/// Custom EIP-1559 parameter decoding is needed here for holocene encoding. 
-/// -/// This is used by the Optimism monorepo [here][here]. -/// -/// [here]: https://github.com/ethereum-optimism/optimism/blob/cf28bffc7d880292794f53bb76bfc4df7898307b/op-service/eth/types.go#L519 -#[cfg(feature = "serde")] -impl<'a> serde::Deserialize<'a> for SystemConfig { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'a>, - { - use alloy_primitives::B256; - // An alias struct that is identical to `SystemConfig`. - // We use the alias to decode the eip1559 params as their u32 values. - #[derive(serde::Deserialize)] - #[serde(rename_all = "camelCase")] - #[serde(deny_unknown_fields)] - struct SystemConfigAlias { - #[serde(rename = "batcherAddress", alias = "batcherAddr")] - batcher_address: Address, - overhead: U256, - scalar: U256, - gas_limit: u64, - base_fee_scalar: Option<u64>, - blob_base_fee_scalar: Option<u64>, - eip1559_params: Option<B64>, - eip1559_denominator: Option<u32>, - eip1559_elasticity: Option<u32>, - operator_fee_params: Option<B256>, - operator_fee_scalar: Option<u32>, - operator_fee_constant: Option<u64>, - min_base_fee: Option<u64>, - da_footprint_gas_scalar: Option<u16>, - } - - let mut alias = SystemConfigAlias::deserialize(deserializer)?; - if let Some(params) = alias.eip1559_params { - alias.eip1559_denominator = - Some(u32::from_be_bytes(params.as_slice().get(0..4).unwrap().try_into().unwrap())); - alias.eip1559_elasticity = - Some(u32::from_be_bytes(params.as_slice().get(4..8).unwrap().try_into().unwrap())); - } - if let Some(params) = alias.operator_fee_params { - alias.operator_fee_scalar = Some(u32::from_be_bytes( - params.as_slice().get(20..24).unwrap().try_into().unwrap(), - )); - alias.operator_fee_constant = Some(u64::from_be_bytes( - params.as_slice().get(24..32).unwrap().try_into().unwrap(), - )); - } - - Ok(Self { - batcher_address: alias.batcher_address, - overhead: alias.overhead, - scalar: alias.scalar, - gas_limit: alias.gas_limit, - base_fee_scalar: 
alias.base_fee_scalar, - blob_base_fee_scalar: alias.blob_base_fee_scalar, - eip1559_denominator: alias.eip1559_denominator, - eip1559_elasticity: alias.eip1559_elasticity, - operator_fee_scalar: alias.operator_fee_scalar, - operator_fee_constant: alias.operator_fee_constant, - min_base_fee: alias.min_base_fee, - da_footprint_gas_scalar: alias.da_footprint_gas_scalar, - }) - } -} - -impl SystemConfig { - /// Filters all L1 receipts to find config updates and applies the config updates. - /// - /// Returns `true` if any config updates were applied, `false` otherwise. - pub fn update_with_receipts( - &mut self, - receipts: &[Receipt], - l1_system_config_address: Address, - ecotone_active: bool, - ) -> Result<bool, SystemConfigUpdateError> { - let mut updated = false; - for receipt in receipts { - if Eip658Value::Eip658(false) == receipt.status { - continue; - } - - receipt.logs.iter().try_for_each(|log| { - let topics = log.topics(); - if log.address == l1_system_config_address && - !topics.is_empty() && - topics[0] == CONFIG_UPDATE_TOPIC - { - // Safety: Error is bubbled up by the trailing `?` - self.process_config_update_log(log, ecotone_active)?; - updated = true; - } - Ok::<(), SystemConfigUpdateError>(()) - })?; - } - Ok(updated) - } - - /// Returns the eip1559 parameters from a [SystemConfig] encoded as a [B64]. - pub fn eip_1559_params( - &self, - rollup_config: &RollupConfig, - parent_timestamp: u64, - next_timestamp: u64, - ) -> Option<B64> { - let is_holocene = rollup_config.is_holocene_active(next_timestamp); - - // For the first holocene block, a zero'd out B64 is returned to signal the - // execution layer to use the canyon base fee parameters. Else, the system - // config's eip1559 parameters are encoded as a B64. 
- if is_holocene && !rollup_config.is_holocene_active(parent_timestamp) { - Some(B64::ZERO) - } else { - is_holocene.then_some(B64::from_slice( - &[ - self.eip1559_denominator.unwrap_or_default().to_be_bytes(), - self.eip1559_elasticity.unwrap_or_default().to_be_bytes(), - ] - .concat(), - )) - } - } - - /// Decodes an EVM log entry emitted by the system config contract and applies it as a - /// [SystemConfig] change. - /// - /// Parse log data for: - /// - /// ```text - /// event ConfigUpdate( - /// uint256 indexed version, - /// UpdateType indexed updateType, - /// bytes data - /// ); - /// ``` - fn process_config_update_log( - &mut self, - log: &Log, - ecotone_active: bool, - ) -> Result<SystemConfigUpdateKind, SystemConfigUpdateError> { - // Construct the system config log from the log. - let log = SystemConfigLog::new(log.clone(), ecotone_active); - - // Construct the update type from the log. - let update = log.build()?; - - // Apply the update to the system config. - update.apply(self); - - // Return the update type. - Ok(update.kind()) - } -} - -/// Compatibility helper function to serialize a [`U256`] as a [`B256`]. 
-/// -/// [`B256`]: alloy_primitives::B256 -#[cfg(feature = "serde")] -fn serialize_u256_full<S>(ts: &U256, ser: S) -> Result<S::Ok, S::Error> -where - S: serde::Serializer, -{ - use serde::Serialize; - - alloy_primitives::B256::from(ts.to_be_bytes::<32>()).serialize(ser) -} - -#[cfg(test)] -mod test { - use super::*; - use crate::{CONFIG_UPDATE_EVENT_VERSION_0, HardForkConfig}; - use alloc::vec; - use alloy_primitives::{B256, LogData, address, b256, hex}; - - #[test] - #[cfg(feature = "serde")] - fn test_system_config_da_footprint_gas_scalar() { - let raw = r#"{ - "batcherAddress": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", - "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", - "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", - "gasLimit": 30000000, - "eip1559Params": "0x000000ab000000cd", - "daFootprintGasScalar": 10 - }"#; - let system_config: SystemConfig = serde_json::from_str(raw).unwrap(); - assert_eq!(system_config.da_footprint_gas_scalar, Some(10), "da_footprint_gas_scalar"); - } - - #[test] - #[cfg(feature = "serde")] - fn test_system_config_eip1559_params() { - let raw = r#"{ - "batcherAddress": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", - "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", - "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", - "gasLimit": 30000000, - "eip1559Params": "0x000000ab000000cd" - }"#; - let system_config: SystemConfig = serde_json::from_str(raw).unwrap(); - assert_eq!(system_config.eip1559_denominator, Some(0xab_u32), "eip1559_denominator"); - assert_eq!(system_config.eip1559_elasticity, Some(0xcd_u32), "eip1559_elasticity"); - } - - #[test] - #[cfg(feature = "serde")] - fn test_system_config_serde() { - let raw = r#"{ - "batcherAddr": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", - "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", - "scalar": 
"0x00000000000000000000000000000000000000000000000000000000000a6fe0", - "gasLimit": 30000000 - }"#; - let expected = SystemConfig { - batcher_address: address!("6887246668a3b87F54DeB3b94Ba47a6f63F32985"), - overhead: U256::from(0xbc), - scalar: U256::from(0xa6fe0), - gas_limit: 30000000, - ..Default::default() - }; - - let deserialized: SystemConfig = serde_json::from_str(raw).unwrap(); - assert_eq!(deserialized, expected); - } - - #[test] - #[cfg(feature = "serde")] - fn test_system_config_unknown_field() { - let raw = r#"{ - "batcherAddr": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", - "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", - "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", - "gasLimit": 30000000, - "unknown": 0 - }"#; - let err = serde_json::from_str::<SystemConfig>(raw).unwrap_err(); - assert_eq!(err.classify(), serde_json::error::Category::Data); - } - - #[test] - #[cfg(feature = "arbitrary")] - fn test_arbitrary_system_config() { - use arbitrary::Arbitrary; - use rand::Rng; - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - SystemConfig::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); - } - - #[test] - fn test_eip_1559_params_from_system_config_none() { - let rollup_config = RollupConfig::default(); - let sys_config = SystemConfig::default(); - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), None); - } - - #[test] - fn test_eip_1559_params_from_system_config_some() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }; - let sys_config = SystemConfig { - eip1559_denominator: Some(1), - eip1559_elasticity: None, - ..Default::default() - }; - let expected = Some(B64::from_slice(&[1u32.to_be_bytes(), 0u32.to_be_bytes()].concat())); - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); - } - - #[test] - fn 
test_eip_1559_params_from_system_config() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }; - let sys_config = SystemConfig { - eip1559_denominator: Some(1), - eip1559_elasticity: Some(2), - ..Default::default() - }; - let expected = Some(B64::from_slice(&[1u32.to_be_bytes(), 2u32.to_be_bytes()].concat())); - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); - } - - #[test] - fn test_default_eip_1559_params_from_system_config() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, - ..Default::default() - }; - let sys_config = SystemConfig { - eip1559_denominator: None, - eip1559_elasticity: None, - ..Default::default() - }; - let expected = Some(B64::ZERO); - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); - } - - #[test] - fn test_default_eip_1559_params_from_system_config_pre_holocene() { - let rollup_config = RollupConfig::default(); - let sys_config = SystemConfig { - eip1559_denominator: Some(1), - eip1559_elasticity: Some(2), - ..Default::default() - }; - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), None); - } - - #[test] - fn test_default_eip_1559_params_first_block_holocene() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { holocene_time: Some(2), ..Default::default() }, - ..Default::default() - }; - let sys_config = SystemConfig { - eip1559_denominator: Some(1), - eip1559_elasticity: Some(2), - ..Default::default() - }; - assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 2), Some(B64::ZERO)); - } - - #[test] - fn test_system_config_update_with_receipts_unchanged() { - let mut system_config = SystemConfig::default(); - let receipts = vec![]; - let l1_system_config_address = Address::ZERO; - let ecotone_active = false; - - let updated = system_config - .update_with_receipts(&receipts, l1_system_config_address, 
ecotone_active) - .unwrap(); - assert!(!updated); - - assert_eq!(system_config, SystemConfig::default()); - } - - #[test] - fn test_system_config_update_with_receipts_batcher_address() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000000"); - let mut system_config = SystemConfig::default(); - let l1_system_config_address = Address::ZERO; - let ecotone_active = false; - - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() - ) - }; - - let receipt = Receipt { - logs: vec![update_log], - status: Eip658Value::Eip658(true), - cumulative_gas_used: 0, - }; - - let updated = system_config - .update_with_receipts(&[receipt], l1_system_config_address, ecotone_active) - .unwrap(); - assert!(updated); - - assert_eq!( - system_config.batcher_address, - address!("000000000000000000000000000000000000bEEF"), - ); - } - - #[test] - fn test_system_config_update_batcher_log() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000000"); - - let mut system_config = SystemConfig::default(); - - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() - ) - }; - - // Update the batcher address. 
- system_config.process_config_update_log(&update_log, false).unwrap(); - - assert_eq!( - system_config.batcher_address, - address!("000000000000000000000000000000000000bEEF") - ); - } - - #[test] - fn test_system_config_update_gas_config_log() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000001"); - - let mut system_config = SystemConfig::default(); - - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000babe000000000000000000000000000000000000000000000000000000000000beef").into() - ) - }; - - // Update the batcher address. - system_config.process_config_update_log(&update_log, false).unwrap(); - - assert_eq!(system_config.overhead, U256::from(0xbabe)); - assert_eq!(system_config.scalar, U256::from(0xbeef)); - } - - #[test] - fn test_system_config_update_gas_config_log_ecotone() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000001"); - - let mut system_config = SystemConfig::default(); - - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000babe000000000000000000000000000000000000000000000000000000000000beef").into() - ) - }; - - // Update the gas limit. 
- system_config.process_config_update_log(&update_log, true).unwrap(); - - assert_eq!(system_config.overhead, U256::from(0)); - assert_eq!(system_config.scalar, U256::from(0xbeef)); - } - - #[test] - fn test_system_config_update_gas_limit_log() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000002"); - - let mut system_config = SystemConfig::default(); - - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() - ) - }; - - // Update the gas limit. - system_config.process_config_update_log(&update_log, false).unwrap(); - - assert_eq!(system_config.gas_limit, 0xbeef_u64); - } - - #[test] - fn test_system_config_update_eip1559_params_log() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000004"); - - let mut system_config = SystemConfig::default(); - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000babe0000beef").into() - ) - }; - - // Update the EIP-1559 parameters. 
- system_config.process_config_update_log(&update_log, false).unwrap(); - - assert_eq!(system_config.eip1559_denominator, Some(0xbabe_u32)); - assert_eq!(system_config.eip1559_elasticity, Some(0xbeef_u32)); - } - - #[test] - fn test_system_config_update_operator_fee_log() { - const UPDATE_TYPE: B256 = - b256!("0000000000000000000000000000000000000000000000000000000000000005"); - - let mut system_config = SystemConfig::default(); - let update_log = Log { - address: Address::ZERO, - data: LogData::new_unchecked( - vec![ - CONFIG_UPDATE_TOPIC, - CONFIG_UPDATE_EVENT_VERSION_0, - UPDATE_TYPE, - ], - hex!("0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000babe000000000000beef").into() - ) - }; - - // Update the operator fee. - system_config.process_config_update_log(&update_log, false).unwrap(); - - assert_eq!(system_config.operator_fee_scalar, Some(0xbabe_u32)); - assert_eq!(system_config.operator_fee_constant, Some(0xbeef_u64)); - } -} diff --git a/kona/crates/protocol/genesis/src/system/errors.rs b/kona/crates/protocol/genesis/src/system/errors.rs deleted file mode 100644 index c7992d6be0d..00000000000 --- a/kona/crates/protocol/genesis/src/system/errors.rs +++ /dev/null @@ -1,257 +0,0 @@ -//! Contains error types for system config updates. - -use alloy_primitives::B256; -use derive_more::From; - -/// An error for processing the [crate::SystemConfig] update log. -#[derive(Debug, From, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum SystemConfigUpdateError { - /// An error occurred while processing the update log. - #[error("Log processing error: {0}")] - LogProcessing(LogProcessingError), - /// A batcher update error. - #[error("Batcher update error: {0}")] - Batcher(BatcherUpdateError), - /// A gas config update error. 
- #[error("Gas config update error: {0}")] - GasConfig(GasConfigUpdateError), - /// A gas limit update error. - #[error("Gas limit update error: {0}")] - GasLimit(GasLimitUpdateError), - /// An EIP-1559 parameter update error. - #[error("EIP-1559 parameter update error: {0}")] - Eip1559(EIP1559UpdateError), - /// An operator fee parameter update error. - #[error("Operator fee parameter update error: {0}")] - OperatorFee(OperatorFeeUpdateError), - /// An unsafe block signer update error. - #[error("Unsafe block signer update error: {0}")] - UnsafeBlockSigner(UnsafeBlockSignerUpdateError), - /// A min base fee parameter update error. - #[error("Min base fee parameter update error: {0}")] - MinBaseFee(MinBaseFeeUpdateError), - /// A da footprint gas scalar update error. - #[error("DA footprint gas scalar update error: {0}")] - DaFootprintGasScalar(DaFootprintGasScalarUpdateError), -} - -/// An error occurred while processing the update log. -#[derive(Debug, From, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum LogProcessingError { - /// Received an incorrect number of log topics. - #[error("Invalid config update log: invalid topic length: {0}")] - InvalidTopicLen(usize), - /// The log topic is invalid. - #[error("Invalid config update log: invalid topic")] - InvalidTopic, - /// The config update log version is unsupported. - #[error("Invalid config update log: unsupported version: {0}")] - UnsupportedVersion(B256), - /// Failed to decode the update type from the config update log. - #[error("Failed to decode config update log: update type")] - UpdateTypeDecodingError, - /// An invalid system config update type. - #[error("Invalid system config update type: {0}")] - InvalidSystemConfigUpdateType(u64), -} - -/// An error for updating the batcher address on the [crate::SystemConfig]. 
-#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum BatcherUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the batcher update log. - #[error("Failed to decode batcher update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the batcher update log. - #[error("Failed to decode batcher update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the batcher address argument from the batcher update log. - #[error("Failed to decode batcher update log: batcher address")] - BatcherAddressDecodingError, -} - -/// An error for updating the unsafe block signer address on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum UnsafeBlockSignerUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the update log. - #[error("Failed to decode unsafe block signer update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the update log. - #[error("Failed to decode unsafe block signer update log: data length")] - LengthDecodingError, - /// The data length is invalid. 
- #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the unsafe block signer address argument from the update log. - #[error("Failed to decode unsafe block signer update log: unsafe block signer address")] - UnsafeBlockSignerAddressDecodingError, -} - -/// An error for updating the gas config on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum GasConfigUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the gas config update log. - #[error("Failed to decode gas config update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the gas config update log. - #[error("Failed to decode gas config update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the overhead argument from the gas config update log. - #[error("Failed to decode gas config update log: overhead")] - OverheadDecodingError, - /// Failed to decode the scalar argument from the gas config update log. - #[error("Failed to decode gas config update log: scalar")] - ScalarDecodingError, -} - -/// An error for updating the min base fee on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum MinBaseFeeUpdateError { - /// Invalid data length. 
- #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the min base fee update log. - #[error("Failed to decode gas limit update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the min base fee update log. - #[error("Failed to decode gas limit update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the min base fee argument from the min base fee update log. - #[error("Failed to decode min base fee update log: min base fee")] - MinBaseFeeDecodingError, -} - -/// An error for updating the da footprint gas scalar on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum DaFootprintGasScalarUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the min base fee update log. - #[error("Failed to decode gas limit update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the min base fee update log. - #[error("Failed to decode gas limit update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the da footprint gas scalar argument from the da footprint gas scalar - /// update log. 
- #[error("Failed to decode da footprint gas scalar update log: da footprint gas scalar")] - DaFootprintGasScalarDecodingError, -} - -/// An error for updating the gas limit on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum GasLimitUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the gas limit update log. - #[error("Failed to decode gas limit update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the gas limit update log. - #[error("Failed to decode gas limit update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the gas limit argument from the gas limit update log. - #[error("Failed to decode gas limit update log: gas limit")] - GasLimitDecodingError, -} - -/// An error for updating the EIP-1559 parameters on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum EIP1559UpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the eip 1559 update log. - #[error("Failed to decode eip1559 parameter update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. 
- #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the eip 1559 update log. - #[error("Failed to decode eip1559 parameter update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the eip1559 params argument from the eip 1559 update log. - #[error("Failed to decode eip1559 parameter update log: eip1559 parameters")] - EIP1559DecodingError, -} - -/// An error for updating the operator fee parameters on the [crate::SystemConfig]. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum OperatorFeeUpdateError { - /// Invalid data length. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLen(usize), - /// Failed to decode the data pointer argument from the operator fee update log. - #[error("Failed to decode operator fee parameter update log: data pointer")] - PointerDecodingError, - /// The data pointer is invalid. - #[error("Invalid config update log: invalid data pointer: {0}")] - InvalidDataPointer(u64), - /// Failed to decode the data length argument from the operator fee update log. - #[error("Failed to decode operator fee parameter update log: data length")] - LengthDecodingError, - /// The data length is invalid. - #[error("Invalid config update log: invalid data length: {0}")] - InvalidDataLength(u64), - /// Failed to decode the scalar argument from the update log. - #[error("Failed to decode operator fee parameter update log: scalar")] - ScalarDecodingError, - /// Failed to decode the constant argument from the update log. 
- #[error("Failed to decode operator fee parameter update log: constant")] - ConstantDecodingError, -} diff --git a/kona/crates/protocol/genesis/src/updates/mod.rs b/kona/crates/protocol/genesis/src/updates/mod.rs deleted file mode 100644 index 1b2dbdbbc56..00000000000 --- a/kona/crates/protocol/genesis/src/updates/mod.rs +++ /dev/null @@ -1,27 +0,0 @@ -//! Contains all updates to the [crate::SystemConfig] type. - -mod common; - -mod batcher; -pub use batcher::BatcherUpdate; - -mod signer; -pub use signer::UnsafeBlockSignerUpdate; - -mod gas_config; -pub use gas_config::GasConfigUpdate; - -mod gas_limit; -pub use gas_limit::GasLimitUpdate; - -mod eip1559; -pub use eip1559::Eip1559Update; - -mod operator_fee; -pub use operator_fee::OperatorFeeUpdate; - -mod min_base_fee; -pub use min_base_fee::MinBaseFeeUpdate; - -mod da_footprint_gas_scalar; -pub use da_footprint_gas_scalar::DaFootprintGasScalarUpdate; diff --git a/kona/crates/protocol/hardforks/Cargo.toml b/kona/crates/protocol/hardforks/Cargo.toml deleted file mode 100644 index 4547c63b004..00000000000 --- a/kona/crates/protocol/hardforks/Cargo.toml +++ /dev/null @@ -1,42 +0,0 @@ -[package] -name = "kona-hardforks" -version = "0.4.5" -description = "Consensus hardfork types for the OP Stack" - -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-protocol.workspace = true - -# Alloy -alloy-eips.workspace = true -alloy-primitives = { workspace = true, features = ["rlp"] } - -# OP Alloy -op-alloy-consensus.workspace = true - -[dev-dependencies] -alloy-primitives = { workspace = true, features = ["rand", "arbitrary"] } -revm.workspace = true -op-revm.workspace = true - -[features] -default = [] -std = [ - "alloy-eips/std", - "alloy-primitives/std", - "kona-protocol/std", - "op-alloy-consensus/std", -] -k256 = [ 
"alloy-primitives/k256", "op-alloy-consensus/k256" ] -kzg = [ "alloy-eips/kzg", "op-alloy-consensus/kzg", "std" ] diff --git a/kona/crates/protocol/hardforks/README.md b/kona/crates/protocol/hardforks/README.md deleted file mode 100644 index f66f0a1a116..00000000000 --- a/kona/crates/protocol/hardforks/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# `kona-hardforks` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-hardforks"><img src="https://img.shields.io/crates/v/kona-hardforks.svg" alt="kona-hardforks crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -Consensus layer hardfork types for the OP Stack including network upgrade transactions. - -### Provenance - -This code was ported [op-alloy] as part of `kona` monorepo migrations. - -[op-alloy]: https://github.com/alloy-rs/op-alloy diff --git a/kona/crates/protocol/hardforks/src/ecotone.rs b/kona/crates/protocol/hardforks/src/ecotone.rs deleted file mode 100644 index e2b255ac66b..00000000000 --- a/kona/crates/protocol/hardforks/src/ecotone.rs +++ /dev/null @@ -1,307 +0,0 @@ -//! Module containing a [`TxDeposit`] builder for the Ecotone network upgrade transactions. - -use alloc::{string::String, vec::Vec}; -use alloy_eips::eip2718::Encodable2718; -use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex}; -use kona_protocol::Predeploys; -use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; - -use crate::Hardfork; - -/// The Ecotone network upgrade transactions. 
-#[derive(Debug, Default, Clone, Copy)] -pub struct Ecotone; - -impl Ecotone { - /// The Gas Price Oracle Address - /// This is computed by using go-ethereum's `crypto.CreateAddress` function, - /// with the Gas Price Oracle Deployer Address and nonce 0. - pub const GAS_PRICE_ORACLE: Address = address!("b528d11cc114e026f138fe568744c6d45ce6da7a"); - - /// The depositor account address. - pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); - - /// The Enable Ecotone Input Method 4Byte Signature - pub const ENABLE_ECOTONE_INPUT: [u8; 4] = hex!("22b90ab3"); - - /// L1 Block Deployer Address - pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000000"); - - /// The Gas Price Oracle Deployer Address - pub const GAS_PRICE_ORACLE_DEPLOYER: Address = - address!("4210000000000000000000000000000000000001"); - - /// The new L1 Block Address - /// This is computed by using go-ethereum's `crypto.CreateAddress` function, - /// with the L1 Block Deployer Address and nonce 0. - pub const NEW_L1_BLOCK: Address = address!("07dbe8500fc591d1852b76fee44d5a05e13097ff"); - - /// EIP-4788 From Address - pub const EIP4788_FROM: Address = address!("0B799C86a49DEeb90402691F1041aa3AF2d3C875"); - - /// The L1 Block Deployer Code Hash - /// See: <https://specs.optimism.io/protocol/ecotone/derivation.html#l1block-deployment> - pub const L1_BLOCK_DEPLOYER_CODE_HASH: B256 = alloy_primitives::b256!( - "0xc88a313aa75dc4fbf0b6850d9f9ae41e04243b7008cf3eadb29256d4a71c1dfd" - ); - /// The Gas Price Oracle Code Hash - /// See: <https://specs.optimism.io/protocol/ecotone/derivation.html#gaspriceoracle-deployment> - pub const GAS_PRICE_ORACLE_CODE_HASH: B256 = alloy_primitives::b256!( - "0x8b71360ea773b4cfaf1ae6d2bd15464a4e1e2e360f786e475f63aeaed8da0ae5" - ); - - /// Returns the source hash for the deployment of the l1 block contract. 
- pub fn deploy_l1_block_source() -> B256 { - UpgradeDepositSource { intent: String::from("Ecotone: L1 Block Deployment") }.source_hash() - } - - /// Returns the source hash for the deployment of the gas price oracle contract. - pub fn deploy_gas_price_oracle_source() -> B256 { - UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Deployment") } - .source_hash() - } - - /// Returns the source hash for the update of the l1 block proxy. - pub fn update_l1_block_source() -> B256 { - UpgradeDepositSource { intent: String::from("Ecotone: L1 Block Proxy Update") } - .source_hash() - } - - /// Returns the source hash for the update of the gas price oracle proxy. - pub fn update_gas_price_oracle_source() -> B256 { - UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Proxy Update") } - .source_hash() - } - - /// Returns the source hash for the Ecotone Beacon Block Roots Contract deployment. - pub fn beacon_roots_source() -> B256 { - UpgradeDepositSource { - intent: String::from("Ecotone: beacon block roots contract deployment"), - } - .source_hash() - } - - /// Returns the source hash for the Ecotone Gas Price Oracle activation. - pub fn enable_ecotone_source() -> B256 { - UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Set Ecotone") } - .source_hash() - } - - /// Returns the EIP-4788 creation data. - pub fn eip4788_creation_data() -> Bytes { - hex::decode(include_str!("./bytecode/eip4788_ecotone.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the raw bytecode for the L1 Block deployment. - pub fn l1_block_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/l1_block_ecotone.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the gas price oracle deployment bytecode. 
- pub fn ecotone_gas_price_oracle_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/gpo_ecotone.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the list of [`TxDeposit`]s for the Ecotone network upgrade. - pub fn deposits() -> impl Iterator<Item = TxDeposit> { - ([ - // Deploy the L1 Block contract for Ecotone. - // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#l1block-deployment> - TxDeposit { - source_hash: Self::deploy_l1_block_source(), - from: Self::L1_BLOCK_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 375_000, - is_system_transaction: false, - input: Self::l1_block_deployment_bytecode(), - }, - // Deploy the Gas Price Oracle contract for Ecotone. - // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#gaspriceoracle-deployment> - TxDeposit { - source_hash: Self::deploy_gas_price_oracle_source(), - from: Self::GAS_PRICE_ORACLE_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 1_000_000, - is_system_transaction: false, - input: Self::ecotone_gas_price_oracle_deployment_bytecode(), - }, - // Updates the l1 block proxy to point to the new L1 Block contract. - // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#l1block-proxy-update> - TxDeposit { - source_hash: Self::update_l1_block_source(), - from: Address::ZERO, - to: TxKind::Call(Predeploys::L1_BLOCK_INFO), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::NEW_L1_BLOCK), - }, - // Updates the gas price oracle proxy to point to the new Gas Price Oracle contract. 
- // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#gaspriceoracle-proxy-update> - TxDeposit { - source_hash: Self::update_gas_price_oracle_source(), - from: Address::ZERO, - to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::GAS_PRICE_ORACLE), - }, - // Enables the Ecotone Gas Price Oracle. - // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#gaspriceoracle-enable-ecotone> - TxDeposit { - source_hash: Self::enable_ecotone_source(), - from: Self::DEPOSITOR_ACCOUNT, - to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 80_000, - is_system_transaction: false, - input: Self::ENABLE_ECOTONE_INPUT.into(), - }, - // Deploys the beacon block roots contract. - // See: <https://specs.optimism.io/protocol/ecotone/derivation.html#beacon-block-roots-contract-deployment-eip-4788> - TxDeposit { - source_hash: Self::beacon_roots_source(), - from: Self::EIP4788_FROM, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 250_000, - is_system_transaction: false, - input: Self::eip4788_creation_data(), - }, - ]) - .into_iter() - } -} - -impl Hardfork for Ecotone { - /// Constructs the Ecotone network upgrade transactions. 
- fn txs(&self) -> impl Iterator<Item = Bytes> + '_ { - Self::deposits().map(|tx| { - let mut encoded = Vec::new(); - tx.encode_2718(&mut encoded); - Bytes::from(encoded) - }) - } -} - -#[cfg(test)] -mod tests { - use crate::test_utils::check_deployment_code; - - use super::*; - use alloc::vec; - - #[test] - fn test_deploy_l1_block_source() { - assert_eq!( - Ecotone::deploy_l1_block_source(), - hex!("877a6077205782ea15a6dc8699fa5ebcec5e0f4389f09cb8eda09488231346f8") - ); - } - #[test] - fn test_verify_ecotone_l1_deployment_code_hash() { - let txs = Ecotone::deposits().collect::<Vec<_>>(); - - check_deployment_code( - txs[0].clone(), - Ecotone::NEW_L1_BLOCK, - Ecotone::L1_BLOCK_DEPLOYER_CODE_HASH, - ); - } - - #[test] - fn test_verify_ecotone_gas_price_oracle_deployment_code_hash() { - let txs = Ecotone::deposits().collect::<Vec<_>>(); - - check_deployment_code( - txs[1].clone(), - Ecotone::GAS_PRICE_ORACLE, - Ecotone::GAS_PRICE_ORACLE_CODE_HASH, - ); - } - - #[test] - fn test_deploy_gas_price_oracle_source() { - assert_eq!( - Ecotone::deploy_gas_price_oracle_source(), - hex!("a312b4510adf943510f05fcc8f15f86995a5066bd83ce11384688ae20e6ecf42") - ); - } - - #[test] - fn test_update_l1_block_source() { - assert_eq!( - Ecotone::update_l1_block_source(), - hex!("18acb38c5ff1c238a7460ebc1b421fa49ec4874bdf1e0a530d234104e5e67dbc") - ); - } - - #[test] - fn test_update_gas_price_oracle_source() { - assert_eq!( - Ecotone::update_gas_price_oracle_source(), - hex!("ee4f9385eceef498af0be7ec5862229f426dec41c8d42397c7257a5117d9230a") - ); - } - - #[test] - fn test_enable_ecotone_source() { - assert_eq!( - Ecotone::enable_ecotone_source(), - hex!("0c1cb38e99dbc9cbfab3bb80863380b0905290b37eb3d6ab18dc01c1f3e75f93") - ); - } - - #[test] - fn test_beacon_block_roots_source() { - assert_eq!( - Ecotone::beacon_roots_source(), - hex!("69b763c48478b9dc2f65ada09b3d92133ec592ea715ec65ad6e7f3dc519dc00c") - ); - } - - #[test] - fn test_ecotone_txs_encoded() { - let ecotone_upgrade_tx = 
Ecotone.txs().collect::<Vec<_>>(); - assert_eq!(ecotone_upgrade_tx.len(), 6); - - let expected_txs: Vec<Bytes> = vec![ - hex::decode(include_str!("./bytecode/ecotone_tx_0.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/ecotone_tx_1.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/ecotone_tx_2.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/ecotone_tx_3.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/ecotone_tx_4.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/ecotone_tx_5.hex").replace("\n", "")) - .unwrap() - .into(), - ]; - for (i, expected) in expected_txs.iter().enumerate() { - assert_eq!(ecotone_upgrade_tx[i], *expected); - } - } -} diff --git a/kona/crates/protocol/hardforks/src/interop.rs b/kona/crates/protocol/hardforks/src/interop.rs deleted file mode 100644 index 23ff5342150..00000000000 --- a/kona/crates/protocol/hardforks/src/interop.rs +++ /dev/null @@ -1,223 +0,0 @@ -//! Module containing a [`TxDeposit`] builder for the Interop network upgrade transactions. -//! -//! Interop network upgrade transactions are defined in the [OP Stack Specs][specs]. -//! -//! [specs]: https://specs.optimism.io/interop/derivation.html#network-upgrade-transactions - -use alloc::string::String; -use alloy_eips::Encodable2718; -use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, b256, hex}; -use kona_protocol::Predeploys; -use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; - -use crate::Hardfork; - -/// The Interop network upgrade transactions. -#[derive(Debug, Default, Clone, Copy)] -pub struct Interop; - -impl Interop { - /// The deployer of the `CrossL2Inbox` contract. - pub const CROSS_L2_INBOX_DEPLOYER: Address = - address!("0x4220000000000000000000000000000000000000"); - - /// The deployer of the `L2ToL2CrossDomainMessenger` contract. 
- pub const L2_TO_L2_XDM_DEPLOYER: Address = - address!("0x4220000000000000000000000000000000000001"); - - /// The deployed address of the `CrossL2Inbox` implementation contract. - pub const NEW_CROSS_L2_INBOX_IMPL: Address = - address!("0x691300f512e48B463C2617b34Eef1A9f82EE7dBf"); - - /// The code hash of the deployed `CrossL2Inbox` implementation contract. - pub const CROSS_L2_INBOX_IMPL_CODE_HASH: B256 = - b256!("0x0e7d028dd71bac22d1fb28966043c8d35c3232c78b7fb99fd1db112b5b60d9dd"); - - /// The deployment address of the `L2ToL2CrossDomainMessenger` implementation contract. - pub const NEW_L2_TO_L2_XDM_IMPL: Address = - address!("0x0D0eDd0ebd0e94d218670a8De867Eb5C4d37cadD"); - - /// The code hash of the deployed `L2ToL2CrossDomainMessenger` implementation contract. - pub const L2_TO_L2_XDM_IMPL_CODE_HASH: B256 = - b256!("0x458925c90ec70736600bef3d6529643a0e7a0a848e62626d61314c057b4a71a9"); - - /// Returns the source hash for the `CrossL2Inbox` contract deployment transaction. - pub fn deploy_cross_l2_inbox_source() -> B256 { - UpgradeDepositSource { intent: String::from("Interop: CrossL2Inbox Deployment") } - .source_hash() - } - - /// Returns the source hash for the `CrossL2Inbox` proxy upgrade transaction. - pub fn upgrade_cross_l2_inbox_proxy_source() -> B256 { - UpgradeDepositSource { intent: String::from("Interop: CrossL2Inbox Proxy Update") } - .source_hash() - } - - /// Returns the source hash for the `L2ToL2CrossDomainMessenger` deployment transaction. - pub fn deploy_l2_to_l2_xdm_source() -> B256 { - UpgradeDepositSource { - intent: String::from("Interop: L2ToL2CrossDomainMessenger Deployment"), - } - .source_hash() - } - - /// Returns the source hash for the `L2ToL2CrossDomainMessenger` proxy upgrade transaction. - pub fn upgrade_l2_to_l2_xdm_proxy_source() -> B256 { - UpgradeDepositSource { - intent: String::from("Interop: L2ToL2CrossDomainMessenger Proxy Update"), - } - .source_hash() - } - - /// Returns the `CrossL2Inbox` deployment bytecode. 
- pub fn cross_l2_inbox_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/crossl2inbox_interop.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the `L2ToL2CrossDomainMessenger` proxy upgrade bytecode. - pub fn l2_to_l2_xdm_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/l2tol2_xdm_interop.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the list of [`TxDeposit`]s for the network upgrade. - pub fn deposits() -> impl Iterator<Item = TxDeposit> { - ([ - TxDeposit { - source_hash: Self::deploy_cross_l2_inbox_source(), - from: Self::CROSS_L2_INBOX_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 420_000, - is_system_transaction: false, - input: Self::cross_l2_inbox_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::upgrade_cross_l2_inbox_proxy_source(), - from: Address::ZERO, - to: TxKind::Call(Predeploys::CROSS_L2_INBOX), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::NEW_CROSS_L2_INBOX_IMPL), - }, - TxDeposit { - source_hash: Self::deploy_l2_to_l2_xdm_source(), - from: Self::L2_TO_L2_XDM_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 1_100_000, - is_system_transaction: false, - input: Self::l2_to_l2_xdm_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::upgrade_l2_to_l2_xdm_proxy_source(), - from: Address::ZERO, - to: TxKind::Call(Predeploys::L2_TO_L2_XDM), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::NEW_L2_TO_L2_XDM_IMPL), - }, - ]) - .into_iter() - } -} - -impl Hardfork for Interop { - /// Constructs the network upgrade transactions. 
- fn txs(&self) -> impl Iterator<Item = Bytes> { - Self::deposits().map(|tx| tx.encoded_2718().into()) - } -} - -#[cfg(test)] -mod test { - use alloc::{vec, vec::Vec}; - - use super::*; - use crate::test_utils::check_deployment_code; - - #[test] - fn test_deploy_cross_l2_inbox_source() { - assert_eq!( - Interop::deploy_cross_l2_inbox_source(), - b256!("0x6e5e214f73143df8fe6f6054a3ed7eb472d373376458a9c8aecdf23475beb616") - ); - } - - #[test] - fn test_upgrade_cross_l2_inbox_proxy_source() { - assert_eq!( - Interop::upgrade_cross_l2_inbox_proxy_source(), - b256!("0x88c6b48354c367125a59792a93a7b60ad7cd66e516157dbba16558c68a46d3cb") - ); - } - - #[test] - fn test_deploy_l2_to_l2_xdm_source() { - assert_eq!( - Interop::deploy_l2_to_l2_xdm_source(), - b256!("0xf5484697c7a9a791db32a3bf0763bf2ba686c77ae7d4c0a5ee8c222a92a8dcc2") - ); - } - - #[test] - fn test_upgrade_l2_to_l2_xdm_proxy_source() { - assert_eq!( - Interop::upgrade_l2_to_l2_xdm_proxy_source(), - b256!("0xe54b4d06bbcc857f41ae00e89d820339ac5ce0034aac722c817b2873e03a7e68") - ); - } - - #[test] - fn test_deploy_cross_l2_inbox_address_and_code() { - let txs = Interop::deposits().collect::<Vec<_>>(); - check_deployment_code( - txs[0].clone(), - Interop::NEW_CROSS_L2_INBOX_IMPL, - Interop::CROSS_L2_INBOX_IMPL_CODE_HASH, - ); - } - - #[test] - fn test_deploy_l2_to_l2_xdm_address_and_code() { - let txs = Interop::deposits().collect::<Vec<_>>(); - check_deployment_code( - txs[2].clone(), - Interop::NEW_L2_TO_L2_XDM_IMPL, - Interop::L2_TO_L2_XDM_IMPL_CODE_HASH, - ); - } - - #[test] - fn test_interop_txs_encoded() { - let interop_upgrade_tx = Interop.txs().collect::<Vec<_>>(); - assert_eq!(interop_upgrade_tx.len(), 4); - - let expected_txs: Vec<Bytes> = vec![ - hex::decode(include_str!("./bytecode/interop_tx_0.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/interop_tx_1.hex").replace("\n", "")) - .unwrap() - .into(), - 
hex::decode(include_str!("./bytecode/interop_tx_2.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/interop_tx_3.hex").replace("\n", "")) - .unwrap() - .into(), - ]; - for (i, expected) in expected_txs.iter().enumerate() { - assert_eq!(interop_upgrade_tx[i], *expected); - } - } -} diff --git a/kona/crates/protocol/hardforks/src/isthmus.rs b/kona/crates/protocol/hardforks/src/isthmus.rs deleted file mode 100644 index efdfa84a068..00000000000 --- a/kona/crates/protocol/hardforks/src/isthmus.rs +++ /dev/null @@ -1,361 +0,0 @@ -//! Module containing a [`TxDeposit`] builder for the Isthmus network upgrade transactions. -//! -//! Isthmus network upgrade transactions are defined in the [OP Stack Specs][specs]. -//! -//! [specs]: https://specs.optimism.io/protocol/isthmus/derivation.html#network-upgrade-automation-transactions - -use alloc::{string::String, vec::Vec}; -use alloy_eips::eip2718::Encodable2718; -use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex}; -use kona_protocol::Predeploys; -use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; - -use crate::Hardfork; - -/// The Isthmus network upgrade transactions. -#[derive(Debug, Default, Clone, Copy)] -pub struct Isthmus; - -impl Isthmus { - /// The depositor account address. - pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); - - /// The Enable Isthmus Input Method 4Byte Signature. - /// - /// Derive this by running `cast sig "setIsthmus()"`. 
- pub const ENABLE_ISTHMUS_INPUT: [u8; 4] = hex!("291b0383"); - - /// EIP-2935 From Address - pub const EIP2935_FROM: Address = address!("3462413Af4609098e1E27A490f554f260213D685"); - - /// L1 Block Deployer Address - pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000003"); - - /// The Gas Price Oracle Deployer Address - pub const GAS_PRICE_ORACLE_DEPLOYER: Address = - address!("4210000000000000000000000000000000000004"); - - /// The Operator Fee Vault Deployer Address - pub const OPERATOR_FEE_VAULT_DEPLOYER: Address = - address!("4210000000000000000000000000000000000005"); - - /// The new L1 Block Address - /// This is computed by using go-ethereum's `crypto.CreateAddress` function, - /// with the L1 Block Deployer Address and nonce 0. - pub const NEW_L1_BLOCK: Address = address!("ff256497d61dcd71a9e9ff43967c13fde1f72d12"); - - /// The Gas Price Oracle Address - /// This is computed by using go-ethereum's `crypto.CreateAddress` function, - /// with the Gas Price Oracle Deployer Address and nonce 0. - pub const GAS_PRICE_ORACLE: Address = address!("93e57a196454cb919193fa9946f14943cf733845"); - - /// The Operator Fee Vault Address - /// This is computed by using go-ethereum's `crypto.CreateAddress` function, - /// with the Operator Fee Vault Deployer Address and nonce 0. 
- pub const OPERATOR_FEE_VAULT: Address = address!("4fa2be8cd41504037f1838bce3bcc93bc68ff537"); - - /// The Isthmus L1 Block Deployer Code Hash - /// See: <https://specs.optimism.io/protocol/isthmus/derivation.html#l1block-deployment> - pub const L1_BLOCK_DEPLOYER_CODE_HASH: B256 = alloy_primitives::b256!( - "0x8e3fe7a416d3e5f3b7be74ddd4e7e58e516fa3f80b67c6d930e3cd7297da4a4b" - ); - - /// The Isthmus Gas Price Oracle Code Hash - /// See: <https://specs.optimism.io/protocol/isthmus/derivation.html#gaspriceoracle-deployment> - pub const GAS_PRICE_ORACLE_CODE_HASH: B256 = alloy_primitives::b256!( - "0x4d195a9d7caf9fb6d4beaf80de252c626c853afd5868c4f4f8d19c9d301c2679" - ); - /// The Isthmus Operator Fee Vault Code Hash - /// See: <https://specs.optimism.io/protocol/isthmus/derivation.html#operator-fee-vault-deployment> - pub const OPERATOR_FEE_VAULT_CODE_HASH: B256 = alloy_primitives::b256!( - "0x57dc55c9c09ca456fa728f253fe7b895d3e6aae0706104935fe87c7721001971" - ); - /// Returns the source hash for the Isthmus Gas Price Oracle activation. - pub fn enable_isthmus_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Set Isthmus") } - .source_hash() - } - - /// Returns the source hash for the EIP-2935 block hash history contract deployment. - pub fn block_hash_history_contract_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: EIP-2935 Contract Deployment") } - .source_hash() - } - - /// Returns the source hash for the deployment of the gas price oracle contract. - pub fn deploy_gas_price_oracle_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Deployment") } - .source_hash() - } - - /// Returns the source hash for the deployment of the l1 block contract. 
- pub fn deploy_l1_block_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: L1 Block Deployment") }.source_hash() - } - - /// Returns the source hash for the deployment of the operator fee vault contract. - pub fn deploy_operator_fee_vault_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: Operator Fee Vault Deployment") } - .source_hash() - } - - /// Returns the source hash for the update of the l1 block proxy. - pub fn update_l1_block_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: L1 Block Proxy Update") } - .source_hash() - } - - /// Returns the source hash for the update of the gas price oracle proxy. - pub fn update_gas_price_oracle_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Proxy Update") } - .source_hash() - } - - /// Returns the source hash for the update of the operator fee vault proxy. - pub fn update_operator_fee_vault_source() -> B256 { - UpgradeDepositSource { intent: String::from("Isthmus: Operator Fee Vault Proxy Update") } - .source_hash() - } - - /// Returns the raw bytecode for the L1 Block deployment. - pub fn l1_block_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/l1_block_isthmus.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the gas price oracle deployment bytecode. - pub fn gas_price_oracle_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/gpo_isthmus.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the gas price oracle deployment bytecode. - pub fn operator_fee_vault_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/ofv_isthmus.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the EIP-2935 creation data. 
- pub fn eip2935_creation_data() -> Bytes { - hex::decode(include_str!("./bytecode/eip2935_isthmus.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the list of [`TxDeposit`]s for the network upgrade. - pub fn deposits() -> impl Iterator<Item = TxDeposit> { - ([ - TxDeposit { - source_hash: Self::deploy_l1_block_source(), - from: Self::L1_BLOCK_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 425_000, - is_system_transaction: false, - input: Self::l1_block_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::deploy_gas_price_oracle_source(), - from: Self::GAS_PRICE_ORACLE_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 1_625_000, - is_system_transaction: false, - input: Self::gas_price_oracle_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::deploy_operator_fee_vault_source(), - from: Self::OPERATOR_FEE_VAULT_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 500_000, - is_system_transaction: false, - input: Self::operator_fee_vault_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::update_l1_block_source(), - from: Address::default(), - to: TxKind::Call(Predeploys::L1_BLOCK_INFO), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::NEW_L1_BLOCK), - }, - TxDeposit { - source_hash: Self::update_gas_price_oracle_source(), - from: Address::default(), - to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: super::upgrade_to_calldata(Self::GAS_PRICE_ORACLE), - }, - TxDeposit { - source_hash: Self::update_operator_fee_vault_source(), - from: Address::default(), - to: TxKind::Call(Predeploys::OPERATOR_FEE_VAULT), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: 
super::upgrade_to_calldata(Self::OPERATOR_FEE_VAULT), - }, - TxDeposit { - source_hash: Self::enable_isthmus_source(), - from: Self::DEPOSITOR_ACCOUNT, - to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 90_000, - is_system_transaction: false, - input: Self::ENABLE_ISTHMUS_INPUT.into(), - }, - TxDeposit { - source_hash: Self::block_hash_history_contract_source(), - from: Self::EIP2935_FROM, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 250_000, - is_system_transaction: false, - input: Self::eip2935_creation_data(), - }, - ]) - .into_iter() - } -} - -impl Hardfork for Isthmus { - /// Constructs the network upgrade transactions. - fn txs(&self) -> impl Iterator<Item = Bytes> + '_ { - Self::deposits().map(|tx| { - let mut encoded = Vec::new(); - tx.encode_2718(&mut encoded); - Bytes::from(encoded) - }) - } -} - -#[cfg(test)] -mod tests { - use crate::test_utils::check_deployment_code; - - use super::*; - use alloc::vec; - use alloy_primitives::b256; - - #[test] - fn test_l1_block_source_hash() { - let expected = b256!("3b2d0821ca2411ad5cd3595804d1213d15737188ae4cbd58aa19c821a6c211bf"); - assert_eq!(Isthmus::deploy_l1_block_source(), expected); - } - - #[test] - fn test_gas_price_oracle_source_hash() { - let expected = b256!("fc70b48424763fa3fab9844253b4f8d508f91eb1f7cb11a247c9baec0afb8035"); - assert_eq!(Isthmus::deploy_gas_price_oracle_source(), expected); - } - - #[test] - fn test_operator_fee_vault_source_hash() { - let expected = b256!("107a570d3db75e6110817eb024f09f3172657e920634111ce9875d08a16daa96"); - assert_eq!(Isthmus::deploy_operator_fee_vault_source(), expected); - } - - #[test] - fn test_l1_block_update_source_hash() { - let expected = b256!("ebe8b5cb10ca47e0d8bda8f5355f2d66711a54ddeb0ef1d30e29418c9bf17a0e"); - assert_eq!(Isthmus::update_l1_block_source(), expected); - } - - #[test] - fn test_gas_price_oracle_update_source_hash() { - let expected = 
b256!("ecf2d9161d26c54eda6b7bfdd9142719b1e1199a6e5641468d1bf705bc531ab0"); - assert_eq!(Isthmus::update_gas_price_oracle_source(), expected); - } - - #[test] - fn test_operator_fee_vault_update_source_hash() { - let expected = b256!("ad74e1adb877ccbe176b8fa1cc559388a16e090ddbe8b512f5b37d07d887a927"); - assert_eq!(Isthmus::update_operator_fee_vault_source(), expected); - } - - #[test] - fn test_enable_isthmus_source() { - let expected = b256!("3ddf4b1302548dd92939826e970f260ba36167f4c25f18390a5e8b194b295319"); - assert_eq!(Isthmus::enable_isthmus_source(), expected); - } - - #[test] - fn test_isthmus_txs_encoded() { - let isthmus_upgrade_tx = Isthmus.txs().collect::<Vec<_>>(); - assert_eq!(isthmus_upgrade_tx.len(), 8); - - let expected_txs: Vec<Bytes> = vec![ - hex::decode(include_str!("./bytecode/isthmus_tx_0.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_1.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_2.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_3.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_4.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_5.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_6.hex").replace("\n", "")) - .unwrap() - .into(), - hex::decode(include_str!("./bytecode/isthmus_tx_7.hex").replace("\n", "")) - .unwrap() - .into(), - ]; - for (i, expected) in expected_txs.iter().enumerate() { - assert_eq!(isthmus_upgrade_tx[i], *expected); - } - } - #[test] - fn test_verify_isthmus_l1_block_deployment_code_hash() { - let txs = Isthmus::deposits().collect::<Vec<_>>(); - check_deployment_code( - txs[0].clone(), - Isthmus::NEW_L1_BLOCK, - Isthmus::L1_BLOCK_DEPLOYER_CODE_HASH, - ); - } - #[test] - fn 
test_verify_isthmus_gas_price_oracle_deployment_code_hash() { - let txs = Isthmus::deposits().collect::<Vec<_>>(); - - check_deployment_code( - txs[1].clone(), - Isthmus::GAS_PRICE_ORACLE, - Isthmus::GAS_PRICE_ORACLE_CODE_HASH, - ); - } - #[test] - fn test_verify_isthmus_operator_fee_vault_deployment_code_hash() { - let txs = Isthmus::deposits().collect::<Vec<_>>(); - - check_deployment_code( - txs[2].clone(), - Isthmus::OPERATOR_FEE_VAULT, - Isthmus::OPERATOR_FEE_VAULT_CODE_HASH, - ); - } -} diff --git a/kona/crates/protocol/hardforks/src/jovian.rs b/kona/crates/protocol/hardforks/src/jovian.rs deleted file mode 100644 index 7c52fe49c84..00000000000 --- a/kona/crates/protocol/hardforks/src/jovian.rs +++ /dev/null @@ -1,246 +0,0 @@ -//! Module containing a [`TxDeposit`] builder for the Jovian network upgrade transactions. -//! -//! Jovian network upgrade transactions are defined in the [OP Stack Specs][specs]. -//! -//! [specs]: https://specs.optimism.io/protocol/jovian/derivation.html#network-upgrade-automation-transactions - -use alloc::{string::String, vec::Vec}; -use alloy_eips::eip2718::Encodable2718; -use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex, keccak256}; -use kona_protocol::Predeploys; -use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; - -use crate::{Hardfork, upgrade_to_calldata}; - -/// The Jovian network upgrade transactions. -#[derive(Debug, Default, Clone, Copy)] -pub struct Jovian; - -impl Jovian { - /// The depositor account address. 
- pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); - - /// L1 Block Deployer Address - pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000006"); - - /// Zero address - pub const ZERO_ADDRESS: Address = address!("0x0000000000000000000000000000000000000000"); - - /// The Gas Price Oracle Deployer Address - pub const GAS_PRICE_ORACLE_DEPLOYER: Address = - address!("4210000000000000000000000000000000000007"); - - /// Returns the source hash for the deployment of the l1 block contract. - pub fn deploy_l1_block_source() -> B256 { - UpgradeDepositSource { intent: String::from("Jovian: L1 Block Deployment") }.source_hash() - } - - /// Returns the source hash for the deployment of the gas price oracle contract. - pub fn l1_block_proxy_update() -> B256 { - UpgradeDepositSource { intent: String::from("Jovian: L1 Block Proxy Update") }.source_hash() - } - - /// Returns the source hash for the deployment of the operator fee vault contract. - pub fn gas_price_oracle() -> B256 { - UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Deployment") } - .source_hash() - } - - /// Returns the source hash for the update of the l1 block proxy. - pub fn gas_price_oracle_proxy_update() -> B256 { - UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Proxy Update") } - .source_hash() - } - - /// The Jovian L1 Block Address - /// This is computed by using `Address::create` function, - /// with the L1 Block Deployer Address and nonce 0. - pub fn l1_block_address() -> Address { - Self::L1_BLOCK_DEPLOYER.create(0) - } - - /// The Jovian Gas Price Oracle Address - /// This is computed by using `Address::create` function, - /// with the Gas Price Oracle Deployer Address and nonce 0. - pub fn gas_price_oracle_address() -> Address { - Self::GAS_PRICE_ORACLE_DEPLOYER.create(0) - } - - /// Returns the source hash to the enable the gas price oracle for Jovian. 
- pub fn gas_price_oracle_enable_jovian() -> B256 { - UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Set Jovian") } - .source_hash() - } - - /// Returns the raw bytecode for the L1 Block deployment. - pub fn l1_block_deployment_bytecode() -> Bytes { - hex::decode(include_str!("./bytecode/jovian-l1-block-deployment.hex").replace("\n", "")) - .expect("Expected hex byte string") - .into() - } - - /// Returns the gas price oracle deployment bytecode. - pub fn gas_price_oracle_deployment_bytecode() -> Bytes { - hex::decode( - include_str!("./bytecode/jovian-gas-price-oracle-deployment.hex").replace("\n", ""), - ) - .expect("Expected hex byte string") - .into() - } - - /// Returns the bytecode to enable the gas price oracle for Jovian. - pub fn gas_price_oracle_enable_jovian_bytecode() -> Bytes { - let mut bytes = Vec::new(); - bytes.extend_from_slice(&keccak256("setJovian()")[..4]); - bytes.into() - } - - /// Returns the list of [`TxDeposit`]s for the network upgrade. - pub fn deposits() -> impl Iterator<Item = TxDeposit> { - ([ - TxDeposit { - source_hash: Self::deploy_l1_block_source(), - from: Self::L1_BLOCK_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 447_315, - is_system_transaction: false, - input: Self::l1_block_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::l1_block_proxy_update(), - from: Self::ZERO_ADDRESS, - to: TxKind::Call(Predeploys::L1_BLOCK_INFO), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: upgrade_to_calldata(Self::l1_block_address()), - }, - TxDeposit { - source_hash: Self::gas_price_oracle(), - from: Self::GAS_PRICE_ORACLE_DEPLOYER, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 1_750_714, - is_system_transaction: false, - input: Self::gas_price_oracle_deployment_bytecode(), - }, - TxDeposit { - source_hash: Self::gas_price_oracle_proxy_update(), - from: Self::ZERO_ADDRESS, - to: 
TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 50_000, - is_system_transaction: false, - input: upgrade_to_calldata(Self::gas_price_oracle_address()), - }, - TxDeposit { - source_hash: Self::gas_price_oracle_enable_jovian(), - from: Self::DEPOSITOR_ACCOUNT, - to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), - mint: 0, - value: U256::ZERO, - gas_limit: 90_000, - is_system_transaction: false, - input: Self::gas_price_oracle_enable_jovian_bytecode(), - }, - ]) - .into_iter() - } -} - -impl Hardfork for Jovian { - /// Constructs the network upgrade transactions. - fn txs(&self) -> impl Iterator<Item = Bytes> + '_ { - Self::deposits().map(|tx| { - let mut encoded = Vec::new(); - tx.encode_2718(&mut encoded); - Bytes::from(encoded) - }) - } -} - -#[cfg(test)] -mod tests { - use crate::test_utils::check_deployment_code; - - use super::*; - use alloy_primitives::b256; - - #[test] - fn test_l1_block_source_hash() { - let expected = b256!("bb1a656f65401240fac3db12e7a79ebb954b11e62f7626eb11691539b798d3bf"); - assert_eq!(Jovian::deploy_l1_block_source(), expected); - } - - #[test] - fn test_l1_block_proxy_update_source_hash() { - let expected = b256!("f3275f829340521028f9ad5bce4ecb1c64a45d448794effa2a77674627338e76"); - assert_eq!(Jovian::l1_block_proxy_update(), expected); - } - - #[test] - fn test_gas_price_oracle_source_hash() { - let expected = b256!("239b7021a6c2cf3a918481242bbb5a9499057f24501539467536c691bb133962"); - assert_eq!(Jovian::gas_price_oracle(), expected); - } - - #[test] - fn test_upgrade_to_calldata_for_gas_price_oracle() { - assert_eq!( - **upgrade_to_calldata(Jovian::gas_price_oracle_address()), - hex!("0x3659cfe60000000000000000000000004f1db3c6abd250ba86e0928471a8f7db3afd88f1") - ); - } - - #[test] - fn test_upgrade_to_calldata_for_l1_block_proxy_update() { - assert_eq!( - **upgrade_to_calldata(Jovian::l1_block_address()), - hex!("0x3659cfe60000000000000000000000003ba4007f5c922fbb33c454b41ea7a1f11e83df2c") - ); - } 
- - #[test] - fn test_gas_price_oracle_proxy_update_source_hash() { - let expected = b256!("a70c60aa53b8c1c0d52b39b1e901e7d7c09f7819595cb24048a6bb1983b401ff"); - assert_eq!(Jovian::gas_price_oracle_proxy_update(), expected); - } - - #[test] - fn test_gas_price_oracle_enable_jovian_source_hash() { - let expected = b256!("e836db6a959371756f8941be3e962d000f7e12a32e49e2c9ca42ba177a92716c"); - assert_eq!(Jovian::gas_price_oracle_enable_jovian(), expected); - } - - #[test] - fn test_verify_jovian_l1_block_deployment_code_hash() { - let txs = Jovian::deposits().collect::<Vec<_>>(); - check_deployment_code( - txs[0].clone(), - Jovian::l1_block_address(), - hex!("5f885ca815d2cf27a203123e50b8ae204fdca910b6995d90b2d7700cbb9240d1").into(), - ); - } - - #[test] - fn test_verify_set_jovian() { - let hash = &keccak256("setJovian()")[..4]; - assert_eq!(hash, hex!("0xb3d72079")) - } - - #[test] - fn test_verify_jovian_gas_price_oracle_deployment_code_hash() { - let txs = Jovian::deposits().collect::<Vec<_>>(); - - check_deployment_code( - txs[2].clone(), - Jovian::gas_price_oracle_address(), - hex!("e9fc7c96c4db0d6078e3d359d7e8c982c350a513cb2c31121adf5e1e8a446614").into(), - ); - } -} diff --git a/kona/crates/protocol/hardforks/src/lib.rs b/kona/crates/protocol/hardforks/src/lib.rs deleted file mode 100644 index 917dc3cd9b1..00000000000 --- a/kona/crates/protocol/hardforks/src/lib.rs +++ /dev/null @@ -1,37 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -mod traits; -pub use traits::Hardfork; - -mod forks; -pub use forks::Hardforks; - -mod fjord; -pub use fjord::Fjord; - -mod ecotone; -pub use 
ecotone::Ecotone; - -mod isthmus; -pub use isthmus::Isthmus; - -mod interop; -pub use interop::Interop; - -mod jovian; -pub use jovian::Jovian; - -mod utils; -pub(crate) use utils::upgrade_to_calldata; - -#[cfg(test)] -mod test_utils; diff --git a/kona/crates/protocol/hardforks/src/utils.rs b/kona/crates/protocol/hardforks/src/utils.rs deleted file mode 100644 index d2a5fe3196a..00000000000 --- a/kona/crates/protocol/hardforks/src/utils.rs +++ /dev/null @@ -1,54 +0,0 @@ -//! Utilities for creating hardforks. - -use alloy_primitives::{Address, Bytes, hex}; - -/// UpgradeTo Function 4Byte Signature -pub(crate) const UPGRADE_TO_FUNC_BYTES_4: [u8; 4] = hex!("3659cfe6"); - -/// Turns the given address into calldata for the `upgradeTo` function. -pub(crate) fn upgrade_to_calldata(addr: Address) -> Bytes { - let mut v = UPGRADE_TO_FUNC_BYTES_4.to_vec(); - v.extend_from_slice(addr.into_word().as_slice()); - v.into() -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::{Ecotone, Fjord, Isthmus}; - use alloy_primitives::keccak256; - - #[test] - fn test_upgrade_to_selector_is_valid() { - let expected_selector = &keccak256("upgradeTo(address)")[..4]; - assert_eq!(UPGRADE_TO_FUNC_BYTES_4, expected_selector); - } - - #[test] - fn test_upgrade_to_calldata_format() { - let test_addr = Address::from([0x42; 20]); - let calldata = upgrade_to_calldata(test_addr); - - assert_eq!(calldata.len(), 36); - assert_eq!(&calldata[..4], UPGRADE_TO_FUNC_BYTES_4); - assert_eq!(&calldata[4..36], test_addr.into_word().as_slice()); - } - - #[test] - fn test_ecotone_selector_is_valid() { - let expected_selector = &keccak256("setEcotone()")[..4]; - assert_eq!(Ecotone::ENABLE_ECOTONE_INPUT, expected_selector); - } - - #[test] - fn test_fjord_selector_is_valid() { - let expected_selector = &keccak256("setFjord()")[..4]; - assert_eq!(Fjord::SET_FJORD_METHOD_SIGNATURE, expected_selector); - } - - #[test] - fn test_isthmus_selector_is_valid() { - let expected_selector = 
&keccak256("setIsthmus()")[..4]; - assert_eq!(Isthmus::ENABLE_ISTHMUS_INPUT, expected_selector); - } -} diff --git a/kona/crates/protocol/interop/Cargo.toml b/kona/crates/protocol/interop/Cargo.toml deleted file mode 100644 index bae121376dc..00000000000 --- a/kona/crates/protocol/interop/Cargo.toml +++ /dev/null @@ -1,90 +0,0 @@ -[package] -name = "kona-interop" -description = "Core functionality and primitives for the Interop feature of the OP Stack." -version = "0.4.5" -edition.workspace = true -authors.workspace = true -license.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-genesis.workspace = true -kona-registry.workspace = true -kona-protocol.workspace = true - -# General -thiserror.workspace = true -async-trait.workspace = true -tracing.workspace = true -derive_more = { workspace = true, features = ["from", "as_ref", "constructor"] } - -# Alloy -alloy-serde = { workspace = true, optional = true } -alloy-rlp.workspace = true -alloy-eips.workspace = true -alloy-sol-types.workspace = true -alloy-consensus.workspace = true -alloy-primitives = { workspace = true, features = ["rlp"] } -op-alloy-consensus.workspace = true - -# Arbitrary -arbitrary = { version = "1.4", features = ["derive"], optional = true } - -# Serde -serde = { workspace = true, optional = true } - -[dev-dependencies] -serde_json.workspace = true -tokio = { workspace = true, features = ["full"] } -alloy-primitives = { workspace = true, features = ["rlp", "arbitrary"] } -arbitrary = { version = "1.4", features = ["derive"] } -rand = { workspace = true, features = ["thread_rng"] } - -[features] -default = [] -std = [ - "alloy-consensus/std", - "alloy-eips/std", - "alloy-primitives/std", - "alloy-rlp/std", - "alloy-serde?/std", - "alloy-sol-types/std", - "derive_more/display", - "derive_more/std", - "kona-genesis/std", - "kona-protocol/std", - "kona-registry/std", - "op-alloy-consensus/std", - "serde?/std", - 
"thiserror/std", - "tracing/std", -] -arbitrary = [ - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "alloy-serde?/arbitrary", - "alloy-sol-types/arbitrary", - "dep:arbitrary", - "kona-genesis/arbitrary", - "kona-protocol/arbitrary", - "op-alloy-consensus/arbitrary", - "std", -] -serde = [ - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-primitives/serde", - "dep:alloy-serde", - "dep:serde", - "kona-genesis/serde", - "kona-protocol/serde", - "op-alloy-consensus/serde", -] -test-utils = [ "kona-protocol/test-utils", "std" ] - diff --git a/kona/crates/protocol/interop/README.md b/kona/crates/protocol/interop/README.md deleted file mode 100644 index 2c387a06a47..00000000000 --- a/kona/crates/protocol/interop/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-interop` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-interop"><img src="https://img.shields.io/crates/v/kona-interop.svg?label=kona-interop&labelColor=2a2f35" alt="Kona MPT"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://app.codecov.io/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -Core functionality and primitives for the [Interop feature](https://specs.optimism.io/interop/overview.html) of the OP Stack. diff --git a/kona/crates/protocol/interop/src/errors.rs b/kona/crates/protocol/interop/src/errors.rs deleted file mode 100644 index 66e61601ec5..00000000000 --- a/kona/crates/protocol/interop/src/errors.rs +++ /dev/null @@ -1,135 +0,0 @@ -//! Error types for the `kona-interop` crate. 
- -use crate::InteropProvider; -use alloy_primitives::{Address, B256}; -use core::fmt::Debug; -use kona_registry::HashMap; -use thiserror::Error; - -/// An error type for the [MessageGraph] struct. -/// -/// [MessageGraph]: crate::MessageGraph -#[derive(Debug, Clone, PartialEq, Eq, Error)] -pub enum MessageGraphError<E: Debug> { - /// Dependency set is impossibly empty - #[error("Dependency set is impossibly empty")] - EmptyDependencySet, - /// Missing a [RollupConfig] for a chain ID - /// - /// [RollupConfig]: kona_genesis::RollupConfig - #[error("Missing a RollupConfig for chain ID {0}")] - MissingRollupConfig(u64), - /// Interop provider error - #[error("Interop provider: {0}")] - InteropProviderError(#[from] E), - /// Remote message not found - #[error("Remote message not found on chain ID {chain_id} with message hash {message_hash}")] - RemoteMessageNotFound { - /// The remote chain ID - chain_id: u64, - /// The message hash - message_hash: B256, - }, - /// Invalid message origin - #[error("Invalid message origin. Expected {expected}, got {actual}")] - InvalidMessageOrigin { - /// The expected message origin - expected: Address, - /// The actual message origin - actual: Address, - }, - /// Invalid message payload hash - #[error("Invalid message hash. Expected {expected}, got {actual}")] - InvalidMessageHash { - /// The expected message hash - expected: B256, - /// The actual message hash - actual: B256, - }, - /// Invalid message timestamp - #[error("Invalid message timestamp. Expected {expected}, got {actual}")] - InvalidMessageTimestamp { - /// The expected timestamp - expected: u64, - /// The actual timestamp - actual: u64, - }, - /// Interop has not been activated for at least one block on the initiating message's chain. - #[error( - "Interop has not been active for at least one block on initiating message's chain. 
Activation time: {activation_time}, initiating message time: {initiating_message_time}" - )] - InitiatedTooEarly { - /// The timestamp of the interop activation - activation_time: u64, - /// The timestamp of the initiating message - initiating_message_time: u64, - }, - /// Message is in the future - #[error("Message is in the future. Expected timestamp to be <= {max}, got {actual}")] - MessageInFuture { - /// The expected max timestamp - max: u64, - /// The actual timestamp - actual: u64, - }, - /// Message has exceeded the expiry window. - #[error( - "Message has exceeded the expiry window. Initiating Timestamp: {initiating_timestamp}, Executing Timestamp: {executing_timestamp}" - )] - MessageExpired { - /// The timestamp of the initiating message - initiating_timestamp: u64, - /// The timestamp of the executing message - executing_timestamp: u64, - }, - /// Invalid messages were found - #[error("Invalid messages found on chains: {0:?}")] - InvalidMessages(HashMap<u64, MessageGraphError<E>>), -} - -/// A [Result] alias for the [MessageGraphError] type. -#[allow(type_alias_bounds)] -pub type MessageGraphResult<T, P: InteropProvider> = - core::result::Result<T, MessageGraphError<P::Error>>; - -/// An error type for the [SuperRoot] struct's serialization and deserialization. -/// -/// [SuperRoot]: crate::SuperRoot -#[derive(Debug, Clone, Error)] -pub enum SuperRootError { - /// Invalid super root version byte - #[error("Invalid super root version byte")] - InvalidVersionByte, - /// Unexpected encoded super root length - #[error("Unexpected encoded super root length")] - UnexpectedLength, - /// Slice conversion error - #[error("Slice conversion error: {0}")] - SliceConversionError(#[from] core::array::TryFromSliceError), -} - -/// A [Result] alias for the [SuperRootError] type. -pub type SuperRootResult<T> = core::result::Result<T, SuperRootError>; - -/// Errors that can occur during interop validation. 
-#[derive(Debug, Error, PartialEq, Eq)] -pub enum InteropValidationError { - /// Interop is not enabled on one or both chains at the required timestamp. - #[error("interop not enabled")] - InteropNotEnabled, - - /// Executing timestamp is earlier than the initiating timestamp. - #[error( - "executing timestamp is earlier than initiating timestamp, executing: {executing}, initiating: {initiating}" - )] - InvalidTimestampInvariant { - /// Executing timestamp of the message - executing: u64, - /// Initiating timestamp of the message - initiating: u64, - }, - - /// Timestamp is outside the allowed interop expiry window. - #[error("timestamp outside allowed interop window, timestamp: {0}")] - InvalidInteropTimestamp(u64), -} diff --git a/kona/crates/protocol/interop/src/event.rs b/kona/crates/protocol/interop/src/event.rs deleted file mode 100644 index d2b3ee5e19b..00000000000 --- a/kona/crates/protocol/interop/src/event.rs +++ /dev/null @@ -1,65 +0,0 @@ -//! Contains the managed node event. - -use crate::{BlockReplacement, DerivedRefPair}; -use alloc::{format, string::String, vec::Vec}; -use derive_more::Constructor; -use kona_protocol::BlockInfo; - -/// Event sent by the node to the supervisor to share updates. -/// -/// This struct is used to communicate various events that occur within the node. -/// At least one of the fields will be `Some`, and the rest will be `None`. -/// -/// See: <https://specs.optimism.io/interop/managed-mode.html#node---supervisor> -#[derive(Debug, Clone, Default, PartialEq, Eq, Constructor)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -pub struct ManagedEvent { - /// This is emitted when the node has determined that it needs a reset. - /// It tells the supervisor to send the interop_reset event with the - /// required parameters. - pub reset: Option<String>, - - /// New L2 unsafe block was processed, updating local-unsafe head. 
- pub unsafe_block: Option<BlockInfo>, - - /// Signals that an L2 block is considered local-safe. - pub derivation_update: Option<DerivedRefPair>, - - /// Emitted when no more L1 Blocks are available. - /// Ready to take new L1 blocks from supervisor. - pub exhaust_l1: Option<DerivedRefPair>, - - /// Emitted when a block gets replaced for any reason. - pub replace_block: Option<BlockReplacement>, - - /// Signals that an L2 block is now local-safe because of the given L1 traversal. - /// This would be accompanied with [`Self::derivation_update`]. - pub derivation_origin_update: Option<BlockInfo>, -} - -impl core::fmt::Display for ManagedEvent { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - let mut parts = Vec::new(); - if let Some(ref reset) = self.reset { - parts.push(format!("reset: {reset}")); - } - if let Some(ref block) = self.unsafe_block { - parts.push(format!("unsafe_block: {block}")); - } - if let Some(ref pair) = self.derivation_update { - parts.push(format!("derivation_update: {pair}")); - } - if let Some(ref pair) = self.exhaust_l1 { - parts.push(format!("exhaust_l1: {pair}")); - } - if let Some(ref replacement) = self.replace_block { - parts.push(format!("replace_block: {replacement}")); - } - if let Some(ref origin) = self.derivation_origin_update { - parts.push(format!("derivation_origin_update: {origin}")); - } - - if parts.is_empty() { write!(f, "none") } else { write!(f, "{}", parts.join(", ")) } - } -} diff --git a/kona/crates/protocol/interop/src/lib.rs b/kona/crates/protocol/interop/src/lib.rs deleted file mode 100644 index b6ebf9995be..00000000000 --- a/kona/crates/protocol/interop/src/lib.rs +++ /dev/null @@ -1,65 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] 
-#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -mod graph; -pub use graph::MessageGraph; - -mod event; -pub use event::ManagedEvent; - -mod control; -pub use control::ControlEvent; - -mod replacement; -pub use replacement::BlockReplacement; - -mod traits; -pub use traits::{InteropProvider, InteropValidator}; - -mod safety; -pub use safety::SafetyLevelParseError; - -mod errors; -pub use errors::{ - InteropValidationError, MessageGraphError, MessageGraphResult, SuperRootError, SuperRootResult, -}; - -mod root; -pub use root::{ChainRootInfo, OutputRootWithChain, SuperRoot, SuperRootOutput}; - -mod message; -pub use message::{ - EnrichedExecutingMessage, ExecutingDescriptor, ExecutingMessage, MessageIdentifier, - RawMessagePayload, extract_executing_messages, parse_log_to_executing_message, - parse_logs_to_executing_msgs, -}; - -mod depset; -pub use depset::{ChainDependency, DependencySet}; - -pub use op_alloy_consensus::interop::SafetyLevel; - -mod access_list; -pub use access_list::{ - parse_access_list_item_to_inbox_entries, parse_access_list_items_to_inbox_entries, -}; -mod derived; -pub use derived::{DerivedIdPair, DerivedRefPair}; - -mod constants; -pub use constants::{MESSAGE_EXPIRY_WINDOW, SUPER_ROOT_VERSION}; - -#[cfg(any(test, feature = "test-utils"))] -mod test_util; -#[cfg(any(test, feature = "test-utils"))] -pub use test_util::{ - ChainBuilder, ExecutingMessageBuilder, InteropProviderError, MockInteropProvider, - SuperchainBuilder, -}; diff --git a/kona/crates/protocol/interop/src/message.rs b/kona/crates/protocol/interop/src/message.rs deleted file mode 100644 index 0a05209adf0..00000000000 --- a/kona/crates/protocol/interop/src/message.rs +++ /dev/null @@ -1,241 +0,0 @@ -//! Interop message primitives. -//! -//! <https://specs.optimism.io/interop/messaging.html#messaging> -//! 
<https://github.com/ethereum-optimism/optimism/blob/34d5f66ade24bd1f3ce4ce7c0a6cfc1a6540eca1/packages/contracts-bedrock/src/L2/CrossL2Inbox.sol> - -use alloc::{vec, vec::Vec}; -use alloy_primitives::{Bytes, ChainId, Log, keccak256}; -use alloy_sol_types::{SolEvent, sol}; -use derive_more::{AsRef, Constructor, From}; -use kona_protocol::Predeploys; -use op_alloy_consensus::OpReceiptEnvelope; - -sol! { - /// @notice The struct for a pointer to a message payload in a remote (or local) chain. - #[derive(Default, Debug, PartialEq, Eq)] - #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] - struct MessageIdentifier { - address origin; - uint256 blockNumber; - uint256 logIndex; - uint256 timestamp; - #[cfg_attr(feature = "serde", serde(rename = "chainID"))] - uint256 chainId; - } - - /// @notice Emitted when a cross chain message is being executed. - /// @param payloadHash Hash of message payload being executed. - /// @param identifier Encoded Identifier of the message. - /// - /// Parameter names are derived from the `op-supervisor` JSON field names. - /// See the relevant definition in the Optimism repository: - /// [Ethereum-Optimism/op-supervisor](https://github.com/ethereum-optimism/optimism/blob/4ba2eb00eafc3d7de2c8ceb6fd83913a8c0a2c0d/op-supervisor/supervisor/types/types.go#L61-L64). - #[derive(Default, Debug, PartialEq, Eq)] - #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] - event ExecutingMessage(bytes32 indexed payloadHash, MessageIdentifier identifier); - - /// @notice Executes a cross chain message on the destination chain. - /// @param _id Identifier of the message. - /// @param _target Target address to call. - /// @param _message Message payload to call target with. - function executeMessage( - MessageIdentifier calldata _id, - address _target, - bytes calldata _message - ) external; -} - -/// A [RawMessagePayload] is the raw payload of an initiating message. 
-#[derive(Debug, Clone, From, AsRef, PartialEq, Eq)] -pub struct RawMessagePayload(Bytes); - -impl From<&Log> for RawMessagePayload { - fn from(log: &Log) -> Self { - let mut data = vec![0u8; log.topics().len() * 32 + log.data.data.len()]; - for (i, topic) in log.topics().iter().enumerate() { - data[i * 32..(i + 1) * 32].copy_from_slice(topic.as_ref()); - } - data[(log.topics().len() * 32)..].copy_from_slice(log.data.data.as_ref()); - data.into() - } -} - -impl From<Vec<u8>> for RawMessagePayload { - fn from(data: Vec<u8>) -> Self { - Self(Bytes::from(data)) - } -} - -impl From<executeMessageCall> for ExecutingMessage { - fn from(call: executeMessageCall) -> Self { - Self { identifier: call._id, payloadHash: keccak256(call._message.as_ref()) } - } -} - -/// An [`ExecutingDescriptor`] is a part of the payload to `supervisor_checkAccessList` -/// Spec: <https://github.com/ethereum-optimism/specs/blob/main/specs/interop/supervisor.md#executingdescriptor> -#[derive(Default, Debug, PartialEq, Eq, Clone, Constructor)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct ExecutingDescriptor { - /// The timestamp used to enforce timestamp [invariant](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/derivation.md#invariants) - #[cfg_attr(feature = "serde", serde(with = "alloy_serde::quantity"))] - pub timestamp: u64, - /// The timeout that requests verification to still hold at `timestamp+timeout` - /// (message expiry may drop previously valid messages). - #[cfg_attr( - feature = "serde", - serde( - default, - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - ) - )] - pub timeout: Option<u64>, - /// Chain ID of the chain that the message was executed on. 
- #[cfg_attr( - feature = "serde", - serde( - default, - rename = "chainID", - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - ) - )] - pub chain_id: Option<ChainId>, -} - -/// A wrapper type for [ExecutingMessage] containing the chain ID of the chain that the message was -/// executed on. -#[derive(Debug)] -pub struct EnrichedExecutingMessage { - /// The inner [ExecutingMessage]. - pub inner: ExecutingMessage, - /// The chain ID of the chain that the message was executed on. - pub executing_chain_id: u64, - /// The timestamp of the block that the executing message was included in. - pub executing_timestamp: u64, -} - -impl EnrichedExecutingMessage { - /// Create a new [EnrichedExecutingMessage] from an [ExecutingMessage] and a chain ID. - pub const fn new( - inner: ExecutingMessage, - executing_chain_id: u64, - executing_timestamp: u64, - ) -> Self { - Self { inner, executing_chain_id, executing_timestamp } - } -} - -/// Extracts all [ExecutingMessage] events from list of [OpReceiptEnvelope]s. -/// -/// See [`parse_log_to_executing_message`]. -/// -/// Note: filters out logs that don't contain executing message events. -pub fn extract_executing_messages(receipts: &[OpReceiptEnvelope]) -> Vec<ExecutingMessage> { - receipts.iter().fold(Vec::new(), |mut acc, envelope| { - let executing_messages = envelope.logs().iter().filter_map(parse_log_to_executing_message); - - acc.extend(executing_messages); - acc - }) -} - -/// Parses [`Log`]s to [`ExecutingMessage`]s. -/// -/// See [`parse_log_to_executing_message`] for more details. Return iterator maps 1-1 with input. -pub fn parse_logs_to_executing_msgs<'a>( - logs: impl Iterator<Item = &'a Log>, -) -> impl Iterator<Item = Option<ExecutingMessage>> { - logs.map(parse_log_to_executing_message) -} - -/// Parse [`Log`] to [`ExecutingMessage`], if any. -/// -/// Max one [`ExecutingMessage`] event can exist per log. Returns `None` if log doesn't contain -/// executing message event. 
-pub fn parse_log_to_executing_message(log: &Log) -> Option<ExecutingMessage> { - (log.address == Predeploys::CROSS_L2_INBOX && log.topics().len() == 2) - .then(|| ExecutingMessage::decode_log_data(&log.data).ok()) - .flatten() -} - -#[cfg(test)] -mod tests { - use alloy_primitives::{Address, B256, LogData, U256}; - - use super::*; - - // Test the serialization of ExecutingDescriptor - #[cfg(feature = "serde")] - #[test] - fn test_serialize_executing_descriptor() { - let descriptor = ExecutingDescriptor { - timestamp: 1234567890, - timeout: Some(3600), - chain_id: Some(1000), - }; - let serialized = serde_json::to_string(&descriptor).unwrap(); - let expected = r#"{"timestamp":"0x499602d2","timeout":"0xe10","chainID":"0x3e8"}"#; - assert_eq!(serialized, expected); - - let deserialized: ExecutingDescriptor = serde_json::from_str(&serialized).unwrap(); - assert_eq!(descriptor, deserialized); - } - - #[cfg(feature = "serde")] - #[test] - fn test_deserialize_executing_descriptor_missing_chain_id() { - let json = r#"{ - "timestamp": "0x499602d2", - "timeout": "0xe10" - }"#; - - let expected = - ExecutingDescriptor { timestamp: 1234567890, timeout: Some(3600), chain_id: None }; - - let deserialized: ExecutingDescriptor = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, expected); - } - - #[cfg(feature = "serde")] - #[test] - fn test_deserialize_executing_descriptor_missing_timeout() { - let json = r#"{ - "timestamp": "0x499602d2", - "chainID": "0x3e8" - }"#; - - let expected = - ExecutingDescriptor { timestamp: 1234567890, timeout: None, chain_id: Some(1000) }; - - let deserialized: ExecutingDescriptor = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, expected); - } - - #[test] - fn test_parse_logs_to_executing_msgs_iterator() { - // One valid, one invalid log - let identifier = MessageIdentifier { - origin: Address::repeat_byte(0x77), - blockNumber: U256::from(200), - logIndex: U256::from(3), - timestamp: U256::from(777777), - chainId: 
U256::from(12), - }; - let payload_hash = B256::repeat_byte(0x88); - let event = ExecutingMessage { payloadHash: payload_hash, identifier }; - let data = ExecutingMessage::encode_log_data(&event); - - let valid_log = Log { address: Predeploys::CROSS_L2_INBOX, data }; - let invalid_log = Log { - address: Address::repeat_byte(0x99), - data: LogData::new_unchecked([B256::ZERO, B256::ZERO].to_vec(), Bytes::default()), - }; - - let logs = vec![&valid_log, &invalid_log]; - let mut iter = parse_logs_to_executing_msgs(logs.into_iter()); - assert_eq!(iter.next().unwrap().unwrap(), event); - assert!(iter.next().unwrap().is_none()); - } -} diff --git a/kona/crates/protocol/protocol/Cargo.toml b/kona/crates/protocol/protocol/Cargo.toml deleted file mode 100644 index 993595f9630..00000000000 --- a/kona/crates/protocol/protocol/Cargo.toml +++ /dev/null @@ -1,130 +0,0 @@ -[package] -name = "kona-protocol" -version = "0.4.5" -description = "Optimism protocol-specific types" - -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-genesis.workspace = true - -# OP Alloy -op-alloy-consensus.workspace = true -op-alloy-rpc-types.workspace = true -op-alloy-rpc-types-engine.workspace = true - -# Alloy -alloy-primitives = { workspace = true, features = ["map"] } -alloy-rlp.workspace = true -alloy-hardforks.workspace = true -alloy-eips.workspace = true -alloy-consensus.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-rpc-types-engine.workspace = true - -# Misc -tracing.workspace = true -thiserror.workspace = true -async-trait.workspace = true -unsigned-varint.workspace = true -derive_more = { workspace = true, features = ["display"] } - -# Compression -brotli.workspace = true -miniz_oxide.workspace = true -alloc-no-stdlib.workspace = true - -# `arbitrary` feature -arbitrary = { 
workspace = true, features = ["derive"], optional = true } - -# `serde` feature -serde = { workspace = true, optional = true } -alloy-serde = { workspace = true, optional = true } - -# `test-utils` feature -spin = { workspace = true, optional = true } -tracing-subscriber = { workspace = true, features = ["fmt"], optional = true } -ambassador = "0.4.2" - -[dev-dependencies] -brotli = { workspace = true, features = ["std"] } -spin.workspace = true -rand = { workspace = true, features = ["std", "std_rng"] } -rstest.workspace = true -proptest.workspace = true -serde_json.workspace = true -alloy-sol-types.workspace = true -tokio = { workspace = true, features = ["full"] } -arbitrary = { workspace = true, features = ["derive"] } -tracing-subscriber = { workspace = true, features = ["fmt"] } -alloy-primitives = { workspace = true, features = ["arbitrary"] } -op-alloy-consensus.workspace = true -alloy-rpc-types-eth.workspace = true -op-alloy-rpc-types.workspace = true - -kona-registry.workspace = true - -[features] -default = [] -std = [ - "alloy-consensus/std", - "alloy-eips/std", - "alloy-primitives/std", - "alloy-rlp/std", - "alloy-rpc-types-engine/std", - "alloy-rpc-types-eth/std", - "alloy-serde?/std", - "brotli/std", - "derive_more/std", - "kona-genesis/std", - "miniz_oxide/std", - "op-alloy-consensus/std", - "op-alloy-rpc-types-engine/std", - "op-alloy-rpc-types/std", - "serde?/std", - "spin?/std", - "thiserror/std", - "tracing/std", - "unsigned-varint/std", -] -test-utils = [ "dep:spin", "dep:tracing-subscriber" ] -arbitrary = [ - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/arbitrary", - "alloy-primitives/rand", - "alloy-rpc-types-engine/arbitrary", - "alloy-rpc-types-eth/arbitrary", - "alloy-serde?/arbitrary", - "dep:arbitrary", - "kona-genesis/arbitrary", - "op-alloy-consensus/arbitrary", - "op-alloy-rpc-types-engine/arbitrary", - "op-alloy-rpc-types/arbitrary", - "std", -] -serde = [ - "alloy-consensus/serde", - "alloy-eips/serde", 
- "alloy-hardforks/serde", - "alloy-primitives/serde", - "alloy-rpc-types-engine/serde", - "alloy-rpc-types-eth/serde", - "dep:alloy-serde", - "dep:serde", - "kona-genesis/serde", - "op-alloy-consensus/serde", - "op-alloy-rpc-types-engine/serde", - "op-alloy-rpc-types/serde", - "tracing-subscriber?/serde", -] diff --git a/kona/crates/protocol/protocol/README.md b/kona/crates/protocol/protocol/README.md deleted file mode 100644 index 799f7e0829a..00000000000 --- a/kona/crates/protocol/protocol/README.md +++ /dev/null @@ -1,11 +0,0 @@ -## `kona-protocol` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-protocol"><img src="https://img.shields.io/crates/v/kona-protocol.svg" alt="kona-protocol crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - - -Core protocol types for Optimism. - -These include types, constants, and methods for derivation as well as batch-submission. diff --git a/kona/crates/protocol/protocol/src/attributes.rs b/kona/crates/protocol/protocol/src/attributes.rs deleted file mode 100644 index c2ecdb6a0c3..00000000000 --- a/kona/crates/protocol/protocol/src/attributes.rs +++ /dev/null @@ -1,233 +0,0 @@ -//! Optimism Payload attributes that reference the parent L2 block. - -use crate::{BlockInfo, L2BlockInfo}; -use op_alloy_consensus::OpTxType; -use op_alloy_rpc_types_engine::OpPayloadAttributes; - -/// Optimism Payload Attributes with parent block reference and the L1 origin block. 
-#[derive(Debug, Clone, PartialEq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct OpAttributesWithParent { - /// The payload attributes. - pub attributes: OpPayloadAttributes, - /// The parent block reference. - pub parent: L2BlockInfo, - /// The L1 block that the attributes were derived from. - pub derived_from: Option<BlockInfo>, - /// Whether the current batch is the last in its span. - pub is_last_in_span: bool, -} - -impl OpAttributesWithParent { - /// Create a new [`OpAttributesWithParent`] instance. - pub const fn new( - attributes: OpPayloadAttributes, - parent: L2BlockInfo, - derived_from: Option<BlockInfo>, - is_last_in_span: bool, - ) -> Self { - Self { attributes, parent, derived_from, is_last_in_span } - } - - /// Returns the L2 block number for the payload attributes if made canonical. - /// Derived as the parent block height plus one. - pub const fn block_number(&self) -> u64 { - self.parent.block_info.number.saturating_add(1) - } - - /// Consumes `self` and returns the inner [`OpPayloadAttributes`]. - pub fn take_inner(self) -> OpPayloadAttributes { - self.attributes - } - - /// Returns the payload attributes. - pub const fn attributes(&self) -> &OpPayloadAttributes { - &self.attributes - } - - /// Returns the parent block reference. - pub const fn parent(&self) -> &L2BlockInfo { - &self.parent - } - - /// Returns the L1 origin block reference. - pub const fn derived_from(&self) -> Option<&BlockInfo> { - self.derived_from.as_ref() - } - - /// Returns whether the current batch is the last in its span. - pub const fn is_last_in_span(&self) -> bool { - self.is_last_in_span - } - - /// Returns `true` if all transactions in the payload are deposits. - pub fn is_deposits_only(&self) -> bool { - self.attributes - .transactions - .iter() - .all(|tx| tx.first().is_some_and(|tx| tx[0] == OpTxType::Deposit as u8)) - } - - /// Converts the [`OpAttributesWithParent`] into a deposits-only payload. 
- pub fn as_deposits_only(&self) -> Self { - let mut attributes = self.attributes.clone(); - - attributes - .transactions - .iter_mut() - .for_each(|txs| txs.retain(|tx| tx.first().cloned() == Some(OpTxType::Deposit as u8))); - - Self { - attributes, - parent: self.parent, - derived_from: self.derived_from, - is_last_in_span: self.is_last_in_span, - } - } - - /// Returns the number of transactions in the attributes. - pub fn count_transactions(&self) -> u64 { - self.attributes().decoded_transactions().count().try_into().unwrap() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - - #[test] - fn test_op_attributes_with_parent() { - let attributes = OpPayloadAttributes::default(); - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes.clone(), parent, None, is_last_in_span); - - assert_eq!(op_attributes_with_parent.attributes(), &attributes); - assert_eq!(op_attributes_with_parent.parent(), &parent); - assert_eq!(op_attributes_with_parent.is_last_in_span(), is_last_in_span); - assert_eq!(op_attributes_with_parent.derived_from(), None); - } - - /// Test that the [`OpAttributesWithParent::as_deposits_only`] method strips out all - /// transactions that are not deposits. 
- #[test] - fn test_op_attributes_with_parent_as_deposits_only() { - let attributes = OpPayloadAttributes { - transactions: Some(vec![ - vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), - vec![OpTxType::Legacy as u8, 0x0, 0x11, 0x21].into(), - vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), - vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), - vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), - vec![].into(), - ]), - ..OpPayloadAttributes::default() - }; - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); - let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); - - assert_eq!( - deposits_only_attributes.attributes().transactions, - Some(vec![vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into()]) - ); - } - - #[test] - fn test_op_attributes_with_parent_as_deposits_multi_deposits() { - let attributes = OpPayloadAttributes { - transactions: Some(vec![ - vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), - vec![OpTxType::Legacy as u8, 0x0, 0x11, 0x21].into(), - vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), - vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), - vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), - vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), - vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), - vec![].into(), - ]), - ..OpPayloadAttributes::default() - }; - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); - let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); - - assert_eq!( - deposits_only_attributes.attributes().transactions, - Some(vec![ - vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), - vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), - vec![OpTxType::Deposit as u8, 0x56, 0x31, 
0x41].into(), - ]) - ); - } - - /// Test that the [`OpAttributesWithParent::as_deposits_only`] method strips out all - /// transactions that are not deposits. - #[test] - fn test_op_attributes_with_parent_as_deposits_no_deposits() { - let attributes = OpPayloadAttributes { - transactions: Some(vec![ - vec![OpTxType::Legacy as u8, 0x0, 0x11, 0x21].into(), - vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), - vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), - vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), - vec![].into(), - ]), - ..OpPayloadAttributes::default() - }; - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); - let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); - - assert_eq!(deposits_only_attributes.attributes().transactions, Some(vec![])); - } - - #[test] - fn test_op_attributes_with_parent_as_deposits_only_deposits() { - let attributes = OpPayloadAttributes { - transactions: Some(vec![ - vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), - vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), - vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), - vec![].into(), - ]), - ..OpPayloadAttributes::default() - }; - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); - let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); - - assert_eq!( - deposits_only_attributes.attributes().transactions, - Some(vec![ - vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), - vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), - vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), - ]) - ); - } - - #[test] - fn test_op_attributes_with_parent_as_deposits_no_txs() { - let attributes = - OpPayloadAttributes { transactions: None, 
..OpPayloadAttributes::default() }; - let parent = L2BlockInfo::default(); - let is_last_in_span = true; - let op_attributes_with_parent = - OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); - let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); - - assert_eq!(deposits_only_attributes.attributes().transactions, None); - } -} diff --git a/kona/crates/protocol/protocol/src/batch/payload.rs b/kona/crates/protocol/protocol/src/batch/payload.rs deleted file mode 100644 index 62219c9f0fb..00000000000 --- a/kona/crates/protocol/protocol/src/batch/payload.rs +++ /dev/null @@ -1,197 +0,0 @@ -//! Raw Span Batch Payload - -use super::MAX_SPAN_BATCH_ELEMENTS; -use crate::{SpanBatchBits, SpanBatchError, SpanBatchTransactions, SpanDecodingError}; -use alloc::vec::Vec; -use alloy_primitives::bytes; - -/// Span Batch Payload -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct SpanBatchPayload { - /// Number of L2 block in the span - pub block_count: u64, - /// Standard span-batch bitlist of blockCount bits. Each bit indicates if the L1 origin is - /// changed at the L2 block. - pub origin_bits: SpanBatchBits, - /// List of transaction counts for each L2 block - pub block_tx_counts: Vec<u64>, - /// Transactions encoded in SpanBatch specs - pub txs: SpanBatchTransactions, -} - -impl SpanBatchPayload { - /// Decodes a [`SpanBatchPayload`] from a reader. - pub fn decode_payload(r: &mut &[u8]) -> Result<Self, SpanBatchError> { - let mut payload = Self::default(); - payload.decode_block_count(r)?; - payload.decode_origin_bits(r)?; - payload.decode_block_tx_counts(r)?; - payload.decode_txs(r)?; - Ok(payload) - } - - /// Encodes a [`SpanBatchPayload`] into a writer. - pub fn encode_payload(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { - self.encode_block_count(w); - self.encode_origin_bits(w)?; - self.encode_block_tx_counts(w); - self.encode_txs(w) - } - - /// Decodes the origin bits from a reader. 
- pub fn decode_origin_bits(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { - if self.block_count > MAX_SPAN_BATCH_ELEMENTS { - return Err(SpanBatchError::TooBigSpanBatchSize); - } - - self.origin_bits = SpanBatchBits::decode(r, self.block_count as usize)?; - Ok(()) - } - - /// Decode a block count from a reader. - pub fn decode_block_count(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { - let (block_count, remaining) = unsigned_varint::decode::u64(r) - .map_err(|_| SpanBatchError::Decoding(SpanDecodingError::BlockCount))?; - // The number of transactions in a single L2 block cannot be greater than - // [MAX_SPAN_BATCH_ELEMENTS]. - if block_count > MAX_SPAN_BATCH_ELEMENTS { - return Err(SpanBatchError::TooBigSpanBatchSize); - } - if block_count == 0 { - return Err(SpanBatchError::EmptySpanBatch); - } - self.block_count = block_count; - *r = remaining; - Ok(()) - } - - /// Decode block transaction counts from a reader. - pub fn decode_block_tx_counts(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { - // Initially allocate the vec with the block count, to reduce re-allocations in the first - // few blocks. - let mut block_tx_counts = Vec::with_capacity(self.block_count as usize); - - for _ in 0..self.block_count { - let (block_tx_count, remaining) = unsigned_varint::decode::u64(r) - .map_err(|_| SpanBatchError::Decoding(SpanDecodingError::BlockTxCounts))?; - - // The number of transactions in a single L2 block cannot be greater than - // [MAX_SPAN_BATCH_ELEMENTS]. - if block_tx_count > MAX_SPAN_BATCH_ELEMENTS { - return Err(SpanBatchError::TooBigSpanBatchSize); - } - block_tx_counts.push(block_tx_count); - *r = remaining; - } - self.block_tx_counts = block_tx_counts; - Ok(()) - } - - /// Decode transactions from a reader. 
- pub fn decode_txs(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { - if self.block_tx_counts.is_empty() { - return Err(SpanBatchError::EmptySpanBatch); - } - - let total_block_tx_count = - self.block_tx_counts.iter().try_fold(0u64, |acc, block_tx_count| { - acc.checked_add(*block_tx_count).ok_or(SpanBatchError::TooBigSpanBatchSize) - })?; - - // The total number of transactions in a span batch cannot be greater than - // [MAX_SPAN_BATCH_ELEMENTS]. - if total_block_tx_count > MAX_SPAN_BATCH_ELEMENTS { - return Err(SpanBatchError::TooBigSpanBatchSize); - } - self.txs.total_block_tx_count = total_block_tx_count; - self.txs.decode(r)?; - Ok(()) - } - - /// Encode the origin bits into a writer. - pub fn encode_origin_bits(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { - SpanBatchBits::encode(w, self.block_count as usize, &self.origin_bits) - } - - /// Encode the block count into a writer. - pub fn encode_block_count(&self, w: &mut dyn bytes::BufMut) { - let mut u64_varint_buf = [0u8; 10]; - w.put_slice(unsigned_varint::encode::u64(self.block_count, &mut u64_varint_buf)); - } - - /// Encode the block transaction counts into a writer. - pub fn encode_block_tx_counts(&self, w: &mut dyn bytes::BufMut) { - let mut u64_varint_buf = [0u8; 10]; - for block_tx_count in &self.block_tx_counts { - u64_varint_buf.fill(0); - w.put_slice(unsigned_varint::encode::u64(*block_tx_count, &mut u64_varint_buf)); - } - } - - /// Encode the transactions into a writer. 
- pub fn encode_txs(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { - self.txs.encode(w) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - - #[test] - fn test_decode_origin_bits() { - let block_count = 10; - let encoded = vec![2; block_count / 8 + 1]; - let mut payload = - SpanBatchPayload { block_count: block_count as u64, ..Default::default() }; - payload.decode_origin_bits(&mut encoded.as_slice()).unwrap(); - assert_eq!(payload.origin_bits, SpanBatchBits::new(vec![2; block_count / 8 + 1])); - } - - #[test] - fn test_zero_block_count() { - let mut u64_varint_buf = [0; 10]; - let mut encoded = unsigned_varint::encode::u64(0, &mut u64_varint_buf); - let mut payload = SpanBatchPayload::default(); - let err = payload.decode_block_count(&mut encoded).unwrap_err(); - assert_eq!(err, SpanBatchError::EmptySpanBatch); - } - - #[test] - fn test_decode_block_count() { - let block_count = MAX_SPAN_BATCH_ELEMENTS; - let mut u64_varint_buf = [0; 10]; - let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); - let mut payload = SpanBatchPayload::default(); - payload.decode_block_count(&mut encoded).unwrap(); - assert_eq!(payload.block_count, block_count); - } - - #[test] - fn test_decode_block_count_errors() { - let block_count = MAX_SPAN_BATCH_ELEMENTS + 1; - let mut u64_varint_buf = [0; 10]; - let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); - let mut payload = SpanBatchPayload::default(); - let err = payload.decode_block_count(&mut encoded).unwrap_err(); - assert_eq!(err, SpanBatchError::TooBigSpanBatchSize); - } - - #[test] - fn test_decode_block_tx_counts() { - let block_count = 2; - let mut u64_varint_buf = [0; 10]; - let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); - let mut payload = SpanBatchPayload::default(); - payload.decode_block_count(&mut encoded).unwrap(); - let mut r: Vec<u8> = Vec::new(); - for _ in 0..2 { - let mut buf = [0u8; 
10]; - let encoded = unsigned_varint::encode::u64(2, &mut buf); - r.append(&mut encoded.to_vec()); - } - payload.decode_block_tx_counts(&mut r.as_slice()).unwrap(); - assert_eq!(payload.block_tx_counts, vec![2, 2]); - } -} diff --git a/kona/crates/protocol/protocol/src/batch/tx.rs b/kona/crates/protocol/protocol/src/batch/tx.rs deleted file mode 100644 index a761c1fd82c..00000000000 --- a/kona/crates/protocol/protocol/src/batch/tx.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Transaction Types - -use crate::Frame; -use alloc::vec::Vec; -use alloy_primitives::Bytes; - -/// BatchTransaction is a set of [`Frame`]s that can be [Into::into] [`Bytes`]. -/// if the size exceeds the desired threshold. -#[derive(Debug, Clone)] -pub struct BatchTransaction { - /// The frames in the batch. - pub frames: Vec<Frame>, - /// The size of the potential transaction. - pub size: usize, -} - -impl BatchTransaction { - /// Returns the size of the transaction. - pub const fn size(&self) -> usize { - self.size - } - - /// Returns if the transaction has reached the max frame count. - pub const fn is_full(&self, max_frames: u16) -> bool { - self.frames.len() as u16 >= max_frames - } - - /// Returns the [`BatchTransaction`] as a [`Bytes`]. 
- pub fn to_bytes(&self) -> Bytes { - self.frames - .iter() - .fold(Vec::new(), |mut acc, frame| { - acc.append(&mut frame.encode()); - acc - }) - .into() - } -} - -#[cfg(test)] -mod test { - use super::*; - use alloc::vec; - - #[test] - fn test_batch_transaction() { - let frame = Frame { id: [0xFF; 16], number: 0xEE, data: vec![0xDD; 50], is_last: true }; - let batch = BatchTransaction { frames: vec![frame.clone(); 5], size: 5 * frame.size() }; - let bytes: Bytes = batch.to_bytes(); - let bytes = - [crate::DERIVATION_VERSION_0].iter().chain(bytes.iter()).copied().collect::<Vec<_>>(); - let frames = Frame::parse_frames(&bytes).unwrap(); - assert_eq!(frames, vec![frame; 5]); - } -} diff --git a/kona/crates/protocol/protocol/src/block.rs b/kona/crates/protocol/protocol/src/block.rs deleted file mode 100644 index 0ef02993629..00000000000 --- a/kona/crates/protocol/protocol/src/block.rs +++ /dev/null @@ -1,586 +0,0 @@ -//! Block Types for Optimism. - -use crate::{DecodeError, L1BlockInfoTx}; -use alloc::vec::Vec; -use alloy_consensus::{Block, Transaction, Typed2718}; -use alloy_eips::{BlockNumHash, eip2718::Eip2718Error, eip7685::EMPTY_REQUESTS_HASH}; -use alloy_primitives::B256; -use alloy_rpc_types_engine::{CancunPayloadFields, PraguePayloadFields}; -use alloy_rpc_types_eth::Block as RpcBlock; -use derive_more::Display; -use kona_genesis::ChainGenesis; -use op_alloy_consensus::{OpBlock, OpTxEnvelope}; -use op_alloy_rpc_types_engine::{OpExecutionPayload, OpExecutionPayloadSidecar, OpPayloadError}; - -/// Block Header Info -#[derive(Debug, Clone, Display, Copy, Eq, Hash, PartialEq, Default)] -#[display( - "BlockInfo {{ hash: {hash}, number: {number}, parent_hash: {parent_hash}, timestamp: {timestamp} }}" -)] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -pub struct BlockInfo { - /// The block hash - pub hash: 
B256, - /// The block number - pub number: u64, - /// The parent block hash - pub parent_hash: B256, - /// The block timestamp - pub timestamp: u64, -} - -impl BlockInfo { - /// Instantiates a new [`BlockInfo`]. - pub const fn new(hash: B256, number: u64, parent_hash: B256, timestamp: u64) -> Self { - Self { hash, number, parent_hash, timestamp } - } - - /// Returns the block ID. - pub const fn id(&self) -> BlockNumHash { - BlockNumHash { hash: self.hash, number: self.number } - } - - /// Returns `true` if this [`BlockInfo`] is the direct parent of the given block. - pub fn is_parent_of(&self, block: &Self) -> bool { - self.number + 1 == block.number && self.hash == block.parent_hash - } -} - -impl<T> From<Block<T>> for BlockInfo { - fn from(block: Block<T>) -> Self { - Self::from(&block) - } -} - -impl<T> From<&Block<T>> for BlockInfo { - fn from(block: &Block<T>) -> Self { - Self { - hash: block.header.hash_slow(), - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - } - } -} - -impl<T> From<RpcBlock<T>> for BlockInfo { - fn from(block: RpcBlock<T>) -> Self { - Self { - hash: block.header.hash_slow(), - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - } - } -} - -impl<T> From<&RpcBlock<T>> for BlockInfo { - fn from(block: &RpcBlock<T>) -> Self { - Self { - hash: block.header.hash_slow(), - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - } - } -} - -/// L2 Block Header Info -#[derive(Debug, Display, Clone, Copy, Hash, Eq, PartialEq, Default)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -#[display( - "L2BlockInfo {{ block_info: {block_info}, l1_origin: {l1_origin:?}, seq_num: {seq_num} }}" -)] -pub struct L2BlockInfo { - /// The base [`BlockInfo`] - #[cfg_attr(feature = "serde", 
serde(flatten))] - pub block_info: BlockInfo, - /// The L1 origin [`BlockNumHash`] - #[cfg_attr(feature = "serde", serde(rename = "l1origin", alias = "l1Origin"))] - pub l1_origin: BlockNumHash, - /// The sequence number of the L2 block - #[cfg_attr(feature = "serde", serde(rename = "sequenceNumber", alias = "seqNum"))] - pub seq_num: u64, -} - -impl L2BlockInfo { - /// Returns the block hash. - pub const fn hash(&self) -> B256 { - self.block_info.hash - } -} - -#[cfg(feature = "arbitrary")] -impl arbitrary::Arbitrary<'_> for L2BlockInfo { - fn arbitrary(g: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result<Self> { - Ok(Self { - block_info: g.arbitrary()?, - l1_origin: BlockNumHash { number: g.arbitrary()?, hash: g.arbitrary()? }, - seq_num: g.arbitrary()?, - }) - } -} - -/// An error that can occur when converting an OP [`Block`] to [`L2BlockInfo`]. -#[derive(Debug, thiserror::Error)] -pub enum FromBlockError { - /// The genesis block hash does not match the expected value. - #[error("Invalid genesis hash")] - InvalidGenesisHash, - /// The L2 block is missing the L1 info deposit transaction. - #[error("L2 block is missing L1 info deposit transaction ({0})")] - MissingL1InfoDeposit(B256), - /// The first payload transaction has an unexpected type. - #[error("First payload transaction has unexpected type: {0}")] - UnexpectedTxType(u8), - /// Failed to decode the first transaction into an OP transaction. - #[error("Failed to decode the first transaction into an OP transaction: {0}")] - TxEnvelopeDecodeError(Eip2718Error), - /// The first payload transaction is not a deposit transaction. - #[error("First payload transaction is not a deposit transaction, type: {0}")] - FirstTxNonDeposit(u8), - /// Failed to decode the [`L1BlockInfoTx`] from the deposit transaction. - #[error("Failed to decode the L1BlockInfoTx from the deposit transaction: {0}")] - BlockInfoDecodeError(#[from] DecodeError), - /// Failed to convert [`OpExecutionPayload`] to [`OpBlock`]. 
- #[error(transparent)] - OpPayload(#[from] OpPayloadError), -} - -impl PartialEq<Self> for FromBlockError { - fn eq(&self, other: &Self) -> bool { - match (self, other) { - (Self::InvalidGenesisHash, Self::InvalidGenesisHash) => true, - (Self::MissingL1InfoDeposit(a), Self::MissingL1InfoDeposit(b)) => a == b, - (Self::UnexpectedTxType(a), Self::UnexpectedTxType(b)) => a == b, - (Self::TxEnvelopeDecodeError(_), Self::TxEnvelopeDecodeError(_)) => true, - (Self::FirstTxNonDeposit(a), Self::FirstTxNonDeposit(b)) => a == b, - (Self::BlockInfoDecodeError(a), Self::BlockInfoDecodeError(b)) => a == b, - _ => false, - } - } -} - -impl From<Eip2718Error> for FromBlockError { - fn from(value: Eip2718Error) -> Self { - Self::TxEnvelopeDecodeError(value) - } -} - -impl L2BlockInfo { - /// Instantiates a new [`L2BlockInfo`]. - pub const fn new(block_info: BlockInfo, l1_origin: BlockNumHash, seq_num: u64) -> Self { - Self { block_info, l1_origin, seq_num } - } - - /// Constructs an [`L2BlockInfo`] from a given OP [`Block`] and [`ChainGenesis`]. 
- pub fn from_block_and_genesis<T: Typed2718 + AsRef<OpTxEnvelope>>( - block: &Block<T>, - genesis: &ChainGenesis, - ) -> Result<Self, FromBlockError> { - let block_info = BlockInfo::from(block); - - let (l1_origin, sequence_number) = if block_info.number == genesis.l2.number { - if block_info.hash != genesis.l2.hash { - return Err(FromBlockError::InvalidGenesisHash); - } - (genesis.l1, 0) - } else { - if block.body.transactions.is_empty() { - return Err(FromBlockError::MissingL1InfoDeposit(block_info.hash)); - } - - let tx = block.body.transactions[0].as_ref(); - let Some(tx) = tx.as_deposit() else { - return Err(FromBlockError::FirstTxNonDeposit(tx.ty())); - }; - - let l1_info = L1BlockInfoTx::decode_calldata(tx.input().as_ref()) - .map_err(FromBlockError::BlockInfoDecodeError)?; - (l1_info.id(), l1_info.sequence_number()) - }; - - Ok(Self { block_info, l1_origin, seq_num: sequence_number }) - } - - /// Constructs an [`L2BlockInfo`] From a given [`OpExecutionPayload`] and [`ChainGenesis`]. - pub fn from_payload_and_genesis( - payload: OpExecutionPayload, - parent_beacon_block_root: Option<B256>, - genesis: &ChainGenesis, - ) -> Result<Self, FromBlockError> { - let block: OpBlock = match payload { - OpExecutionPayload::V4(_) => { - let sidecar = OpExecutionPayloadSidecar::v4( - CancunPayloadFields::new( - parent_beacon_block_root.unwrap_or_default(), - Vec::new(), - ), - PraguePayloadFields::new(EMPTY_REQUESTS_HASH), - ); - payload.try_into_block_with_sidecar(&sidecar)? - } - OpExecutionPayload::V3(_) => { - let sidecar = OpExecutionPayloadSidecar::v3(CancunPayloadFields::new( - parent_beacon_block_root.unwrap_or_default(), - Vec::new(), - )); - payload.try_into_block_with_sidecar(&sidecar)? 
- } - _ => payload.try_into_block()?, - }; - Self::from_block_and_genesis(&block, genesis) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::{string::ToString, vec}; - use alloy_consensus::{Header, TxEnvelope}; - use alloy_primitives::b256; - use op_alloy_consensus::OpBlock; - - #[test] - fn test_rpc_block_into_info() { - let block: alloy_rpc_types_eth::Block<OpTxEnvelope> = alloy_rpc_types_eth::Block { - header: alloy_rpc_types_eth::Header { - hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), - inner: alloy_consensus::Header { - number: 1, - parent_hash: b256!( - "0202020202020202020202020202020202020202020202020202020202020202" - ), - timestamp: 1, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - let expected = BlockInfo { - hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), - number: 1, - parent_hash: b256!("0202020202020202020202020202020202020202020202020202020202020202"), - timestamp: 1, - }; - let block = block.into_consensus(); - assert_eq!(BlockInfo::from(block), expected); - } - - #[test] - fn test_from_block_and_genesis() { - use crate::test_utils::RAW_BEDROCK_INFO_TX; - use alloc::vec; - let genesis = ChainGenesis { - l1: BlockNumHash { hash: B256::from([4; 32]), number: 2 }, - l2: BlockNumHash { hash: B256::from([5; 32]), number: 1 }, - ..Default::default() - }; - let tx_env = alloy_rpc_types_eth::Transaction { - inner: alloy_consensus::transaction::Recovered::new_unchecked( - op_alloy_consensus::OpTxEnvelope::Deposit(alloy_primitives::Sealed::new( - op_alloy_consensus::TxDeposit { - input: alloy_primitives::Bytes::from(&RAW_BEDROCK_INFO_TX), - ..Default::default() - }, - )), - Default::default(), - ), - block_hash: None, - block_number: Some(1), - effective_gas_price: Some(1), - transaction_index: Some(0), - }; - let block: alloy_rpc_types_eth::Block<op_alloy_rpc_types::Transaction> = - alloy_rpc_types_eth::Block { - header: 
alloy_rpc_types_eth::Header { - hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), - inner: alloy_consensus::Header { - number: 3, - parent_hash: b256!( - "0202020202020202020202020202020202020202020202020202020202020202" - ), - timestamp: 1, - ..Default::default() - }, - ..Default::default() - }, - transactions: alloy_rpc_types_eth::BlockTransactions::Full(vec![ - op_alloy_rpc_types::Transaction { - inner: tx_env, - deposit_nonce: None, - deposit_receipt_version: None, - }, - ]), - ..Default::default() - }; - let expected = L2BlockInfo { - block_info: BlockInfo { - hash: b256!("e65ecd961cee8e4d2d6e1d424116f6fe9a794df0244578b6d5860a3d2dfcd97e"), - number: 3, - parent_hash: b256!( - "0202020202020202020202020202020202020202020202020202020202020202" - ), - timestamp: 1, - }, - l1_origin: BlockNumHash { - hash: b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), - number: 18334955, - }, - seq_num: 4, - }; - let block = block.into_consensus(); - let derived = L2BlockInfo::from_block_and_genesis(&block, &genesis).unwrap(); - assert_eq!(derived, expected); - } - - #[test] - fn test_from_block_error_partial_eq() { - assert_eq!(FromBlockError::InvalidGenesisHash, FromBlockError::InvalidGenesisHash); - assert_eq!( - FromBlockError::MissingL1InfoDeposit(b256!( - "04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b" - )), - FromBlockError::MissingL1InfoDeposit(b256!( - "04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b" - )), - ); - assert_eq!(FromBlockError::UnexpectedTxType(1), FromBlockError::UnexpectedTxType(1)); - assert_eq!( - FromBlockError::TxEnvelopeDecodeError(Eip2718Error::UnexpectedType(1)), - FromBlockError::TxEnvelopeDecodeError(Eip2718Error::UnexpectedType(1)) - ); - assert_eq!(FromBlockError::FirstTxNonDeposit(1), FromBlockError::FirstTxNonDeposit(1)); - assert_eq!( - FromBlockError::BlockInfoDecodeError(DecodeError::InvalidSelector), - 
FromBlockError::BlockInfoDecodeError(DecodeError::InvalidSelector) - ); - } - - #[test] - fn test_l2_block_info_invalid_genesis_hash() { - let genesis = ChainGenesis { - l1: BlockNumHash { hash: B256::from([4; 32]), number: 2 }, - l2: BlockNumHash { hash: B256::from([5; 32]), number: 1 }, - ..Default::default() - }; - let op_block = OpBlock { - header: Header { - number: 1, - parent_hash: B256::from([2; 32]), - timestamp: 1, - ..Default::default() - }, - body: Default::default(), - }; - let err = L2BlockInfo::from_block_and_genesis(&op_block, &genesis).unwrap_err(); - assert_eq!(err, FromBlockError::InvalidGenesisHash); - } - - #[test] - fn test_from_block() { - let block: Block<TxEnvelope, Header> = Block { - header: Header { - number: 1, - parent_hash: B256::from([2; 32]), - timestamp: 1, - ..Default::default() - }, - body: Default::default(), - }; - let block_info = BlockInfo::from(&block); - assert_eq!( - block_info, - BlockInfo { - hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - } - ); - } - - #[test] - fn test_block_info_display() { - let hash = B256::from([1; 32]); - let parent_hash = B256::from([2; 32]); - let block_info = BlockInfo::new(hash, 1, parent_hash, 1); - assert_eq!( - block_info.to_string(), - "BlockInfo { hash: 0x0101010101010101010101010101010101010101010101010101010101010101, number: 1, parent_hash: 0x0202020202020202020202020202020202020202020202020202020202020202, timestamp: 1 }" - ); - } - - #[test] - #[cfg(feature = "arbitrary")] - fn test_arbitrary_block_info() { - use arbitrary::Arbitrary; - use rand::Rng; - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - BlockInfo::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); - } - - #[test] - #[cfg(feature = "arbitrary")] - fn test_arbitrary_l2_block_info() { - use arbitrary::Arbitrary; - use rand::Rng; - let mut bytes 
= [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - L2BlockInfo::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); - } - - #[test] - fn test_block_id_bounds() { - let block_info = BlockInfo { - hash: B256::from([1; 32]), - number: 0, - parent_hash: B256::from([2; 32]), - timestamp: 1, - }; - let expected = BlockNumHash { hash: B256::from([1; 32]), number: 0 }; - assert_eq!(block_info.id(), expected); - - let block_info = BlockInfo { - hash: B256::from([1; 32]), - number: u64::MAX, - parent_hash: B256::from([2; 32]), - timestamp: 1, - }; - let expected = BlockNumHash { hash: B256::from([1; 32]), number: u64::MAX }; - assert_eq!(block_info.id(), expected); - } - - #[test] - #[cfg(feature = "serde")] - fn test_deserialize_block_info() { - let block_info = BlockInfo { - hash: B256::from([1; 32]), - number: 1, - parent_hash: B256::from([2; 32]), - timestamp: 1, - }; - - let json = r#"{ - "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", - "number": 1, - "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", - "timestamp": 1 - }"#; - - let deserialized: BlockInfo = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, block_info); - } - - #[test] - #[cfg(feature = "serde")] - fn test_deserialize_block_info_with_hex() { - let block_info = BlockInfo { - hash: B256::from([1; 32]), - number: 1, - parent_hash: B256::from([2; 32]), - timestamp: 1, - }; - - let json = r#"{ - "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", - "number": 1, - "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", - "timestamp": 1 - }"#; - - let deserialized: BlockInfo = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, block_info); - } - - #[test] - #[cfg(feature = "serde")] - fn test_deserialize_l2_block_info() { - let l2_block_info = L2BlockInfo { - block_info: BlockInfo { - hash: B256::from([1; 32]), - number: 1, - parent_hash: 
B256::from([2; 32]), - timestamp: 1, - }, - l1_origin: BlockNumHash { hash: B256::from([3; 32]), number: 2 }, - seq_num: 3, - }; - - let json = r#"{ - "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", - "number": 1, - "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", - "timestamp": 1, - "l1origin": { - "hash": "0x0303030303030303030303030303030303030303030303030303030303030303", - "number": 2 - }, - "sequenceNumber": 3 - }"#; - - let deserialized: L2BlockInfo = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, l2_block_info); - } - - #[test] - #[cfg(feature = "serde")] - fn test_deserialize_l2_block_info_hex() { - let l2_block_info = L2BlockInfo { - block_info: BlockInfo { - hash: B256::from([1; 32]), - number: 1, - parent_hash: B256::from([2; 32]), - timestamp: 1, - }, - l1_origin: BlockNumHash { hash: B256::from([3; 32]), number: 2 }, - seq_num: 3, - }; - - let json = r#"{ - "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", - "number": 1, - "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", - "timestamp": 1, - "l1origin": { - "hash": "0x0303030303030303030303030303030303030303030303030303030303030303", - "number": 2 - }, - "sequenceNumber": 3 - }"#; - - let deserialized: L2BlockInfo = serde_json::from_str(json).unwrap(); - assert_eq!(deserialized, l2_block_info); - } - - #[test] - fn test_is_parent_of() { - let parent = BlockInfo { - hash: B256::from([1u8; 32]), - number: 10, - parent_hash: B256::from([0u8; 32]), - timestamp: 1000, - }; - let child = BlockInfo { - hash: B256::from([2u8; 32]), - number: 11, - parent_hash: parent.hash, - timestamp: 1010, - }; - let unrelated = BlockInfo { - hash: B256::from([3u8; 32]), - number: 12, - parent_hash: B256::from([9u8; 32]), - timestamp: 1020, - }; - - assert!(parent.is_parent_of(&child)); - assert!(!child.is_parent_of(&parent)); - assert!(!parent.is_parent_of(&unrelated)); - } -} diff 
--git a/kona/crates/protocol/protocol/src/brotli.rs b/kona/crates/protocol/protocol/src/brotli.rs deleted file mode 100644 index bd20ece7414..00000000000 --- a/kona/crates/protocol/protocol/src/brotli.rs +++ /dev/null @@ -1,107 +0,0 @@ -//! Contains brotli decompression utilities. - -use alloc::{vec, vec::Vec}; -use alloc_no_stdlib::*; -use brotli::*; -use core::ops; - -use crate::MAX_SPAN_BATCH_ELEMENTS; - -/// A frame decompression error. -#[derive(thiserror::Error, Debug, PartialEq, Eq)] -pub enum BrotliDecompressionError { - /// The buffer exceeds the [`MAX_SPAN_BATCH_ELEMENTS`] protocol parameter. - #[error("The batch exceeds the maximum number of elements: {max_size}", max_size = MAX_SPAN_BATCH_ELEMENTS)] - BatchTooLarge, -} - -/// Decompresses the given bytes data using the Brotli decompressor implemented -/// in the [`brotli`](https://crates.io/crates/brotli) crate. -pub fn decompress_brotli( - data: &[u8], - max_rlp_bytes_per_channel: usize, -) -> Result<Vec<u8>, BrotliDecompressionError> { - declare_stack_allocator_struct!(MemPool, 4096, stack); - - let mut u8_buffer = vec![0; 32 * 1024 * 1024].into_boxed_slice(); - let mut u32_buffer = vec![0; 1024 * 1024].into_boxed_slice(); - let mut hc_buffer = vec![HuffmanCode::default(); 4 * 1024 * 1024].into_boxed_slice(); - let u8_allocator = MemPool::<u8>::new_allocator(&mut u8_buffer, bzero); - let u32_allocator = MemPool::<u32>::new_allocator(&mut u32_buffer, bzero); - let hc_allocator = MemPool::<HuffmanCode>::new_allocator(&mut hc_buffer, bzero); - let mut brotli_state = BrotliState::new(u8_allocator, u32_allocator, hc_allocator); - - // Setup the decompressor inputs and outputs - let mut output = vec![0; data.len()]; - let mut available_in = data.len(); - let mut input_offset = 0; - let mut available_out = output.len(); - let mut output_offset = 0; - let mut written = 0; - - // Decompress the data stream until success or failure - loop { - match brotli::BrotliDecompressStream( - &mut available_in, - &mut 
input_offset, - data, - &mut available_out, - &mut output_offset, - &mut output, - &mut written, - &mut brotli_state, - ) { - brotli::BrotliResult::ResultSuccess => break, - brotli::BrotliResult::NeedsMoreOutput => { - // Resize the output buffer to double the size, following standard - // practice for buffer resizing in streams. - let old_len = output.len(); - let new_len = old_len * 2; - - if new_len > max_rlp_bytes_per_channel { - return Err(BrotliDecompressionError::BatchTooLarge); - } - - output.resize(new_len, 0); - available_out += old_len; - } - _ => break, - } - } - - // Truncate the output buffer to the written bytes - output.truncate(written); - - Ok(output) -} - -#[cfg(test)] -mod test { - use super::*; - use alloy_primitives::hex; - use kona_genesis::MAX_RLP_BYTES_PER_CHANNEL_FJORD; - - #[test] - fn test_decompress_brotli() { - let expected = hex!("75ed184249e9bc19675e"); - let compressed = hex!("8b048075ed184249e9bc19675e03"); - - let decompressed = - decompress_brotli(&compressed, MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize).unwrap(); - assert_eq!(decompressed, expected); - } - - #[test] - fn test_decompress_batch_brotli() { - let raw_batch_decompressed = hex!( - 
"b930d700f930d3a0a8d01076e1235e0c33674a449c13fc37ee57f9ea065bf41af3aa03d5981f1432833bd0b0a0652a19cd927ae4a22e8f8069385002252d78e1c3cc91a59ac188708b7074449184766cbcf3f93085b903ee02f903ea82014d884062b70d4e215ee885019d47a37c8543ae9f382a8310c97b9451294f5cd6e52c003ecfb412ca8b42705c618d29883782dace9d900000b903690d669b0cd98174ac3b57393839029ac04ad36454109851443b4f6580664fe06766a7dea5b1ed31e14e7c11aa738eecb86e979f874873cd3d7ca9481681b4b17d134316e7bbe828ef69339ef85c6f0e9dcdfe1dc85309effb487569383d5464b519bdc1c85fffc72bfe93d4081a3e1b75e5dd39f95a91df0997a22d8fbdeca57a8b35b4f0e277ec8502cc55581a94eec1d1000b2921b4d7c3985ace205713641d03c3975e4049e13b3d2c5926b224684e38beb3b8d2e5d4060b109aafc3f2d144783aadf6086aa1d5a931d21282711484a9c0537bd4981fc222444f2c057211708e70dc4223063cbf39e4af0b795d3ec0dfba32391611d151145c1b6bb33d53ce2bb7983bd7b6c1516f7a1a719fd876f4b20910aba76c16dbfc57199a60e2ab938bc285613c3802c17aa03cb9654f5142d607bac01293c9aaf4e58b422c543f7e5e458af0b7cf57f33109558bef71e8b5506da723d996eb8e2c265b1cae43dba571d07d3ea1bcfdcb73089597e3744344e049bf21b4244d5aff60d559010b69a6335f4bb21178de504f50808204da652c7767dbf11f2a34b4fb710e6df9ad8810aa75dcdb2c99dfe9bf898912817e490b4982d44fe09f8adb43e0da2a0c824a9069ce8cc36b5fb0074c2db895ee92d92fa6b7efdf5c97ae05ae27556bc07ddc9d9d6261a53e3a10c350c3b1da26b27b345768e17da7dabfe6e30e019c88ef4a0e8df840bbd3fbbb639edf775449d8be7510cc811564789b861372fe97f7b5b1389f20c9872517634e9225669ee80cf077f9c8606cdbad53819a875ecd9f7b6d778c1dc302ca19ae67ffb054eb99206fc90eacbac8177712d0b4c72700df3f5e2c88fb4e9c8284cefa66390a78605ad9320aee34f72f3cb263020204393d9359a65f48b0e6e942b016a1f2c5bd6579f0a65997635ab15fa38db76ae8a5d3be516441499819bfaf730ebaec389db082e41443660dcc6280315154888b9e726b971237fae5e06b01958aac081398c814e446a003039dd090c0efa5d39735ed0ab46c7b4e4c960ae414b045fd19117089e65aaf3779cc9045d6e62538b1b75c2689d23ba3c08ceed46d4fdf9b969b34a1903ebd96a3a6b091842480e638b095c1ec11bb5c599668ea1b0a5a714d13462edb39dfd992b569897ac8f45c587182770631c262fc459afa6f23d5670eee2aac2
ddaa89314607d30c6bfd408980c082749ad6b48a5310ac75b880cc080a00b5d23a075615f50233ce278d11b7b0ba0ad6a01486dbf31c54aae096f0f066aa02d9feeb4771b5a37d1247a4cc58a64d392f3916b5602d9d41d97b52b391ffd47b9011801f9011482014d88a793ab3f17510b308821f5d9030532aae9831708c1940b6f262f685c8d0ff7dfc9ba9686d8f75b78923c80b89f7644852b70713a788b69f191c54ec8368a7f2675623b2369f9078516605d0d4550ff9f5b92b9da2147fa3a24cc17605f30cccedc5bacafb2bb86e2640db6654a514b8eb13d3c3ab6b5e344498de0c709dd9bef58a8af16d3efcd2c0b2cb69d6089d0af8d42baab434dea885253e42050aeec01f233e64289b2e894c680fbab4f25a653745dbd89edb19d97e35bdd4293794c69503b0e60ed9cffe7e9ab3cbbc080a0dd08ebab0802fc61ccf26c357b638a55cbcd6b366251c17e2fa52d328d9d59e5a027d334772553048d6b76fc39ddee5f85363810c235219356cb4c5c3dbf9661d5b90298f9029588e383f18817bb0d1c882c58aa6b12de88f3830a7831945c1c1314ed944220436fad3742023cba2a71c4a2886124fee993bc0000b90219fb039c014cd76a327bb9b3f59e8176f377249385e67cb1681f8eacff1dee5a5a949511438ce370f8ad6618f3af81cb1f775a0b365546dd7791b0ad71fb1f2f29154265a8175b7e518580732a5a46dae3752e1234ff779d4eb614af2c66beec964181ecd0cfd1640bb2ca2b860649c41930a60de0cc754884a780488f05d1d5833a381670b368c85bf08d6650e26122f6714056382a006fcd5f9c97f55a98d68dd9293bb1be24823eaa8cb007481dc78a7a670123976e7b6e81fc223f42637759a0c933b73ba89a1d902c0874fedeb0a97dfab298972a18378539c2894ca6df9c0a423c2e98df4c133e5e808809849785b069e323640bf93d4b82a0917aaea8fda9a3072ab9a00a4b8b9b7b3a3eb326e54231d0f6a064cdf4a1fc06c961e5087359c029b13e229fb477d6651bad52c75e503ac45002a803a7457488966cc16bbc9be5c1c9a797d0377710c028e4f05a6cb929cc1fd4018912929252e04e107ffbcbd4c81ba01ab4b11faa90be0f9f9a6a22c87257e4a2aa8283e6f71d7b9e03b5308b16525c4d79705bb0906be0e947e8075ac6ce2235356aa0a66bec39e918e47a6220b322e326bf8fd65e47778e14074c47cb62b7ef8ef956c996097d2919df7aac8ea2ed69c1fd9f1d96b6b82b411c524cacec0f4a4269821fd6766d24954b8870fb1d85f5cda0528ae18419915a8b30b25baf6a162978a4bec86009cece83017d50667a202b3fad18f8ed8b5140c97fa74e91be608fdb788202bea05f469660e363ec580825d1e2bf753c01db044279f
862720a27831744b91494f5a050fa7445e0e6156dfdb712a647ef73a2dd35b73d5cc988430c831352d4ac7e8bb90458f9045588a106e4c16d06833a881973c4c642fba1bb83068f2294050c84206ba9d32d93d144884644e5bd36fc92d0883782dace9d900000b903d9b303f8efb68766822d7eea21ca4b7c5dd79dce832c4893247f6784fe47cd7a18caea7b5b4d8bdf02da0276aca185add01fa2d16c2f1188ff7cbf6fb8c6308999037b2b92d725094d8faed86f0b1a45b55de4f36dbb71dcbf4be12fe624077213e0c170afbbbb546a343ac3f2a1333a7a7a7db7be46640a73d61b3aabc805b022be416198d809b62f99d26cf4a3bf555d40686f4b8970ec15386462bec5f2b728de0da047d6b3f3ea51f571507f32f047322fa204f0c5697cbb56b4b5c7792acaa40f02926651fa715a40e1f212c78cd4ecca285ada2c8cbb6e5dcfa3823725b44e29aacbeb9b6224f90fbc895a5980d63da46688832e9776b0666e90deacbcf8a4c559b625cf004cd04c686aaf9d7d6e2d394f5d36311f7afdcec5033daccc63c0540935f59514c9aa8ac3c2aeff48f624f2dbd38062fcd046651e92fc7ffce4dd914bb0dae704e5b26a8b73b3baef8ea022881e15666fada8e43fd621793713cb8c867775b9cdcf3b066582fc9baa705a0e1dc61a4b33b1b33ad3ba3bd0cc41b5850cadc04654dec222178709910209c6ac3db9054ef91facae2d729d7ee54898a18411b6d20d599a3de14d5375e5a9c90f3bce78479cb0f20afca895e40b576940e063587f451a8828ec2dd4a8538b4bebc39f72a6c54e379a07b7d5e0c02ccd57dbff13729bbfe5e78498c01cea12e830944fd0a123b7383fdcda97d8d9cc831e542ab6d9b36774d540b180c2bd52d46ca7f0e17d400cf3cd559b1b4e51ba93cd954777ba27a9f0327eb6c68aafe74fabca4610210db7498aecffd3164c5eef8cede655e1b42d5f54f5a52b4f5fe9698a4463f30f20693263d41074d0403a737c4d4986f0ee7fee828fb7072a80603613fb4d6c219dfa47adad433af6b437dd199f3bbc651487718b2e6d42728034c242672a98a9f36fab6d4162f4e8eb7bf2a9868cead8ad657a67f0aa50286113db972936260323d7b11353328151e80691d551bbe1f7f11774e15db4f175aeac5b91668a712c3c2399a977abb9fd9c2b53c5ba68f2c0ea353028416b36a47028f78918e2b205bf9b3bce6f1a08bd4448abc3f12a240482b4be98dcb77c74fff47e92d833735e802465e50b79d51de5a7fe45a95b650b051c61a529d5f51cd0c603a2de67a3123be1c52263e1c9167765b13ad1e01cfb27531c9203f39e8913fe0cab9d8c14b17bad0100b76c41d41d68ae3b7aeef5f6af4f66d113fd29eb9c4bf994f04decad13880d9d1eb3865a3
0e2540e86923b36369c121ef2a6a43a618aa4b15560fa806601a85be361468bd09c6dca39ad7ec44809adc0907dd0458177343a7c23330605b802f3ffd3ae61b3be952ca2effae8222e9ed0b6ea4240728a7800e4882efa7dd1ef8202bea05db690cab7dc8c52c2c375428c0aa9ead02bf44e2b1f8ee06e1cf7af25eecc13a07d967fb12e1f0073adac46e0676a6006b30d780e6a1387afec76cbd1f07016e3b9012401f9012082014d88df6f092495b7f4148840c5b5541d013c63830408e194aef36f2041e560a641af89e0ba2799ea630a9592881bc16d674ec80000b8a3afb9380f9228224c1aa59eab115ed4172b471aa2ee11b3d4ac93f4b6a33518007a798170801f4f582e188b489005d8f108e2a4acd6f7ac28852580e73b6a1590ea1af1443666f1d14affb0a9d0655a5c57cd4190b2a00c07276054641ee4204ed8a806ded2b3aaa7453c24e442992434d060b51d2255c1cc2a002264b5dadb32057f4a5d52626e0ff453e2f05f1e0d8294614916c00110853462d51d9ab7e03b7019c6c001a06028ddc42f0d3e1cd6cb1ed7377d518480626d56c80e6d15eacd42ecf2f30957a03f6e1098b300b6329997bacc5e667eeed72a38f6c4e1db7199483bc9a18267d8b90222f9021f88c0988653bce0e07388fbc67f04e5c6772e8311bd5c94eeecd6da1ee441093ef70d8c86a26f4dc4da11588853444835ec580000b901a349e745c1cca19957c43f15309935f7bf49547884332dfe6d5b8b9d61542dd88ecc61187fda813a7f700ca96e8847a33bf8552690d91ec8e8fa70c21b380c9c681b54e859add36c3c19e7fda3075ec1a3cf47ed39c89241bb73f206d7497f93c47db9a85be7135948e19809c195ccd4c9a379ed464bf77ec562e360c52b9225f103d323364a72e8a725ad2b34a355928acc6aa563b67d120ddf54cf68f710624499ddeb30b0c94b8722ef2d641ae49f17f4a916d54350ec483ec5bcfd9748e0a228c3e73cee9ea248ad85060ac51b3e6834e1f771f725a466affa28453ad3726d794caab223fa76c8b994ac5d3a1e8ee830e4fadfe0786174364af3109c04d7d607aca17933c4366d44d9c5376ca34febaaa612707eec4e2fc5c6b1668b3450340938d17e5552df96ae84a905d069f9e3455bccab30640a0720f9b4598d8f82ebd19bd32b7e82165303123a0ed80c57375174c08d32ad3ae354251c97316b2977f3a2fdf2dba1c595093c88275badc54e3aad65f77c56f55d04b1e6d668406058ea01da2364fc207659b028d9c55371c776f732e63255dd177b95f857e3cbdb4c66fabd8202bda060830662664d96755362addcc0908287c99c60761cf9c7a613058894eab6e599a059cd2461d4a89458dc68adf287fee71a783dab0aaa05587a21b4aba1c
a4f5efeb9017801f9017482014d88d15c09b7ee8f9562880ae58585f383aacc831e72f6808853444835ec580000b9010a2e818d2c4fa7a974f5c3acf3c0f9439f4c83721b2bb9df4fa290c7fa57bc1f9f77e4b80866845a8bbbf8030b707b1f07a54a0ab901188eb2e1262a45618a08517f943cb032eeec926e4343d5d3089c145da1d53128ae901ce91a813c205c615bc1ce9b8658a9da4c2d258fe36f6ffb6289df910566386dd1a9f73b44053bb64523d8faf7b9055c592695fc426c360479c1e2d1f68ca5c7965dd20b6879989606cea7c0db28f27ead4a591ee264f755b7358146586c6a1a8530ec463dd754f100fac603ec3360c0440874c12bb179c43a23e40957bd446f2573af413f3314e9f0668af2491de96156a9bf35bc469d51935305f4df051580b84e98ec8395fbd42fc0c3f3e7410ac4719af4c080a09a774db7e3a26966edb91c1f7956a091425044ead1589f435c8d04aac9533764a04325d5543464929773cc6ac555f5ce1830c997f4d26f2dad5a7e056db6f0a2e6b9032d02f9032982014d88828a67bc288355d78498c2cc318542aa1a60df8305fbb6808853444835ec580000b902bd082cb3f3fa41ebf06fbb17afeed9ccdcf3d2999e2fdd1e1171e0b1549c06de17dffc4ee7785232184a698311c7487fdf090e34b9954a41affc0d0ad44104f70750f6a896b1b2b5ff1024de66ba877c5494e67735cdfd45f9ec0df1c198b357b60e4d840abaa72c5667074c43bfa5e1f07b5970f018820db6fc2bf84341cd024cefe455c92426f876e51aec0fedded8d4aa4003aaf6970c48d898d8d82a8411990e73c8ec792a2cc4a129e526d0fa34a54c37ac13ecf4e3c597304cdbd327704fc97f2ba0b110afee78da5c3f46d3354bd20f56cb91b7ba8d302422428082748faf8b4828ba925ab1a02ba695e686da4d1e759b6456b0388ac8fd769f3b726332be36d3153ebee040b5d822fe62d73b629a6251c8e49a988cdfe599762759df03c9100db5f7a87ce7102ddd21831e0736924f230ffe6aaf6b012423e351627e118f2bc12736a3694b5468858ec6310017b10de24fe75ff0abc060b1e60271dc5274b4bbf0b755a0a617bc23f57ee2286c805086d5824ca4bb6297545c5c1ccaf03be03b7df33c953ddb183730313f09c88392e4bdf688f1d2b730318cc9b148e488c2f1e383505a383672755a221ee7dffec5a4f77e7efe66043d686a126480ea01a8ef0f72f9a5799e03e863a85b7aa56c88b7575d6ebb9df809a240969d3a2b2e086e742130e38cfe7870db79bbd281849912fa611e04b8dd0dea9b7da5d16a66969e54ab9def159b9c1d351d719a93821c40ad6c6014644c5f77374cbd486d6a7cfe75d7d849ce240ac86a1c0843aab27fba4d317c725eb1017
52803ea67d3e12b784bb424eee6f766e33d6664ca113af63c54ba27b8a8e904c572dc3fd09848cca3499c403a1c601db77a7f36d244024ceacfd9d6ae494b7e7e0f92fa5f83458d5da139eb127709e3dd75c88fd5f75244e15f1bb8cdbd3056bfa56139442c0bacbf3263f29ef34946e928b9a4f1c085e5df3b09f31c6e87397bd939c001a08b9ac3bc299eff8eedc51ed3ff077e49da6fb145a0c495f430964581fd4d230ba05fef2837a800e231a3178226f59a981d2c4bcebc4b4cfba9680371da1e2c1a61b9042bf904288821c649ab1ae8ea668896d6c78054ad7a6583121a8994e3294b628e98892fc56ae3fcbce852265aa657e7884563918244f40000b903ac0177c66fecad5135344e89f45ec7e083130a3e5eab1abb75bab0aa357cf044c0582542047a3f9985d3439a6f850466061142af44a9208656e278b7ad1bd0e03539cc019d6ebf8758bde3e0489ba540c523f178a0b055c1fedc3627fee427467ab67545c154106bb9e0c12a7120c175d66f9e3eb9183ae5c7640d4cb4bd3dc94c7b4e0c9fe70e692c3fd027e0ebb46bb32b73a269037a76731a9f114343ea0584c3f7e9cb4530d086609b59ab6b72e7dc6c2c0c95699091e06a33af5ba200a168ef483fe11056330e84da4f2a59db72d5d697d262b9565fe81a738a48d24a9f1c8c49a671101bb7db5eb64deb454a117eb00f4ccc31bc93c061e975ab6d375967544a2a06ff8b9d59bfe1ecb1dc47d5536c645d764028c5de77f3f34d6c7999785b70b187d9ec4631e83cc69499a4ff8ace98a6f17b77f648ab7a07d5ee0558a8efc19d4601573156a0264d2e6574e867c1eca423eac1fdbfe0967bb8f02524cc2d9933141acf619ffe99483305fbdd6913f1e1feb78a17fc6b81c705c81eb08d5602b097ddec64f6c334509caeed7525e3e34845b21e56e4424aa9609f4df8bb13f31c5448b6bdede84d9a9aeba9fcc38a3c8eb1f3f31b80918e045266c7d69b252c86f8b5711b2cf7136e2c3d86d1301608c7c16655c3ffe6d04014dfd55a9563c2a307525088fd017486ffeaeed45873013a7940a7a91442b975065c765c32546aee9b001ba78d8563e039c8edc24a92f9f457ae28172eb29e16cc588d52c8e75a565aad1a8f9d6d341189a24718c26c19a83c6cfe1bbec2f4b878759a7dbeb4ffc0568b902b1dfb18af00c7014f2822965ddfb56d7aec508822531834ad2c869affba1f95bf3dfdf1d1dd1c2994d904b9c5133900962c8137d7fce9f0b9a7d0474dff9173edbcefb4bf355539dfa791241031e90770c8f09af595eb1aa0d083bac4fb9b929ad7e23c0fc8d3ecc7458a0790929cf7588cc255916a6c16811f09d0c972b294dee6e1f739c5e9d3eab8016b565c8570e41bcddeef2dfbbf95910ae6a46a2
834919742ec599b9ed204d1f86ce6baa534039ed308d8be0d289824303deb54af5f9f50d88807134b8f42485cec121432e58b83c8aecb32fc62623b06c39c3f1e0e921b1bb880d2eb017578e5f33a25a335a813f02259e1b12b8a76a90a65d015bb214032a095cd8918b78003d310a06a246ac95c126188911bda8a6623407c0dad308e25a438f78c7409267b729413b7d248a6a88cd64c73118999f00981aa4f6b639e4252d39b1706c686c7763ae9c41aea7b46fdd48bc490502ae876175e5aff8361ccc530ad8202bea0b0209fabc8a5c0e2a5bd08e9a6b532d51670f41513cf007781f27e49b070ccdba0795755f4fe231840196d847d100e7cf1e5650ae172890c469428269cb105c16cb9031ef9031b882565c357c3279f0c88e90114422a470a4682e988808829a2241af62c0000b902b424fb91666edaa16addea67f72c9e0bc7a8053bda59776ede2a0ec3f7c78ffac0eee97ff259f92b21378193aeeadd0253b08897a14f10ab537db63202a4c9f78eb4b399d55c5a256a8414f58f45b109e6228a75ed1eb09627f44b56eb539c334df412b30ee6f4ea39a04aa671aee9e7157b9cb69aad4ab1d9d75c6d90f3488342b29bb59c97ecfd2bec4f991b095038b9e20eeb591b641f64e32e5020130f8a8daf7c51caf93ca460a4e60132835119f99d0484529cf541ab9f922bf15a782521a0f6739c1edb8d4bc26a07e63790087b4c098e4df74534340bf7815039326d1bdcafa53932deeaff03a31e97c6733cc702cdd42be18e4716dd0d014f3e916b0cee3a16bd52cf717f5efb59fb7e41c8e4c0d7eee8ba92ee5b293b25612ee9a3b0043664e918a2aa2b602accd357c8f22f382b16f637b57f2fedb7d8f66172f22e67cc04f230e28ec96b928f449fba63b7862bc3102181d6c7bf063d9376363b8be8200169aa88c46732c5ab1e19dcbd8abeb34f1e1cbc632484d9864e630c4567c0f04a2bf5895d3cafae1b0e70e4c1ea28d4d9578a82611f09ddb22c3c4440e8236be2bf9cecd3fa64b19930af8664d78d6f10aa9c913be537bf2b539e3a9042d5744eb3d1bbc16d98564488a51ba45edb2713b466beac560789c4eda3c0961bab002b95eba9f512108dee2e39a8759c04b18a923f2f2aab2e1ca30ec7361b25ae71923027c950c089469820a4ec3ec60529f1509b92ef04fb7fac70f25d3e5ea5c6a28226fe19317bd4d0f42085884020a2b22dcb0ed8e5600ac969b4f910e54f617597a84b05774776d694ba38ccd3d1055a7245334cddb1ca20d7e001285a57001d03b2fc1ff893ab044612dba9b311247528d7490a9a7f3e7c3ed8531844d3b829de3604e8546ee8d4c3d7a308d32035159aecfa20ae4660e6dc94b6a155aa78150a01fb0e6c48b660a0f051ab59accaf45
08202bda080d51bfef036fd4c4ebe7151b2755d6606122e565323878701113b84fc86548fa06fb34b02deb66359ae8095d3c339673ab2a8b138fcf9aed2d4276c8a16435a60b88801f88582014d88bbd39acc70c3229d884ec80fa5565439d283119a84942d89ae04c33fcbd75e3c6c43b826b266625b854f883782dace9d9000008911d1f14d3a721904f1c001a046bf61e70c69943c277ef7d09ce5e779a10e3671cfec81423e0f951254dfaad2a012fa75748afaa79673d94a17d35666009001775a2b868b9b839c77065649bbebb90143f9014088e1cba06e2ce482dc8804b98caf86fcf0898305c61980880de0b6b3a7640000b8d9854e530ac567b7d29eedd91690a0d2397591c6a1b1f5068bc292b740f6aa5d38003a933c0560971d4701b31d537fb7c1ff68c40ef07221089f37671b101309000e0eccbc42284732aa002f2cb3197def9947c2b2fe47d3fea2efc71b1f3cd681082d043dbc1471a56a5d0a5c757b8c115277a2af2e044e56e5e3c2cf8756dbe51a347096a4ead46fe53f4c03fc100fe0009f6b2fd6ade28fc89230602e9221962f4512740857b87f415f134a224c5149e374fe22f3048f0620f1bddbc9acdc268a5de1296d265bac65fc2650b3de55e6bcbc26bc4d01dbf7548202bda03e35d4429ee24e44134f7f51b32fb69691a16c60a0347d9283a8e593d5a095baa01c590af4c1fcd3aca728bb5aaf03f48aca22c756a87607b4153a5ac6be59ebb5b9029002f9028c82014d88aab881c6fe3d0b7484b0da2b368542c231bfe483115994808829a2241af62c0000b90220a8317aae8cca53d039d79f09934b9c5d0b07bf13ceeffacf1011fda22a85505eb7c717168c18d8fb230a7a3f166a4e93326fa82884ad3093b5e07b4edee095d98bb92f357fd4a98201be26960d4253da6fcd09874b364595a47b95d2b50f8cd45921931469a302be9699779775b59f27deea2aaae41a010a47b825a46103b7d355f1c154b3422b4fbe4e62c71c5b6b98b627beb82014ad990bda2b6c06ddd237543b3652c7a029928153a8cec540311406260fd3a55cc5788610321d66c29f168ffe5d93f92378359231ff89492db2bd2e90a4d9c28263d75b77842584d253fd7316e61c27f71771ac7e7a3c8ae6921ff2280c459c36348e0a098fe8da94c1546c15db7968d6b2821b24edced45a7ca8f2bfb2b9bb7a497b950bdaaf771bd777e918887c0d2d6ad3b72c168228f49fae155862e0baef308ace6952606a660beee10da3fd2d29b5ac31f2d55e34da94a4274e1bd679fa42bccc5db074a070b899e28948680d82c7229223d846a1a2c19143dd99c78bc42c33490b85be5067a25f6361d6b803b315519de254191557ec691967ccc3d087b8799dfa5888ad748b7a6e164da
0c726bc1f916110b6fe6a013ce0e28b79bee045d250657a70211dc11a5dee69a2c05e9eedde536a9911883e5ef2ee76729ff8fbc3aae0fa13a36daf01199a7ac60b21c7fcac00d7c6a80f5ce10b79f4666d69a1a45b3ec864a57f1f6fd492223c539351326d7a25b18bcfd8697f55e972607b9675b1d40dea3ba4c0b3c080a0e69a3802e5dbe5284f817eaa05c76127a3898633d4524f3da9ba8d7e7b98af23a05a2672729a0136c572a68b494cdd49ce47c2c0e33582b601632b3a1d15f3cc38b9016001f9015c82014d889e607b89f9d2717488ee3a5d83a713a9fa831ab7e68080b8fb754cefe26136c37abae044d7be8e1a3b8aa3ff230de4579b08bf12020e9ea66a2f282ef549cd7f72d056ded10c2fa21fe339fe56715960a4bacb65525bde1671a0a691f44c0ed582e64d3799c4ee453a4fbb700cc130eef66cc66913d919b6a96bd31efc3d77e4accf3a7c695275188ed2e5a76526e4706bea7df44cf6a36fb9e43d0e37cf5d6e3c5b984062e57ceeb1c5e6a9d0c418a5a83b77c4c99e8799fba27bd884e51d5df3db1562fa0b13cb1051ef5d5269b4215078384fa84cbcdd93cd7e67d166ebfb88eadc77cfab6a09fd1ea8f82f530ecf62d60d176d3bdf4f2eebf57b45b532ba6471fb53312e32c3452ac69c7b0ce227a61e69cac080a0434df311dffabb4af9df6fd81f48814ad8f5363567d421c5466423bf3bdacc05a0032341e2314432f05701cb222c2868894039e6e156ee6872ebc8739a4c45a43db9027d01f9027982014d880843386325d71bf988456fca4e1ec42cda830601c994c5e72917d21e4aa0f724ed1cbe014171f1be66ff80b90203e082cfea48d8bbd73dc4f299c37a26fcfe1286a62d17e6bfd13084a47fbccd302a44770baa03092d7aa3bf8f15281bde3418b5a6f610199a7ca97fc11df8058de81fdc05527047d32e0e4527db10cddaa2e1a190d7dde1987c0501a200df8eea07d61ea0028930e7422451b44295ce91f79de155d6169bd64c0cadae791e59b67544023e5fcde77eb509d6418daa17dba99d0f09c23c7df78d609f4af7c1ad95b01c26edae2080556b8e63ac632d78b87eb57ef23791c2336775ccf12f62dba46b65a5b5c7017068194fd2b7bff11923ac2dba3ba0d7e28c1ed2ef1c5d2069e189c09bc51efb571c63f2891acacd6a327dc810180290f9699541f4b65bdd8935e074f80887d3f6f4c3ecd75a54c95476b26b42f02964c16ae02532433d48fb5b5f779562224d1bc099f51d332c67cecb1e619bcda1aee26011a463952719987f705b12fbbbf34e3989d6b5c5182bddc569fb545de391ef10031bf1b0f673f0ea1a9763f652624852bee8f09dd517250da77dd194f8310086ba52032212ed38e014a9bb3f47d8a16cd463a97
7a443ee02d5548ebb5c518e5a0125c6645f2ad2d52f99aec5c88cf4aba79167cb8f7012386916fe2b863da27d16a7c3c350442ebf9b54a569ccfcfe4f4e64853fd810e6a5b3b3cba9ac8525a260505d12492b99437309f94b91dd68c7658291052e2c4d414f87c1d7b7bde565791fdf99004316f02ef4d7c001a05044b928ccada6036e32565da0b9ac1b51d4a0eb5d702efb781a832c120665aca027befe34f4cf0deb37ef259882c20be1af0efa2ab726e06eb33736ab2f0b34e5b90186f90183881a09a2f1c8cde2c488c2eb098e1a51326d83159c2580884563918244f40000b9011b643c223acabd55c37efc426850758db45eb7a0ccb908d9e2ab6a122d812921618aaf4e30c377ed8c7c5b829846b473702496e87f2fac0a78fe92a7602239414117ba9d42c354b05e5561f234e4fc76ecf8285abc17060e980e1713a3f0ab031a53c6757c972e363485581436b20fcb4aa524281e6765ae59362fe284cb6c9c26e3980cec0a9b2f61d1446e9a1679fd055fca089b838872a26f866cb09ceaa5a57a061440ba3a342807d83a5a83589a7297afba2c456c628954a3daa451cb42207f9de22fd5dad066647b8e8ed43fccd3f335298291601fd8737a2ed69cb89e0573fc8eef594568c236f8f976870f2da93c65f77aeda9ae17d812e16dae936ca069e489d3d820580c636f12164c73795e287db92ddcc73dd6b341408202bda0b8ad8ad3d5218e0e27145286459b952ffce119c42b7b143d3ae68f08991c6198a07bd60b6dd3efcb39d42fbd3b15f2f65f9561ed6106484285f3a9d235d2962c2cb903a9f903a6883c0753f96351f096886eb111ddc0775d1c8308a6ae80881bc16d674ec80000b9033e6cc26ae2edabe8f726535a61e77b09496c76d81407ade4466993d4785c16ae669c39a5f9ee18875389a6004576a39465d66329e18646036b9ff5657ba1ec659bb2acedda2862458a642949d15f2108c9c9a712216e2d9d13077a134a69c64daa48018d835b542cfa7861a12febf7b79023af48f860377d4d8bf99639ba627ae9844ddd982438e2a508b6cb89c87d4b78f31e42f842f62af9cd59a69f4e899720156f7a2adf1d348e9b665481165af600a3f781aceea0589215f06dc022fd28fc6025ff85e3d4b7c25c358f35ed5f5f025eb2b0ec5511634494515a197f3e06f4e8a2fef699f33f58ab71376581b455cbf592e1e657115448db5237d010399045e023d0d69797131720de65ffba81c41037657951db3bd5fcc555b8bf6944a67f1fc0ae9ddecbdbb955743a86d2ca82b6239a47f0d37759cb3bcca9d95d7ad084bd8269d06f6cee9effb2173096ef22875db79714328f2d80beac6cff4b3f8fbde3ea1a1040b6885d86bc92390ed2efa52181d3fcf6b761c0a14b8
417ea3878d311d3690f93258e57848e926364fc0a60dcaa161a1cd9ea4fda657c5e868f59bc6d2ded1e264a100ff752fbc32d30728f13d74f60a1931cf1cd302aec02f4ca94541335c0f0717cda44c966db4c2c1e522794e0cc5a9dd84ed6355f979c4931231225096d3f651aa1970fd8a6de80325a6b7b3362b11eeeb3401df138bf8742bb94fca940ed45f8b4937d1645c98adad12836b19e09b59dd1e4cf020a2d4efeae49aff02a0c92537dfbcd4a560e876d0a3da71a38302efd5986e70a0592c02c4a8e5638869db811e47ce514bbe71acb864580d9f3be29e73f8af1584130a448b85c0a4a790d750a3d67a4f1c3e52b0db1c7ec28b891c66570c894b9955f0914981f28efef48616b004ca747fcdb448d0a1b6d7196e2ca002e17cfe65e7bb08027b95bea17ba0dd5b9a479726b5cd32a0fe24052c2afb163e60733e6ab77f8d1d2f606de15a31a2db1c8b7827434b64f794b808287f612854c7df802822340442cb00b8c508eb8d74a6334da415319557d4a8cb58247a7e65c74ef2238843fd02d24d6a859f02c547fab6e35903f69394659a2b1bb02fb89a613733cce7c4af817f6b8cf2ce38f425fa8b59b3fea76273664b8215d0503198393443c926b578202bda0115d2f3409265aaa2d214d11e19f314193884ce34c3274f4258d5f09a97172fca0418e2cf579d94373b0a81e66636160ad2f1de4597445af60d0ec37e9a97770deb882f880880f511ab07ca9dce1889745de5325aa780e8311fec19424eb7935928d6e5fc275944276ee070e90b9619e8853444835ec58000086428a36f8feba8202bda0d3d221e5abc91d1bf4721d9f51100bdb7e25f4e1b2eb363d200aa1b0c09727bba07688424185824dde9b365f31e258987ffcdbf3c850f9992ed80d0e71e54712ffb902d702f902d382014d88e4400f9aa703b1f98501db23a8d88543ec7b3d868309954b94e59842fa49a842609ce51ec1a4e9f75a00da8e1280b9025a30fadb0cd19a05ca7d20dbd28ffd1ec743d59a1169a730091be383f6c571c51a8514f9ddf9961a588f38bd388786c9e7efc5d0e71ca89e7f24a73201839f40e9378e5305f4174752c6eef07273a2c51009f04350abed1b6dbfff400ac6f790013028b56aa08f5090e4483b7bfd1b08042b8651dfb27520b3167e9b912e37bbefe7f13153571ef8ae23f2034df09ae737e672bd09d896bb01cc035322407ab3ca2a026f1d8d5beab70178c580a650874a57787d92b6f31f7f86ee939bf8fac22b23c6b6666b5e0241fb55dd4d397f1c78fe6da9fc3e66c2e34058e223a4567d259e3e1a3560bae9f5e2e3e7df1b7384b6af9a4155f1eeb61a6bf4b5e149db22109c635cbe9a4266ef48c211fe1236becc472cb7869906e27166f3f017ce75d18
8fa708e037fe1a5729b43892460458478cdaa91af1f9367cd1164204b240212101e631cbd027c814efd1e46368b37041836964dc6a76701c38810f36cc02ae93eddd5ebe83c24527244a55eceec6d47ec8df4b158fd1166a7d0d7bbee043632852ecd8e5aab24d71717a232eae9facb45b534f75103fc57f5cd8f978a362249a16e6b3783443bc5100bd1d8bbbd45144b7c63393f5d8169c4381f645bbbabc899e022d58e7b4293125d6c4d7ef75436b4542618636fb247b48ff823f52f416348fb767f6146c1f443147baeea5c6ca7fdcfe3795e09112224301f87c5667027b74b54dcc0f3c4e149a1e67aa6f8a940e1f2891980a6e565821a1f06d522eee5803650f6c0b8c8f5452804f9c456550cb8f1d4827c7fd1c8fe77b71aca3aef9be16494a4bf7d40b274d28ed9cd92a2169b6de5fdfa3ed1b6ef8318c080a008c406d42212f12e384b8f8bb7bb40d0c4660b67026646436ca589d143edc5a9a055fb6596377274cd6af52d95a127c503c0af5b7df6df59ec493d2bf15cf02bcbb9046102f9045d82014d8822e3c64dba5192b7843cffd35685424e576804831aa2e894b002add3a6fe3cfc260c378a187213b6bac436f3887ce66c50e2840000b903dd35dffee48e5855b9f4e7d47630f215334f242c738b2aaccc6e4a815ad70d29a94bd5fea67cd0cc855835ab9bf81c789806e311f744dfc370960d5246099d70e509571437c3c61e11c2971782d7ebbe3dd231c3025966d5ae37fea256ab601339db76c325884b7939ac8e772ff54c8196d35cb823cd42287ccad89e0f1a8092caae92612bc897cee16c73c18a39a5b1ba5bc5df73beb108cf5c896a420837ff53f6e601052ec017e75d3554c0ada83b7874ded4edab8b1a25e39c56c4666ae2812fe82f65f5f7d423ab3a173261ff29495a5ed0851171d1c261129b2062fffa4fc682cb41394f5ebe335bc2220abe7e950d9afa85f305eac439eec8eba9227352f592804f5b47208c262b220c1eb39d6ef89a92ec3ef051e9cca642658a8d8e55b35e78583d7a6cfc01bc5b9d579a1514c201d34230684e4385a1774f8b5f38b5191682a8b91b536ccd3821ee409028180d0f5eabf6e1e2e3dcbeeae0d92cd83e52ae68842bf781824cb7dc8c1507361d7d03b03bb15f7f7a0a9bf12171e01408f60b35722a5a819d7d9107fcea1b94184160cd9890f1f510207d47752fc27f58729ca8490b81ea720d5fcae71db92a9b140099047f45526d26af5da8bfe3e41beffe14d5d1cbe31bd1e50b9c38b9b393ef4b1b5514050e4a934d9501fc70d9ee3720a22fe18533b420cda21aea8c483e5bd3cb4786d6ce2d0f97d1a653253efd1c0283772e8ae43013dba4990bb6c7d9c7087c0d9b2fd3b79decd9a775989c81b87ccbb1e2d6b
3c4df6dbe1b7e3a147dd8ff6998a0dcbe3f517899f2dbbbc788d5004d2de3d23224268406d02fecb0ba553123528c6b41f6f55aeaf8f32aa767a9f3113ca91d92e2dcf656cdef77f966a6b2cba83340658aa5c26aa0cb8ce54ae3a55b1eaafef66763ff4de971cd6a0b65a680169837dac945b0a7f13864795670922c99dfc6b5a5465e5043ad1b3205e4579cfc0e037f0b4e0a8b22b5d6ddba7d24b31388620d4aba83f84c5a1334261955d52294bd8b56d7175afbae015933ab1e0ef91e8161468f8eaa76a6f7a9bb8c8fc1195b9d8ff5dc4a51ff73a74b0640999bebcecb6036ef676c65e9fa5b1be22872082989c55a789fc4c2252452f786a13c4e868b85fbcd09bab689bb66dfae14c2ea7024647ad97728deed03314b007dbe461c1836e97f928308d39e5afc43ee3ae22ff47fff183553f56711880cc5ef72c5d66b4e2c6f651c57311d48fcc0aec762fae6444a5be11793be04c85ba97450673687734e681a1f3c64699686880d32d4cf87202b49ce13fbc8771fcf30d5593b41ffa61462c64061449b2c0a24ad8a03d280500bc86049bd55a27a05d70b12c7fd700454dbf3869b329a1ffa9994ecc2a6ec9572e3adaa0056c080a013fed42f6ecae05ccdb9bd8dc88ed44579b6a8871118710058f72c29f6db3b8ea03d200c0fb3e4416a51538d2ba41be88cfe830fa74c280e8b4b66cc3fad24ec06" - ); - let raw_batch = hex!( - 
"1bd930e08e94a89daf73710d130fc039db221fa427e3e9d10b5ff602fca4577fc203ad9313f493c51668a017c2a4ed1260401ae0dd8967eb390d13f2fab12f43bdb0cf432a6630bc76a84c50bedb2a48e562bff35eeabe9cc219de13de55412f6692e1708609ce3440ac1909a693fdf68b581342ecf8d480342c3e3b435349a5d903609718170fa9a4702fc7df772fec119dd097c017e8531040192c66d18eaa4261721c01c8932d0e8890ac2be0630cc398f04f556750355a3a608612f9d782f52746c2c5c83c8e01cc0b5afb9b97080505da0ed526076535d4a34650979f8f1f98ddaf306fa58591a92e25a86a1d62a3ba6d6b53be59da78c1b1a3128059e51e7fdef133a3e0979cfbb47040a51c6e684b6320b624ee51f731fd95ddf7fc672367b4bce94f92714dc4ab37394f3b3e612dab56829e8171d3af31a6cf940504421122cf830dfe1783a42dc48c2296849ef352bf18ee96eb5deff308e094b61e61eae5c02c14320345cbf250a6c15f725d6c2b12e8a10c1331f91d4161667dda26ea1f2a7cbdcd1d73070b70c818d9f543b7b3523e02b58f08f6858b951c735820579cf0ca7e4dff854cb2414a29556658374c977897ff125470427dfcfbf5c8bec622fd5b5d9cfcb898b3ea3846440ecdc29a7f99da330597db06d49dfd085d0b56bcee9b1031aacb1d71d7df7509b2cd76ab53620623cc85f880037e10a14e6b55758925f8ae7eac9489aafb831809662dd12013e9e8ebf67fba771c88da3157aec7ad6a4ee554abe967f1ccb486c47592eba5ae33812285bf3f26dd11d232f63c24a5b6e5fb285aa8950dbecc16f501c87665df4d159b307d36d554d54240306bd6ccdeb6eb37648c5c2d6fae684e2fb5608c2acfffebcc595b277d515158a141f2c8f2a005d5ed82e875c9ed3546149042a2dddfd82107d3067825968eb4cbe455b6b2f6ab2da38c3ad83a3a6d87fec0ff797916e6a5220218436a438d6bb44dfe5cba3f7602cbd7fa0ef7d000b9e02b05b4b867b1eef9b76ecbfc2d6f2df9955e4f8ca9d06f563e3991d86e9f194fad8d7c05e413bf68f02c5592696cf28f51aebd5fc6cd1cd76b3543b37f994c17f83b79c7920c01ff10d4d97e35689d65913b4fa0d5748de37963cdb48cd1416d899a3083df547241e17f5f6df8917ccc0c5639912eb99ed8849a2c8140187ee114fd3253b986c3138906dcc2db911e6bdfeb32fd0c4b8346d3e2b876fbe3d2f95e752b71f94c82be7a77b4ae73bebc06d03e8ea40dea94450887ba163826dfcd21038bf7f560db0190165d83809d398eb32f038186ce9b49ecbf2a9dcfe0be406a71f457514a47dac76990fe20c074893a34a8e7f59d4a945e3aa4e16b6c37a28d9a132cee8fbd5c7052ddca49cfe12a4c14e9492f2e6
b480aa70e39e46b481b38c7ec36d24fff714a8464e0aa8c2dc3bacebfb59adc6a17e5377e6fa4e70af286e318b47897ce7e75a65ab445bb64ac6159ab48c1310b641fed5b40c84441a093af75902be5401a3304a3f48740908da9209ee6a66a5442bb3eb344fec8905a7b809c531fc788421da2333a9c3d84a5e0b2c59bc8807796da4f6924da6a3ef92ec94107b8ba4092d1cac44ff621db09c007bc007040006570794ab5289e3a323b98e261151a96b3ea240c0f612015d99996ed87511cfad3d644577ae4ca93a14fb250484781975404938bab804f8cdd4dd288ca384f7430ada7852095dd0b7c04ae9931aab4da57816172e71a85ecab00f5149e9929fbd4dfff8635f54ddd91bb56a86dd60aea8af18dc242026dad7b52f271db63881b39577a15f5b8f357d3ccc8cc6d79665133f571125dd592caa7600dcd7d72b5ba73c0edf74389a8a6e3d4d190b76a559a324d0fe39ea88bc6bc8c3dc30d89145f253b354134b38bdcafa3936aa1eefe10c806c2593502f0dd7cead691dbdf325a7b72da81c7427d2088ad9485332e4fff004237cfe54da30913e7e0f5cebf71691ac1c38731c84d91a233a96424dc976ebed809cc7c01a681f7c26ec078dda8c46066bd2a07ac4df05d18920f47aa113136ce45aa04b9a4732daf0450a88bd175b8086c4efd7992f21b0a0a90e00d3a17a0b46ccfe9dfd9fc901fea75e74d9d127118d0f8832cbee68be4d2c020350d533276cfe5b9d606ffae3e7492ccdb0099475b66c33ba9a1d6f58d8c8de19b8475059e61907a44883ba381ccda9e272b16d797779e4a1b4e3db34def79ba78e8f9ccbf592be4a63f4c9170f2c304ec65a8db539e72e1e5217209b0b38b61027cb82ecd3fc60dafe36cd476cd291f5dc574f818a19ca74d73331e0c3297e25619041b7ba9412255b10df0722463d17eb600aa8c9ffe3f43df2945252cbdf52113dfdb052bb2491299113c3e371b2a035f9b323318f17923f807a394cab6729124845833b794b0454c42c088e119110d767b5456c82fc28a2048925f5dc54765313c632704493126c75f40a499f6408263e61162357d5ff80e37617e80e0aedfcfd0284259d0e2bd644d54ab3166a22630ac06ac802e97f600a73b0e38fcce39189828cf98e1f5c6e8a7dfbf3670ec6498225b00446125276b6cab6004bf4d2e8c1341085b1ac9aa127bd10bb2ed29c7dd74f78baa4061874f24fef9d0adec31b81a46cabe2e860d890edb27b2c7f006a37f29b9b9ed21650ee7fc27f8fb7e16e4cd947bb47d094b26b2def138f04ab29316ed57f12f3a13e988810c045b7e35f1451776031f0524e96d1d4ce2c41a4a35e7e80a127620b2252f27ea3445b0cb1b49c4c33444237a279c20c92086bdc9b0de1e97c1a7a477dc0
cf1efdf3040a09a8d1f3993682dfef3458cbad84470b94a52af59c2ba0f08d80b31954937dbb33cd743a099ddedf31402acc348f83e5bb821d185e14975e2a43e40d45e3da4b70fbf397db46395c95eb9176d70b70b1b4d802551c2b035166a82623a61f45e60b4c18570fb034e7061026002f7e15189b7c2ee30b804ca545894707287ca7996945929b08cd4410fcf7bf28c385be9abcdd0cf576dbf6c402c41a7147f14038c97f3fe8631cba55007db867fca4efbe1ff39f537548ed902ae01bd6a0a236a67c88a661dd930c15f017dce1da3ec5159d0fe4cc9cb3488ca09752bcec884d2adc6fb774eddaefffb1477d80ea9e1ddb0b7075ceabbbbb5ecb904866e0bbf0bf8f905b6f7ca5821b92f1109548fc33650f68a9b67ae20b6b165cd39de17f7691b8bfd70568c7239ffc66765d13b72db4ebf890a915d6abe3b557f70550be6bc96e5642b82b91eb10be8d669691df365fc53820e4cb6517f753510dbb9c51a8b5d38ff436fb0c61cdbfdd3f85f318897a64585a16af22cc782fa05fd7794817ec89270890d388c35c3abc1e667e266cdefe79211fd369a7f504a334a3fecebf3027fb2f0ab1af37090f97dfc1d8116ae99b2ecd742e47e48c399a88a1e1aacfb927ba4be5d9f0fb1789f91b1264d7e0f7edfdf48526c583b823968b28f716feeba8a87508249bfd938d756ec8b2e51f8f2624fc6467a7b764eff1384b306bde754b918a0918c122a7e6f6c1698ef129c99126f8d40a9ed97d1da1ca4c4fb859804441cad11ee84557921aba96371cb0b3a90cb2c0cc76c9b43d5cf16de51d6f43ca89c4017fceb239bdb708bf45e91b68fac6b27b66da9172c4d08a63f6759a8d08c513c1b2a702b1b51e1cd866f5fdcee679ed65dffc276cbe93b380acfec273ec53a664f559d29a46ae713fdbf96b1b23a1546aac5d8b6da6cebb128d61832d8a3b1e0587ebd1328867237ad9d43a4a2de95329d26ebdd455779cd19d4361a5d7fa45afd47068302b55d3efafc6b1e57c9e42af6e2507ba785c554eba19449d5f4c42e5acaf20e9ddc8ed37201c363464cc03d40593ef2fa32f81294d00ecf1862c683fda6ec4891f72a5b5b2b29f0d8c2bb415020f8db1ae7976b0cab93845b08d7a0842d6366e59d73b593b8c5fdf199ff6d6564ece94aadb59fed75951abd39f67a06030f2d34d57223b62667a8fa315cd2a27af7ced30d9ec78e71cb8d675d8d61924db42bb3105556a57775e7472e93e648d78fdbfe536e767a71079e1217faa728fcdd26d8be1cc1bfce84083d5272d543378cd430a096deccffed011e5ff741c92bdfdd4d42a8ad0f907d17490eca3fa52b0dad916189cd4b19161f886746a18b366d8bb1047746282d772670bdad1b0566b789dfe8348993a1eff2a3b03
f51aaf362711afd6b0150ed8ee20b243fea04fd2e1f1eeb556d66b13f18ce72155f52af95cf6bb1c1a879a4cd9106ecbb5a6891c9823c3cb958a4b7652502e6d1258dda66af2136800ac33d739998995ca73ffcb541c37288b5fd898133d2a1de5c020154dfe1603b80775ff375e6cdbd69cc4557afc794acf9336da712626ed13e50fb60d6d7c0d92b10b01762dc96f8a7fd7facc6e090a7442c52e5e90cd3bd0a1359fcf64fe2a77a9acb296c48607a70232b19947b6d8dccb6adbd195c33aa0f9a3df6affa73afc9d96b17dcbd4e0035e005400e022883b79c11a9d3daef71c06223ad5a240021cb3018849dd4ba3b6772f103b332f1faa8ed2ebaac534ba4b46430d18093adca381454c5f59d7ce8c9f4944a84a5f9d598260b784cb284459798cd0b3529f76dc5dcf8507ebea12e2164aa7aacf8317289b02b3708bb25354b4f35f41134214782f6df124f096fa4786c6e6615be1a2a67ac0d8c74a7c5139b2028f074665a56a4fbe42a2b15709b73cd55e5d242d4fb1259d45c3366ad2494da03538c509456ad6beb9cb0c10ac61a163fd1ef3577af4d495141a9e6f2b8fd008c082e8b4592ecf66d411782d17e00c48c7e63980d5584786992749937503d3cc4c249671ccde9dbe9b4c4f9ed1da22e44f427466633541b675646d794894dd0e53223dfe3f0ceba6b969ce04421c876a51348f9022403f767466afedede7607bf8d06c31c8c7ab38661f618a55e9e2fad91ee8b238a3ca1c64616392b0faf61ea8135a5e4b8cff5a0a0008ae58fa407a60ab3748745bfb167713ff5c96bf9847f67f974328cc933d76259899f32c70f5e0b15087641a9fc09962d167cd6a64d5c251d3f7e751924e243c9fd41a475ac5f3bef284470f4510c6f3250fc4ff6827f3c59bcdbfd166e593e386538b0b3c2f0085b5f6e271371206d6a61a2d8f74246f12968c462cf6c842999e6067a9e8a47c1edb89ca69689ab583b397acabed4b22d100b754bebdf8f270c0ba9ac8d33f68609c55f94572c5684fb0578f795b88b926ae7722223bf3f32e4b68be8878e842ef38be46a23e0904688447e70ed3cb93ed194d8d4bfd24b0bccbb39f92a553551bd7a8d77a6d6180b90c61fb3efbf6e6dfb987bf028dc61e4c22c2fc1d714fa7e1fe671925a1de1752c563dab2ac372093a57611b196db489e152e342e49b0dd2d6d84aaf0baf849db17bd993369caa66b74282277f69d18f4b009dcde6cc3305817035a1b104d056507479d53dfae3386b05f6b4688833381c18bcef8a3e6ed70b47d21085c07486b5232a02b5d64f013a0fc6308d874b3fc4ccf44e016b5456efe45efa0df4ab239aae635e4f9c879cda1b78fe69cfba7b93eb4a36af3d20600fc42c0ccec24639dd53d3a2f67f7f22e8d744a
e9917f1cb5819362c38f5b4ed200ba23f4d6dbe5091aaf7ff47ededafcf23421fe16aa42a583d3f8a96eac23faa269f9d001fc00bc003045006cf1a21b65f26a45980910e2222eec2aaa6c248dd1e433ae25f22b186c631ab96577a3c0cd5dcf5bf48162885b91131756ea916258ebdeafe262bf0deef40b0093788e97e864676f127832f5540ea04e0c737edd0324a9b4723a807a70a35705e9e27ff94945c9c47c8c5312e5ce4a0af4b243e210c15223732371cf89b13a957b9a6c44293b0e7ecfc6611b595046bc3e7345bf92428052bd8264db5f2fad4096ba44f9bf62ee1c803e33bb03bfb185b3a966e3c87fcc337331dee6f79ff3afd6d50ad823ee9aed593763b77a88c9ea33d6104fbb98cf0b2d60dd4eb28f4f977b37e29048f01a646df6101aa7d44dd1e29671af77a71d1ef3827d736d1b7f22427e63a957ddcbf65f2d4533461efb760bf8574a8649e87a5bd2db0f50fdd1d89230dbb66dff78740b2bd95dbf78aec6c2e3a89c97c752049126a52a7b37a059246713055139abc5610499a452d2eabe40cf729fb11ed87bff8ec1319f773ce2cb50641b04e6dd745879dd02cc01768061040190c8ab6fd4d1fa6bd1c9e3938c51121514568b61506fbd696f91b12600f0273f3ddabf8d9b573375efde5ead4ffbbb9ac7cb60d524cd7ed46ad5cb84dbcad7795231f0d4e7c05bb30cc31b9e02d4434aece3405f1fa7754a40571982778b5c78af4c6a6d62f0cea4d9bae5f015aa987dedcd31fd22fe7a8370399cdba6d68cae1485de5cc3ab6f04a927da53bd7fefa2ed7f820d4b677a66749f169a0d2d5bef60435edb3d701e139fac5e6ca42951874d563068adc4ae6ca0a633866169afbf8b92f23f37021c301edcc2b57a9126f0df6f9fdde4806bbd2fa3c9d8bea443013a411a3fed267cd4854669e5b710e5d6732a9bd2b8e9d9a522204e491501f2347df956cd008612a4b3b8c5c5326f5ccb1d269e08b1efff02a1074b3e4ece599ff26d2bb2dd6ba42f969b12c68916da13ebe9f9d19bb7590e545a7bf053d8181dafa54117084c1b24111460acf93ac4a85fe695fef00a0a6da53b708c24c601aa0e329b653d4fa11113fca0185d788baab7a647a5ddd6fd6780874fdafe1d1d27dddae0d29c3fb4df510b44bef18a216b908522ae9b6c8d0323222fe732db82d1878279426bc8ecfcbce218a381e96bcdff308be996b67e7889d6894db070fdeec85a919f0f1b8791a50921e6d7d8e943c05057ddac008ffb0c7b20a3905545ca1bbbd94fae6431f5b5618fa953a82db758d7f76e73d231689a5e70930b122fcf4a060df8bfdf47159f7ed9e0b0dcfc27a352785e9d8403dcd092c9db5b749cfd7aacebbfa96934bc24de29a9d022216ab7534c3b15232f5e655ea917
3b20ff8f45c5e91ff4b8d346e4f8c2059d514dca5cc11e066d208f0a4873eb59ddf61f2516ca1be3c7cb2d913b6b1fa8329f028a4d545d751710233e2f65f7426536eaa583e574c80d88ca4dd2f98674e0aa874fa6f75a94e5e3128083df9d5344c3aceb890ff0ccb1b716fc3733c61f149436ac794a863ba875da7afd49c5f8a19b9a68fd3f236ff4e5ee684beb3e4a63fe2604b10f18ef8e72f7eff55fe7e0024267be83743fe57fcc508e9fc177c90fc9a73a3346438ed9e3d5d3af443990a19627a45cf5b01b5cb518c07a27dc8ce246156fcfb5b51e9adf207b4eb1a2933a179270cd30b0c3d986254be9af0f8d4069cbe3416a255eb671d86451895bac7a068119f19c53662bff7fefb5883d6a04cf7082c6d990492ba8782025d03f01e753eaf55e7e65289ba3719db0ec3461231a926ecf6ec6aa8e20eb896ead7a39180f113cd8a9897cc768e80b181c394a897aa248fd4d9f569af259ad9e6e69f02e4fdecfba5d7b3b72d97532a364275e30369d01ef8fccf43f7b94f27e3d7e6293da085e1d0b93dc0e84a3ee0b9e49c2fd2892f70306685aad4d2233ca1e4af8252708466c72c3a43b77dd6e2d0cce45e6407ede7e54e58802929790a1b3ef4743229cd3e136996a35fede076f4df911925cd2e3169dbfe7bbd611154e18f2b39d11d0c9def68e16baa8cfaeb6e8b4b1973169d3aa6c784eed172730a05c4b1f265ae1844edeb266dca67d20a98410de84a531cbf53facd4f3cab9d78f56db51418e1be62f2f4fb76ce1bffcb2e6a3a5a197b89d18f6c7adfdd293bfa66f918ba34fe5a3d97e138161a4dcd2af98afe9b5976e3effd2857ed07bf7809ab577135902703d0e5d081d02ab35a7b1cdb0e9c97509d0e7cf46da7fb775cd3504fb1647dc721fc675ef09925f71df66dc30efb66e7b33d1aefdd21740c769cb4214e07d890b1716ef538c4a5965b77e149b3b72727dd44aab32fa1506956a0fcdc8d7d47ac25d7d67371ac9c9d7d56f93e142d14df7877471492140fa36133b69443c31cf9dcea4ac4fc84fd93593872961d17616cc0467be8eb70460c676bd120cd72b0185e430dfc01f088fc3abd5cd0730708f88a9557e248747ac2197919716ad95fe6401195c745586ef38f5f0c2a24bfdcebd6d1e3b136e5e34ee9c5698c1f19e818d41226e43971614615c9e20f3a125408397e12f50ede77f8786607f6b67cf5ebc4243291bce1d7438d0154e929d38db75a9dfcced5c0949af85cb5cc91d95f5d64697dc21f37b31bc40ca9ab309d23d8fc50e9cca1bccbef27d79de533b2ca5f49ee17bfaf5afab8b5f9b7ca93a831384ee05dc6afb31fd2ce082133615dc36f39c9d9cbbb42e8e3f3e763d2d1f089c9b94f7ab183da49f68eb8a1648833136e4da99b873b4
ffc2327f3a71d00071da308977da2e9cd2b96b7beb424a4c3127b7aaea40c8973fd9cfc3998d967c7c3ae522ee8fc7984955e54fa4c6a76e133ad7ad302b515303cb66282849cd139160ee7414cd878dd24e7bb858520dc50ae28295a32115147c8dc19c0d3e7e04e80a698bb02fb9a527fa79129daab12c97ae65b37851827246d3a0abf3d047a1e03624f6d3f6184650e4e225a8bb6a1120b40ad658fa729e17b8af540a4f5774bc56e9f932bab885d5272c78ccaba460cad5275b0cc97d098cfc1831b8d1cf3123819263cf597f95888194e54633cf6c23331f80a339f1a61af05017b210de405d5e3a5fdbba53d082765ad9c8bb82ef7dfb0ff417987de06c937b84cad437c75b5ef3fa9f0c5089cc20331d0026e0eac9176dca2506452e969731b61071c3ba1495fa089c034d643ba43740528e013008e04a32c920ce8041c026628a2267c648682026ca17e4bb2f9b95668bf716afbc49c8f3c56012bb8a6effd7393116de8692ce1b5fff224f856ff8589823734a5ee7403a8d900ce2854c5a8d60c6ce304964c3cc5b734672d1a19d0d887e33c244837221e52467b5e9036a4d3dd2bea9c69e67e57bec76a463bbc3fe5872894b9d69d1c7df3cf6dcbae55685c5d36724abc930b9368ca69cdcd38ff603a57cd224254e3ebdc453bd327b222b3da635523c7468f8eab0f50fff3225462567208e00c532778c98309d7c87d10af2e4866ba31f0a1a1803cbae792aec7290edac31ff22622f82b21c62b3f497371213f85aaf1733a11fdaf2fe5e7dc3cfd822e26cd1875171a034e2f30edc4cbe26ea0025445921c502e05707b34feb9069bbce9bf05898feff72f5f1e77255f1a3208d298b39e1437c0f0589de017553199314ecdeb8edfb2f131e13ef2b606b35db4af3c9abbddb2da4ec1dcb2efc64f38242748157459b647320d6150842e8df5c109a778f108c61c9303ecaac0c3b69c23d5a404ff7ba27b9b5549897f5b5287af46aa58a248dbf65f2b303d44190bd5d711a1b9ec0f9cd22facea4683ccc910379a9885a4c48ea91c76fb72cfe75fad1ab3d8eb23ad96e39c31aa7293040f78fb9834f3051225163d549a67bb079c3275a4c0a5442526eb74d82d36bd353af051c317c5fde944d5504c6967950f58187197acfc159eb9308dd1c9a26c8cd5acc4c568633c443475aec9ec74136afd513299e425e722a3b00c376a39c957306fc1352eb7c62226a5a34520da4f020eff85997bf208b018795113cb24daca8119d2845dcb0bc681aab967468522acb7acd7526a17dfd4fc2cac819bf477a58dc63fe4cbdb007a035d2812e8a677b2e7946a1819acf5ca664c6a4ffa6579a4ec60910091154d7ca9f90e864d1e9863ad9fc70b43cbc508f3e4dcdfb2cf5fc9eb64cc0ef
fa7b6156a57f97c4302bca139cab59941aed5abf56bfedcab81803d909045a2cf6b9e0f25955e57f5264f631b382c561d4daa5fbf009882e1ef915a0910e76645e0669ba57e5d48dafc10bfad40534523dffb4bdddc029d6334aea481590718f01c01022883bbe7b3a75c8628f3c02ae3e8a53a5afc736198d9e1a92c51753043a293cb26428e921db44d36168611aaffb96e38e6ec8db2801b01cc4d3f0022d3677e8462972a4417f434937b70e45b88c6e3faef3c5442043d0d4b6bab6a0e82f5eae911fd5a9eeebeaa8037af63039508f036608a8cc909cbf586d391ef3eeb0448be00c4c03b93909fccfba0ff6098ced8fac8f7eba830d851821030ea765b73b9151454ab112a9a4823b6ed73f917abb88990397ecfa4d1c2c607b898c1e476b1c72a633e2881142158b30c12594033896670fbc0d78f61b46b370a84025e5b220c6c442834b4a9df12f4b29c55506ccccd04815759b2834d9fb2f39f4557634464424ba1082c30c2bf715c4bed8d918c3cfd633135bc8bea596154740ef606fffdd2593f20e472492f395d703e1055827ec740df862a70605baadd4d184f6637634da6486793c6f240d0ed081637c556a0545297dff3f8a4bc83498023bfe9599fa8f94f1b6dbcc3e0446b5863fd4eabd6bca97df8fb37ed6f65c0fa9356316944b81724f27755a4b05583d59bd9dad2930a1dcd205c81c9611507298b90b42e08b13ed2fdc0fb7c4d397db7413df47df41fca319d0a2ff8966f0206a3bdfa67ad9dc044e00b301699aa8ed0d14f61648ff08635269e0889418ebe7d04fdd4a1e711915770f8d5c5fed19ce15f2e404c51cc354686efc3fe7bf5fa0f03f3a3883142cda47d0c0f37167fe58d0ac94f14d75e2585d3b9823ebc963da575db5f65733b6d35a6938d3b78a11204e8a54d4795d05c739e46fca5c8239d56e29f36d78a1ebba04f918263570cec4dcff2cef06c2da0db3c65acda270420c976d0949843b7cf6bdf0c68354b30a9f6aa588c111b3a64b6d1f57690e3d46621af3139c26dccf16a09f2688c41189243352bfe8e8871e0c0d1a2cf971a8df844627092d80c16e0267c1aa7bc50f97027737c45c9b334f5b02696ac0e822c970dbdc369c2e7343fdc710f89e99ef05c6b82fc84a20f93c96ee951a47379b8e29110138ed75207b41cbfadfbfe586a0211515ffc5d3008a8b8ed6beaecf693f74f435eabf7265af63ec10707a0b2d8cfb733e382e8ef0beabee9596c775db147ffb5d330b3b741bedc412dfe606168ade1b85d34e15a4b2da153215af27d95f83d65dce00171e9c8da8d92fb810a1aa34ca65a91292c1a4892dcc81a8b1966fe2e8f1cd1ab665b646a69bed401ddfc4d3d6f578be09beeb91d81edd4d0ccaf0edfbc573d70ad478cdf4f5
c65c818ed6fb224738cb64f7b80d0f66e8c6fefb6f49c9ab59f0b05c900a1f1a55d51bf49fec5a6a67d162658c4e4f6d2cbade0f96da86afb15bbd8a91e4090ed378a4c31f65e03b53c5a816eb483ec7b6fe36457586228326e551a4ce6bc904c29a499a2cee9e447d318f36fc52e58fbc4cdcc3fddb37101f554b0a4bfb93f047298cd073c583fa0570d0daa821d33a72b8e8afcbea0a12a5cd91517e49f594f0531a07573cb06f08cb895c5c82b6dc8ff951decdfe306b5012c990448bedd4df17502cc002f00231040199c6fe61ff532e7ea01be14300e2bc7ae7c0d236c3f0c09e978f354bada35e719f2ebda965aee8d27148c2efea242ddd7cd41e8c302a2e597d9b3d1b33ec84f07bbcde86ccea2b01591edf17feab8ea9b95744d5a6b186ee2ba42ca92ee95d0164187cabc59c397d202aadd2e2803ec978f8ea376d8ab046d950ef3a4efc2defb35ff0402ec343cf1e3e70ecaad69f75d1c4e03ef951e8b9d3bf785d178ff19ff1432cc14b33808b86c1c39ac9c19c62fad10f41e9ec8ff95f556e4bf127e40627cb7fecde215197b1243fdca58c3ae8542cd874fb542e9f746ca7490edaccdd91bf8f4bff7bf6a7bc40fd28a67364db47f164ba8784e825baaac670ffed2ad9c5d56ae6f9a1cac9c43d28ca3b9fe28bb7465a4767ffa432092ca77985bafdd0a2f5bce2b6472a10a2b0f3cdcb60b14233256547d826b53b010682af15d0e29ce6b5dc0242533fd8f2831fec31d9dcb1f6e67e3eed94ad225c29dc040c00bd0170450062348f259d22c13bd80a59dffa8900e33af85c1012652478e18f0e64815204fd417c4bd0071d79b5e9baf904e20f436e8dfa9dc4af7b2f0f06dd6901fedfb275664190bde61df2c7b7849f0ef697646296fe42a684416bfe2be846ff4449b5cf8ad658f1804d90195c10324cfb071c764ff61355c64e759d1a4e9d631a6d78a760a139737763203600145505bf1a7f04ba4106014fb9104b57a8b44dadfa4a7bc1ded25dc9c252594da3a5f52fd364f29a088e9f451502be292785c15de7a651e3ae2a050e0539c5981c2d3406d5a0331ed451d7988b643bc658d258b4f47506bd02d6fd2e0775bcfa91b368bf51207ebd2d63180cb0f02d5b9f6be1b02aa1a962e41c8f26e2ce9dc15b131b9dd4e547fce08e99b2eb1e56d14e19f697bcec0710c7c60e28b5d9af87d9be14614f7b6c733c2aff9c7fba1f36503ad092daf2607896b06ab01fb6d1a4e4961b9374353ef340b4a65a2feac0792efccb67f2749cd73a60beb76cd304b2cd3e80832835d0cb1debaef54f8a3965a47f0993646ecdeb48cf792ae30a0896e1a1eddaf4f09332c1f352ce4347a8faff316f16850cb0367539f39a022bb34a029b12ef8b6712abb4565570a1d172c2bb
d4b242f818b5af1dd46eaf106009a512b53c6b945b6acba91f1d8fbeaf224dbc904172c3e3bdc4fa648e1a240dcf2a1213529d6be1cad52bba9f74f5515ab08d1158cc3d2e6e6c9ca9a089a223335632c79a62c4977c417c5a48d1f63d6a0245856666571d55f03cbed3d07d6be645b595092b8d7acf7cbfd00889a5427fd546d19f44f4e1d6348670d91fa02e4ccd885f5cd87308c190bceba0642d7fbc975ff0ff58cf78a26133488bc538ff6cad84ebbfdb39997a79a0d99eba01310f9020803132216dd8c4fef0e8307cf10e309d5399dc2bee5d2845cdfd30320b212a214f8d3a33d14f42ef143cd33aec5a41d32d589b0ba5b6d8fc512ca40611e5dafc23ee47d111b6008ca94697177a14e3f0e66ab41f2f94c2e37a3e41717c7ebca9318d26a30d136bfe5da7ff73a7fa637f88d0787968986875d7c5d0d4da839ea1990c1cac315a187c3d3843ea9504a4d4f6a6b5da7cfc3b61b3ee9984bbb9789728e94c3663e2bf5331bd7f703d6f40f424e18d8adc839d2b121f7b4b4d40f0e47ac4b808b1e7e45c0204c2fdb2da3be8b59dad1224aab78ad447d52823c386f976d716dc6c6ca3f3e7e41746afe8e9b01946446b6e2eec7ba94db910febfe1e7fa52ffd6390e7f9c5eb173a4ba590f593df45651dde0ad68e535d8a23c46e3f6a7e855c0fc5d2190b57c9ddd7843eb093e5ff98f052b3b81808d803e9a88d9e5fa48847a3c3d18894ce49637bdf211866f2c71116384c40c82236cf84f82f213bcd5f4df22fb0f5087ebb7d344d33bf3087939388b8ab9ce39e4b6766ee84ae7c812e030bc16bbe58aa5fa7837f36626ef47b1b13872194d585381f3b17b488e3a0fdee45f5f113ada681f9913fa2bca3d70a0e7cedbe8c5dca828f116e9d4d2e7fe9cb25fe2fcda8322869afe254eb254b869d4819688782076bffc273eb9ca69a8b357a0be9682b06f959530a848989722c8a9f2c79d8f07ed80a76a3ca557280b830432de6571ff342b6b3a7f644aa7ab96733de40e5989774fe2e0bbf9370e58d4c6abedd5284b6c9a8f140459f3b5289678947c69769fdb20295a80cf26fce7e8c5b245cf139365aa1b8068f50c54fa1359710bde46fd74efd941ff4ef66345f117b0bed8346dc09dc17a6a64093a686736d4c4ba9547503826011b8fbe3a2cfc7ac3b51bd6a575eebd016edc987e49d435d4c2db9750dde190cfe44e96daa99a58c0c6c3cf24debafaed0610c5e6b5ff575c1c5f711ed8e3e3ba9b260b2febabccdc9f54e6d0f81c8b93fa036aa6cd9eb796ffd49c1a9ca5563bf99f01e90dd8cb3e365fe57131e7bfa15e52fe3f60c1cce049690de1ac72f45d1849ebd53420cb136b071e4ef647eb4b9ab528ad0f9f7c776f3fe42c570bc533e9f79cc793bf4149a78ddf
0f8644bab7724b3ff55b884c4aba7aa6bd601269109fabf9d582e48691530d09f34de8658d8dd12b09755cd0a53886fa6d919cf81f77f52b5b0b9fbaf5d03d6a4266cd695984935e852b7a70cf2565496b84d3372e539a8b068109d44ad090b33d057ca3643380d1cfcbf5b34925e368b91dcf0f5fd92e84e7daf14bb907e6e4909c4959e885e9ba5e769cc476ccd9bddff07446251f9ac93afbf664449d60c7c9b56d041fbe584245c4ec8c7cefaa11f7984049bdccfac10afc31799d781b91f7080d37d443819291db27ad7cb70241a7da327ce5e22d76184a4e08bea89246c5b723374c084da38764edf91346aed329eda99668a889349467f752567ee00a5542efbbe2e158744e4e49abefb078a15efdfa1897f43085da7e1295e17ea626789af9b83d13c23faae98c6607da3e521fcf7c36aefe7d9b947b8cd6fc5842c8de3ed200fae36555fc510d0af47ac08a5c06720884a4c8ab90139562dbe6359c1926b4d5c93403b5021b615245b7e68e47145c9172e3ac342bd54a17fcdac155cfd933b51d48e5f46bfa8b11bf8165586ed2ef43740e119efb1e31ff35e828469456b8ee8a9171d8f550785312c3441588a9450b2832e08d803c13466e342a435a862a150c6dc29e0104f012bed29717adcd3c4992256bababd43c4e3991f7c5725dd1b2a486d2ccdcd6ad948f6f53da4ebcd66ce794f2abd5d363b40cf21607475c28680caff3be00ce94d4d9a2a1fb430cccf8154ea335feee4b89fa6839ea9125e97f068899de6f916004e229ae7f9b32b009af9398a83ea0912a27b379202750ce4f5209afb9da6331e6172a4c286fe0cba6e881758423c02a4bc99c363cab1ab9719fcbe7e37aef692c5ba828ae67a208bf5d5095c06be00e7b786da7d31f4ec72e8c69708c03a55c54a84e4b9bf706418629a62ea41a6c4ab7c858459ee01e940c9c99301d45a3c16b5c980fd751d65361bf64f20f9fa5cc207e998e46236a65d393b22d15ed8e388eb086104cecbc64b3aa15e025f0fdfafc889a3ab919923e28afc60ea724e405881d8096fbc26ec3d9eacc6e8e5b59b7bd10806e2b5a45af5059153df9718d2e85322809dbed51e92a096b82f27e8ff418400c314a9bed5e449de8c1b9493c4cadb7d7574c3a1a94bfa5c47750bad7c9d7dc47be8d683b892de0d9882d6a414f36bfec308742689333c6d07f88c8494a9fd52d0f5094c6ebb230b8ebc4cc9797e1d64a21a6db37130018abf696f28f30713fb7c3a55be8bd80cee89e9ed5295e804d2e48b10729b759d3cecee1d6d11b987b7d5678b6bdf5cb6113adcb7eec8af5362e45a1af5664bd85cc90d627e62f1f44ec932b74766c1edbadadc5de3fdf59c05bfbac44307ef94bae54846519b4987fb4c5725d593a5d84e635a9
12b5203b130482d897b8001a12a1fa4323c31bc30f83ce9caa3e5b6802130c69d633fe389c8c6e2d9b110b5869b54a9c9df7327d9f3b8fb46bd0f4c9bf299e5ee4b181ece08d6e978836aea653cbc22ced393d749e956ae2775e877dd87e9848c681e4af9c29f0ebc6152822318c8b32bdd3dd2388a0196fca2c6a176c37c645686ddd5e359db948bf1fe122958c68eca414f5a3c2d5ab4f896ce4db22d09cf540f6ce296726f5ec1e63203f79238fe75a7468ee51ffff67c4d103129a9d9c97e8dd8b8d0b52b6afdefbf1bde912f3cf42b7bae14dbb98d2208293bb0061192c12d525e1e84f0a83df6778c3f48d3c3bc0ceb68a374dc2c80028267c73fbddb09ff085ce5ec58a596f4058a3579ccc5af4e2717e1e6381d7cbc8accb65d85e1f787401086e11628b16e58f9141362dabbc566866d906d813632928ea551b39217239510ed37eb745e378f69fb0796b442ba11e8fe7ac3c0c72dfd737961a61ba36ba6c94e1873e00b8c3108a00ca6dc1b55ee524f6e0f17fa9ad7899050d1fd01134658749cda00ac9d2ffb147aa745e18dc677c36eeb1ae6b903071c3aaaed860ba4c06f706f7deec7eb6977de1f2d78b1df7efbae4acdec1ec35833f55321d4601995a15271f1b32c60662a428fbb3ae799d827136e0ff3496a6bc8251d55430631cfe500511787776894147330030fb47cd62b3cc73104d4b759ace3fd2cd2a936c3e65ff71aa2012bfaf2d7c47bb33d2885a6cf1b75504d4bd007fc59c947270c49fe53976cce349ef177c7d17d209abfb4b1cb7064cbbdb711e19f5194bd0402ef97e6e3210096b51fefc8985babbfb642d0c76373e1a23a8690662f5767d8c67e3794ed98cdfae16981aa5a008fd3fc8b41dec0642602d37576d01c2b87dce2eb5575143429ceaf6ad2fbdd709012a937280d35fb35e20ef67498ff72fcac92d25de3213944d550963c9696891285b439efe77376f2b9c8b5fac954998475745cbc76b3898f9eb09fe33be0b7619e6ef6379c41c0bc04fb0f15c988426fd51853be56025c50452791a6e3341fc5a558223d3e2aba49f5e3ceeedeffded3ed55e615118dba1fe14c4fa120a5f6ffb1dfe0794802a11b041b4d83fd90726e285cb771101e91b9dd180d42f30293e0df4f8952f1c5cda633136f1e30c803653dd90683f5dc722be491434fdd504dff1c917432e6e04065c1044b6b38d1d61b57f4eded135d7de22cce4eee11cd1e20e7f27536a75c291269e3c0a229a428a701de5d562f79c98bd87622beb7904f17119ec6ca8918ce4fca462efd6541cf982dd3a411f920068679b346efb363af976421b78dad8e2104a0e6b0cdb7e79daf967b66e68676044c36ee2e350f6f39f5120509e004ae7cd96542fef78aaafb64ddae778f8117a19
459f6e638a969c3e166d8ce1bbab439a834621dc41f1f0c4e9fef18cb6d2bef30852a499277ff3fea4c5f79bfd894354d567c17b38e2e1db4874cc61e28ec951a92567d3eda5a7e299fb84edb235b9785e066f2ae4d483794dff059f9eab82433676d8db696ca98a849d61271c2eeebe6bea3410723ab20c550b62e6d7405523763832d5015bac29e950cb0b96809b41729537b627496f10cdeea00fadfab49d15e4843cd6512e2abbcca9e2abb631306080cf3121efe2ba87fb9972bc28965e59cfd9d34e3b9b275b43e793524daa5774360881a31f029181ea4a1d6788a2c1452898c89789b46ec6a8beab6d9aac3193c75a1b6f25bf5a6dfbc80e650840aec9521c6e739094e0e4398cf377897bc14d865bb0ffec2e7d67cb0c504a38c5cb98d2c39a8303b5b13eb7290c8bdf7d78d59bc1e4a2918eee0a5f4c28c1b567aa8d9f2fc7257f94148266465971e0946e55cf8f78b9c49fe2ff6dbb837d93ff6457d41f1af321c8b513173a91c6624eada68e8b91035e47133f91eed223fd86564acb10f1718adf5bbc81cce6cb2d7acd4f3c1b2f334b7bdda2a289dfe1008f6e702dbcf3fdb46d39d3d71e3f10bd2be6d15bf30f15da1f49e98191ed705e321c2e428e8cdfbe2f6ea9a714c2544c7b19f61e8e54af468318a3653a2b5d4e770" - ); - - let decompressed = - decompress_brotli(&raw_batch, MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize).unwrap(); - assert_eq!(decompressed, raw_batch_decompressed); - } -} diff --git a/kona/crates/protocol/protocol/src/channel.rs b/kona/crates/protocol/protocol/src/channel.rs deleted file mode 100644 index e62689cdd58..00000000000 --- a/kona/crates/protocol/protocol/src/channel.rs +++ /dev/null @@ -1,343 +0,0 @@ -//! Channel Types - -use alloc::vec::Vec; -use alloy_primitives::{Bytes, map::HashMap}; - -use crate::{BlockInfo, Frame}; - -/// [`CHANNEL_ID_LENGTH`] is the length of the channel ID. -pub const CHANNEL_ID_LENGTH: usize = 16; - -/// [`ChannelId`] is an opaque identifier for a channel. -pub type ChannelId = [u8; CHANNEL_ID_LENGTH]; - -/// [`MAX_RLP_BYTES_PER_CHANNEL`] is the maximum amount of bytes that will be read from -/// a channel. This limit is set when decoding the RLP. 
-pub const MAX_RLP_BYTES_PER_CHANNEL: u64 = 10_000_000; - -/// [`FJORD_MAX_RLP_BYTES_PER_CHANNEL`] is the maximum amount of bytes that will be read from -/// a channel when the Fjord Hardfork is activated. This limit is set when decoding the RLP. -pub const FJORD_MAX_RLP_BYTES_PER_CHANNEL: u64 = 100_000_000; - -/// An error returned when adding a frame to a channel. -#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq, Hash)] -pub enum ChannelError { - /// The frame id does not match the channel id. - #[error("Frame id does not match channel id")] - FrameIdMismatch, - /// The channel is closed. - #[error("Channel is closed")] - ChannelClosed, - /// The frame number is already in the channel. - #[error("Frame number {0} already exists")] - FrameNumberExists(usize), - /// The frame number is beyond the end frame. - #[error("Frame number {0} is beyond end frame")] - FrameBeyondEndFrame(usize), -} - -/// A Channel is a set of batches that are split into at least one, but possibly multiple frames. -/// -/// Frames are allowed to be ingested out of order. -/// Each frame is ingested one by one. 
Once a frame with `closed` is added to the channel, the -/// channel may mark itself as ready for reading once all intervening frames have been added -#[derive(Debug, Clone, Default)] -pub struct Channel { - /// The unique identifier for this channel - pub id: ChannelId, - /// The block that the channel is currently open at - pub open_block: BlockInfo, - /// Estimated memory size, used to drop the channel if we have too much data - pub estimated_size: usize, - /// True if the last frame has been buffered - pub closed: bool, - /// The highest frame number that has been ingested - pub highest_frame_number: u16, - /// The frame number of the frame where `is_last` is true - /// No other frame number may be higher than this - pub last_frame_number: u16, - /// Store a map of frame number to frame for constant time ordering - pub inputs: HashMap<u16, Frame>, - /// The highest L1 inclusion block that a frame was included in - pub highest_l1_inclusion_block: BlockInfo, -} - -impl Channel { - /// Create a new [`Channel`] with the given [`ChannelId`] and [`BlockInfo`]. - pub fn new(id: ChannelId, open_block: BlockInfo) -> Self { - Self { id, open_block, inputs: HashMap::default(), ..Default::default() } - } - - /// Returns the current [`ChannelId`] for the channel. - pub const fn id(&self) -> ChannelId { - self.id - } - - /// Returns the number of frames ingested. - pub fn len(&self) -> usize { - self.inputs.len() - } - - /// Returns if the channel is empty. - pub fn is_empty(&self) -> bool { - self.inputs.is_empty() - } - - /// Add a frame to the channel. 
- /// - /// ## Takes - /// - `frame`: The frame to add to the channel - /// - `l1_inclusion_block`: The block that the frame was included in - /// - /// ## Returns - /// - `Ok(()):` If the frame was successfully buffered - /// - `Err(_):` If the frame was invalid - pub fn add_frame( - &mut self, - frame: Frame, - l1_inclusion_block: BlockInfo, - ) -> Result<(), ChannelError> { - // Ensure that the frame ID is equal to the channel ID. - if frame.id != self.id { - return Err(ChannelError::FrameIdMismatch); - } - if frame.is_last && self.closed { - return Err(ChannelError::ChannelClosed); - } - if self.inputs.contains_key(&frame.number) { - return Err(ChannelError::FrameNumberExists(frame.number as usize)); - } - if self.closed && frame.number >= self.last_frame_number { - return Err(ChannelError::FrameBeyondEndFrame(frame.number as usize)); - } - - // Guaranteed to succeed at this point. Update the channel state. - if frame.is_last { - self.last_frame_number = frame.number; - self.closed = true; - - // Prune frames with a higher number than the last frame number when we receive a - // closing frame. - if self.last_frame_number < self.highest_frame_number { - self.inputs.retain(|id, frame| { - self.estimated_size -= frame.size(); - *id < self.last_frame_number - }); - self.highest_frame_number = self.last_frame_number; - } - } - - // Update the highest frame number. - if frame.number > self.highest_frame_number { - self.highest_frame_number = frame.number; - } - - if self.highest_l1_inclusion_block.number < l1_inclusion_block.number { - self.highest_l1_inclusion_block = l1_inclusion_block; - } - - self.estimated_size += frame.size(); - self.inputs.insert(frame.number, frame); - Ok(()) - } - - /// Returns the block number of the L1 block that contained the first [`Frame`] in this channel. - pub const fn open_block_number(&self) -> u64 { - self.open_block.number - } - - /// Returns the estimated size of the channel including [`Frame`] overhead. 
- pub const fn size(&self) -> usize { - self.estimated_size - } - - /// Returns `true` if the channel is ready to be read. - pub fn is_ready(&self) -> bool { - // Must have buffered the last frame before the channel is ready. - if !self.closed { - return false; - } - - // Must have the possibility of contiguous frames. - if self.inputs.len() != (self.last_frame_number + 1) as usize { - return false; - } - - // Check for contiguous frames. - for i in 0..=self.last_frame_number { - if !self.inputs.contains_key(&i) { - return false; - } - } - - true - } - - /// Returns all of the channel's [`Frame`]s concatenated together. - /// - /// ## Returns - /// - /// - `Some(Bytes)`: The concatenated frame data - /// - `None`: If the channel is missing frames - pub fn frame_data(&self) -> Option<Bytes> { - if self.is_empty() { - return None; - } - let mut data = Vec::with_capacity(self.size()); - (0..=self.last_frame_number).try_for_each(|i| { - let frame = self.inputs.get(&i)?; - data.extend_from_slice(&frame.data); - Some(()) - })?; - Some(data.into()) - } -} - -#[cfg(test)] -mod test { - use super::*; - use alloc::{ - string::{String, ToString}, - vec, - }; - - struct FrameValidityTestCase { - #[allow(dead_code)] - name: String, - frames: Vec<Frame>, - should_error: Vec<bool>, - sizes: Vec<u64>, - frame_data: Option<Bytes>, - } - - fn run_frame_validity_test(test_case: FrameValidityTestCase) { - // #[cfg(feature = "std")] - // println!("Running test: {}", test_case.name); - - let id = [0xFF; 16]; - let block = BlockInfo::default(); - let mut channel = Channel::new(id, block); - - if test_case.frames.len() != test_case.should_error.len() || - test_case.frames.len() != test_case.sizes.len() - { - panic!("Test case length mismatch"); - } - - for (i, frame) in test_case.frames.iter().enumerate() { - let result = channel.add_frame(frame.clone(), block); - if test_case.should_error[i] { - assert!(result.is_err()); - } else { - assert!(result.is_ok()); - } - 
assert_eq!(channel.size(), test_case.sizes[i] as usize); - } - - if test_case.frame_data.is_some() { - assert_eq!(channel.frame_data().unwrap(), test_case.frame_data.unwrap()); - } - } - - #[test] - fn test_channel_accessors() { - let id = [0xFF; 16]; - let block = BlockInfo { number: 42, timestamp: 0, ..Default::default() }; - let channel = Channel::new(id, block); - - assert_eq!(channel.id(), id); - assert_eq!(channel.open_block_number(), block.number); - assert_eq!(channel.size(), 0); - assert_eq!(channel.len(), 0); - assert!(channel.is_empty()); - assert!(!channel.is_ready()); - } - - #[test] - fn test_frame_validity() { - let id = [0xFF; 16]; - let test_cases = [ - FrameValidityTestCase { - name: "wrong channel".to_string(), - frames: vec![Frame { id: [0xEE; 16], ..Default::default() }], - should_error: vec![true], - sizes: vec![0], - frame_data: None, - }, - FrameValidityTestCase { - name: "double close".to_string(), - frames: vec![ - Frame { id, is_last: true, number: 2, data: b"four".to_vec() }, - Frame { id, is_last: true, number: 1, ..Default::default() }, - ], - should_error: vec![false, true], - sizes: vec![204, 204], - frame_data: None, - }, - FrameValidityTestCase { - name: "duplicate frame".to_string(), - frames: vec![ - Frame { id, number: 2, data: b"four".to_vec(), ..Default::default() }, - Frame { id, number: 2, data: b"seven".to_vec(), ..Default::default() }, - ], - should_error: vec![false, true], - sizes: vec![204, 204], - frame_data: None, - }, - FrameValidityTestCase { - name: "duplicate closing frames".to_string(), - frames: vec![ - Frame { id, number: 2, is_last: true, data: b"four".to_vec() }, - Frame { id, number: 2, is_last: true, data: b"seven".to_vec() }, - ], - should_error: vec![false, true], - sizes: vec![204, 204], - frame_data: None, - }, - FrameValidityTestCase { - name: "frame past closing".to_string(), - frames: vec![ - Frame { id, number: 2, is_last: true, data: b"four".to_vec() }, - Frame { id, number: 10, data: 
b"seven".to_vec(), ..Default::default() }, - ], - should_error: vec![false, true], - sizes: vec![204, 204], - frame_data: None, - }, - FrameValidityTestCase { - name: "prune after close frame".to_string(), - frames: vec![ - Frame { id, number: 0, is_last: false, data: b"seven".to_vec() }, - Frame { id, number: 1, is_last: true, data: b"four".to_vec() }, - ], - should_error: vec![false, false], - sizes: vec![205, 409], - frame_data: Some(b"sevenfour".to_vec().into()), - }, - FrameValidityTestCase { - name: "multiple valid frames, no data".to_string(), - frames: vec![ - Frame { id, number: 1, data: b"seven__".to_vec(), ..Default::default() }, - Frame { id, number: 2, data: b"four".to_vec(), ..Default::default() }, - ], - should_error: vec![false, false], - sizes: vec![207, 411], - // Notice: this is none because there is no frame at index 0, - // which causes the frame_data to short-circuit to None. - frame_data: None, - }, - FrameValidityTestCase { - name: "multiple valid frames".to_string(), - frames: vec![ - Frame { id, number: 0, data: b"seven__".to_vec(), ..Default::default() }, - Frame { id, number: 1, data: b"four".to_vec(), ..Default::default() }, - ], - should_error: vec![false, false], - sizes: vec![207, 411], - frame_data: Some(b"seven__".to_vec().into()), - }, - ]; - - test_cases.into_iter().for_each(run_frame_validity_test); - } -} diff --git a/kona/crates/protocol/protocol/src/info/ecotone.rs b/kona/crates/protocol/protocol/src/info/ecotone.rs deleted file mode 100644 index cc6fa79079c..00000000000 --- a/kona/crates/protocol/protocol/src/info/ecotone.rs +++ /dev/null @@ -1,256 +0,0 @@ -//! Contains ecotone-specific L1 block info types. 
- -use crate::{ - DecodeError, - info::{ - L1BlockInfoEcotoneBaseFields, - bedrock_base::{ - L1BlockInfoBedrockBaseFields, ambassador_impl_L1BlockInfoBedrockBaseFields, - }, - ecotone_base::{L1BlockInfoEcotoneBase, ambassador_impl_L1BlockInfoEcotoneBaseFields}, - }, -}; -use alloc::vec::Vec; -use alloy_primitives::{Address, B256, Bytes, U256}; -use ambassador::Delegate; - -/// Represents the fields within an Ecotone L1 block info transaction. -/// -/// Ecotone Binary Format -/// +---------+--------------------------+ -/// | Bytes | Field | -/// +---------+--------------------------+ -/// | 4 | Function signature | -/// | 4 | BaseFeeScalar | -/// | 4 | BlobBaseFeeScalar | -/// | 8 | SequenceNumber | -/// | 8 | Timestamp | -/// | 8 | L1BlockNumber | -/// | 32 | BaseFee | -/// | 32 | BlobBaseFee | -/// | 32 | BlockHash | -/// | 32 | BatcherHash | -/// +---------+--------------------------+ -#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] -#[allow(clippy::duplicated_attributes)] -#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] -#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct L1BlockInfoEcotone { - #[cfg_attr(feature = "serde", serde(flatten))] - base: L1BlockInfoEcotoneBase, - /// Indicates that the scalars are empty. - /// This is an edge case where the first block in ecotone has no scalars, - /// so the bedrock tx l1 cost function needs to be used. - pub empty_scalars: bool, - /// The l1 fee overhead used along with the `empty_scalars` field for the - /// bedrock tx l1 cost function. - /// - /// This field is deprecated in the Ecotone Hardfork. - pub l1_fee_overhead: U256, -} - -/// Accessors to fields deprecated in later Isthmus. -pub trait L1BlockInfoEcotoneOnlyFields { - /// Indicates that the scalars are empty. 
- /// This is an edge case where the first block in ecotone has no scalars, - /// so the bedrock tx l1 cost function needs to be used. - fn empty_scalars(&self) -> bool; - - /// The l1 fee overhead used along with the `empty_scalars` field for the - /// bedrock tx l1 cost function. - /// - /// This field is deprecated in the Ecotone Hardfork. - fn l1_fee_overhead(&self) -> U256; -} - -impl L1BlockInfoEcotoneOnlyFields for L1BlockInfoEcotone { - fn empty_scalars(&self) -> bool { - self.empty_scalars - } - - fn l1_fee_overhead(&self) -> U256 { - self.l1_fee_overhead - } -} - -/// Accessors for all Ecotone fields. -pub trait L1BlockInfoEcotoneFields: - L1BlockInfoBedrockBaseFields + L1BlockInfoEcotoneOnlyFields -{ -} - -impl L1BlockInfoEcotoneFields for L1BlockInfoEcotone {} - -impl L1BlockInfoEcotone { - /// The type byte identifier for the L1 scalar format in Ecotone. - pub const L1_SCALAR: u8 = 1; - - /// The length of an L1 info transaction in Ecotone. - pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5; - - /// The 4 byte selector of "setL1BlockValuesEcotone()" - pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x44, 0x0a, 0x5e, 0x20]; - - /// Encodes the [`L1BlockInfoEcotone`] object into Ethereum transaction calldata. - pub fn encode_calldata(&self) -> Bytes { - let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); - self.encode_ecotone_header(&mut buf); - self.base.encode_calldata_body(&mut buf); - // Notice: do not include the `empty_scalars` field in the calldata. - // Notice: do not include the `l1_fee_overhead` field in the calldata. - buf.into() - } - - /// Encodes the header part of the [`L1BlockInfoEcotone`] object. - pub fn encode_ecotone_header(&self, buf: &mut Vec<u8>) { - buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()) - } - - /// Decodes the [`L1BlockInfoEcotone`] object from ethereum transaction calldata. 
- pub fn decode_calldata(r: &[u8]) -> Result<Self, DecodeError> { - if r.len() != Self::L1_INFO_TX_LEN { - return Err(DecodeError::InvalidEcotoneLength(Self::L1_INFO_TX_LEN, r.len())); - } - // SAFETY: For all below slice operations, the full - // length is validated above to be `164`. - let base = L1BlockInfoEcotoneBase::decode_calldata_body(r); - - Ok(Self::new( - base.number(), - base.time(), - base.base_fee(), - base.block_hash(), - base.sequence_number(), - base.batcher_address(), - base.blob_base_fee, - base.blob_base_fee_scalar, - base.base_fee_scalar, - // Notice: the `empty_scalars` field is not included in the calldata. - // This is used by the evm to indicate that the bedrock tx l1 cost function - // needs to be used. - false, - // Notice: the `l1_fee_overhead` field is not included in the calldata. - U256::ZERO, - )) - } - - /// Construct from all values. - #[allow(clippy::too_many_arguments)] - pub(crate) const fn new( - number: u64, - time: u64, - base_fee: u64, - block_hash: B256, - sequence_number: u64, - batcher_address: Address, - blob_base_fee: u128, - blob_base_fee_scalar: u32, - base_fee_scalar: u32, - empty_scalars: bool, - l1_fee_overhead: U256, - ) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new( - number, - time, - base_fee, - block_hash, - sequence_number, - batcher_address, - blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - ), - empty_scalars, - l1_fee_overhead, - } - } - /// Construct from default values and `base_fee`. - pub fn new_from_base_fee(base_fee: u64) -> Self { - Self { base: L1BlockInfoEcotoneBase::new_from_base_fee(base_fee), ..Default::default() } - } - /// Construct from default values and `block_hash`. - pub fn new_from_block_hash(block_hash: B256) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_block_hash(block_hash); - Self { base, ..Default::default() } - } - /// Construct from default values and `sequence_number`. 
- pub fn new_from_sequence_number(sequence_number: u64) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new_from_sequence_number(sequence_number), - ..Default::default() - } - } - /// Construct from default values and `batcher_address`. - pub fn new_from_batcher_address(batcher_address: Address) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new_from_batcher_address(batcher_address), - ..Default::default() - } - } - /// Construct from default values and `blob_base_fee`. - pub fn new_from_blob_base_fee(blob_base_fee: u128) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee(blob_base_fee); - Self { base, ..Default::default() } - } - /// Construct from default values and `blob_base_fee_scalar`. - pub fn new_from_blob_base_fee_scalar(base_fee_scalar: u32) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee_scalar(base_fee_scalar); - Self { base, ..Default::default() } - } - /// Construct from default values and `base_fee_scalar`. - pub fn new_from_base_fee_scalar(base_fee: u32) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_base_fee_scalar(base_fee); - Self { base, ..Default::default() } - } - /// Construct from default values and `l1_fee_overhead`. - pub fn new_from_l1_fee_overhead(l1_fee_overhead: U256) -> Self { - Self { l1_fee_overhead, ..Default::default() } - } - /// Construct from default values and `empty_scalars`. - pub fn new_from_empty_scalars(empty_scalars: bool) -> Self { - Self { empty_scalars, ..Default::default() } - } - /// Construct from default values, `number` and `block_hash`. 
- pub fn new_from_number_and_block_hash(number: u64, block_hash: B256) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_number_and_block_hash(number, block_hash); - Self { base, ..Default::default() } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - - #[test] - fn test_decode_calldata_ecotone_invalid_length() { - let r = vec![0u8; 1]; - assert_eq!( - L1BlockInfoEcotone::decode_calldata(&r), - Err(DecodeError::InvalidEcotoneLength(L1BlockInfoEcotone::L1_INFO_TX_LEN, r.len(),)) - ); - } - - #[test] - fn test_l1_block_info_ecotone_roundtrip_calldata_encoding() { - let info = L1BlockInfoEcotone::new( - 1, - 2, - 3, - B256::from([4u8; 32]), - 5, - Address::from([6u8; 20]), - 7, - 8, - 9, - false, - U256::ZERO, - ); - - let calldata = info.encode_calldata(); - let decoded_info = L1BlockInfoEcotone::decode_calldata(&calldata).unwrap(); - assert_eq!(info, decoded_info); - } -} diff --git a/kona/crates/protocol/protocol/src/info/isthmus.rs b/kona/crates/protocol/protocol/src/info/isthmus.rs deleted file mode 100644 index c51f6a8e67f..00000000000 --- a/kona/crates/protocol/protocol/src/info/isthmus.rs +++ /dev/null @@ -1,265 +0,0 @@ -//! Isthmus L1 Block Info transaction types. - -use crate::info::{ - bedrock_base::ambassador_impl_L1BlockInfoBedrockBaseFields, - ecotone_base::ambassador_impl_L1BlockInfoEcotoneBaseFields, -}; -use alloc::vec::Vec; -use alloy_primitives::{Address, B256, Bytes}; -use ambassador::{Delegate, delegatable_trait}; - -use crate::{ - DecodeError, - info::{ - bedrock_base::L1BlockInfoBedrockBaseFields, - ecotone_base::{L1BlockInfoEcotoneBase, L1BlockInfoEcotoneBaseFields}, - }, -}; - -/// Represents the fields within an Isthmus L1 block info transaction. 
-/// -/// Isthmus Binary Format -/// +---------+--------------------------+ -/// | Bytes | Field | -/// +---------+--------------------------+ -/// | 4 | Function signature | -/// | 4 | BaseFeeScalar | -/// | 4 | BlobBaseFeeScalar | -/// | 8 | SequenceNumber | -/// | 8 | Timestamp | -/// | 8 | L1BlockNumber | -/// | 32 | BaseFee | -/// | 32 | BlobBaseFee | -/// | 32 | BlockHash | -/// | 32 | BatcherHash | -/// | 4 | OperatorFeeScalar | -/// | 8 | OperatorFeeConstant | -/// +---------+--------------------------+ -#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] -#[allow(clippy::duplicated_attributes)] -#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] -#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct L1BlockInfoIsthmus { - #[cfg_attr(feature = "serde", serde(flatten))] - base: L1BlockInfoEcotoneBase, - /// The operator fee scalar - pub operator_fee_scalar: u32, - /// The operator fee constant - pub operator_fee_constant: u64, -} - -/// Accessors for fields in Isthmus and later. -#[delegatable_trait] -pub trait L1BlockInfoIsthmusBaseFields: L1BlockInfoEcotoneBaseFields { - /// The operator fee scalar - fn operator_fee_scalar(&self) -> u32; - /// The operator fee constant - fn operator_fee_constant(&self) -> u64; -} - -impl L1BlockInfoIsthmusBaseFields for L1BlockInfoIsthmus { - /// The operator fee scalar - fn operator_fee_scalar(&self) -> u32 { - self.operator_fee_scalar - } - /// The operator fee constant - fn operator_fee_constant(&self) -> u64 { - self.operator_fee_constant - } -} - -/// Accessors for all Isthmus fields. -pub trait L1BlockInfoIsthmusFields: - L1BlockInfoEcotoneBaseFields + L1BlockInfoIsthmusBaseFields -{ -} - -impl L1BlockInfoIsthmusFields for L1BlockInfoIsthmus {} - -impl L1BlockInfoIsthmus { - /// The type byte identifier for the L1 scalar format in Isthmus. 
- pub const L1_SCALAR: u8 = 2; - - /// The length of an L1 info transaction in Isthmus. - pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5 + 4 + 8; - - /// The 4 byte selector of "setL1BlockValuesIsthmus()" - pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x09, 0x89, 0x99, 0xbe]; - - /// Encodes the [`L1BlockInfoIsthmus`] object into Ethereum transaction calldata. - pub fn encode_calldata(&self) -> Bytes { - let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); - self.encode_calldata_header(&mut buf); - self.encode_calldata_body(&mut buf); - buf.into() - } - - /// Encodes the header of the [`L1BlockInfoIsthmus`] object. - pub fn encode_calldata_header(&self, buf: &mut Vec<u8>) { - buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()); - } - - /// Encodes the base of the [`L1BlockInfoIsthmus`] object. - pub fn encode_calldata_body(&self, buf: &mut Vec<u8>) { - self.base.encode_calldata_body(buf); - - // Encode Isthmus-specific fields - buf.extend_from_slice(self.operator_fee_scalar.to_be_bytes().as_ref()); - buf.extend_from_slice(self.operator_fee_constant.to_be_bytes().as_ref()); - } - - /// Decodes the [`L1BlockInfoIsthmus`] object from ethereum transaction calldata. - pub fn decode_calldata(r: &[u8]) -> Result<Self, DecodeError> { - if r.len() != Self::L1_INFO_TX_LEN { - return Err(DecodeError::InvalidIsthmusLength(Self::L1_INFO_TX_LEN, r.len())); - } - // SAFETY: For all below slice operations, the full - // length is validated above to be `176`. - Self::decode_calldata_body(r) - } - - /// Decodes the body of the [`L1BlockInfoIsthmus`] object. 
- pub fn decode_calldata_body(r: &[u8]) -> Result<Self, DecodeError> { - let base = L1BlockInfoEcotoneBase::decode_calldata_body(r); - - // Decode Isthmus-specific fields - // SAFETY: 4 bytes are copied directly into the array - let mut operator_fee_scalar = [0u8; 4]; - operator_fee_scalar.copy_from_slice(&r[164..168]); - let operator_fee_scalar = u32::from_be_bytes(operator_fee_scalar); - - // SAFETY: 8 bytes are copied directly into the array - let mut operator_fee_constant = [0u8; 8]; - operator_fee_constant.copy_from_slice(&r[168..176]); - let operator_fee_constant = u64::from_be_bytes(operator_fee_constant); - - Ok(Self::new( - base.number(), - base.time(), - base.base_fee(), - base.block_hash(), - base.sequence_number(), - base.batcher_address(), - base.blob_base_fee(), - base.blob_base_fee_scalar(), - base.base_fee_scalar(), - operator_fee_scalar, - operator_fee_constant, - )) - } - /// Construct from all values. - #[allow(clippy::too_many_arguments)] - pub const fn new( - number: u64, - time: u64, - base_fee: u64, - block_hash: alloy_primitives::FixedBytes<32>, - sequence_number: u64, - batcher_address: Address, - blob_base_fee: u128, - blob_base_fee_scalar: u32, - base_fee_scalar: u32, - operator_fee_scalar: u32, - operator_fee_constant: u64, - ) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new( - number, - time, - base_fee, - block_hash, - sequence_number, - batcher_address, - blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - ), - operator_fee_scalar, - operator_fee_constant, - } - } - /// Construct from default values and `base_fee`. - pub fn new_from_base_fee(base_fee: u64) -> Self { - Self { base: L1BlockInfoEcotoneBase::new_from_base_fee(base_fee), ..Default::default() } - } - /// Construct from default values and `sequence_number`. 
- pub fn new_from_sequence_number(sequence_number: u64) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new_from_sequence_number(sequence_number), - ..Default::default() - } - } - /// Construct from default values and `batcher_address`. - pub fn new_from_batcher_address(batcher_address: Address) -> Self { - Self { - base: L1BlockInfoEcotoneBase::new_from_batcher_address(batcher_address), - ..Default::default() - } - } - /// Construct from default values and `base_fee_scalar`. - pub fn new_from_base_fee_scalar(base_fee: u32) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_base_fee_scalar(base_fee); - Self { base, ..Default::default() } - } - /// Construct from default values and `blob_base_fee`. - pub fn new_from_blob_base_fee(blob_base_fee: u128) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee(blob_base_fee); - Self { base, ..Default::default() } - } - /// Construct from default values and `blob_base_fee_scalar`. - pub fn new_from_blob_base_fee_scalar(base_fee_scalar: u32) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee_scalar(base_fee_scalar); - Self { base, ..Default::default() } - } - /// Construct from default values and `operator_fee_scalar`. - pub fn new_from_operator_fee_scalar(operator_fee_scalar: u32) -> Self { - Self { operator_fee_scalar, ..Default::default() } - } - /// Construct from default values and `operator_fee_constant`. - pub fn new_from_operator_fee_constant(operator_fee_constant: u64) -> Self { - Self { operator_fee_constant, ..Default::default() } - } - /// Construct from default values, `number` and `block_hash`. 
- pub fn new_from_number_and_block_hash(number: u64, block_hash: B256) -> Self { - let base = L1BlockInfoEcotoneBase::new_from_number_and_block_hash(number, block_hash); - Self { base, ..Default::default() } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - - #[test] - fn test_decode_calldata_isthmus_invalid_length() { - let r = vec![0u8; 1]; - assert_eq!( - L1BlockInfoIsthmus::decode_calldata(&r), - Err(DecodeError::InvalidIsthmusLength(L1BlockInfoIsthmus::L1_INFO_TX_LEN, r.len())) - ); - } - - #[test] - fn test_l1_block_info_isthmus_roundtrip_calldata_encoding() { - let info = L1BlockInfoIsthmus::new( - 1, - 2, - 3, - B256::from([4; 32]), - 5, - Address::from_slice(&[6; 20]), - 7, - 8, - 9, - 10, - 11, - ); - - let calldata = info.encode_calldata(); - let decoded_info = L1BlockInfoIsthmus::decode_calldata(&calldata).unwrap(); - - assert_eq!(info, decoded_info); - } -} diff --git a/kona/crates/protocol/protocol/src/info/jovian.rs b/kona/crates/protocol/protocol/src/info/jovian.rs deleted file mode 100644 index 8ea78e875b4..00000000000 --- a/kona/crates/protocol/protocol/src/info/jovian.rs +++ /dev/null @@ -1,225 +0,0 @@ -//! Jovian L1 Block Info transaction types. - -use crate::{ - DecodeError, L1BlockInfoIsthmus, - info::{ - L1BlockInfoBedrockBaseFields, L1BlockInfoEcotoneBaseFields, - bedrock_base::ambassador_impl_L1BlockInfoBedrockBaseFields, - ecotone_base::ambassador_impl_L1BlockInfoEcotoneBaseFields, - isthmus::{L1BlockInfoIsthmusBaseFields, ambassador_impl_L1BlockInfoIsthmusBaseFields}, - }, -}; -use alloc::vec::Vec; -use alloy_primitives::{Address, B256, Bytes}; -use ambassador::{self, Delegate}; - -/// Represents the fields within an Jovian L1 block info transaction. 
-/// -/// Jovian Binary Format -/// +---------+--------------------------+ -/// | Bytes | Field | -/// +---------+--------------------------+ -/// | 4 | Function signature | -/// | 4 | BaseFeeScalar | -/// | 4 | BlobBaseFeeScalar | -/// | 8 | SequenceNumber | -/// | 8 | Timestamp | -/// | 8 | L1BlockNumber | -/// | 32 | BaseFee | -/// | 32 | BlobBaseFee | -/// | 32 | BlockHash | -/// | 32 | BatcherHash | -/// | 4 | OperatorFeeScalar | -/// | 8 | OperatorFeeConstant | -/// | 2 | DAFootprintGasScalar | -/// +---------+--------------------------+ -#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[allow(clippy::duplicated_attributes)] -#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] -#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] -#[delegate(L1BlockInfoIsthmusBaseFields, target = "base")] -pub struct L1BlockInfoJovian { - /// Fields inherited from Isthmus. - #[cfg_attr(feature = "serde", serde(flatten))] - pub base: L1BlockInfoIsthmus, - /// The DA footprint gas scalar - pub da_footprint_gas_scalar: u16, -} -/// Accessors to fields available in Jovian and later. -pub trait L1BlockInfoJovianBaseFields: L1BlockInfoIsthmusBaseFields { - /// The DA footprint gas scalar - fn da_footprint_gas_scalar(&self) -> u16; -} - -impl L1BlockInfoJovianBaseFields for L1BlockInfoJovian { - fn da_footprint_gas_scalar(&self) -> u16 { - self.da_footprint_gas_scalar - } -} - -/// Accessors for all Jovian fields. -pub trait L1BlockInfoJovianFields: - L1BlockInfoIsthmusBaseFields + L1BlockInfoJovianBaseFields -{ -} - -impl L1BlockInfoJovianFields for L1BlockInfoJovian {} - -impl L1BlockInfoJovian { - /// The default DA footprint gas scalar - /// <https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/l1-attributes.md#overview> - pub const DEFAULT_DA_FOOTPRINT_GAS_SCALAR: u16 = 400; - - /// The type byte identifier for the L1 scalar format in Jovian. 
- pub const L1_SCALAR: u8 = 2; - - /// The length of an L1 info transaction in Jovian. - pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5 + 4 + 8 + 2; - - /// The 4 byte selector of "setL1BlockValuesJovian()" - /// Those are the first 4 calldata bytes -> `<https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/l1-attributes.md#overview>` - pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x3d, 0xb6, 0xbe, 0x2b]; - - /// Encodes the [`L1BlockInfoJovian`] object into Ethereum transaction calldata. - pub fn encode_calldata(&self) -> Bytes { - let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); - self.encode_calldata_header(&mut buf); - self.encode_calldata_body(&mut buf); - buf.into() - } - - /// Encodes the header part of the [`L1BlockInfoJovian`] object. - pub fn encode_calldata_header(&self, buf: &mut Vec<u8>) { - buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()); - } - - /// Encodes the base part of the [`L1BlockInfoJovian`] object. - pub fn encode_calldata_body(&self, buf: &mut Vec<u8>) { - self.base.encode_calldata_body(buf); - buf.extend_from_slice(self.da_footprint_gas_scalar.to_be_bytes().as_ref()); - } - - /// Decodes the [`L1BlockInfoJovian`] object from ethereum transaction calldata. - pub fn decode_calldata(r: &[u8]) -> Result<Self, DecodeError> { - if r.len() != Self::L1_INFO_TX_LEN { - return Err(DecodeError::InvalidJovianLength(Self::L1_INFO_TX_LEN, r.len())); - } - Self::decode_calldata_body(r) - } - - /// Decodes the body of the [`L1BlockInfoJovian`] object. - pub fn decode_calldata_body(r: &[u8]) -> Result<Self, DecodeError> { - // SAFETY: For all below slice operations, the full - // length is validated above to be `178`. 
- - let base = L1BlockInfoIsthmus::decode_calldata_body(r)?; - - // SAFETY: 2 bytes are copied directly into the array - let mut da_footprint_gas_scalar = [0u8; 2]; - da_footprint_gas_scalar.copy_from_slice(&r[176..178]); - let mut da_footprint_gas_scalar = u16::from_be_bytes(da_footprint_gas_scalar); - - // If the da footprint gas scalar is 0, use the default value (`https://github.com/ethereum-optimism/specs/blob/664cba65ab9686b0e70ad19fdf2ad054d6295986/specs/protocol/jovian/l1-attributes.md#overview`). - if da_footprint_gas_scalar == 0 { - da_footprint_gas_scalar = Self::DEFAULT_DA_FOOTPRINT_GAS_SCALAR; - } - - Ok(Self::new( - base.number(), - base.time(), - base.base_fee(), - base.block_hash(), - base.sequence_number(), - base.batcher_address(), - base.blob_base_fee(), - base.blob_base_fee_scalar(), - base.base_fee_scalar(), - base.operator_fee_scalar(), - base.operator_fee_constant(), - da_footprint_gas_scalar, - )) - } - - /// Construct from all values. - #[allow(clippy::too_many_arguments)] - pub const fn new( - number: u64, - time: u64, - base_fee: u64, - block_hash: B256, - sequence_number: u64, - batcher_address: Address, - blob_base_fee: u128, - blob_base_fee_scalar: u32, - base_fee_scalar: u32, - operator_fee_scalar: u32, - operator_fee_constant: u64, - da_footprint_gas_scalar: u16, - ) -> Self { - Self { - base: L1BlockInfoIsthmus::new( - number, - time, - base_fee, - block_hash, - sequence_number, - batcher_address, - blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - ), - da_footprint_gas_scalar, - } - } -} - -#[cfg(test)] -mod tests { - - use super::*; - use alloc::vec; - use alloy_primitives::keccak256; - - #[test] - fn test_decode_calldata_jovian_invalid_length() { - let r = vec![0u8; 1]; - assert_eq!( - L1BlockInfoJovian::decode_calldata(&r), - Err(DecodeError::InvalidJovianLength(L1BlockInfoJovian::L1_INFO_TX_LEN, r.len())) - ); - } - - #[test] - fn test_function_selector() { - assert_eq!( 
- keccak256("setL1BlockValuesJovian()")[..4].to_vec(), - L1BlockInfoJovian::L1_INFO_TX_SELECTOR - ); - } - - #[test] - fn test_l1_block_info_jovian_roundtrip_calldata_encoding() { - let info = L1BlockInfoJovian::new( - 1, - 2, - 3, - B256::from([4; 32]), - 5, - Address::from_slice(&[6; 20]), - 7, - 8, - 9, - 10, - 11, - 12, - ); - - let calldata = info.encode_calldata(); - let decoded_info = L1BlockInfoJovian::decode_calldata(&calldata).unwrap(); - - assert_eq!(info, decoded_info); - } -} diff --git a/kona/crates/protocol/protocol/src/info/mod.rs b/kona/crates/protocol/protocol/src/info/mod.rs deleted file mode 100644 index aefc6a2571d..00000000000 --- a/kona/crates/protocol/protocol/src/info/mod.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Module containing L1 Attributes types (aka the L1 block info transaction). -//! -//! # Developer notes -//! -//! The structs implemented throughout this module form three chains of -//! embedding to emulate inheritance. By `a < b` we denote that the fields of -//! struct `a` are a subset of the fields of struct `b`. Delegation is -//! implemented through accessors and by help of the `ambassador` crate. The -//! hardforks `Bedrock` and `Ecotone` each contain both fields that are used by -//! all later hardforks and some that are not. They are implemented by -//! splitting them in two, e.g. `L1BlockInfoBedrockBase` and -//! `L1BlockInfoBedrock`, where the former contains exactly the fields are used -//! by later hardforks and the latter embeds the former and then adds some -//! fields. -//! -//! The chains of embedding are: -//! -//! 1. L1BlockInfoBedrockBase < L1BlockInfoEcotoneBase < L1BlockInfoIsthmus < L1BlockInfoJovian -//! 2. L1BlockInfoBedrockBase < L1BlockInfoBedrock -//! 3. 
L1BlockInfoEcotoneBase < L1BlockInfoEcotone - -mod variant; -pub use variant::L1BlockInfoTx; - -mod bedrock; -pub use bedrock::{L1BlockInfoBedrock, L1BlockInfoBedrockFields, L1BlockInfoBedrockOnlyFields}; - -mod bedrock_base; -pub use bedrock_base::L1BlockInfoBedrockBaseFields; - -mod ecotone; -pub use ecotone::{L1BlockInfoEcotone, L1BlockInfoEcotoneFields, L1BlockInfoEcotoneOnlyFields}; - -mod ecotone_base; -pub use ecotone_base::L1BlockInfoEcotoneBaseFields; - -mod isthmus; -pub use isthmus::{L1BlockInfoIsthmus, L1BlockInfoIsthmusBaseFields, L1BlockInfoIsthmusFields}; - -mod jovian; -pub use jovian::{L1BlockInfoJovian, L1BlockInfoJovianBaseFields, L1BlockInfoJovianFields}; - -mod errors; -pub use errors::{BlockInfoError, DecodeError}; diff --git a/kona/crates/protocol/protocol/src/info/variant.rs b/kona/crates/protocol/protocol/src/info/variant.rs deleted file mode 100644 index bfc34d18fa5..00000000000 --- a/kona/crates/protocol/protocol/src/info/variant.rs +++ /dev/null @@ -1,1058 +0,0 @@ -//! Contains the `L1BlockInfoTx` enum, containing different variants of the L1 block info -//! transaction. 
- -use alloy_consensus::Header; -use alloy_eips::{BlockNumHash, eip7840::BlobParams}; -use alloy_primitives::{Address, B256, Bytes, Sealable, Sealed, TxKind, U256, address}; -use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; -use op_alloy_consensus::{DepositSourceDomain, L1InfoDepositSource, TxDeposit}; - -use crate::{ - BlockInfoError, DecodeError, L1BlockInfoBedrock, L1BlockInfoEcotone, L1BlockInfoIsthmus, - Predeploys, - info::{ - L1BlockInfoBedrockBaseFields, L1BlockInfoEcotoneBaseFields as _, L1BlockInfoJovian, - bedrock::L1BlockInfoBedrockOnlyFields as _, ecotone::L1BlockInfoEcotoneOnlyFields as _, - isthmus::L1BlockInfoIsthmusBaseFields as _, - }, -}; - -/// The system transaction gas limit post-Regolith -const REGOLITH_SYSTEM_TX_GAS: u64 = 1_000_000; - -/// The depositor address of the L1 info transaction -pub(crate) const L1_INFO_DEPOSITOR_ADDRESS: Address = - address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); - -/// The [`L1BlockInfoTx`] enum contains variants for the different versions of the L1 block info -/// transaction on OP Stack chains. -/// -/// This transaction always sits at the top of the block, and alters the `L1 Block` contract's -/// knowledge of the L1 chain. -#[derive(Debug, Clone, Eq, PartialEq, Copy)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub enum L1BlockInfoTx { - /// A Bedrock L1 info transaction - Bedrock(L1BlockInfoBedrock), - /// An Ecotone L1 info transaction - Ecotone(L1BlockInfoEcotone), - /// An Isthmus L1 info transaction - Isthmus(L1BlockInfoIsthmus), - /// A Jovian L1 info transaction - Jovian(L1BlockInfoJovian), -} - -impl L1BlockInfoTx { - /// Creates a new [`L1BlockInfoTx`] from the given information. 
- pub fn try_new( - rollup_config: &RollupConfig, - l1_config: &L1ChainConfig, - system_config: &SystemConfig, - sequence_number: u64, - l1_header: &Header, - l2_block_time: u64, - ) -> Result<Self, BlockInfoError> { - // In the first block of Ecotone, the L1Block contract has not been upgraded yet due to the - // upgrade transactions being placed after the L1 info transaction. Because of this, - // for the first block of Ecotone, we send a Bedrock style L1 block info transaction - if !rollup_config.is_ecotone_active(l2_block_time) || - rollup_config.is_first_ecotone_block(l2_block_time) - { - return Ok(Self::Bedrock(L1BlockInfoBedrock::new( - l1_header.number, - l1_header.timestamp, - l1_header.base_fee_per_gas.unwrap_or(0), - l1_header.hash_slow(), - sequence_number, - system_config.batcher_address, - system_config.overhead, - system_config.scalar, - ))); - } - - // --- Post-Ecotone Operations --- - - let scalar = system_config.scalar.to_be_bytes::<32>(); - let blob_base_fee_scalar = (scalar[0] == L1BlockInfoEcotone::L1_SCALAR) - .then(|| { - Ok::<u32, BlockInfoError>(u32::from_be_bytes( - scalar[24..28].try_into().map_err(|_| BlockInfoError::L1BlobBaseFeeScalar)?, - )) - }) - .transpose()? - .unwrap_or_default(); - let base_fee_scalar = u32::from_be_bytes( - scalar[28..32].try_into().map_err(|_| BlockInfoError::BaseFeeScalar)?, - ); - - // Determine the blob fee configuration based on the timestamp. - // We start with the scheduled blob fee parameters, and then check for the osaka and prague - // parameters. 
- let blob_fee_params = l1_config.blob_schedule_blob_params(); - - let blob_fee_config = - match blob_fee_params.active_scheduled_params_at_timestamp(l1_header.timestamp) { - Some(blob_fee_param) => *blob_fee_param, - None if l1_config.osaka_time.is_some_and(|time| time <= l1_header.timestamp) => { - BlobParams::osaka() - } - None if l1_config - .prague_time.is_some_and(|time| time <= l1_header.timestamp) && - // There was an incident on OP Stack Sepolia chains (03-05-2025) when L1 activated pectra, - // where the sequencer followed the incorrect chain, using the legacy Cancun blob fee - // schedule instead of the new Prague blob fee schedule. This portion of the chain was - // chosen to be canonicalized in favor of the prospect of a deep reorg imposed by the - // sequencers of the testnet chains. An optional hardfork was introduced for Sepolia only, - // where if present, activates the use of the Prague blob fee schedule. If the hardfork is - // not present, and L1 has activated pectra, the Prague blob fee schedule is used - // immediately. 
- (rollup_config.hardforks.pectra_blob_schedule_time.is_none() || - rollup_config.is_pectra_blob_schedule_active(l1_header.timestamp)) => - { - BlobParams::prague() - } - _ => BlobParams::cancun(), - }; - - let blob_base_fee = l1_header.blob_fee(blob_fee_config).unwrap_or(1); - let block_hash = l1_header.hash_slow(); - let base_fee = l1_header.base_fee_per_gas.unwrap_or(0); - - if rollup_config.is_jovian_active(l2_block_time) && - !rollup_config.is_first_jovian_block(l2_block_time) - { - let operator_fee_scalar = system_config.operator_fee_scalar.unwrap_or_default(); - let operator_fee_constant = system_config.operator_fee_constant.unwrap_or_default(); - let mut da_footprint_gas_scalar = system_config - .da_footprint_gas_scalar - .unwrap_or(L1BlockInfoJovian::DEFAULT_DA_FOOTPRINT_GAS_SCALAR); - - if da_footprint_gas_scalar == 0 { - da_footprint_gas_scalar = L1BlockInfoJovian::DEFAULT_DA_FOOTPRINT_GAS_SCALAR; - } - - return Ok(Self::Jovian(L1BlockInfoJovian::new( - l1_header.number, - l1_header.timestamp, - base_fee, - block_hash, - sequence_number, - system_config.batcher_address, - blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - da_footprint_gas_scalar, - ))); - } - - if rollup_config.is_isthmus_active(l2_block_time) && - !rollup_config.is_first_isthmus_block(l2_block_time) - { - let operator_fee_scalar = system_config.operator_fee_scalar.unwrap_or_default(); - let operator_fee_constant = system_config.operator_fee_constant.unwrap_or_default(); - return Ok(Self::Isthmus(L1BlockInfoIsthmus::new( - l1_header.number, - l1_header.timestamp, - base_fee, - block_hash, - sequence_number, - system_config.batcher_address, - blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - ))); - } - - Ok(Self::Ecotone(L1BlockInfoEcotone::new( - l1_header.number, - l1_header.timestamp, - base_fee, - block_hash, - sequence_number, - system_config.batcher_address, - 
blob_base_fee, - blob_base_fee_scalar, - base_fee_scalar, - false, - U256::ZERO, - ))) - } - - /// Creates a new [`L1BlockInfoTx`] from the given information and returns a typed [`TxDeposit`] - /// to include at the top of a block. - pub fn try_new_with_deposit_tx( - rollup_config: &RollupConfig, - l1_config: &L1ChainConfig, - system_config: &SystemConfig, - sequence_number: u64, - l1_header: &Header, - l2_block_time: u64, - ) -> Result<(Self, Sealed<TxDeposit>), BlockInfoError> { - let l1_info = Self::try_new( - rollup_config, - l1_config, - system_config, - sequence_number, - l1_header, - l2_block_time, - )?; - - let source = DepositSourceDomain::L1Info(L1InfoDepositSource { - l1_block_hash: l1_info.block_hash(), - seq_number: sequence_number, - }); - - let mut deposit_tx = TxDeposit { - source_hash: source.source_hash(), - from: L1_INFO_DEPOSITOR_ADDRESS, - to: TxKind::Call(Predeploys::L1_BLOCK_INFO), - mint: 0, - value: U256::ZERO, - gas_limit: 150_000_000, - is_system_transaction: true, - input: l1_info.encode_calldata(), - }; - - // With the regolith hardfork, system transactions were deprecated, and we allocate - // a constant amount of gas for special transactions like L1 block info. - if rollup_config.is_regolith_active(l2_block_time) { - deposit_tx.is_system_transaction = false; - deposit_tx.gas_limit = REGOLITH_SYSTEM_TX_GAS; - } - - Ok((l1_info, deposit_tx.seal_slow())) - } - - /// Decodes the [`L1BlockInfoTx`] object from Ethereum transaction calldata. - pub fn decode_calldata(r: &[u8]) -> Result<Self, DecodeError> { - if r.len() < 4 { - return Err(DecodeError::MissingSelector); - } - // SAFETY: The length of `r` must be at least 4 bytes. 
- let mut selector = [0u8; 4]; - selector.copy_from_slice(&r[0..4]); - match selector { - L1BlockInfoBedrock::L1_INFO_TX_SELECTOR => { - L1BlockInfoBedrock::decode_calldata(r).map(Self::Bedrock) - } - L1BlockInfoEcotone::L1_INFO_TX_SELECTOR => { - L1BlockInfoEcotone::decode_calldata(r).map(Self::Ecotone) - } - L1BlockInfoIsthmus::L1_INFO_TX_SELECTOR => { - L1BlockInfoIsthmus::decode_calldata(r).map(Self::Isthmus) - } - L1BlockInfoJovian::L1_INFO_TX_SELECTOR => { - L1BlockInfoJovian::decode_calldata(r).map(Self::Jovian) - } - _ => Err(DecodeError::InvalidSelector), - } - } - - /// Returns whether the scalars are empty. - pub fn empty_scalars(&self) -> bool { - match self { - Self::Bedrock(_) | Self::Isthmus(..) | Self::Jovian(_) => false, - Self::Ecotone(info) => info.empty_scalars(), - } - } - - /// Returns the block hash for the [`L1BlockInfoTx`]. - pub fn block_hash(&self) -> B256 { - match self { - Self::Bedrock(tx) => tx.block_hash(), - Self::Ecotone(tx) => tx.block_hash(), - Self::Isthmus(tx) => tx.block_hash(), - Self::Jovian(tx) => tx.block_hash(), - } - } - - /// Encodes the [`L1BlockInfoTx`] object into Ethereum transaction calldata. - pub fn encode_calldata(&self) -> Bytes { - match self { - Self::Bedrock(bedrock_tx) => bedrock_tx.encode_calldata(), - Self::Ecotone(ecotone_tx) => ecotone_tx.encode_calldata(), - Self::Isthmus(isthmus_tx) => isthmus_tx.encode_calldata(), - Self::Jovian(jovian_tx) => jovian_tx.encode_calldata(), - } - } - - /// Returns the L1 [`BlockNumHash`] for the info transaction. - pub fn id(&self) -> BlockNumHash { - match self { - Self::Bedrock(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, - Self::Ecotone(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, - Self::Isthmus(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, - Self::Jovian(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, - } - } - - /// Returns the operator fee scalar. 
- pub fn operator_fee_scalar(&self) -> u32 { - match self { - Self::Jovian(block_info) => block_info.operator_fee_scalar(), - Self::Isthmus(block_info) => block_info.operator_fee_scalar(), - _ => 0, - } - } - - /// Returns the operator fee constant. - pub fn operator_fee_constant(&self) -> u64 { - match self { - Self::Jovian(block_info) => block_info.operator_fee_constant(), - Self::Isthmus(block_info) => block_info.operator_fee_constant(), - _ => 0, - } - } - - /// Returns the da footprint - pub const fn da_footprint(&self) -> Option<u16> { - match self { - Self::Jovian(L1BlockInfoJovian { da_footprint_gas_scalar, .. }) => { - Some(*da_footprint_gas_scalar) - } - _ => None, - } - } - - /// Returns the l1 base fee. - pub fn l1_base_fee(&self) -> U256 { - match self { - Self::Bedrock(block_info) => U256::from(block_info.base_fee()), - Self::Ecotone(block_info) => U256::from(block_info.base_fee()), - Self::Isthmus(block_info) => U256::from(block_info.base_fee()), - Self::Jovian(block_info) => U256::from(block_info.base_fee()), - } - } - - /// Returns the l1 fee scalar. - pub fn l1_fee_scalar(&self) -> U256 { - match self { - Self::Bedrock(block) => U256::from(block.l1_fee_scalar()), - Self::Ecotone(block) => U256::from(block.base_fee_scalar()), - Self::Isthmus(block) => U256::from(block.base_fee_scalar()), - Self::Jovian(block) => U256::from(block.base_fee_scalar()), - } - } - - /// Returns the blob base fee. - pub fn blob_base_fee(&self) -> U256 { - match self { - Self::Bedrock(_) => U256::ZERO, - Self::Ecotone(block) => U256::from(block.blob_base_fee()), - Self::Isthmus(block) => U256::from(block.blob_base_fee()), - Self::Jovian(block) => U256::from(block.blob_base_fee()), - } - } - - /// Returns the blob base fee scalar. 
- pub fn blob_base_fee_scalar(&self) -> U256 { - match self { - Self::Bedrock(_) => U256::ZERO, - Self::Ecotone(block_info) => U256::from(block_info.blob_base_fee_scalar()), - Self::Isthmus(block_info) => U256::from(block_info.blob_base_fee_scalar()), - Self::Jovian(block_info) => U256::from(block_info.blob_base_fee_scalar()), - } - } - - /// Returns the L1 fee overhead for the info transaction. After ecotone, this value is ignored. - pub fn l1_fee_overhead(&self) -> U256 { - match self { - Self::Bedrock(block_info) => block_info.l1_fee_overhead(), - Self::Ecotone(block_info) => block_info.l1_fee_overhead(), - Self::Isthmus(_) | Self::Jovian(_) => U256::ZERO, - } - } - - /// Returns the batcher address for the info transaction - pub fn batcher_address(&self) -> Address { - match self { - Self::Bedrock(block) => block.batcher_address(), - Self::Ecotone(block) => block.batcher_address(), - Self::Isthmus(block) => block.batcher_address(), - Self::Jovian(block) => block.batcher_address(), - } - } - - /// Returns the sequence number for the info transaction - pub fn sequence_number(&self) -> u64 { - match self { - Self::Bedrock(block) => block.sequence_number(), - Self::Ecotone(block) => block.sequence_number(), - Self::Isthmus(block) => block.sequence_number(), - Self::Jovian(block) => block.sequence_number(), - } - } -} - -#[cfg(test)] -mod test { - use super::*; - use crate::test_utils::{RAW_BEDROCK_INFO_TX, RAW_ECOTONE_INFO_TX, RAW_ISTHMUS_INFO_TX}; - use alloc::{string::ToString, vec::Vec}; - use alloy_primitives::{address, b256}; - use kona_genesis::HardForkConfig; - use kona_registry::L1Config; - use rstest::rstest; - - #[test] - fn test_l1_block_info_missing_selector() { - let err = L1BlockInfoTx::decode_calldata(&[]); - assert_eq!(err, Err(DecodeError::MissingSelector)); - } - - #[test] - fn test_l1_block_info_tx_invalid_len() { - let calldata = L1BlockInfoBedrock::L1_INFO_TX_SELECTOR - .into_iter() - .chain([0xde, 0xad]) - .collect::<Vec<u8>>(); - let err = 
L1BlockInfoTx::decode_calldata(&calldata); - assert!(err.is_err()); - assert_eq!( - err.err().unwrap().to_string(), - "Invalid bedrock data length. Expected 260, got 6" - ); - - let calldata = L1BlockInfoEcotone::L1_INFO_TX_SELECTOR - .into_iter() - .chain([0xde, 0xad]) - .collect::<Vec<u8>>(); - let err = L1BlockInfoTx::decode_calldata(&calldata); - assert!(err.is_err()); - assert_eq!( - err.err().unwrap().to_string(), - "Invalid ecotone data length. Expected 164, got 6" - ); - - let calldata = L1BlockInfoIsthmus::L1_INFO_TX_SELECTOR - .into_iter() - .chain([0xde, 0xad]) - .collect::<Vec<u8>>(); - let err = L1BlockInfoTx::decode_calldata(&calldata); - assert!(err.is_err()); - assert_eq!( - err.err().unwrap().to_string(), - "Invalid isthmus data length. Expected 176, got 6" - ); - } - - #[test] - fn test_l1_block_info_tx_block_hash() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_block_hash(b256!( - "392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc" - ))); - assert_eq!( - bedrock.block_hash(), - b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc") - ); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_block_hash(b256!( - "1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3" - ))); - assert_eq!( - ecotone.block_hash(), - b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3") - ); - } - - #[test] - fn test_decode_calldata_invalid_selector() { - let err = L1BlockInfoTx::decode_calldata(&[0xde, 0xad, 0xbe, 0xef]); - assert_eq!(err, Err(DecodeError::InvalidSelector)); - } - - #[test] - fn test_l1_block_info_id() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_number_and_block_hash( - 123, - b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), - )); - assert_eq!( - bedrock.id(), - BlockNumHash { - number: 123, - hash: b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc") - } - ); - - let ecotone = 
L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_number_and_block_hash( - 456, - b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), - )); - - assert_eq!( - ecotone.id(), - BlockNumHash { - number: 456, - hash: b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3") - } - ); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_number_and_block_hash( - 101112, - b256!("4f98b83baf52c498b49bfff33e59965b27da7febbea9a2fcc4719d06dc06932a"), - )); - assert_eq!( - isthmus.id(), - BlockNumHash { - number: 101112, - hash: b256!("4f98b83baf52c498b49bfff33e59965b27da7febbea9a2fcc4719d06dc06932a") - } - ); - } - - #[test] - fn test_l1_block_info_sequence_number() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_sequence_number(123)); - assert_eq!(bedrock.sequence_number(), 123); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_sequence_number(456)); - assert_eq!(ecotone.sequence_number(), 456); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_sequence_number(101112)); - assert_eq!(isthmus.sequence_number(), 101112); - } - - #[test] - fn test_operator_fee_constant() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); - assert_eq!(bedrock.operator_fee_constant(), 0); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); - assert_eq!(ecotone.operator_fee_constant(), 0); - - let isthmus = - L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_operator_fee_constant(123)); - assert_eq!(isthmus.operator_fee_constant(), 123); - } - - #[test] - fn test_operator_fee_scalar() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); - assert_eq!(bedrock.operator_fee_scalar(), 0); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); - assert_eq!(ecotone.operator_fee_scalar(), 0); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_operator_fee_scalar(123)); - 
assert_eq!(isthmus.operator_fee_scalar(), 123); - } - - #[test] - fn test_l1_base_fee() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_base_fee(123)); - assert_eq!(bedrock.l1_base_fee(), U256::from(123)); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_base_fee(456)); - assert_eq!(ecotone.l1_base_fee(), U256::from(456)); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_base_fee(101112)); - assert_eq!(isthmus.l1_base_fee(), U256::from(101112)); - } - - #[test] - fn test_l1_fee_overhead() { - let bedrock = - L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_l1_fee_overhead(U256::from(123))); - assert_eq!(bedrock.l1_fee_overhead(), U256::from(123)); - - let ecotone = - L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_l1_fee_overhead(U256::from(456))); - assert_eq!(ecotone.l1_fee_overhead(), U256::from(456)); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::default()); - assert_eq!(isthmus.l1_fee_overhead(), U256::ZERO); - } - - #[test] - fn test_batcher_address() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_batcher_address( - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - )); - assert_eq!(bedrock.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_batcher_address( - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - )); - assert_eq!(ecotone.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_batcher_address( - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - )); - assert_eq!(isthmus.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); - } - - #[test] - fn test_l1_fee_scalar() { - let bedrock = - L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_l1_fee_scalar(U256::from(123))); - assert_eq!(bedrock.l1_fee_scalar(), U256::from(123)); - - 
let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_base_fee_scalar(456)); - assert_eq!(ecotone.l1_fee_scalar(), U256::from(456)); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_base_fee_scalar(101112)); - assert_eq!(isthmus.l1_fee_scalar(), U256::from(101112)); - } - - #[test] - fn test_blob_base_fee() { - let bedrock = L1BlockInfoTx::Bedrock(Default::default()); - assert_eq!(bedrock.blob_base_fee(), U256::ZERO); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_blob_base_fee(456)); - assert_eq!(ecotone.blob_base_fee(), U256::from(456)); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_blob_base_fee(101112)); - assert_eq!(isthmus.blob_base_fee(), U256::from(101112)); - } - - #[test] - fn test_blob_base_fee_scalar() { - let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); - assert_eq!(bedrock.blob_base_fee_scalar(), U256::ZERO); - - let ecotone = - L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_blob_base_fee_scalar(456)); - //dbg!("{}", ecotone); - assert_eq!(ecotone.blob_base_fee_scalar(), U256::from(456)); - - let isthmus = - L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_blob_base_fee_scalar(101112)); - assert_eq!(isthmus.blob_base_fee_scalar(), U256::from(101112)); - } - - #[test] - fn test_empty_scalars() { - let bedrock = L1BlockInfoTx::Bedrock(Default::default()); - assert!(!bedrock.empty_scalars()); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_empty_scalars(true)); - assert!(ecotone.empty_scalars()); - - let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); - assert!(!ecotone.empty_scalars()); - - let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::default()); - assert!(!isthmus.empty_scalars()); - } - - #[test] - fn test_isthmus_l1_block_info_tx_roundtrip() { - let expected = L1BlockInfoIsthmus::new( - 19655712, - 1713121139, - 10445852825, - 
b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), - 5, - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - 1, - 810949, - 1368, - 0xabcd, - 0xdcba, - ); - - let L1BlockInfoTx::Isthmus(decoded) = - L1BlockInfoTx::decode_calldata(RAW_ISTHMUS_INFO_TX.as_ref()).unwrap() - else { - panic!("Wrong fork"); - }; - assert_eq!(expected, decoded); - assert_eq!(L1BlockInfoTx::Isthmus(decoded).encode_calldata().as_ref(), RAW_ISTHMUS_INFO_TX); - } - - #[test] - fn test_bedrock_l1_block_info_tx_roundtrip() { - let expected = L1BlockInfoBedrock::new( - 18334955, - 1697121143, - 10419034451, - b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), - 4, - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - U256::from(0xbc), - U256::from(0xa6fe0), - ); - - let L1BlockInfoTx::Bedrock(decoded) = - L1BlockInfoTx::decode_calldata(RAW_BEDROCK_INFO_TX.as_ref()).unwrap() - else { - panic!("Wrong fork"); - }; - assert_eq!(expected, decoded); - assert_eq!(L1BlockInfoTx::Bedrock(decoded).encode_calldata().as_ref(), RAW_BEDROCK_INFO_TX); - } - - #[test] - fn test_ecotone_l1_block_info_tx_roundtrip() { - let expected = L1BlockInfoEcotone::new( - 19655712, - 1713121139, - 10445852825, - b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), - 5, - address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - 1, - 810949, - 1368, - false, - U256::ZERO, - ); - - let L1BlockInfoTx::Ecotone(decoded) = - L1BlockInfoTx::decode_calldata(RAW_ECOTONE_INFO_TX.as_ref()).unwrap() - else { - panic!("Wrong fork"); - }; - assert_eq!(expected, decoded); - assert_eq!(L1BlockInfoTx::Ecotone(decoded).encode_calldata().as_ref(), RAW_ECOTONE_INFO_TX); - } - - #[test] - fn test_try_new_bedrock() { - let rollup_config = RollupConfig::default(); - let l1_config = L1Config::sepolia(); - let system_config = SystemConfig::default(); - let sequence_number = 0; - let l1_header = Header::default(); - let l2_block_time = 0; - - let l1_info = 
L1BlockInfoTx::try_new( - &rollup_config, - &l1_config, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - let L1BlockInfoTx::Bedrock(l1_info) = l1_info else { - panic!("Wrong fork"); - }; - - assert_eq!(l1_info.number(), l1_header.number); - assert_eq!(l1_info.time(), l1_header.timestamp); - assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); - assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); - assert_eq!(l1_info.sequence_number(), sequence_number); - assert_eq!(l1_info.batcher_address(), system_config.batcher_address); - assert_eq!(l1_info.l1_fee_overhead(), system_config.overhead); - assert_eq!(l1_info.l1_fee_scalar(), system_config.scalar); - } - - #[test] - fn test_try_new_ecotone() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { ecotone_time: Some(1), ..Default::default() }, - ..Default::default() - }; - let l1_config = L1Config::sepolia(); - let system_config = SystemConfig::default(); - let sequence_number = 0; - let l1_header = Header::default(); - let l2_block_time = 0xFF; - - let l1_info = L1BlockInfoTx::try_new( - &rollup_config, - &l1_config, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - let L1BlockInfoTx::Ecotone(l1_info) = l1_info else { - panic!("Wrong fork"); - }; - - assert_eq!(l1_info.number(), l1_header.number); - assert_eq!(l1_info.time(), l1_header.timestamp); - assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); - assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); - assert_eq!(l1_info.sequence_number(), sequence_number); - assert_eq!(l1_info.batcher_address(), system_config.batcher_address); - assert_eq!(l1_info.blob_base_fee(), l1_header.blob_fee(BlobParams::cancun()).unwrap_or(1)); - - let scalar = system_config.scalar.to_be_bytes::<32>(); - let blob_base_fee_scalar = if scalar[0] == L1BlockInfoEcotone::L1_SCALAR { - { - u32::from_be_bytes( - 
scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), - ) - } - } else { - Default::default() - }; - let base_fee_scalar = - u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); - assert_eq!(l1_info.blob_base_fee_scalar(), blob_base_fee_scalar); - assert_eq!(l1_info.base_fee_scalar(), base_fee_scalar); - } - - #[rstest] - #[case::fork_active(true, false)] - #[case::fork_inactive(false, false)] - #[should_panic] - #[case::fork_active_wrong_params(true, true)] - #[should_panic] - #[case::fork_inactive_wrong_params(false, true)] - fn test_try_new_ecotone_with_optional_prague_fee_fork( - #[case] fork_active: bool, - #[case] use_wrong_params: bool, - ) { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { - ecotone_time: Some(1), - pectra_blob_schedule_time: Some(2), - ..Default::default() - }, - ..Default::default() - }; - let mut l1_genesis: L1ChainConfig = L1Config::sepolia().into(); - l1_genesis.prague_time = Some(2); - - let system_config = SystemConfig::default(); - let sequence_number = 0; - let l1_header = Header { - timestamp: if fork_active { 2 } else { 1 }, - excess_blob_gas: Some(0x5080000), - blob_gas_used: Some(0x100000), - requests_hash: Some(B256::ZERO), - ..Default::default() - }; - let l2_block_time = 0xFF; - - let l1_info = L1BlockInfoTx::try_new( - &rollup_config, - &l1_genesis, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - let L1BlockInfoTx::Ecotone(l1_info) = l1_info else { - panic!("Wrong fork"); - }; - - assert_eq!(l1_info.number(), l1_header.number); - assert_eq!(l1_info.time(), l1_header.timestamp); - assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); - assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); - assert_eq!(l1_info.sequence_number(), sequence_number); - assert_eq!(l1_info.batcher_address(), system_config.batcher_address); - assert_eq!( - l1_info.blob_base_fee(), - l1_header - .blob_fee(if 
fork_active != use_wrong_params { - BlobParams::prague() - } else { - BlobParams::cancun() - }) - .unwrap_or(1) - ); - - let scalar = system_config.scalar.to_be_bytes::<32>(); - let blob_base_fee_scalar = if scalar[0] == L1BlockInfoEcotone::L1_SCALAR { - { - u32::from_be_bytes( - scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), - ) - } - } else { - Default::default() - }; - let base_fee_scalar = - u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); - assert_eq!(l1_info.blob_base_fee_scalar(), blob_base_fee_scalar); - assert_eq!(l1_info.base_fee_scalar(), base_fee_scalar); - } - - #[test] - fn test_try_new_isthmus_before_pectra_blob_schedule() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { - isthmus_time: Some(1), - pectra_blob_schedule_time: Some(1713121140), - ..Default::default() - }, - ..Default::default() - }; - let l1_config = L1Config::sepolia(); - let system_config = SystemConfig { - batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - operator_fee_scalar: Some(0xabcd), - operator_fee_constant: Some(0xdcba), - ..Default::default() - }; - let sequence_number = 0; - let l1_header = Header { - number: 19655712, - timestamp: 1713121139, - base_fee_per_gas: Some(10445852825), - // Assume Pectra is active on L1 - requests_hash: Some(B256::ZERO), - ..Default::default() - }; - let l2_block_time = 0xFF; - - let l1_info = L1BlockInfoTx::try_new( - &rollup_config, - &l1_config, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); - - let scalar = system_config.scalar.to_be_bytes::<32>(); - let blob_base_fee_scalar = if scalar[0] == L1BlockInfoIsthmus::L1_SCALAR { - { - u32::from_be_bytes( - scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), - ) - } - } else { - Default::default() - }; - let base_fee_scalar = - 
u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); - - assert_eq!( - l1_info, - L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new( - l1_header.number, - l1_header.timestamp, - l1_header.base_fee_per_gas.unwrap_or(0), - l1_header.hash_slow(), - sequence_number, - system_config.batcher_address, - // Expect cancun blob schedule to be used, since pectra blob schedule is scheduled - // but not active yet. - l1_header.blob_fee(BlobParams::cancun()).unwrap_or(1), - blob_base_fee_scalar, - base_fee_scalar, - system_config.operator_fee_scalar.unwrap_or_default(), - system_config.operator_fee_constant.unwrap_or_default(), - )) - ); - } - - #[test] - fn test_try_new_isthmus() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { isthmus_time: Some(1), ..Default::default() }, - ..Default::default() - }; - let l1_config = L1Config::sepolia(); - let system_config = SystemConfig { - batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - operator_fee_scalar: Some(0xabcd), - operator_fee_constant: Some(0xdcba), - ..Default::default() - }; - let sequence_number = 0; - let l1_header = Header { - number: 19655712, - timestamp: 1713121139, - base_fee_per_gas: Some(10445852825), - ..Default::default() - }; - let l2_block_time = 0xFF; - - let l1_info = L1BlockInfoTx::try_new( - &rollup_config, - &l1_config, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); - - let scalar = system_config.scalar.to_be_bytes::<32>(); - let blob_base_fee_scalar = if scalar[0] == L1BlockInfoIsthmus::L1_SCALAR { - { - u32::from_be_bytes( - scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), - ) - } - } else { - Default::default() - }; - let base_fee_scalar = - u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); - - assert_eq!( - l1_info, - L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new( - 
l1_header.number, - l1_header.timestamp, - l1_header.base_fee_per_gas.unwrap_or(0), - l1_header.hash_slow(), - sequence_number, - system_config.batcher_address, - l1_header.blob_fee(BlobParams::prague()).unwrap_or(1), - blob_base_fee_scalar, - base_fee_scalar, - system_config.operator_fee_scalar.unwrap_or_default(), - system_config.operator_fee_constant.unwrap_or_default(), - )) - ); - } - - #[test] - fn test_try_new_with_deposit_tx() { - let rollup_config = RollupConfig { - hardforks: HardForkConfig { isthmus_time: Some(1), ..Default::default() }, - ..Default::default() - }; - let l1_config = L1Config::sepolia(); - let system_config = SystemConfig { - batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), - operator_fee_scalar: Some(0xabcd), - operator_fee_constant: Some(0xdcba), - ..Default::default() - }; - let sequence_number = 0; - let l1_header = Header { - number: 19655712, - timestamp: 1713121139, - base_fee_per_gas: Some(10445852825), - ..Default::default() - }; - let l2_block_time = 0xFF; - - let (l1_info, deposit_tx) = L1BlockInfoTx::try_new_with_deposit_tx( - &rollup_config, - &l1_config, - &system_config, - sequence_number, - &l1_header, - l2_block_time, - ) - .unwrap(); - - assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); - assert_eq!(deposit_tx.from, L1_INFO_DEPOSITOR_ADDRESS); - assert_eq!(deposit_tx.to, TxKind::Call(Predeploys::L1_BLOCK_INFO)); - assert_eq!(deposit_tx.mint, 0); - assert_eq!(deposit_tx.value, U256::ZERO); - assert_eq!(deposit_tx.gas_limit, REGOLITH_SYSTEM_TX_GAS); - assert!(!deposit_tx.is_system_transaction); - assert_eq!(deposit_tx.input, l1_info.encode_calldata()); - } -} diff --git a/kona/crates/protocol/protocol/src/lib.rs b/kona/crates/protocol/protocol/src/lib.rs deleted file mode 100644 index 84f292975e3..00000000000 --- a/kona/crates/protocol/protocol/src/lib.rs +++ /dev/null @@ -1,74 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = 
"https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -mod batch; -pub use batch::{ - Batch, BatchDecodingError, BatchDropReason, BatchEncodingError, BatchReader, BatchTransaction, - BatchType, BatchValidationProvider, BatchValidity, BatchWithInclusionBlock, DecompressionError, - MAX_SPAN_BATCH_ELEMENTS, RawSpanBatch, SINGLE_BATCH_TYPE, SPAN_BATCH_TYPE, SingleBatch, - SpanBatch, SpanBatchBits, SpanBatchEip1559TransactionData, SpanBatchEip2930TransactionData, - SpanBatchEip7702TransactionData, SpanBatchElement, SpanBatchError, - SpanBatchLegacyTransactionData, SpanBatchPayload, SpanBatchPrefix, SpanBatchTransactionData, - SpanBatchTransactions, SpanDecodingError, -}; - -mod brotli; -pub use brotli::{BrotliDecompressionError, decompress_brotli}; - -mod sync; -pub use sync::SyncStatus; - -mod attributes; -pub use attributes::OpAttributesWithParent; - -mod errors; -pub use errors::OpBlockConversionError; - -mod block; -pub use block::{BlockInfo, FromBlockError, L2BlockInfo}; - -mod frame; -pub use frame::{ - DERIVATION_VERSION_0, FRAME_OVERHEAD, Frame, FrameDecodingError, FrameParseError, MAX_FRAME_LEN, -}; - -mod utils; -pub use utils::{read_tx_data, to_system_config}; - -mod channel; -pub use channel::{ - CHANNEL_ID_LENGTH, Channel, ChannelError, ChannelId, FJORD_MAX_RLP_BYTES_PER_CHANNEL, - MAX_RLP_BYTES_PER_CHANNEL, -}; - -mod deposits; -pub use deposits::{ - DEPOSIT_EVENT_ABI, DEPOSIT_EVENT_ABI_HASH, DEPOSIT_EVENT_VERSION_0, DepositError, - decode_deposit, -}; - -mod info; -pub use info::{ - BlockInfoError, DecodeError, L1BlockInfoBedrock, L1BlockInfoBedrockBaseFields, - L1BlockInfoBedrockFields, L1BlockInfoBedrockOnlyFields, L1BlockInfoEcotone, - 
L1BlockInfoEcotoneBaseFields, L1BlockInfoEcotoneFields, L1BlockInfoEcotoneOnlyFields, - L1BlockInfoIsthmus, L1BlockInfoIsthmusBaseFields, L1BlockInfoIsthmusFields, L1BlockInfoJovian, - L1BlockInfoJovianBaseFields, L1BlockInfoJovianFields, L1BlockInfoTx, -}; - -mod predeploys; -pub use predeploys::Predeploys; - -mod output_root; -pub use output_root::OutputRoot; - -#[cfg(any(test, feature = "test-utils"))] -pub mod test_utils; diff --git a/kona/crates/protocol/protocol/src/test_utils.rs b/kona/crates/protocol/protocol/src/test_utils.rs deleted file mode 100644 index bf1347d4091..00000000000 --- a/kona/crates/protocol/protocol/src/test_utils.rs +++ /dev/null @@ -1,131 +0,0 @@ -//! Test utilities for the protocol crate. - -use alloc::{boxed::Box, format, string::String, sync::Arc, vec::Vec}; -use alloy_primitives::hex; -use async_trait::async_trait; -use op_alloy_consensus::OpBlock; -use spin::Mutex; -use tracing::{Event, Level, Subscriber}; -use tracing_subscriber::{Layer, layer::Context}; - -use crate::{ - BatchValidationProvider, L1BlockInfoBedrock, L1BlockInfoEcotone, L1BlockInfoIsthmus, - L2BlockInfo, -}; - -/// Raw encoded bedrock L1 block info transaction. -pub const RAW_BEDROCK_INFO_TX: [u8; L1BlockInfoBedrock::L1_INFO_TX_LEN] = hex!( - "015d8eb9000000000000000000000000000000000000000000000000000000000117c4eb0000000000000000000000000000000000000000000000000000000065280377000000000000000000000000000000000000000000000000000000026d05d953392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc00000000000000000000000000000000000000000000000000000000000000040000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f3298500000000000000000000000000000000000000000000000000000000000000bc00000000000000000000000000000000000000000000000000000000000a6fe0" -); - -/// Raw encoded ecotone L1 block info transaction. 
-pub const RAW_ECOTONE_INFO_TX: [u8; L1BlockInfoEcotone::L1_INFO_TX_LEN] = hex!( - "440a5e2000000558000c5fc5000000000000000500000000661c277300000000012bec20000000000000000000000000000000000000000000000000000000026e9f109900000000000000000000000000000000000000000000000000000000000000011c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add30000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985" -); - -/// Raw encoded isthmus L1 block info transaction. -pub const RAW_ISTHMUS_INFO_TX: [u8; L1BlockInfoIsthmus::L1_INFO_TX_LEN] = hex!( - "098999be00000558000c5fc5000000000000000500000000661c277300000000012bec20000000000000000000000000000000000000000000000000000000026e9f109900000000000000000000000000000000000000000000000000000000000000011c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add30000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f329850000abcd000000000000dcba" -); - -/// An error for implementations of the [`BatchValidationProvider`] trait. -#[derive(Debug, thiserror::Error)] -pub enum TestBatchValidatorError { - /// The block was not found. - #[error("Block not found")] - BlockNotFound, - /// The L2 block was not found. - #[error("L2 Block not found")] - L2BlockNotFound, -} - -/// An [`TestBatchValidator`] implementation for testing. -#[derive(Default, Debug, Clone)] -pub struct TestBatchValidator { - /// Blocks - pub blocks: Vec<L2BlockInfo>, - /// Short circuit the block return to be the first block. - pub short_circuit: bool, - /// Blocks - pub op_blocks: Vec<OpBlock>, -} - -impl TestBatchValidator { - /// Creates a new [`TestBatchValidator`] with the given origin and batches. 
- pub const fn new(blocks: Vec<L2BlockInfo>, op_blocks: Vec<OpBlock>) -> Self { - Self { blocks, short_circuit: false, op_blocks } - } -} - -#[async_trait] -impl BatchValidationProvider for TestBatchValidator { - type Error = TestBatchValidatorError; - - async fn l2_block_info_by_number(&mut self, number: u64) -> Result<L2BlockInfo, Self::Error> { - if self.short_circuit { - return self - .blocks - .first() - .copied() - .ok_or_else(|| TestBatchValidatorError::BlockNotFound); - } - self.blocks - .iter() - .find(|b| b.block_info.number == number) - .cloned() - .ok_or_else(|| TestBatchValidatorError::BlockNotFound) - } - - async fn block_by_number(&mut self, number: u64) -> Result<OpBlock, Self::Error> { - self.op_blocks - .iter() - .find(|p| p.header.number == number) - .cloned() - .ok_or_else(|| TestBatchValidatorError::L2BlockNotFound) - } -} - -/// The storage for the collected traces. -#[derive(Debug, Default, Clone)] -pub struct TraceStorage(pub Arc<Mutex<Vec<(Level, String)>>>); - -impl TraceStorage { - /// Returns the items in the storage that match the specified level. - pub fn get_by_level(&self, level: Level) -> Vec<String> { - self.0 - .lock() - .iter() - .filter_map(|(l, message)| if *l == level { Some(message.clone()) } else { None }) - .collect() - } - - /// Returns if the storage is empty. - pub fn is_empty(&self) -> bool { - self.0.lock().is_empty() - } -} - -/// A subscriber layer that collects traces and their log levels. -#[derive(Debug, Default)] -pub struct CollectingLayer { - /// The storage for the collected traces. - pub storage: TraceStorage, -} - -impl CollectingLayer { - /// Creates a new collecting layer with the specified storage. 
- pub const fn new(storage: TraceStorage) -> Self { - Self { storage } - } -} - -impl<S: Subscriber> Layer<S> for CollectingLayer { - fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { - let metadata = event.metadata(); - let level = *metadata.level(); - let message = format!("{event:?}"); - - let mut storage = self.storage.0.lock(); - storage.push((level, message)); - } -} diff --git a/kona/crates/protocol/registry/Cargo.toml b/kona/crates/protocol/registry/Cargo.toml deleted file mode 100644 index f865ad7e569..00000000000 --- a/kona/crates/protocol/registry/Cargo.toml +++ /dev/null @@ -1,60 +0,0 @@ -[package] -name = "kona-registry" -version = "0.4.5" -description = "A registry of superchain configs" - -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-genesis = { workspace = true, features = ["serde"] } - -# Alloy -alloy-primitives = { workspace = true, features = ["map"] } -alloy-genesis.workspace = true -alloy-eips.workspace = true -alloy-hardforks.workspace = true -alloy-chains = { workspace = true, features = ["serde"] } -alloy-op-hardforks = { workspace = true } - -# `serde` -serde = { workspace = true, features = ["derive", "alloc"] } -serde_json = { workspace = true, features = ["raw_value"] } - -# misc -lazy_static = { workspace = true, features = ["spin_no_std"] } - -# `tabled` feature -tabled = { workspace = true, features = ["derive"], optional = true } - -[build-dependencies] -toml = { workspace = true, features = ["parse", "serde"] } -serde = { workspace = true } -serde_json = { workspace = true, features = ["raw_value"] } -kona-genesis = { workspace = true, features = ["serde"] } - -[dev-dependencies] -alloy-eips.workspace = true - -[features] -default = [] -tabled = [ "dep:tabled", "std" ] -std = [ - "alloy-chains/std", - 
"alloy-eips/std", - "alloy-genesis/std", - "alloy-primitives/std", - "kona-genesis/std", - "serde/std", - "serde_json/std", - "tabled?/std", -] diff --git a/kona/crates/protocol/registry/README.md b/kona/crates/protocol/registry/README.md deleted file mode 100644 index 223dbc19b3f..00000000000 --- a/kona/crates/protocol/registry/README.md +++ /dev/null @@ -1,128 +0,0 @@ -## `kona-registry` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-registry"><img src="https://img.shields.io/crates/v/kona-registry.svg?label=kona-registry&labelColor=2a2f35" alt="kona-registry"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -[`kona-registry`][sc] is a `no_std` crate that exports rust type definitions for chains -in the [`superchain-registry`][osr]. Since it reads static files to read configurations for -various chains into instantiated objects, the [`kona-registry`][sc] crate requires -[`serde`][serde] as a dependency. To use the [`kona-registry`][sc] crate, add the crate -as a dependency to a `Cargo.toml`. - -```toml -kona-registry = "0.1.0" -``` - -[`kona-registry`][sc] declares lazy evaluated statics that expose `ChainConfig`s, `RollupConfig`s, -and `Chain` objects for all chains with static definitions in the superchain registry. 
The way this works -is the golang side of the superchain registry contains an "internal code generation" script that has -been modified to output configuration files to the [`crates/registry`][s] directory in the -`etc` folder that are read by the [`kona-registry`][sc] rust crate. These static config files -contain an up-to-date list of all superchain configurations with their chain configs. It is expected -that if the commit hash of the [`superchain-registry`][osr] pulled in as a git submodule has breaking -changes, the tests in this crate (`kona-registry`) will break and updates will need to be made. - -There are three core statics exposed by the [`kona-registry`][sc]. -- `CHAINS`: A list of chain objects containing the superchain metadata for this chain. -- `OPCHAINS`: A map from chain id to `ChainConfig`. -- `ROLLUP_CONFIGS`: A map from chain id to `RollupConfig`. - -[`kona-registry`][sc] exports the _complete_ list of chains within the superchain, as well as each -chain's `RollupConfig`s and `ChainConfig`s. - -### Custom chain configurations - -`kona-registry` embeds a frozen snapshot of the upstream superchain registry, but downstream -users can extend that snapshot at build time. This is useful when you need bespoke test chains or -partner networks that are not yet part of the public registry but still want to rely on the crate's -lazy statics. - -1. Produce JSON files that follow the same schema as the generated artifacts in `etc/`: - - `chainList.json` containing additional [`Chain`][chains] entries. - - `configs.json` containing [`Superchain`][superchains] structures with matching `ChainConfig`s and - `RollupConfig`s for the new chain ids. -2. Point the build to those files by setting the following environment variables during `cargo build` - (or `cargo test`): - ```sh - export KONA_CUSTOM_CONFIGS=true - export KONA_CUSTOM_CONFIGS_DIR=/absolute/path/to/custom-configs - cargo build -p kona-registry - ``` -3. 
The build script merges the custom files into the generated `etc/chainList.json` and - `etc/configs.json` before compiling the crate. Attempting to override existing chain ids will - result in build failures. - -Both JSON files must stay in lockstep: every chain listed in `configs.json` must also appear in -`chainList.json`, and chain identifiers must map to a single chain id. The build script validates -those invariants and will fail fast if it detects duplicates or mismatches. When publishing another -crate that depends on `kona-registry`, you can check the custom artifacts into your workspace and set -`KONA_CUSTOM_CONFIGS_DIR` via a build script or `just` recipe so that consumers automatically embed -the additional definitions. - -### Usage - -Add the following to your `Cargo.toml`. - -```toml -[dependencies] -kona-registry = "0.1.0" -``` - -To make `kona-registry` `no_std`, toggle `default-features` off like so. - -```toml -[dependencies] -kona-registry = { version = "0.1.0", default-features = false } -``` - -Below demonstrates getting the `RollupConfig` for OP Mainnet (Chain ID `10`). - -```rust -use kona_registry::ROLLUP_CONFIGS; - -let op_chain_id = 10; -let op_rollup_config = ROLLUP_CONFIGS.get(&op_chain_id); -println!("OP Mainnet Rollup Config: {:?}", op_rollup_config); -``` - -A mapping from chain id to `ChainConfig` is also available. - -```rust -use kona_registry::OPCHAINS; - -let op_chain_id = 10; -let op_chain_config = OPCHAINS.get(&op_chain_id); -println!("OP Mainnet Chain Config: {:?}", op_chain_config); -``` - - -### Feature Flags - -- `std`: Uses the standard library to pull in environment variables. - - -### Credits - -[superchain-registry][osr] contributors for building and maintaining superchain types. - -[alloy] and [op-alloy] for creating and maintaining high quality Ethereum and Optimism types in rust. 
- - -<!-- Hyperlinks --> - -[serde]: https://crates.io/crates/serde -[alloy]: https://github.com/alloy-rs/alloy -[op-alloy]: https://github.com/alloy-rs/op-alloy -[op-superchain]: https://docs.optimism.io/stack/explainer -[osr]: https://github.com/ethereum-optimism/superchain-registry - -[s]: ./crates/registry -[sc]: https://crates.io/crates/kona-registry -[g]: https://crates.io/crates/kona-genesis - -[chains]: https://docs.rs/kona-registry/latest/kona_registry/struct.CHAINS.html -[opchains]: https://docs.rs/kona-registry/latest/kona_registry/struct.OPCHAINS.html -[rollups]: https://docs.rs/kona-registry/latest/kona_registry/struct.ROLLUP_CONFIGS.html -[superchains]: https://docs.rs/kona-genesis/latest/kona_genesis/struct.Superchain.html diff --git a/kona/crates/protocol/registry/build.rs b/kona/crates/protocol/registry/build.rs deleted file mode 100644 index c362b82655b..00000000000 --- a/kona/crates/protocol/registry/build.rs +++ /dev/null @@ -1,325 +0,0 @@ -//! Build script that generates a `configs.json` file from the configs. - -use std::{ - collections::{BTreeMap, BTreeSet, btree_map::Entry}, - fs, - path::{Path, PathBuf}, -}; - -use kona_genesis::{Chain, ChainConfig, ChainList, Superchain, SuperchainConfig, Superchains}; -use serde::de::DeserializeOwned; - -fn main() { - // If the `KONA_BIND` environment variable is _not_ set, then return early. - let kona_bind: bool = - std::env::var("KONA_BIND").unwrap_or_else(|_| "false".to_string()) == "true"; - println!("cargo:rerun-if-env-changed=KONA_BIND"); - if !kona_bind { - merge_custom_configs(); - return; - } - - // Get the directory of this file from the environment - let src_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); - - // Check if the `superchain-registry` directory exists - let superchain_registry = format!("{src_dir}/superchain-registry"); - if !std::path::Path::new(&superchain_registry).exists() { - panic!("Git Submodule missing. 
Please run `just source` to initialize the submodule."); - } - - // Copy the `superchain-registry/chainList.json` file to `etc/chainList.json` - let chain_list = format!("{src_dir}/superchain-registry/chainList.json"); - let etc_dir = std::path::Path::new("etc"); - if !etc_dir.exists() { - std::fs::create_dir_all(etc_dir).unwrap(); - } - std::fs::copy(chain_list, "etc/chainList.json").unwrap(); - - // Get the `superchain-registry/superchain/configs` directory` - let configs_dir = format!("{src_dir}/superchain-registry/superchain/configs"); - let configs = std::fs::read_dir(configs_dir).unwrap(); - - // Get all the directories in the `configs` directory - let mut superchains = Superchains::default(); - for config in configs { - let config = config.unwrap(); - let config_path = config.path(); - let superchain_name = config.file_name().into_string().unwrap(); - let mut superchain = - Superchain { name: superchain_name, chains: Vec::new(), ..Default::default() }; - if config_path.is_dir() { - let config_files = std::fs::read_dir(&config_path).unwrap(); - for config_file in config_files { - let config_file = config_file.unwrap(); - let config_file_path = config_file.path(); - - // Read the `superchain.toml` as the `SuperchainConfig` - let config_file_name = config_file.file_name().into_string().unwrap(); - if config_file_name == "superchain.toml" { - let config = std::fs::read_to_string(config_file_path).unwrap(); - let config: SuperchainConfig = toml::from_str(&config).unwrap(); - superchain.config = config; - continue; - } - - // Read the config file as a `ChainConfig` - let config = std::fs::read_to_string(config_file_path).unwrap(); - let config: ChainConfig = toml::from_str(&config).unwrap(); - superchain.chains.push(config); - } - superchains.superchains.push(superchain); - } - } - - // Sort the superchains by name. - superchains.superchains.sort_by(|a, b| a.name.cmp(&b.name)); - - // For each superchain, sort the list of chains by chain id. 
- for superchain in superchains.superchains.iter_mut() { - superchain.chains.sort_by(|a, b| a.chain_id.cmp(&b.chain_id)); - } - - let output_path = std::path::Path::new("etc/configs.json"); - std::fs::write(output_path, serde_json::to_string_pretty(&superchains).unwrap()).unwrap(); - merge_custom_configs(); -} - -fn merge_custom_configs() { - let kona_custom_configs = - std::env::var("KONA_CUSTOM_CONFIGS").unwrap_or_else(|_| "false".to_string()) == "true"; - println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS"); - println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS_TEST"); - - // if we're running tests, bust the cache if the base etc configs are updated. This ensures that - // the test build can be repeated after modifying the base configs - if std::env::var("KONA_CUSTOM_CONFIGS_TEST") == Ok("true".to_string()) { - println!("cargo:rerun-if-changed=etc/chainList.json"); - println!("cargo:rerun-if-changed=etc/configs.json"); - } - - if !kona_custom_configs { - return; - } - - let custom_configs_dir = std::env::var("KONA_CUSTOM_CONFIGS_DIR") - .expect("KONA_CUSTOM_CONFIGS_DIR must be set when KONA_CUSTOM_CONFIGS is enabled"); - println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS_DIR"); - let custom_configs_dir = PathBuf::from(custom_configs_dir); - if !custom_configs_dir.exists() { - panic!("Custom configs directory {} does not exist", custom_configs_dir.display()); - } - - let custom_chain_list_path = custom_configs_dir.join("chainList.json"); - let custom_configs_path = custom_configs_dir.join("configs.json"); - - println!("cargo:rerun-if-changed={}", custom_chain_list_path.display()); - println!("cargo:rerun-if-changed={}", custom_configs_path.display()); - - let target_chain_list = Path::new("etc/chainList.json"); - let target_superchains = Path::new("etc/configs.json"); - - validate_chain_configs(&custom_chain_list_path, &custom_configs_path); - - merge_chain_list(&custom_chain_list_path, target_chain_list); - 
merge_superchain_configs(&custom_configs_path, target_superchains); - validate_chain_configs(target_chain_list, target_superchains); -} - -fn merge_chain_list(custom_path: &Path, target_path: &Path) { - if !custom_path.exists() { - panic!("Custom chain list {} does not exist", custom_path.display()); - } - if !target_path.exists() { - panic!("Target chain list {} does not exist", target_path.display()); - } - - let mut merged_chain_list: ChainList = read_json(target_path); - let custom_chain_list: ChainList = read_json(custom_path); - - let mut chains_by_id: BTreeMap<u64, Chain> = BTreeMap::new(); - let mut identifiers: BTreeMap<String, Chain> = BTreeMap::new(); - - for chain in merged_chain_list.chains.iter() { - let ident_key = chain.identifier.to_ascii_lowercase(); - identifiers.insert(ident_key, chain.clone()); - chains_by_id.insert(chain.chain_id, chain.clone()); - } - // preserve ordering of chains in etc/chainList.json - for chain in custom_chain_list.chains.iter() { - let ident_key = chain.identifier.to_ascii_lowercase(); - if let Some(existing_chain) = identifiers.get(&ident_key) { - if existing_chain == chain { - continue; - } else { - panic!( - "Chain identifier `{}` in {} already exists in the registry with a different config", - chain.identifier, - custom_path.display() - ); - } - } - if let Some(existing_chain) = chains_by_id.get(&chain.chain_id) { - if existing_chain == chain { - continue; - } else { - panic!( - "Chain id {} in {} already exists in the registry with a different config for identifier `{}`", - chain.chain_id, - custom_path.display(), - existing_chain.identifier - ); - } - } - identifiers.insert(ident_key, chain.clone()); - chains_by_id.insert(chain.chain_id, chain.clone()); - merged_chain_list.chains.push(chain.clone()); - } - - write_pretty_json(target_path, &merged_chain_list); -} - -fn merge_superchain_configs(custom_path: &Path, target_path: &Path) { - if !custom_path.exists() { - panic!("Custom configs {} does not exist", 
custom_path.display()); - } - if !target_path.exists() { - panic!("Target configs {} does not exist", target_path.display()); - } - - let mut superchains: BTreeMap<String, Superchain> = read_json::<Superchains>(target_path) - .superchains - .into_iter() - .map(|sc| (sc.name.clone(), sc)) - .collect(); - - let custom_superchains: Superchains = read_json(custom_path); - - for custom in custom_superchains.superchains { - match superchains.entry(custom.name.clone()) { - Entry::Occupied(mut entry) => { - println!( - "cargo:warning=debug: merging custom chains {}: [{}]", - custom.name, - custom.chains.iter().map(|c| c.name.as_str()).collect::<Vec<_>>().join(",") - ); - let existing = entry.get_mut(); - *existing = merge_superchain_entry(std::mem::take(existing), custom); - } - Entry::Vacant(entry) => { - println!( - "cargo:warning=debug: inserting new custom chain {}: [{}]", - custom.name, - custom.chains.iter().map(|c| c.name.as_str()).collect::<Vec<_>>().join(",") - ); - entry.insert(custom); - } - } - } - - let mut merged: Vec<Superchain> = superchains.into_values().collect(); - merged.sort_by(|a, b| a.name.cmp(&b.name)); - for superchain in merged.iter_mut() { - superchain.chains.sort_by(|a, b| a.chain_id.cmp(&b.chain_id)); - } - - let merged = Superchains { superchains: merged }; - write_pretty_json(target_path, &merged); -} - -/// Merges the custom chains to the chains in the superchain-registry, panicking on conflicts -fn merge_superchain_entry(base: Superchain, custom: Superchain) -> Superchain { - let mut merged = base; - - // maintain the ordering of chains in base - let mut chain_map: BTreeMap<u64, ChainConfig> = - merged.chains.clone().into_iter().map(|chain| (chain.chain_id, chain)).collect(); - for chain in custom.chains { - if let Some(existing_config) = chain_map.get(&chain.chain_id) { - if existing_config == &chain { - continue; - } else { - panic!( - "conflict merging superchain `{}`: chain id {} has differing configs", - merged.name, chain.chain_id - 
); - } - } - chain_map.insert(chain.chain_id, chain.clone()); - merged.chains.push(chain.clone()); - } - merged -} - -fn validate_chain_configs(chain_list_path: &Path, superchains_path: &Path) { - if !chain_list_path.exists() || !superchains_path.exists() { - return; - } - - let chain_list: ChainList = read_json(chain_list_path); - let superchains: Superchains = read_json(superchains_path); - - let mut list_chain_ids = BTreeSet::new(); - for chain in &chain_list.chains { - if !list_chain_ids.insert(chain.chain_id) { - panic!( - "Duplicate chain id {} (identifier `{}`) detected in {}", - chain.chain_id, - chain.identifier, - chain_list_path.display() - ); - } - } - - let mut config_chain_ids = BTreeSet::new(); - for superchain in &superchains.superchains { - for chain in &superchain.chains { - if !config_chain_ids.insert(chain.chain_id) { - panic!( - "Duplicate chain id {} detected across superchain configs in {}", - chain.chain_id, - superchains_path.display() - ); - } - } - } - - for chain_id in &config_chain_ids { - if !list_chain_ids.contains(chain_id) { - panic!( - "Chain id {} present in {} but missing from {}", - chain_id, - superchains_path.display(), - chain_list_path.display() - ); - } - } - - for chain in chain_list.chains { - if !config_chain_ids.contains(&chain.chain_id) { - panic!( - "Chain `{}` (chain id {}) present in {} but missing from {}", - chain.identifier, - chain.chain_id, - chain_list_path.display(), - superchains_path.display() - ); - } - } -} - -fn read_json<T: DeserializeOwned>(path: &Path) -> T { - let contents = fs::read_to_string(path) - .unwrap_or_else(|e| panic!("Failed to read {}: {e}", path.display())); - serde_json::from_str(&contents) - .unwrap_or_else(|e| panic!("Failed to parse {}: {e}", path.display())) -} - -fn write_pretty_json<T: serde::Serialize>(path: &Path, value: &T) { - fs::write( - path, - serde_json::to_string_pretty(value) - .unwrap_or_else(|e| panic!("Failed to serialize {}: {e}", path.display())), - ) - 
.unwrap_or_else(|e| panic!("Failed to write {}: {e}", path.display())); -} diff --git a/kona/crates/protocol/registry/src/lib.rs b/kona/crates/protocol/registry/src/lib.rs deleted file mode 100644 index d3ab32b16f8..00000000000 --- a/kona/crates/protocol/registry/src/lib.rs +++ /dev/null @@ -1,193 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] - -extern crate alloc; - -pub use alloy_primitives::map::HashMap; -use kona_genesis::L1ChainConfig; -pub use kona_genesis::{Chain, ChainConfig, ChainList, RollupConfig}; - -pub mod superchain; -pub use superchain::Registry; - -/// L1 chain configurations. -pub mod l1; -pub use l1::L1Config; - -#[cfg(test)] -pub mod test_utils; - -lazy_static::lazy_static! { - /// Private initializer that loads the superchain configurations. - static ref _INIT: Registry = Registry::from_chain_list(); - - /// Chain configurations exported from the registry - pub static ref CHAINS: ChainList = _INIT.chain_list.clone(); - - /// OP Chain configurations exported from the registry - pub static ref OPCHAINS: HashMap<u64, ChainConfig> = _INIT.op_chains.clone(); - - /// Rollup configurations exported from the registry - pub static ref ROLLUP_CONFIGS: HashMap<u64, RollupConfig> = _INIT.rollup_configs.clone(); - - /// L1 chain configurations exported from the registry - /// Note: the l1 chain configurations are not exported from the superchain registry but rather from a genesis dump file. - pub static ref L1_CONFIGS: HashMap<u64, L1ChainConfig> = _INIT.l1_configs.clone(); -} - -/// Returns a [RollupConfig] by its identifier. 
-pub fn scr_rollup_config_by_ident(ident: &str) -> Option<&RollupConfig> { - let chain_id = CHAINS.get_chain_by_ident(ident)?.chain_id; - ROLLUP_CONFIGS.get(&chain_id) -} - -/// Returns a [RollupConfig] by its identifier. -pub fn scr_rollup_config_by_alloy_ident(chain: &alloy_chains::Chain) -> Option<&RollupConfig> { - ROLLUP_CONFIGS.get(&chain.id()) -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_chains::Chain as AlloyChain; - use alloy_hardforks::{ - holesky::{HOLESKY_BPO1_TIMESTAMP, HOLESKY_BPO2_TIMESTAMP}, - sepolia::{SEPOLIA_BPO1_TIMESTAMP, SEPOLIA_BPO2_TIMESTAMP}, - }; - use alloy_op_hardforks::{ - BASE_MAINNET_JOVIAN_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, - OP_SEPOLIA_JOVIAN_TIMESTAMP, - }; - - #[test] - fn test_hardcoded_rollup_configs() { - let test_cases = [ - (10, test_utils::OP_MAINNET_CONFIG), - (8453, test_utils::BASE_MAINNET_CONFIG), - (11155420, test_utils::OP_SEPOLIA_CONFIG), - (84532, test_utils::BASE_SEPOLIA_CONFIG), - ] - .to_vec(); - - for (chain_id, expected) in test_cases { - let derived = super::ROLLUP_CONFIGS.get(&chain_id).unwrap(); - assert_eq!(expected, *derived); - } - } - - #[test] - fn test_chain_by_ident() { - const ALLOY_BASE: AlloyChain = AlloyChain::base_mainnet(); - - let chain_by_ident = CHAINS.get_chain_by_ident("mainnet/base").unwrap(); - let chain_by_alloy_ident = CHAINS.get_chain_by_alloy_ident(&ALLOY_BASE).unwrap(); - let chain_by_id = CHAINS.get_chain_by_id(8453).unwrap(); - - assert_eq!(chain_by_ident, chain_by_id); - assert_eq!(chain_by_alloy_ident, chain_by_id); - } - - #[test] - fn test_rollup_config_by_ident() { - const ALLOY_BASE: AlloyChain = AlloyChain::base_mainnet(); - - let rollup_config_by_ident = scr_rollup_config_by_ident("mainnet/base").unwrap(); - let rollup_config_by_alloy_ident = scr_rollup_config_by_alloy_ident(&ALLOY_BASE).unwrap(); - let rollup_config_by_id = ROLLUP_CONFIGS.get(&8453).unwrap(); - - assert_eq!(rollup_config_by_ident, rollup_config_by_id); - 
assert_eq!(rollup_config_by_alloy_ident, rollup_config_by_id); - } - - #[test] - fn test_jovian_timestamps() { - let base_mainnet_config_by_ident = scr_rollup_config_by_ident("mainnet/base").unwrap(); - assert_eq!( - base_mainnet_config_by_ident.hardforks.jovian_time, - Some(BASE_MAINNET_JOVIAN_TIMESTAMP) - ); - - let base_sepolia_config_by_ident = scr_rollup_config_by_ident("sepolia/base").unwrap(); - assert_eq!( - base_sepolia_config_by_ident.hardforks.jovian_time, - Some(BASE_SEPOLIA_JOVIAN_TIMESTAMP) - ); - - let op_mainnet_config_by_ident = scr_rollup_config_by_ident("mainnet/op").unwrap(); - assert_eq!( - op_mainnet_config_by_ident.hardforks.jovian_time, - Some(OP_MAINNET_JOVIAN_TIMESTAMP) - ); - - let op_sepolia_config_by_ident = scr_rollup_config_by_ident("sepolia/op").unwrap(); - assert_eq!( - op_sepolia_config_by_ident.hardforks.jovian_time, - Some(OP_SEPOLIA_JOVIAN_TIMESTAMP) - ); - } - - #[test] - fn test_bpo_timestamps() { - let sepolia_config = L1_CONFIGS.get(&11155111).unwrap(); - assert_eq!(sepolia_config.bpo1_time, Some(SEPOLIA_BPO1_TIMESTAMP)); - assert_eq!(sepolia_config.bpo2_time, Some(SEPOLIA_BPO2_TIMESTAMP)); - - let holesky_config = L1_CONFIGS.get(&17000).unwrap(); - assert_eq!(holesky_config.bpo1_time, Some(HOLESKY_BPO1_TIMESTAMP)); - assert_eq!(holesky_config.bpo2_time, Some(HOLESKY_BPO2_TIMESTAMP)); - } - - const CUSTOM_CONFIGS_TEST_ENABLED: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS_TEST"); - const CUSTOM_CONFIGS: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS"); - const CUSTOM_CONFIGS_DIR: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS_DIR"); - - #[test] - fn custom_chain_is_loaded_when_enabled() { - if CUSTOM_CONFIGS_TEST_ENABLED != Some("true") { - return; - }; - if CUSTOM_CONFIGS != Some("true") { - panic!("KONA_CUSTOM_CONFIGS is required when KONA_CUSTOM_CONFIGS_TEST is set"); - } - if CUSTOM_CONFIGS_DIR.is_none() { - panic!("KONA_CUSTOM_CONFIGS_DIR is required when KONA_CUSTOM_CONFIGS_TEST is set"); - } - - let test1_chain_id 
= 123999119; - let test2_chain_id = 223999119; - let test1_ident = "test1/testnet"; - let test2_ident = "test2/testnet"; - - let chain1 = CHAINS - .get_chain_by_ident(test1_ident) - .unwrap_or_else(|| panic!("custom chain `{test1_ident}` missing")); - assert_eq!(chain1.chain_id, test1_chain_id); - let chain2 = CHAINS - .get_chain_by_ident(test2_ident) - .unwrap_or_else(|| panic!("custom chain `{test2_ident}` missing")); - assert_eq!(chain2.chain_id, test2_chain_id); - - assert!( - OPCHAINS.contains_key(&test1_chain_id), - "chain config missing for {test1_chain_id}" - ); - assert!( - ROLLUP_CONFIGS.contains_key(&test1_chain_id), - "rollup config missing for {test1_chain_id}" - ); - assert!( - OPCHAINS.contains_key(&test2_chain_id), - "chain config missing for {test2_chain_id}" - ); - assert!( - ROLLUP_CONFIGS.contains_key(&test2_chain_id), - "rollup config missing for {test2_chain_id}" - ); - } -} diff --git a/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs b/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs deleted file mode 100644 index b52d182efb9..00000000000 --- a/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! Base Sepolia Rollup Config. - -use alloy_chains::Chain; -use alloy_eips::BlockNumHash; -use alloy_op_hardforks::{ - BASE_SEPOLIA_CANYON_TIMESTAMP, BASE_SEPOLIA_ECOTONE_TIMESTAMP, BASE_SEPOLIA_FJORD_TIMESTAMP, - BASE_SEPOLIA_GRANITE_TIMESTAMP, BASE_SEPOLIA_HOLOCENE_TIMESTAMP, - BASE_SEPOLIA_ISTHMUS_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, -}; -use alloy_primitives::{address, b256, uint}; -use kona_genesis::{ - BASE_SEPOLIA_BASE_FEE_CONFIG, ChainGenesis, DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, - HardForkConfig, RollupConfig, SystemConfig, -}; - -/// The [RollupConfig] for Base Sepolia. 
-pub const BASE_SEPOLIA_CONFIG: RollupConfig = RollupConfig { - genesis: ChainGenesis { - l1: BlockNumHash { - hash: b256!("cac9a83291d4dec146d6f7f69ab2304f23f5be87b1789119a0c5b1e4482444ed"), - number: 4370868, - }, - l2: BlockNumHash { - hash: b256!("0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4"), - number: 0, - }, - l2_time: 1695768288, - system_config: Some(SystemConfig { - batcher_address: address!("6cdebe940bc0f26850285caca097c11c33103e47"), - overhead: uint!(0x834_U256), - scalar: uint!(0xf4240_U256), - gas_limit: 25000000, - base_fee_scalar: None, - blob_base_fee_scalar: None, - eip1559_denominator: None, - eip1559_elasticity: None, - operator_fee_scalar: None, - operator_fee_constant: None, - min_base_fee: None, - da_footprint_gas_scalar: None, - }), - }, - block_time: 2, - max_sequencer_drift: 600, - seq_window_size: 3600, - channel_timeout: 300, - granite_channel_timeout: 50, - l1_chain_id: 11155111, - l2_chain_id: Chain::base_sepolia(), - chain_op_config: BASE_SEPOLIA_BASE_FEE_CONFIG, - alt_da_config: None, - hardforks: HardForkConfig { - regolith_time: None, - canyon_time: Some(BASE_SEPOLIA_CANYON_TIMESTAMP), - delta_time: Some(1703203200), - ecotone_time: Some(BASE_SEPOLIA_ECOTONE_TIMESTAMP), - fjord_time: Some(BASE_SEPOLIA_FJORD_TIMESTAMP), - granite_time: Some(BASE_SEPOLIA_GRANITE_TIMESTAMP), - holocene_time: Some(BASE_SEPOLIA_HOLOCENE_TIMESTAMP), - pectra_blob_schedule_time: Some(1742486400), - isthmus_time: Some(BASE_SEPOLIA_ISTHMUS_TIMESTAMP), - jovian_time: Some(BASE_SEPOLIA_JOVIAN_TIMESTAMP), - interop_time: None, - }, - batch_inbox_address: address!("ff00000000000000000000000000000000084532"), - deposit_contract_address: address!("49f53e41452c74589e85ca1677426ba426459e85"), - l1_system_config_address: address!("f272670eb55e895584501d564afeb048bed26194"), - protocol_versions_address: address!("79add5713b383daa0a138d3c4780c7a1804a8090"), - superchain_config_address: Some(address!("C2Be75506d5724086DEB7245bd260Cc9753911Be")), - 
da_challenge_address: None, - blobs_enabled_l1_timestamp: None, - interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, -}; diff --git a/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs b/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs deleted file mode 100644 index e912d72ca9d..00000000000 --- a/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs +++ /dev/null @@ -1,73 +0,0 @@ -//! OP Sepolia Rollup Config. - -use alloy_chains::Chain; -use alloy_eips::BlockNumHash; -use alloy_op_hardforks::{ - OP_SEPOLIA_CANYON_TIMESTAMP, OP_SEPOLIA_ECOTONE_TIMESTAMP, OP_SEPOLIA_FJORD_TIMESTAMP, - OP_SEPOLIA_GRANITE_TIMESTAMP, OP_SEPOLIA_HOLOCENE_TIMESTAMP, OP_SEPOLIA_ISTHMUS_TIMESTAMP, - OP_SEPOLIA_JOVIAN_TIMESTAMP, -}; -use alloy_primitives::{address, b256, uint}; -use kona_genesis::{ - ChainGenesis, DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, HardForkConfig, - OP_SEPOLIA_BASE_FEE_CONFIG, RollupConfig, SystemConfig, -}; - -/// The [RollupConfig] for OP Sepolia. -pub const OP_SEPOLIA_CONFIG: RollupConfig = RollupConfig { - genesis: ChainGenesis { - l1: BlockNumHash { - hash: b256!("48f520cf4ddaf34c8336e6e490632ea3cf1e5e93b0b2bc6e917557e31845371b"), - number: 4071408, - }, - l2: BlockNumHash { - hash: b256!("102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d"), - number: 0, - }, - l2_time: 1691802540, - system_config: Some(SystemConfig { - batcher_address: address!("8f23bb38f531600e5d8fddaaec41f13fab46e98c"), - overhead: uint!(0xbc_U256), - scalar: uint!(0xa6fe0_U256), - gas_limit: 30_000_000, - base_fee_scalar: None, - blob_base_fee_scalar: None, - eip1559_denominator: None, - eip1559_elasticity: None, - operator_fee_scalar: None, - operator_fee_constant: None, - min_base_fee: None, - da_footprint_gas_scalar: None, - }), - }, - block_time: 2, - max_sequencer_drift: 600, - seq_window_size: 3600, - channel_timeout: 300, - granite_channel_timeout: 50, - l1_chain_id: 11155111, - l2_chain_id: Chain::optimism_sepolia(), - 
chain_op_config: OP_SEPOLIA_BASE_FEE_CONFIG, - alt_da_config: None, - hardforks: HardForkConfig { - regolith_time: None, - canyon_time: Some(OP_SEPOLIA_CANYON_TIMESTAMP), - delta_time: Some(1703203200), - ecotone_time: Some(OP_SEPOLIA_ECOTONE_TIMESTAMP), - fjord_time: Some(OP_SEPOLIA_FJORD_TIMESTAMP), - granite_time: Some(OP_SEPOLIA_GRANITE_TIMESTAMP), - holocene_time: Some(OP_SEPOLIA_HOLOCENE_TIMESTAMP), - pectra_blob_schedule_time: Some(1742486400), - isthmus_time: Some(OP_SEPOLIA_ISTHMUS_TIMESTAMP), - jovian_time: Some(OP_SEPOLIA_JOVIAN_TIMESTAMP), - interop_time: None, - }, - batch_inbox_address: address!("ff00000000000000000000000000000011155420"), - deposit_contract_address: address!("16fc5058f25648194471939df75cf27a2fdc48bc"), - l1_system_config_address: address!("034edd2a225f7f429a63e0f1d2084b9e0a93b538"), - protocol_versions_address: address!("79add5713b383daa0a138d3c4780c7a1804a8090"), - superchain_config_address: Some(address!("C2Be75506d5724086DEB7245bd260Cc9753911Be")), - da_challenge_address: None, - blobs_enabled_l1_timestamp: None, - interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, -}; diff --git a/kona/crates/protocol/registry/superchain-registry b/kona/crates/protocol/registry/superchain-registry deleted file mode 160000 index 300d18077f2..00000000000 --- a/kona/crates/protocol/registry/superchain-registry +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 300d18077f2799e21e5f2becb64f9cf04c9a0760 diff --git a/kona/crates/providers/providers-alloy/Cargo.toml b/kona/crates/providers/providers-alloy/Cargo.toml deleted file mode 100644 index 734dd25508f..00000000000 --- a/kona/crates/providers/providers-alloy/Cargo.toml +++ /dev/null @@ -1,62 +0,0 @@ -[package] -name = "kona-providers-alloy" -version = "0.3.3" -description = "Alloy Backed Providers" - -edition.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -keywords.workspace = true -categories.workspace = true -repository.workspace = 
true -rust-version.workspace = true - -[lints] -workspace = true - -[dependencies] -# Kona -kona-macros.workspace = true -kona-genesis.workspace = true -kona-protocol.workspace = true -kona-derive.workspace = true - -# Alloy -alloy-serde.workspace = true -alloy-eips = { workspace = true, features = ["kzg"] } -alloy-transport.workspace = true -alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls", "jwt-auth"] } -alloy-consensus.workspace = true -alloy-rpc-types-beacon.workspace = true -alloy-rpc-types-engine.workspace = true -alloy-rpc-client.workspace = true -alloy-provider = { workspace = true, features = ["ipc", "ws", "reqwest"] } -alloy-primitives = { workspace = true, features = ["map"] } - -# Op Alloy -op-alloy-consensus.workspace = true -op-alloy-network.workspace = true - -# Misc -lru.workspace = true -serde.workspace = true -thiserror.workspace = true -async-trait.workspace = true -reqwest = { workspace = true, features = ["json"] } -tower.workspace = true -http-body-util.workspace = true - -c-kzg.workspace = true - -# `metrics` feature -metrics = { workspace = true, optional = true } - -[features] -default = [] -metrics = [ "dep:metrics", "kona-derive/metrics" ] - -[dev-dependencies] -tokio.workspace = true -httpmock.workspace = true -serde_json.workspace = true diff --git a/kona/crates/providers/providers-alloy/README.md b/kona/crates/providers/providers-alloy/README.md deleted file mode 100644 index 68e76d096d8..00000000000 --- a/kona/crates/providers/providers-alloy/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-providers-alloy` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-providers-alloy"><img src="https://img.shields.io/crates/v/kona-providers-alloy.svg?label=kona-providers-alloy&labelColor=2a2f35" 
alt="kona-provides-alloy"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -Alloy-backed providers for `kona`. diff --git a/kona/crates/providers/providers-alloy/src/blobs.rs b/kona/crates/providers/providers-alloy/src/blobs.rs deleted file mode 100644 index 022a22ac0d1..00000000000 --- a/kona/crates/providers/providers-alloy/src/blobs.rs +++ /dev/null @@ -1,197 +0,0 @@ -//! Contains an online implementation of the `BlobProvider` trait. - -use crate::BeaconClient; -#[cfg(feature = "metrics")] -use crate::Metrics; -use alloy_eips::eip4844::{ - Blob, BlobTransactionSidecarItem, IndexedBlobHash, env_settings::EnvKzgSettings, -}; -use alloy_primitives::FixedBytes; -use async_trait::async_trait; -use kona_derive::{BlobProvider, BlobProviderError}; -use kona_protocol::BlockInfo; -use std::{boxed::Box, string::ToString, vec::Vec}; - -/// A boxed blob with index. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct BoxedBlobWithIndex { - /// The index of the blob. - pub index: u64, - /// The blob data. - pub blob: Box<Blob>, -} - -/// An online implementation of the [BlobProvider] trait. -#[derive(Debug, Clone)] -pub struct OnlineBlobProvider<B: BeaconClient> { - /// The Beacon API client. - pub beacon_client: B, - /// Beacon Genesis time used for the time to slot conversion. - pub genesis_time: u64, - /// Slot interval used for the time to slot conversion. - pub slot_interval: u64, -} - -impl<B: BeaconClient> OnlineBlobProvider<B> { - /// Creates a new instance of the [OnlineBlobProvider]. - /// - /// The `genesis_time` and `slot_interval` arguments are _optional_ and the - /// [OnlineBlobProvider] will attempt to load them dynamically at runtime if they are not - /// provided. 
- /// - /// ## Panics - /// Panics if the genesis time or slot interval cannot be loaded from the beacon client. - pub async fn init(beacon_client: B) -> Self { - let genesis_time = beacon_client - .genesis_time() - .await - .map(|r| r.data.genesis_time) - .map_err(|e| BlobProviderError::Backend(e.to_string())) - .expect("Failed to load genesis time from beacon client"); - let slot_interval = beacon_client - .slot_interval() - .await - .map(|r| r.data.seconds_per_slot) - .map_err(|e| BlobProviderError::Backend(e.to_string())) - .expect("Failed to load slot interval from beacon client"); - Self { beacon_client, genesis_time, slot_interval } - } - - /// Computes the slot for the given timestamp. - pub const fn slot( - genesis: u64, - slot_time: u64, - timestamp: u64, - ) -> Result<u64, BlobProviderError> { - if timestamp < genesis { - return Err(BlobProviderError::SlotDerivation); - } - Ok((timestamp - genesis) / slot_time) - } - - /// Fetches blobs for the given slot. - async fn fetch_filtered_blobs( - &self, - slot: u64, - blob_hashes: &[IndexedBlobHash], - ) -> Result<Vec<BoxedBlobWithIndex>, BlobProviderError> { - kona_macros::inc!(gauge, Metrics::BLOB_FETCHES); - - let result = self - .beacon_client - .filtered_beacon_blobs(slot, blob_hashes) - .await - .map_err(|e| BlobProviderError::Backend(e.to_string())); - - #[cfg(feature = "metrics")] - if result.is_err() { - kona_macros::inc!(gauge, Metrics::BLOB_FETCH_ERRORS); - } - - result - } - - /// Converts a vector of boxed blobs with index to a vector of blob transaction sidecar items. - /// - /// Note: for performance reasons, we need to transmute the blobs to the c_kzg::Blob type to - /// avoid the overhead of moving the blobs around or reallocating the memory. 
- fn sidecar_from_blobs( - blobs: Vec<BoxedBlobWithIndex>, - ) -> Result<Vec<BlobTransactionSidecarItem>, c_kzg::Error> { - blobs - .into_iter() - .map(|blob| { - let kzg_settings = EnvKzgSettings::Default; - - // SAFETY: all types have the same size and alignment - let kzg_blob = - unsafe { Box::from_raw(Box::<Blob>::into_raw(blob.blob) as *mut c_kzg::Blob) }; - - let commitment = kzg_settings - .get() - .blob_to_kzg_commitment(&kzg_blob) - .map(|blob| blob.to_bytes())?; - let proof = kzg_settings - .get() - .compute_blob_kzg_proof(&kzg_blob, &commitment) - .map(|proof| proof.to_bytes())?; - - // SAFETY: all types have the same size and alignment - let alloy_blob = - unsafe { Box::from_raw(Box::<c_kzg::Blob>::into_raw(kzg_blob) as *mut Blob) }; - - Ok(BlobTransactionSidecarItem { - index: blob.index, - blob: alloy_blob, - kzg_commitment: FixedBytes::from(*commitment), - kzg_proof: FixedBytes::from(*proof), - }) - }) - .collect() - } - - /// Fetches blob sidecars for the given block reference and blob hashes. - /// Does not validate the blobs. Recomputes the kzg proofs associated with the blobs. - /// - /// Use [`Self::beacon_client`] to fetch the blobs without recomputing the kzg - /// proofs/commitments. - pub async fn fetch_filtered_blob_sidecars( - &self, - block_ref: &BlockInfo, - blob_hashes: &[IndexedBlobHash], - ) -> Result<Vec<BlobTransactionSidecarItem>, BlobProviderError> { - if blob_hashes.is_empty() { - return Ok(Default::default()); - } - - // Calculate the slot for the given timestamp. - let slot = Self::slot(self.genesis_time, self.slot_interval, block_ref.timestamp)?; - - // Fetch blobs for the slot using. 
- let blobs = self.fetch_filtered_blobs(slot, blob_hashes).await?; - - Self::sidecar_from_blobs(blobs) - .map_err(|e| BlobProviderError::Backend(format!("KZG commitment error: {e}"))) - } -} - -#[async_trait] -impl<B> BlobProvider for OnlineBlobProvider<B> -where - B: BeaconClient + Send + Sync, -{ - type Error = BlobProviderError; - - /// Fetches blobs that were confirmed in the specified L1 block with the given indexed - /// hashes. The blobs are validated for their index and hashes using the specified - /// [IndexedBlobHash]. - async fn get_and_validate_blobs( - &mut self, - block_ref: &BlockInfo, - blob_hashes: &[IndexedBlobHash], - ) -> Result<Vec<Box<Blob>>, Self::Error> { - // Fetch the blob sidecars for the given block reference and blob hashes. - let blobs = self.fetch_filtered_blob_sidecars(block_ref, blob_hashes).await?; - - // Validate the blob sidecars straight away with the num hashes. - let blobs = blobs - .into_iter() - .enumerate() - .map(|(i, sidecar)| { - let hash = blob_hashes - .get(i) - .ok_or(BlobProviderError::Backend("Missing blob hash".to_string()))? - .hash - .as_slice(); - - if sidecar.to_kzg_versioned_hash() != hash { - return Err(BlobProviderError::Backend("KZG commitment mismatch".to_string())); - } - - Ok(sidecar.blob) - }) - .collect::<Result<Vec<Box<Blob>>, BlobProviderError>>() - .map_err(|e| BlobProviderError::Backend(e.to_string()))?; - Ok(blobs) - } -} diff --git a/kona/crates/providers/providers-alloy/src/chain_provider.rs b/kona/crates/providers/providers-alloy/src/chain_provider.rs deleted file mode 100644 index 22f68b7b29a..00000000000 --- a/kona/crates/providers/providers-alloy/src/chain_provider.rs +++ /dev/null @@ -1,271 +0,0 @@ -//! Providers that use alloy provider types on the backend. 
- -#[cfg(feature = "metrics")] -use crate::Metrics; -use alloy_consensus::{Header, Receipt, TxEnvelope}; -use alloy_eips::BlockId; -use alloy_primitives::B256; -use alloy_provider::{Provider, RootProvider}; -use alloy_transport::{RpcError, TransportErrorKind}; -use async_trait::async_trait; -use kona_derive::{ChainProvider, PipelineError, PipelineErrorKind}; -use kona_protocol::BlockInfo; -use lru::LruCache; -use std::{boxed::Box, num::NonZeroUsize, vec::Vec}; - -/// The [AlloyChainProvider] is a concrete implementation of the [ChainProvider] trait, providing -/// data over Ethereum JSON-RPC using an alloy provider as the backend. -#[derive(Debug, Clone)] -pub struct AlloyChainProvider { - /// The inner Ethereum JSON-RPC provider. - pub inner: RootProvider, - /// Whether to trust the RPC without verification. - pub trust_rpc: bool, - /// `header_by_hash` LRU cache. - header_by_hash_cache: LruCache<B256, Header>, - /// `receipts_by_hash_cache` LRU cache. - receipts_by_hash_cache: LruCache<B256, Vec<Receipt>>, - /// `block_info_and_transactions_by_hash` LRU cache. - block_info_and_transactions_by_hash_cache: LruCache<B256, (BlockInfo, Vec<TxEnvelope>)>, -} - -impl AlloyChainProvider { - /// Creates a new [AlloyChainProvider] with the given alloy provider. - /// - /// ## Panics - /// - Panics if `cache_size` is zero. - pub fn new(inner: RootProvider, cache_size: usize) -> Self { - Self::new_with_trust(inner, cache_size, true) - } - - /// Creates a new [AlloyChainProvider] with the given alloy provider and trust setting. - /// - /// ## Panics - /// - Panics if `cache_size` is zero. 
- pub fn new_with_trust(inner: RootProvider, cache_size: usize, trust_rpc: bool) -> Self { - Self { - inner, - trust_rpc, - header_by_hash_cache: LruCache::new(NonZeroUsize::new(cache_size).unwrap()), - receipts_by_hash_cache: LruCache::new(NonZeroUsize::new(cache_size).unwrap()), - block_info_and_transactions_by_hash_cache: LruCache::new( - NonZeroUsize::new(cache_size).unwrap(), - ), - } - } - - /// Creates a new [AlloyChainProvider] from the provided [reqwest::Url]. - pub fn new_http(url: reqwest::Url, cache_size: usize) -> Self { - let inner = RootProvider::new_http(url); - Self::new(inner, cache_size) - } - - /// Returns the latest L2 block number. - pub async fn latest_block_number(&mut self) -> Result<u64, RpcError<TransportErrorKind>> { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_number"); - - let result = self.inner.get_block_number().await; - - #[cfg(feature = "metrics")] - if result.is_err() { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_number"); - } - - result - } - - /// Returns the chain ID. - pub async fn chain_id(&mut self) -> Result<u64, RpcError<TransportErrorKind>> { - self.inner.get_chain_id().await - } - - /// Verifies that a header's hash matches the expected hash when trust_rpc is false. - fn verify_header_hash( - &self, - header: &Header, - expected_hash: B256, - ) -> Result<(), AlloyChainProviderError> { - if self.trust_rpc { - return Ok(()); - } - - let actual_hash = header.hash_slow(); - if actual_hash != expected_hash { - return Err(AlloyChainProviderError::Transport(RpcError::Transport( - TransportErrorKind::Custom( - format!( - "Header hash mismatch: expected {expected_hash:?}, got {actual_hash:?}" - ) - .into(), - ), - ))); - } - - Ok(()) - } -} - -/// An error for the [AlloyChainProvider]. 
-#[allow(clippy::enum_variant_names)] -#[derive(Debug, thiserror::Error)] -pub enum AlloyChainProviderError { - /// Transport error - #[error(transparent)] - Transport(#[from] RpcError<TransportErrorKind>), - /// Block not found. - #[error("Block not found: {0}")] - BlockNotFound(BlockId), - /// Failed to convert RPC receipts into consensus receipts. - #[error("Failed to convert RPC receipts into consensus receipts: {0}")] - ReceiptsConversion(B256), -} - -impl From<AlloyChainProviderError> for PipelineErrorKind { - fn from(e: AlloyChainProviderError) -> Self { - match e { - AlloyChainProviderError::Transport(e) => { - Self::Temporary(PipelineError::Provider(format!("Transport error: {e}"))) - } - AlloyChainProviderError::BlockNotFound(id) => { - Self::Temporary(PipelineError::Provider(format!("L1 Block not found: {id}"))) - } - AlloyChainProviderError::ReceiptsConversion(_) => { - Self::Temporary(PipelineError::Provider( - "Failed to convert RPC receipts into consensus receipts".to_string(), - )) - } - } - } -} - -#[async_trait] -impl ChainProvider for AlloyChainProvider { - type Error = AlloyChainProviderError; - - async fn header_by_hash(&mut self, hash: B256) -> Result<Header, Self::Error> { - if let Some(header) = self.header_by_hash_cache.get(&hash) { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "header_by_hash"); - return Ok(header.clone()); - } - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "header_by_hash"); - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "header_by_hash"); - - let block = self - .inner - .get_block_by_hash(hash) - .await - .inspect_err(|_e| { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "header_by_hash"); - })? 
- .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))?; - let header = block.header.into_consensus(); - - // Verify the header hash matches what we requested - self.verify_header_hash(&header, hash)?; - - self.header_by_hash_cache.put(hash, header.clone()); - - kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "header_by_hash"); - - Ok(header) - } - - async fn block_info_by_number(&mut self, number: u64) -> Result<BlockInfo, Self::Error> { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_by_number"); - - let block = self - .inner - .get_block_by_number(number.into()) - .await - .inspect_err(|_e| { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_by_number"); - })? - .ok_or(AlloyChainProviderError::BlockNotFound(number.into()))?; - let header = block.header.into_consensus(); - - let block_info = BlockInfo { - hash: header.hash_slow(), - number, - parent_hash: header.parent_hash, - timestamp: header.timestamp, - }; - Ok(block_info) - } - - async fn receipts_by_hash(&mut self, hash: B256) -> Result<Vec<Receipt>, Self::Error> { - if let Some(receipts) = self.receipts_by_hash_cache.get(&hash) { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "receipts_by_hash"); - return Ok(receipts.clone()); - } - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "receipts_by_hash"); - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "receipts_by_hash"); - - let receipts = self - .inner - .get_block_receipts(hash.into()) - .await - .inspect_err(|_e| { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "receipts_by_hash"); - })? 
- .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))?; - let consensus_receipts = receipts - .into_iter() - .map(|r| r.inner.into_primitives_receipt().as_receipt().cloned()) - .collect::<Option<Vec<_>>>() - .ok_or(AlloyChainProviderError::ReceiptsConversion(hash))?; - - self.receipts_by_hash_cache.put(hash, consensus_receipts.clone()); - - kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "receipts_by_hash"); - - Ok(consensus_receipts) - } - - async fn block_info_and_transactions_by_hash( - &mut self, - hash: B256, - ) -> Result<(BlockInfo, Vec<TxEnvelope>), Self::Error> { - if let Some(block_info_and_txs) = self.block_info_and_transactions_by_hash_cache.get(&hash) - { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "block_info_and_tx"); - return Ok(block_info_and_txs.clone()); - } - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "block_info_and_tx"); - - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_by_hash"); - - let block = self - .inner - .get_block_by_hash(hash) - .full() - .await - .inspect_err(|_e| { - kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_by_hash"); - })? - .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))? 
- .into_consensus() - .map_transactions(|t| t.inner.into_inner()); - - // Verify the block hash matches what we requested - self.verify_header_hash(&block.header, hash)?; - - let block_info = BlockInfo { - hash, // Use the already verified hash instead of recomputing - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - }; - - self.block_info_and_transactions_by_hash_cache - .put(hash, (block_info, block.body.transactions.clone())); - - kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "block_info_and_tx"); - - Ok((block_info, block.body.transactions)) - } -} diff --git a/kona/crates/providers/providers-alloy/src/lib.rs b/kona/crates/providers/providers-alloy/src/lib.rs deleted file mode 100644 index c3559a6dfe1..00000000000 --- a/kona/crates/providers/providers-alloy/src/lib.rs +++ /dev/null @@ -1,29 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod metrics; -pub use beacon_client::BeaconClientError; -pub use metrics::Metrics; - -mod beacon_client; -pub use beacon_client::{ - APIConfigResponse, APIGenesisResponse, BeaconClient, OnlineBeaconClient, ReducedConfigData, - ReducedGenesisData, -}; - -mod blobs; -pub use blobs::{BoxedBlobWithIndex, OnlineBlobProvider}; - -mod chain_provider; -pub use chain_provider::{AlloyChainProvider, AlloyChainProviderError}; - -mod l2_chain_provider; -pub use l2_chain_provider::{AlloyL2ChainProvider, AlloyL2ChainProviderError}; - -mod pipeline; -pub use pipeline::OnlinePipeline; diff --git a/kona/crates/providers/providers-alloy/src/pipeline.rs b/kona/crates/providers/providers-alloy/src/pipeline.rs deleted file mode 100644 index 
818e153d9d7..00000000000 --- a/kona/crates/providers/providers-alloy/src/pipeline.rs +++ /dev/null @@ -1,230 +0,0 @@ -//! Contains an online derivation pipeline. - -use crate::{AlloyChainProvider, AlloyL2ChainProvider, OnlineBeaconClient, OnlineBlobProvider}; -use async_trait::async_trait; -use core::fmt::Debug; -use kona_derive::{ - DerivationPipeline, EthereumDataSource, IndexedAttributesQueueStage, L2ChainProvider, - OriginProvider, Pipeline, PipelineBuilder, PipelineErrorKind, PipelineResult, - PolledAttributesQueueStage, ResetSignal, Signal, SignalReceiver, StatefulAttributesBuilder, - StepResult, -}; -use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; -use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; -use std::sync::Arc; - -/// An online polled derivation pipeline. -type OnlinePolledDerivationPipeline = DerivationPipeline< - PolledAttributesQueueStage< - OnlineDataProvider, - AlloyChainProvider, - AlloyL2ChainProvider, - OnlineAttributesBuilder, - >, - AlloyL2ChainProvider, ->; - -/// An online managed derivation pipeline. -type OnlineManagedDerivationPipeline = DerivationPipeline< - IndexedAttributesQueueStage< - OnlineDataProvider, - AlloyChainProvider, - AlloyL2ChainProvider, - OnlineAttributesBuilder, - >, - AlloyL2ChainProvider, ->; - -/// An RPC-backed Ethereum data source. -type OnlineDataProvider = - EthereumDataSource<AlloyChainProvider, OnlineBlobProvider<OnlineBeaconClient>>; - -/// An RPC-backed payload attributes builder for the `AttributesQueue` stage of the derivation -/// pipeline. -type OnlineAttributesBuilder = StatefulAttributesBuilder<AlloyChainProvider, AlloyL2ChainProvider>; - -/// An online derivation pipeline. -#[derive(Debug)] -pub enum OnlinePipeline { - /// An online derivation pipeline that uses a polled traversal stage. - Polled(OnlinePolledDerivationPipeline), - /// An online derivation pipeline that uses a managed traversal stage. 
- Managed(OnlineManagedDerivationPipeline), -} - -impl OnlinePipeline { - /// Constructs a new polled derivation pipeline that is initialized. - pub async fn new( - cfg: Arc<RollupConfig>, - l1_cfg: Arc<L1ChainConfig>, - l2_safe_head: L2BlockInfo, - l1_origin: BlockInfo, - blob_provider: OnlineBlobProvider<OnlineBeaconClient>, - chain_provider: AlloyChainProvider, - mut l2_chain_provider: AlloyL2ChainProvider, - ) -> PipelineResult<Self> { - let mut pipeline = Self::new_polled( - cfg.clone(), - l1_cfg.clone(), - blob_provider, - chain_provider, - l2_chain_provider.clone(), - ); - - // Reset the pipeline to populate the initial L1/L2 cursor and system configuration in L1 - // Traversal. - pipeline - .signal( - ResetSignal { - l2_safe_head, - l1_origin, - system_config: l2_chain_provider - .system_config_by_number(l2_safe_head.block_info.number, cfg.clone()) - .await - .ok(), - } - .signal(), - ) - .await?; - - Ok(pipeline) - } - - /// Constructs a new polled derivation pipeline that is uninitialized. - /// - /// Uses online providers as specified by the arguments. - /// - /// Before using the returned pipeline, a [`ResetSignal`] must be sent to - /// instantiate the pipeline state. [`Self::new`] is a convenience method that - /// constructs a new online pipeline and sends the reset signal. 
- pub fn new_polled( - cfg: Arc<RollupConfig>, - l1_cfg: Arc<L1ChainConfig>, - blob_provider: OnlineBlobProvider<OnlineBeaconClient>, - chain_provider: AlloyChainProvider, - l2_chain_provider: AlloyL2ChainProvider, - ) -> Self { - let attributes = StatefulAttributesBuilder::new( - cfg.clone(), - l1_cfg, - l2_chain_provider.clone(), - chain_provider.clone(), - ); - let dap = EthereumDataSource::new_from_parts(chain_provider.clone(), blob_provider, &cfg); - - let pipeline = PipelineBuilder::new() - .rollup_config(cfg) - .dap_source(dap) - .l2_chain_provider(l2_chain_provider) - .chain_provider(chain_provider) - .builder(attributes) - .origin(BlockInfo::default()) - .build_polled(); - - Self::Polled(pipeline) - } - - /// Constructs a new indexed derivation pipeline that is uninitialized. - /// - /// Uses online providers as specified by the arguments. - /// - /// Before using the returned pipeline, a [`ResetSignal`] must be sent to - /// instantiate the pipeline state. [`Self::new`] is a convenience method that - /// constructs a new online pipeline and sends the reset signal. - pub fn new_indexed( - cfg: Arc<RollupConfig>, - l1_cfg: Arc<L1ChainConfig>, - blob_provider: OnlineBlobProvider<OnlineBeaconClient>, - chain_provider: AlloyChainProvider, - l2_chain_provider: AlloyL2ChainProvider, - ) -> Self { - let attributes = StatefulAttributesBuilder::new( - cfg.clone(), - l1_cfg, - l2_chain_provider.clone(), - chain_provider.clone(), - ); - let dap = EthereumDataSource::new_from_parts(chain_provider.clone(), blob_provider, &cfg); - - let pipeline = PipelineBuilder::new() - .rollup_config(cfg) - .dap_source(dap) - .l2_chain_provider(l2_chain_provider) - .chain_provider(chain_provider) - .builder(attributes) - .origin(BlockInfo::default()) - .build_indexed(); - - Self::Managed(pipeline) - } -} - -#[async_trait] -impl SignalReceiver for OnlinePipeline { - /// Receives a signal from the driver. 
- async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { - match self { - Self::Polled(pipeline) => pipeline.signal(signal).await, - Self::Managed(pipeline) => pipeline.signal(signal).await, - } - } -} - -impl OriginProvider for OnlinePipeline { - /// Returns the optional L1 [BlockInfo] origin. - fn origin(&self) -> Option<BlockInfo> { - match self { - Self::Polled(pipeline) => pipeline.origin(), - Self::Managed(pipeline) => pipeline.origin(), - } - } -} - -impl Iterator for OnlinePipeline { - type Item = OpAttributesWithParent; - - fn next(&mut self) -> Option<Self::Item> { - match self { - Self::Polled(pipeline) => pipeline.next(), - Self::Managed(pipeline) => pipeline.next(), - } - } -} - -#[async_trait] -impl Pipeline for OnlinePipeline { - /// Peeks at the next [OpAttributesWithParent] from the pipeline. - fn peek(&self) -> Option<&OpAttributesWithParent> { - match self { - Self::Polled(pipeline) => pipeline.peek(), - Self::Managed(pipeline) => pipeline.peek(), - } - } - - /// Attempts to progress the pipeline. - async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { - match self { - Self::Polled(pipeline) => pipeline.step(cursor).await, - Self::Managed(pipeline) => pipeline.step(cursor).await, - } - } - - /// Returns the rollup config. - fn rollup_config(&self) -> &RollupConfig { - match self { - Self::Polled(pipeline) => pipeline.rollup_config(), - Self::Managed(pipeline) => pipeline.rollup_config(), - } - } - - /// Returns the [SystemConfig] by L2 number. 
- async fn system_config_by_number( - &mut self, - number: u64, - ) -> Result<SystemConfig, PipelineErrorKind> { - match self { - Self::Polled(pipeline) => pipeline.system_config_by_number(number).await, - Self::Managed(pipeline) => pipeline.system_config_by_number(number).await, - } - } -} diff --git a/kona/crates/providers/providers-local/README.md b/kona/crates/providers/providers-local/README.md deleted file mode 100644 index a64dc94d796..00000000000 --- a/kona/crates/providers/providers-local/README.md +++ /dev/null @@ -1,83 +0,0 @@ -# `kona-providers-local` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-providers-local"><img src="https://img.shields.io/crates/v/kona-providers-alloy.svg?label=kona-providers-local&labelColor=2a2f35" alt="kona-provides-local"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - - -This crate provides a pure in-memory L2 provider implementation for the Kona OP Stack. It operates without any external RPC dependencies, serving all data from its internal cache. - -## Features - -- **BufferedL2Provider**: A pure in-memory L2 provider that serves data from cached blocks -- **ChainStateBuffer**: LRU cache for managing chain state with reorganization support -- **Chain Event Handling**: Support for processing execution extension notifications for chain events (commits, reorgs, reverts) -- **No External Dependencies**: Operates entirely from in-memory state without RPC calls - -## Architecture - -The buffered provider operates as a standalone in-memory data store: - -1. 
**In-Memory Storage**: Complete blocks with L2 block info are stored in memory -2. **Dual Indexing**: Blocks are indexed by both hash and number for efficient queries -3. **Reorg Handling**: Intelligent cache invalidation during chain reorganizations up to a configurable depth -4. **Event Processing**: Integration with execution extension notifications to maintain cache consistency -5. **Genesis Support**: Special handling for genesis blocks from the rollup configuration - -## Usage - -```rust,ignore -use kona_providers_local::{BufferedL2Provider, ChainStateEvent}; -use kona_genesis::RollupConfig; -use kona_protocol::{BatchValidationProvider, L2BlockInfo}; -use op_alloy_consensus::OpBlock; -use std::sync::Arc; - -async fn example() -> Result<(), Box<dyn std::error::Error>> { - // Create a buffered provider with rollup configuration - let rollup_config = Arc::new(RollupConfig::default()); - let provider = BufferedL2Provider::new(rollup_config, 1000, 64); - - // Add blocks to the provider - // In practice, these would come from execution extension or other sources - let block: OpBlock = unimplemented!(); - let l2_info: L2BlockInfo = unimplemented!(); - provider.add_block(block, l2_info).await?; - - // Handle chain events from execution extension notifications - let event = ChainStateEvent::ChainCommitted { - new_head: alloy_primitives::B256::ZERO, - committed: vec![], - }; - provider.handle_chain_event(event).await?; - - // Query blocks from the cache - let mut provider_clone = provider.clone(); - let block = provider_clone.block_by_number(1).await?; - let l2_info = provider_clone.l2_block_info_by_number(1).await?; - - Ok(()) -} -``` - -## Configuration - -- `cache_size`: Number of blocks to cache (affects memory usage) -- `max_reorg_depth`: Maximum reorganization depth to handle before clearing cache - -## Provider Traits - -The `BufferedL2Provider` implements the following traits from `kona-derive`: - -- `ChainProvider`: Basic block and receipt access -- 
`L2ChainProvider`: L2-specific functionality including system config access -- `BatchValidationProvider`: Batch validation support - -## Error Handling - -The provider returns specific errors for different failure scenarios: -- `BlockNotFound`: When a requested block is not in the cache -- `L2BlockInfoConstruction`: When L2 block info cannot be constructed -- `SystemConfigConversion`: When a block cannot be converted to system config -- `Buffer` errors: For cache-related issues including deep reorgs diff --git a/kona/crates/providers/providers-local/src/lib.rs b/kona/crates/providers/providers-local/src/lib.rs deleted file mode 100644 index 5b7db5d7307..00000000000 --- a/kona/crates/providers/providers-local/src/lib.rs +++ /dev/null @@ -1,18 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod buffer; -pub use buffer::{CacheStats, CachedBlock, ChainBufferError, ChainStateBuffer, ChainStateEvent}; - -mod buffered; -pub use buffered::{BufferedL2Provider, BufferedProviderError}; - -#[cfg(feature = "metrics")] -mod metrics; -#[cfg(feature = "metrics")] -pub use metrics::Metrics; diff --git a/kona/crates/supervisor/core/Cargo.toml b/kona/crates/supervisor/core/Cargo.toml deleted file mode 100644 index 7a673a4742f..00000000000 --- a/kona/crates/supervisor/core/Cargo.toml +++ /dev/null @@ -1,66 +0,0 @@ -[package] -name = "kona-supervisor-core" -version = "0.1.0" - -edition.workspace = true -license.workspace = true -rust-version.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -exclude.workspace = true - -[dependencies] -# workspace -kona-interop.workspace 
= true -kona-protocol.workspace = true -kona-supervisor-types.workspace = true -kona-supervisor-rpc = { workspace = true, features = ["jsonrpsee", "client"] } -kona-supervisor-storage.workspace = true -kona-supervisor-metrics.workspace = true -kona-genesis.workspace = true - -# alloy -alloy-eips.workspace = true -alloy-network.workspace = true -alloy-provider = { workspace = true, features = ["reqwest"] } -alloy-primitives = { workspace = true, features = ["map", "rlp", "serde"] } -alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } -alloy-rpc-client.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-consensus.workspace = true - -# op-alloy -op-alloy-rpc-types = { workspace = true, features = ["jsonrpsee"] } -op-alloy-consensus.workspace = true - -# jsonrpsee -jsonrpsee = { workspace = true, features = [ "macros", "server", "client", "ws-client" ] } - -# general -async-trait.workspace = true -serde.workspace = true -serde_json.workspace = true -tracing.workspace = true -thiserror.workspace = true -tokio = { workspace = true, features = ["sync", "macros"] } -tokio-util.workspace = true -auto_impl.workspace = true -reqwest = { workspace = true } -futures = { workspace = true } -derive_more = { workspace = true, features = ["try_from"] } - -# `metrics` feature -metrics = { workspace = true } - -[dev-dependencies] -serde_json.workspace = true -tempfile.workspace = true -alloy-transport.workspace = true -kona-interop = {workspace = true, features = ["std", "test-utils"]} -mockall.workspace = true - -[lints] -workspace = true diff --git a/kona/crates/supervisor/core/src/error.rs b/kona/crates/supervisor/core/src/error.rs deleted file mode 100644 index 08622fb5959..00000000000 --- a/kona/crates/supervisor/core/src/error.rs +++ /dev/null @@ -1,208 +0,0 @@ -//! [`SupervisorService`](crate::SupervisorService) errors. 
- -use crate::syncnode::ManagedNodeError; -use derive_more; -use jsonrpsee::types::{ErrorCode, ErrorObjectOwned}; -use kona_supervisor_storage::StorageError; -use kona_supervisor_types::AccessListError; -use op_alloy_rpc_types::SuperchainDAError; -use thiserror::Error; - -/// Custom error type for the Supervisor core logic. -#[derive(Debug, Error)] -pub enum SupervisorError { - /// Indicates that a feature or method is not yet implemented. - #[error("functionality not implemented")] - Unimplemented, - - /// No chains are configured for supervision. - #[error("empty dependency set")] - EmptyDependencySet, - - /// Unsupported chain ID. - #[error("unsupported chain ID")] - UnsupportedChainId, - - /// Data availability errors. - /// - /// Spec <https://github.com/ethereum-optimism/specs/blob/main/specs/interop/supervisor.md#protocol-specific-error-codes>. - #[error(transparent)] - SpecError(#[from] SpecError), - - /// Indicates that error occurred while interacting with the storage layer. - #[error(transparent)] - StorageError(#[from] StorageError), - - /// Indicates that managed node not found for the chain. - #[error("managed node not found for chain: {0}")] - ManagedNodeMissing(u64), - - /// Indicates the error occurred while interacting with the managed node. - #[error(transparent)] - ManagedNodeError(#[from] ManagedNodeError), - - /// Indicates the error occurred while parsing the access_list - #[error(transparent)] - AccessListError(#[from] AccessListError), - - /// Indicates the error occurred while serializing or deserializing JSON. - #[error(transparent)] - SerdeJson(#[from] serde_json::Error), - - /// Indicates the L1 block does not match the expected L1 block. - #[error("L1 block number mismatch. expected: {expected}, but got {got}")] - L1BlockMismatch { - /// Expected L1 block. - expected: u64, - /// Received L1 block. - got: u64, - }, - - /// Indicates that the chain ID could not be parsed from the access list. 
- #[error("failed to parse chain id from access list")] - ChainIdParseError(), -} - -impl PartialEq for SupervisorError { - fn eq(&self, other: &Self) -> bool { - use SupervisorError::*; - match (self, other) { - (Unimplemented, Unimplemented) => true, - (EmptyDependencySet, EmptyDependencySet) => true, - (SpecError(a), SpecError(b)) => a == b, - (StorageError(a), StorageError(b)) => a == b, - (ManagedNodeMissing(a), ManagedNodeMissing(b)) => a == b, - (ManagedNodeError(a), ManagedNodeError(b)) => a == b, - (AccessListError(a), AccessListError(b)) => a == b, - (SerdeJson(a), SerdeJson(b)) => a.to_string() == b.to_string(), - (L1BlockMismatch { expected: a, got: b }, L1BlockMismatch { expected: c, got: d }) => { - a == c && b == d - } - _ => false, - } - } -} - -impl Eq for SupervisorError {} - -/// Extending the [`SuperchainDAError`] to include errors not in the spec. -#[derive(Error, Debug, PartialEq, Eq, derive_more::TryFrom)] -#[repr(i32)] -#[try_from(repr)] -pub enum SpecError { - /// [`SuperchainDAError`] from the spec. - #[error(transparent)] - SuperchainDAError(#[from] SuperchainDAError), - - /// Error not in spec. - #[error("error not in spec")] - ErrorNotInSpec, -} - -impl SpecError { - /// Maps the proper error code from SuperchainDAError. - /// Introduced a new error code for errors not in the spec. - pub const fn code(&self) -> i32 { - match self { - Self::SuperchainDAError(e) => *e as i32, - Self::ErrorNotInSpec => -321300, - } - } -} - -impl From<SpecError> for ErrorObjectOwned { - fn from(err: SpecError) -> Self { - ErrorObjectOwned::owned(err.code(), err.to_string(), None::<()>) - } -} - -impl From<SupervisorError> for ErrorObjectOwned { - fn from(err: SupervisorError) -> Self { - match err { - // todo: handle these errors more gracefully - SupervisorError::Unimplemented | - SupervisorError::EmptyDependencySet | - SupervisorError::UnsupportedChainId | - SupervisorError::L1BlockMismatch { .. 
} | - SupervisorError::ManagedNodeMissing(_) | - SupervisorError::ManagedNodeError(_) | - SupervisorError::StorageError(_) | - SupervisorError::AccessListError(_) | - SupervisorError::ChainIdParseError() | - SupervisorError::SerdeJson(_) => ErrorObjectOwned::from(ErrorCode::InternalError), - SupervisorError::SpecError(err) => err.into(), - } - } -} - -impl From<StorageError> for SpecError { - fn from(err: StorageError) -> Self { - match err { - StorageError::Database(_) => Self::from(SuperchainDAError::DataCorruption), - StorageError::FutureData => Self::from(SuperchainDAError::FutureData), - StorageError::EntryNotFound(_) => Self::from(SuperchainDAError::MissedData), - StorageError::ConflictError => Self::from(SuperchainDAError::ConflictingData), - StorageError::BlockOutOfOrder => Self::from(SuperchainDAError::OutOfOrder), - StorageError::DatabaseNotInitialised => Self::ErrorNotInSpec, - _ => Self::ErrorNotInSpec, - } - } -} - -#[cfg(test)] -mod test { - use kona_supervisor_storage::EntryNotFoundError; - - use super::*; - - #[test] - fn test_storage_error_conversion() { - let test_err = SpecError::from(StorageError::DatabaseNotInitialised); - let expected_err = SpecError::ErrorNotInSpec; - - assert_eq!(test_err, expected_err); - } - - #[test] - fn test_unmapped_storage_error_conversion() { - let spec_err = ErrorObjectOwned::from(SpecError::ErrorNotInSpec); - let expected_err = SpecError::ErrorNotInSpec; - - assert_eq!(spec_err, expected_err.into()); - - let spec_err = ErrorObjectOwned::from(SpecError::from(StorageError::LockPoisoned)); - let expected_err = SpecError::ErrorNotInSpec; - - assert_eq!(spec_err, expected_err.into()); - - let spec_err = ErrorObjectOwned::from(SpecError::from(StorageError::FutureData)); - let expected_err = SpecError::SuperchainDAError(SuperchainDAError::FutureData); - - assert_eq!(spec_err, expected_err.into()); - - let spec_err = ErrorObjectOwned::from(SpecError::from(StorageError::EntryNotFound( - 
EntryNotFoundError::DerivedBlockNotFound(12), - ))); - let expected_err = SpecError::SuperchainDAError(SuperchainDAError::MissedData); - - assert_eq!(spec_err, expected_err.into()); - } - - #[test] - fn test_supervisor_error_conversion() { - // This will happen implicitly in server rpc response calls. - let supervisor_err = ErrorObjectOwned::from(SupervisorError::SpecError(SpecError::from( - StorageError::LockPoisoned, - ))); - let expected_err = SpecError::ErrorNotInSpec; - - assert_eq!(supervisor_err, expected_err.into()); - - let supervisor_err = ErrorObjectOwned::from(SupervisorError::SpecError(SpecError::from( - StorageError::FutureData, - ))); - let expected_err = SpecError::SuperchainDAError(SuperchainDAError::FutureData); - - assert_eq!(supervisor_err, expected_err.into()); - } -} diff --git a/kona/crates/supervisor/core/src/reorg/task.rs b/kona/crates/supervisor/core/src/reorg/task.rs deleted file mode 100644 index 3fba4e52ad2..00000000000 --- a/kona/crates/supervisor/core/src/reorg/task.rs +++ /dev/null @@ -1,1253 +0,0 @@ -use super::metrics::Metrics; -use crate::ReorgHandlerError; -use alloy_eips::BlockNumberOrTag; -use alloy_primitives::{B256, ChainId}; -use alloy_rpc_client::RpcClient; -use alloy_rpc_types_eth::Block; -use derive_more::Constructor; -use kona_interop::DerivedRefPair; -use kona_protocol::BlockInfo; -use kona_supervisor_storage::{DbReader, StorageError, StorageRewinder}; -use std::sync::Arc; -use tracing::{debug, info, trace, warn}; - -/// Handles reorg for a single chain -#[derive(Debug, Constructor)] -pub(crate) struct ReorgTask<DB> { - chain_id: ChainId, - db: Arc<DB>, - rpc_client: RpcClient, -} - -#[derive(Debug)] -struct RewoundState { - source: BlockInfo, - derived: Option<BlockInfo>, -} - -impl<DB> ReorgTask<DB> -where - DB: DbReader + StorageRewinder + Send + Sync + 'static, -{ - /// Processes reorg for a single chain. If the chain is consistent with the L1 chain, - /// does nothing. 
- pub(crate) async fn process_chain_reorg(&self) -> Result<(), ReorgHandlerError> { - trace!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Processing reorg for chain..." - ); - - let latest_state = self.db.latest_derivation_state()?; - - // Find last valid source block for this chain - let rewound_state = match self.find_rewind_target(latest_state).await { - Ok(Some(rewind_target_source)) => { - Some(self.rewind_to_target_source(rewind_target_source).await?) - } - Ok(None) => { - // No reorg needed, latest source block is still canonical - return Ok(()); - } - Err(ReorgHandlerError::RewindTargetPreInterop) => { - self.rewind_to_activation_block().await? - } - Err(err) => { - return Err(err); - } - }; - - // record metrics - if let Some(rewound_state) = rewound_state { - let l1_depth = latest_state.source.number - rewound_state.source.number; - let mut l2_depth = 0; - if let Some(derived) = rewound_state.derived { - l2_depth = latest_state.derived.number - derived.number; - } - Metrics::record_block_depth(self.chain_id, l1_depth, l2_depth); - } - info!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Processed reorged successfully" - ); - Ok(()) - } - - async fn rewind_to_target_source( - &self, - rewind_target_source: BlockInfo, - ) -> Result<RewoundState, ReorgHandlerError> { - info!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - rewind_target_source = rewind_target_source.number, - "Reorg detected - rewinding to target source block..." 
- ); - - // Call the rewinder to handle the DB rewinding - let derived_block_rewound = - self.db.rewind_to_source(&rewind_target_source.id()).inspect_err(|err| { - warn!( - target: "supervisor::reorg_handler::db", - chain_id = %self.chain_id, - %err, - "Failed to rewind DB to derived block" - ); - })?; - - Ok(RewoundState { source: rewind_target_source, derived: derived_block_rewound }) - } - - async fn rewind_to_activation_block(&self) -> Result<Option<RewoundState>, ReorgHandlerError> { - info!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Reorg detected - rewinding to activation block..." - ); - - // If the rewind target is pre-interop, we need to rewind to the activation block - match self.db.get_activation_block() { - Ok(activation_block) => { - let activation_source_block = self.db.derived_to_source(activation_block.id())?; - self.db.rewind(&activation_block.id()).inspect_err(|err| { - warn!( - target: "supervisor::reorg_handler::db", - chain_id = %self.chain_id, - %err, - "Failed to rewind DB to activation block" - ); - })?; - Ok(Some(RewoundState { - source: activation_source_block, - derived: Some(activation_block), - })) - } - Err(StorageError::DatabaseNotInitialised) => { - debug!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "No activation block found, no rewind required" - ); - Ok(None) - } - Err(err) => Err(ReorgHandlerError::StorageError(err)), - } - } - - /// Finds the rewind target for a chain during a reorg - /// - /// Returns `None` if no rewind is needed, or the target block to rewind to. - /// Returns ReorgHandlerError::RewindTargetPreInterop if the rewind target is before the interop - /// activation block. - async fn find_rewind_target( - &self, - latest_state: DerivedRefPair, - ) -> Result<Option<BlockInfo>, ReorgHandlerError> { - trace!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Finding rewind target..." 
- ); - - // Check if the latest source block is still canonical - if self.is_block_canonical(latest_state.source.number, latest_state.source.hash).await? { - debug!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - block_number = latest_state.source.number, - "Latest source block is still canonical, no reorg needed" - ); - return Ok(None); - } - - let common_ancestor = self.find_common_ancestor().await?; - let mut prev_source = latest_state.source; - let mut current_source = self.db.get_source_block(prev_source.number - 1)?; - - while current_source.number > common_ancestor.number { - if current_source.number % 5 == 0 { - trace!( - target: "supervisor::reorg_handler", - current_block=current_source.number, - common_ancestor=common_ancestor.number, - "Finding rewind target..." - ) - } - - // If the current source block is canonical, we found the rewind target - if self.is_block_canonical(current_source.number, current_source.hash).await? { - info!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - block_number = current_source.number, - "Found canonical block as rewind target" - ); - break; - } - - // Otherwise, walk back to the previous source block - prev_source = current_source; - current_source = self.db.get_source_block(current_source.number - 1)?; - } - - // return the previous source block as the rewind target since rewinding is inclusive - Ok(Some(prev_source)) - } - - async fn find_common_ancestor(&self) -> Result<BlockInfo, ReorgHandlerError> { - trace!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Finding common ancestor." 
- ); - - match self.db.get_safety_head_ref(kona_interop::SafetyLevel::Finalized) { - Ok(finalized_block) => { - let common_ancestor = self.db.derived_to_source(finalized_block.id())?; - return Ok(common_ancestor); - } - Err(StorageError::FutureData) => { /* fall through to activation block */ } - Err(err) => { - return Err(ReorgHandlerError::StorageError(err)); - } - } - - debug!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "No finalized block found, checking activation block." - ); - - match self.db.get_activation_block() { - Ok(activation_block) => { - let activation_source_block = self.db.derived_to_source(activation_block.id())?; - if self - .is_block_canonical( - activation_source_block.number, - activation_source_block.hash, - ) - .await? - { - Ok(activation_source_block) - } else { - debug!( - target: "supervisor::reorg_handler", - chain_id = %self.chain_id, - "Activation block is not canonical, no common ancestor found" - ); - Err(ReorgHandlerError::RewindTargetPreInterop) - } - } - Err(StorageError::DatabaseNotInitialised) => { - Err(ReorgHandlerError::RewindTargetPreInterop) - } - Err(err) => Err(ReorgHandlerError::StorageError(err)), - } - } - - /// Checks if a block is canonical on L1 - async fn is_block_canonical( - &self, - block_number: u64, - expected_hash: B256, - ) -> Result<bool, ReorgHandlerError> { - let canonical_l1 = self - .rpc_client - .request::<_, Block>( - "eth_getBlockByNumber", - (BlockNumberOrTag::Number(block_number), false), - ) - .await - .map_err(|err| { - warn!( - target: "supervisor::reorg_handler", - block_number, - %err, - "Failed to fetch L1 block from RPC" - ); - ReorgHandlerError::RPCError(err.to_string()) - })?; - Ok(canonical_l1.hash() == expected_hash) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_eips::BlockNumHash; - use alloy_rpc_types_eth::Header; - use alloy_transport::mock::*; - use kona_interop::{DerivedRefPair, SafetyLevel}; - use kona_protocol::BlockInfo; - use 
kona_supervisor_storage::{ - DerivationStorageReader, HeadRefStorageReader, LogStorageReader, StorageError, - }; - use kona_supervisor_types::{Log, SuperHead}; - use mockall::{mock, predicate}; - - mock!( - #[derive(Debug)] - pub Db {} - - impl LogStorageReader for Db { - fn get_block(&self, block_number: u64) -> Result<BlockInfo, StorageError>; - fn get_latest_block(&self) -> Result<BlockInfo, StorageError>; - fn get_log(&self, block_number: u64,log_index: u32) -> Result<Log, StorageError>; - fn get_logs(&self, block_number: u64) -> Result<Vec<Log>, StorageError>; - } - - impl DerivationStorageReader for Db { - fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result<BlockInfo, StorageError>; - fn latest_derived_block_at_source(&self, source_block_id: BlockNumHash) -> Result<BlockInfo, StorageError>; - fn latest_derivation_state(&self) -> Result<DerivedRefPair, StorageError>; - fn get_source_block(&self, source_block_number: u64) -> Result<BlockInfo, StorageError>; - fn get_activation_block(&self) -> Result<BlockInfo, StorageError>; - } - - impl HeadRefStorageReader for Db { - fn get_safety_head_ref(&self, safety_level: SafetyLevel) -> Result<BlockInfo, StorageError>; - fn get_super_head(&self) -> Result<SuperHead, StorageError>; - } - - impl StorageRewinder for Db { - fn rewind(&self, to: &BlockNumHash) -> Result<(), StorageError>; - fn rewind_log_storage(&self, to: &BlockNumHash) -> Result<(), StorageError>; - fn rewind_to_source(&self, to: &BlockNumHash) -> Result<Option<BlockInfo>, StorageError>; - } - ); - - mock! 
( - pub chain_db {} - ); - - #[tokio::test] - async fn test_process_chain_reorg_no_reorg_needed() { - let mut mock_db = MockDb::new(); - - let latest_source = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - let latest_state = DerivedRefPair { - source: latest_source, - derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), - }; - - // Mock the latest derivation state - mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); - - // Mock the RPC to return the same block (no reorg) - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - let canonical_block: Block = Block { - header: Header { - hash: latest_source.hash, - inner: alloy_consensus::Header { - number: latest_source.number, - parent_hash: latest_source.parent_hash, - timestamp: latest_source.timestamp, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - asserter.push_success(&canonical_block); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - - let result = reorg_task.process_chain_reorg().await; - - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_process_chain_reorg_with_rewind() { - let mut mock_db = MockDb::new(); - - let latest_source = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - let latest_state = DerivedRefPair { - source: latest_source, - derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), - }; - - let canonical_source = - BlockInfo::new(B256::from([5u8; 32]), 95, B256::from([6u8; 32]), 12344); - - let rewind_target_source = - BlockInfo::new(B256::from([10u8; 32]), 96, B256::from([11u8; 32]), 12340); - - let rewind_target_derived = - BlockInfo::new(B256::from([12u8; 32]), 45, B256::from([13u8; 32]), 12341); - - let finalized_block = - BlockInfo::new(B256::from([20u8; 32]), 40, 
B256::from([21u8; 32]), 12330); - - // Mock the latest derivation state - mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); - - // Mock finding common ancestor - mock_db.expect_get_safety_head_ref().times(1).returning(move |_| Ok(finalized_block)); - - mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(canonical_source)); - - mock_db.expect_get_source_block().times(5).returning( - move |block_number| match block_number { - 99 => Ok(BlockInfo::new(B256::from([16u8; 32]), 99, B256::from([17u8; 32]), 12344)), - 98 => Ok(BlockInfo::new(B256::from([17u8; 32]), 98, B256::from([18u8; 32]), 12343)), - 97 => Ok(BlockInfo::new(B256::from([18u8; 32]), 97, B256::from([19u8; 32]), 12342)), - 96 => Ok(rewind_target_source), - 95 => Ok(canonical_source), - _ => Err(StorageError::ConflictError), - }, - ); - - // Mock the RPC to show reorg happened - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - // First call shows different hash (reorg detected) - let different_block: Block = Block { - header: Header { - hash: B256::from([99u8; 32]), // Different hash - inner: alloy_consensus::Header { - number: latest_source.number, - parent_hash: latest_source.parent_hash, - timestamp: latest_source.timestamp, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - asserter.push_success(&different_block); - asserter.push_success(&different_block); - asserter.push_success(&different_block); - asserter.push_success(&different_block); - asserter.push_success(&different_block); - - // Second call for checking if rewind target is canonical - let canonical_block: Block = Block { - header: Header { - hash: canonical_source.hash, - inner: alloy_consensus::Header { - number: canonical_source.number, - parent_hash: canonical_source.parent_hash, - timestamp: canonical_source.timestamp, - ..Default::default() - }, - 
..Default::default() - }, - ..Default::default() - }; - asserter.push_success(&canonical_block); - - // Mock rewind operations - mock_db - .expect_rewind_to_source() - .times(1) - .returning(move |_| Ok(Some(rewind_target_derived))); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - - let result = reorg_task.process_chain_reorg().await; - - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_process_chain_reorg_rewind_pre_interop() { - let mut mock_db = MockDb::new(); - - let latest_source = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - let latest_state = DerivedRefPair { - source: latest_source, - derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), - }; - - let activation_block = - BlockInfo::new(B256::from([10u8; 32]), 1, B256::from([11u8; 32]), 12000); - - let activation_source = - BlockInfo::new(B256::from([12u8; 32]), 10, B256::from([13u8; 32]), 11999); - - // Mock the latest derivation state - mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); - - // Mock finding common ancestor fails with pre-interop - mock_db.expect_get_safety_head_ref().times(1).returning(|_| Err(StorageError::FutureData)); - - mock_db - .expect_get_activation_block() - .times(2) // Once in find_common_ancestor, once in rewind_to_activation_block - .returning(move || Ok(activation_block)); - - mock_db - .expect_derived_to_source() - .times(2) // Once in find_common_ancestor, once in rewind_to_activation_block - .returning(move |_| Ok(activation_source)); - - // Mock the RPC calls - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - // First call shows different hash (reorg detected) - let different_block: Block = Block { - header: Header { - hash: B256::from([99u8; 32]), - inner: alloy_consensus::Header { - number: latest_source.number, - parent_hash: 
latest_source.parent_hash, - timestamp: latest_source.timestamp, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - asserter.push_success(&different_block); - - // Activation block is not canonical - let non_canonical_activation: Block = Block { - header: Header { - hash: B256::from([99u8; 32]), // Different from expected - inner: alloy_consensus::Header { - number: activation_source.number, - parent_hash: activation_source.parent_hash, - timestamp: activation_source.timestamp, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - asserter.push_success(&non_canonical_activation); - - // Mock rewind to activation block - mock_db.expect_rewind().times(1).returning(|_| Ok(())); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - - let result = reorg_task.process_chain_reorg().await; - - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_process_chain_reorg_storage_error() { - let mut mock_db = MockDb::new(); - - // DB fails to get latest derivation state - mock_db - .expect_latest_derivation_state() - .times(1) - .returning(|| Err(StorageError::LockPoisoned)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.process_chain_reorg().await; - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ReorgHandlerError::StorageError(StorageError::LockPoisoned) - )); - } - - #[tokio::test] - async fn test_find_rewind_target_without_reorg() { - let mut mock_db = MockDb::new(); - let latest_source: Block = Block { - header: Header { - hash: B256::from([1u8; 32]), - inner: alloy_consensus::Header { - number: 42, - parent_hash: B256::ZERO, - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let latest_state = DerivedRefPair { - source: BlockInfo::new( - latest_source.header.hash, - 
latest_source.header.number, - latest_source.header.parent_hash, - latest_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([5u8; 32]), 200, B256::ZERO, 1100), - }; - - // Mock the latest derivation state and expect this to be called once - mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - // Mock RPC response - asserter.push_success(&latest_source); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - let rewind_target = reorg_task.process_chain_reorg().await; - - // Should succeed since the latest source block is still canonical - assert!(rewind_target.is_ok()); - } - - #[tokio::test] - async fn test_find_rewind_target_with_reorg() { - let mut mock_db = MockDb::new(); - let latest_source: Block = Block { - header: Header { - hash: B256::from([1u8; 32]), - inner: alloy_consensus::Header { - number: 41, - parent_hash: B256::from([2u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let latest_state = DerivedRefPair { - source: BlockInfo::new( - latest_source.header.hash, - latest_source.header.number, - latest_source.header.parent_hash, - latest_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), - }; - - let finalized_source: Block = Block { - header: Header { - hash: B256::from([2u8; 32]), - inner: alloy_consensus::Header { - number: 38, - parent_hash: B256::from([1u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let finalized_state = DerivedRefPair { - source: BlockInfo::new( - finalized_source.header.hash, - finalized_source.header.number, - finalized_source.header.parent_hash, - finalized_source.header.timestamp, - ), - derived: 
BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), - }; - - let reorg_source: Block = Block { - header: Header { - hash: B256::from([14u8; 32]), - inner: alloy_consensus::Header { - number: 40, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let reorg_source_info = BlockInfo::new( - reorg_source.header.hash, - reorg_source.header.number, - reorg_source.header.parent_hash, - reorg_source.header.timestamp, - ); - - let mut source_39: Block = reorg_source.clone(); - source_39.header.inner.number = 39; - let source_39_info = BlockInfo::new( - source_39.header.hash, - source_39.header.number, - source_39.header.parent_hash, - source_39.header.timestamp, - ); - - let incorrect_source: Block = Block { - header: Header { - hash: B256::from([15u8; 32]), - inner: alloy_consensus::Header { - number: 5000, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - mock_db.expect_latest_derivation_state().returning(move || Ok(latest_state)); - mock_db - .expect_get_safety_head_ref() - .times(1) - .returning(move |_| Ok(finalized_state.derived)); - mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(finalized_state.source)); - - mock_db.expect_get_source_block().times(3).returning( - move |block_number| match block_number { - 41 => Ok(latest_state.source), - 40 => Ok(reorg_source_info), - 39 => Ok(source_39_info), - 38 => Ok(finalized_state.source), - _ => Ok(finalized_state.source), - }, - ); - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - // First return the reorged block - asserter.push_success(&reorg_source); - - // Then returning some random incorrect blocks 3 times till it reaches the finalized block - asserter.push_success(&incorrect_source); - 
asserter.push_success(&incorrect_source); - asserter.push_success(&incorrect_source); - - // Finally returning the correct block - asserter.push_success(&finalized_source); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - let rewind_target = reorg_task.find_rewind_target(latest_state).await; - - // Should succeed since the latest source block is still canonical - assert!(rewind_target.is_ok()); - assert_eq!(rewind_target.unwrap(), Some(source_39_info)); - } - - #[tokio::test] - async fn test_find_rewind_target_with_finalized_future_activation_canonical() { - let mut mock_db = MockDb::new(); - let latest_source: Block = Block { - header: Header { - hash: B256::from([1u8; 32]), - inner: alloy_consensus::Header { - number: 41, - parent_hash: B256::from([2u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let latest_state = DerivedRefPair { - source: BlockInfo::new( - latest_source.header.hash, - latest_source.header.number, - latest_source.header.parent_hash, - latest_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), - }; - - let activation_source: Block = Block { - header: Header { - hash: B256::from([2u8; 32]), - inner: alloy_consensus::Header { - number: 38, - parent_hash: B256::from([1u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let activation_state = DerivedRefPair { - source: BlockInfo::new( - activation_source.header.hash, - activation_source.header.number, - activation_source.header.parent_hash, - activation_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), - }; - - let reorg_source: Block = Block { - header: Header { - hash: B256::from([14u8; 32]), - inner: alloy_consensus::Header { - number: 40, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - 
..Default::default() - }, - ..Default::default() - }; - - let reorg_source_info = BlockInfo::new( - reorg_source.header.hash, - reorg_source.header.number, - reorg_source.header.parent_hash, - reorg_source.header.timestamp, - ); - - let mut source_39: Block = reorg_source.clone(); - source_39.header.inner.number = 39; - let source_39_info = BlockInfo::new( - source_39.header.hash, - source_39.header.number, - source_39.header.parent_hash, - source_39.header.timestamp, - ); - - let incorrect_source: Block = Block { - header: Header { - hash: B256::from([15u8; 32]), - inner: alloy_consensus::Header { - number: 5000, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - mock_db - .expect_get_safety_head_ref() - .times(1) - .returning(move |_| Err(StorageError::FutureData)); - mock_db - .expect_get_activation_block() - .times(1) - .returning(move || Ok(activation_state.derived)); - mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_state.source)); - - mock_db.expect_get_source_block().times(3).returning( - move |block_number| match block_number { - 41 => Ok(latest_state.source), - 40 => Ok(reorg_source_info), - 39 => Ok(source_39_info), - 38 => Ok(activation_state.source), - _ => Ok(activation_state.source), - }, - ); - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - // First return the reorged block - asserter.push_success(&reorg_source); - - // Return the activation block source to make sure it is canonical - // Used in `find_common_ancestor` - asserter.push_success(&activation_source); - - // Then returning some random incorrect blocks 3 times till it reaches the finalized block - asserter.push_success(&incorrect_source); - asserter.push_success(&incorrect_source); - asserter.push_success(&incorrect_source); - - // Finally returning the correct block - 
asserter.push_success(&activation_source); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - let rewind_target = reorg_task.find_rewind_target(latest_state).await; - - // Should succeed since the latest source block is still canonical - assert!(rewind_target.is_ok()); - assert_eq!(rewind_target.unwrap(), Some(source_39_info)); - } - - #[tokio::test] - async fn test_find_rewind_target_with_finalized_future_activation_not_canonical() { - let mut mock_db = MockDb::new(); - let latest_source: Block = Block { - header: Header { - hash: B256::from([1u8; 32]), - inner: alloy_consensus::Header { - number: 41, - parent_hash: B256::from([2u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let latest_state = DerivedRefPair { - source: BlockInfo::new( - latest_source.header.hash, - latest_source.header.number, - latest_source.header.parent_hash, - latest_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), - }; - - let activation_source: Block = Block { - header: Header { - hash: B256::from([2u8; 32]), - inner: alloy_consensus::Header { - number: 38, - parent_hash: B256::from([1u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let activation_state = DerivedRefPair { - source: BlockInfo::new( - activation_source.header.hash, - activation_source.header.number, - activation_source.header.parent_hash, - activation_source.header.timestamp, - ), - derived: BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), - }; - - let reorg_source: Block = Block { - header: Header { - hash: B256::from([14u8; 32]), - inner: alloy_consensus::Header { - number: 40, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let incorrect_source: Block = Block { - header: Header { - hash: 
B256::from([15u8; 32]), - inner: alloy_consensus::Header { - number: 5000, - parent_hash: B256::from([13u8; 32]), - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - mock_db - .expect_get_safety_head_ref() - .times(1) - .returning(move |_| Err(StorageError::FutureData)); - mock_db - .expect_get_activation_block() - .times(1) - .returning(move || Ok(activation_state.derived)); - mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_state.source)); - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - - // First return the reorged block - asserter.push_success(&reorg_source); - - // Return the incorrect source to make sure activation block is not canonical - // Used in `find_common_ancestor` - asserter.push_success(&incorrect_source); - - let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); - let rewind_target = reorg_task.find_rewind_target(latest_state).await; - - assert!(matches!(rewind_target, Err(ReorgHandlerError::RewindTargetPreInterop))); - } - - #[tokio::test] - async fn test_is_block_canonical() { - let canonical_hash = B256::from([1u8; 32]); - let non_canonical_hash = B256::from([2u8; 32]); - - let canonical_block: Block = Block { - header: Header { - hash: canonical_hash, - inner: alloy_consensus::Header { - number: 100, - parent_hash: B256::ZERO, - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let non_canonical_block: Block = Block { - header: Header { - hash: non_canonical_hash, - inner: alloy_consensus::Header { - number: 100, - parent_hash: B256::ZERO, - timestamp: 12345, - ..Default::default() - }, - ..Default::default() - }, - ..Default::default() - }; - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let rpc_client = RpcClient::new(transport, false); - 
asserter.push_success(&canonical_block); - asserter.push_success(&non_canonical_block); - - let reorg_task = ReorgTask::new(1, Arc::new(MockDb::new()), rpc_client); - - let result = reorg_task.is_block_canonical(100, canonical_hash).await; - assert!(result.is_ok()); - - // Should return false - let result = reorg_task.is_block_canonical(100, canonical_hash).await; - assert!(result.is_ok()); - assert!(!result.unwrap()); - } - - #[tokio::test] - async fn test_rewind_to_activation_block_success() { - let mut mock_db = MockDb::new(); - - let activation_block = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - let activation_source = - BlockInfo::new(B256::from([3u8; 32]), 200, B256::from([4u8; 32]), 12346); - - // Expect get_activation_block to be called - mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); - - // Expect derived_to_source to be called - mock_db - .expect_derived_to_source() - .times(1) - .with(mockall::predicate::eq(activation_block.id())) - .returning(move |_| Ok(activation_source)); - - // Expect rewind to be called - mock_db - .expect_rewind() - .times(1) - .with(mockall::predicate::eq(activation_block.id())) - .returning(|_| Ok(())); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_activation_block().await; - - assert!(result.is_ok()); - let pair = result.unwrap().unwrap(); - assert_eq!(pair.source, activation_source); - assert_eq!(pair.derived.unwrap(), activation_block); - } - - #[tokio::test] - async fn test_rewind_to_activation_block_database_not_initialized() { - let mut mock_db = MockDb::new(); - - // Expect get_activation_block to return DatabaseNotInitialised - mock_db - .expect_get_activation_block() - .times(1) - .returning(|| Err(StorageError::DatabaseNotInitialised)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - 
RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_activation_block().await; - - // Should succeed with None (no-op case) - assert!(result.is_ok()); - assert!(result.unwrap().is_none()); - } - - #[tokio::test] - async fn test_rewind_to_activation_block_storage_error() { - let mut mock_db = MockDb::new(); - - // Expect get_activation_block to return a different storage error - mock_db - .expect_get_activation_block() - .times(1) - .returning(|| Err(StorageError::LockPoisoned)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_activation_block().await; - - // Should return storage error - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ReorgHandlerError::StorageError(StorageError::LockPoisoned) - )); - } - - #[tokio::test] - async fn test_rewind_to_activation_block_derived_to_source_fails() { - let mut mock_db = MockDb::new(); - - let activation_block = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - // Expect get_activation_block to succeed - mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); - - // Expect derived_to_source to fail - mock_db.expect_derived_to_source().times(1).returning(|_| Err(StorageError::LockPoisoned)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_activation_block().await; - - // Should return storage error - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ReorgHandlerError::StorageError(StorageError::LockPoisoned) - )); - } - - #[tokio::test] - async fn test_rewind_to_activation_block_rewind_fails() { - let mut mock_db = MockDb::new(); - - let activation_block = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - 
let activation_source = - BlockInfo::new(B256::from([3u8; 32]), 200, B256::from([4u8; 32]), 12346); - - // Expect get_activation_block to succeed - mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); - - // Expect derived_to_source to succeed - mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_source)); - - // Expect rewind to fail - mock_db.expect_rewind().times(1).returning(|_| Err(StorageError::LockPoisoned)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_activation_block().await; - - // Should return storage error - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ReorgHandlerError::StorageError(StorageError::LockPoisoned) - )); - } - - #[tokio::test] - async fn test_rewind_to_target_source_success() { - let mut mock_db = MockDb::new(); - - let rewind_target_source = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - let rewind_target_derived = - BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346); - - // Expect rewind to be called - mock_db - .expect_rewind_to_source() - .times(1) - .with(predicate::eq(rewind_target_source.id())) - .returning(move |_| Ok(Some(rewind_target_derived))); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_target_source(rewind_target_source).await; - - assert!(result.is_ok()); - let pair = result.unwrap(); - assert_eq!(pair.source, rewind_target_source); - assert_eq!(pair.derived.unwrap(), rewind_target_derived); - } - - #[tokio::test] - async fn test_rewind_to_target_source_rewind_fails() { - let mut mock_db = MockDb::new(); - - let rewind_target_source = - BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); - - // Expect rewind to fail - 
mock_db.expect_rewind_to_source().times(1).returning(|_| Err(StorageError::LockPoisoned)); - - let reorg_task = ReorgTask::new( - 1, - Arc::new(mock_db), - RpcClient::new(MockTransport::new(Asserter::new()), false), - ); - - let result = reorg_task.rewind_to_target_source(rewind_target_source).await; - - assert!(result.is_err()); - assert!(matches!( - result.unwrap_err(), - ReorgHandlerError::StorageError(StorageError::LockPoisoned) - )); - } -} diff --git a/kona/crates/supervisor/core/src/rpc/server.rs b/kona/crates/supervisor/core/src/rpc/server.rs deleted file mode 100644 index 7154ea89e1f..00000000000 --- a/kona/crates/supervisor/core/src/rpc/server.rs +++ /dev/null @@ -1,520 +0,0 @@ -//! Server-side implementation of the Supervisor RPC API. - -use super::Metrics; -use crate::{SpecError, SupervisorError, SupervisorService}; -use alloy_eips::eip1898::BlockNumHash; -use alloy_primitives::{B256, ChainId, map::HashMap}; -use async_trait::async_trait; -use jsonrpsee::{core::RpcResult, types::ErrorObject}; -use kona_interop::{DependencySet, DerivedIdPair, ExecutingDescriptor, SafetyLevel}; -use kona_protocol::BlockInfo; -use kona_supervisor_rpc::{ - SuperRootOutputRpc, SupervisorApiServer, SupervisorChainSyncStatus, SupervisorSyncStatus, -}; -use kona_supervisor_types::{HexStringU64, SuperHead}; -use std::sync::Arc; -use tracing::{trace, warn}; - -/// The server-side implementation struct for the [`SupervisorApiServer`]. -/// It holds a reference to the core Supervisor logic. -#[derive(Debug)] -pub struct SupervisorRpc<T> { - /// Reference to the core Supervisor logic. - /// Using Arc allows sharing the Supervisor instance if needed, - supervisor: Arc<T>, -} - -impl<T> SupervisorRpc<T> { - /// Creates a new [`SupervisorRpc`] instance. 
- pub fn new(supervisor: Arc<T>) -> Self { - Metrics::init(); - trace!(target: "supervisor::rpc", "Creating new SupervisorRpc handler"); - Self { supervisor } - } -} - -#[async_trait] -impl<T> SupervisorApiServer for SupervisorRpc<T> -where - T: SupervisorService + 'static, -{ - async fn cross_derived_to_source( - &self, - chain_id_hex: HexStringU64, - derived: BlockNumHash, - ) -> RpcResult<BlockInfo> { - let chain_id = ChainId::from(chain_id_hex); - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_CROSS_DERIVED_TO_SOURCE, - async { - trace!( - target: "supervisor::rpc", - %chain_id, - ?derived, - "Received cross_derived_to_source request" - ); - - let source_block = - self.supervisor.derived_to_source_block(chain_id, derived).map_err(|err| { - warn!( - target: "supervisor::rpc", - %chain_id, - ?derived, - %err, - "Failed to get source block for derived block" - ); - ErrorObject::from(err) - })?; - - Ok(source_block) - } - .await - ) - } - - async fn local_unsafe(&self, chain_id_hex: HexStringU64) -> RpcResult<BlockNumHash> { - let chain_id = ChainId::from(chain_id_hex); - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_LOCAL_UNSAFE, - async { - trace!(target: "supervisor::rpc", - %chain_id, - "Received local_unsafe request" - ); - - Ok(self.supervisor.local_unsafe(chain_id)?.id()) - } - .await - ) - } - - async fn local_safe(&self, chain_id_hex: HexStringU64) -> RpcResult<DerivedIdPair> { - let chain_id = ChainId::from(chain_id_hex); - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_LOCAL_SAFE, - async { - trace!(target: "supervisor::rpc", - %chain_id, - "Received local_safe request" - ); - - let derived = self.supervisor.local_safe(chain_id)?.id(); - let source = self.supervisor.derived_to_source_block(chain_id, derived)?.id(); - - Ok(DerivedIdPair { source, derived }) - } - .await - ) - } - - async fn dependency_set_v1(&self) -> RpcResult<DependencySet> { - crate::observe_rpc_call!( - 
Metrics::SUPERVISOR_RPC_METHOD_DEPENDENCY_SET, - async { - trace!(target: "supervisor::rpc", - "Received the dependency set" - ); - - Ok(self.supervisor.dependency_set().to_owned()) - } - .await - ) - } - - async fn cross_safe(&self, chain_id_hex: HexStringU64) -> RpcResult<DerivedIdPair> { - let chain_id = ChainId::from(chain_id_hex); - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_CROSS_SAFE, - async { - trace!(target: "supervisor::rpc", - %chain_id, - "Received cross_safe request" - ); - - let derived = self.supervisor.cross_safe(chain_id)?.id(); - let source = self.supervisor.derived_to_source_block(chain_id, derived)?.id(); - - Ok(DerivedIdPair { source, derived }) - } - .await - ) - } - - async fn finalized(&self, chain_id_hex: HexStringU64) -> RpcResult<BlockNumHash> { - let chain_id = ChainId::from(chain_id_hex); - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_FINALIZED, - async { - trace!(target: "supervisor::rpc", - %chain_id, - "Received finalized request" - ); - - Ok(self.supervisor.finalized(chain_id)?.id()) - } - .await - ) - } - - async fn finalized_l1(&self) -> RpcResult<BlockInfo> { - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_FINALIZED_L1, - async { - trace!(target: "supervisor::rpc", "Received finalized_l1 request"); - Ok(self.supervisor.finalized_l1()?) 
- } - .await - ) - } - - async fn super_root_at_timestamp( - &self, - timestamp_hex: HexStringU64, - ) -> RpcResult<SuperRootOutputRpc> { - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_SUPER_ROOT_AT_TIMESTAMP, - async { - let timestamp = u64::from(timestamp_hex); - trace!(target: "supervisor::rpc", - %timestamp, - "Received super_root_at_timestamp request" - ); - - self.supervisor.super_root_at_timestamp(timestamp) - .await - .map_err(|err| { - warn!(target: "supervisor::rpc", %err, "Error from core supervisor super_root_at_timestamp"); - ErrorObject::from(err) - }) - }.await - ) - } - - async fn check_access_list( - &self, - inbox_entries: Vec<B256>, - min_safety: SafetyLevel, - executing_descriptor: ExecutingDescriptor, - ) -> RpcResult<()> { - // TODO:: refactor, maybe build proc macro to record metrics - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_CHECK_ACCESS_LIST, - async { - trace!(target: "supervisor::rpc", - num_inbox_entries = inbox_entries.len(), - ?min_safety, - ?executing_descriptor, - "Received check_access_list request", - ); - self.supervisor - .check_access_list(inbox_entries, min_safety, executing_descriptor) - .map_err(|err| { - warn!(target: "supervisor::rpc", %err, "Error from core supervisor check_access_list"); - ErrorObject::from(err) - }) - }.await - ) - } - - async fn sync_status(&self) -> RpcResult<SupervisorSyncStatus> { - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_SYNC_STATUS, - async { - trace!(target: "supervisor::rpc", "Received sync_status request"); - - let mut chains = self - .supervisor - .chain_ids() - .map(|id| (id, Default::default())) - .collect::<HashMap<_, SupervisorChainSyncStatus>>(); - - if chains.is_empty() { - // return error if no chains configured - // - // <https://github.com/ethereum-optimism/optimism/blob/fac40575a8bcefd325c50a52e12b0e93254ac3f8/op-supervisor/supervisor/backend/status/status.go#L100-L104> - // - // todo: add to spec - 
Err(SupervisorError::EmptyDependencySet)?; - } - - let mut min_synced_l1 = BlockInfo { number: u64::MAX, ..Default::default() }; - let mut cross_safe_timestamp = u64::MAX; - let mut finalized_timestamp = u64::MAX; - let mut uninitialized_chain_db_count = 0; - - for (id, status) in chains.iter_mut() { - let head = match self.supervisor.super_head(*id) { - Ok(head) => head, - Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec)) => { - uninitialized_chain_db_count += 1; - continue; - } - Err(err) => return Err(ErrorObject::from(err)), - }; - - // uses lowest safe and finalized timestamps, as well as l1 block, of all l2s - // - // <https://github.com/ethereum-optimism/optimism/blob/fac40575a8bcefd325c50a52e12b0e93254ac3f8/op-supervisor/supervisor/backend/status/status.go#L117-L131> - // - // todo: add to spec - let SuperHead { l1_source, cross_safe, finalized, .. } = &head; - - let default_block = BlockInfo::default(); - let l1_source = l1_source.as_ref().unwrap_or(&default_block); - let cross_safe = cross_safe.as_ref().unwrap_or(&default_block); - let finalized = finalized.as_ref().unwrap_or(&default_block); - - if l1_source.number < min_synced_l1.number { - min_synced_l1 = *l1_source; - } - if cross_safe.timestamp < cross_safe_timestamp { - cross_safe_timestamp = cross_safe.timestamp; - } - if finalized.timestamp < finalized_timestamp { - finalized_timestamp = finalized.timestamp; - } - - *status = head.into(); - } - - if uninitialized_chain_db_count == chains.len() { - warn!(target: "supervisor::rpc", "No chain db initialized"); - return Err(ErrorObject::from(SupervisorError::SpecError( - SpecError::ErrorNotInSpec, - ))); - } - - Ok(SupervisorSyncStatus { - min_synced_l1, - cross_safe_timestamp, - finalized_timestamp, - chains, - }) - } - .await - ) - } - - async fn all_safe_derived_at( - &self, - derived_from: BlockNumHash, - ) -> RpcResult<HashMap<ChainId, BlockNumHash>> { - crate::observe_rpc_call!( - Metrics::SUPERVISOR_RPC_METHOD_ALL_SAFE_DERIVED_AT, - 
async { - trace!(target: "supervisor::rpc", - ?derived_from, - "Received all_safe_derived_at request" - ); - - let mut chains = self - .supervisor - .chain_ids() - .map(|id| (id, Default::default())) - .collect::<HashMap<_, BlockNumHash>>(); - - for (id, block) in chains.iter_mut() { - *block = self.supervisor.latest_block_from(derived_from, *id)?.id(); - } - - Ok(chains) - } - .await - ) - } -} - -impl<T> Clone for SupervisorRpc<T> { - fn clone(&self) -> Self { - Self { supervisor: self.supervisor.clone() } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::ChainId; - use kona_protocol::BlockInfo; - use kona_supervisor_storage::{EntryNotFoundError, StorageError}; - use mockall::*; - use std::sync::Arc; - - mock!( - #[derive(Debug)] - pub SupervisorService {} - - #[async_trait] - impl SupervisorService for SupervisorService { - fn chain_ids(&self) -> impl Iterator<Item = ChainId>; - fn dependency_set(&self) -> &DependencySet; - fn super_head(&self, chain: ChainId) -> Result<SuperHead, SupervisorError>; - fn latest_block_from(&self, l1_block: BlockNumHash, chain: ChainId) -> Result<BlockInfo, SupervisorError>; - fn derived_to_source_block(&self, chain: ChainId, derived: BlockNumHash) -> Result<BlockInfo, SupervisorError>; - fn local_unsafe(&self, chain: ChainId) -> Result<BlockInfo, SupervisorError>; - fn local_safe(&self, chain: ChainId) -> Result<BlockInfo, SupervisorError>; - fn cross_safe(&self, chain: ChainId) -> Result<BlockInfo, SupervisorError>; - fn finalized(&self, chain: ChainId) -> Result<BlockInfo, SupervisorError>; - fn finalized_l1(&self) -> Result<BlockInfo, SupervisorError>; - fn check_access_list(&self, inbox_entries: Vec<B256>, min_safety: SafetyLevel, executing_descriptor: ExecutingDescriptor) -> Result<(), SupervisorError>; - async fn super_root_at_timestamp(&self, timestamp: u64) -> Result<SuperRootOutputRpc, SupervisorError>; - } - ); - - #[tokio::test] - async fn test_sync_status_empty_chains() { - let mut 
mock_service = MockSupervisorService::new(); - mock_service.expect_chain_ids().returning(|| Box::new(vec![].into_iter())); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await; - - assert!(result.is_err()); - assert_eq!(result.unwrap_err(), ErrorObject::from(SupervisorError::EmptyDependencySet)); - } - - #[tokio::test] - async fn test_sync_status_single_chain() { - let chain_id = ChainId::from(1u64); - - let block_info = BlockInfo { number: 42, ..Default::default() }; - let super_head = SuperHead { - l1_source: Some(block_info), - cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), - finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), - ..Default::default() - }; - - let mut mock_service = MockSupervisorService::new(); - mock_service.expect_chain_ids().returning(move || Box::new(vec![chain_id].into_iter())); - mock_service.expect_super_head().returning(move |_| Ok(super_head)); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await.unwrap(); - - assert_eq!(result.min_synced_l1.number, 42); - assert_eq!(result.cross_safe_timestamp, 100); - assert_eq!(result.finalized_timestamp, 50); - assert_eq!(result.chains.len(), 1); - } - - #[tokio::test] - async fn test_sync_status_missing_super_head() { - let chain_id_1 = ChainId::from(1u64); - let chain_id_2 = ChainId::from(2u64); - - // Only chain_id_1 has a SuperHead, chain_id_2 is missing - let block_info = BlockInfo { number: 42, ..Default::default() }; - let super_head = SuperHead { - l1_source: Some(block_info), - cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), - finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), - ..Default::default() - }; - - let mut mock_service = MockSupervisorService::new(); - mock_service - .expect_chain_ids() - .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); - mock_service.expect_super_head().returning(move |chain_id| 
{ - if chain_id == chain_id_1 { - Ok(super_head) - } else { - Err(SupervisorError::StorageError(StorageError::EntryNotFound( - EntryNotFoundError::DerivedBlockNotFound(1), - ))) - } - }); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await; - - assert!(result.is_err()); - } - - #[tokio::test] - async fn test_sync_status_uninitialized_chain_db() { - let chain_id_1 = ChainId::from(1u64); - let chain_id_2 = ChainId::from(2u64); - - // Case 1: No chain db is initialized - let mut mock_service = MockSupervisorService::new(); - mock_service - .expect_chain_ids() - .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); - mock_service - .expect_super_head() - .times(2) - .returning(move |_| Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec))); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await; - assert!(result.is_err()); - assert_eq!( - result.unwrap_err(), - ErrorObject::from(SupervisorError::SpecError(SpecError::ErrorNotInSpec,)) - ); - - // Case 2: Only one chain db is initialized - let mut super_head_map = std::collections::HashMap::new(); - let block_info = BlockInfo { number: 42, ..Default::default() }; - let super_head = SuperHead { - l1_source: Some(block_info), - cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), - finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), - ..Default::default() - }; - super_head_map.insert(chain_id_1, super_head); - - let mut mock_service = MockSupervisorService::new(); - mock_service - .expect_chain_ids() - .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); - mock_service.expect_super_head().times(2).returning(move |chain_id| { - if chain_id == chain_id_1 { - Ok(super_head) - } else { - Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec)) - } - }); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await; - 
assert!(result.is_ok()); - - // Case 3: Both chain dbs are initialized - let mut super_head_map = std::collections::HashMap::new(); - let block_info_1 = BlockInfo { number: 42, ..Default::default() }; - let super_head_1 = SuperHead { - l1_source: Some(block_info_1), - cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), - finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), - ..Default::default() - }; - let block_info_2 = BlockInfo { number: 43, ..Default::default() }; - let super_head_2 = SuperHead { - l1_source: Some(block_info_2), - cross_safe: Some(BlockInfo { timestamp: 110, ..Default::default() }), - finalized: Some(BlockInfo { timestamp: 60, ..Default::default() }), - ..Default::default() - }; - super_head_map.insert(chain_id_1, super_head_1); - super_head_map.insert(chain_id_2, super_head_2); - - let mut mock_service = MockSupervisorService::new(); - mock_service - .expect_chain_ids() - .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); - mock_service.expect_super_head().times(2).returning(move |chain_id| { - if chain_id == chain_id_1 { Ok(super_head_1) } else { Ok(super_head_2) } - }); - - let rpc = SupervisorRpc::new(Arc::new(mock_service)); - let result = rpc.sync_status().await; - assert!(result.is_ok()); - let status = result.unwrap(); - assert_eq!(status.min_synced_l1.number, 42); - assert_eq!(status.cross_safe_timestamp, 100); - assert_eq!(status.finalized_timestamp, 50); - assert_eq!(status.chains.len(), 2); - } -} diff --git a/kona/crates/supervisor/core/src/syncnode/client.rs b/kona/crates/supervisor/core/src/syncnode/client.rs deleted file mode 100644 index 9675636e605..00000000000 --- a/kona/crates/supervisor/core/src/syncnode/client.rs +++ /dev/null @@ -1,406 +0,0 @@ -use super::{AuthenticationError, ClientError, metrics::Metrics}; -use alloy_primitives::{B256, ChainId}; -use alloy_rpc_types_engine::{Claims, JwtSecret}; -use alloy_rpc_types_eth::BlockNumHash; -use async_trait::async_trait; 
-use jsonrpsee::{ - core::client::Subscription, - ws_client::{HeaderMap, HeaderValue, WsClient, WsClientBuilder}, -}; -use kona_supervisor_metrics::observe_metrics_for_result_async; -use kona_supervisor_rpc::{BlockInfo, ManagedModeApiClient, jsonrpsee::SubscriptionTopic}; -use kona_supervisor_types::{BlockSeal, OutputV0, Receipts, SubscriptionEvent}; -use std::{ - fmt::Debug, - sync::{Arc, OnceLock}, -}; -use tokio::sync::Mutex; -use tracing::{error, info}; - -/// Trait for a managed node client that provides various methods to interact with the node. -#[async_trait] -pub trait ManagedNodeClient: Send + Sync + Debug { - /// Returns the [`ChainId`] of the managed node. - async fn chain_id(&self) -> Result<ChainId, ClientError>; - - /// Subscribes to [`SubscriptionEvent`] from the managed node. - async fn subscribe_events(&self) -> Result<Subscription<SubscriptionEvent>, ClientError>; - - /// Fetches [`Receipts`] for a given block hash. - async fn fetch_receipts(&self, block_hash: B256) -> Result<Receipts, ClientError>; - - /// Fetches the [`OutputV0`] at a specific timestamp. - async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ClientError>; - - /// Fetches the pending [`OutputV0`] at a specific timestamp. - async fn pending_output_v0_at_timestamp(&self, timestamp: u64) - -> Result<OutputV0, ClientError>; - - /// Fetches the L2 [`BlockInfo`] by timestamp. - async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result<BlockInfo, ClientError>; - - /// Fetches the [`BlockInfo`] by block number. - async fn block_ref_by_number(&self, block_number: u64) -> Result<BlockInfo, ClientError>; - - /// Resets the managed node to the pre-interop state. - async fn reset_pre_interop(&self) -> Result<(), ClientError>; - - /// Resets the node state with the provided block IDs. 
- async fn reset( - &self, - unsafe_id: BlockNumHash, - cross_unsafe_id: BlockNumHash, - local_safe_id: BlockNumHash, - cross_safe_id: BlockNumHash, - finalised_id: BlockNumHash, - ) -> Result<(), ClientError>; - - /// Invalidates a block in the managed node. - async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; - - /// Provides L1 [`BlockInfo`] to the managed node. - async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError>; - - /// Updates the finalized block ID in the managed node. - async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; - - /// Updates the cross-unsafe block ID in the managed node. - async fn update_cross_unsafe( - &self, - cross_unsafe_block_id: BlockNumHash, - ) -> Result<(), ClientError>; - - /// Updates the cross-safe block ID in the managed node. - async fn update_cross_safe( - &self, - source_block_id: BlockNumHash, - derived_block_id: BlockNumHash, - ) -> Result<(), ClientError>; - - /// Resets the ws-client to None when server disconnects - async fn reset_ws_client(&self); -} - -/// [`ClientConfig`] sets the configuration for the managed node client. -#[derive(Debug, Clone)] -pub struct ClientConfig { - /// The URL + port of the managed node - pub url: String, - /// jwt secret for the managed node interop rpc - pub jwt_secret: JwtSecret, -} - -/// Client for interacting with a managed node. -#[derive(Debug)] -pub struct Client { - config: ClientConfig, - /// Chain ID of the managed node - chain_id: OnceLock<ChainId>, - /// The attached web socket client - ws_client: Mutex<Option<Arc<WsClient>>>, -} - -impl Client { - /// Creates a new [`Client`] with the given configuration. - pub fn new(config: ClientConfig) -> Self { - Metrics::init(config.url.as_ref()); - Self { config, chain_id: OnceLock::new(), ws_client: Mutex::new(None) } - } - - /// Creates authentication headers using JWT secret. 
- fn create_auth_headers(&self) -> Result<HeaderMap, ClientError> { - // Create JWT claims with current time - let claims = Claims::with_current_timestamp(); - let token = self.config.jwt_secret.encode(&claims).map_err(|err| { - error!(target: "supervisor::managed_node", %err, "Failed to encode JWT claims"); - AuthenticationError::InvalidJwt - })?; - - let mut headers = HeaderMap::new(); - let auth_header = format!("Bearer {token}"); - - headers.insert( - "Authorization", - HeaderValue::from_str(&auth_header).map_err(|err| { - error!(target: "supervisor::managed_node", %err, "Invalid authorization header"); - AuthenticationError::InvalidHeader - })?, - ); - - Ok(headers) - } - - /// Returns a reference to the WebSocket client, creating it if it doesn't exist. - // todo: support http client as well - pub async fn get_ws_client(&self) -> Result<Arc<WsClient>, ClientError> { - let mut ws_client_guard = self.ws_client.lock().await; - if ws_client_guard.is_none() { - let headers = self.create_auth_headers().inspect_err(|err| { - error!(target: "supervisor::managed_node", %err, "Failed to create auth headers"); - })?; - - info!(target: "supervisor::managed_node", ws_url = self.config.url, "Creating a new web socket client"); - let client = - WsClientBuilder::default().set_headers(headers).build(&self.config.url).await?; - - *ws_client_guard = Some(Arc::new(client)); - } - Ok(ws_client_guard.clone().unwrap()) - } -} - -#[async_trait] -impl ManagedNodeClient for Client { - async fn reset_ws_client(&self) { - let mut ws_client_guard = self.ws_client.lock().await; - if ws_client_guard.is_some() { - *ws_client_guard = None; - }; - } - - async fn chain_id(&self) -> Result<ChainId, ClientError> { - if let Some(chain_id) = self.chain_id.get() { - return Ok(*chain_id); - } - - let client = self.get_ws_client().await?; - let chain_id_str = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - 
Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_CHAIN_ID, - async { - client.chain_id().await - }, - "node" => self.config.url.clone() - ) - .inspect_err(|err| { - error!(target: "supervisor::managed_node", %err, "Failed to get chain ID"); - })?; - - let chain_id = chain_id_str.parse::<u64>().inspect_err(|err| { - error!(target: "supervisor::managed_node", %err, "Failed to parse chain ID"); - })?; - - let _ = self.chain_id.set(chain_id); - Ok(chain_id) - } - - async fn subscribe_events(&self) -> Result<Subscription<SubscriptionEvent>, ClientError> { - let client = self.get_ws_client().await?; // This returns ManagedNodeError, handled by your function - let subscription = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_SUBSCRIBE_EVENTS, - async { - ManagedModeApiClient::subscribe_events(client.as_ref(), SubscriptionTopic::Events).await - }, - "node" => self.config.url.clone() - )?; - - Ok(subscription) - } - - async fn fetch_receipts(&self, block_hash: B256) -> Result<Receipts, ClientError> { - let client = self.get_ws_client().await?; // This returns ManagedNodeError, handled by your function - let receipts = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_FETCH_RECEIPTS, - async { - ManagedModeApiClient::fetch_receipts(client.as_ref(), block_hash).await - }, - "node" => self.config.url.clone() - )?; - - Ok(receipts) - } - - async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ClientError> { - let client = self.get_ws_client().await?; - let output_v0 = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - 
Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_OUTPUT_V0_AT_TIMESTAMP, - async { - ManagedModeApiClient::output_v0_at_timestamp(client.as_ref(), timestamp).await - }, - "node" => self.config.url.clone() - )?; - - Ok(output_v0) - } - - async fn pending_output_v0_at_timestamp( - &self, - timestamp: u64, - ) -> Result<OutputV0, ClientError> { - let client = self.get_ws_client().await?; - let output_v0 = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_PENDING_OUTPUT_V0_AT_TIMESTAMP, - async { - ManagedModeApiClient::pending_output_v0_at_timestamp(client.as_ref(), timestamp).await - }, - "node" => self.config.url.clone() - )?; - - Ok(output_v0) - } - - async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result<BlockInfo, ClientError> { - let client = self.get_ws_client().await?; - let block_info = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_L2_BLOCK_REF_BY_TIMESTAMP, - async { - ManagedModeApiClient::l2_block_ref_by_timestamp(client.as_ref(), timestamp).await - }, - "node" => self.config.url.clone() - )?; - - Ok(block_info) - } - - async fn block_ref_by_number(&self, block_number: u64) -> Result<BlockInfo, ClientError> { - let client = self.get_ws_client().await?; - let block_info = observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_BLOCK_REF_BY_NUMBER, - async { - ManagedModeApiClient::l2_block_ref_by_number(client.as_ref(), block_number).await - }, - "node" => self.config.url.clone() - )?; - - Ok(block_info) - } - - async fn 
reset_pre_interop(&self) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_RESET_PRE_INTEROP, - async { - ManagedModeApiClient::reset_pre_interop(client.as_ref()).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn reset( - &self, - unsafe_id: BlockNumHash, - cross_unsafe_id: BlockNumHash, - local_safe_id: BlockNumHash, - cross_safe_id: BlockNumHash, - finalised_id: BlockNumHash, - ) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_RESET, - async { - ManagedModeApiClient::reset(client.as_ref(), unsafe_id, cross_unsafe_id, local_safe_id, cross_safe_id, finalised_id).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_INVALIDATE_BLOCK, - async { - ManagedModeApiClient::invalidate_block(client.as_ref(), seal).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_PROVIDE_L1, - 
async { - ManagedModeApiClient::provide_l1(client.as_ref(), block_info).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_UPDATE_FINALIZED, - async { - ManagedModeApiClient::update_finalized(client.as_ref(), finalized_block_id).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn update_cross_unsafe( - &self, - cross_unsafe_block_id: BlockNumHash, - ) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_UPDATE_CROSS_UNSAFE, - async { - ManagedModeApiClient::update_cross_unsafe(client.as_ref(), cross_unsafe_block_id).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } - - async fn update_cross_safe( - &self, - source_block_id: BlockNumHash, - derived_block_id: BlockNumHash, - ) -> Result<(), ClientError> { - let client = self.get_ws_client().await?; - observe_metrics_for_result_async!( - Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, - Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, - Metrics::RPC_METHOD_UPDATE_CROSS_SAFE, - async { - ManagedModeApiClient::update_cross_safe(client.as_ref(), derived_block_id, source_block_id).await - }, - "node" => self.config.url.clone() - )?; - Ok(()) - } -} diff --git a/kona/crates/supervisor/core/src/syncnode/error.rs b/kona/crates/supervisor/core/src/syncnode/error.rs deleted file mode 100644 index cd429fe6e4c..00000000000 --- 
a/kona/crates/supervisor/core/src/syncnode/error.rs +++ /dev/null @@ -1,67 +0,0 @@ -use kona_supervisor_storage::StorageError; -use thiserror::Error; - -/// Represents various errors that can occur during node management. -#[derive(Debug, Error, PartialEq, Eq)] -pub enum ManagedNodeError { - /// Represents an error that occurred while starting the managed node. - #[error(transparent)] - ClientError(#[from] ClientError), - - /// Represents an error that occurred while fetching data from the storage. - #[error(transparent)] - StorageError(#[from] StorageError), - - /// Unable to successfully fetch block. - #[error("failed to get block by number, number: {0}")] - GetBlockByNumberFailed(u64), - - /// Represents an error that occurred while sending an event to the channel. - #[error("failed to send event to channel: {0}")] - ChannelSendFailed(String), - - /// Represents an error that occurred while resetting the managed node. - #[error("failed to reset the managed node")] - ResetFailed, -} - -/// Error establishing authenticated connection to managed node. -#[derive(Debug, Error, PartialEq, Eq)] -pub enum AuthenticationError { - /// Missing valid JWT secret for authentication header. - #[error("jwt secret not found or invalid")] - InvalidJwt, - /// Invalid header format. - #[error("invalid authorization header")] - InvalidHeader, -} - -/// Represents errors that can occur while interacting with the managed node client. -#[derive(Debug, Error)] -pub enum ClientError { - /// Represents an error that occurred while starting the managed node. - #[error(transparent)] - Client(#[from] jsonrpsee::core::ClientError), - - /// Represents an error that occurred while authenticating to the managed node. - #[error("failed to authenticate: {0}")] - Authentication(#[from] AuthenticationError), - - /// Represents an error that occurred while parsing a chain ID from a string. 
- #[error(transparent)] - ChainIdParseError(#[from] std::num::ParseIntError), -} - -impl PartialEq for ClientError { - fn eq(&self, other: &Self) -> bool { - use ClientError::*; - match (self, other) { - (Client(a), Client(b)) => a.to_string() == b.to_string(), - (Authentication(a), Authentication(b)) => a == b, - (ChainIdParseError(a), ChainIdParseError(b)) => a == b, - _ => false, - } - } -} - -impl Eq for ClientError {} diff --git a/kona/crates/supervisor/core/src/syncnode/node.rs b/kona/crates/supervisor/core/src/syncnode/node.rs deleted file mode 100644 index b76c89b4585..00000000000 --- a/kona/crates/supervisor/core/src/syncnode/node.rs +++ /dev/null @@ -1,943 +0,0 @@ -//! [`ManagedNode`] implementation for handling events from the managed node. - -use super::{ - BlockProvider, ManagedNodeClient, ManagedNodeController, ManagedNodeDataProvider, - ManagedNodeError, SubscriptionHandler, resetter::Resetter, -}; -use crate::event::ChainEvent; -use alloy_eips::BlockNumberOrTag; -use alloy_network::Ethereum; -use alloy_primitives::{B256, ChainId}; -use alloy_provider::{Provider, RootProvider}; -use alloy_rpc_types_eth::BlockNumHash; -use async_trait::async_trait; -use kona_interop::{BlockReplacement, DerivedRefPair}; -use kona_protocol::BlockInfo; -use kona_supervisor_storage::{DerivationStorageReader, HeadRefStorageReader, LogStorageReader}; -use kona_supervisor_types::{BlockSeal, OutputV0, Receipts}; -use std::sync::Arc; -use tokio::sync::{Mutex, mpsc}; -use tracing::{debug, error, trace, warn}; - -/// [`ManagedNode`] processes events dispatched from the managed node. -/// -/// It implements `SubscriptionHandler`, forwards resulting `ChainEvent`s to the chain -/// processor, and delegates control operations to the underlying client/resetter. -/// The WebSocket subscription lifecycle (subscription creation, reconnection/restart) -/// is managed by the supervisor actor and the client, not by this type. 
-#[derive(Debug)] -pub struct ManagedNode<DB, C> { - /// The attached web socket client - client: Arc<C>, - /// Shared L1 provider for fetching receipts - l1_provider: RootProvider<Ethereum>, - /// Resetter for handling node resets - resetter: Arc<Resetter<DB, C>>, - /// Channel for sending events to the chain processor - chain_event_sender: mpsc::Sender<ChainEvent>, - - /// Cached chain ID - chain_id: Mutex<Option<ChainId>>, -} - -impl<DB, C> ManagedNode<DB, C> -where - DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, - C: ManagedNodeClient + Send + Sync + 'static, -{ - /// Creates a new [`ManagedNode`] with the specified client. - pub fn new( - client: Arc<C>, - db_provider: Arc<DB>, - l1_provider: RootProvider<Ethereum>, - chain_event_sender: mpsc::Sender<ChainEvent>, - ) -> Self { - let resetter = Arc::new(Resetter::new(client.clone(), l1_provider.clone(), db_provider)); - - Self { client, resetter, l1_provider, chain_event_sender, chain_id: Mutex::new(None) } - } - - /// Returns the [`ChainId`] of the [`ManagedNode`]. - /// If the chain ID is already cached, it returns that. - /// If not, it fetches the chain ID from the managed node. 
- pub async fn chain_id(&self) -> Result<ChainId, ManagedNodeError> { - // we are caching the chain ID here to avoid multiple calls to the client - // there is a possibility that chain ID might be being cached in the client already - // but we are caching it here to make sure it caches in the `ManagedNode` context - let mut cache = self.chain_id.lock().await; - if let Some(chain_id) = *cache { - Ok(chain_id) - } else { - let chain_id = self.client.chain_id().await?; - *cache = Some(chain_id); - Ok(chain_id) - } - } -} - -#[async_trait] -impl<DB, C> SubscriptionHandler for ManagedNode<DB, C> -where - DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, - C: ManagedNodeClient + Send + Sync + 'static, -{ - async fn handle_exhaust_l1( - &self, - derived_ref_pair: &DerivedRefPair, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!( - target: "supervisor::managed_node", - %chain_id, - %derived_ref_pair, - "Handling L1 exhaust event" - ); - - let next_block_number = derived_ref_pair.source.number + 1; - let next_block = self - .l1_provider - .get_block_by_number(BlockNumberOrTag::Number(next_block_number)) - .await - .map_err(|err| { - error!(target: "supervisor::managed_node", %chain_id, %err, "Failed to fetch next L1 block"); - ManagedNodeError::GetBlockByNumberFailed(next_block_number) - })?; - - let block = match next_block { - Some(block) => block, - None => { - // If the block is None, it means the block is either empty or unavailable. - // ignore this case - return Ok(()); - } - }; - - let new_source = BlockInfo { - hash: block.header.hash, - number: block.header.number, - parent_hash: block.header.parent_hash, - timestamp: block.header.timestamp, - }; - - if new_source.parent_hash != derived_ref_pair.source.hash { - // this could happen due to a reorg. 
- // this case should be handled by the reorg manager - debug!( - target: "supervisor::managed_node", - %chain_id, - %new_source, - current_source = %derived_ref_pair.source, - "Parent hash mismatch. Possible reorg detected" - ); - } - - self.client.provide_l1(new_source).await.inspect_err(|err| { - error!( - target: "supervisor::managed_node", - %chain_id, - %new_source, - %err, - "Failed to provide L1 block" - ); - })?; - Ok(()) - } - - async fn handle_reset(&self, reset_id: &str) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, reset_id, "Handling reset event"); - - self.resetter.reset().await?; - Ok(()) - } - - async fn handle_unsafe_block(&self, unsafe_block: &BlockInfo) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, %unsafe_block, "Unsafe block event received"); - - self.chain_event_sender.send(ChainEvent::UnsafeBlock { block: *unsafe_block }).await.map_err(|err| { - warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send unsafe block event"); - ManagedNodeError::ChannelSendFailed(err.to_string()) - })?; - Ok(()) - } - - async fn handle_derivation_update( - &self, - derived_ref_pair: &DerivedRefPair, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, "Derivation update event received"); - - self.chain_event_sender.send(ChainEvent::DerivedBlock { derived_ref_pair: *derived_ref_pair }).await.map_err(|err| { - warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send derivation update event"); - ManagedNodeError::ChannelSendFailed(err.to_string()) - })?; - Ok(()) - } - - async fn handle_replace_block( - &self, - replacement: &BlockReplacement, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, 
%replacement, "Block replacement received"); - - self.chain_event_sender.send(ChainEvent::BlockReplaced { replacement: *replacement }).await.map_err(|err| { - warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send block replacement event"); - ManagedNodeError::ChannelSendFailed(err.to_string()) - })?; - Ok(()) - } - - async fn handle_derivation_origin_update( - &self, - origin: &BlockInfo, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, %origin, "Derivation origin update received"); - - self.chain_event_sender.send(ChainEvent::DerivationOriginUpdate { origin: *origin }).await.map_err(|err| { - warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send derivation origin update event"); - ManagedNodeError::ChannelSendFailed(err.to_string()) - })?; - Ok(()) - } -} - -/// Implements [`BlockProvider`] for [`ManagedNode`] by delegating to the underlying WebSocket -/// client. 
-#[async_trait] -impl<DB, C> BlockProvider for ManagedNode<DB, C> -where - DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, - C: ManagedNodeClient + Send + Sync + 'static, -{ - async fn block_by_number(&self, block_number: u64) -> Result<BlockInfo, ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, block_number, "Fetching block by number"); - - let block = self.client.block_ref_by_number(block_number).await?; - Ok(block) - } - async fn fetch_receipts(&self, block_hash: B256) -> Result<Receipts, ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, %block_hash, "Fetching receipts for block"); - - let receipt = self.client.fetch_receipts(block_hash).await?; - Ok(receipt) - } -} - -#[async_trait] -impl<DB, C> ManagedNodeDataProvider for ManagedNode<DB, C> -where - DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, - C: ManagedNodeClient + Send + Sync + 'static, -{ - async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, timestamp, "Fetching output v0 at timestamp"); - - let outputv0 = self.client.output_v0_at_timestamp(timestamp).await?; - Ok(outputv0) - } - - async fn pending_output_v0_at_timestamp( - &self, - timestamp: u64, - ) -> Result<OutputV0, ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, timestamp, "Fetching pending output v0 at timestamp"); - - let outputv0 = self.client.pending_output_v0_at_timestamp(timestamp).await?; - Ok(outputv0) - } - - async fn l2_block_ref_by_timestamp( - &self, - timestamp: u64, - ) -> Result<BlockInfo, ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", 
%chain_id, timestamp, "Fetching L2 block ref by timestamp"); - - let block = self.client.l2_block_ref_by_timestamp(timestamp).await?; - Ok(block) - } -} - -#[async_trait] -impl<DB, C> ManagedNodeController for ManagedNode<DB, C> -where - DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, - C: ManagedNodeClient + Send + Sync + 'static, -{ - async fn update_finalized( - &self, - finalized_block_id: BlockNumHash, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!( - target: "supervisor::managed_node", - %chain_id, - finalized_block_number = finalized_block_id.number, - "Updating finalized block" - ); - - self.client.update_finalized(finalized_block_id).await?; - Ok(()) - } - - async fn update_cross_unsafe( - &self, - cross_unsafe_block_id: BlockNumHash, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!( - target: "supervisor::managed_node", - %chain_id, - cross_unsafe_block_number = cross_unsafe_block_id.number, - "Updating cross unsafe block", - ); - - self.client.update_cross_unsafe(cross_unsafe_block_id).await?; - Ok(()) - } - - async fn update_cross_safe( - &self, - source_block_id: BlockNumHash, - derived_block_id: BlockNumHash, - ) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!( - target: "supervisor::managed_node", - %chain_id, - source_block_number = source_block_id.number, - derived_block_number = derived_block_id.number, - "Updating cross safe block" - ); - self.client.update_cross_safe(source_block_id, derived_block_id).await?; - Ok(()) - } - - async fn reset(&self) -> Result<(), ManagedNodeError> { - let chain_id = self.chain_id().await?; - trace!(target: "supervisor::managed_node", %chain_id, "Resetting managed node state"); - - self.resetter.reset().await?; - Ok(()) - } - - async fn invalidate_block(&self, block_seal: BlockSeal) -> Result<(), ManagedNodeError> { - let chain_id = 
self.chain_id().await?; - trace!( - target: "supervisor::managed_node", - %chain_id, - block_number = block_seal.number, - "Invalidating block" - ); - - self.client.invalidate_block(block_seal).await?; - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::syncnode::ClientError; - use alloy_primitives::{B256, ChainId, hex::FromHex}; - use alloy_provider::RootProvider; - use alloy_rpc_client::RpcClient; - use alloy_transport::mock::*; - use jsonrpsee::core::client::Subscription; - use kona_interop::{BlockReplacement, DerivedRefPair, SafetyLevel}; - use kona_protocol::BlockInfo; - use kona_supervisor_storage::{ - DerivationStorageReader, HeadRefStorageReader, LogStorageReader, StorageError, - }; - use kona_supervisor_types::{BlockSeal, Log, OutputV0, Receipts, SubscriptionEvent, SuperHead}; - use mockall::{mock, predicate::*}; - use std::sync::Arc; - use tokio::sync::mpsc; - - mock! { - #[derive(Debug)] - pub Client {} - - #[async_trait] - impl ManagedNodeClient for Client { - async fn chain_id(&self) -> Result<ChainId, ClientError>; - async fn subscribe_events(&self) -> Result<Subscription<SubscriptionEvent>, ClientError>; - async fn fetch_receipts(&self, block_hash: B256) -> Result<Receipts, ClientError>; - async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ClientError>; - async fn pending_output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ClientError>; - async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result<BlockInfo, ClientError>; - async fn block_ref_by_number(&self, block_number: u64) -> Result<BlockInfo, ClientError>; - async fn reset_pre_interop(&self) -> Result<(), ClientError>; - async fn reset(&self, unsafe_id: BlockNumHash, cross_unsafe_id: BlockNumHash, local_safe_id: BlockNumHash, cross_safe_id: BlockNumHash, finalised_id: BlockNumHash) -> Result<(), ClientError>; - async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; - async fn provide_l1(&self, block_info: 
BlockInfo) -> Result<(), ClientError>; - async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn update_cross_unsafe(&self, cross_unsafe_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn update_cross_safe(&self, source_block_id: BlockNumHash, derived_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn reset_ws_client(&self); - } - } - - mock! { - #[derive(Debug)] - pub Db {} - - impl LogStorageReader for Db { - fn get_block(&self, block_number: u64) -> Result<BlockInfo, StorageError>; - fn get_latest_block(&self) -> Result<BlockInfo, StorageError>; - fn get_log(&self, block_number: u64, log_index: u32) -> Result<Log, StorageError>; - fn get_logs(&self, block_number: u64) -> Result<Vec<Log>, StorageError>; - } - - impl DerivationStorageReader for Db { - fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result<BlockInfo, StorageError>; - fn latest_derived_block_at_source(&self, _source_block_id: BlockNumHash) -> Result<BlockInfo, StorageError>; - fn latest_derivation_state(&self) -> Result<DerivedRefPair, StorageError>; - fn get_source_block(&self, source_block_number: u64) -> Result<BlockInfo, StorageError>; - fn get_activation_block(&self) -> Result<BlockInfo, StorageError>; - } - - impl HeadRefStorageReader for Db { - fn get_safety_head_ref(&self, level: SafetyLevel) -> Result<BlockInfo, StorageError>; - fn get_super_head(&self) -> Result<SuperHead, StorageError>; - } - } - - #[tokio::test] - async fn test_chain_id_caching() { - let mut client = MockClient::new(); - - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, 
l1_provider, tx); - - // First call fetches from client - let id1 = node.chain_id().await.unwrap(); - assert_eq!(id1, ChainId::from(42u64)); - // Second call uses cache - let id2 = node.chain_id().await.unwrap(); - assert_eq!(id2, ChainId::from(42u64)); - } - - #[tokio::test] - async fn test_handle_unsafe_block_sends_event() { - let unsafe_block = - BlockInfo { hash: B256::ZERO, number: 1, parent_hash: B256::ZERO, timestamp: 123 }; - - let mut client = MockClient::new(); - - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, mut rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let result = node.handle_unsafe_block(&unsafe_block).await; - assert!(result.is_ok()); - - let event = rx.recv().await.unwrap(); - match event { - ChainEvent::UnsafeBlock { block } => assert_eq!(block.number, 1), - _ => panic!("Wrong event"), - } - } - - #[tokio::test] - async fn test_handle_derivation_update_sends_event() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, mut rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let derived_ref_pair = DerivedRefPair { - source: BlockInfo::new(B256::from([0u8; 32]), 0, B256::ZERO, 0), - derived: BlockInfo::new(B256::from([1u8; 32]), 1, B256::ZERO, 0), - }; - - let result = node.handle_derivation_update(&derived_ref_pair).await; - assert!(result.is_ok()); - - let 
event = rx.recv().await.unwrap(); - match event { - ChainEvent::DerivedBlock { derived_ref_pair: pair } => { - assert_eq!(pair, derived_ref_pair); - } - _ => panic!("Wrong event"), - } - } - - #[tokio::test] - async fn test_handle_replace_block_sends_event() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, mut rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let replacement = BlockReplacement { - replacement: BlockInfo::new(B256::from([1u8; 32]), 1, B256::ZERO, 0), - invalidated: B256::from([2u8; 32]), - }; - - let result = node.handle_replace_block(&replacement).await; - assert!(result.is_ok()); - - let event = rx.recv().await.unwrap(); - match event { - ChainEvent::BlockReplaced { replacement: rep } => assert_eq!(rep, replacement), - _ => panic!("Wrong event"), - } - } - - #[tokio::test] - async fn test_handle_derivation_origin_update_sends_event() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, mut rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let origin = - BlockInfo { hash: B256::ZERO, number: 10, parent_hash: B256::ZERO, timestamp: 12345 }; - - let result = node.handle_derivation_origin_update(&origin).await; - assert!(result.is_ok()); - - let event = rx.recv().await.unwrap(); - match event { - ChainEvent::DerivationOriginUpdate 
{ origin: block } => assert_eq!(block.number, 10), - _ => panic!("Wrong event"), - } - } - - #[tokio::test] - async fn test_handle_exhaust_l1_calls_provide_l1_on_success() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client.expect_provide_l1().times(1).returning(|_| Ok(())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - - let derived_ref_pair = DerivedRefPair { - source: BlockInfo { - hash: B256::from_hex( - "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", - ) - .unwrap(), - number: 5, - parent_hash: B256::from([14u8; 32]), - timestamp: 300, - }, - derived: BlockInfo { - hash: B256::from([11u8; 32]), - number: 40, - parent_hash: B256::from([12u8; 32]), - timestamp: 301, - }, - }; - - let next_block = r#"{ - "number": "6", - "hash": "0xd5f1812548be429cbdc6376b29611fc49e06f1359758c4ceaaa3b393e2239f9c", - "mixHash": "0x24900fb3da77674a861c428429dce0762707ecb6052325bbd9b3c64e74b5af9d", - "parentHash": "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", - "nonce": "0x378da40ff335b070", - "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - "logsBloom": "0x00000000000000100000004080000000000500000000000000020000100000000800001000000004000001000000000000000800040010000020100000000400000010000000000000000040000000000000040000000000000000000000000000000400002400000000000000000000000000000004000004000000000000840000000800000080010004000000001000000800000000000000000000000000000000000800000000000040000000020000000000000000000800000400000000000000000000000600000400000000002000000000000000000000004000000000000000100000000000000000000000000000000000040000900010000000", - "transactionsRoot":"0x4d0c8e91e16bdff538c03211c5c73632ed054d00a7e210c0eb25146c20048126", - "stateRoot": "0x91309efa7e42c1f137f31fe9edbe88ae087e6620d0d59031324da3e2f4f93233", - "receiptsRoot": 
"0x68461ab700003503a305083630a8fb8d14927238f0bc8b6b3d246c0c64f21f4a", - "miner":"0xb42b6c4a95406c78ff892d270ad20b22642e102d", - "difficulty": "0x66e619a", - "totalDifficulty": "0x1e875d746ae", - "extraData": "0xd583010502846765746885676f312e37856c696e7578", - "size": "0x334", - "gasLimit": "0x47e7c4", - "gasUsed": "0x37993", - "timestamp": "0x5835c54d", - "uncles": [], - "transactions": [ - "0xa0807e117a8dd124ab949f460f08c36c72b710188f01609595223b325e58e0fc", - "0xeae6d797af50cb62a596ec3939114d63967c374fa57de9bc0f4e2b576ed6639d" - ], - "baseFeePerGas": "0x7", - "withdrawalsRoot": "0x7a4ecf19774d15cf9c15adf0dd8e8a250c128b26c9e2ab2a08d6c9c8ffbd104f", - "withdrawals": [], - "blobGasUsed": "0x0", - "excessBlobGas": "0x0", - "parentBeaconBlockRoot": "0x95c4dbd5b19f6fe3cbc3183be85ff4e85ebe75c5b4fc911f1c91e5b7a554a685" - }"#; - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - - asserter.push(MockResponse::Success(serde_json::from_str(next_block).unwrap())); - - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let result = node.handle_exhaust_l1(&derived_ref_pair).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_handle_exhaust_l1_calls_provide_l1_on_parent_hash_mismatch() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client.expect_provide_l1().times(1).returning(|_| Ok(())); // Should be called - - let client = Arc::new(client); - let db = MockDb::new(); - - let derived_ref_pair = DerivedRefPair { - source: BlockInfo { - hash: B256::from([1u8; 32]), // This will NOT match parent_hash below - number: 5, - parent_hash: B256::from([14u8; 32]), - timestamp: 300, - }, - derived: BlockInfo { - hash: B256::from([11u8; 32]), - number: 40, - parent_hash: B256::from([12u8; 32]), - timestamp: 301, - }, - }; 
- - // Block with mismatched parent_hash - let next_block = r#"{ - "number": "10", - "hash": "0xd5f1812548be429cbdc6376b29611fc49e06f1359758c4ceaaa3b393e2239f9c", - "mixHash": "0x24900fb3da77674a861c428429dce0762707ecb6052325bbd9b3c64e74b5af9d", - "parentHash": "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", - "nonce": "0x378da40ff335b070", - "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - "logsBloom": "0x00000000000000100000004080000000000500000000000000020000100000000800001000000004000001000000000000000800040010000020100000000400000010000000000000000040000000000000040000000000000000000000000000000400002400000000000000000000000000000004000004000000000000840000000800000080010004000000001000000800000000000000000000000000000000000800000000000040000000020000000000000000000800000400000000000000000000000600000400000000002000000000000000000000004000000000000000100000000000000000000000000000000000040000900010000000", - "transactionsRoot":"0x4d0c8e91e16bdff538c03211c5c73632ed054d00a7e210c0eb25146c20048126", - "stateRoot": "0x91309efa7e42c1f137f31fe9edbe88ae087e6620d0d59031324da3e2f4f93233", - "receiptsRoot": "0x68461ab700003503a305083630a8fb8d14927238f0bc8b6b3d246c0c64f21f4a", - "miner":"0xb42b6c4a95406c78ff892d270ad20b22642e102d", - "difficulty": "0x66e619a", - "totalDifficulty": "0x1e875d746ae", - "extraData": "0xd583010502846765746885676f312e37856c696e7578", - "size": "0x334", - "gasLimit": "0x47e7c4", - "gasUsed": "0x37993", - "timestamp": "0x5835c54d", - "uncles": [], - "transactions": [ - "0xa0807e117a8dd124ab949f460f08c36c72b710188f01609595223b325e58e0fc", - "0xeae6d797af50cb62a596ec3939114d63967c374fa57de9bc0f4e2b576ed6639d" - ], - "baseFeePerGas": "0x7", - "withdrawalsRoot": "0x7a4ecf19774d15cf9c15adf0dd8e8a250c128b26c9e2ab2a08d6c9c8ffbd104f", - "withdrawals": [], - "blobGasUsed": "0x0", - "excessBlobGas": "0x0", - "parentBeaconBlockRoot": 
"0x95c4dbd5b19f6fe3cbc3183be85ff4e85ebe75c5b4fc911f1c91e5b7a554a685" - }"#; - - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - - asserter.push(MockResponse::Success(serde_json::from_str(next_block).unwrap())); - - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), Arc::new(db), l1_provider, tx); - - let result = node.handle_exhaust_l1(&derived_ref_pair).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_handle_reset_calls_resetter() { - let mut client = MockClient::new(); - client.expect_chain_id().times(2).returning(|| Ok(ChainId::from(42u64))); - client.expect_reset_pre_interop().times(1).returning(|| Ok(())); - - let mut db = MockDb::new(); - db.expect_latest_derivation_state() - .times(1) - .returning(|| Err(StorageError::DatabaseNotInitialised)); - - let client = Arc::new(client); - let db = Arc::new(db); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - // Just check that it completes without error - let result = node.handle_reset("reset_id").await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_block_by_number_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client.expect_block_ref_by_number().with(eq(10)).times(1).returning(|_| { - Ok(BlockInfo { - hash: B256::from([1u8; 32]), - number: 10, - parent_hash: B256::from([2u8; 32]), - timestamp: 12345, - }) - }); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = 
RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let block = node.block_by_number(10).await.unwrap(); - assert_eq!(block.number, 10); - assert_eq!(block.hash, B256::from([1u8; 32])); - } - - #[tokio::test] - async fn test_fetch_receipts_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_fetch_receipts() - .withf(|hash| *hash == B256::from([1u8; 32])) - .times(1) - .returning(|_| Ok(Receipts::default())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let receipts = node.fetch_receipts(B256::from([1u8; 32])).await.unwrap(); - assert!(receipts.is_empty()); - } - - #[tokio::test] - async fn test_output_v0_at_timestamp_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_output_v0_at_timestamp() - .with(eq(12345)) - .times(1) - .returning(|_| Ok(OutputV0::default())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let output = node.output_v0_at_timestamp(12345).await.unwrap(); - assert_eq!(output, OutputV0::default()); - } - - #[tokio::test] - async fn test_pending_output_v0_at_timestamp_delegates_to_client() { - let mut client = 
MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_pending_output_v0_at_timestamp() - .with(eq(54321)) - .times(1) - .returning(|_| Ok(OutputV0::default())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let output = node.pending_output_v0_at_timestamp(54321).await.unwrap(); - assert_eq!(output, OutputV0::default()); - } - - #[tokio::test] - async fn test_l2_block_ref_by_timestamp_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client.expect_l2_block_ref_by_timestamp().with(eq(11111)).times(1).returning(|_| { - Ok(BlockInfo { - hash: B256::from([9u8; 32]), - number: 99, - parent_hash: B256::from([8u8; 32]), - timestamp: 11111, - }) - }); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let block = node.l2_block_ref_by_timestamp(11111).await.unwrap(); - assert_eq!(block.number, 99); - assert_eq!(block.timestamp, 11111); - } - - #[tokio::test] - async fn test_update_finalized_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_update_finalized() - .withf(|block_id| block_id.number == 100) - .times(1) - .returning(|_| Ok(())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let 
asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let block_id = BlockNumHash { number: 100, hash: B256::from([1u8; 32]) }; - let result = node.update_finalized(block_id).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_update_cross_unsafe_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_update_cross_unsafe() - .withf(|block_id| block_id.number == 200) - .times(1) - .returning(|_| Ok(())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let block_id = BlockNumHash { number: 200, hash: B256::from([2u8; 32]) }; - let result = node.update_cross_unsafe(block_id).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_update_cross_safe_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_update_cross_safe() - .withf(|source, derived| source.number == 300 && derived.number == 301) - .times(1) - .returning(|_, _| Ok(())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let source_block_id = 
BlockNumHash { number: 300, hash: B256::from([3u8; 32]) }; - let derived_block_id = BlockNumHash { number: 301, hash: B256::from([4u8; 32]) }; - let result = node.update_cross_safe(source_block_id, derived_block_id).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_invalidate_block_delegates_to_client() { - let mut client = MockClient::new(); - client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); - client - .expect_invalidate_block() - .withf(|seal| seal.number == 400) - .times(1) - .returning(|_| Ok(())); - - let client = Arc::new(client); - let db = Arc::new(MockDb::new()); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let block_seal = BlockSeal { number: 400, hash: B256::from([5u8; 32]), timestamp: 0 }; - let result = node.invalidate_block(block_seal).await; - assert!(result.is_ok()); - } - - #[tokio::test] - async fn test_reset_calls_resetter() { - let mut client = MockClient::new(); - client.expect_chain_id().times(2).returning(|| Ok(ChainId::from(42u64))); - client.expect_reset_pre_interop().times(1).returning(|| Ok(())); - - let mut db = MockDb::new(); - db.expect_latest_derivation_state() - .times(1) - .returning(|| Err(StorageError::DatabaseNotInitialised)); - - let client = Arc::new(client); - let db = Arc::new(db); - let asserter = Asserter::new(); - let transport = MockTransport::new(asserter.clone()); - let l1_provider = RootProvider::<Ethereum>::new(RpcClient::new(transport, false)); - let (tx, _rx) = mpsc::channel(10); - let node = ManagedNode::new(client.clone(), db, l1_provider, tx); - - let result = node.reset().await; - assert!(result.is_ok()); - } -} diff --git a/kona/crates/supervisor/service/src/actors/node.rs b/kona/crates/supervisor/service/src/actors/node.rs 
deleted file mode 100644 index 445bde8bb6e..00000000000 --- a/kona/crates/supervisor/service/src/actors/node.rs +++ /dev/null @@ -1,362 +0,0 @@ -use anyhow::Error; -use async_trait::async_trait; -use derive_more::Constructor; -use kona_interop::ManagedEvent; -use kona_supervisor_core::syncnode::{ - ManagedNodeClient, ManagedNodeCommand, ManagedNodeController, SubscriptionHandler, -}; -use std::sync::Arc; -use thiserror::Error; -use tokio::sync::mpsc; -use tokio_util::sync::CancellationToken; -use tracing::{error, info, warn}; - -use crate::{SupervisorActor, actors::utils::spawn_task_with_retry}; - -/// Actor for managing a node in the supervisor environment. -#[derive(Debug, Constructor)] -pub struct ManagedNodeActor<C, N> { - client: Arc<C>, - node: Arc<N>, - command_rx: mpsc::Receiver<ManagedNodeCommand>, - cancel_token: CancellationToken, -} - -#[async_trait] -impl<C, N> SupervisorActor for ManagedNodeActor<C, N> -where - C: ManagedNodeClient + 'static, - N: ManagedNodeController + SubscriptionHandler + 'static, -{ - type InboundEvent = ManagedNodeCommand; - type Error = SupervisorRpcActorError; - - async fn start(mut self) -> Result<(), Self::Error> { - // Task 1: Subscription handling - let node = self.node.clone(); - let client = self.client.clone(); - let cancel_token = self.cancel_token.clone(); - - spawn_task_with_retry( - move || { - let handler = node.clone(); - let client = client.clone(); - - async move { run_subscription_task(client, handler).await } - }, - cancel_token, - usize::MAX, - ); - - // Task 2: Command handling - let node = self.node.clone(); - let cancel_token = self.cancel_token.clone(); - run_command_task(node, self.command_rx, cancel_token).await?; - Ok(()) - } -} - -async fn run_command_task<N>( - node: Arc<N>, - mut command_rx: mpsc::Receiver<ManagedNodeCommand>, - cancel_token: CancellationToken, -) -> Result<(), SupervisorRpcActorError> -where - N: ManagedNodeController + SubscriptionHandler + 'static, -{ - info!(target: 
"supervisor::syncnode_actor", "Starting command task for managed node"); - loop { - tokio::select! { - _ = cancel_token.cancelled() => { - info!(target: "supervisor::syncnode", "Cancellation requested, shutting down command task"); - return Ok(()); - } - maybe_cmd = command_rx.recv() => { - match maybe_cmd { - Some(cmd) => { - match cmd { - ManagedNodeCommand::UpdateFinalized { block_id } => { - let result = node.update_finalized(block_id).await; - if let Err(err) = result { - warn!( - target: "supervisor::syncnode", - %err, - "Failed to update finalized block" - ); - } - } - ManagedNodeCommand::UpdateCrossUnsafe { block_id } => { - let result = node.update_cross_unsafe(block_id).await; - if let Err(err) = result { - warn!( - target: "supervisor::syncnode", - %err, - "Failed to update cross unsafe block" - ); - } - } - ManagedNodeCommand::UpdateCrossSafe { source_block_id, derived_block_id } => { - let result = node.update_cross_safe(source_block_id, derived_block_id).await; - if let Err(err) = result { - warn!( - target: "supervisor::syncnode", - %err, - "Failed to update cross safe block" - ); - } - } - ManagedNodeCommand::Reset {} => { - let result = node.reset().await; - if let Err(err) = result { - warn!( - target: "supervisor::syncnode", - %err, - "Failed to reset managed node" - ); - } - } - ManagedNodeCommand::InvalidateBlock { seal } => { - let result = node.invalidate_block(seal).await; - if let Err(err) = result { - warn!( - target: "supervisor::syncnode", - %err, - "Failed to invalidate block" - ); - } - } - } - } - None => { - info!(target: "supervisor::syncnode", "Command channel closed, shutting down command task"); - return Err(SupervisorRpcActorError::CommandReceiverClosed); - } - } - } - } - } -} - -async fn run_subscription_task<C: ManagedNodeClient, N: SubscriptionHandler>( - client: Arc<C>, - handler: Arc<N>, -) -> Result<(), Error> { - info!(target: "supervisor::syncnode", "Starting subscription task for managed node"); - - let mut 
subscription = client.subscribe_events().await.inspect_err(|err| { - error!( - target: "supervisor::syncnode", - %err, - "Failed to subscribe to node events" - ); - })?; - - loop { - tokio::select! { - incoming_event = subscription.next() => { - match incoming_event { - Some(Ok(subscription_event)) => { - if let Some(event) = subscription_event.data { - handle_subscription_event(&handler, event).await; - } - } - Some(Err(err)) => { - error!( - target: "supervisor::managed_event_task", - %err, - "Error in event deserialization" - ); - return Err(err.into()); - } - None => { - warn!(target: "supervisor::managed_event_task", "Subscription closed by server"); - client.reset_ws_client().await; - break; - } - } - } - } - } - Ok(()) -} - -async fn handle_subscription_event<N: SubscriptionHandler>(handler: &Arc<N>, event: ManagedEvent) { - if let Some(reset_id) = &event.reset { - if let Err(err) = handler.handle_reset(reset_id).await { - warn!( - target: "supervisor::syncnode", - %err, - %reset_id, - "Failed to handle reset event" - ); - } - } - - if let Some(unsafe_block) = &event.unsafe_block { - if let Err(err) = handler.handle_unsafe_block(unsafe_block).await { - warn!( - target: "supervisor::syncnode", - %err, - %unsafe_block, - "Failed to handle unsafe block event" - ); - } - } - - if let Some(derived_ref_pair) = &event.derivation_update { - if event.derivation_origin_update.is_none() { - if let Err(err) = handler.handle_derivation_update(derived_ref_pair).await { - warn!( - target: "supervisor::syncnode", - %err, - %derived_ref_pair, - "Failed to handle derivation update event" - ); - } - } - } - - if let Some(origin) = &event.derivation_origin_update { - if let Err(err) = handler.handle_derivation_origin_update(origin).await { - warn!( - target: "supervisor::syncnode", - %err, - %origin, - "Failed to handle derivation origin update event" - ); - } - } - - if let Some(derived_ref_pair) = &event.exhaust_l1 { - if let Err(err) = 
handler.handle_exhaust_l1(derived_ref_pair).await { - warn!( - target: "supervisor::syncnode", - %err, - %derived_ref_pair, - "Failed to handle L1 exhaust event" - ); - } - } - - if let Some(replacement) = &event.replace_block { - if let Err(err) = handler.handle_replace_block(replacement).await { - warn!( - target: "supervisor::syncnode", - %err, - %replacement, - "Failed to handle block replacement event" - ); - } - } -} - -#[derive(Debug, Error)] -pub enum SupervisorRpcActorError { - /// Error indicating that command receiver is closed. - #[error("managed node command receiver closed")] - CommandReceiverClosed, -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_eips::BlockNumHash; - use alloy_primitives::{B256, ChainId}; - use jsonrpsee::core::client::Subscription; - use kona_interop::{BlockReplacement, DerivedRefPair}; - use kona_protocol::BlockInfo; - use kona_supervisor_core::syncnode::{ - ClientError, ManagedNodeClient, ManagedNodeCommand, ManagedNodeController, - ManagedNodeError, SubscriptionHandler, - }; - use kona_supervisor_types::{BlockSeal, OutputV0, Receipts, SubscriptionEvent}; - use mockall::{mock, predicate::*}; - use std::sync::Arc; - use tokio::sync::mpsc; - use tokio_util::sync::CancellationToken; - - // Mock the ManagedNodeController trait - mock! 
{ - #[derive(Debug)] - pub Node {} - - #[async_trait::async_trait] - impl ManagedNodeController for Node { - async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; - async fn update_cross_unsafe(&self, cross_unsafe_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; - async fn update_cross_safe(&self,source_block_id: BlockNumHash,derived_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; - async fn reset(&self) -> Result<(), ManagedNodeError>; - async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ManagedNodeError>; - } - - #[async_trait::async_trait] - impl SubscriptionHandler for Node { - async fn handle_exhaust_l1(&self, derived_ref_pair: &DerivedRefPair) -> Result<(), ManagedNodeError>; - async fn handle_reset(&self, reset_id: &str) -> Result<(), ManagedNodeError>; - async fn handle_unsafe_block(&self, block: &BlockInfo) -> Result<(), ManagedNodeError>; - async fn handle_derivation_update(&self, derived_ref_pair: &DerivedRefPair) -> Result<(), ManagedNodeError>; - async fn handle_replace_block(&self, replacement: &BlockReplacement) -> Result<(), ManagedNodeError>; - async fn handle_derivation_origin_update(&self, origin: &BlockInfo) -> Result<(), ManagedNodeError>; - } - } - - mock! 
{ - #[derive(Debug)] - pub NodeClient {} - - #[async_trait::async_trait] - impl ManagedNodeClient for NodeClient { - async fn chain_id(&self) -> Result<ChainId, ClientError>; - async fn subscribe_events(&self) -> Result<Subscription<SubscriptionEvent>, ClientError>; - async fn fetch_receipts(&self, block_hash: B256) -> Result<Receipts, ClientError>; - async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result<OutputV0, ClientError>; - async fn pending_output_v0_at_timestamp(&self, timestamp: u64)-> Result<OutputV0, ClientError>; - async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result<BlockInfo, ClientError>; - async fn block_ref_by_number(&self, block_number: u64) -> Result<BlockInfo, ClientError>; - async fn reset_pre_interop(&self) -> Result<(), ClientError>; - async fn reset( - &self, - unsafe_id: BlockNumHash, - cross_unsafe_id: BlockNumHash, - local_safe_id: BlockNumHash, - cross_safe_id: BlockNumHash, - finalised_id: BlockNumHash, - ) -> Result<(), ClientError>; - async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; - async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError>; - async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn update_cross_unsafe(&self,cross_unsafe_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn update_cross_safe(&self,source_block_id: BlockNumHash,derived_block_id: BlockNumHash) -> Result<(), ClientError>; - async fn reset_ws_client(&self); - } - } - - #[tokio::test] - async fn test_run_command_task_update_finalized_and_reset() { - let mut mock_node = MockNode::new(); - mock_node.expect_update_finalized().times(1).returning(|_| Ok(())); - mock_node.expect_reset().times(1).returning(|| Ok(())); - - let node = Arc::new(mock_node); - let (tx, rx) = mpsc::channel(10); - let cancel_token = CancellationToken::new(); - - // Spawn the command task - let handle = tokio::spawn(super::run_command_task(node.clone(), rx, 
cancel_token.clone())); - - // Send commands - tx.send(ManagedNodeCommand::UpdateFinalized { - block_id: BlockNumHash::new(1, B256::random()), - }) - .await - .unwrap(); - tx.send(ManagedNodeCommand::Reset {}).await.unwrap(); - - // Drop the sender to close the channel and end the task - drop(tx); - - // Wait for the task to finish - let result = handle.await.unwrap(); - assert!(matches!(result, Err(SupervisorRpcActorError::CommandReceiverClosed))); - } -} diff --git a/kona/crates/supervisor/storage/Cargo.toml b/kona/crates/supervisor/storage/Cargo.toml deleted file mode 100644 index ea995d1042b..00000000000 --- a/kona/crates/supervisor/storage/Cargo.toml +++ /dev/null @@ -1,54 +0,0 @@ -[package] -name = "kona-supervisor-storage" -version = "0.1.0" - -edition.workspace = true -license.workspace = true -rust-version.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -keywords.workspace = true -categories.workspace = true -exclude.workspace = true - -[dependencies] -# Workspace -kona-protocol.workspace = true -kona-interop.workspace = true -kona-supervisor-types.workspace = true -kona-supervisor-metrics.workspace = true - -# Alloy -alloy-primitives = { workspace = true, features = ["map", "rlp", "serde", "rand"] } -alloy-eips = { workspace = true } - -# Op-Alloy -op-alloy-consensus.workspace = true - -# Misc -serde = { workspace = true, features = ["derive"] } -derive_more.workspace = true -bytes.workspace = true -modular-bitfield.workspace = true -thiserror.workspace = true -tracing.workspace = true -eyre.workspace = true -metrics.workspace = true - -#reth -reth-db-api = { workspace = true } -reth-db = { workspace = true } -reth-codecs = { workspace = true } - -# HTTP client and TLS for remote signer -tokio = { workspace = true, features = ["full"] } - -[dev-dependencies] -test-fuzz = { workspace = true } -tempfile = { workspace = true } -tokio.workspace = true -kona-cli.workspace = true - -[lints] -workspace = true 
diff --git a/kona/crates/supervisor/storage/src/error.rs b/kona/crates/supervisor/storage/src/error.rs deleted file mode 100644 index f7b0f2b75f0..00000000000 --- a/kona/crates/supervisor/storage/src/error.rs +++ /dev/null @@ -1,97 +0,0 @@ -use alloy_eips::BlockNumHash; -use reth_db::DatabaseError; -use thiserror::Error; - -/// Errors that may occur while interacting with supervisor log storage. -/// -/// This enum is used across all implementations of the Storage traits. -#[derive(Debug, Error)] -pub enum StorageError { - /// Represents a database error that occurred while interacting with storage. - #[error(transparent)] - Database(#[from] DatabaseError), - - /// Represents an error that occurred while initializing the database. - #[error(transparent)] - DatabaseInit(#[from] eyre::Report), - - /// Represents an error that occurred while writing to the database. - #[error("lock poisoned")] - LockPoisoned, - - /// The expected entry was not found in the database. - #[error(transparent)] - EntryNotFound(#[from] EntryNotFoundError), - - /// Represents an error that occurred while getting data that is not yet available. - #[error("data not yet available")] - FutureData, - - /// Represents an error that occurred when database is not initialized. - #[error("database not initialized")] - DatabaseNotInitialised, - - /// Represents a conflict occurred while attempting to write to the database. - #[error("conflicting data")] - ConflictError, - - /// Represents an error that occurred while writing to log database. - #[error("latest stored block is not parent of the incoming block")] - BlockOutOfOrder, - - /// Represents an error that occurred when there is inconsistency in log storage - #[error("reorg required due to inconsistent storage state")] - ReorgRequired, - - /// Represents an error that occurred when attempting to rewind log storage beyond the local - /// safe head. - #[error("rewinding log storage beyond local safe head. 
to: {to}, local_safe: {local_safe}")] - RewindBeyondLocalSafeHead { - /// The target block number to rewind to. - to: u64, - /// The local safe head block number. - local_safe: u64, - }, -} - -impl PartialEq for StorageError { - fn eq(&self, other: &Self) -> bool { - use StorageError::*; - match (self, other) { - (Database(a), Database(b)) => a == b, - (DatabaseInit(a), DatabaseInit(b)) => format!("{a}") == format!("{b}"), - (EntryNotFound(a), EntryNotFound(b)) => a == b, - (DatabaseNotInitialised, DatabaseNotInitialised) | (ConflictError, ConflictError) => { - true - } - _ => false, - } - } -} - -impl Eq for StorageError {} - -/// Entry not found error. -#[derive(Debug, Error, PartialEq, Eq)] -pub enum EntryNotFoundError { - /// No derived blocks found for given source block. - #[error("no derived blocks for source block, number: {}, hash: {}", .0.number, .0.hash)] - MissingDerivedBlocks(BlockNumHash), - - /// Expected source block not found. - #[error("source block not found, number: {0}")] - SourceBlockNotFound(u64), - - /// Expected derived block not found. - #[error("derived block not found, number: {0}")] - DerivedBlockNotFound(u64), - - /// Expected log not found. - #[error("log not found at block {block_number} index {log_index}")] - LogNotFound { - /// Block number. - block_number: u64, - /// Log index within the block. - log_index: u32, - }, -} diff --git a/kona/crates/supervisor/storage/src/lib.rs b/kona/crates/supervisor/storage/src/lib.rs deleted file mode 100644 index f7022421f65..00000000000 --- a/kona/crates/supervisor/storage/src/lib.rs +++ /dev/null @@ -1,44 +0,0 @@ -//! Persistent storage for the Supervisor. -//! -//! This crate provides structured, append-only storage for the Supervisor, -//! exposing high-level APIs to write and query logs, block metadata, and -//! other execution states. -//! -//! The storage system is built on top of [`reth-db`], using MDBX, -//! and defines schemas for supervisor-specific data like: -//! 
- L2 log entries -//! - Block ancestry metadata -//! - Source and Derived Blocks -//! - Chain heads for safety levels: **SAFE**, **UNSAFE**, and **CROSS-SAFE** -//! -//! -//! ## Capabilities -//! -//! - Append logs emitted by L2 execution -//! - Look up logs by block number and index -//! - Rewind logs during reorgs -//! - Track sealed blocks and ancestry metadata - -pub mod models; -pub use models::SourceBlockTraversal; - -mod error; -pub use error::{EntryNotFoundError, StorageError}; - -mod providers; - -mod chaindb; -pub use chaindb::ChainDb; - -mod metrics; -pub(crate) use metrics::Metrics; - -mod chaindb_factory; -pub use chaindb_factory::ChainDbFactory; - -mod traits; -pub use traits::{ - CrossChainSafetyProvider, DbReader, DerivationStorage, DerivationStorageReader, - DerivationStorageWriter, FinalizedL1Storage, HeadRefStorage, HeadRefStorageReader, - HeadRefStorageWriter, LogStorage, LogStorageReader, LogStorageWriter, StorageRewinder, -}; diff --git a/kona/crates/supervisor/storage/src/metrics.rs b/kona/crates/supervisor/storage/src/metrics.rs deleted file mode 100644 index 5aa8b0b9b12..00000000000 --- a/kona/crates/supervisor/storage/src/metrics.rs +++ /dev/null @@ -1,118 +0,0 @@ -use alloy_primitives::ChainId; - -/// Container for ChainDb metrics. 
-#[derive(Debug, Clone)] -pub(crate) struct Metrics; - -// todo: implement this using the reth metrics for tables -impl Metrics { - pub(crate) const STORAGE_REQUESTS_SUCCESS_TOTAL: &'static str = - "kona_supervisor_storage_success_total"; - pub(crate) const STORAGE_REQUESTS_ERROR_TOTAL: &'static str = - "kona_supervisor_storage_error_total"; - pub(crate) const STORAGE_REQUEST_DURATION_SECONDS: &'static str = - "kona_supervisor_storage_duration_seconds"; - - pub(crate) const STORAGE_METHOD_DERIVED_TO_SOURCE: &'static str = "derived_to_source"; - pub(crate) const STORAGE_METHOD_LATEST_DERIVED_BLOCK_AT_SOURCE: &'static str = - "latest_derived_block_at_source"; - pub(crate) const STORAGE_METHOD_LATEST_DERIVATION_STATE: &'static str = - "latest_derivation_state"; - pub(crate) const STORAGE_METHOD_GET_SOURCE_BLOCK: &'static str = "get_source_block"; - pub(crate) const STORAGE_METHOD_GET_ACTIVATION_BLOCK: &'static str = "get_activation_block"; - pub(crate) const STORAGE_METHOD_INITIALISE_DERIVATION_STORAGE: &'static str = - "initialise_derivation_storage"; - pub(crate) const STORAGE_METHOD_SAVE_DERIVED_BLOCK: &'static str = "save_derived_block"; - pub(crate) const STORAGE_METHOD_SAVE_SOURCE_BLOCK: &'static str = "save_source_block"; - pub(crate) const STORAGE_METHOD_GET_LATEST_BLOCK: &'static str = "get_latest_block"; - pub(crate) const STORAGE_METHOD_GET_BLOCK: &'static str = "get_block"; - pub(crate) const STORAGE_METHOD_GET_LOG: &'static str = "get_log"; - pub(crate) const STORAGE_METHOD_GET_LOGS: &'static str = "get_logs"; - pub(crate) const STORAGE_METHOD_INITIALISE_LOG_STORAGE: &'static str = "initialise_log_storage"; - pub(crate) const STORAGE_METHOD_STORE_BLOCK_LOGS: &'static str = "store_block_logs"; - pub(crate) const STORAGE_METHOD_GET_SAFETY_HEAD_REF: &'static str = "get_safety_head_ref"; - pub(crate) const STORAGE_METHOD_GET_SUPER_HEAD: &'static str = "get_super_head"; - pub(crate) const STORAGE_METHOD_UPDATE_FINALIZED_USING_SOURCE: &'static str = - 
"update_finalized_using_source"; - pub(crate) const STORAGE_METHOD_UPDATE_CURRENT_CROSS_UNSAFE: &'static str = - "update_current_cross_unsafe"; - pub(crate) const STORAGE_METHOD_UPDATE_CURRENT_CROSS_SAFE: &'static str = - "update_current_cross_safe"; - pub(crate) const STORAGE_METHOD_UPDATE_FINALIZED_L1: &'static str = "update_finalized_l1"; - pub(crate) const STORAGE_METHOD_GET_FINALIZED_L1: &'static str = "get_finalized_l1"; - pub(crate) const STORAGE_METHOD_REWIND_LOG_STORAGE: &'static str = "rewind_log_storage"; - pub(crate) const STORAGE_METHOD_REWIND: &'static str = "rewind"; - pub(crate) const STORAGE_METHOD_REWIND_TO_SOURCE: &'static str = "rewind_to_source"; - - pub(crate) fn init(chain_id: ChainId) { - Self::describe(); - Self::zero(chain_id); - } - - fn describe() { - metrics::describe_counter!( - Self::STORAGE_REQUESTS_SUCCESS_TOTAL, - metrics::Unit::Count, - "Total number of successful Kona Supervisor Storage requests" - ); - metrics::describe_counter!( - Self::STORAGE_REQUESTS_ERROR_TOTAL, - metrics::Unit::Count, - "Total number of failed Kona Supervisor Storage requests" - ); - metrics::describe_histogram!( - Self::STORAGE_REQUEST_DURATION_SECONDS, - metrics::Unit::Seconds, - "Duration of Kona Supervisor Storage requests" - ); - } - - fn zero_storage_methods(chain_id: ChainId, method_name: &'static str) { - metrics::counter!( - Self::STORAGE_REQUESTS_SUCCESS_TOTAL, - "method" => method_name, - "chain_id" => chain_id.to_string() - ) - .increment(0); - - metrics::counter!( - Self::STORAGE_REQUESTS_ERROR_TOTAL, - "method" => method_name, - "chain_id" => chain_id.to_string() - ) - .increment(0); - - metrics::histogram!( - Self::STORAGE_REQUEST_DURATION_SECONDS, - "method" => method_name, - "chain_id" => chain_id.to_string() - ) - .record(0.0); - } - - fn zero(chain_id: ChainId) { - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_DERIVED_TO_SOURCE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_LATEST_DERIVED_BLOCK_AT_SOURCE); - 
Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_LATEST_DERIVATION_STATE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SOURCE_BLOCK); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_INITIALISE_DERIVATION_STORAGE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_SAVE_DERIVED_BLOCK); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_SAVE_SOURCE_BLOCK); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LATEST_BLOCK); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_BLOCK); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LOG); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LOGS); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_INITIALISE_LOG_STORAGE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_STORE_BLOCK_LOGS); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SAFETY_HEAD_REF); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SUPER_HEAD); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_FINALIZED_USING_SOURCE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_CURRENT_CROSS_UNSAFE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_CURRENT_CROSS_SAFE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_FINALIZED_L1); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_FINALIZED_L1); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND_LOG_STORAGE); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND); - Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND_TO_SOURCE); - } -} diff --git a/kona/crates/supervisor/storage/src/traits.rs b/kona/crates/supervisor/storage/src/traits.rs deleted file mode 100644 index b7a527ccfa1..00000000000 --- a/kona/crates/supervisor/storage/src/traits.rs +++ /dev/null @@ -1,475 +0,0 @@ -use crate::StorageError; -use 
alloy_eips::eip1898::BlockNumHash; -use alloy_primitives::ChainId; -use kona_interop::DerivedRefPair; -use kona_protocol::BlockInfo; -use kona_supervisor_types::{Log, SuperHead}; -use op_alloy_consensus::interop::SafetyLevel; -use std::fmt::Debug; - -/// Provides an interface for supervisor storage to manage source and derived blocks. -/// -/// Defines methods to retrieve derived block information, -/// enabling the supervisor to track the derivation progress. -/// -/// Implementations are expected to provide persistent and thread-safe access to block data. -pub trait DerivationStorageReader: Debug { - /// Gets the source [`BlockInfo`] for a given derived block [`BlockNumHash`]. - /// - /// NOTE: [`LocalUnsafe`] block is not pushed to L1 yet, hence it cannot be part of derivation - /// storage. - /// - /// # Arguments - /// * `derived_block_id` - The identifier (number and hash) of the derived (L2) block. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the source block information if it exists. - /// * `Err(StorageError)` if there is an issue retrieving the source block. - /// - /// [`LocalUnsafe`]: SafetyLevel::LocalUnsafe - fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result<BlockInfo, StorageError>; - - /// Gets the latest derived [`BlockInfo`] associated with the given source block - /// [`BlockNumHash`]. - /// - /// # Arguments - /// * `source_block_id` - The identifier (number and hash) of the L1 source block. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the latest derived block information if it exists. - /// * `Err(StorageError)` if there is an issue retrieving the derived block. - fn latest_derived_block_at_source( - &self, - source_block_id: BlockNumHash, - ) -> Result<BlockInfo, StorageError>; - - /// Gets the latest derivation state [`DerivedRefPair`] from the storage, which includes the - /// latest source block and the latest derived block. 
- /// - /// # Returns - /// - /// * `Ok(DerivedRefPair)` containing the latest derived block pair if it exists. - /// * `Err(StorageError)` if there is an issue retrieving the pair. - fn latest_derivation_state(&self) -> Result<DerivedRefPair, StorageError>; - - /// Gets the source block for the given source block number. - /// - /// # Arguments - /// * `source_block_number` - The number of the source block to retrieve. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the source block information if it exists. - /// * `Err(StorageError)` if there is an issue retrieving the source block. - fn get_source_block(&self, source_block_number: u64) -> Result<BlockInfo, StorageError>; - - /// Gets the interop activation [`BlockInfo`]. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the activation block information if it exists. - /// * `Err(StorageError)` if there is an issue retrieving the activation block. - fn get_activation_block(&self) -> Result<BlockInfo, StorageError>; -} - -/// Provides an interface for supervisor storage to write source and derived blocks. -/// -/// Defines methods to persist derived block information, -/// enabling the supervisor to track the derivation progress. -/// -/// Implementations are expected to provide persistent and thread-safe access to block data. -pub trait DerivationStorageWriter: Debug { - /// Initializes the derivation storage with a given [`DerivedRefPair`]. - /// This method is typically called once to set up the storage with the initial pair. - /// - /// # Arguments - /// * `incoming_pair` - The derived block pair to initialize the storage with. - /// - /// # Returns - /// * `Ok(())` if the storage was successfully initialized. - /// * `Err(StorageError)` if there is an issue initializing the storage. - fn initialise_derivation_storage( - &self, - incoming_pair: DerivedRefPair, - ) -> Result<(), StorageError>; - - /// Saves a [`DerivedRefPair`] to the storage. 
- /// - /// This method is **append-only**: it does not overwrite existing pairs. - /// - If a pair with the same block number already exists and is identical to the incoming pair, - /// the request is silently ignored (idempotent). - /// - If a pair with the same block number exists but differs from the incoming pair, an error - /// is returned to indicate a data inconsistency. - /// - If the pair is new and consistent, it is appended to the storage. - /// - /// Ensures that the latest stored pair is the parent of the incoming pair before saving. - /// - /// # Arguments - /// * `incoming_pair` - The derived block pair to save. - /// - /// # Returns - /// * `Ok(())` if the pair was successfully saved. - /// * `Err(StorageError)` if there is an issue saving the pair. - fn save_derived_block(&self, incoming_pair: DerivedRefPair) -> Result<(), StorageError>; - - /// Saves the latest incoming source [`BlockInfo`] to the storage. - /// - /// This method is **append-only**: it does not overwrite existing source blocks. - /// - If a source block with the same number already exists and is identical to the incoming - /// block, the request is silently ignored (idempotent). - /// - If a source block with the same number exists but differs from the incoming block, an - /// error is returned to indicate a data inconsistency. - /// - If the block is new and consistent, it is appended to the storage. - /// - /// Ensures that the latest stored source block is the parent of the incoming block before - /// saving. - /// - /// # Arguments - /// * `source` - The source block to save. - /// - /// # Returns - /// * `Ok(())` if the source block was successfully saved. - /// * `Err(StorageError)` if there is an issue saving the source block. - fn save_source_block(&self, source: BlockInfo) -> Result<(), StorageError>; -} - -/// Combines both reading and writing capabilities for derivation storage. 
-/// -/// Any type that implements both [`DerivationStorageReader`] and [`DerivationStorageWriter`] -/// automatically implements this trait. -pub trait DerivationStorage: DerivationStorageReader + DerivationStorageWriter {} - -impl<T: DerivationStorageReader + DerivationStorageWriter> DerivationStorage for T {} - -/// Provides an interface for retrieving logs associated with blocks. -/// -/// This trait defines methods to retrieve the latest block, -/// find a block by a specific log, and retrieve logs for a given block number. -/// -/// Implementations are expected to provide persistent and thread-safe access to block logs. -pub trait LogStorageReader: Debug { - /// Retrieves the latest [`BlockInfo`] from the storage. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the latest block information. - /// * `Err(StorageError)` if there is an issue retrieving the latest block. - fn get_latest_block(&self) -> Result<BlockInfo, StorageError>; - - /// Retrieves the [`BlockInfo`] from the storage for a given block number - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the block information. - /// * `Err(StorageError)` if there is an issue retrieving the block. - fn get_block(&self, block_number: u64) -> Result<BlockInfo, StorageError>; - - /// Finds a [`Log`] by block_number and log_index - /// - /// # Arguments - /// * `block_number` - The block number to search for the log. - /// * `log_index` - The index of the log within the block. - /// - /// # Returns - /// * `Ok(Log)` containing the [`Log`] object. - /// * `Err(StorageError)` if there is an issue retrieving the log or if the log is not found. - fn get_log(&self, block_number: u64, log_index: u32) -> Result<Log, StorageError>; - - /// Retrieves all [`Log`]s associated with a specific block number. - /// - /// # Arguments - /// * `block_number` - The block number for which to retrieve logs. - /// - /// # Returns - /// * `Ok(Vec<Log>)` containing the logs associated with the block number. 
- /// * `Err(StorageError)` if there is an issue retrieving the logs or if no logs are found. - fn get_logs(&self, block_number: u64) -> Result<Vec<Log>, StorageError>; -} - -/// Provides an interface for storing blocks and logs associated with blocks. -/// -/// Implementations are expected to provide persistent and thread-safe access to block logs. -pub trait LogStorageWriter: Send + Sync + Debug { - /// Initializes the log storage with a given [`BlockInfo`]. - /// This method is typically called once to set up the storage with the initial block. - /// - /// # Arguments - /// * `block` - The [`BlockInfo`] to initialize the storage with. - /// - /// # Returns - /// * `Ok(())` if the storage was successfully initialized. - /// * `Err(StorageError)` if there is an issue initializing the storage. - fn initialise_log_storage(&self, block: BlockInfo) -> Result<(), StorageError>; - - /// Stores [`BlockInfo`] and [`Log`]s in the storage. - /// This method is append-only and does not overwrite existing logs. - /// Ensures that the latest stored block is the parent of the incoming block before saving. - /// - /// # Arguments - /// * `block` - [`BlockInfo`] to associate with the logs. - /// * `logs` - The [`Log`] events associated with the block. - /// - /// # Returns - /// * `Ok(())` if the logs were successfully stored. - /// * `Err(StorageError)` if there is an issue storing the logs. - fn store_block_logs(&self, block: &BlockInfo, logs: Vec<Log>) -> Result<(), StorageError>; -} - -/// Combines both reading and writing capabilities for log storage. -/// -/// Any type that implements both [`LogStorageReader`] and [`LogStorageWriter`] -/// automatically implements this trait. -pub trait LogStorage: LogStorageReader + LogStorageWriter {} - -impl<T: LogStorageReader + LogStorageWriter> LogStorage for T {} - -/// Provides an interface for retrieving head references. -/// -/// This trait defines methods to manage safety head references for different safety levels. 
-/// Each safety level maintains a reference to a block. -/// -/// Implementations are expected to provide persistent and thread-safe access to safety head -/// references. -pub trait HeadRefStorageReader: Debug { - /// Retrieves the current [`BlockInfo`] for a given [`SafetyLevel`]. - /// - /// # Arguments - /// * `safety_level` - The safety level for which to retrieve the head reference. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the current safety head reference. - /// * `Err(StorageError)` if there is an issue retrieving the reference. - fn get_safety_head_ref(&self, safety_level: SafetyLevel) -> Result<BlockInfo, StorageError>; - - /// Retrieves the super head reference from the storage. - /// - /// # Returns - /// * `Ok(SuperHead)` containing the super head reference. - /// * `Err(StorageError)` if there is an issue retrieving the super head reference. - fn get_super_head(&self) -> Result<SuperHead, StorageError>; -} - -/// Provides an interface for storing head references. -/// -/// This trait defines methods to manage safety head references for different safety levels. -/// Each safety level maintains a reference to a block. -/// -/// Implementations are expected to provide persistent and thread-safe access to safety head -/// references. -pub trait HeadRefStorageWriter: Debug { - /// Updates the finalized head reference using a finalized source(l1) block. - /// - /// # Arguments - /// * `source_block` - The [`BlockInfo`] of the source block to use for the update. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the updated finalized derived(l2) block information. - /// * `Err(StorageError)` if there is an issue updating the finalized head reference. - fn update_finalized_using_source( - &self, - finalized_source_block: BlockInfo, - ) -> Result<BlockInfo, StorageError>; - - /// Updates the current [`CrossUnsafe`](SafetyLevel::CrossUnsafe) head reference in storage. 
- /// - /// Ensures the provided block still exists in log storage and was not removed due to a re-org. - /// If the stored block's hash does not match the provided block, the update is aborted. - /// # Arguments - /// * `block` - The [`BlockInfo`] to set as the head reference - /// - /// # Returns - /// * `Ok(())` if the reference was successfully updated. - /// * `Err(StorageError)` if there is an issue updating the reference. - fn update_current_cross_unsafe(&self, block: &BlockInfo) -> Result<(), StorageError>; - - /// Updates the current [`CrossSafe`](SafetyLevel::CrossSafe) head reference in storage and - /// returns the corresponding derived pair. - /// - /// Ensures the provided block still exists in derivation storage and was not removed due to a - /// re-org. # Arguments - /// * `block` - The [`BlockInfo`] to set as the head reference - /// - /// # Returns - /// * `Ok(DerivedRefPair)` if the reference was successfully updated. - /// * `Err(StorageError)` if there is an issue updating the reference. - fn update_current_cross_safe(&self, block: &BlockInfo) -> Result<DerivedRefPair, StorageError>; -} - -/// Combines both reading and writing capabilities for safety head ref storage. -/// -/// Any type that implements both [`HeadRefStorageReader`] and [`HeadRefStorageWriter`] -/// automatically implements this trait. -pub trait HeadRefStorage: HeadRefStorageReader + HeadRefStorageWriter {} - -impl<T: HeadRefStorageReader + HeadRefStorageWriter> HeadRefStorage for T {} - -/// Provides an interface for managing the finalized L1 block reference in the storage. -/// -/// This trait defines methods to update and retrieve the finalized L1 block reference. -pub trait FinalizedL1Storage { - /// Updates the finalized L1 block reference in the storage. - /// - /// # Arguments - /// * `block` - The new [`BlockInfo`] to set as the finalized L1 block reference. - /// - /// # Returns - /// * `Ok(())` if the reference was successfully updated. 
- /// * `Err(StorageError)` if there is an issue updating the reference. - fn update_finalized_l1(&self, block: BlockInfo) -> Result<(), StorageError>; - - /// Retrieves the finalized L1 block reference from the storage. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the finalized L1 block reference. - /// * `Err(StorageError)` if there is an issue retrieving the reference. - fn get_finalized_l1(&self) -> Result<BlockInfo, StorageError>; -} - -/// Provides an interface for retrieving block and safety information across multiple chains. -/// -/// This trait defines methods required by the cross-chain safety checker to access -/// block metadata, logs, and safe head references for various chains. -pub trait CrossChainSafetyProvider { - /// Retrieves the [`BlockInfo`] for a given block number on the specified chain. - /// - /// # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. - /// * `block_number` - The number of the block to retrieve. - /// - /// # Returns - /// * `Ok(BlockInfo)` containing the block metadata if available. - /// * `Err(StorageError)` if there is an issue fetching the block. - fn get_block(&self, chain_id: ChainId, block_number: u64) -> Result<BlockInfo, StorageError>; - - /// Retrieves a [`Log`] by block_number and log_index - /// - /// # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. - /// * `block_number` - The block number to search for the log. - /// * `log_index` - The index of the log within the block. - /// - /// # Returns - /// * `Ok(Log)` containing the [`Log`] object. - /// * `Err(StorageError)` if there is an issue retrieving the log or if the log is not found. - fn get_log( - &self, - chain_id: ChainId, - block_number: u64, - log_index: u32, - ) -> Result<Log, StorageError>; - - /// Retrieves all logs associated with the specified block on the given chain. - /// - /// # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. 
- /// * `block_number` - The number of the block whose logs should be retrieved. - /// - /// # Returns - /// * `Ok(Vec<Log>)` containing all logs for the block. - /// * `Err(StorageError)` if there is an issue fetching the logs. - fn get_block_logs( - &self, - chain_id: ChainId, - block_number: u64, - ) -> Result<Vec<Log>, StorageError>; - - /// Retrieves the latest known safe head reference for a given chain at the specified safety - /// level. - /// - /// # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. - /// * `level` - The desired [`SafetyLevel`] (e.g., `CrossSafe`, `LocalSafe`). - /// - /// # Returns - /// * `Ok(BlockInfo)` representing the safe head block at the requested safety level. - /// * `Err(StorageError)` if the safe head cannot be retrieved. - fn get_safety_head_ref( - &self, - chain_id: ChainId, - level: SafetyLevel, - ) -> Result<BlockInfo, StorageError>; - - /// Updates the current [`CrossUnsafe`](SafetyLevel::CrossUnsafe) head reference in storage. - /// - /// Ensures the provided block still exists in log storage and was not removed due to a re-org. - /// If the stored block's hash does not match the provided block, the update is aborted. - /// # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. - /// * `block` - The [`BlockInfo`] to set as the head reference - /// - /// # Returns - /// * `Ok(())` if the reference was successfully updated. - /// * `Err(StorageError)` if there is an issue updating the reference. - fn update_current_cross_unsafe( - &self, - chain_id: ChainId, - block: &BlockInfo, - ) -> Result<(), StorageError>; - - /// Updates the current [`CrossSafe`](SafetyLevel::CrossSafe) head reference in storage and - /// returns the corresponding derived pair. - /// - /// Ensures the provided block still exists in derivation storage and was not removed due to a - /// re-org. # Arguments - /// * `chain_id` - The [`ChainId`] of the target chain. 
- /// * `block` - The [`BlockInfo`] to set as the head reference - /// - /// # Returns - /// * `Ok(DerivedRefPair)` if the reference was successfully updated. - /// * `Err(StorageError)` if there is an issue updating the reference. - fn update_current_cross_safe( - &self, - chain_id: ChainId, - block: &BlockInfo, - ) -> Result<DerivedRefPair, StorageError>; -} - -/// Trait for rewinding supervisor-related state in the database. -/// -/// This trait provides an interface to revert persisted log data, derivation records, -/// and safety head references from the latest block back to a specified block number (inclusive). -/// It is typically used during chain reorganizations or when invalid blocks are detected and need -/// to be rolled back. -pub trait StorageRewinder { - /// Rewinds the log storage from the latest block down to the specified block (inclusive). - /// This method ensures that log storage is never rewound to(since it's inclusive) and beyond - /// the local safe head. If the target block is beyond the local safe head, an error is - /// returned. Use [`StorageRewinder::rewind`] to rewind to and beyond the local safe head. - /// - /// # Arguments - /// * `to` - The block id to rewind to. - /// - /// # Errors - /// Returns a [`StorageError`] if any database operation fails during the rewind. - fn rewind_log_storage(&self, to: &BlockNumHash) -> Result<(), StorageError>; - - /// Rewinds all supervisor-managed state (log storage, derivation, and safety head refs) - /// from the latest block back to the given block (inclusive). - /// - /// This method performs a coordinated rewind across all components, ensuring consistency - /// of supervisor state after chain reorganizations or rollback of invalid blocks. - /// - /// # Arguments - /// * `to` - The target block id to rewind to. Rewind is performed from the latest block down to - /// this block. - /// - /// # Errors - /// Returns a [`StorageError`] if any part of the rewind process fails. 
- fn rewind(&self, to: &BlockNumHash) -> Result<(), StorageError>; - - /// Rewinds the storage to a specific source block (inclusive), ensuring that all derived blocks - /// and logs associated with that source blocks are also reverted. - /// - /// # Arguments - /// * `to` - The source block [`BlockNumHash`] to rewind to. - /// - /// # Returns - /// * [`BlockInfo`] of the derived block that was rewound to, or `None` if no derived blocks - /// were found. - /// * `Err(StorageError)` if there is an issue during the rewind operation. - fn rewind_to_source(&self, to: &BlockNumHash) -> Result<Option<BlockInfo>, StorageError>; -} - -/// Combines the reader traits for the database. -/// -/// Any type that implements [`DerivationStorageReader`], [`HeadRefStorageReader`], and -/// [`LogStorageReader`] automatically implements this trait. -pub trait DbReader: DerivationStorageReader + HeadRefStorageReader + LogStorageReader {} - -impl<T: DerivationStorageReader + HeadRefStorageReader + LogStorageReader> DbReader for T {} diff --git a/kona/crates/supervisor/types/src/access_list.rs b/kona/crates/supervisor/types/src/access_list.rs deleted file mode 100644 index 9371e7dbe8b..00000000000 --- a/kona/crates/supervisor/types/src/access_list.rs +++ /dev/null @@ -1,396 +0,0 @@ -use alloy_primitives::{B256, keccak256}; -use thiserror::Error; - -/// A structured representation of a parsed CrossL2Inbox message access entry. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct Access { - /// Full 256-bit chain ID (combined from lookup + extension) - pub chain_id: [u8; 32], - /// Block number in the source chain - pub block_number: u64, - /// Timestamp of the message's block - pub timestamp: u64, - /// Log index of the message within the block - pub log_index: u32, - /// Provided checksum entry (prefix 0x03) - pub checksum: B256, -} - -impl Access { - /// Constructs a new [`Access`] from a `LookupEntry`, optional `ChainIdExtensionEntry`, - /// and a `ChecksumEntry`. 
Used internally by the parser. - fn from_entries( - lookup: LookupEntry, - chain_id_ext: Option<ChainIdExtensionEntry>, - checksum: ChecksumEntry, - ) -> Self { - let mut chain_id = [0u8; 32]; - - if let Some(ext) = chain_id_ext { - chain_id[0..24].copy_from_slice(&ext.upper_bytes); - } - - chain_id[24..32].copy_from_slice(&lookup.chain_id_low); - - Self { - chain_id, - block_number: lookup.block_number, - timestamp: lookup.timestamp, - log_index: lookup.log_index, - checksum: checksum.raw, - } - } - - /// Recomputes the checksum for this access entry. - /// - /// This follows the spec: - /// - `idPacked = 12 zero bytes ++ block_number ++ timestamp ++ log_index` - /// - `idLogHash = keccak256(log_hash ++ idPacked)` - /// - `bareChecksum = keccak256(idLogHash ++ chain_id)` - /// - Prepend 0x03 to `bareChecksum[1..]` - /// - /// Returns the full 32-byte checksum with prefix 0x03. - /// - /// Reference: [Checksum Calculation](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/predeploys.md#type-3-checksum) - pub fn recompute_checksum(&self, log_hash: &B256) -> B256 { - // Step 1: idPacked = [0u8; 12] ++ block_number ++ timestamp ++ log_index - let mut id_packed = [0u8; 12 + 8 + 8 + 4]; // 32 bytes - id_packed[12..20].copy_from_slice(&self.block_number.to_be_bytes()); - id_packed[20..28].copy_from_slice(&self.timestamp.to_be_bytes()); - id_packed[28..32].copy_from_slice(&self.log_index.to_be_bytes()); - - // Step 2: keccak256(log_hash ++ id_packed) - let id_log_hash = keccak256([log_hash.as_slice(), &id_packed].concat()); - - // Step 3: keccak256(id_log_hash ++ chain_id) - let bare_checksum = keccak256([id_log_hash.as_slice(), &self.chain_id].concat()); - - // Step 4: Prepend type byte 0x03 (overwrite first byte) - let mut checksum = bare_checksum; - checksum.0[0] = 0x03; - - checksum - } - - /// Verify the checksums after recalculation - pub fn verify_checksum(&self, log_hash: &B256) -> Result<(), AccessListError> { - if 
self.recompute_checksum(log_hash) != self.checksum { - return Err(AccessListError::MalformedEntry); - } - Ok(()) - } -} - -/// Represents a single entry in the access list. -#[derive(Debug, Clone)] -enum AccessListEntry { - Lookup(LookupEntry), - ChainIdExtension(ChainIdExtensionEntry), - Checksum(ChecksumEntry), -} - -/// Parsed lookup identity entry (type 0x01). -#[derive(Debug, Clone)] -struct LookupEntry { - pub chain_id_low: [u8; 8], - pub block_number: u64, - pub timestamp: u64, - pub log_index: u32, -} - -/// Parsed Chain ID extension entry (type 0x02). -#[derive(Debug, Clone)] -struct ChainIdExtensionEntry { - pub upper_bytes: [u8; 24], -} - -/// Parsed checksum entry (type 0x03). -#[derive(Debug, Clone)] -struct ChecksumEntry { - pub raw: B256, -} - -/// Error returned when access list parsing fails. -#[derive(Debug, Error, PartialEq, Eq)] -pub enum AccessListError { - /// Input ended before a complete message group was parsed. - #[error("unexpected end of access list")] - UnexpectedEnd, - - /// Unexpected entry type found. - #[error("expected type {expected:#x}, got {found:#x}")] - UnexpectedType { - /// The type we expected (e.g. 0x01, 0x02, or 0x03) - expected: u8, - /// The actual type byte we found - found: u8, - }, - - /// Malformed entry sequence or invalid prefix structure. - #[error("malformed entry")] - MalformedEntry, - - /// Message expired. - #[error("message expired")] - MessageExpired, - - /// Timestamp invariant violated. - #[error("executing timestamp is earlier than initiating timestamp")] - InvalidTimestampInvariant, -} - -// Access list entry type byte constants -const PREFIX_LOOKUP: u8 = 0x01; -const PREFIX_CHAIN_ID_EXTENSION: u8 = 0x02; -const PREFIX_CHECKSUM: u8 = 0x03; - -/// Parses a vector of raw `B256` access list entries into structured [`Access`] objects. 
-/// -/// Each `Access` group must follow the pattern: -/// - One `Lookup` entry (prefix `0x01`) -/// - Optionally one `ChainIdExtension` entry (prefix `0x02`) -/// - One `Checksum` entry (prefix `0x03`) -/// -/// Entries are consumed in order. If any group is malformed, this function returns a -/// [`AccessListError`]. -/// -/// # Arguments -/// -/// * `entries` - A `Vec<B256>` representing the raw access list entries. -/// -/// # Returns -/// -/// A vector of fully parsed [`Access`] items if all entries are valid. -/// -/// # Errors -/// -/// Returns [`AccessListError`] if entries are out-of-order, malformed, or incomplete. -pub fn parse_access_list(entries: Vec<B256>) -> Result<Vec<Access>, AccessListError> { - let mut list = Vec::with_capacity(entries.len() / 2); - let mut lookup_entry: Option<LookupEntry> = None; - let mut chain_id_ext: Option<ChainIdExtensionEntry> = None; - - for entry in entries { - let parsed = parse_entry(&entry)?; - - match parsed { - AccessListEntry::Lookup(lookup) => { - if lookup_entry.is_some() { - return Err(AccessListError::MalformedEntry); - } - lookup_entry = Some(lookup); - } - - AccessListEntry::ChainIdExtension(ext) => { - if lookup_entry.is_none() || chain_id_ext.is_some() { - return Err(AccessListError::MalformedEntry); - } - chain_id_ext = Some(ext); - } - - AccessListEntry::Checksum(checksum) => { - let lookup = lookup_entry.take().ok_or(AccessListError::MalformedEntry)?; - let access = Access::from_entries(lookup, chain_id_ext.take(), checksum); - list.push(access); - } - } - } - - if lookup_entry.is_some() { - return Err(AccessListError::UnexpectedEnd); - } - - Ok(list) -} - -/// Parses a single 32-byte access list entry into a typed [`AccessListEntry`]. 
-/// -/// This function performs a prefix-based decoding of the input hash: -/// -/// ### Entry Type Encoding -/// -/// | Prefix Byte | Type | Description | -/// |-------------|------------------------|-------------------------------------------------------------------| -/// | `0x01` | `LookupEntry` | Contains chain ID (low bits), block number, timestamp, log index. | -/// | `0x02` | `ChainIdExtensionEntry`| Contains upper 24 bytes of a 256-bit chain ID. | -/// | `0x03` | `ChecksumEntry` | Contains the checksum hash used for message validation. | -/// -/// ### Spec References -/// -/// - [Optimism Access List Format](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/predeploys.md#access-list) -/// - Entry format and layout based on CrossL2Inbox access-list encoding. -fn parse_entry(entry: &B256) -> Result<AccessListEntry, AccessListError> { - match entry[0] { - PREFIX_LOOKUP => { - if entry[1..4] != [0; 3] { - return Err(AccessListError::MalformedEntry); - } - Ok(AccessListEntry::Lookup(LookupEntry { - chain_id_low: entry[4..12].try_into().unwrap(), - block_number: u64::from_be_bytes(entry[12..20].try_into().unwrap()), - timestamp: u64::from_be_bytes(entry[20..28].try_into().unwrap()), - log_index: u32::from_be_bytes(entry[28..32].try_into().unwrap()), - })) - } - - PREFIX_CHAIN_ID_EXTENSION => { - if entry[1..8] != [0; 7] { - return Err(AccessListError::MalformedEntry); - } - Ok(AccessListEntry::ChainIdExtension(ChainIdExtensionEntry { - upper_bytes: entry[8..32].try_into().unwrap(), - })) - } - - PREFIX_CHECKSUM => Ok(AccessListEntry::Checksum(ChecksumEntry { raw: *entry })), - - other => Err(AccessListError::UnexpectedType { expected: PREFIX_LOOKUP, found: other }), - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::{B256, U256, b256}; - - fn make_lookup_entry( - block_number: u64, - timestamp: u64, - log_index: u32, - chain_id_low: [u8; 8], - ) -> B256 { - let mut buf = [0u8; 32]; - buf[0] = PREFIX_LOOKUP; - // 3 zero 
padding - buf[4..12].copy_from_slice(&chain_id_low); - buf[12..20].copy_from_slice(&block_number.to_be_bytes()); - buf[20..28].copy_from_slice(×tamp.to_be_bytes()); - buf[28..32].copy_from_slice(&log_index.to_be_bytes()); - B256::from(buf) - } - - fn make_chain_id_ext(upper: [u8; 24]) -> B256 { - let mut buf = [0u8; 32]; - buf[0] = PREFIX_CHAIN_ID_EXTENSION; - // 7 zero padding - buf[8..32].copy_from_slice(&upper); - B256::from(buf) - } - - fn make_checksum(access: &Access, log_hash: &B256) -> B256 { - access.recompute_checksum(log_hash) - } - - #[test] - fn test_parse_valid_access_list_with_chain_id_ext() { - let block_number = 1234; - let timestamp = 9999; - let log_index = 5; - let chain_id_low = [1u8; 8]; - let upper_bytes = [2u8; 24]; - let log_hash = keccak256([0u8; 32]); - - let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); - let chain_ext = make_chain_id_ext(upper_bytes); - - let access = Access::from_entries( - LookupEntry { chain_id_low, block_number, timestamp, log_index }, - Some(ChainIdExtensionEntry { upper_bytes }), - ChecksumEntry { - raw: B256::default(), // will override later - }, - ); - - let checksum = make_checksum(&access, &log_hash); - - let access = Access::from_entries( - LookupEntry { chain_id_low, block_number, timestamp, log_index }, - Some(ChainIdExtensionEntry { upper_bytes }), - ChecksumEntry { raw: checksum }, - ); - - let list = vec![lookup, chain_ext, checksum]; - let parsed = parse_access_list(list).unwrap(); - assert_eq!(parsed.len(), 1); - assert_eq!(parsed[0], access); - assert!(parsed[0].verify_checksum(&log_hash).is_ok()); - } - - #[test] - fn test_parse_access_list_without_chain_id_ext() { - let block_number = 1; - let timestamp = 2; - let log_index = 3; - let chain_id_low = [0xaa; 8]; - let log_hash = keccak256([1u8; 32]); - - let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); - let access = Access::from_entries( - LookupEntry { chain_id_low, block_number, 
timestamp, log_index }, - None, - ChecksumEntry { raw: B256::default() }, - ); - let checksum = make_checksum(&access, &log_hash); - let access = Access::from_entries( - LookupEntry { chain_id_low, block_number, timestamp, log_index }, - None, - ChecksumEntry { raw: checksum }, - ); - - let list = vec![lookup, checksum]; - let parsed = parse_access_list(list).unwrap(); - assert_eq!(parsed.len(), 1); - assert_eq!(parsed[0], access); - assert!(parsed[0].verify_checksum(&log_hash).is_ok()); - } - - #[test] - fn test_recompute_checksum_against_known_value() { - // Input data - let access = Access { - chain_id: U256::from(3).to_be_bytes(), - block_number: 2587, - timestamp: 4660, - log_index: 66, - checksum: B256::default(), // not used in this test - }; - - let log_hash = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"); - - // Expected checksum computed previously using spec logic - let expected = b256!("0x03ca886771056d8ea647bb809b888ba14986f57daaf28954d40408321717716a"); - - let computed = access.recompute_checksum(&log_hash); - assert_eq!(computed, expected, "Checksum does not match expected value"); - } - - #[test] - fn test_checksum_mismatch() { - let block_number = 1; - let timestamp = 2; - let log_index = 3; - let chain_id_low = [0xaa; 8]; - let log_hash = keccak256([1u8; 32]); - - let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); - let fake_checksum = - b256!("0x03ca886771056d8ea647bb809b888ba14986f57daaf28954d40408321717716a"); - let list = vec![lookup, fake_checksum]; - - let parsed = parse_access_list(list).unwrap(); - let err = parsed[0].verify_checksum(&log_hash); - assert_eq!(err, Err(AccessListError::MalformedEntry)); - } - - #[test] - fn test_invalid_entry_order_should_fail() { - let mut raw = [0u8; 32]; - raw[0] = PREFIX_CHECKSUM; - let checksum = B256::from(raw); - - let lookup = make_lookup_entry(0, 0, 0, [0u8; 8]); - let entries = vec![checksum, lookup]; - - 
assert!(matches!(parse_access_list(entries), Err(AccessListError::MalformedEntry))); - } -} diff --git a/kona/crates/utilities/cli/Cargo.toml b/kona/crates/utilities/cli/Cargo.toml deleted file mode 100644 index d42db96aa7d..00000000000 --- a/kona/crates/utilities/cli/Cargo.toml +++ /dev/null @@ -1,44 +0,0 @@ -[package] -name = "kona-cli" -version = "0.3.2" -description = "Shared CLI utilities for Kona crates" -edition.workspace = true -license.workspace = true -authors.workspace = true -repository.workspace = true -homepage.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -kona-genesis.workspace = true -kona-registry.workspace = true - -# Alloy -alloy-chains.workspace = true - -# General -tracing.workspace = true -serde = { workspace = true, features = ["derive"]} -clap = { workspace = true, features = ["derive", "env"] } -tracing-subscriber = { workspace = true, features = ["fmt", "env-filter", "json", "tracing-log"] } -tracing-appender.workspace = true -metrics-exporter-prometheus = { workspace = true, features = ["http-listener"] } -metrics-process.workspace = true -thiserror.workspace = true - -# `secrets` feature -libp2p = { workspace = true, features = ["secp256k1"], optional = true } -alloy-primitives.workspace = true - -[dev-dependencies] -rstest.workspace = true - -[target.'cfg(unix)'.dependencies] -libc = "0.2" - -[features] -default = [] -secrets = [ "dep:libp2p" ] diff --git a/kona/crates/utilities/cli/src/flags/metrics.rs b/kona/crates/utilities/cli/src/flags/metrics.rs deleted file mode 100644 index 7333270bb22..00000000000 --- a/kona/crates/utilities/cli/src/flags/metrics.rs +++ /dev/null @@ -1,97 +0,0 @@ -//! Utility module to house implementation and declaration of MetricsArgs since it's being used in -//! multiple places, it's just being referenced from this module. - -use crate::{CliResult, init_prometheus_server}; -use clap::{Parser, arg}; -use std::net::IpAddr; - -/// Configuration for Prometheus metrics. 
-#[derive(Debug, Clone, Parser)] -#[command(next_help_heading = "Metrics")] -pub struct MetricsArgs { - /// Controls whether Prometheus metrics are enabled. Disabled by default. - #[arg( - long = "metrics.enabled", - global = true, - default_value_t = false, - env = "KONA_METRICS_ENABLED" - )] - pub enabled: bool, - - /// The port to serve Prometheus metrics on. - #[arg(long = "metrics.port", global = true, default_value = "9090", env = "KONA_METRICS_PORT")] - pub port: u16, - - /// The IP address to use for Prometheus metrics. - #[arg( - long = "metrics.addr", - global = true, - default_value = "0.0.0.0", - env = "KONA_METRICS_ADDR" - )] - pub addr: IpAddr, -} - -impl Default for MetricsArgs { - fn default() -> Self { - Self::parse_from::<[_; 0], &str>([]) - } -} - -impl MetricsArgs { - /// Initialize the tracing stack and Prometheus metrics recorder. - /// - /// This function should be called at the beginning of the program. - pub fn init_metrics(&self) -> CliResult<()> { - if self.enabled { - init_prometheus_server(self.addr, self.port)?; - } - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use clap::Parser; - use std::net::{IpAddr, Ipv4Addr}; - - /// Helper struct to parse MetricsArgs within a test CLI structure. - #[derive(Parser, Debug)] - struct TestCli { - #[command(flatten)] - metrics: MetricsArgs, - } - - #[test] - fn test_default_metrics_args() { - let cli = TestCli::parse_from(["test_app"]); - assert!(!cli.metrics.enabled, "Default for metrics.enabled should be false."); - assert_eq!(cli.metrics.port, 9090, "Default for metrics.port should be 9090."); - assert_eq!( - cli.metrics.addr, - IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), - "Default for metrics.addr should be 0.0.0.0." 
- ); - } - - #[test] - fn test_metrics_args_from_cli() { - let cli = TestCli::parse_from([ - "test_app", - "--metrics.enabled", - "--metrics.port", - "9999", - "--metrics.addr", - "127.0.0.1", - ]); - assert!(cli.metrics.enabled, "metrics.enabled should be true."); - assert_eq!(cli.metrics.port, 9999, "metrics.port should be parsed from CLI."); - assert_eq!( - cli.metrics.addr, - IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), - "metrics.addr should be parsed from CLI." - ); - } -} diff --git a/kona/crates/utilities/cli/src/lib.rs b/kona/crates/utilities/cli/src/lib.rs deleted file mode 100644 index 4c151dec0a2..00000000000 --- a/kona/crates/utilities/cli/src/lib.rs +++ /dev/null @@ -1,33 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] - -mod error; -pub use error::{CliError, CliResult}; - -mod flags; -pub use flags::{GlobalArgs, LogArgs, MetricsArgs, OverrideArgs}; - -mod logs; -pub use logs::{FileLogConfig, LogConfig, LogRotation, StdoutLogConfig}; - -mod clap; -pub use clap::cli_styles; - -#[cfg(feature = "secrets")] -mod secrets; -#[cfg(feature = "secrets")] -pub use secrets::{KeypairError, ParseKeyError, SecretKeyLoader}; - -pub mod backtrace; - -mod tracing; -pub use tracing::{LogFormat, init_test_tracing}; - -mod prometheus; -pub use prometheus::init_prometheus_server; - -pub mod sigsegv_handler; diff --git a/kona/crates/utilities/cli/src/tracing.rs b/kona/crates/utilities/cli/src/tracing.rs deleted file mode 100644 index 05920d4cbee..00000000000 --- a/kona/crates/utilities/cli/src/tracing.rs +++ /dev/null @@ -1,152 +0,0 @@ -//! [tracing_subscriber] utilities. 
- -use tracing_subscriber::{ - Layer, - fmt::{ - format::{FormatEvent, FormatFields, Writer}, - time::{FormatTime, SystemTime}, - }, - prelude::__tracing_subscriber_SubscriberExt, - registry::LookupSpan, - util::{SubscriberInitExt, TryInitError}, -}; - -use serde::{Deserialize, Serialize}; -use std::fmt; -use tracing_subscriber::EnvFilter; - -use crate::{LogConfig, LogRotation}; - -/// The format of the logs. -#[derive( - Default, Debug, Clone, Copy, PartialEq, Eq, Hash, clap::ValueEnum, Serialize, Deserialize, -)] -#[serde(rename_all = "lowercase")] -#[clap(rename_all = "lowercase")] -pub enum LogFormat { - /// Full format (default). - #[default] - Full, - /// JSON format. - Json, - /// Pretty format. - Pretty, - /// Compact format. - Compact, - /// Logfmt format. - Logfmt, -} - -/// Custom logfmt formatter for tracing events. -struct LogfmtFormatter; - -impl<S, N> FormatEvent<S, N> for LogfmtFormatter -where - S: tracing::Subscriber + for<'a> LookupSpan<'a>, - N: for<'a> FormatFields<'a> + 'static, -{ - fn format_event( - &self, - ctx: &tracing_subscriber::fmt::FmtContext<'_, S, N>, - mut writer: Writer<'_>, - event: &tracing::Event<'_>, - ) -> fmt::Result { - let meta = event.metadata(); - - // Write timestamp - let time_format = SystemTime; - write!(writer, "time=\"")?; - time_format.format_time(&mut writer)?; - write!(writer, "\" ")?; - - // Write level - write!(writer, "level={} ", meta.level())?; - - // Write target - write!(writer, "target={} ", meta.target())?; - - // Write the message and fields - ctx.field_format().format_fields(writer.by_ref(), event)?; - - writeln!(writer) - } -} - -impl LogConfig { - /// Initializes the tracing subscriber - /// - /// # Arguments - /// * `verbosity_level` - The verbosity level (0-5). If `0`, no logs are printed. - /// * `env_filter` - Optional environment filter for the subscriber. - /// - /// # Returns - /// * `Result<()>` - Ok if successful, Err otherwise. 
- pub fn init_tracing_subscriber( - &self, - env_filter: Option<EnvFilter>, - ) -> Result<(), TryInitError> { - let file_layer = self.file_logs.as_ref().map(|file_logs| { - let directory_path = file_logs.directory_path.clone(); - - let appender = match file_logs.rotation { - LogRotation::Minutely => { - tracing_appender::rolling::minutely(directory_path, "kona.log") - } - LogRotation::Hourly => { - tracing_appender::rolling::hourly(directory_path, "kona.log") - } - LogRotation::Daily => tracing_appender::rolling::daily(directory_path, "kona.log"), - LogRotation::Never => tracing_appender::rolling::never(directory_path, "kona.log"), - }; - - match file_logs.format { - LogFormat::Full => tracing_subscriber::fmt::layer().with_writer(appender).boxed(), - LogFormat::Json => { - tracing_subscriber::fmt::layer().json().with_writer(appender).boxed() - } - LogFormat::Pretty => { - tracing_subscriber::fmt::layer().pretty().with_writer(appender).boxed() - } - LogFormat::Compact => { - tracing_subscriber::fmt::layer().compact().with_writer(appender).boxed() - } - LogFormat::Logfmt => tracing_subscriber::fmt::layer() - .event_format(LogfmtFormatter) - .with_writer(appender) - .boxed(), - } - }); - - let stdout_layer = self.stdout_logs.as_ref().map(|stdout_logs| match stdout_logs.format { - LogFormat::Full => tracing_subscriber::fmt::layer().boxed(), - LogFormat::Json => tracing_subscriber::fmt::layer().json().boxed(), - LogFormat::Pretty => tracing_subscriber::fmt::layer().pretty().boxed(), - LogFormat::Compact => tracing_subscriber::fmt::layer().compact().boxed(), - LogFormat::Logfmt => { - tracing_subscriber::fmt::layer().event_format(LogfmtFormatter).boxed() - } - }); - - let env_filter = env_filter - .unwrap_or(EnvFilter::from_default_env()) - .add_directive(self.global_level.into()); - - tracing_subscriber::registry() - .with(env_filter) - .with(file_layer) - .with(stdout_layer) - .try_init()?; - - Ok(()) - } -} - -/// This provides function for init tracing in testing 
-/// -/// # Functions -/// - `init_test_tracing`: A helper function for initializing tracing in test environments. -/// - `init_tracing_subscriber`: Initializes the tracing subscriber with a specified verbosity level -/// and optional environment filter. -pub fn init_test_tracing() { - let _ = LogConfig::default().init_tracing_subscriber(None::<EnvFilter>); -} diff --git a/kona/crates/utilities/macros/README.md b/kona/crates/utilities/macros/README.md deleted file mode 100644 index a0d229a0f2f..00000000000 --- a/kona/crates/utilities/macros/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# `kona-macros` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-macros"><img src="https://img.shields.io/crates/v/kona-macros.svg?label=kona-macros&labelColor=2a2f35" alt="Kona Engine"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://img.shields.io/codecov/c/github/op-rs/kona"><img src="https://img.shields.io/codecov/c/github/op-rs/kona" alt="Codecov"></a> - -Utility helper macros for kona crates. 
diff --git a/kona/crates/utilities/macros/src/lib.rs b/kona/crates/utilities/macros/src/lib.rs deleted file mode 100644 index f0796b849b7..00000000000 --- a/kona/crates/utilities/macros/src/lib.rs +++ /dev/null @@ -1,11 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![no_std] - -mod metrics; diff --git a/kona/crates/utilities/serde/README.md b/kona/crates/utilities/serde/README.md deleted file mode 100644 index d5ca4b6f13c..00000000000 --- a/kona/crates/utilities/serde/README.md +++ /dev/null @@ -1,64 +0,0 @@ -## `kona-serde` - -<a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml/badge.svg?label=ci" alt="CI"></a> -<a href="https://crates.io/crates/kona-serde"><img src="https://img.shields.io/crates/v/kona-serde.svg" alt="kona-serde crate"></a> -<a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="MIT License"></a> -<a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> - -Serde related helpers for kona. - -### Graceful Serialization - -This crate extends the serialization and deserialization -functionality provided by [`alloy-serde`][alloy-serde] to -deserialize raw number quantity values. - -This issue arose in `u128` toml deserialization where -deserialization of a raw number fails. 
-[This rust playground][invalid] demonstrates how toml fails to -deserialize a native `u128` internal value. - -With `kona-serde`, tagging the inner `u128` field with `#[serde(with = "kona_serde::quantity")]`, -allows the `u128` or any other type within the following constraints to be deserialized by toml properly. - -These are the supported native types: -- `bool` -- `u8` -- `u16` -- `u32` -- `u64` -- `u128` - -Below demonstrates the use of the `#[serde(with = "kona_serde::quantity")]` attribute. - -```rust -use serde::{Serialize, Deserialize}; - -/// My wrapper type. -#[derive(Debug, Serialize, Deserialize)] -pub struct MyStruct { - /// The inner `u128` value. - #[serde(with = "kona_serde::quantity")] - pub inner: u128, -} - -// Correctly deserializes a raw value. -let raw_toml = r#"inner = 120"#; -let b: MyStruct = toml::from_str(raw_toml).expect("failed to deserialize toml"); -println!("{}", b.inner); - -// Notice that a string value is also deserialized correctly. -let raw_toml = r#"inner = "120""#; -let b: MyStruct = toml::from_str(raw_toml).expect("failed to deserialize toml"); -println!("{}", b.inner); -``` - -### Provenance - -This code is heavily based on the [`alloy-serde`][alloy-serde] crate. 
- - -<!-- Hyperlinks --> - -[invalid]: https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=d3c674d02a90c574e3f543144621418d -[alloy-serde]: https://crates.io/crates/alloy-serde diff --git a/kona/crates/utilities/serde/src/lib.rs b/kona/crates/utilities/serde/src/lib.rs deleted file mode 100644 index 9433e41851f..00000000000 --- a/kona/crates/utilities/serde/src/lib.rs +++ /dev/null @@ -1,12 +0,0 @@ -#![doc = include_str!("../README.md")] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/square.png", - html_favicon_url = "https://raw.githubusercontent.com/op-rs/kona/main/assets/favicon.ico", - issue_tracker_base_url = "https://github.com/op-rs/kona/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -#![no_std] - -extern crate alloc; - -pub mod quantity; diff --git a/kona/deny.toml b/kona/deny.toml deleted file mode 100644 index eb525696d15..00000000000 --- a/kona/deny.toml +++ /dev/null @@ -1,81 +0,0 @@ -[graph] -targets = [] -all-features = false -no-default-features = false - -[output] -feature-depth = 1 - -[advisories] -ignore = [ - # paste crate is no longer maintained. - "RUSTSEC-2024-0436", - "RUSTSEC-2024-0384", - "RUSTSEC-2025-0012", - # bincode is unmaintained but still functional; transitive dep from reth-nippy-jar and test-fuzz. 
- "RUSTSEC-2025-0141", - # Integer overflow in `BytesMut::reserve` - "RUSTSEC-2026-0007" -] - -[licenses] -allow = [ - "MIT", - "Apache-2.0", - "Apache-2.0 WITH LLVM-exception", - "BSD-2-Clause", - "BSD-3-Clause", - "ISC", - "Unlicense", - "Unicode-3.0", - "MPL-2.0", - "Zlib", - "0BSD", - "CDLA-Permissive-2.0", -] -confidence-threshold = 0.8 -exceptions = [ - # CC0 is a permissive license but somewhat unclear status for source code - # so we prefer to not have dependencies using it - # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal - { allow = ["CC0-1.0"], name = "secp256k1" }, - { allow = ["CC0-1.0"], name = "aurora-engine-modexp" }, - { allow = ["CC0-1.0"], name = "secp256k1-sys" }, - { allow = ["CC0-1.0"], name = "tiny-keccak" }, - { allow = ["CC0-1.0"], name = "notify" }, - # aws-lc-sys includes OpenSSL in its composite license expression - { allow = ["OpenSSL"], name = "aws-lc-sys" }, -] - -[[licenses.clarify]] -name = "ring" -expression = "LicenseRef-ring" -license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] - -[licenses.private] -ignore = false -registries = [] - -[bans] -multiple-versions = "warn" -wildcards = "allow" -highlight = "all" -workspace-default-features = "allow" -external-default-features = "allow" -allow = [] -deny = [] -skip = [] -skip-tree = [] - -[sources] -unknown-registry = "deny" -unknown-git = "deny" -allow-registry = ["https://github.com/rust-lang/crates.io-index"] -allow-git = [ - "https://github.com/paradigmxyz/reth", -] - -[sources.allow-org] -github = [] -gitlab = [] -bitbucket = [] diff --git a/kona/docker/README.md b/kona/docker/README.md deleted file mode 100644 index 6545ec61c8e..00000000000 --- a/kona/docker/README.md +++ /dev/null @@ -1,92 +0,0 @@ -# `docker` - -This directory contains all of the repositories' dockerfiles as well as the [bake file](https://docs.docker.com/build/bake/) -used to define this repository's docker build configuration. 
In addition, the [recipes](./recipes) directory contains -example deployment strategies + grafana dashboards for applications such as [`kona-node`](../bin/node). - -## Install Dependencies - -* `docker`: https://www.docker.com/get-started/ -* `docker-buildx`: https://github.com/docker/buildx?tab=readme-ov-file#installing - -## Building Locally - -To build any image in the bake file locally, use `docker buildx bake`: - -```sh -# The target is one of the available bake targets within the `docker-bake.hcl`. -# A list can be viewed by running `docker buildx bake --list-targets` -export TARGET="<target_name>" - -(cd "$(git rev-parse --show-toplevel)" && docker buildx bake \ - --progress plain \ - -f docker/docker-bake.hcl \ - $TARGET) -``` - -### Build Options - -Relevant build options (variables) for each target can be viewed by running `docker buildx bake --list-variables` or -manually inspecting the targets in the `docker-bake.hcl`. - -#### Troubleshooting - -If you receive an error like the following: - -``` -ERROR: Multi-platform build is not supported for the docker driver. -Switch to a different driver, or turn on the containerd image store, and try again. -Learn more at https://docs.docker.com/go/build-multi-platform/ -``` - -Create and activate a new builder and retry the bake command. 
- -```sh -docker buildx create --name kona-builder --use -``` - -## Nightly Builds - -Nightly Docker images are automatically built and published every day at 2 AM UTC for: -- `kona-node` -- `kona-host` -- `kona-supervisor` - -### Using Nightly Images - -```sh -# Pull the latest nightly build (multi-platform: linux/amd64, linux/arm64) -docker pull ghcr.io/op-rs/kona/kona-node:nightly -docker pull ghcr.io/op-rs/kona/kona-host:nightly -docker pull ghcr.io/op-rs/kona/kona-supervisor:nightly - -# Pull a specific date's nightly build -docker pull ghcr.io/op-rs/kona/kona-node:nightly-2024-12-10 -``` - -### Manual Trigger - -To manually trigger a nightly build: -```sh -gh workflow run "Build and Publish Nightly Docker Images" -``` - -## Cutting a Release (for maintainers / forks) - -To cut a release of the docker image for any of the targets, cut a new annotated tag for the target like so: - -```sh -# Example formats: -# - `kona-host/v0.1.0-beta.8` -# - `cannon-builder/v1.2.0` -TAG="<target_name>/<version>" -git tag -a $TAG -m "<tag description>" && git push origin tag $TAG -``` - -To run the workflow manually, navigate over to the ["Build and Publish Docker Image"](https://github.com/op-rs/kona/actions/workflows/docker.yaml) -action. From there, run a `workflow_dispatch` trigger, select the tag you just pushed, and then finally select the image to release. 
- -Or, if you prefer to use the `gh` CLI, you can run: -```sh -gh workflow run "Build and Publish Docker Image" --ref <tag> -f image_to_release=<target> -``` diff --git a/kona/docker/apps/justfile b/kona/docker/apps/justfile deleted file mode 100644 index dc58332131e..00000000000 --- a/kona/docker/apps/justfile +++ /dev/null @@ -1,48 +0,0 @@ -DOCKER_JUSTFILE := source_directory() - -_docker_arch: - #!/bin/bash - if [[ -z "$PLATFORMS" ]]; then - echo $(docker system info --format '{{"{{"}}.OSType{{"}}"}}/{{"{{"}}.Architecture{{"}}"}}') - else - echo "$PLATFORMS" - fi - -# Builds an application image from the local repository. -build-local bin_name image_tag='kona:local' load_flag='': - #!/bin/bash - export BIN_TARGET="{{bin_name}}" - export DEFAULT_TAG="{{image_tag}}" - export PLATFORMS="$(just _docker_arch)" - export REPO_LOCATION="local" - - LOAD_FLAG="" - if [[ "{{load_flag}}" == "load" ]]; then - LOAD_FLAG="--load" - fi - - (cd {{DOCKER_JUSTFILE}}/../../ && docker buildx bake \ - --progress plain \ - -f docker/docker-bake.hcl \ - $LOAD_FLAG \ - generic) - -# Builds an application image from a remote revision. -build-remote bin_name git_tag='' image_tag='kona:local': - #!/bin/bash - export BIN_TARGET="{{bin_name}}" - export DEFAULT_TAG="{{image_tag}}" - export PLATFORMS="$(just _docker_arch)" - export REPO_LOCATION="remote" - - # If no git tag is provided, use `main` - if [[ -z "{{git_tag}}" ]]; then - export GIT_REF_NAME="main" - else - export GIT_REF_NAME="{{git_tag}}" - fi - - (cd {{DOCKER_JUSTFILE}}/../../ && docker buildx bake \ - --progress plain \ - -f docker/docker-bake.hcl \ - generic) diff --git a/kona/docker/fpvm-prestates/README.md b/kona/docker/fpvm-prestates/README.md deleted file mode 100644 index f30f4ed7872..00000000000 --- a/kona/docker/fpvm-prestates/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# `fpvm-prestates` - -Images for creating reproducible `kona-client` prestate builds for supported fault proof virtual machines. 
- -## Usage - -### All prestate artifacts - -```sh -# Produce the prestate artifacts for `kona-client` running on `asterisc` and `cannon` -# (FPVM versions specified by `asterisc_tag` + `cannon_tag`) -just all <kona|kona-int> <kona_tag> <asterisc_tag> <cannon_tag> -``` - -### `kona-client` + `asterisc` prestate artifacts - -```sh -# Produce the prestate artifacts for `kona-client` running on `asterisc` (version specified by `asterisc_tag`) -just asterisc <kona|kona-int> <kona_tag> <asterisc_tag> -``` - -### `kona-client` + `cannon` prestate artifacts - -```sh -# Produce the prestate artifacts for `kona-client` running on `cannon` (version specified by `cannon_tag`) -just cannon <kona|kona-int> <kona_tag> <cannon_tag> -``` - -### `kona-client` + `cannon` prestate artifacts for custom chains - -To create a reproducible kona-client prestate build that supports custom or devnet chain configurations that are not in the superchain-registry: - -```sh -# Produce the prestate artifacts for `kona-client` running on `cannon` (version specified by `cannon_tag`) -just cannon <kona|kona-int> <kona_tag> <cannon_tag> <artifacts_output_dir> <custom_config_dir> -``` - diff --git a/kona/docker/fpvm-prestates/justfile b/kona/docker/fpvm-prestates/justfile deleted file mode 100644 index 50f096ece37..00000000000 --- a/kona/docker/fpvm-prestates/justfile +++ /dev/null @@ -1,58 +0,0 @@ -set positional-arguments -alias cannon := build-client-prestate-cannon-artifacts - -# default recipe to display help information -default: - @just --list - -# Build the `kona-client` prestate artifacts from local source (cannon). -build-client-prestate-cannon-artifacts \ - kona_client_variant \ - cannon_tag \ - out='./prestate-artifacts-cannon' \ - custom_config_dir='': - #!/bin/bash - OUTPUT_DIR={{out}} - - # Docker bake env - export CLIENT_BIN="{{kona_client_variant}}" - export CANNON_TAG="{{cannon_tag}}" - export DEFAULT_TAG="kona-cannon-prestate:local" - - # Navigate to workspace root - cd ../.. 
- - if [[ -n "{{custom_config_dir}}" ]]; then - export KONA_CUSTOM_CONFIGS="true" - export CUSTOM_CONFIGS_CONTEXT="{{custom_config_dir}}" - if [ ! -d "{{custom_config_dir}}" ]; then - echo "Invalid custom config directory: {{custom_config_dir}}" - exit 1 - fi - echo "Using custom config directory: {{custom_config_dir}}" - else - # set to an empty directory to satisfy the docker build context requirement - TEMP_DIR=$(mktemp -d) - trap "rm -rf $TEMP_DIR" EXIT - export CUSTOM_CONFIGS_CONTEXT="$TEMP_DIR" - fi - - # Create the output directory - mkdir -p $OUTPUT_DIR - - echo "Building kona-client (variant: {{kona_client_variant}}) prestate artifacts for the cannon target. 🔫 Cannon Tag: {{cannon_tag}}" - - # Build the --allow flag conditionally (requires Docker Buildx v0.15.0+) - ALLOW_FLAG="" - BUILDX_VERSION=$(docker buildx version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "0.0.0") - MAJOR=$(echo "$BUILDX_VERSION" | cut -d. -f1) - MINOR=$(echo "$BUILDX_VERSION" | cut -d. -f2) - if [[ "$MAJOR" -gt 0 ]] || [[ "$MAJOR" -eq 0 && "$MINOR" -ge 15 ]]; then - ALLOW_FLAG="--allow fs=${CUSTOM_CONFIGS_CONTEXT}" - fi - - docker buildx bake \ - --set "*.output=$OUTPUT_DIR" \ - -f docker/docker-bake.hcl \ - $ALLOW_FLAG \ - kona-cannon-prestate diff --git a/kona/docker/recipes/kona-node-dev/README.md b/kona/docker/recipes/kona-node-dev/README.md deleted file mode 100644 index 2735ce0b30b..00000000000 --- a/kona/docker/recipes/kona-node-dev/README.md +++ /dev/null @@ -1,83 +0,0 @@ -# Requirements - -This is intended to run on x86-64 architecture. - -## Purpose - -This recipe, `kona-node-dev`, is different from the `kona-node` recipe in that -it builds a local container image of `kona-node` instead of pulling a nightly -image of `main`. This is useful, because it allows developers to checkout a -development branch and see how it behaves on a network. 
- -## Set up - -Assuming you are on Ubuntu and your user is member of the group `docker`, first time run - - git clone 'https://github.com/op-rs/kona.git' - cd kona/docker/recipes/kona-node-dev/ - just init - -If the last step fails due to missing packages, you can run `just setup-ubuntu` -and then run `just init` again. This will install the required packages for -Ubuntu. `just init` will also set up a virtual network, and finally spin up -`kona-node`, `op-reth`, `prometheus` and `grafana`. - -## Normal usage - -For future invocation it suffices to spin the system up and down with: - - just up - just down - -You can also run `just upd` if you want to detach from the docker logs. -If you want to update the `kona` submodule, you can run `just update`. - -A typical workflow after init could look like this: - - # remove existing images causing them to be rebuild - just rmi - # pull latest commits - just update - # checkout dev branch - just checkout <my-branch> - # build images and start containers - just upd - # visit Grafana - just stop - -For more info on the commands please refer to `justfile`. - -## Environment - -This setup uses `publicnode.com` as default L1, and the environment is configured in `publicnode.env`. -To use different RPC servers or ports, you can copy the file and make modifications. Then run: - - just up myenv.env - just down myenv.env - -or change the default in the `justfile. - -## Services and observability - -The following services are provided: - - http://localhost:3000 - -Default credentials are `admin:admin` and you should change that if you plan to -use this instance over longer time. - -## Storage - -The data is stored in current directory `./datadirs`, but you can modify the -`volume` mapping in `docker-compose.yml` to use a different volume. - -## Caveats - -The port numbers are fixed, so it would not be possible to run more than one -instance on a machine at the same time. 
Please bear this in mind when running -an instance for longer time. You can check if ports are in use with `docker -ps`. - -## Bugs and development - -Everything is orchestrated from `justfile`. Feel free to edit and submit PRs. diff --git a/kona/docker/recipes/kona-node/README.md b/kona/docker/recipes/kona-node/README.md deleted file mode 100644 index 4d5a9a08c22..00000000000 --- a/kona/docker/recipes/kona-node/README.md +++ /dev/null @@ -1,88 +0,0 @@ -# `kona-node` recipe - -> [!WARNING] -> -> `kona-node` is in active development, and this recipe is subject to frequent change (and may not work!) For the time -> being, it is intended to be used for development purposes. Please [file an issue][new-issue] if you have any problems -> during development. - -This directory contains a simple `docker-compose` setup for `kona-node` and `op-reth`, including example Grafana -dashboards and a default Prometheus configuration. - -By default, this recipe is configured to sync the [`OP Sepolia`][op-sepolia] L2. - -## Usage - -### Running - -An L1 Execution Client RPC and L1 Beacon API endpoint must be configured in your environment. The `L1_PROVIDER_RPC` and -`L1_BEACON_API` environment variables can be set in [`cfg.env`](./cfg.env). - -Once these two environment variables are set, the environment can be spun up and shut down as follows: - -```sh -# Start `kona-node`, `op-reth`, and `grafana` + `prometheus` -just up - -# Shutdown the docker compose environment -just down - -# Restart the docker compose environment -just restart -``` - -### Grafana - -The grafana instance can be accessed at `http://localhost:3000` in your browser. The username and password, by default, -are both `admin`. - -#### Adding a new visualization - -The `kona-node` dashboard is provisioned within the grafana instance by default. A new visualization can be added to the -dashboard by navigating to the `Kona Node` dashboard, and then clicking `Add` > `Visualization` in the top right. 
- -Once your visualization has been added, click `Share` > `Export` (tab), and toggle "Export for sharing externally" on. -Then, copy the JSON, and replace the contents of [`overview.json`](./grafana/dashboards/overview.json) -before making a PR. - -## Default Ports - -| Port | Service | -|---------|-----------------------------| -| `9223` | `kona-node` discovery | -| `9002` | `kona-node` metrics | -| `5060` | `kona-node` RPC | -| `30303` | `op-reth` discovery | -| `9001` | `op-reth` metrics | -| `8545` | `op-reth` RPC | -| `8551` | `op-reth` engine | -| `9090` | `prometheus` metrics server | -| `3000` | `grafana` dashboard UI | - -## Configuration - -### Adjusting host ports - -Host ports for both `op-reth` and `kona-node` can be configured in [`cfg.env`](./cfg.env). - -### Syncing a different OP Stack chain - -To adjust the chain that the node is syncing, you must modify the `docker-compose.yml` file to specify the desired -network parameters. Specifically: -1. Ensure `L1_PROVIDER_RPC` and `L1_BEACON_API` are set to L1 clients that represent the settlement layer of the L2. -1. `op-reth` - - `--chain` must specify the desired chain. - - `--rollup.sequencer-http` must specify the sequencer endpoint. -1. `kona-node` - - `--chain` must specify the chain ID of the desired chain. - -### Adjusting log filters - -Log filters can be adjusted by setting the `RUST_LOG` environment variable. This environment variable will be forwarded -to the `kona-node` container's entrypoint. - -Example: `export RUST_LOG=engine_builder=trace,runtime=debug` - -[op-sepolia]: https://sepolia-optimism.etherscan.io -[op-reth]: https://github.com/paradigmxyz/reth -[new-issue]: https://github.com/op-rs/kona/issues/new diff --git a/kona/docs/README.md b/kona/docs/README.md deleted file mode 100644 index 736588de63c..00000000000 --- a/kona/docs/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Vocs - -This is a [Vocs](https://vocs.dev) project bootstrapped with the Vocs CLI. 
- -## Usage - -Run the vocs site in developer mode, hosting it. - -```bash -just run-vocs -``` - -Build the vocs site as a static site. - -```bash -just build-vocs -``` - -Open the static site. - -```bash -just open-site -``` diff --git a/kona/docs/bun.lock b/kona/docs/bun.lock deleted file mode 100644 index a8e82c65146..00000000000 --- a/kona/docs/bun.lock +++ /dev/null @@ -1,1442 +0,0 @@ -{ - "lockfileVersion": 1, - "workspaces": { - "": { - "name": "kona-docs", - "dependencies": { - "react": "19.2.1", - "react-dom": "19.2.1", - "vocs": "1.2.1", - }, - "devDependencies": { - "@types/node": "latest", - "@types/react": "latest", - "tailwindcss": "^4.1.11", - "typescript": "latest", - }, - }, - }, - "packages": { - "@antfu/install-pkg": ["@antfu/install-pkg@1.1.0", "", { "dependencies": { "package-manager-detector": "^1.3.0", "tinyexec": "^1.0.1" } }, ""], - - "@antfu/utils": ["@antfu/utils@9.3.0", "", {}, ""], - - "@babel/code-frame": ["@babel/code-frame@7.27.1", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.27.1", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, ""], - - "@babel/compat-data": ["@babel/compat-data@7.28.5", "", {}, ""], - - "@babel/core": ["@babel/core@7.28.5", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", "@babel/helper-compilation-targets": "^7.27.2", "@babel/helper-module-transforms": "^7.28.3", "@babel/helpers": "^7.28.4", "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", "@babel/traverse": "^7.28.5", "@babel/types": "^7.28.5", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" } }, ""], - - "@babel/generator": ["@babel/generator@7.28.5", "", { "dependencies": { "@babel/parser": "^7.28.5", "@babel/types": "^7.28.5", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, ""], - - "@babel/helper-compilation-targets": 
["@babel/helper-compilation-targets@7.27.2", "", { "dependencies": { "@babel/compat-data": "^7.27.2", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, ""], - - "@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, ""], - - "@babel/helper-module-imports": ["@babel/helper-module-imports@7.27.1", "", { "dependencies": { "@babel/traverse": "^7.27.1", "@babel/types": "^7.27.1" } }, ""], - - "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.28.3", "", { "dependencies": { "@babel/helper-module-imports": "^7.27.1", "@babel/helper-validator-identifier": "^7.27.1", "@babel/traverse": "^7.28.3" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, ""], - - "@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.27.1", "", {}, ""], - - "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, ""], - - "@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, ""], - - "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, ""], - - "@babel/helpers": ["@babel/helpers@7.28.4", "", { "dependencies": { "@babel/template": "^7.27.2", "@babel/types": "^7.28.4" } }, ""], - - "@babel/parser": ["@babel/parser@7.28.5", "", { "dependencies": { "@babel/types": "^7.28.5" }, "bin": { "parser": "bin/babel-parser.js" } }, ""], - - "@babel/plugin-syntax-typescript": ["@babel/plugin-syntax-typescript@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, ""], - - "@babel/plugin-transform-react-jsx-self": ["@babel/plugin-transform-react-jsx-self@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, ""], - - "@babel/plugin-transform-react-jsx-source": ["@babel/plugin-transform-react-jsx-source@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" 
}, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, ""], - - "@babel/runtime": ["@babel/runtime@7.28.4", "", {}, ""], - - "@babel/template": ["@babel/template@7.27.2", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/parser": "^7.27.2", "@babel/types": "^7.27.1" } }, ""], - - "@babel/traverse": ["@babel/traverse@7.28.5", "", { "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.5", "@babel/template": "^7.27.2", "@babel/types": "^7.28.5", "debug": "^4.3.1" } }, ""], - - "@babel/types": ["@babel/types@7.28.5", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, ""], - - "@braintree/sanitize-url": ["@braintree/sanitize-url@7.1.1", "", {}, ""], - - "@chevrotain/cst-dts-gen": ["@chevrotain/cst-dts-gen@11.0.3", "", { "dependencies": { "@chevrotain/gast": "11.0.3", "@chevrotain/types": "11.0.3", "lodash-es": "4.17.21" } }, ""], - - "@chevrotain/gast": ["@chevrotain/gast@11.0.3", "", { "dependencies": { "@chevrotain/types": "11.0.3", "lodash-es": "4.17.21" } }, ""], - - "@chevrotain/regexp-to-ast": ["@chevrotain/regexp-to-ast@11.0.3", "", {}, ""], - - "@chevrotain/types": ["@chevrotain/types@11.0.3", "", {}, ""], - - "@chevrotain/utils": ["@chevrotain/utils@11.0.3", "", {}, ""], - - "@clack/core": ["@clack/core@0.3.5", "", { "dependencies": { "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, ""], - - "@clack/prompts": ["@clack/prompts@0.7.0", "", { "dependencies": { "@clack/core": "^0.3.3", "picocolors": "^1.0.0", "sisteransi": "^1.0.5" } }, ""], - - "@emotion/hash": ["@emotion/hash@0.9.2", "", {}, ""], - - "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.25.11", "", { "os": "aix", "cpu": "ppc64" }, "sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg=="], - - "@esbuild/android-arm": ["@esbuild/android-arm@0.25.11", "", { "os": "android", "cpu": "arm" }, 
"sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg=="], - - "@esbuild/android-arm64": ["@esbuild/android-arm64@0.25.11", "", { "os": "android", "cpu": "arm64" }, "sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ=="], - - "@esbuild/android-x64": ["@esbuild/android-x64@0.25.11", "", { "os": "android", "cpu": "x64" }, "sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g=="], - - "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.25.11", "", { "os": "darwin", "cpu": "arm64" }, "sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w=="], - - "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.25.11", "", { "os": "darwin", "cpu": "x64" }, "sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ=="], - - "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.25.11", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA=="], - - "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.25.11", "", { "os": "freebsd", "cpu": "x64" }, "sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw=="], - - "@esbuild/linux-arm": ["@esbuild/linux-arm@0.25.11", "", { "os": "linux", "cpu": "arm" }, "sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw=="], - - "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.25.11", "", { "os": "linux", "cpu": "arm64" }, "sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA=="], - - "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.25.11", "", { "os": "linux", "cpu": "ia32" }, "sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw=="], - - "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.25.11", "", { "os": "linux", "cpu": 
"none" }, "sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw=="], - - "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ=="], - - "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.25.11", "", { "os": "linux", "cpu": "ppc64" }, "sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw=="], - - "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.25.11", "", { "os": "linux", "cpu": "none" }, "sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww=="], - - "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.25.11", "", { "os": "linux", "cpu": "s390x" }, "sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw=="], - - "@esbuild/linux-x64": ["@esbuild/linux-x64@0.25.11", "", { "os": "linux", "cpu": "x64" }, ""], - - "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, "sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg=="], - - "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.25.11", "", { "os": "none", "cpu": "x64" }, "sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A=="], - - "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.25.11", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg=="], - - "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.25.11", "", { "os": "openbsd", "cpu": "x64" }, "sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw=="], - - "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.25.11", "", { "os": "none", "cpu": "arm64" }, 
"sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ=="], - - "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.25.11", "", { "os": "sunos", "cpu": "x64" }, "sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA=="], - - "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.25.11", "", { "os": "win32", "cpu": "arm64" }, "sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q=="], - - "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.25.11", "", { "os": "win32", "cpu": "ia32" }, "sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA=="], - - "@esbuild/win32-x64": ["@esbuild/win32-x64@0.25.11", "", { "os": "win32", "cpu": "x64" }, "sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA=="], - - "@floating-ui/core": ["@floating-ui/core@1.7.3", "", { "dependencies": { "@floating-ui/utils": "^0.2.10" } }, ""], - - "@floating-ui/dom": ["@floating-ui/dom@1.7.4", "", { "dependencies": { "@floating-ui/core": "^1.7.3", "@floating-ui/utils": "^0.2.10" } }, ""], - - "@floating-ui/react": ["@floating-ui/react@0.27.16", "", { "dependencies": { "@floating-ui/react-dom": "^2.1.6", "@floating-ui/utils": "^0.2.10", "tabbable": "^6.0.0" }, "peerDependencies": { "react": ">=17.0.0", "react-dom": ">=17.0.0" } }, ""], - - "@floating-ui/react-dom": ["@floating-ui/react-dom@2.1.6", "", { "dependencies": { "@floating-ui/dom": "^1.7.4" }, "peerDependencies": { "react": ">=16.8.0", "react-dom": ">=16.8.0" } }, ""], - - "@floating-ui/utils": ["@floating-ui/utils@0.2.10", "", {}, ""], - - "@fortawesome/fontawesome-free": ["@fortawesome/fontawesome-free@6.7.2", "", {}, ""], - - "@hono/node-server": ["@hono/node-server@1.19.5", "", { "peerDependencies": { "hono": "^4" } }, ""], - - "@iconify/types": ["@iconify/types@2.0.0", "", {}, ""], - - "@iconify/utils": ["@iconify/utils@3.0.2", "", { "dependencies": { 
"@antfu/install-pkg": "^1.1.0", "@antfu/utils": "^9.2.0", "@iconify/types": "^2.0.0", "debug": "^4.4.1", "globals": "^15.15.0", "kolorist": "^1.8.0", "local-pkg": "^1.1.1", "mlly": "^1.7.4" } }, ""], - - "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, ""], - - "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, ""], - - "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, ""], - - "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, ""], - - "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, ""], - - "@mdx-js/mdx": ["@mdx-js/mdx@3.1.1", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdx": "^2.0.0", "acorn": "^8.0.0", "collapse-white-space": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "estree-util-scope": "^1.0.0", "estree-walker": "^3.0.0", "hast-util-to-jsx-runtime": "^2.0.0", "markdown-extensions": "^2.0.0", "recma-build-jsx": "^1.0.0", "recma-jsx": "^1.0.0", "recma-stringify": "^1.0.0", "rehype-recma": "^1.0.0", "remark-mdx": "^3.0.0", "remark-parse": "^11.0.0", "remark-rehype": "^11.0.0", "source-map": "^0.7.0", "unified": "^11.0.0", "unist-util-position-from-estree": "^2.0.0", "unist-util-stringify-position": "^4.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, ""], - - "@mdx-js/react": ["@mdx-js/react@3.1.1", "", { "dependencies": { "@types/mdx": "^2.0.0" }, "peerDependencies": { "@types/react": ">=16", "react": ">=16" } }, ""], - - "@mdx-js/rollup": ["@mdx-js/rollup@3.1.1", "", { "dependencies": { "@mdx-js/mdx": "^3.0.0", "@rollup/pluginutils": "^5.0.0", 
"source-map": "^0.7.0", "vfile": "^6.0.0" }, "peerDependencies": { "rollup": ">=2" } }, ""], - - "@mermaid-js/parser": ["@mermaid-js/parser@0.6.3", "", { "dependencies": { "langium": "3.3.1" } }, ""], - - "@noble/hashes": ["@noble/hashes@1.8.0", "", {}, ""], - - "@radix-ui/colors": ["@radix-ui/colors@3.0.0", "", {}, ""], - - "@radix-ui/number": ["@radix-ui/number@1.1.1", "", {}, ""], - - "@radix-ui/primitive": ["@radix-ui/primitive@1.1.3", "", {}, ""], - - "@radix-ui/react-accessible-icon": ["@radix-ui/react-accessible-icon@1.1.7", "", { "dependencies": { "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-accordion": ["@radix-ui/react-accordion@1.2.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collapsible": "1.1.12", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-alert-dialog": ["@radix-ui/react-alert-dialog@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dialog": "1.1.15", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || 
^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-arrow": ["@radix-ui/react-arrow@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-aspect-ratio": ["@radix-ui/react-aspect-ratio@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-avatar": ["@radix-ui/react-avatar@1.1.10", "", { "dependencies": { "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-is-hydrated": "0.1.0", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-checkbox": ["@radix-ui/react-checkbox@1.3.3", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": 
["@types/react-dom"] }, ""], - - "@radix-ui/react-collapsible": ["@radix-ui/react-collapsible@1.1.12", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-collection": ["@radix-ui/react-collection@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-compose-refs": ["@radix-ui/react-compose-refs@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-context": ["@radix-ui/react-context@1.1.2", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-context-menu": ["@radix-ui/react-context-menu@2.2.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-menu": "2.1.16", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || 
^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-dialog": ["@radix-ui/react-dialog@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-direction": ["@radix-ui/react-direction@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-dismissable-layer": ["@radix-ui/react-dismissable-layer@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-escape-keydown": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-dropdown-menu": ["@radix-ui/react-dropdown-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-menu": "2.1.16", 
"@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-focus-guards": ["@radix-ui/react-focus-guards@1.1.3", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-focus-scope": ["@radix-ui/react-focus-scope@1.1.7", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-form": ["@radix-ui/react-form@0.1.8", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-label": "2.1.7", "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-hover-card": ["@radix-ui/react-hover-card@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { 
"@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-icons": ["@radix-ui/react-icons@1.3.2", "", { "peerDependencies": { "react": "^16.x || ^17.x || ^18.x || ^19.0.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-id": ["@radix-ui/react-id@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-label": ["@radix-ui/react-label@2.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-menu": ["@radix-ui/react-menu@2.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-menubar": 
["@radix-ui/react-menubar@1.1.16", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-menu": "2.1.16", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-navigation-menu": ["@radix-ui/react-navigation-menu@1.2.14", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-one-time-password-field": ["@radix-ui/react-one-time-password-field@0.1.8", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": 
"1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-is-hydrated": "0.1.0", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-password-toggle-field": ["@radix-ui/react-password-toggle-field@0.1.3", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-is-hydrated": "0.1.0" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-popover": ["@radix-ui/react-popover@1.1.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - 
"@radix-ui/react-popper": ["@radix-ui/react-popper@1.2.8", "", { "dependencies": { "@floating-ui/react-dom": "^2.0.0", "@radix-ui/react-arrow": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-rect": "1.1.1", "@radix-ui/react-use-size": "1.1.1", "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-portal": ["@radix-ui/react-portal@1.1.9", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-presence": ["@radix-ui/react-presence@1.1.5", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-primitive": ["@radix-ui/react-primitive@2.1.3", "", { "dependencies": { "@radix-ui/react-slot": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-progress": ["@radix-ui/react-progress@1.1.7", "", { "dependencies": { 
"@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-radio-group": ["@radix-ui/react-radio-group@1.3.8", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-roving-focus": ["@radix-ui/react-roving-focus@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-scroll-area": ["@radix-ui/react-scroll-area@1.2.10", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": 
"1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-select": ["@radix-ui/react-select@2.2.6", "", { "dependencies": { "@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3", "aria-hidden": "^1.2.4", "react-remove-scroll": "^2.6.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-separator": ["@radix-ui/react-separator@1.1.7", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-slider": ["@radix-ui/react-slider@1.3.6", "", { "dependencies": { 
"@radix-ui/number": "1.1.1", "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-slot": ["@radix-ui/react-slot@1.2.3", "", { "dependencies": { "@radix-ui/react-compose-refs": "1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-switch": ["@radix-ui/react-switch@1.2.6", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-previous": "1.1.1", "@radix-ui/react-use-size": "1.1.1" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-tabs": ["@radix-ui/react-tabs@1.1.13", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || 
^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-toast": ["@radix-ui/react-toast@1.2.15", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-toggle": ["@radix-ui/react-toggle@1.1.10", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-toggle-group": ["@radix-ui/react-toggle-group@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-toggle": "1.1.10", "@radix-ui/react-use-controllable-state": "1.2.2" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": 
["@types/react-dom"] }, ""], - - "@radix-ui/react-toolbar": ["@radix-ui/react-toolbar@1.1.11", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-separator": "1.1.7", "@radix-ui/react-toggle-group": "1.1.11" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-tooltip": ["@radix-ui/react-tooltip@1.2.8", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-id": "1.1.1", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "@radix-ui/react-use-callback-ref": ["@radix-ui/react-use-callback-ref@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-controllable-state": ["@radix-ui/react-use-controllable-state@1.2.2", "", { "dependencies": { "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-effect-event": 
["@radix-ui/react-use-effect-event@0.0.2", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-escape-keydown": ["@radix-ui/react-use-escape-keydown@1.1.1", "", { "dependencies": { "@radix-ui/react-use-callback-ref": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-is-hydrated": ["@radix-ui/react-use-is-hydrated@0.1.0", "", { "dependencies": { "use-sync-external-store": "^1.5.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-layout-effect": ["@radix-ui/react-use-layout-effect@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-previous": ["@radix-ui/react-use-previous@1.1.1", "", { "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-rect": ["@radix-ui/react-use-rect@1.1.1", "", { "dependencies": { "@radix-ui/rect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-use-size": ["@radix-ui/react-use-size@1.1.1", "", { "dependencies": { "@radix-ui/react-use-layout-effect": "1.1.1" }, "peerDependencies": { "@types/react": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" } }, ""], - - "@radix-ui/react-visually-hidden": ["@radix-ui/react-visually-hidden@1.2.3", "", { "dependencies": { "@radix-ui/react-primitive": "2.1.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] 
}, ""], - - "@radix-ui/rect": ["@radix-ui/rect@1.1.1", "", {}, ""], - - "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.43", "", {}, ""], - - "@rollup/pluginutils": ["@rollup/pluginutils@5.3.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-walker": "^2.0.2", "picomatch": "^4.0.2" }, "peerDependencies": { "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" } }, ""], - - "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.52.5", "", { "os": "android", "cpu": "arm" }, "sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ=="], - - "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.52.5", "", { "os": "android", "cpu": "arm64" }, "sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA=="], - - "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.52.5", "", { "os": "darwin", "cpu": "arm64" }, "sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA=="], - - "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.52.5", "", { "os": "darwin", "cpu": "x64" }, "sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA=="], - - "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.52.5", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA=="], - - "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.52.5", "", { "os": "freebsd", "cpu": "x64" }, "sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ=="], - - "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.52.5", "", { "os": "linux", "cpu": "arm" }, "sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ=="], - - "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.52.5", "", { 
"os": "linux", "cpu": "arm" }, "sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ=="], - - "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.52.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg=="], - - "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.52.5", "", { "os": "linux", "cpu": "arm64" }, "sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q=="], - - "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.52.5", "", { "os": "linux", "cpu": "none" }, "sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA=="], - - "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.52.5", "", { "os": "linux", "cpu": "ppc64" }, "sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw=="], - - "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.52.5", "", { "os": "linux", "cpu": "none" }, "sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw=="], - - "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.52.5", "", { "os": "linux", "cpu": "none" }, "sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg=="], - - "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.52.5", "", { "os": "linux", "cpu": "s390x" }, "sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ=="], - - "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.52.5", "", { "os": "linux", "cpu": "x64" }, ""], - - "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.52.5", "", { "os": "linux", "cpu": "x64" }, ""], - - "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.52.5", 
"", { "os": "none", "cpu": "arm64" }, "sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw=="], - - "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.52.5", "", { "os": "win32", "cpu": "arm64" }, "sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w=="], - - "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.52.5", "", { "os": "win32", "cpu": "ia32" }, "sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg=="], - - "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.52.5", "", { "os": "win32", "cpu": "x64" }, "sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ=="], - - "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.52.5", "", { "os": "win32", "cpu": "x64" }, "sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg=="], - - "@shikijs/core": ["@shikijs/core@1.29.2", "", { "dependencies": { "@shikijs/engine-javascript": "1.29.2", "@shikijs/engine-oniguruma": "1.29.2", "@shikijs/types": "1.29.2", "@shikijs/vscode-textmate": "^10.0.1", "@types/hast": "^3.0.4", "hast-util-to-html": "^9.0.4" } }, ""], - - "@shikijs/engine-javascript": ["@shikijs/engine-javascript@1.29.2", "", { "dependencies": { "@shikijs/types": "1.29.2", "@shikijs/vscode-textmate": "^10.0.1", "oniguruma-to-es": "^2.2.0" } }, ""], - - "@shikijs/engine-oniguruma": ["@shikijs/engine-oniguruma@1.29.2", "", { "dependencies": { "@shikijs/types": "1.29.2", "@shikijs/vscode-textmate": "^10.0.1" } }, ""], - - "@shikijs/langs": ["@shikijs/langs@1.29.2", "", { "dependencies": { "@shikijs/types": "1.29.2" } }, ""], - - "@shikijs/rehype": ["@shikijs/rehype@1.29.2", "", { "dependencies": { "@shikijs/types": "1.29.2", "@types/hast": "^3.0.4", "hast-util-to-string": "^3.0.1", "shiki": "1.29.2", "unified": "^11.0.5", "unist-util-visit": "^5.0.0" } 
}, ""], - - "@shikijs/themes": ["@shikijs/themes@1.29.2", "", { "dependencies": { "@shikijs/types": "1.29.2" } }, ""], - - "@shikijs/transformers": ["@shikijs/transformers@1.29.2", "", { "dependencies": { "@shikijs/core": "1.29.2", "@shikijs/types": "1.29.2" } }, ""], - - "@shikijs/twoslash": ["@shikijs/twoslash@1.29.2", "", { "dependencies": { "@shikijs/core": "1.29.2", "@shikijs/types": "1.29.2", "twoslash": "^0.2.12" } }, ""], - - "@shikijs/types": ["@shikijs/types@1.29.2", "", { "dependencies": { "@shikijs/vscode-textmate": "^10.0.1", "@types/hast": "^3.0.4" } }, ""], - - "@shikijs/vscode-textmate": ["@shikijs/vscode-textmate@10.0.2", "", {}, ""], - - "@standard-schema/spec": ["@standard-schema/spec@1.0.0", "", {}, ""], - - "@tailwindcss/node": ["@tailwindcss/node@4.1.15", "", { "dependencies": { "@jridgewell/remapping": "^2.3.4", "enhanced-resolve": "^5.18.3", "jiti": "^2.6.0", "lightningcss": "1.30.2", "magic-string": "^0.30.19", "source-map-js": "^1.2.1", "tailwindcss": "4.1.15" } }, ""], - - "@tailwindcss/oxide": ["@tailwindcss/oxide@4.1.15", "", { "optionalDependencies": { "@tailwindcss/oxide-android-arm64": "4.1.15", "@tailwindcss/oxide-darwin-arm64": "4.1.15", "@tailwindcss/oxide-darwin-x64": "4.1.15", "@tailwindcss/oxide-freebsd-x64": "4.1.15", "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.15", "@tailwindcss/oxide-linux-arm64-gnu": "4.1.15", "@tailwindcss/oxide-linux-arm64-musl": "4.1.15", "@tailwindcss/oxide-linux-x64-gnu": "4.1.15", "@tailwindcss/oxide-linux-x64-musl": "4.1.15", "@tailwindcss/oxide-wasm32-wasi": "4.1.15", "@tailwindcss/oxide-win32-arm64-msvc": "4.1.15", "@tailwindcss/oxide-win32-x64-msvc": "4.1.15" } }, ""], - - "@tailwindcss/oxide-android-arm64": ["@tailwindcss/oxide-android-arm64@4.1.15", "", { "os": "android", "cpu": "arm64" }, "sha512-TkUkUgAw8At4cBjCeVCRMc/guVLKOU1D+sBPrHt5uVcGhlbVKxrCaCW9OKUIBv1oWkjh4GbunD/u/Mf0ql6kEA=="], - - "@tailwindcss/oxide-darwin-arm64": ["@tailwindcss/oxide-darwin-arm64@4.1.15", "", { "os": "darwin", 
"cpu": "arm64" }, "sha512-xt5XEJpn2piMSfvd1UFN6jrWXyaKCwikP4Pidcf+yfHTSzSpYhG3dcMktjNkQO3JiLCp+0bG0HoWGvz97K162w=="], - - "@tailwindcss/oxide-darwin-x64": ["@tailwindcss/oxide-darwin-x64@4.1.15", "", { "os": "darwin", "cpu": "x64" }, "sha512-TnWaxP6Bx2CojZEXAV2M01Yl13nYPpp0EtGpUrY+LMciKfIXiLL2r/SiSRpagE5Fp2gX+rflp/Os1VJDAyqymg=="], - - "@tailwindcss/oxide-freebsd-x64": ["@tailwindcss/oxide-freebsd-x64@4.1.15", "", { "os": "freebsd", "cpu": "x64" }, "sha512-quISQDWqiB6Cqhjc3iWptXVZHNVENsWoI77L1qgGEHNIdLDLFnw3/AfY7DidAiiCIkGX/MjIdB3bbBZR/G2aJg=="], - - "@tailwindcss/oxide-linux-arm-gnueabihf": ["@tailwindcss/oxide-linux-arm-gnueabihf@4.1.15", "", { "os": "linux", "cpu": "arm" }, "sha512-ObG76+vPlab65xzVUQbExmDU9FIeYLQ5k2LrQdR2Ud6hboR+ZobXpDoKEYXf/uOezOfIYmy2Ta3w0ejkTg9yxg=="], - - "@tailwindcss/oxide-linux-arm64-gnu": ["@tailwindcss/oxide-linux-arm64-gnu@4.1.15", "", { "os": "linux", "cpu": "arm64" }, "sha512-4WbBacRmk43pkb8/xts3wnOZMDKsPFyEH/oisCm2q3aLZND25ufvJKcDUpAu0cS+CBOL05dYa8D4U5OWECuH/Q=="], - - "@tailwindcss/oxide-linux-arm64-musl": ["@tailwindcss/oxide-linux-arm64-musl@4.1.15", "", { "os": "linux", "cpu": "arm64" }, "sha512-AbvmEiteEj1nf42nE8skdHv73NoR+EwXVSgPY6l39X12Ex8pzOwwfi3Kc8GAmjsnsaDEbk+aj9NyL3UeyHcTLg=="], - - "@tailwindcss/oxide-linux-x64-gnu": ["@tailwindcss/oxide-linux-x64-gnu@4.1.15", "", { "os": "linux", "cpu": "x64" }, ""], - - "@tailwindcss/oxide-linux-x64-musl": ["@tailwindcss/oxide-linux-x64-musl@4.1.15", "", { "os": "linux", "cpu": "x64" }, ""], - - "@tailwindcss/oxide-wasm32-wasi": ["@tailwindcss/oxide-wasm32-wasi@4.1.15", "", { "cpu": "none" }, "sha512-sJ4yd6iXXdlgIMfIBXuVGp/NvmviEoMVWMOAGxtxhzLPp9LOj5k0pMEMZdjeMCl4C6Up+RM8T3Zgk+BMQ0bGcQ=="], - - "@tailwindcss/oxide-win32-arm64-msvc": ["@tailwindcss/oxide-win32-arm64-msvc@4.1.15", "", { "os": "win32", "cpu": "arm64" }, "sha512-sJGE5faXnNQ1iXeqmRin7Ds/ru2fgCiaQZQQz3ZGIDtvbkeV85rAZ0QJFMDg0FrqsffZG96H1U9AQlNBRLsHVg=="], - - "@tailwindcss/oxide-win32-x64-msvc": 
["@tailwindcss/oxide-win32-x64-msvc@4.1.15", "", { "os": "win32", "cpu": "x64" }, "sha512-NLeHE7jUV6HcFKS504bpOohyi01zPXi2PXmjFfkzTph8xRxDdxkRsXm/xDO5uV5K3brrE1cCwbUYmFUSHR3u1w=="], - - "@tailwindcss/vite": ["@tailwindcss/vite@4.1.15", "", { "dependencies": { "@tailwindcss/node": "4.1.15", "@tailwindcss/oxide": "4.1.15", "tailwindcss": "4.1.15" }, "peerDependencies": { "vite": "^5.2.0 || ^6 || ^7" } }, ""], - - "@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, ""], - - "@types/babel__generator": ["@types/babel__generator@7.27.0", "", { "dependencies": { "@babel/types": "^7.0.0" } }, ""], - - "@types/babel__template": ["@types/babel__template@7.4.4", "", { "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, ""], - - "@types/babel__traverse": ["@types/babel__traverse@7.28.0", "", { "dependencies": { "@babel/types": "^7.28.2" } }, ""], - - "@types/d3": ["@types/d3@7.4.3", "", { "dependencies": { "@types/d3-array": "*", "@types/d3-axis": "*", "@types/d3-brush": "*", "@types/d3-chord": "*", "@types/d3-color": "*", "@types/d3-contour": "*", "@types/d3-delaunay": "*", "@types/d3-dispatch": "*", "@types/d3-drag": "*", "@types/d3-dsv": "*", "@types/d3-ease": "*", "@types/d3-fetch": "*", "@types/d3-force": "*", "@types/d3-format": "*", "@types/d3-geo": "*", "@types/d3-hierarchy": "*", "@types/d3-interpolate": "*", "@types/d3-path": "*", "@types/d3-polygon": "*", "@types/d3-quadtree": "*", "@types/d3-random": "*", "@types/d3-scale": "*", "@types/d3-scale-chromatic": "*", "@types/d3-selection": "*", "@types/d3-shape": "*", "@types/d3-time": "*", "@types/d3-time-format": "*", "@types/d3-timer": "*", "@types/d3-transition": "*", "@types/d3-zoom": "*" } }, ""], - - "@types/d3-array": ["@types/d3-array@3.2.2", "", {}, ""], - - "@types/d3-axis": ["@types/d3-axis@3.0.6", "", { 
"dependencies": { "@types/d3-selection": "*" } }, ""], - - "@types/d3-brush": ["@types/d3-brush@3.0.6", "", { "dependencies": { "@types/d3-selection": "*" } }, ""], - - "@types/d3-chord": ["@types/d3-chord@3.0.6", "", {}, ""], - - "@types/d3-color": ["@types/d3-color@3.1.3", "", {}, ""], - - "@types/d3-contour": ["@types/d3-contour@3.0.6", "", { "dependencies": { "@types/d3-array": "*", "@types/geojson": "*" } }, ""], - - "@types/d3-delaunay": ["@types/d3-delaunay@6.0.4", "", {}, ""], - - "@types/d3-dispatch": ["@types/d3-dispatch@3.0.7", "", {}, ""], - - "@types/d3-drag": ["@types/d3-drag@3.0.7", "", { "dependencies": { "@types/d3-selection": "*" } }, ""], - - "@types/d3-dsv": ["@types/d3-dsv@3.0.7", "", {}, ""], - - "@types/d3-ease": ["@types/d3-ease@3.0.2", "", {}, ""], - - "@types/d3-fetch": ["@types/d3-fetch@3.0.7", "", { "dependencies": { "@types/d3-dsv": "*" } }, ""], - - "@types/d3-force": ["@types/d3-force@3.0.10", "", {}, ""], - - "@types/d3-format": ["@types/d3-format@3.0.4", "", {}, ""], - - "@types/d3-geo": ["@types/d3-geo@3.1.0", "", { "dependencies": { "@types/geojson": "*" } }, ""], - - "@types/d3-hierarchy": ["@types/d3-hierarchy@3.1.7", "", {}, ""], - - "@types/d3-interpolate": ["@types/d3-interpolate@3.0.4", "", { "dependencies": { "@types/d3-color": "*" } }, ""], - - "@types/d3-path": ["@types/d3-path@3.1.1", "", {}, ""], - - "@types/d3-polygon": ["@types/d3-polygon@3.0.2", "", {}, ""], - - "@types/d3-quadtree": ["@types/d3-quadtree@3.0.6", "", {}, ""], - - "@types/d3-random": ["@types/d3-random@3.0.3", "", {}, ""], - - "@types/d3-scale": ["@types/d3-scale@4.0.9", "", { "dependencies": { "@types/d3-time": "*" } }, ""], - - "@types/d3-scale-chromatic": ["@types/d3-scale-chromatic@3.1.0", "", {}, ""], - - "@types/d3-selection": ["@types/d3-selection@3.0.11", "", {}, ""], - - "@types/d3-shape": ["@types/d3-shape@3.1.7", "", { "dependencies": { "@types/d3-path": "*" } }, ""], - - "@types/d3-time": ["@types/d3-time@3.0.4", "", {}, ""], - - 
"@types/d3-time-format": ["@types/d3-time-format@4.0.3", "", {}, ""], - - "@types/d3-timer": ["@types/d3-timer@3.0.2", "", {}, ""], - - "@types/d3-transition": ["@types/d3-transition@3.0.9", "", { "dependencies": { "@types/d3-selection": "*" } }, ""], - - "@types/d3-zoom": ["@types/d3-zoom@3.0.8", "", { "dependencies": { "@types/d3-interpolate": "*", "@types/d3-selection": "*" } }, ""], - - "@types/debug": ["@types/debug@4.1.12", "", { "dependencies": { "@types/ms": "*" } }, ""], - - "@types/estree": ["@types/estree@1.0.8", "", {}, ""], - - "@types/estree-jsx": ["@types/estree-jsx@1.0.5", "", { "dependencies": { "@types/estree": "*" } }, ""], - - "@types/geojson": ["@types/geojson@7946.0.16", "", {}, ""], - - "@types/hast": ["@types/hast@3.0.4", "", { "dependencies": { "@types/unist": "*" } }, ""], - - "@types/mdast": ["@types/mdast@4.0.4", "", { "dependencies": { "@types/unist": "*" } }, ""], - - "@types/mdx": ["@types/mdx@2.0.13", "", {}, ""], - - "@types/ms": ["@types/ms@2.1.0", "", {}, ""], - - "@types/node": ["@types/node@24.9.2", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-uWN8YqxXxqFMX2RqGOrumsKeti4LlmIMIyV0lgut4jx7KQBcBiW6vkDtIBvHnHIquwNfJhk8v2OtmO8zXWHfPA=="], - - "@types/react": ["@types/react@19.2.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA=="], - - "@types/trusted-types": ["@types/trusted-types@2.0.7", "", {}, ""], - - "@types/unist": ["@types/unist@3.0.3", "", {}, ""], - - "@typescript/vfs": ["@typescript/vfs@1.6.2", "", { "dependencies": { "debug": "^4.1.1" }, "peerDependencies": { "typescript": "*" } }, ""], - - "@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, ""], - - "@vanilla-extract/babel-plugin-debug-ids": ["@vanilla-extract/babel-plugin-debug-ids@1.2.2", "", { "dependencies": { "@babel/core": "^7.23.9" } }, ""], - - "@vanilla-extract/compiler": ["@vanilla-extract/compiler@0.3.1", "", { "dependencies": { 
"@vanilla-extract/css": "^1.17.4", "@vanilla-extract/integration": "^8.0.4", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", "vite-node": "^3.2.2" } }, ""], - - "@vanilla-extract/css": ["@vanilla-extract/css@1.17.4", "", { "dependencies": { "@emotion/hash": "^0.9.0", "@vanilla-extract/private": "^1.0.9", "css-what": "^6.1.0", "cssesc": "^3.0.0", "csstype": "^3.0.7", "dedent": "^1.5.3", "deep-object-diff": "^1.1.9", "deepmerge": "^4.2.2", "lru-cache": "^10.4.3", "media-query-parser": "^2.0.2", "modern-ahocorasick": "^1.0.0", "picocolors": "^1.0.0" } }, ""], - - "@vanilla-extract/dynamic": ["@vanilla-extract/dynamic@2.1.5", "", { "dependencies": { "@vanilla-extract/private": "^1.0.9" } }, ""], - - "@vanilla-extract/integration": ["@vanilla-extract/integration@8.0.4", "", { "dependencies": { "@babel/core": "^7.23.9", "@babel/plugin-syntax-typescript": "^7.23.3", "@vanilla-extract/babel-plugin-debug-ids": "^1.2.2", "@vanilla-extract/css": "^1.17.4", "dedent": "^1.5.3", "esbuild": "npm:esbuild@>=0.17.6 <0.26.0", "eval": "0.1.8", "find-up": "^5.0.0", "javascript-stringify": "^2.0.1", "mlly": "^1.4.2" } }, ""], - - "@vanilla-extract/private": ["@vanilla-extract/private@1.0.9", "", {}, ""], - - "@vanilla-extract/vite-plugin": ["@vanilla-extract/vite-plugin@5.1.1", "", { "dependencies": { "@vanilla-extract/compiler": "^0.3.1", "@vanilla-extract/integration": "^8.0.4" }, "peerDependencies": { "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" } }, ""], - - "@vitejs/plugin-react": ["@vitejs/plugin-react@5.1.0", "", { "dependencies": { "@babel/core": "^7.28.4", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.43", "@types/babel__core": "^7.20.5", "react-refresh": "^0.18.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, ""], - - "acorn": ["acorn@8.15.0", "", { "bin": "bin/acorn" }, ""], - - "acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 
|| ^8.0.0" } }, ""], - - "ansi-regex": ["ansi-regex@6.2.2", "", {}, ""], - - "aria-hidden": ["aria-hidden@1.2.6", "", { "dependencies": { "tslib": "^2.0.0" } }, ""], - - "astring": ["astring@1.9.0", "", { "bin": "bin/astring" }, ""], - - "autoprefixer": ["autoprefixer@10.4.21", "", { "dependencies": { "browserslist": "^4.24.4", "caniuse-lite": "^1.0.30001702", "fraction.js": "^4.3.7", "normalize-range": "^0.1.2", "picocolors": "^1.1.1", "postcss-value-parser": "^4.2.0" }, "peerDependencies": { "postcss": "^8.1.0" }, "bin": "bin/autoprefixer" }, ""], - - "bail": ["bail@2.0.2", "", {}, ""], - - "base64-js": ["base64-js@1.5.1", "", {}, ""], - - "baseline-browser-mapping": ["baseline-browser-mapping@2.8.21", "", { "bin": "dist/cli.js" }, ""], - - "bcp-47-match": ["bcp-47-match@2.0.3", "", {}, ""], - - "bl": ["bl@5.1.0", "", { "dependencies": { "buffer": "^6.0.3", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, ""], - - "boolbase": ["boolbase@1.0.0", "", {}, ""], - - "browserslist": ["browserslist@4.27.0", "", { "dependencies": { "baseline-browser-mapping": "^2.8.19", "caniuse-lite": "^1.0.30001751", "electron-to-chromium": "^1.5.238", "node-releases": "^2.0.26", "update-browserslist-db": "^1.1.4" }, "bin": "cli.js" }, ""], - - "buffer": ["buffer@6.0.3", "", { "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" } }, ""], - - "bytes": ["bytes@3.1.2", "", {}, ""], - - "cac": ["cac@6.7.14", "", {}, ""], - - "caniuse-lite": ["caniuse-lite@1.0.30001751", "", {}, ""], - - "ccount": ["ccount@2.0.1", "", {}, ""], - - "chalk": ["chalk@5.6.2", "", {}, ""], - - "character-entities": ["character-entities@2.0.2", "", {}, ""], - - "character-entities-html4": ["character-entities-html4@2.1.0", "", {}, ""], - - "character-entities-legacy": ["character-entities-legacy@3.0.0", "", {}, ""], - - "character-reference-invalid": ["character-reference-invalid@2.0.1", "", {}, ""], - - "chevrotain": ["chevrotain@11.0.3", "", { "dependencies": { "@chevrotain/cst-dts-gen": 
"11.0.3", "@chevrotain/gast": "11.0.3", "@chevrotain/regexp-to-ast": "11.0.3", "@chevrotain/types": "11.0.3", "@chevrotain/utils": "11.0.3", "lodash-es": "4.17.21" } }, ""], - - "chevrotain-allstar": ["chevrotain-allstar@0.3.1", "", { "dependencies": { "lodash-es": "^4.17.21" }, "peerDependencies": { "chevrotain": "^11.0.0" } }, ""], - - "chroma-js": ["chroma-js@3.1.2", "", {}, ""], - - "cli-cursor": ["cli-cursor@4.0.0", "", { "dependencies": { "restore-cursor": "^4.0.0" } }, ""], - - "cli-spinners": ["cli-spinners@2.9.2", "", {}, ""], - - "clsx": ["clsx@2.1.1", "", {}, ""], - - "collapse-white-space": ["collapse-white-space@2.1.0", "", {}, ""], - - "comma-separated-tokens": ["comma-separated-tokens@2.0.3", "", {}, ""], - - "commander": ["commander@8.3.0", "", {}, ""], - - "compressible": ["compressible@2.0.18", "", { "dependencies": { "mime-db": ">= 1.43.0 < 2" } }, ""], - - "compression": ["compression@1.8.1", "", { "dependencies": { "bytes": "3.1.2", "compressible": "~2.0.18", "debug": "2.6.9", "negotiator": "~0.6.4", "on-headers": "~1.1.0", "safe-buffer": "5.2.1", "vary": "~1.1.2" } }, ""], - - "confbox": ["confbox@0.1.8", "", {}, ""], - - "convert-source-map": ["convert-source-map@2.0.0", "", {}, ""], - - "cookie": ["cookie@1.0.2", "", {}, ""], - - "cose-base": ["cose-base@1.0.3", "", { "dependencies": { "layout-base": "^1.0.0" } }, ""], - - "create-vocs": ["create-vocs@1.0.0", "", { "dependencies": { "@clack/prompts": "^0.7.0", "cac": "^6.7.14", "detect-package-manager": "^3.0.2", "fs-extra": "^11.3.0", "picocolors": "^1.1.1" }, "bin": "_lib/bin.js" }, ""], - - "cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, ""], - - "css-selector-parser": ["css-selector-parser@3.1.3", "", {}, ""], - - "css-what": ["css-what@6.2.2", "", {}, ""], - - "cssesc": ["cssesc@3.0.0", "", { "bin": "bin/cssesc" }, ""], - - "csstype": ["csstype@3.1.3", "", {}, ""], - - "cytoscape": 
["cytoscape@3.33.1", "", {}, ""], - - "cytoscape-cose-bilkent": ["cytoscape-cose-bilkent@4.1.0", "", { "dependencies": { "cose-base": "^1.0.0" }, "peerDependencies": { "cytoscape": "^3.2.0" } }, ""], - - "cytoscape-fcose": ["cytoscape-fcose@2.2.0", "", { "dependencies": { "cose-base": "^2.2.0" }, "peerDependencies": { "cytoscape": "^3.2.0" } }, ""], - - "d3": ["d3@7.9.0", "", { "dependencies": { "d3-array": "3", "d3-axis": "3", "d3-brush": "3", "d3-chord": "3", "d3-color": "3", "d3-contour": "4", "d3-delaunay": "6", "d3-dispatch": "3", "d3-drag": "3", "d3-dsv": "3", "d3-ease": "3", "d3-fetch": "3", "d3-force": "3", "d3-format": "3", "d3-geo": "3", "d3-hierarchy": "3", "d3-interpolate": "3", "d3-path": "3", "d3-polygon": "3", "d3-quadtree": "3", "d3-random": "3", "d3-scale": "4", "d3-scale-chromatic": "3", "d3-selection": "3", "d3-shape": "3", "d3-time": "3", "d3-time-format": "4", "d3-timer": "3", "d3-transition": "3", "d3-zoom": "3" } }, ""], - - "d3-array": ["d3-array@3.2.4", "", { "dependencies": { "internmap": "1 - 2" } }, ""], - - "d3-axis": ["d3-axis@3.0.0", "", {}, ""], - - "d3-brush": ["d3-brush@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-drag": "2 - 3", "d3-interpolate": "1 - 3", "d3-selection": "3", "d3-transition": "3" } }, ""], - - "d3-chord": ["d3-chord@3.0.1", "", { "dependencies": { "d3-path": "1 - 3" } }, ""], - - "d3-color": ["d3-color@3.1.0", "", {}, ""], - - "d3-contour": ["d3-contour@4.0.2", "", { "dependencies": { "d3-array": "^3.2.0" } }, ""], - - "d3-delaunay": ["d3-delaunay@6.0.4", "", { "dependencies": { "delaunator": "5" } }, ""], - - "d3-dispatch": ["d3-dispatch@3.0.1", "", {}, ""], - - "d3-drag": ["d3-drag@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-selection": "3" } }, ""], - - "d3-dsv": ["d3-dsv@3.0.1", "", { "dependencies": { "commander": "7", "iconv-lite": "0.6", "rw": "1" }, "bin": { "csv2json": "bin/dsv2json.js", "csv2tsv": "bin/dsv2dsv.js", "dsv2dsv": "bin/dsv2dsv.js", "dsv2json": "bin/dsv2json.js", 
"json2csv": "bin/json2dsv.js", "json2dsv": "bin/json2dsv.js", "json2tsv": "bin/json2dsv.js", "tsv2csv": "bin/dsv2dsv.js", "tsv2json": "bin/dsv2json.js" } }, ""], - - "d3-ease": ["d3-ease@3.0.1", "", {}, ""], - - "d3-fetch": ["d3-fetch@3.0.1", "", { "dependencies": { "d3-dsv": "1 - 3" } }, ""], - - "d3-force": ["d3-force@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-quadtree": "1 - 3", "d3-timer": "1 - 3" } }, ""], - - "d3-format": ["d3-format@3.1.0", "", {}, ""], - - "d3-geo": ["d3-geo@3.1.1", "", { "dependencies": { "d3-array": "2.5.0 - 3" } }, ""], - - "d3-hierarchy": ["d3-hierarchy@3.1.2", "", {}, ""], - - "d3-interpolate": ["d3-interpolate@3.0.1", "", { "dependencies": { "d3-color": "1 - 3" } }, ""], - - "d3-path": ["d3-path@3.1.0", "", {}, ""], - - "d3-polygon": ["d3-polygon@3.0.1", "", {}, ""], - - "d3-quadtree": ["d3-quadtree@3.0.1", "", {}, ""], - - "d3-random": ["d3-random@3.0.1", "", {}, ""], - - "d3-sankey": ["d3-sankey@0.12.3", "", { "dependencies": { "d3-array": "1 - 2", "d3-shape": "^1.2.0" } }, ""], - - "d3-scale": ["d3-scale@4.0.2", "", { "dependencies": { "d3-array": "2.10.0 - 3", "d3-format": "1 - 3", "d3-interpolate": "1.2.0 - 3", "d3-time": "2.1.1 - 3", "d3-time-format": "2 - 4" } }, ""], - - "d3-scale-chromatic": ["d3-scale-chromatic@3.1.0", "", { "dependencies": { "d3-color": "1 - 3", "d3-interpolate": "1 - 3" } }, ""], - - "d3-selection": ["d3-selection@3.0.0", "", {}, ""], - - "d3-shape": ["d3-shape@3.2.0", "", { "dependencies": { "d3-path": "^3.1.0" } }, ""], - - "d3-time": ["d3-time@3.1.0", "", { "dependencies": { "d3-array": "2 - 3" } }, ""], - - "d3-time-format": ["d3-time-format@4.1.0", "", { "dependencies": { "d3-time": "1 - 3" } }, ""], - - "d3-timer": ["d3-timer@3.0.1", "", {}, ""], - - "d3-transition": ["d3-transition@3.0.1", "", { "dependencies": { "d3-color": "1 - 3", "d3-dispatch": "1 - 3", "d3-ease": "1 - 3", "d3-interpolate": "1 - 3", "d3-timer": "1 - 3" }, "peerDependencies": { "d3-selection": "2 - 3" } }, ""], - 
- "d3-zoom": ["d3-zoom@3.0.0", "", { "dependencies": { "d3-dispatch": "1 - 3", "d3-drag": "2 - 3", "d3-interpolate": "1 - 3", "d3-selection": "2 - 3", "d3-transition": "2 - 3" } }, ""], - - "dagre-d3-es": ["dagre-d3-es@7.0.13", "", { "dependencies": { "d3": "^7.9.0", "lodash-es": "^4.17.21" } }, ""], - - "dayjs": ["dayjs@1.11.18", "", {}, ""], - - "debug": ["debug@2.6.9", "", { "dependencies": { "ms": "2.0.0" } }, ""], - - "decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, ""], - - "dedent": ["dedent@1.7.0", "", { "peerDependencies": { "babel-plugin-macros": "^3.1.0" }, "optionalPeers": ["babel-plugin-macros"] }, ""], - - "deep-object-diff": ["deep-object-diff@1.1.9", "", {}, ""], - - "deepmerge": ["deepmerge@4.3.1", "", {}, ""], - - "delaunator": ["delaunator@5.0.1", "", { "dependencies": { "robust-predicates": "^3.0.2" } }, ""], - - "depd": ["depd@2.0.0", "", {}, ""], - - "dequal": ["dequal@2.0.3", "", {}, ""], - - "destroy": ["destroy@1.2.0", "", {}, ""], - - "detect-libc": ["detect-libc@2.1.2", "", {}, ""], - - "detect-node-es": ["detect-node-es@1.1.0", "", {}, ""], - - "detect-package-manager": ["detect-package-manager@3.0.2", "", { "dependencies": { "execa": "^5.1.1" } }, ""], - - "devlop": ["devlop@1.1.0", "", { "dependencies": { "dequal": "^2.0.0" } }, ""], - - "direction": ["direction@2.0.1", "", { "bin": "cli.js" }, ""], - - "dompurify": ["dompurify@3.3.0", "", { "optionalDependencies": { "@types/trusted-types": "^2.0.7" } }, ""], - - "eastasianwidth": ["eastasianwidth@0.2.0", "", {}, ""], - - "ee-first": ["ee-first@1.1.1", "", {}, ""], - - "electron-to-chromium": ["electron-to-chromium@1.5.243", "", {}, ""], - - "emoji-regex": ["emoji-regex@10.6.0", "", {}, ""], - - "emoji-regex-xs": ["emoji-regex-xs@1.0.0", "", {}, ""], - - "encodeurl": ["encodeurl@2.0.0", "", {}, ""], - - "enhanced-resolve": ["enhanced-resolve@5.18.3", "", { "dependencies": { "graceful-fs": 
"^4.2.4", "tapable": "^2.2.0" } }, ""], - - "entities": ["entities@6.0.1", "", {}, ""], - - "es-module-lexer": ["es-module-lexer@1.7.0", "", {}, ""], - - "esast-util-from-estree": ["esast-util-from-estree@2.0.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "devlop": "^1.0.0", "estree-util-visit": "^2.0.0", "unist-util-position-from-estree": "^2.0.0" } }, ""], - - "esast-util-from-js": ["esast-util-from-js@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "acorn": "^8.0.0", "esast-util-from-estree": "^2.0.0", "vfile-message": "^4.0.0" } }, ""], - - "esbuild": ["esbuild@0.25.11", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.25.11", "@esbuild/android-arm": "0.25.11", "@esbuild/android-arm64": "0.25.11", "@esbuild/android-x64": "0.25.11", "@esbuild/darwin-arm64": "0.25.11", "@esbuild/darwin-x64": "0.25.11", "@esbuild/freebsd-arm64": "0.25.11", "@esbuild/freebsd-x64": "0.25.11", "@esbuild/linux-arm": "0.25.11", "@esbuild/linux-arm64": "0.25.11", "@esbuild/linux-ia32": "0.25.11", "@esbuild/linux-loong64": "0.25.11", "@esbuild/linux-mips64el": "0.25.11", "@esbuild/linux-ppc64": "0.25.11", "@esbuild/linux-riscv64": "0.25.11", "@esbuild/linux-s390x": "0.25.11", "@esbuild/linux-x64": "0.25.11", "@esbuild/netbsd-arm64": "0.25.11", "@esbuild/netbsd-x64": "0.25.11", "@esbuild/openbsd-arm64": "0.25.11", "@esbuild/openbsd-x64": "0.25.11", "@esbuild/openharmony-arm64": "0.25.11", "@esbuild/sunos-x64": "0.25.11", "@esbuild/win32-arm64": "0.25.11", "@esbuild/win32-ia32": "0.25.11", "@esbuild/win32-x64": "0.25.11" }, "bin": "bin/esbuild" }, ""], - - "escalade": ["escalade@3.2.0", "", {}, ""], - - "escape-html": ["escape-html@1.0.3", "", {}, ""], - - "escape-string-regexp": ["escape-string-regexp@5.0.0", "", {}, ""], - - "estree-util-attach-comments": ["estree-util-attach-comments@3.0.0", "", { "dependencies": { "@types/estree": "^1.0.0" } }, ""], - - "estree-util-build-jsx": ["estree-util-build-jsx@3.0.1", "", { "dependencies": { 
"@types/estree-jsx": "^1.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "estree-walker": "^3.0.0" } }, ""], - - "estree-util-is-identifier-name": ["estree-util-is-identifier-name@3.0.0", "", {}, ""], - - "estree-util-scope": ["estree-util-scope@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "devlop": "^1.0.0" } }, ""], - - "estree-util-to-js": ["estree-util-to-js@2.0.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "astring": "^1.8.0", "source-map": "^0.7.0" } }, ""], - - "estree-util-value-to-estree": ["estree-util-value-to-estree@3.5.0", "", { "dependencies": { "@types/estree": "^1.0.0" } }, ""], - - "estree-util-visit": ["estree-util-visit@2.0.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/unist": "^3.0.0" } }, ""], - - "estree-walker": ["estree-walker@3.0.3", "", { "dependencies": { "@types/estree": "^1.0.0" } }, ""], - - "etag": ["etag@1.8.1", "", {}, ""], - - "eval": ["eval@0.1.8", "", { "dependencies": { "@types/node": "*", "require-like": ">= 0.1.1" } }, ""], - - "execa": ["execa@5.1.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" } }, ""], - - "exsolve": ["exsolve@1.0.7", "", {}, ""], - - "extend": ["extend@3.0.2", "", {}, ""], - - "fault": ["fault@2.0.1", "", { "dependencies": { "format": "^0.2.0" } }, ""], - - "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" } }, ""], - - "find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, ""], - - "format": ["format@0.2.2", "", {}, ""], - - "fraction.js": ["fraction.js@4.3.7", "", {}, ""], - - "fresh": ["fresh@0.5.2", "", {}, ""], - - "fs-extra": ["fs-extra@11.3.2", "", { "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, ""], - 
- "fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="], - - "gensync": ["gensync@1.0.0-beta.2", "", {}, ""], - - "get-nonce": ["get-nonce@1.0.1", "", {}, ""], - - "get-stream": ["get-stream@6.0.1", "", {}, ""], - - "github-slugger": ["github-slugger@2.0.0", "", {}, ""], - - "globals": ["globals@15.15.0", "", {}, ""], - - "graceful-fs": ["graceful-fs@4.2.11", "", {}, ""], - - "hachure-fill": ["hachure-fill@0.5.2", "", {}, ""], - - "hast-util-classnames": ["hast-util-classnames@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "space-separated-tokens": "^2.0.0" } }, ""], - - "hast-util-from-dom": ["hast-util-from-dom@5.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "hastscript": "^9.0.0", "web-namespaces": "^2.0.0" } }, ""], - - "hast-util-from-html": ["hast-util-from-html@2.0.3", "", { "dependencies": { "@types/hast": "^3.0.0", "devlop": "^1.1.0", "hast-util-from-parse5": "^8.0.0", "parse5": "^7.0.0", "vfile": "^6.0.0", "vfile-message": "^4.0.0" } }, ""], - - "hast-util-from-html-isomorphic": ["hast-util-from-html-isomorphic@2.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-from-dom": "^5.0.0", "hast-util-from-html": "^2.0.0", "unist-util-remove-position": "^5.0.0" } }, ""], - - "hast-util-from-parse5": ["hast-util-from-parse5@8.0.3", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "devlop": "^1.0.0", "hastscript": "^9.0.0", "property-information": "^7.0.0", "vfile": "^6.0.0", "vfile-location": "^5.0.0", "web-namespaces": "^2.0.0" } }, ""], - - "hast-util-has-property": ["hast-util-has-property@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - "hast-util-heading-rank": ["hast-util-heading-rank@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - "hast-util-is-element": ["hast-util-is-element@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - 
"hast-util-parse-selector": ["hast-util-parse-selector@4.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - "hast-util-select": ["hast-util-select@6.0.4", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "bcp-47-match": "^2.0.0", "comma-separated-tokens": "^2.0.0", "css-selector-parser": "^3.0.0", "devlop": "^1.0.0", "direction": "^2.0.0", "hast-util-has-property": "^3.0.0", "hast-util-to-string": "^3.0.0", "hast-util-whitespace": "^3.0.0", "nth-check": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, ""], - - "hast-util-to-estree": ["hast-util-to-estree@3.1.3", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-attach-comments": "^3.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "zwitch": "^2.0.0" } }, ""], - - "hast-util-to-html": ["hast-util-to-html@9.0.5", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-whitespace": "^3.0.0", "html-void-elements": "^3.0.0", "mdast-util-to-hast": "^13.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "stringify-entities": "^4.0.0", "zwitch": "^2.0.4" } }, ""], - - "hast-util-to-jsx-runtime": ["hast-util-to-jsx-runtime@2.3.6", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "comma-separated-tokens": "^2.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "hast-util-whitespace": "^3.0.0", "mdast-util-mdx-expression": "^2.0.0", 
"mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0", "style-to-js": "^1.0.0", "unist-util-position": "^5.0.0", "vfile-message": "^4.0.0" } }, ""], - - "hast-util-to-string": ["hast-util-to-string@3.0.1", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - "hast-util-to-text": ["hast-util-to-text@4.0.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/unist": "^3.0.0", "hast-util-is-element": "^3.0.0", "unist-util-find-after": "^5.0.0" } }, ""], - - "hast-util-whitespace": ["hast-util-whitespace@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0" } }, ""], - - "hastscript": ["hastscript@8.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^6.0.0", "space-separated-tokens": "^2.0.0" } }, ""], - - "hono": ["hono@4.10.3", "", {}, ""], - - "html-void-elements": ["html-void-elements@3.0.0", "", {}, ""], - - "http-errors": ["http-errors@2.0.0", "", { "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": "2.0.1", "toidentifier": "1.0.1" } }, ""], - - "human-signals": ["human-signals@2.1.0", "", {}, ""], - - "iconv-lite": ["iconv-lite@0.6.3", "", { "dependencies": { "safer-buffer": ">= 2.1.2 < 3.0.0" } }, ""], - - "ieee754": ["ieee754@1.2.1", "", {}, ""], - - "inherits": ["inherits@2.0.4", "", {}, ""], - - "inline-style-parser": ["inline-style-parser@0.2.4", "", {}, ""], - - "internmap": ["internmap@1.0.1", "", {}, ""], - - "is-alphabetical": ["is-alphabetical@2.0.1", "", {}, ""], - - "is-alphanumerical": ["is-alphanumerical@2.0.1", "", { "dependencies": { "is-alphabetical": "^2.0.0", "is-decimal": "^2.0.0" } }, ""], - - "is-decimal": ["is-decimal@2.0.1", "", {}, ""], - - "is-hexadecimal": ["is-hexadecimal@2.0.1", "", {}, ""], - - "is-interactive": ["is-interactive@2.0.0", "", {}, ""], - - "is-plain-obj": ["is-plain-obj@4.1.0", 
"", {}, ""], - - "is-stream": ["is-stream@2.0.1", "", {}, ""], - - "is-unicode-supported": ["is-unicode-supported@1.3.0", "", {}, ""], - - "isexe": ["isexe@2.0.0", "", {}, ""], - - "javascript-stringify": ["javascript-stringify@2.1.0", "", {}, ""], - - "jiti": ["jiti@2.6.1", "", { "bin": "lib/jiti-cli.mjs" }, ""], - - "js-tokens": ["js-tokens@4.0.0", "", {}, ""], - - "jsesc": ["jsesc@3.1.0", "", { "bin": "bin/jsesc" }, ""], - - "json5": ["json5@2.2.3", "", { "bin": "lib/cli.js" }, ""], - - "jsonfile": ["jsonfile@6.2.0", "", { "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, ""], - - "katex": ["katex@0.16.25", "", { "dependencies": { "commander": "^8.3.0" }, "bin": "cli.js" }, ""], - - "khroma": ["khroma@2.1.0", "", {}, ""], - - "kolorist": ["kolorist@1.8.0", "", {}, ""], - - "langium": ["langium@3.3.1", "", { "dependencies": { "chevrotain": "~11.0.3", "chevrotain-allstar": "~0.3.0", "vscode-languageserver": "~9.0.1", "vscode-languageserver-textdocument": "~1.0.11", "vscode-uri": "~3.0.8" } }, ""], - - "layout-base": ["layout-base@1.0.2", "", {}, ""], - - "lightningcss": ["lightningcss@1.30.2", "", { "dependencies": { "detect-libc": "^2.0.3" }, "optionalDependencies": { "lightningcss-android-arm64": "1.30.2", "lightningcss-darwin-arm64": "1.30.2", "lightningcss-darwin-x64": "1.30.2", "lightningcss-freebsd-x64": "1.30.2", "lightningcss-linux-arm-gnueabihf": "1.30.2", "lightningcss-linux-arm64-gnu": "1.30.2", "lightningcss-linux-arm64-musl": "1.30.2", "lightningcss-linux-x64-gnu": "1.30.2", "lightningcss-linux-x64-musl": "1.30.2", "lightningcss-win32-arm64-msvc": "1.30.2", "lightningcss-win32-x64-msvc": "1.30.2" } }, ""], - - "lightningcss-android-arm64": ["lightningcss-android-arm64@1.30.2", "", { "os": "android", "cpu": "arm64" }, "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A=="], - - "lightningcss-darwin-arm64": ["lightningcss-darwin-arm64@1.30.2", "", { "os": 
"darwin", "cpu": "arm64" }, "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA=="], - - "lightningcss-darwin-x64": ["lightningcss-darwin-x64@1.30.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ=="], - - "lightningcss-freebsd-x64": ["lightningcss-freebsd-x64@1.30.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA=="], - - "lightningcss-linux-arm-gnueabihf": ["lightningcss-linux-arm-gnueabihf@1.30.2", "", { "os": "linux", "cpu": "arm" }, "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA=="], - - "lightningcss-linux-arm64-gnu": ["lightningcss-linux-arm64-gnu@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A=="], - - "lightningcss-linux-arm64-musl": ["lightningcss-linux-arm64-musl@1.30.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA=="], - - "lightningcss-linux-x64-gnu": ["lightningcss-linux-x64-gnu@1.30.2", "", { "os": "linux", "cpu": "x64" }, ""], - - "lightningcss-linux-x64-musl": ["lightningcss-linux-x64-musl@1.30.2", "", { "os": "linux", "cpu": "x64" }, ""], - - "lightningcss-win32-arm64-msvc": ["lightningcss-win32-arm64-msvc@1.30.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ=="], - - "lightningcss-win32-x64-msvc": ["lightningcss-win32-x64-msvc@1.30.2", "", { "os": "win32", "cpu": "x64" }, "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw=="], - - "local-pkg": ["local-pkg@1.1.2", "", { "dependencies": { "mlly": "^1.7.4", "pkg-types": "^2.3.0", "quansync": "^0.2.11" } }, ""], - - 
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, ""], - - "lodash-es": ["lodash-es@4.17.21", "", {}, ""], - - "log-symbols": ["log-symbols@5.1.0", "", { "dependencies": { "chalk": "^5.0.0", "is-unicode-supported": "^1.1.0" } }, ""], - - "longest-streak": ["longest-streak@3.1.0", "", {}, ""], - - "lru-cache": ["lru-cache@10.4.3", "", {}, ""], - - "magic-string": ["magic-string@0.30.21", "", { "dependencies": { "@jridgewell/sourcemap-codec": "^1.5.5" } }, ""], - - "mark.js": ["mark.js@8.11.1", "", {}, ""], - - "markdown-extensions": ["markdown-extensions@2.0.0", "", {}, ""], - - "markdown-table": ["markdown-table@3.0.4", "", {}, ""], - - "marked": ["marked@16.4.1", "", { "bin": "bin/marked.js" }, ""], - - "mdast-util-directive": ["mdast-util-directive@3.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-visit-parents": "^6.0.0" } }, ""], - - "mdast-util-find-and-replace": ["mdast-util-find-and-replace@3.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "escape-string-regexp": "^5.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, ""], - - "mdast-util-from-markdown": ["mdast-util-from-markdown@2.0.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "mdast-util-to-string": "^4.0.0", "micromark": "^4.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-stringify-position": "^4.0.0" } }, ""], - - "mdast-util-frontmatter": ["mdast-util-frontmatter@2.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": 
"^1.0.0", "escape-string-regexp": "^5.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "micromark-extension-frontmatter": "^2.0.0" } }, ""], - - "mdast-util-gfm": ["mdast-util-gfm@3.1.0", "", { "dependencies": { "mdast-util-from-markdown": "^2.0.0", "mdast-util-gfm-autolink-literal": "^2.0.0", "mdast-util-gfm-footnote": "^2.0.0", "mdast-util-gfm-strikethrough": "^2.0.0", "mdast-util-gfm-table": "^2.0.0", "mdast-util-gfm-task-list-item": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-gfm-autolink-literal": ["mdast-util-gfm-autolink-literal@2.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "ccount": "^2.0.0", "devlop": "^1.0.0", "mdast-util-find-and-replace": "^3.0.0", "micromark-util-character": "^2.0.0" } }, ""], - - "mdast-util-gfm-footnote": ["mdast-util-gfm-footnote@2.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0" } }, ""], - - "mdast-util-gfm-strikethrough": ["mdast-util-gfm-strikethrough@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-gfm-table": ["mdast-util-gfm-table@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "markdown-table": "^3.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-gfm-task-list-item": ["mdast-util-gfm-task-list-item@2.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-mdx": ["mdast-util-mdx@3.0.0", "", { "dependencies": { "mdast-util-from-markdown": "^2.0.0", "mdast-util-mdx-expression": "^2.0.0", "mdast-util-mdx-jsx": "^3.0.0", "mdast-util-mdxjs-esm": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - 
"mdast-util-mdx-expression": ["mdast-util-mdx-expression@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-mdx-jsx": ["mdast-util-mdx-jsx@3.2.0", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "ccount": "^2.0.0", "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", "unist-util-stringify-position": "^4.0.0", "vfile-message": "^4.0.0" } }, ""], - - "mdast-util-mdxjs-esm": ["mdast-util-mdxjs-esm@2.0.1", "", { "dependencies": { "@types/estree-jsx": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "devlop": "^1.0.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0" } }, ""], - - "mdast-util-phrasing": ["mdast-util-phrasing@4.1.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "unist-util-is": "^6.0.0" } }, ""], - - "mdast-util-to-hast": ["mdast-util-to-hast@13.2.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", "micromark-util-sanitize-uri": "^2.0.0", "trim-lines": "^3.0.0", "unist-util-position": "^5.0.0", "unist-util-visit": "^5.0.0", "vfile": "^6.0.0" } }, ""], - - "mdast-util-to-markdown": ["mdast-util-to-markdown@2.1.2", "", { "dependencies": { "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", "longest-streak": "^3.0.0", "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-decode-string": "^2.0.0", "unist-util-visit": "^5.0.0", "zwitch": "^2.0.0" } }, ""], - - "mdast-util-to-string": ["mdast-util-to-string@4.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0" } }, ""], - - "media-query-parser": 
["media-query-parser@2.0.2", "", { "dependencies": { "@babel/runtime": "^7.12.5" } }, ""], - - "merge-stream": ["merge-stream@2.0.0", "", {}, ""], - - "mermaid": ["mermaid@11.12.1", "", { "dependencies": { "@braintree/sanitize-url": "^7.1.1", "@iconify/utils": "^3.0.1", "@mermaid-js/parser": "^0.6.3", "@types/d3": "^7.4.3", "cytoscape": "^3.29.3", "cytoscape-cose-bilkent": "^4.1.0", "cytoscape-fcose": "^2.2.0", "d3": "^7.9.0", "d3-sankey": "^0.12.3", "dagre-d3-es": "7.0.13", "dayjs": "^1.11.18", "dompurify": "^3.2.5", "katex": "^0.16.22", "khroma": "^2.1.0", "lodash-es": "^4.17.21", "marked": "^16.2.1", "roughjs": "^4.6.6", "stylis": "^4.3.6", "ts-dedent": "^2.2.0", "uuid": "^11.1.0" } }, ""], - - "mermaid-isomorphic": ["mermaid-isomorphic@3.0.4", "", { "dependencies": { "@fortawesome/fontawesome-free": "^6.0.0", "mermaid": "^11.0.0" }, "peerDependencies": { "playwright": "1" } }, ""], - - "micromark": ["micromark@4.0.2", "", { "dependencies": { "@types/debug": "^4.0.0", "debug": "^4.0.0", "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-core-commonmark": ["micromark-core-commonmark@2.0.3", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-destination": "^2.0.0", "micromark-factory-label": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-title": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": 
"^2.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-html-tag-name": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-subtokenize": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-directive": ["micromark-extension-directive@3.0.2", "", { "dependencies": { "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-factory-whitespace": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "parse-entities": "^4.0.0" } }, ""], - - "micromark-extension-frontmatter": ["micromark-extension-frontmatter@2.0.0", "", { "dependencies": { "fault": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm": ["micromark-extension-gfm@3.0.0", "", { "dependencies": { "micromark-extension-gfm-autolink-literal": "^2.0.0", "micromark-extension-gfm-footnote": "^2.0.0", "micromark-extension-gfm-strikethrough": "^2.0.0", "micromark-extension-gfm-table": "^2.0.0", "micromark-extension-gfm-tagfilter": "^2.0.0", "micromark-extension-gfm-task-list-item": "^2.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-autolink-literal": ["micromark-extension-gfm-autolink-literal@2.1.0", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-sanitize-uri": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-footnote": ["micromark-extension-gfm-footnote@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-normalize-identifier": "^2.0.0", "micromark-util-sanitize-uri": 
"^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-strikethrough": ["micromark-extension-gfm-strikethrough@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-classify-character": "^2.0.0", "micromark-util-resolve-all": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-table": ["micromark-extension-gfm-table@2.1.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-tagfilter": ["micromark-extension-gfm-tagfilter@2.0.0", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-gfm-task-list-item": ["micromark-extension-gfm-task-list-item@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-mdx-expression": ["micromark-extension-mdx-expression@3.0.1", "", { "dependencies": { "@types/estree": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-mdx-expression": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-events-to-acorn": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-mdx-jsx": ["micromark-extension-mdx-jsx@3.0.2", "", { "dependencies": { "@types/estree": "^1.0.0", "devlop": "^1.0.0", "estree-util-is-identifier-name": "^3.0.0", "micromark-factory-mdx-expression": "^2.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-events-to-acorn": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "vfile-message": "^4.0.0" } }, 
""], - - "micromark-extension-mdx-md": ["micromark-extension-mdx-md@2.0.0", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-mdxjs": ["micromark-extension-mdxjs@3.0.0", "", { "dependencies": { "acorn": "^8.0.0", "acorn-jsx": "^5.0.0", "micromark-extension-mdx-expression": "^3.0.0", "micromark-extension-mdx-jsx": "^3.0.0", "micromark-extension-mdx-md": "^2.0.0", "micromark-extension-mdxjs-esm": "^3.0.0", "micromark-util-combine-extensions": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-extension-mdxjs-esm": ["micromark-extension-mdxjs-esm@3.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "devlop": "^1.0.0", "micromark-core-commonmark": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-events-to-acorn": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-position-from-estree": "^2.0.0", "vfile-message": "^4.0.0" } }, ""], - - "micromark-factory-destination": ["micromark-factory-destination@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-factory-label": ["micromark-factory-label@2.0.1", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-factory-mdx-expression": ["micromark-factory-mdx-expression@2.0.3", "", { "dependencies": { "@types/estree": "^1.0.0", "devlop": "^1.0.0", "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-events-to-acorn": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0", "unist-util-position-from-estree": "^2.0.0", "vfile-message": "^4.0.0" } }, ""], - - "micromark-factory-space": ["micromark-factory-space@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - 
"micromark-factory-title": ["micromark-factory-title@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-factory-whitespace": ["micromark-factory-whitespace@2.0.1", "", { "dependencies": { "micromark-factory-space": "^2.0.0", "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-character": ["micromark-util-character@2.1.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-chunked": ["micromark-util-chunked@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, ""], - - "micromark-util-classify-character": ["micromark-util-classify-character@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-combine-extensions": ["micromark-util-combine-extensions@2.0.1", "", { "dependencies": { "micromark-util-chunked": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-decode-numeric-character-reference": ["micromark-util-decode-numeric-character-reference@2.0.2", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, ""], - - "micromark-util-decode-string": ["micromark-util-decode-string@2.0.1", "", { "dependencies": { "decode-named-character-reference": "^1.0.0", "micromark-util-character": "^2.0.0", "micromark-util-decode-numeric-character-reference": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, ""], - - "micromark-util-encode": ["micromark-util-encode@2.0.1", "", {}, ""], - - "micromark-util-events-to-acorn": ["micromark-util-events-to-acorn@2.0.3", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/unist": "^3.0.0", "devlop": "^1.0.0", "estree-util-visit": "^2.0.0", "micromark-util-symbol": "^2.0.0", 
"micromark-util-types": "^2.0.0", "vfile-message": "^4.0.0" } }, ""], - - "micromark-util-html-tag-name": ["micromark-util-html-tag-name@2.0.1", "", {}, ""], - - "micromark-util-normalize-identifier": ["micromark-util-normalize-identifier@2.0.1", "", { "dependencies": { "micromark-util-symbol": "^2.0.0" } }, ""], - - "micromark-util-resolve-all": ["micromark-util-resolve-all@2.0.1", "", { "dependencies": { "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-sanitize-uri": ["micromark-util-sanitize-uri@2.0.1", "", { "dependencies": { "micromark-util-character": "^2.0.0", "micromark-util-encode": "^2.0.0", "micromark-util-symbol": "^2.0.0" } }, ""], - - "micromark-util-subtokenize": ["micromark-util-subtokenize@2.1.0", "", { "dependencies": { "devlop": "^1.0.0", "micromark-util-chunked": "^2.0.0", "micromark-util-symbol": "^2.0.0", "micromark-util-types": "^2.0.0" } }, ""], - - "micromark-util-symbol": ["micromark-util-symbol@2.0.1", "", {}, ""], - - "micromark-util-types": ["micromark-util-types@2.0.2", "", {}, ""], - - "mime": ["mime@1.6.0", "", { "bin": "cli.js" }, ""], - - "mime-db": ["mime-db@1.54.0", "", {}, ""], - - "mimic-fn": ["mimic-fn@2.1.0", "", {}, ""], - - "mini-svg-data-uri": ["mini-svg-data-uri@1.4.4", "", { "bin": "cli.js" }, ""], - - "minisearch": ["minisearch@7.2.0", "", {}, ""], - - "mlly": ["mlly@1.8.0", "", { "dependencies": { "acorn": "^8.15.0", "pathe": "^2.0.3", "pkg-types": "^1.3.1", "ufo": "^1.6.1" } }, ""], - - "modern-ahocorasick": ["modern-ahocorasick@1.1.0", "", {}, ""], - - "ms": ["ms@2.0.0", "", {}, ""], - - "nanoid": ["nanoid@3.3.11", "", { "bin": "bin/nanoid.cjs" }, ""], - - "negotiator": ["negotiator@0.6.4", "", {}, ""], - - "node-releases": ["node-releases@2.0.27", "", {}, ""], - - "normalize-range": ["normalize-range@0.1.2", "", {}, ""], - - "npm-run-path": ["npm-run-path@4.0.1", "", { "dependencies": { "path-key": "^3.0.0" } }, ""], - - "nth-check": ["nth-check@2.1.1", "", { "dependencies": { "boolbase": "^1.0.0" } }, 
""], - - "nuqs": ["nuqs@2.7.2", "", { "dependencies": { "@standard-schema/spec": "1.0.0" }, "peerDependencies": { "@remix-run/react": ">=2", "@tanstack/react-router": "^1", "next": ">=14.2.0", "react": ">=18.2.0 || ^19.0.0-0", "react-router": "^6 || ^7", "react-router-dom": "^6 || ^7" }, "optionalPeers": ["@remix-run/react", "@tanstack/react-router", "next", "react-router-dom"] }, ""], - - "on-finished": ["on-finished@2.4.1", "", { "dependencies": { "ee-first": "1.1.1" } }, ""], - - "on-headers": ["on-headers@1.1.0", "", {}, ""], - - "onetime": ["onetime@5.1.2", "", { "dependencies": { "mimic-fn": "^2.1.0" } }, ""], - - "oniguruma-to-es": ["oniguruma-to-es@2.3.0", "", { "dependencies": { "emoji-regex-xs": "^1.0.0", "regex": "^5.1.1", "regex-recursion": "^5.1.1" } }, ""], - - "ora": ["ora@7.0.1", "", { "dependencies": { "chalk": "^5.3.0", "cli-cursor": "^4.0.0", "cli-spinners": "^2.9.0", "is-interactive": "^2.0.0", "is-unicode-supported": "^1.3.0", "log-symbols": "^5.1.0", "stdin-discarder": "^0.1.0", "string-width": "^6.1.0", "strip-ansi": "^7.1.0" } }, ""], - - "p-limit": ["p-limit@5.0.0", "", { "dependencies": { "yocto-queue": "^1.0.0" } }, ""], - - "p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, ""], - - "package-manager-detector": ["package-manager-detector@1.5.0", "", {}, ""], - - "parse-entities": ["parse-entities@4.0.2", "", { "dependencies": { "@types/unist": "^2.0.0", "character-entities-legacy": "^3.0.0", "character-reference-invalid": "^2.0.0", "decode-named-character-reference": "^1.0.0", "is-alphanumerical": "^2.0.0", "is-decimal": "^2.0.0", "is-hexadecimal": "^2.0.0" } }, ""], - - "parse5": ["parse5@7.3.0", "", { "dependencies": { "entities": "^6.0.0" } }, ""], - - "parseurl": ["parseurl@1.3.3", "", {}, ""], - - "path-data-parser": ["path-data-parser@0.1.0", "", {}, ""], - - "path-exists": ["path-exists@4.0.0", "", {}, ""], - - "path-key": ["path-key@3.1.1", "", {}, ""], - - "pathe": ["pathe@2.0.3", "", {}, ""], - - 
"picocolors": ["picocolors@1.1.1", "", {}, ""], - - "picomatch": ["picomatch@4.0.3", "", {}, ""], - - "pkg-types": ["pkg-types@1.3.1", "", { "dependencies": { "confbox": "^0.1.8", "mlly": "^1.7.4", "pathe": "^2.0.1" } }, ""], - - "playwright": ["playwright@1.56.1", "", { "dependencies": { "playwright-core": "1.56.1" }, "optionalDependencies": { "fsevents": "2.3.2" }, "bin": "cli.js" }, ""], - - "playwright-core": ["playwright-core@1.56.1", "", { "bin": "cli.js" }, ""], - - "points-on-curve": ["points-on-curve@0.2.0", "", {}, ""], - - "points-on-path": ["points-on-path@0.2.1", "", { "dependencies": { "path-data-parser": "0.1.0", "points-on-curve": "0.2.0" } }, ""], - - "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, ""], - - "postcss-value-parser": ["postcss-value-parser@4.2.0", "", {}, ""], - - "property-information": ["property-information@6.5.0", "", {}, ""], - - "quansync": ["quansync@0.2.11", "", {}, ""], - - "radix-ui": ["radix-ui@1.4.3", "", { "dependencies": { "@radix-ui/primitive": "1.1.3", "@radix-ui/react-accessible-icon": "1.1.7", "@radix-ui/react-accordion": "1.2.12", "@radix-ui/react-alert-dialog": "1.1.15", "@radix-ui/react-arrow": "1.1.7", "@radix-ui/react-aspect-ratio": "1.1.7", "@radix-ui/react-avatar": "1.1.10", "@radix-ui/react-checkbox": "1.3.3", "@radix-ui/react-collapsible": "1.1.12", "@radix-ui/react-collection": "1.1.7", "@radix-ui/react-compose-refs": "1.1.2", "@radix-ui/react-context": "1.1.2", "@radix-ui/react-context-menu": "2.2.16", "@radix-ui/react-dialog": "1.1.15", "@radix-ui/react-direction": "1.1.1", "@radix-ui/react-dismissable-layer": "1.1.11", "@radix-ui/react-dropdown-menu": "2.1.16", "@radix-ui/react-focus-guards": "1.1.3", "@radix-ui/react-focus-scope": "1.1.7", "@radix-ui/react-form": "0.1.8", "@radix-ui/react-hover-card": "1.1.15", "@radix-ui/react-label": "2.1.7", "@radix-ui/react-menu": "2.1.16", "@radix-ui/react-menubar": "1.1.16", 
"@radix-ui/react-navigation-menu": "1.2.14", "@radix-ui/react-one-time-password-field": "0.1.8", "@radix-ui/react-password-toggle-field": "0.1.3", "@radix-ui/react-popover": "1.1.15", "@radix-ui/react-popper": "1.2.8", "@radix-ui/react-portal": "1.1.9", "@radix-ui/react-presence": "1.1.5", "@radix-ui/react-primitive": "2.1.3", "@radix-ui/react-progress": "1.1.7", "@radix-ui/react-radio-group": "1.3.8", "@radix-ui/react-roving-focus": "1.1.11", "@radix-ui/react-scroll-area": "1.2.10", "@radix-ui/react-select": "2.2.6", "@radix-ui/react-separator": "1.1.7", "@radix-ui/react-slider": "1.3.6", "@radix-ui/react-slot": "1.2.3", "@radix-ui/react-switch": "1.2.6", "@radix-ui/react-tabs": "1.1.13", "@radix-ui/react-toast": "1.2.15", "@radix-ui/react-toggle": "1.1.10", "@radix-ui/react-toggle-group": "1.1.11", "@radix-ui/react-toolbar": "1.1.11", "@radix-ui/react-tooltip": "1.2.8", "@radix-ui/react-use-callback-ref": "1.1.1", "@radix-ui/react-use-controllable-state": "1.2.2", "@radix-ui/react-use-effect-event": "0.0.2", "@radix-ui/react-use-escape-keydown": "1.1.1", "@radix-ui/react-use-is-hydrated": "0.1.0", "@radix-ui/react-use-layout-effect": "1.1.1", "@radix-ui/react-use-size": "1.1.1", "@radix-ui/react-visually-hidden": "1.2.3" }, "peerDependencies": { "@types/react": "*", "@types/react-dom": "*", "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react-dom"] }, ""], - - "range-parser": ["range-parser@1.2.1", "", {}, ""], - - "react": ["react@19.2.0", "", {}, "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ=="], - - "react-dom": ["react-dom@19.2.0", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.0" } }, "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ=="], - - "react-intersection-observer": ["react-intersection-observer@9.16.0", "", { 
"peerDependencies": { "react": "^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" } }, ""], - - "react-refresh": ["react-refresh@0.18.0", "", {}, ""], - - "react-remove-scroll": ["react-remove-scroll@2.7.1", "", { "dependencies": { "react-remove-scroll-bar": "^2.3.7", "react-style-singleton": "^2.2.3", "tslib": "^2.1.0", "use-callback-ref": "^1.3.3", "use-sidecar": "^1.1.3" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, ""], - - "react-remove-scroll-bar": ["react-remove-scroll-bar@2.3.8", "", { "dependencies": { "react-style-singleton": "^2.2.2", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, ""], - - "react-router": ["react-router@7.9.5", "", { "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, ""], - - "react-style-singleton": ["react-style-singleton@2.2.3", "", { "dependencies": { "get-nonce": "^1.0.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, ""], - - "readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, ""], - - "recma-build-jsx": ["recma-build-jsx@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-util-build-jsx": "^3.0.0", "vfile": "^6.0.0" } }, ""], - - "recma-jsx": ["recma-jsx@1.0.1", "", { "dependencies": { "acorn-jsx": "^5.0.0", "estree-util-to-js": "^2.0.0", "recma-parse": "^1.0.0", "recma-stringify": "^1.0.0", "unified": "^11.0.0" }, "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, ""], - - "recma-parse": ["recma-parse@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "esast-util-from-js": "^2.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, ""], - - "recma-stringify": 
["recma-stringify@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "estree-util-to-js": "^2.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, ""], - - "regex": ["regex@5.1.1", "", { "dependencies": { "regex-utilities": "^2.3.0" } }, ""], - - "regex-recursion": ["regex-recursion@5.1.1", "", { "dependencies": { "regex": "^5.1.1", "regex-utilities": "^2.3.0" } }, ""], - - "regex-utilities": ["regex-utilities@2.3.0", "", {}, ""], - - "rehype-autolink-headings": ["rehype-autolink-headings@7.1.0", "", { "dependencies": { "@types/hast": "^3.0.0", "@ungap/structured-clone": "^1.0.0", "hast-util-heading-rank": "^3.0.0", "hast-util-is-element": "^3.0.0", "unified": "^11.0.0", "unist-util-visit": "^5.0.0" } }, ""], - - "rehype-class-names": ["rehype-class-names@2.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-classnames": "^3.0.0", "hast-util-select": "^6.0.0", "unified": "^11.0.4" } }, ""], - - "rehype-mermaid": ["rehype-mermaid@3.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "hast-util-from-html-isomorphic": "^2.0.0", "hast-util-to-text": "^4.0.0", "mermaid-isomorphic": "^3.0.0", "mini-svg-data-uri": "^1.0.0", "space-separated-tokens": "^2.0.0", "unified": "^11.0.0", "unist-util-visit-parents": "^6.0.0", "vfile": "^6.0.0" }, "peerDependencies": { "playwright": "1" } }, ""], - - "rehype-recma": ["rehype-recma@1.0.0", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "hast-util-to-estree": "^3.0.0" } }, ""], - - "rehype-slug": ["rehype-slug@6.0.0", "", { "dependencies": { "@types/hast": "^3.0.0", "github-slugger": "^2.0.0", "hast-util-heading-rank": "^3.0.0", "hast-util-to-string": "^3.0.0", "unist-util-visit": "^5.0.0" } }, ""], - - "remark-directive": ["remark-directive@3.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-directive": "^3.0.0", "micromark-extension-directive": "^3.0.0", "unified": "^11.0.0" } }, ""], - - "remark-frontmatter": ["remark-frontmatter@5.0.0", "", { "dependencies": { 
"@types/mdast": "^4.0.0", "mdast-util-frontmatter": "^2.0.0", "micromark-extension-frontmatter": "^2.0.0", "unified": "^11.0.0" } }, ""], - - "remark-gfm": ["remark-gfm@4.0.1", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-gfm": "^3.0.0", "micromark-extension-gfm": "^3.0.0", "remark-parse": "^11.0.0", "remark-stringify": "^11.0.0", "unified": "^11.0.0" } }, ""], - - "remark-mdx": ["remark-mdx@3.1.1", "", { "dependencies": { "mdast-util-mdx": "^3.0.0", "micromark-extension-mdxjs": "^3.0.0" } }, ""], - - "remark-mdx-frontmatter": ["remark-mdx-frontmatter@5.2.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "estree-util-value-to-estree": "^3.0.0", "toml": "^3.0.0", "unified": "^11.0.0", "unist-util-mdx-define": "^1.0.0", "yaml": "^2.0.0" } }, ""], - - "remark-parse": ["remark-parse@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-from-markdown": "^2.0.0", "micromark-util-types": "^2.0.0", "unified": "^11.0.0" } }, ""], - - "remark-rehype": ["remark-rehype@11.1.2", "", { "dependencies": { "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "mdast-util-to-hast": "^13.0.0", "unified": "^11.0.0", "vfile": "^6.0.0" } }, ""], - - "remark-stringify": ["remark-stringify@11.0.0", "", { "dependencies": { "@types/mdast": "^4.0.0", "mdast-util-to-markdown": "^2.0.0", "unified": "^11.0.0" } }, ""], - - "require-like": ["require-like@0.1.2", "", {}, ""], - - "restore-cursor": ["restore-cursor@4.0.0", "", { "dependencies": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" } }, ""], - - "robust-predicates": ["robust-predicates@3.0.2", "", {}, ""], - - "rollup": ["rollup@4.52.5", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.52.5", "@rollup/rollup-android-arm64": "4.52.5", "@rollup/rollup-darwin-arm64": "4.52.5", "@rollup/rollup-darwin-x64": "4.52.5", "@rollup/rollup-freebsd-arm64": "4.52.5", "@rollup/rollup-freebsd-x64": "4.52.5", "@rollup/rollup-linux-arm-gnueabihf": 
"4.52.5", "@rollup/rollup-linux-arm-musleabihf": "4.52.5", "@rollup/rollup-linux-arm64-gnu": "4.52.5", "@rollup/rollup-linux-arm64-musl": "4.52.5", "@rollup/rollup-linux-loong64-gnu": "4.52.5", "@rollup/rollup-linux-ppc64-gnu": "4.52.5", "@rollup/rollup-linux-riscv64-gnu": "4.52.5", "@rollup/rollup-linux-riscv64-musl": "4.52.5", "@rollup/rollup-linux-s390x-gnu": "4.52.5", "@rollup/rollup-linux-x64-gnu": "4.52.5", "@rollup/rollup-linux-x64-musl": "4.52.5", "@rollup/rollup-openharmony-arm64": "4.52.5", "@rollup/rollup-win32-arm64-msvc": "4.52.5", "@rollup/rollup-win32-ia32-msvc": "4.52.5", "@rollup/rollup-win32-x64-gnu": "4.52.5", "@rollup/rollup-win32-x64-msvc": "4.52.5", "fsevents": "~2.3.2" }, "bin": "dist/bin/rollup" }, ""], - - "roughjs": ["roughjs@4.6.6", "", { "dependencies": { "hachure-fill": "^0.5.2", "path-data-parser": "^0.1.0", "points-on-curve": "^0.2.0", "points-on-path": "^0.2.1" } }, ""], - - "rw": ["rw@1.3.3", "", {}, ""], - - "safe-buffer": ["safe-buffer@5.2.1", "", {}, ""], - - "safer-buffer": ["safer-buffer@2.1.2", "", {}, ""], - - "scheduler": ["scheduler@0.27.0", "", {}, ""], - - "semver": ["semver@6.3.1", "", { "bin": "bin/semver.js" }, ""], - - "send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, ""], - - "serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, ""], - - "set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, ""], - - "setprototypeof": ["setprototypeof@1.2.0", "", {}, ""], - - "shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, ""], - - "shebang-regex": ["shebang-regex@3.0.0", "", {}, ""], - - "shiki": ["shiki@1.29.2", "", 
{ "dependencies": { "@shikijs/core": "1.29.2", "@shikijs/engine-javascript": "1.29.2", "@shikijs/engine-oniguruma": "1.29.2", "@shikijs/langs": "1.29.2", "@shikijs/themes": "1.29.2", "@shikijs/types": "1.29.2", "@shikijs/vscode-textmate": "^10.0.1", "@types/hast": "^3.0.4" } }, ""], - - "signal-exit": ["signal-exit@3.0.7", "", {}, ""], - - "sisteransi": ["sisteransi@1.0.5", "", {}, ""], - - "source-map": ["source-map@0.7.6", "", {}, ""], - - "source-map-js": ["source-map-js@1.2.1", "", {}, ""], - - "space-separated-tokens": ["space-separated-tokens@2.0.2", "", {}, ""], - - "statuses": ["statuses@2.0.1", "", {}, ""], - - "stdin-discarder": ["stdin-discarder@0.1.0", "", { "dependencies": { "bl": "^5.0.0" } }, ""], - - "string-width": ["string-width@6.1.0", "", { "dependencies": { "eastasianwidth": "^0.2.0", "emoji-regex": "^10.2.1", "strip-ansi": "^7.0.1" } }, ""], - - "string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, ""], - - "stringify-entities": ["stringify-entities@4.0.4", "", { "dependencies": { "character-entities-html4": "^2.0.0", "character-entities-legacy": "^3.0.0" } }, ""], - - "strip-ansi": ["strip-ansi@7.1.2", "", { "dependencies": { "ansi-regex": "^6.0.1" } }, ""], - - "strip-final-newline": ["strip-final-newline@2.0.0", "", {}, ""], - - "style-to-js": ["style-to-js@1.1.18", "", { "dependencies": { "style-to-object": "1.0.11" } }, ""], - - "style-to-object": ["style-to-object@1.0.11", "", { "dependencies": { "inline-style-parser": "0.2.4" } }, ""], - - "stylis": ["stylis@4.3.6", "", {}, ""], - - "tabbable": ["tabbable@6.3.0", "", {}, ""], - - "tailwindcss": ["tailwindcss@4.1.16", "", {}, ""], - - "tapable": ["tapable@2.3.0", "", {}, ""], - - "tinyexec": ["tinyexec@1.0.1", "", {}, ""], - - "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, ""], - - "toidentifier": ["toidentifier@1.0.1", "", {}, ""], - - "toml": ["toml@3.0.0", "", {}, ""], - - "trim-lines": 
["trim-lines@3.0.1", "", {}, ""], - - "trough": ["trough@2.2.0", "", {}, ""], - - "ts-dedent": ["ts-dedent@2.2.0", "", {}, ""], - - "tslib": ["tslib@2.8.1", "", {}, ""], - - "twoslash": ["twoslash@0.3.4", "", { "dependencies": { "@typescript/vfs": "^1.6.1", "twoslash-protocol": "0.3.4" }, "peerDependencies": { "typescript": "^5.5.0" } }, ""], - - "twoslash-protocol": ["twoslash-protocol@0.3.4", "", {}, ""], - - "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], - - "ua-parser-js": ["ua-parser-js@1.0.41", "", { "bin": "script/cli.js" }, ""], - - "ufo": ["ufo@1.6.1", "", {}, ""], - - "undici-types": ["undici-types@7.16.0", "", {}, ""], - - "unified": ["unified@11.0.5", "", { "dependencies": { "@types/unist": "^3.0.0", "bail": "^2.0.0", "devlop": "^1.0.0", "extend": "^3.0.0", "is-plain-obj": "^4.0.0", "trough": "^2.0.0", "vfile": "^6.0.0" } }, ""], - - "unist-util-find-after": ["unist-util-find-after@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, ""], - - "unist-util-is": ["unist-util-is@6.0.1", "", { "dependencies": { "@types/unist": "^3.0.0" } }, ""], - - "unist-util-mdx-define": ["unist-util-mdx-define@1.1.2", "", { "dependencies": { "@types/estree": "^1.0.0", "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "estree-util-is-identifier-name": "^3.0.0", "estree-util-scope": "^1.0.0", "estree-walker": "^3.0.0", "vfile": "^6.0.0" } }, ""], - - "unist-util-position": ["unist-util-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, ""], - - "unist-util-position-from-estree": ["unist-util-position-from-estree@2.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, ""], - - "unist-util-remove-position": ["unist-util-remove-position@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-visit": "^5.0.0" } }, ""], - - 
"unist-util-stringify-position": ["unist-util-stringify-position@4.0.0", "", { "dependencies": { "@types/unist": "^3.0.0" } }, ""], - - "unist-util-visit": ["unist-util-visit@5.0.0", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0", "unist-util-visit-parents": "^6.0.0" } }, ""], - - "unist-util-visit-parents": ["unist-util-visit-parents@6.0.2", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-is": "^6.0.0" } }, ""], - - "universalify": ["universalify@2.0.1", "", {}, ""], - - "update-browserslist-db": ["update-browserslist-db@1.1.4", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": "cli.js" }, ""], - - "use-callback-ref": ["use-callback-ref@1.3.3", "", { "dependencies": { "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, ""], - - "use-sidecar": ["use-sidecar@1.1.3", "", { "dependencies": { "detect-node-es": "^1.1.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" } }, ""], - - "use-sync-external-store": ["use-sync-external-store@1.6.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, ""], - - "util-deprecate": ["util-deprecate@1.0.2", "", {}, ""], - - "uuid": ["uuid@11.1.0", "", { "bin": "dist/esm/bin/uuid" }, ""], - - "vary": ["vary@1.1.2", "", {}, ""], - - "vfile": ["vfile@6.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile-message": "^4.0.0" } }, ""], - - "vfile-location": ["vfile-location@5.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "vfile": "^6.0.0" } }, ""], - - "vfile-matter": ["vfile-matter@5.0.1", "", { "dependencies": { "vfile": "^6.0.0", "yaml": "^2.0.0" } }, ""], - - "vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, ""], 
- - "vite": ["vite@7.1.12", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["less", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx"], "bin": "bin/vite.js" }, ""], - - "vite-node": ["vite-node@3.2.4", "", { "dependencies": { "cac": "^6.7.14", "debug": "^4.4.1", "es-module-lexer": "^1.7.0", "pathe": "^2.0.3", "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": "vite-node.mjs" }, ""], - - "vocs": ["vocs@1.1.0", "", { "dependencies": { "@floating-ui/react": "^0.27.16", "@hono/node-server": "^1.19.5", "@mdx-js/mdx": "^3.1.1", "@mdx-js/react": "^3.1.1", "@mdx-js/rollup": "^3.1.1", "@noble/hashes": "^1.7.1", "@radix-ui/colors": "^3.0.0", "@radix-ui/react-accordion": "^1.2.3", "@radix-ui/react-dialog": "^1.1.6", "@radix-ui/react-icons": "^1.3.2", "@radix-ui/react-label": "^2.1.2", "@radix-ui/react-navigation-menu": "^1.2.5", "@radix-ui/react-popover": "^1.1.6", "@radix-ui/react-tabs": "^1.1.3", "@shikijs/rehype": "^1", "@shikijs/transformers": "^1", "@shikijs/twoslash": "^1", "@tailwindcss/vite": "4.1.15", "@vanilla-extract/css": "^1.17.4", "@vanilla-extract/dynamic": "^2.1.5", "@vanilla-extract/vite-plugin": "^5.1.1", "@vitejs/plugin-react": "^5.0.4", "autoprefixer": "^10.4.21", "cac": "^6.7.14", "chroma-js": "^3.1.2", "clsx": "^2.1.1", "compression": "^1.8.1", "create-vocs": "^1.0.0-alpha.5", "cross-spawn": "^7.0.6", "fs-extra": "^11.3.2", "hastscript": "^8.0.0", "hono": "^4.10.3", "mark.js": "^8.11.1", "mdast-util-directive": "^3.1.0", "mdast-util-from-markdown": "^2.0.2", "mdast-util-frontmatter": "^2.0.1", 
"mdast-util-gfm": "^3.1.0", "mdast-util-mdx": "^3.0.0", "mdast-util-mdx-jsx": "^3.2.0", "mdast-util-to-hast": "^13.2.0", "mdast-util-to-markdown": "^2.1.2", "minisearch": "^7.2.0", "nuqs": "^2.7.2", "ora": "^7.0.1", "p-limit": "^5.0.0", "picomatch": "^4.0.3", "playwright": "^1.52.0", "postcss": "^8.5.2", "radix-ui": "^1.1.3", "react-intersection-observer": "^9.15.1", "react-router": "^7.9.4", "rehype-autolink-headings": "^7.1.0", "rehype-class-names": "^2.0.0", "rehype-mermaid": "^3.0.0", "rehype-slug": "^6.0.0", "remark-directive": "^3.0.1", "remark-frontmatter": "^5.0.0", "remark-gfm": "^4.0.1", "remark-mdx": "^3.1.1", "remark-mdx-frontmatter": "^5.2.0", "remark-parse": "^11.0.0", "serve-static": "^1.16.2", "shiki": "^1", "toml": "^3.0.0", "twoslash": "~0.3.4", "ua-parser-js": "^1.0.40", "unified": "^11.0.5", "unist-util-visit": "^5.0.0", "vfile-matter": "^5.0.1", "vite": "^7.1.11", "yaml": "^2.8.1" }, "peerDependencies": { "react": "^19", "react-dom": "^19" }, "bin": "_lib/cli/index.js" }, "sha512-RvSdP+OP5w/mzY6rupup8RLRRfi+9dqZmiiB/NOFrQds5iZNR2h2hNui+oKXUwAxv/Dywo2ma4YYSy6v/aw0Dw=="], - - "vscode-jsonrpc": ["vscode-jsonrpc@8.2.0", "", {}, ""], - - "vscode-languageserver": ["vscode-languageserver@9.0.1", "", { "dependencies": { "vscode-languageserver-protocol": "3.17.5" }, "bin": { "installServerIntoExtension": "bin/installServerIntoExtension" } }, ""], - - "vscode-languageserver-protocol": ["vscode-languageserver-protocol@3.17.5", "", { "dependencies": { "vscode-jsonrpc": "8.2.0", "vscode-languageserver-types": "3.17.5" } }, ""], - - "vscode-languageserver-textdocument": ["vscode-languageserver-textdocument@1.0.12", "", {}, ""], - - "vscode-languageserver-types": ["vscode-languageserver-types@3.17.5", "", {}, ""], - - "vscode-uri": ["vscode-uri@3.0.8", "", {}, ""], - - "web-namespaces": ["web-namespaces@2.0.1", "", {}, ""], - - "which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "bin/node-which" } }, ""], - - 
"yallist": ["yallist@3.1.1", "", {}, ""], - - "yaml": ["yaml@2.8.1", "", { "bin": "bin.mjs" }, ""], - - "yocto-queue": ["yocto-queue@1.2.1", "", {}, ""], - - "zwitch": ["zwitch@2.0.4", "", {}, ""], - - "@babel/core/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, ""], - - "@babel/traverse/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "@iconify/utils/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "@rollup/pluginutils/estree-walker": ["estree-walker@2.0.2", "", {}, ""], - - "@shikijs/twoslash/twoslash": ["twoslash@0.2.12", "", { "dependencies": { "@typescript/vfs": "^1.6.0", "twoslash-protocol": "0.2.12" }, "peerDependencies": { "typescript": "*" } }, ""], - - "@tailwindcss/node/tailwindcss": ["tailwindcss@4.1.15", "", {}, ""], - - "@tailwindcss/vite/tailwindcss": ["tailwindcss@4.1.15", "", {}, ""], - - "@typescript/vfs/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "cytoscape-fcose/cose-base": ["cose-base@2.2.0", "", { "dependencies": { "layout-base": "^2.0.0" } }, ""], - - "d3-dsv/commander": ["commander@7.2.0", "", {}, ""], - - "d3-sankey/d3-array": ["d3-array@2.12.1", "", { "dependencies": { "internmap": "^1.0.0" } }, ""], - - "d3-sankey/d3-shape": ["d3-shape@1.3.7", "", { "dependencies": { "d3-path": "1" } }, ""], - - "hast-util-from-dom/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, ""], - - "hast-util-from-parse5/hastscript": ["hastscript@9.0.1", "", { "dependencies": { "@types/hast": "^3.0.0", "comma-separated-tokens": "^2.0.0", "hast-util-parse-selector": "^4.0.0", "property-information": "^7.0.0", "space-separated-tokens": "^2.0.0" } }, ""], - - 
"hast-util-from-parse5/property-information": ["property-information@7.1.0", "", {}, ""], - - "hast-util-select/property-information": ["property-information@7.1.0", "", {}, ""], - - "hast-util-to-estree/property-information": ["property-information@7.1.0", "", {}, ""], - - "hast-util-to-html/property-information": ["property-information@7.1.0", "", {}, ""], - - "hast-util-to-jsx-runtime/property-information": ["property-information@7.1.0", "", {}, ""], - - "local-pkg/pkg-types": ["pkg-types@2.3.0", "", { "dependencies": { "confbox": "^0.2.2", "exsolve": "^1.0.7", "pathe": "^2.0.3" } }, ""], - - "micromark/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "p-locate/p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, ""], - - "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, ""], - - "send/encodeurl": ["encodeurl@1.0.2", "", {}, ""], - - "send/ms": ["ms@2.1.3", "", {}, ""], - - "vite/fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], - - "vite-node/debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, ""], - - "@babel/core/debug/ms": ["ms@2.1.3", "", {}, ""], - - "@babel/traverse/debug/ms": ["ms@2.1.3", "", {}, ""], - - "@iconify/utils/debug/ms": ["ms@2.1.3", "", {}, ""], - - "@shikijs/twoslash/twoslash/twoslash-protocol": ["twoslash-protocol@0.2.12", "", {}, ""], - - "@typescript/vfs/debug/ms": ["ms@2.1.3", "", {}, ""], - - "cytoscape-fcose/cose-base/layout-base": ["layout-base@2.0.1", "", {}, ""], - - "d3-sankey/d3-shape/d3-path": ["d3-path@1.0.9", "", {}, ""], - - "hast-util-from-dom/hastscript/property-information": ["property-information@7.1.0", "", {}, ""], - - "local-pkg/pkg-types/confbox": ["confbox@0.2.2", "", {}, ""], - - "micromark/debug/ms": ["ms@2.1.3", "", {}, ""], - - "p-locate/p-limit/yocto-queue": ["yocto-queue@0.1.0", "", {}, ""], - - "vite-node/debug/ms": 
["ms@2.1.3", "", {}, ""], - } -} diff --git a/kona/docs/docs/pages/glossary.mdx b/kona/docs/docs/pages/glossary.mdx deleted file mode 100644 index 41b12e47383..00000000000 --- a/kona/docs/docs/pages/glossary.mdx +++ /dev/null @@ -1,41 +0,0 @@ -# Glossary - -*This document contains definitions for terms used throughout the Kona book.* - -#### Fault Proof VM -A `Fault Proof VM` is a virtual machine, commonly supporting a subset of the Linux kernel's syscalls and a modified subset of an existing reduced instruction set architecture, -that is designed to execute verifiable programs. - -Full specification for the `cannon` & `cannon-rs` FPVMs, as an example, is available in the [Optimism Monorepo][cannon-specs]. - -#### Fault Proof Program -A `Fault Proof Program` is a program, commonly written in a general-purpose language such as Golang, C, or Rust, that may be compiled down -to a compatible `Fault Proof VM` target and provably executed on that target VM. - -Examples of `Fault Proof Programs` include the [OP Program][op-program], which runs on top of [`cannon`][cannon], [`cannon-rs`][cannon-rs], and -[`asterisc`][asterisc] to verify a claim about the state of an [OP Stack][op-stack] layer two. - -#### Preimage ABI -The `Preimage ABI` is a specification for a synchronous communication protocol between a `client` and a `host` that is used to request and read data from the `host`'s -datastore. Full specifications for the `Preimage ABI` are available in the [Optimism Monorepo][preimage-specs]. 
- -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/index.mdx b/kona/docs/docs/pages/index.mdx deleted file mode 100644 index f2724332145..00000000000 --- a/kona/docs/docs/pages/index.mdx +++ /dev/null @@ -1,146 +0,0 @@ ---- -layout: landing ---- - -import { HomePage } from 'vocs/components' -import { SdkShowcase } from '../components/SdkShowcase' -import { CodeGroup, Code } from '../components/CodeGroup' - -<div className="full-width pt-16 lg:pt-24 min-h-114"> - <div className="flex flex-col lg:flex-row items-center justify-between full-width-inner"> - <div className="lg:w-1/2 mb-4 lg:mb-0 lg:pr-8 self-start"> - <div className="mb-2"> - <h1 className="text-5xl lg:text-7xl font-bold mb-8 bg-gradient-to-r from-black to-gray-600 dark:from-white 
dark:to-gray-300 bg-clip-text text-transparent leading-tight"> - Kona - </h1> - <p className="text-2xl lg:text-3xl text-gray-600 dark:text-gray-400 mb-8 leading-relaxed"> - OP Stack Components built in Rust - </p> - <p className="text-lg lg:text-lg text-gray-600 dark:text-gray-400 mb-4 leading-relaxed"> - A comprehensive suite of low-level OP Stack types, portable `no_std` - components, and services built in Rust. Kona provides a rollup node - implementation called the `kona-node` that is spec-compliant, performant, - and modular as well as the Kona Fault Proof implementation. Built by <a className="font-bold underline" href="https://www.oplabs.co/">OP Labs</a>. - </p> - </div> - </div> - <div className="lg:w-1/2 self-start"> - <CodeGroup> - <Code - title="Run a Node" - code={`# Install the kona-node -cargo install kona-node - -# Start the node -kona-node --chain base --port 8545`} - /> - <Code - title="Build a Node" - code={`# Clone the repository -git clone https://github.com/op-rs/kona.git - -# Build the node -just build-node - -# Run your custom node -./target/release/kona-node`} - /> - </CodeGroup> - </div> - </div> -</div> - -<div className="full-width mt-1 mb-32 pb-16"> - <div className="flex flex-wrap items-center justify-between gap-6 full-width-inner"> - <div className="flex flex-wrap gap-2 sm:gap-4"> - <a - href="/node/run/overview" - className="inline-flex items-center justify-center px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 border border-gray-300 dark:border-gray-700 text-sm sm:text-base font-medium rounded-md text-gray-700 dark:text-gray-300 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-700 transition-colors flex-shrink-0" - > - Run a Node - </a> - <a - href="/node/install/overview" - className="inline-flex items-center justify-center px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 border border-gray-300 dark:border-gray-700 text-sm sm:text-base font-medium rounded-md text-gray-700 dark:text-gray-300 bg-white dark:bg-gray-800 
hover:bg-gray-50 dark:hover:bg-gray-700 transition-colors flex-shrink-0" - > - Build a Node - </a> - <a - href="/intro/why" - className="inline-flex items-center justify-center px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 border border-gray-300 dark:border-gray-700 text-sm sm:text-base font-medium rounded-md text-gray-700 dark:text-gray-300 bg-white dark:bg-gray-800 hover:bg-gray-50 dark:hover:bg-gray-700 transition-colors flex-shrink-0" - > - Why Kona - </a> - </div> - - <div className="flex flex-wrap gap-2 sm:gap-4 mt-4 lg:mt-0"> - <div className="bg-white/10 dark:bg-white/5 backdrop-blur-md border border-white/20 dark:border-white/10 rounded-lg px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 text-sm sm:text-base"> - <span className="text-gray-600 dark:text-gray-400">⭐ </span> - <span className="font-semibold">200+ </span> - <span className="text-gray-600 dark:text-gray-400">stars</span> - </div> - <div className="bg-white/10 dark:bg-white/5 backdrop-blur-md border border-white/20 dark:border-white/10 rounded-lg px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 text-sm sm:text-base"> - <span className="text-gray-600 dark:text-gray-400">👥 </span> - <span className="font-semibold">50+ </span> - <span className="text-gray-600 dark:text-gray-400">contributors</span> - </div> - <div className="bg-white/10 dark:bg-white/5 backdrop-blur-md border border-white/20 dark:border-white/10 rounded-lg px-3 py-2 sm:px-4 sm:py-2 md:px-6 md:py-3 text-sm sm:text-base"> - <span className="text-gray-600 dark:text-gray-400">📜 </span> - <span className="font-semibold">MIT </span> - <span className="text-gray-600 dark:text-gray-400">license</span> - </div> - </div> - </div> -</div> - -<div className="full-width"> - <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-4 gap-6 full-width-inner"> - <div className="text-center"> - <div className="bg-white/5 dark:bg-white/5 border border-black/10 dark:border-white/10 rounded-xl p-6 hover:bg-black/5 dark:hover:bg-white/10 transition-colors h-full"> - 
<div className="text-4xl mb-4">🔒</div> - <h3 className="text-lg font-semibold mb-3">Secure</h3> - <p className="text-sm text-gray-600 dark:text-gray-400 leading-relaxed"> - Built with Rust's memory safety guarantees and comprehensive test coverage for production reliability. - </p> - </div> - </div> - <div className="text-center"> - <div className="bg-white/5 dark:bg-white/5 border border-black/10 dark:border-white/10 rounded-xl p-6 hover:bg-black/5 dark:hover:bg-white/10 transition-colors h-full"> - <div className="text-4xl mb-4">⚡</div> - <h3 className="text-lg font-semibold mb-3">Performant</h3> - <p className="text-sm text-gray-600 dark:text-gray-400 leading-relaxed"> - Optimized for minimal resource usage with `no_std` compatibility and efficient execution. - </p> - </div> - </div> - <div className="text-center"> - <div className="bg-white/5 dark:bg-white/5 border border-black/10 dark:border-white/10 rounded-xl p-6 hover:bg-black/5 dark:hover:bg-white/10 transition-colors h-full"> - <div className="text-4xl mb-4">🧩</div> - <h3 className="text-lg font-semibold mb-3">Customizable</h3> - <p className="text-sm text-gray-600 dark:text-gray-400 leading-relaxed"> - Extensible architecture with composable crates that can be used independently or together. - </p> - </div> - </div> - <div className="text-center"> - <div className="bg-white/5 dark:bg-white/5 border border-black/10 dark:border-white/10 rounded-xl p-6 hover:bg-black/5 dark:hover:bg-white/10 transition-colors h-full"> - <div className="text-4xl mb-4">🌐</div> - <h3 className="text-lg font-semibold mb-3">Extensible</h3> - <p className="text-sm text-gray-600 dark:text-gray-400 leading-relaxed"> - Supports multiple proof backends including FPVM, SP-1, Risc0, and other verifiable environments. 
- </p> - </div> - </div> - </div> -</div> - - -<div className="full-width mt-20"> - <div className="full-width-inner"> - <h3 className="text-2xl lg:text-2xl font-bold mb-4 pb-2">Built with Kona SDK</h3> - <p className="text-gray-600 dark:text-gray-400 mb-16 pb-4 text-lg lg:text-lg"> - Production implementations using Kona's modular architecture - </p> - <SdkShowcase /> - </div> -</div> diff --git a/kona/docs/docs/pages/intro/overview.mdx b/kona/docs/docs/pages/intro/overview.mdx deleted file mode 100644 index 18d1c63de10..00000000000 --- a/kona/docs/docs/pages/intro/overview.mdx +++ /dev/null @@ -1,93 +0,0 @@ -import { Callout } from 'vocs/components' - -# Kona [Documentation for Kona users and developers] - -Kona is an implementation of the [OP Stack][op-stack] written in Rust, -designed to be modular and extensible. `no_std` support is prioritized -to provide the building blocks for fault proofs. - -<Callout type="warning"> -Kona is in active development and should be considered experimental. -</Callout> - -<Callout type="note"> -These docs may contain inaccuracies as it evolves. - -Please [open an issue][new-issue] if you find any errors or have any suggestions -for improvements, and also feel free to [contribute][contributing] to the project! -</Callout> - - -## Introduction - -Originally a suite of portable implementations of the OP Stack rollup state transition, -Kona has been extended to be _the monorepo_ for <a href="https://specs.optimism.io/">OP Stack</a> -types, components, and services built in Rust. Kona provides an ecosystem of extensible, low-level -crates that compose into components and services required for the OP Stack. - -Protocol crates are `no_std` compatible for use within the Fault Proof. Types defined in these -libraries are shared by other components of the OP Stack including the rollup node. - -Proof crates are available for developing verifiable Rust programs targeting -[Fault Proof VMs](/glossary#fault-proof-vm). 
-These libraries provide tooling and abstractions around low-level syscalls, memory management, -and other common structures that authors of verifiable programs will need to interact with. -It also provides build pipelines for compiling `no_std` Rust programs to a format that can be -executed by supported Fault Proof VM targets. - -Kona is built and maintained by open source contributors and is licensed under the MIT License. - -## Goals of Kona - -**1. Composability** - -Kona provides a common set of tools and abstractions for developing verifiable Rust programs -on top of several supported Fault Proof VM targets. This is done to ensure that programs -written for one supported FPVM can be easily ported to another supported FPVM, and that the -ecosystem of programs built on top of these targets can be easily shared and reused. - -**2. Safety** - -Through standardization of these low-level system interfaces and build pipelines, Kona seeks -to increase coverage over the low-level operations that are required to build on top of a FPVM. - -**3. Developer Experience** - -Building on top of custom Rust targets can be difficult, especially when the target is -nascent and tooling is not yet mature. Kona seeks to improve this experience by standardizing -and streamlining the process of developing and compiling verifiable Rust programs, targeted -at supported FPVMs. - -**4. Performance** - -Kona is opinionated in that it favors `no_std` Rust programs for embedded FPVM development, -for both performance and portability. In contrast with alternative approaches, such as the -[`op-program`][op-program] using the Golang `MIPS64` target, `no_std` Rust programs produce -much smaller binaries, resulting in fewer instructions that need to be executed on the FPVM. -In addition, this offers developers more low-level control over interactions with the FPVM -kernel, which can be useful for optimizing performance-critical code. - -## Contributing - -Contributors are welcome! 
Please see the [contributing guide][contributing] for more information. - -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/node/design/intro.mdx b/kona/docs/docs/pages/node/design/intro.mdx deleted file mode 100644 index d549fd0ff25..00000000000 --- a/kona/docs/docs/pages/node/design/intro.mdx +++ /dev/null @@ -1,78 +0,0 @@ -# Node Design Overview - -The entry-point for the `kona-node` is the [`RollupNodeService`][trait] -trait which encapsulates the core wiring for the node. The default -implementation of the trait [`start` method][start] handles connecting -all the different components of the node, running each in a spawned -thread. As such, each node component is considered an actor. 
- -The [`RollupNodeService`][trait] abstracts individual actors through -the [`NodeActor` trait][actor]. With the `NodeActor` trait, the -`RollupNodeService` builds the actor and then starts it. - -Kona provides implementations for all `NodeActor`s required -to run a `RollupNodeService`. Actors are defined in the -[actors][actors] module of the `kona-node-service` crate. - -The `kona-node` is an implementation of the `RollupNodeService` -that lives in the [standard][standard] module. - - -### Actors - -The architecture of `kona-node` is a web of actors that share -state through message passing, using channels, rather than using -shared memory. - -The [`RollupNodeService`][trait] defines the set of required -actors using associated types. These are subject to change, -but are currently defined as follows. - -- **Derivation Actor**: Orchestrates the derivation pipeline, - deriving L2 payload attributes from l1 blocks. Payload - attributes prepared this way are forwarded to the Engine - Actor to be executed. The [derivation][derivation] docs - dive deeper into how the derivation actor works. -- **Engine Actor**: Brokers the connection to the execution - layer client (or "execution engine"). The engine actor - turns messages from other actors into engine "tasks" - that are executed in a round-robin against the EL client. - The [engine][engine] docs expand on this. -- **Network Actor**: Manages the P2P Network for the rollup - node. The P2P stack consists of `discv5` peer discovery - and block gossip through libp2p. Visit the [network][p2p] - docs for more detail. -- **Supervisor Actor (beta)**: The supervisor actor is an - interop feature that allows the `kona-node` to be - "managed" (or "indexed") by the supervisor - a new - component in the OP Stack. A detailed overview of - interop and the supervisor's role is provided in the - [supervisor][supervisor] docs. -- **Runtime Actor**: Loads runtime values from the contracts - on the L1 chain for the OP Stack. 
This is a very - light-weight actor described in [runtime][runtime] docs. -- **Sequencer Actor**: The sequencer actor extends the - `kona-node` to be run as a sequencer. Sequencing is - periphery to the basic rollup node operation. See - the [sequencer][sequencer]. -- **RPC Actor**: The RPC actor spins up and serves an - RPC server that exposes the rpc methods required by - the [OP Stack Specs][specs]. - - - - -[p2p]: ./p2p -[engine]: ./engine -[derivation]: ./derivation -[supervisor]: #TODO -[runtime]: #TODO -[sequencer]: ./sequencer - -[specs]: https://specs.optimism.io/protocol/rollup-node.html - -[standard]: https://github.com/op-rs/kona/blob/main/crates/node/service/src/service/standard/node.rs -[actors]: https://github.com/op-rs/kona/tree/main/crates/node/service/src/actors -[actor]: https://github.com/op-rs/kona/blob/main/crates/node/service/src/actors/traits.rs#L19 -[start]: https://github.com/op-rs/kona/blob/main/crates/node/service/src/service/core.rs#L161-L162 -[trait]: https://github.com/op-rs/kona/blob/main/crates/node/service/src/service/core.rs#L56 diff --git a/kona/docs/docs/pages/node/faq/overview.mdx b/kona/docs/docs/pages/node/faq/overview.mdx deleted file mode 100644 index 0869f331287..00000000000 --- a/kona/docs/docs/pages/node/faq/overview.mdx +++ /dev/null @@ -1,6 +0,0 @@ -# FAQ - -1. [Ports](/node/faq/ports) - Detailed account of ports used by the `kona-node` for P2P communication, JSON-RPC APIs, and the Engine API for execution layer communication. - -2. [Profiling](/node/faq/profiling) - Profile performance of the Kona node including CPU profiling and memory analysis. 
- diff --git a/kona/docs/docs/pages/node/install/binaries.mdx b/kona/docs/docs/pages/node/install/binaries.mdx deleted file mode 100644 index 66995da2760..00000000000 --- a/kona/docs/docs/pages/node/install/binaries.mdx +++ /dev/null @@ -1,3 +0,0 @@ -# Kona Binaries - -Download the latest pre-built binaries from the [GitHub releases page](https://github.com/op-rs/kona/releases). diff --git a/kona/docs/docs/pages/node/install/docker.mdx b/kona/docs/docs/pages/node/install/docker.mdx deleted file mode 100644 index 77867e54299..00000000000 --- a/kona/docs/docs/pages/node/install/docker.mdx +++ /dev/null @@ -1,64 +0,0 @@ -import { Callout } from 'vocs/components' - -# Docker - -There are two ways to obtain a Kona Docker image: - -1. [GitHub](#github) -2. [Building it from source](#building-the-docker-image) - -Once you have obtained the Docker image, you can run the node. - -Jump ahead to [Run a Node using Docker page](/node/run/docker). - - -## GitHub - -Kona docker images are published with every release on GitHub Container Registry. - -You can obtain the latest `kona-node` image with: - -```bash -docker pull ghcr.io/op-rs/kona/kona-node -``` - -<Callout type="note"> -Specify a specific version (e.g. v0.1.0) like so. - -```bash -docker pull ghcr.io/op-rs/kona/kona-node:v0.1.0 -``` -</Callout> - -You can test the image with: - -```bash -docker run --rm ghcr.io/op-rs/kona/kona-node --version -``` - -If you can see the [latest release](https://github.com/op-rs/kona/releases) version, -then you've successfully installed Kona via Docker. - - -## Building the Docker image - -To build the image from source, navigate to the root of the repository and run: - -```bash -just build-local kona-node -``` - -<Callout type="note"> -This will create an image with the tag `kona:local`. 
To specify a custom -tag, just pass it in after `kona-node` in the command above, like so: - -```bash -just build-local kona-node my-custom-tag -``` -</Callout> - -The build will likely take several minutes. Once it's built, test it with: - -```bash -docker run kona:local --version -``` diff --git a/kona/docs/docs/pages/node/install/overview.mdx b/kona/docs/docs/pages/node/install/overview.mdx deleted file mode 100644 index 4e2cb496600..00000000000 --- a/kona/docs/docs/pages/node/install/overview.mdx +++ /dev/null @@ -1,56 +0,0 @@ ---- -description: Installation instructions for Kona. ---- - -## Prerequisites - -Before installing Kona, ensure you have the following prerequisites: - -- **Rust toolchain** (MSRV: 1.82) -- **`just`** command runner -- **Docker** (optional, for containerized builds) - -### Installing Rust - -If you don't have Rust installed, you can install it using [rustup](https://rustup.rs/): - -```bash -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - -Rustup is an easy way to update the Rust compiler, and works on all platforms. - -:::tip - -- During installation, when prompted, enter `1` for the default installation. -- After Rust installation completes, try running `cargo version` . If it cannot - be found, run `source $HOME/.cargo/env`. After that, running `cargo version` should return the version, for example `cargo 1.68.2`. -- It's generally advisable to append `source $HOME/.cargo/env` to `~/.bashrc`. - -::: - -The Minimum Supported Rust Version (MSRV) of this project is 1.82.0. If you -already have a version of Rust installed, you can check your version by running -`rustc --version`. To update your version of Rust, run rustup update. - - -### Installing Just - -`just` is a command runner that Kona uses for build tasks. 
Install it with: - -```bash -cargo install just -``` - -## Installation Methods - -There are three ways to obtain Kona: - -- [Docker images](/node/install/docker) -- [Pre-built binaries](/node/install/binaries) -- [Building from source](/node/install/source) - -:::note -If you have Docker installed, we recommend using the [Docker recipe](/node/run/docker) configuration -that will have kona-node, op-reth, Prometheus and Grafana running and syncing with just one command. -::: diff --git a/kona/docs/docs/pages/node/rpc/overview.mdx b/kona/docs/docs/pages/node/rpc/overview.mdx deleted file mode 100644 index d293249f094..00000000000 --- a/kona/docs/docs/pages/node/rpc/overview.mdx +++ /dev/null @@ -1,25 +0,0 @@ -# JSON-RPC - -The `kona-node` supports JSON-RPC for interacting with the node. - -By default, `kona-node` exposes an HTTP JSON-RPC server. A WebSocket JSON-RPC -endpoint is also available and can be enabled with the `--rpc.ws-enabled` flag -or the `KONA_NODE_RPC_WS_ENABLED` environment variable. IPC transport is not -supported. - -### Namespaces - -JSON-RPC methods are grouped into namespaces, which are listed below: - -| Namespace | Description | Sensitive | -| ---------------------------- | ------------------------------------------------------------------------------------------------------ | --------- | -| [`p2p`](/node/rpc/p2p) | The `p2p` API allows you to configure the p2p stack. | Maybe | -| [`rollup`](/node/rpc/rollup) | The `rollup` API provides OP Stack specific rpc methods. | No | -| [`admin`](/node/rpc/admin) | The `admin` API allows you to configure your node. | **Yes** | - - -### Interacting with the RPC - -Kona enables these RPC methods by default. - -You can interact with the RPC using any JSON-RPC client, such as `curl`, `httpie`, or a custom client in your preferred programming language. 
diff --git a/kona/docs/docs/pages/node/run/docker.mdx b/kona/docs/docs/pages/node/run/docker.mdx deleted file mode 100644 index 53ff6e98ff2..00000000000 --- a/kona/docs/docs/pages/node/run/docker.mdx +++ /dev/null @@ -1,177 +0,0 @@ -# Docker Guide - -:::info - -This guide uses Kona's pre-packaged docker config. - -For detailed usage of the `kona-node` binary, head -over to [the binary guide](/node/run/binary). - -::: - -Kona provides a [`kona-node` docker recipe][recipe] -with detailed instructions for running a complete node setup. - -## Quick Start - -The easiest way to run `kona-node` with Docker is using the provided recipe: - -1. **Navigate to the recipe directory:** - ```bash - cd docker/recipes/kona-node - ``` - -2. **Configure environment variables:** - Edit `cfg.env` to set your L1 RPC endpoints: - ```bash - L1_PROVIDER_RPC=https://your-l1-rpc-endpoint - L1_BEACON_API=https://your-l1-beacon-endpoint - ``` - -3. **Start the services:** - ```bash - just up - ``` - -This will start: -- `kona-node` - The OP Stack node implementation -- `op-reth` - Execution layer client -- `prometheus` - Metrics collection -- `grafana` - Monitoring dashboards (accessible at http://localhost:3000) - -## Docker Compose - -In the [provided docker compose][compose], there are a few services -aside from the `kona-node` and `op-reth`. These are `prometheus` -and `grafana` which automatically come provisioned with dashboards -for monitoring and insight into the `kona-node` and `op-reth` services. -For more detail into how Prometheus and Grafana work, head over to the -[Monitoring][monitoring] docs. 
- -The `docker-compose.yaml` uses published images from GitHub Container Registry: - -- **`op-reth`**: ghcr.io/paradigmxyz/op-reth:latest -- **`kona-node`**: ghcr.io/op-rs/kona/kona-node:latest - -### Service Configuration - -#### kona-node Service - -The `kona-node` service is configured with the following key settings: - -- **Ports**: - - `5060` - RPC endpoint - - `9223` - P2P discovery (TCP/UDP) - - `9002` - Metrics -- **Environment**: L1 RPC and Beacon API endpoints are required -- **Volumes**: Persistent data storage and JWT token for engine API authentication - -#### op-reth Service - -The `op-reth` service provides the execution layer: - -- **Ports**: - - `8545` - HTTP RPC - - `8551` - Engine API (authenticated) - - `30303` - P2P discovery - - `9001` - Metrics -- **Configuration**: Pre-configured for OP Sepolia testnet - -## Configuration - -### Network Selection - -By default, the recipe is configured for **OP Sepolia**. To sync a different OP Stack chain: - -1. Set appropriate L1 endpoints for your target network in `cfg.env` -2. Modify the docker-compose.yaml: - - Update `op-reth --chain` parameter - - Update `op-reth --rollup.sequencer-http` endpoint - - Update `kona-node --chain` parameter - -### RPC Trust Configuration - -By default, `kona-node` trusts RPC providers (both L1 and L2). When using public or untrusted RPC endpoints, you should disable trust to enable block hash verification: - -```bash -# In cfg.env or as environment variables: -KONA_NODE_L1_TRUST_RPC=false -KONA_NODE_L2_TRUST_RPC=false -``` - -Or modify the docker-compose.yaml command: -```yaml -kona-node: - command: | - node - --chain op-sepolia - --l1-eth-rpc ${L1_PROVIDER_RPC} - --l1-beacon ${L1_BEACON_API} - --l1-trust-rpc false # Add this for untrusted L1 RPCs - --l2-engine-rpc ws://op-reth:8551 - --l2-trust-rpc false # Add this for untrusted L2 RPCs -``` - -See the [configuration guide](/node/configuration#rpc-trust-configuration) for more details on RPC trust settings. 
- -### Port Configuration - -All host ports can be customized via environment variables in `cfg.env`: - -```bash -# Kona Node ports -KONA_NODE_RPC_PORT=5060 -KONA_NODE_DISCOVERY_PORT=9223 -KONA_NODE_METRICS_PORT=9002 - -# OP Reth ports -OP_RETH_RPC_PORT=8545 -OP_RETH_ENGINE_PORT=8551 -OP_RETH_METRICS_PORT=9001 -OP_RETH_DISCOVERY_PORT=30303 - -# Monitoring -PROMETHEUS_PORT=9090 -``` - -### Logging - -Adjust log levels by setting the `RUST_LOG` environment variable: - -```bash -export RUST_LOG=engine_builder=trace,runtime=debug -``` - -## Management Commands - -The recipe includes convenient Just commands: - -```bash -# Start all services -just up - -# Stop all services -just down - -# Restart all services -just restart - -# Generate JWT token (if needed) -./generate-jwt.sh -``` - -## Using Local Images - -To use locally built images instead of published ones: - -1. **Build the kona-node image:** - ```bash - just build-local kona-node - ``` - -2. **Update docker-compose.yaml** to use `kona-node:local` instead of the published image. - -[monitoring]: ../monitoring.mdx - -[recipe]: https://github.com/op-rs/kona/blob/f86052b5dacec7da46b12441aafab2867069f7e7/docker/recipes/kona-node/README.md -[compose]: https://github.com/op-rs/kona/blob/f86052b5dacec7da46b12441aafab2867069f7e7/docker/recipes/kona-node/docker-compose.yaml diff --git a/kona/docs/docs/pages/node/run/overview.mdx b/kona/docs/docs/pages/node/run/overview.mdx deleted file mode 100644 index b462ad2afb9..00000000000 --- a/kona/docs/docs/pages/node/run/overview.mdx +++ /dev/null @@ -1,38 +0,0 @@ -# Run a Node - -Now that you have [installed the `kona-node`](/node/install/overview), -it's time to run it. - -In this section, we'll guide you through running the kona-node on -various networks and with different configurations. - - -## Supported Networks - -Kona uses the [superchain-registry][scr] to dynamically load -chain configurations for the specified network. 
As such, Kona -can only support networks that are defined this way. - -[scr]: https://github.com/ethereum-optimism/superchain-registry - -To view available networks, the `kona-node` binary provides -a `registry` subcommand that lists all available networks: - -```bash -kona-node registry -``` - -:::tip -Want to add support for a new network? -Feel free to [add a chain](https://github.com/ethereum-optimism/superchain-registry/blob/main/docs/ops.md#adding-a-chain) -to the superchain-registry! -::: - - -## Configuration & Monitoring - -Learn how to configure and monitor your node: - -- **[Configuration](/node/configuration)** - Configure your node -- **[Monitoring](/node/monitoring)** - Set up logs, metrics, and observability - diff --git a/kona/docs/docs/pages/rfc/active/intro.mdx b/kona/docs/docs/pages/rfc/active/intro.mdx deleted file mode 100644 index 5e8a5234946..00000000000 --- a/kona/docs/docs/pages/rfc/active/intro.mdx +++ /dev/null @@ -1,8 +0,0 @@ -# Request For Comment [RFC] - -Documents in this section are in the request-for-comment stage. - -To comment on these documents, [open an issue in the kona repository](https://github.com/op-rs/kona/issues/new) -and provide detail on the changes you're requesting. - -Once the document has been reviewed, they will be moved to the archives. diff --git a/kona/docs/docs/pages/run.mdx b/kona/docs/docs/pages/run.mdx deleted file mode 100644 index 082c82b6ca6..00000000000 --- a/kona/docs/docs/pages/run.mdx +++ /dev/null @@ -1,56 +0,0 @@ -import { Callout } from 'vocs/components' - -# Run a Node - -<Callout type="info"> -This tutorial walks through running the `kona-node` as -a binary. To use docker, head over to the -[Docker Guide](/node/install/docker) which uses a `docker-compose` -setup provided by `kona`. The `docker-compose` setup -automatically bootstraps the `kona-node` with `op-reth`, -provisioning grafana dashboards and a default Prometheus -data source. 
-</Callout> - -## Prerequisites - -In order to follow this tutorial, you'll need: - -1. An L1 Archive node (e.g., `op-geth`) with enough history for the rollup network you want to run. -2. A `kona-node` binary. See [installation](/node/install/binaries) for instructions. -3. A rollup configuration file. See [rollup configuration](/sdk/protocol/genesis/rollup-config) for more information. - -## Quick Start - -The fastest way to get started is to use one of the pre-built configurations: - -```bash -# Download a rollup configuration -curl -o rollup.json https://raw.githubusercontent.com/op-rs/kona/main/configs/base-mainnet.json - -# Run the node -kona-node --rollup-config rollup.json --l1-rpc-url http://localhost:8545 -``` - -## Configuration - -The `kona-node` can be configured using command-line flags or environment variables. For a complete list of options, run: - -```bash -kona-node --help -``` - -### Key Configuration Options - -- `--rollup-config`: Path to the rollup configuration file -- `--l1-rpc-url`: L1 RPC endpoint URL -- `--l2-rpc-url`: L2 RPC endpoint URL (optional) -- `--data-dir`: Directory for storing node data -- `--log-level`: Logging level (debug, info, warn, error) - -## Next Steps - -- [Docker Support](/node/run/docker) - Run with Docker -- [Monitoring](/node/monitoring) - Set up monitoring and metrics -- [CLI Reference](/node/configuration) - Complete CLI documentation -- [Subcommands](/node/subcommands) - Available subcommands diff --git a/kona/docs/docs/pages/sdk/examples/batch-to-frames.mdx b/kona/docs/docs/pages/sdk/examples/batch-to-frames.mdx deleted file mode 100644 index 67b614bea06..00000000000 --- a/kona/docs/docs/pages/sdk/examples/batch-to-frames.mdx +++ /dev/null @@ -1,212 +0,0 @@ -# Transform a Batch into Frames - -:::info -This example performs the reverse transformation as the [frames-to-batch][frames-to-batch] example. -::: - -This example walks through transforming a [`Batch`][batch] into [`Frame`][frame]s. 
- -Effectively, this example demonstrates the _encoding_ process from an L2 batch into the -serialized bytes that are posted to the data availability layer. - -:::danger -Steps and handling of types with respect to chain tip, ordering of frames, re-orgs, and -more are not covered by this example. This example solely demonstrates the most trivial -way to transform an individual [`Batch`][batch] into [`Frame`][frame]s. -::: - - - -## Walkthrough - -The high level transformation is the following. - -```ignore -Batch -> decompressed batch data -> ChannelOut -> frames[] -> bytes[] -``` - -Given the [`Batch`][batch], the first step to encode the batch -using the [`Batch::encode()`][encode-batch] method. The output bytes -need to then be compressed prior to adding them to the -[`ChannelOut`][channel-out]. - -:::info -The [`ChannelOut`][channel-out] type also provides a method for adding -the [`Batch`][batch] itself, handling encoding and compression, but -this method is not available yet. -::: - -Once compressed using the [`compress_brotli`][compress-brotli] method, the -compressed bytes can be added to a newly constructed [`ChannelOut`][channel-out]. -As long as the [`ChannelOut`][channel-out] has [`ready_bytes()`][ready-bytes], -[`Frame`][frame]s can be constructed using the -[`ChannelOut::output_frame()`][output-frame] method, specifying the maximum -frame size. - -Once [`Frame`][frame]s are returned from the [`ChannelOut`][channel-out], -they can be [`Frame::encode`][encode-frame] into raw, serialized data -ready to be batch-submitted to the data-availability layer. - - -## Running this example: - -- Clone the examples repository: `git clone git@github.com:op-rs/kona.git` -- Run: `cargo run --example batch_to_frames` - -```rust -//! An example encoding and decoding a [SingleBatch]. -//! -//! This example demonstrates EIP-2718 encoding a [SingleBatch] -//! through a [ChannelOut] and into individual [Frame]s. -//! -//! Notice, the raw batch is first _encoded_. -//! 
Once encoded, it is compressed into raw data that the channel is constructed with. -//! -//! The [ChannelOut] then outputs frames individually using the maximum frame size, -//! in this case hardcoded to 100, to construct the frames. -//! -//! Finally, once [Frame]s are built from the [ChannelOut], they are encoded and ready -//! to be batch-submitted to the data availability layer. - -#[cfg(feature = "std")] -fn main() { - use alloy_primitives::BlockHash; - use kona_comp::{ChannelOut, CompressionAlgo, VariantCompressor}; - use kona_genesis::RollupConfig; - use kona_protocol::{Batch, ChannelId, SingleBatch}; - - // Use the example transaction - let transactions = example_transactions(); - - // Construct a basic `SingleBatch` - let parent_hash = BlockHash::ZERO; - let epoch_num = 1; - let epoch_hash = BlockHash::ZERO; - let timestamp = 1; - let single_batch = SingleBatch { parent_hash, epoch_num, epoch_hash, timestamp, transactions }; - let batch = Batch::Single(single_batch); - - // Create a new channel. - let id = ChannelId::default(); - let config = RollupConfig::default(); - let compressor: VariantCompressor = CompressionAlgo::Brotli10.into(); - let mut channel_out = ChannelOut::new(id, &config, compressor); - - // Add the compressed batch to the `ChannelOut`. 
- channel_out.add_batch(batch).unwrap(); - - // Output frames - while channel_out.ready_bytes() > 0 { - let frame = channel_out.output_frame(100).expect("outputs frame"); - println!("Frame: {}", alloy_primitives::hex::encode(frame.encode())); - if channel_out.ready_bytes() <= 100 { - channel_out.close(); - } - } - - assert!(channel_out.closed); - println!("Successfully encoded Batch to frames"); -} - -#[cfg(feature = "std")] -fn example_transactions() -> Vec<alloy_primitives::Bytes> { - use alloy_consensus::{SignableTransaction, TxEip1559, TxEnvelope}; - use alloy_eips::eip2718::{Decodable2718, Encodable2718}; - use alloy_primitives::{Address, Signature, U256}; - - let mut transactions = Vec::new(); - - // First Transaction in the batch. - let tx = TxEip1559 { - chain_id: 10u64, - nonce: 2, - max_fee_per_gas: 3, - max_priority_fee_per_gas: 4, - gas_limit: 5, - to: Address::left_padding_from(&[6]).into(), - value: U256::from(7_u64), - input: vec![8].into(), - access_list: Default::default(), - }; - let sig = Signature::test_signature(); - let tx_signed = tx.into_signed(sig); - let envelope: TxEnvelope = tx_signed.into(); - let encoded = envelope.encoded_2718(); - transactions.push(encoded.clone().into()); - let mut slice = encoded.as_slice(); - let decoded = TxEnvelope::decode_2718(&mut slice).unwrap(); - assert!(matches!(decoded, TxEnvelope::Eip1559(_))); - - // Second transaction in the batch. 
- let tx = TxEip1559 { - chain_id: 10u64, - nonce: 2, - max_fee_per_gas: 3, - max_priority_fee_per_gas: 4, - gas_limit: 5, - to: Address::left_padding_from(&[7]).into(), - value: U256::from(7_u64), - input: vec![8].into(), - access_list: Default::default(), - }; - let sig = Signature::test_signature(); - let tx_signed = tx.into_signed(sig); - let envelope: TxEnvelope = tx_signed.into(); - let encoded = envelope.encoded_2718(); - transactions.push(encoded.clone().into()); - let mut slice = encoded.as_slice(); - let decoded = TxEnvelope::decode_2718(&mut slice).unwrap(); - assert!(matches!(decoded, TxEnvelope::Eip1559(_))); - - transactions -} - -#[cfg(not(feature = "std"))] -fn main() { - /* not implemented for no_std */ -} -``` - -[frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html -[batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html -[channel]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html -[add-frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.add_frame -[decode-frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html#method.decode -[hex]: https://docs.rs/alloy_primitives/latest/alloy_primitives/macro.hex.html -[is-ready]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.is_ready -[frame-data]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.frame_data -[bytes]: https://docs.rs/alloy_primitives/latest/alloy_primitives/struct.Bytes.html -[decode-batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html#method.decode -[fjord]: https://specs.optimism.io/protocol/fjord/overview.html -[channel-id]: https://docs.rs/kona-protocol/latest/kona_protocol/type.ChannelId.html - -[encode-batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html#method.encode -[compress-brotli]: 
https://docs.rs/op-alloy-protocol/latest/op_alloy_protocol/struct.BrotliCompressor.html -[channel-out]: https://docs.rs/kona-comp/latest/kona_comp/struct.ChannelOut.html -[ready-bytes]: https://docs.rs/kona-comp/latest/kona_comp/struct.ChannelOut.html#method.ready_bytes -[output-frame]: https://docs.rs/kona-comp/latest/kona_comp/struct.ChannelOut.html#method.output_frame -[encode-frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html#method.encode - - -[frames-to-batch]: /sdk/examples/frames-to-batch - -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/examples/intro.mdx b/kona/docs/docs/pages/sdk/examples/intro.mdx deleted file mode 100644 index 0eab9b96d06..00000000000 --- a/kona/docs/docs/pages/sdk/examples/intro.mdx +++ 
/dev/null @@ -1,11 +0,0 @@ -# Examples - -Examples for working with `kona` crates. - -- [Load a Rollup Config for a Chain ID](/sdk/examples/load-a-rollup-config) -- [Transform Frames to a Batch](/sdk/examples/frames-to-batch) -- [Transform a Batch to Frames](/sdk/examples/batch-to-frames) -- [Create a new L1BlockInfoTx Hardfork Variant](/sdk/examples/new-l1-block-info-tx-hardfork) -- [Create a new `kona-executor` test fixture](/sdk/examples/executor-test-fixtures) -- [Configuring P2P Network Peer Scoring](/sdk/examples/p2p-peer-scoring) -- [Custom Derivation Pipeline with New Stage](/sdk/examples/custom-derivation-pipeline) diff --git a/kona/docs/docs/pages/sdk/fpp-dev/execution.mdx b/kona/docs/docs/pages/sdk/fpp-dev/execution.mdx deleted file mode 100644 index ec2df1b371b..00000000000 --- a/kona/docs/docs/pages/sdk/fpp-dev/execution.mdx +++ /dev/null @@ -1,21 +0,0 @@ -# Execution - -The execution phase of the program is commonly the heaviest portion of the fault proof program, where the computation -that is being verified is performed. - -This phase consumes the outputs of the [prologue phase](/sdk/fpp-dev/prologue), and performs the bulk of the verifiable -computation. After execution has concluded, the outputs are passed along to the [epilogue phase](/sdk/fpp-dev/epilogue) for -final verification. - -## Example - -At a high-level, in the `kona-client` program, the execution phase: - -1. Derives the inputs to the L2 derivation pipeline by unrolling the L1 head hash fetched in the epilogue. -1. Passes the inputs to the L2 derivation pipeline, producing the L2 execution payloads required to reproduce - the L2 safe chain at the claimed height. -1. Executes the payloads produced by the L2 derivation pipeline, producing the [L2 output root][l2-output-root] at the - L2 claim height. 
- - -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction diff --git a/kona/docs/docs/pages/sdk/fpp-dev/intro.mdx b/kona/docs/docs/pages/sdk/fpp-dev/intro.mdx deleted file mode 100644 index e64d9beac34..00000000000 --- a/kona/docs/docs/pages/sdk/fpp-dev/intro.mdx +++ /dev/null @@ -1,21 +0,0 @@ -# Fault Proof Program Development - -This chapter provides an overview of [Fault Proof Program](/glossary#fault-proof-program) development -on top of the custom FPVM targets supported by [Kona][kona]. - -At a high level, a Fault Proof Program is not much different from a regular `no_std` Rust program. A custom entrypoint is provided, and the program -is compiled down to a custom target, which is then executed on the FPVM. - -Fault Proof Programs are structured with 3 stages: -1. **Prologue**: The bootstrapping stage, where the program is loaded into memory and the initial state is set up. During this phase, the program's initial - state is written to the FPVM's memory, and the program's entrypoint is set. -1. **Execution**: The main execution stage, where the program is executed on the FPVM. During this phase, the program's entrypoint is called, and the - program is executed until it exits. -1. **Epilogue**: The finalization stage, where the program's final state is read from the FPVM's memory. During this phase, the program's final state is - inspected and properties of the state transition are verified. - -The following sections will provide a more in-depth overview of each of these stages, as well as the tools and abstractions provided by Kona for -developing your own Fault Proof Programs. 
- - -[kona]: https://github.com/op-rs/kona diff --git a/kona/docs/docs/pages/sdk/overview.mdx b/kona/docs/docs/pages/sdk/overview.mdx deleted file mode 100644 index f62a6db7c8e..00000000000 --- a/kona/docs/docs/pages/sdk/overview.mdx +++ /dev/null @@ -1,42 +0,0 @@ -# Kona as a Library - -<div style={{display: 'flex', gap: '8px', flexWrap: 'wrap'}}> - <a href="https://github.com/op-rs/kona/actions/workflows/rust_ci.yaml"><img src="https://img.shields.io/github/actions/workflow/status/op-rs/kona/rust_ci.yaml?style=flat&labelColor=1C2C2E&label=ci&color=BEC5C9&logo=GitHub%20Actions&logoColor=BEC5C9" alt="CI" /></a> - <a href="https://codecov.io/gh/op-rs/kona"><img src="https://img.shields.io/codecov/c/gh/op-rs/kona?style=flat&labelColor=1C2C2E&logo=Codecov&color=BEC5C9&logoColor=BEC5C9" alt="Codecov" /></a> - <a href="https://github.com/op-rs/kona/blob/main/LICENSE.md"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=googledocs&label=license&logoColor=BEC5C9" alt="License" /></a> - <a href="https://rollup.yoga"><img src="https://img.shields.io/badge/Docs-854a15?style=flat&labelColor=1C2C2E&color=BEC5C9&logo=mdBook&logoColor=BEC5C9" alt="Docs" /></a> -</div> - - -Kona is designed as a modular, library-first OP Stack implementation in Rust. This design philosophy allows developers to integrate Kona components into their applications and build custom solutions on top of the OP Stack. 
- -## Library Structure - -Kona is organized as a collection of focused crates that can be used independently or together: - -- **Protocol Libraries**: Core protocol logic and data structures -- **Node Components**: Modular node architecture for building custom rollup nodes -- **Proof System**: Fault proof generation and verification -- **Utilities**: Common utilities and helper functions - -## Key Benefits - -- **Modularity**: Use only the components you need -- **Performance**: Rust's zero-cost abstractions and memory safety -- **Extensibility**: Easy to extend and customize for specific use cases -- **Reliability**: Strong typing and comprehensive testing - -## Getting Started - -To use Kona as a library, add the relevant crates to your `Cargo.toml`: - -```toml -[dependencies] -kona-derive = "0.1" -kona-protocol = "0.1" -kona-node = "0.1" -``` - -## Examples - -See the [Examples](/sdk/examples/intro) section for practical usage examples and integration patterns. diff --git a/kona/docs/docs/pages/sdk/proof/intro.mdx b/kona/docs/docs/pages/sdk/proof/intro.mdx deleted file mode 100644 index 17529bc862b..00000000000 --- a/kona/docs/docs/pages/sdk/proof/intro.mdx +++ /dev/null @@ -1,61 +0,0 @@ -# Kona Proof SDK - -Welcome to the Kona Proof SDK, a powerful set of libraries designed -from first principles to build proofs with the OP Stack STF on top -of the OP Stack's FPVMs and other verifiable backends like [SP-1][sp-1], -[Risc0][rzero], [Intel TDX][tdx], and [AMD SEV-SNP][sev-snp]. At its -core, Kona is built on the principles of modularity, extensibility, -and developer empowerment. - -## A Foundation of Flexibility - -The kona repository is more than a fault proof program for the OP Stack -— it's an ecosystem of interoperable components, each crafted with -reusability and extensibility as primary goals. 
While we provide -[Fault Proof VM](/glossary#fault-proof-vm) and "online" backends -for key components like `kona-derive` and `kona-executor`, the true -power of `kona` lies in its adaptability. - -## Extend Without Forking - -One of Kona's standout features is its ability to support custom -features and data sources without requiring you to fork the entire -project. Through careful use of Rust's powerful trait system and -abstract interfaces, we've created a framework that allows you to -plug in your own features and ideas seamlessly. - -## What You'll Learn - -In this section of the developer book, we'll dive deep into the Kona SDK, covering: -* **Building on the FPVM Backend**: Learn how to leverage the Fault Proof VM tooling to create your own fault proof programs. -* **Creating Custom Backends**: Discover the process of designing and implementing your own backend to run `kona-client` or a variation of it on different targets. -* **Extending Core Components**: Explore techniques for creating new constructs that integrate smoothly with crates like `kona-derive` and `kona-executor`. - -Whether you're looking to use Kona as-is, extend its functionality, or create entirely new programs based on its libraries, -this guide is intended to provide you with the knowledge and tools you need to succeed. 
- -[sp-1]: https://github.com/succinctlabs/sp1 -[rzero]: https://github.com/risc0/risc0 -[tdx]: https://www.intel.com/content/www/us/en/developer/tools/trust-domain-extensions/documentation.html -[sev-snp]: https://www.amd.com/en/developer/sev.html - -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/protocol/derive/intro.mdx b/kona/docs/docs/pages/sdk/protocol/derive/intro.mdx deleted file mode 100644 index 16178bcf999..00000000000 --- a/kona/docs/docs/pages/sdk/protocol/derive/intro.mdx +++ /dev/null @@ -1,312 +0,0 @@ -import { Callout } from 'vocs/components' - -# The `kona-derive` Derivation Pipeline - -[`kona-derive`][kd] defines an entirely trait-abstracted, `no_std` derivation -pipeline for the OP Stack. 
It can be used through the [`Pipeline`][p] trait, -which is implemented for the concrete [`DerivationPipeline`][dp] object. - -This document dives into the inner workings of the derivation pipeline, its -stages, and how to build and interface with Kona's pipeline. Other documents -in this section will provide a comprehensive overview of Derivation Pipeline -extensibility including trait-abstracted providers, custom stages, signaling, -and hardfork activation including multiplexed stages. - -- [Swapping out a stage](/sdk/protocol/derive/stages) -- [Defining a custom Provider](/sdk/protocol/derive/providers) -- [Extending Pipeline Signals](/sdk/protocol/derive/signaling) -- [Implementing Hardfork Activations](/sdk/protocol/hardforks) - - -## What is a Derivation Pipeline? - -Simply put, an OP Stack Derivation Pipeline transforms data on L1 into L2 -payload attributes that can be executed to produce the canonical L2 block. - -Within a pipeline, there are a set of stages that break up this transformation -further. When composed, these stages operate over the input data, sequentially -producing payload attributes. - -In [`kona-derive`][kd], stages are architected using composition - each sequential -stage owns the previous one, forming a stack. For example, let's define stage A -as the first stage, accepting raw L1 input data, and stage C produces the pipeline -output - payload attributes. Stage B "owns" stage A, and stage C then owns stage B. -Using this example, the [`DerivationPipeline`][dp] type in [`kona-derive`][kd] only -holds stage C, since ownership of the other stages is nested within stage C. - -<Callout type="info"> -In a future architecture of the derivation pipeline, stages could be made -standalone such that communication between stages happens through channels. -In a multi-threaded, non-fault-proof environment, these stages can then -run in parallel since stage ownership is decoupled. 
-</Callout> - - -## Kona's Derivation Pipeline - -The top-level stage in [`kona-derive`][kd] that produces -[`OpAttributesWithParent`][attributes] is the [`AttributesQueue`][attributes-queue]. - -Post-Holocene (the Holocene hardfork), the following stages are composed by -the [`DerivationPipeline`][dp]. -- [`AttributesQueue`][attributes-queue] - - [`BatchProvider`][batch-provider] - - [`BatchStream`][batch-stream] - - [`ChannelReader`][channel-reader] - - [`ChannelProvider`][channel-provider] - - [`FrameQueue`][frame-queue] - - [`L1Retrieval`][retrieval] - - [`IndexedTraversal` or `PollingTraversal`][traversal] - -Notice, from top to bottom, each stage owns the stage nested below it. -Where the [`IndexedTraversal` or `PollingTraversal`][traversal] stage iterates over L1 data, the -[`AttributesQueue`][attributes-queue] stage produces -[`OpAttributesWithParent`][attributes], creating a function that transforms -L1 data into payload attributes. - - -## The [`Pipeline`][p] interface - -Now that we've broken down the stages inside the [`DerivationPipeline`][dp] -type, let's move up another level to break down how the [`DerivationPipeline`][dp] -type functions itself. At the highest level, [`kona-derive`][kd] defines the -interface for working with the pipeline through the [`Pipeline`][p] trait. - -[`Pipeline`][p] provides two core methods. -- `peek() -> Option<&OpAttributesWithParent>` -- `async step() -> StepResult` - -Functionally, a pipeline can be "stepped" on, which attempts to derive -payload attributes from input data. Steps do not guarantee that payload attributes -are produced, they only attempt to advance the stages within the pipeline. - -The `peek()` method provides a way to check if attributes are prepared. -Beyond `peek()` returning `Option::Some(&OpAttributesWithParent)`, the [`Pipeline`][p] -extends the [Iterator][iterator] trait, providing a way to consume the generated payload -attributes. 
- - -## Constructing a Derivation Pipeline - -[`kona-derive`][kd] provides a [`PipelineBuilder`][builder] to abstract the complexity -of generics away from the downstream consumers. Below we provide an example for using -the [`PipelineBuilder`][builder] to instantiate a [`DerivationPipeline`][dp]. - -```rust,ignore -// Imports -use std::sync::Arc; -use kona_protocol::BlockInfo; -use kona_genesis::RollupConfig; -use kona_providers_alloy::*; - -// Use a default rollup config. -let rollup_config = Arc::new(RollupConfig::default()); - -// Providers are instantiated to with localhost urls (`127.0.0.1`) -let chain_provider = - AlloyChainProvider::new_http("http://127.0.0.1:8545".try_into().unwrap()); -let l2_chain_provider = AlloyL2ChainProvider::new_http( - "http://127.0.0.1:9545".try_into().unwrap(), - rollup_config.clone(), -); -let beacon_client = OnlineBeaconClient::new_http("http://127.0.0.1:5555".into()); -let blob_provider = OnlineBlobProvider::new(beacon_client, None, None); -let blob_provider = OnlineBlobProviderWithFallback::new(blob_provider, None); -let dap_source = - EthereumDataSource::new(chain_provider.clone(), blob_provider, &rollup_config); -let builder = StatefulAttributesBuilder::new( - rollup_config.clone(), - l2_chain_provider.clone(), - chain_provider.clone(), -); - -// This is the starting L1 block for the pipeline. -// -// To get the starting L1 block for a given L2 block, -// use the `AlloyL2ChainProvider::l2_block_info_by_number` -// method to get the `L2BlockInfo.l1_origin`. This l1_origin -// is the origin that can be passed here. -let origin = BlockInfo::default(); - -// Build the pipeline using the `PipelineBuilder`. -// Alternatively, use the `new_online_pipeline` helper -// method provided by the `kona-derive-alloy` crate. 
-let pipeline = PipelineBuilder::new() - .rollup_config(rollup_config.clone()) - .dap_source(dap_source) - .l2_chain_provider(l2_chain_provider) - .chain_provider(chain_provider) - .builder(builder) - .origin(origin) - .build(); - -assert_eq!(pipeline.rollup_config, rollup_config); -assert_eq!(pipeline.origin(), Some(origin)); -``` - - -## Producing Payload Attributes - -Since the [`Pipeline`][p] trait extends the [`Iterator`][iterator] trait, -producing [`OpAttributesWithParent`][attributes] is as simple as calling -[`Iterator::next()`][next] method on the [`DerivationPipeline`][dp]. - -Extending the example from above, producing the attributes is shown below. - -```rust -// Import the iterator trait to show where `.next` is sourced. -use core::iter::Iterator; - -// ... -// example from above constructing the pipeline -// ... - -let attributes = pipeline.next(); - -// Since we haven't stepped on the pipeline, -// there shouldn't be any payload attributes prepared. -assert!(attributes.is_none()); -``` - -As demonstrated, the pipeline won't have any payload attributes -without having been "stepped" on. Naively, we can continuously -step on the pipeline until attributes are ready, and then consume them. - -```rust -// Import the iterator trait to show where `.next` is sourced. -use core::iter::Iterator; - -// ... -// example from constructing the pipeline -// ... - -// Continuously step on the pipeline until attributes are prepared. -let l2_safe_head = L2BlockInfo::default(); -loop { - if matches!(pipeline.step(l2_safe_head).await, StepResult::PreparedAttributes) { - // The pipeline has successfully prepared payload attributes, break the loop. - break; - } -} - -// Since the loop is only broken once attributes are prepared, -// this must be `Option::Some`. -let attributes = pipeline.next().expect("Must contain payload attributes"); - -// The parent of the prepared payload attributes should be -// the l2 safe head that we "stepped on". 
-assert_eq!(attributes.parent, l2_safe_head); -``` - -Importantly, the above is not sufficient logic to produce payload attributes and drive -the derivation pipeline. There are multiple different `StepResult`s to handle when -stepping on the pipeline, including advancing the origin, re-orgs, and pipeline resets. -In the next section, pipeline resets are outlined. - -For an up-to-date driver that runs the derivation pipeline as part of the fault proof -program, reference kona's [client driver][driver]. - - -## Resets - -When stepping on the [`DerivationPipeline`][dp] produces a reset error, the driver -of the pipeline must perform a reset on the pipeline. This is done by sending a "signal" -through the [`DerivationPipeline`][dp]. Below demonstrates this. - -```rust -// Import the iterator trait to show where `.next` is sourced. -use core::iter::Iterator; - -// ... -// example from constructing the pipeline -// ... - -// Continuously step on the pipeline until attributes are prepared. -let l2_safe_head = L2BlockInfo::default(); -loop { - match pipeline.step(l2_safe_head).await { - StepResult::StepFailed(e) | StepResult::OriginAdvanceErr(e) => { - match e { - PipelineErrorKind::Reset(e) => { - // Get the system config from the provider. - let system_config = l2_chain_provider - .system_config_by_number( - l2_safe_head.block_info.number, - rollup_config.clone(), - ) - .await?; - // Reset the pipeline to the initial L2 safe head and L1 origin. - self.pipeline - .signal( - ResetSignal { - l2_safe_head: l2_safe_head, - l1_origin: pipeline - .origin() - .ok_or_else(|| anyhow!("Missing L1 origin"))?, - system_config: Some(system_config), - } - .signal(), - ) - .await?; - // ... - } - _ => { /* Handling left to the driver */ } - } - } - _ => { /* Handling left to the driver */ } - } -} -``` - - -## Learn More - -[`kona-derive`][kd] is one implementation of the OP Stack derivation pipeline. 
- -To learn more, it is highly encouraged to read the ["first" derivation pipeline][op-dp] -written in [golang][go]. It is often colloquially referred to as the "reference" -implementation and provides the basis for how much of Kona's derivation pipeline -was built. - - -## Provenance - -> The lore do be bountiful. -> -> - Bard XVIII of the Logic Gates - -The kona project spawned out of the need to build a secondary fault proof for the OP Stack. -Initially, we sought to re-use [magi][magi]'s derivation pipeline, but the ethereum-rust -ecosystem moves quickly and [magi][magi] was behind by a generation of types - using -[ethers-rs] instead of new [alloy][alloy] types. Additionally, [magi][magi]'s derivation -pipeline was not `no_std` compatible - a hard requirement for running a rust fault proof -program on top of the RISCV or MIPS ISAs. - -So, [@clabby][clabby] and [@refcell][refcell] stood up [kona][kona] in a few months. - - -[driver]: https://docs.rs/kona-driver/latest/kona_driver/struct.Driver.html -[next]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html#tymethod.next -[builder]: https://docs.rs/kona-derive/latest/kona_derive/struct.PipelineBuilder.html -[alloy]: https://github.com/alloy-rs/alloy -[ethers-rs]: https://github.com/gakonst/ethers-rs -[kona]: https://github.com/op-rs/kona -[clabby]: https://github.com/clabby -[refcell]: https://github.com/refcell -[go]: https://go.dev/ -[magi]: https://github.com/a16z/magi -[kd]: https://crates.io/crates/kona-derive -[iterator]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html -[p]: https://docs.rs/kona-derive/latest/kona_derive/trait.Pipeline.html -[op-dp]: https://github.com/ethereum-optimism/optimism/tree/develop/op-node/rollup/derive -[dp]: https://docs.rs/kona-derive/latest/kona_derive/struct.DerivationPipeline.html -[attributes]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.OpAttributesWithParent.html - -[attributes-queue]: 
https://docs.rs/kona-derive/latest/kona_derive/struct.AttributesQueue.html -[batch-provider]: https://docs.rs/kona-derive/latest/kona_derive/struct.BatchProvider.html -[batch-stream]: https://docs.rs/kona-derive/latest/kona_derive/struct.BatchStream.html -[channel-reader]: https://docs.rs/kona-derive/latest/kona_derive/struct.ChannelReader.html -[channel-provider]: https://docs.rs/kona-derive/latest/kona_derive/struct.ChannelProvider.html -[frame-queue]: https://docs.rs/kona-derive/latest/kona_derive/struct.FrameQueue.html -[retrieval]: https://docs.rs/kona-derive/latest/kona_derive/struct.L1Retrieval.html -[traversal]: https://docs.rs/kona-derive/latest/kona_derive/struct.IndexedTraversal.html diff --git a/kona/docs/docs/pages/sdk/protocol/genesis/intro.mdx b/kona/docs/docs/pages/sdk/protocol/genesis/intro.mdx deleted file mode 100644 index f22cac45220..00000000000 --- a/kona/docs/docs/pages/sdk/protocol/genesis/intro.mdx +++ /dev/null @@ -1,32 +0,0 @@ -# Genesis - -<a href="https://crates.io/crates/kona-genesis"><img src="https://img.shields.io/crates/v/kona-genesis.svg?label=kona-genesis" alt="kona-genesis crate" /></a> - -The genesis crate contains types related to chain genesis. - -This section contains in-depth sections on building with [`kona-genesis`][genesis] crate types. 
- -- [The Rollup Config](./rollup-config.mdx) -- [The System Config](./system-config.mdx) - - -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/protocol/interop.mdx b/kona/docs/docs/pages/sdk/protocol/interop.mdx deleted file mode 100644 index a11d1e346de..00000000000 --- a/kona/docs/docs/pages/sdk/protocol/interop.mdx +++ /dev/null @@ -1,5 +0,0 @@ -# Interop - -<a href="https://crates.io/crates/kona-interop"><img src="https://img.shields.io/crates/v/kona-interop.svg?label=kona-interop" alt="kona-interop crate" /></a> - -`kona-interop` provides core types for the interop protocol. 
diff --git a/kona/docs/docs/pages/sdk/protocol/intro.mdx b/kona/docs/docs/pages/sdk/protocol/intro.mdx deleted file mode 100644 index f8c1c14f3e4..00000000000 --- a/kona/docs/docs/pages/sdk/protocol/intro.mdx +++ /dev/null @@ -1,38 +0,0 @@ -# Kona Protocol Libraries - -The Kona monorepo contains a set of protocol crates that are designed -to be `no_std` compatible for Kona's fault proof sdk. Protocol crates -are built on [alloy][alloy] and [op-alloy][op-alloy] types. - -The following protocol crates are published to [crates.io][crates]. - -<div style={{display: 'flex', gap: '8px', flexWrap: 'wrap'}}> - <a href="https://crates.io/crates/kona-hardforks"><img src="https://img.shields.io/crates/v/kona-hardforks.svg?label=kona-hardforks" alt="kona-hardforks crate" /></a> - <a href="https://crates.io/crates/kona-registry"><img src="https://img.shields.io/crates/v/kona-registry.svg?label=kona-registry" alt="kona-registry crate" /></a> - <a href="https://crates.io/crates/kona-protocol"><img src="https://img.shields.io/crates/v/kona-protocol.svg?label=kona-protocol" alt="kona-protocol crate" /></a> - <a href="https://crates.io/crates/kona-genesis"><img src="https://img.shields.io/crates/v/kona-genesis.svg?label=kona-genesis" alt="kona-genesis crate" /></a> - <a href="https://crates.io/crates/kona-interop"><img src="https://img.shields.io/crates/v/kona-interop.svg?label=kona-interop" alt="kona-interop crate" /></a> - <a href="https://crates.io/crates/kona-derive"><img src="https://img.shields.io/crates/v/kona-derive.svg?label=kona-derive" alt="kona-derive crate" /></a> - <a href="https://crates.io/crates/kona-driver"><img src="https://img.shields.io/crates/v/kona-driver.svg?label=kona-driver" alt="kona-driver crate" /></a> -</div> - -At the lowest level, `kona-genesis` and `kona-hardforks` expose -core genesis and hardfork types. 
- -`kona-protocol` sits just above `kona-genesis`, composing genesis types -into other core protocol types, as well as many independent protocol types. - -More recently, the `kona-interop` crate was introduced that contains types -specific to [Interop][interop]. - -`kona-registry` contains bindings to the [superchain-registry][scr]. -The registry is available in a `no_std` environment -but requires `serde` to read serialized configs at compile time. `kona-registry` uses -types defined in `kona-genesis` to deserialize the superchain registry configs at compile time. - - -[crates]: https://crates.io -[alloy]: https://github.com/alloy-rs/alloy -[op-alloy]: https://github.com/alloy-rs/op-alloy -[interop]: https://specs.optimism.io/interop/overview.html -[scr]: https://github.com/ethereum-optimism/superchain-registry diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/intro.mdx b/kona/docs/docs/pages/sdk/protocol/protocol/intro.mdx deleted file mode 100644 index ef8bd48d05f..00000000000 --- a/kona/docs/docs/pages/sdk/protocol/protocol/intro.mdx +++ /dev/null @@ -1,65 +0,0 @@ -# Protocol - -<a href="https://crates.io/crates/kona-protocol"><img src="https://img.shields.io/crates/v/kona-protocol.svg?label=kona-protocol" alt="kona-protocol crate" /></a> - -The [`kona-protocol`][protocol] crate contains types, constants, and methods -specific to Optimism derivation and batch-submission. - -[`kona-protocol`][protocol] supports `no_std`. - -## Background - -Protocol types are primarily used for L2 chain derivation. This section will -break down L2 chain derivation as it relates to types defined in -`kona-protocol` - that is, from the raw L2 chain data posted to L1, to the -[`Batch`][batch] type. And since the [`Batch`][batch] type naively breaks up -into the payload attributes, once executed, it becomes the canonical L2 block! -Note though, this provides an incredibly simplified introduction. 
It is advised -to reference [the specs][s] for the most up-to-date information regarding -derivation. - -The L2 chain is derived from data posted to the L1 chain - either as calldata -or blob data. Data is iteratively pulled from each L1 block and translated -into the first type defined by `kona-protocol`: the [`Frame`][frame] type. - -[`Frame`][frame]s are [parsed][parsed] from the raw data. Each [`Frame`][frame] -is a part of a [`Channel`][channel], the next type one level up in deriving -L2 blocks. [`Channel`][channel]s have IDs that frames reference. [`Frame`][frame]s -are [added][added] iteratively to the [`Channel`][channel]. Once a -[`Channel`][channel] [is ready][ready], it can be used to read a [`Batch`][batch]. - -Since a [`Channel`][channel] stitches together frames, it contains the raw frame -data. In order to turn this [`Channel`][channel] data into a [`Batch`][batch], -it needs to be decompressed using the respective (de)compression algorithm -(see [the channel specs][channel-specs] for more detail on this). Once -decompressed, the raw data can be [decoded][decoded] into the [`Batch`][batch] -type. 
- - -## Sections - -#### Core Derivation Types (discussed above) - -- [Frames](./frames.mdx) -- [Channels](./channels.mdx) -- [Batches](./batches.mdx) - -#### Other Critical Protocol Types - -- [BlockInfo](./block-info.mdx) -- [L2BlockInfo](./l2-block-info.mdx) - - - -[decoded]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html#method.decode -[batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html -[ready]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.is_ready -[added]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.add_frame -[channel]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html -[frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html -[parsed]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html#method.parse_frames - -[protocol]: https://crates.io/crates/kona-protocol -[s]: https://specs.optimism.io/protocol/derivation.html#overview -[lcd]: https://specs.optimism.io/protocol/derivation.html#overview -[channel-specs]: https://specs.optimism.io/protocol/derivation.html#channel-format diff --git a/kona/docs/justfile b/kona/docs/justfile deleted file mode 100644 index d0f2ebd5806..00000000000 --- a/kona/docs/justfile +++ /dev/null @@ -1,11 +0,0 @@ -# Run the vocs documentation -run-vocs: - npm install && npm run dev -- --host - -# Build the vocs static site -build-vocs: - npm install && npm run build - -# Builds and opens the static site in the browser -open-site: build-vocs - open docs/dist/index.html diff --git a/kona/docs/package-lock.json b/kona/docs/package-lock.json deleted file mode 100644 index 6c160d3391c..00000000000 --- a/kona/docs/package-lock.json +++ /dev/null @@ -1,10448 +0,0 @@ -{ - "name": "kona-docs", - "version": "0.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "kona-docs", - "version": "0.0.0", - "dependencies": { - "react": "19.2.1", - 
"react-dom": "19.2.1", - "vocs": "1.2.1" - }, - "devDependencies": { - "@types/node": "latest", - "@types/react": "latest", - "tailwindcss": "^4.1.11", - "typescript": "latest" - } - }, - "node_modules/@antfu/install-pkg": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz", - "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==", - "license": "MIT", - "dependencies": { - "package-manager-detector": "^1.3.0", - "tinyexec": "^1.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/antfu" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", - "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.5.tgz", - "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-module-transforms": "^7.28.3", - "@babel/helpers": "^7.28.4", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.5", - "@babel/types": 
"^7.28.5", - "@jridgewell/remapping": "^2.3.5", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/generator": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", - "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.28.5", - "@babel/types": "^7.28.5", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", - "integrity": 
"sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", - "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.27.1.tgz", - "integrity": "sha512-1gn1Up5YXka3YYAHGKpbideQ5Yjf1tDa9qYcgysz+cNCXukyLl6DjPXhD3VRwSb8c0J9tA4b2+rHEZtc6R0tlw==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", - "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.28.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", - "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", - "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/parser": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", - "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.5" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.27.1.tgz", - "integrity": "sha512-xfYCBMxveHrRMnAWl1ZlPXOZjzkN82THFvLhQhFXFt81Z5HnN+EtUkZhv/zcKpmT3fzmWZB0ywiBrbC3vogbwQ==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-react-jsx-self": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", - "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-react-jsx-source": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", - "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/runtime": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.4.tgz", - "integrity": "sha512-Q/N6JNWvIvPnLDvjlE1OUBLPQHH6l3CltCEsHIujp45zQUSSh8K+gHnaEX45yAT1nyngnINhvWtzN+Nb9D8RAQ==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/traverse": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", - "integrity": 
"sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.5", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/types": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", - "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", - "license": "MIT", - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.28.5" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@braintree/sanitize-url": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.1.tgz", - "integrity": "sha512-i1L7noDNxtFyL5DmZafWy1wRVhGehQmzZaz1HiN5e7iylJMSZR7ekOV7NsIqa5qBldlLrsKv4HbgFUVlQrz8Mw==", - "license": "MIT" - }, - "node_modules/@chevrotain/cst-dts-gen": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz", - "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/gast": "11.0.3", - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" - } - }, - "node_modules/@chevrotain/gast": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz", - "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/types": "11.0.3", - "lodash-es": "4.17.21" - } - }, - "node_modules/@chevrotain/regexp-to-ast": { - "version": "11.0.3", - "resolved": 
"https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz", - "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==", - "license": "Apache-2.0" - }, - "node_modules/@chevrotain/types": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz", - "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==", - "license": "Apache-2.0" - }, - "node_modules/@chevrotain/utils": { - "version": "11.0.3", - "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz", - "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==", - "license": "Apache-2.0" - }, - "node_modules/@clack/core": { - "version": "0.3.5", - "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.3.5.tgz", - "integrity": "sha512-5cfhQNH+1VQ2xLQlmzXMqUoiaH0lRBq9/CLW9lTyMbuKLC3+xEK01tHVvyut++mLOn5urSHmkm6I0Lg9MaJSTQ==", - "license": "MIT", - "dependencies": { - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, - "node_modules/@clack/prompts": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.7.0.tgz", - "integrity": "sha512-0MhX9/B4iL6Re04jPrttDm+BsP8y6mS7byuv0BvXgdXhbV5PdlsHt55dvNsuBCPZ7xq1oTAOOuotR9NFbQyMSA==", - "bundleDependencies": [ - "is-unicode-supported" - ], - "license": "MIT", - "dependencies": { - "@clack/core": "^0.3.3", - "is-unicode-supported": "*", - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, - "node_modules/@clack/prompts/node_modules/is-unicode-supported": { - "version": "1.3.0", - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@emotion/hash": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz", - "integrity": 
"sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==", - "license": "MIT" - }, - "node_modules/@esbuild/aix-ppc64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.0.tgz", - "integrity": "sha512-KuZrd2hRjz01y5JK9mEBSD3Vj3mbCvemhT466rSuJYeE/hjuBrHfjjcjMdTm/sz7au+++sdbJZJmuBwQLuw68A==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.0.tgz", - "integrity": "sha512-j67aezrPNYWJEOHUNLPj9maeJte7uSMM6gMoxfPC9hOg8N02JuQi/T7ewumf4tNvJadFkvLZMlAq73b9uwdMyQ==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.0.tgz", - "integrity": "sha512-CC3vt4+1xZrs97/PKDkl0yN7w8edvU2vZvAFGD16n9F0Cvniy5qvzRXjfO1l94efczkkQE6g1x0i73Qf5uthOQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/android-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.0.tgz", - "integrity": "sha512-wurMkF1nmQajBO1+0CJmcN17U4BP6GqNSROP8t0X/Jiw2ltYGLHpEksp9MpoBqkrFR3kv2/te6Sha26k3+yZ9Q==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.0.tgz", - "integrity": "sha512-uJOQKYCcHhg07DL7i8MzjvS2LaP7W7Pn/7uA0B5S1EnqAirJtbyw4yC5jQ5qcFjHK9l6o/MX9QisBg12kNkdHg==", - 
"cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/darwin-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.0.tgz", - "integrity": "sha512-8mG6arH3yB/4ZXiEnXof5MK72dE6zM9cDvUcPtxhUZsDjESl9JipZYW60C3JGreKCEP+p8P/72r69m4AZGJd5g==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.0.tgz", - "integrity": "sha512-9FHtyO988CwNMMOE3YIeci+UV+x5Zy8fI2qHNpsEtSF83YPBmE8UWmfYAQg6Ux7Gsmd4FejZqnEUZCMGaNQHQw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/freebsd-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.0.tgz", - "integrity": "sha512-zCMeMXI4HS/tXvJz8vWGexpZj2YVtRAihHLk1imZj4efx1BQzN76YFeKqlDr3bUWI26wHwLWPd3rwh6pe4EV7g==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.0.tgz", - "integrity": "sha512-t76XLQDpxgmq2cNXKTVEB7O7YMb42atj2Re2Haf45HkaUpjM2J0UuJZDuaGbPbamzZ7bawyGFUkodL+zcE+jvQ==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.0.tgz", - "integrity": "sha512-AS18v0V+vZiLJyi/4LphvBE+OIX682Pu7ZYNsdUHyUKSoRwdnOsMf6FDekwoAFKej14WAkOef3zAORJgAtXnlQ==", - 
"cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ia32": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.0.tgz", - "integrity": "sha512-Mz1jxqm/kfgKkc/KLHC5qIujMvnnarD9ra1cEcrs7qshTUSksPihGrWHVG5+osAIQ68577Zpww7SGapmzSt4Nw==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-loong64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.0.tgz", - "integrity": "sha512-QbEREjdJeIreIAbdG2hLU1yXm1uu+LTdzoq1KCo4G4pFOLlvIspBm36QrQOar9LFduavoWX2msNFAAAY9j4BDg==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-mips64el": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.0.tgz", - "integrity": "sha512-sJz3zRNe4tO2wxvDpH/HYJilb6+2YJxo/ZNbVdtFiKDufzWq4JmKAiHy9iGoLjAV7r/W32VgaHGkk35cUXlNOg==", - "cpu": [ - "mips64el" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-ppc64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.0.tgz", - "integrity": "sha512-z9N10FBD0DCS2dmSABDBb5TLAyF1/ydVb+N4pi88T45efQ/w4ohr/F/QYCkxDPnkhkp6AIpIcQKQ8F0ANoA2JA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-riscv64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.0.tgz", - "integrity": 
"sha512-pQdyAIZ0BWIC5GyvVFn5awDiO14TkT/19FTmFcPdDec94KJ1uZcmFs21Fo8auMXzD4Tt+diXu1LW1gHus9fhFQ==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-s390x": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.0.tgz", - "integrity": "sha512-hPlRWR4eIDDEci953RI1BLZitgi5uqcsjKMxwYfmi4LcwyWo2IcRP+lThVnKjNtk90pLS8nKdroXYOqW+QQH+w==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/linux-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.0.tgz", - "integrity": "sha512-1hBWx4OUJE2cab++aVZ7pObD6s+DK4mPGpemtnAORBvb5l/g5xFGk0vc0PjSkrDs0XaXj9yyob3d14XqvnQ4gw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.0.tgz", - "integrity": "sha512-6m0sfQfxfQfy1qRuecMkJlf1cIzTOgyaeXaiVaaki8/v+WB+U4hc6ik15ZW6TAllRlg/WuQXxWj1jx6C+dfy3w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/netbsd-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.0.tgz", - "integrity": "sha512-xbbOdfn06FtcJ9d0ShxxvSn2iUsGd/lgPIO2V3VZIPDbEaIj1/3nBBe1AwuEZKXVXkMmpr6LUAgMkLD/4D2PPA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.0.tgz", - "integrity": 
"sha512-fWgqR8uNbCQ/GGv0yhzttj6sU/9Z5/Sv/VGU3F5OuXK6J6SlriONKrQ7tNlwBrJZXRYk5jUhuWvF7GYzGguBZQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openbsd-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.0.tgz", - "integrity": "sha512-aCwlRdSNMNxkGGqQajMUza6uXzR/U0dIl1QmLjPtRbLOx3Gy3otfFu/VjATy4yQzo9yFDGTxYDo1FfAD9oRD2A==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/openharmony-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.0.tgz", - "integrity": "sha512-nyvsBccxNAsNYz2jVFYwEGuRRomqZ149A39SHWk4hV0jWxKM0hjBPm3AmdxcbHiFLbBSwG6SbpIcUbXjgyECfA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/sunos-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.0.tgz", - "integrity": "sha512-Q1KY1iJafM+UX6CFEL+F4HRTgygmEW568YMqDA5UV97AuZSm21b7SXIrRJDwXWPzr8MGr75fUZPV67FdtMHlHA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-arm64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.0.tgz", - "integrity": "sha512-W1eyGNi6d+8kOmZIwi/EDjrL9nxQIQ0MiGqe/AWc6+IaHloxHSGoeRgDRKHFISThLmsewZ5nHFvGFWdBYlgKPg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-ia32": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.0.tgz", 
- "integrity": "sha512-30z1aKL9h22kQhilnYkORFYt+3wp7yZsHWus+wSKAJR8JtdfI76LJ4SBdMsCopTR3z/ORqVu5L1vtnHZWVj4cQ==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@esbuild/win32-x64": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.0.tgz", - "integrity": "sha512-aIitBcjQeyOhMTImhLZmtxfdOcuNRpwlPNmlFKPcHQYPhEssw75Cl1TSXJXpMkzaua9FUetx/4OQKq7eJul5Cg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/@floating-ui/core": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.3.tgz", - "integrity": "sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==", - "license": "MIT", - "dependencies": { - "@floating-ui/utils": "^0.2.10" - } - }, - "node_modules/@floating-ui/dom": { - "version": "1.7.4", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.4.tgz", - "integrity": "sha512-OOchDgh4F2CchOX94cRVqhvy7b3AFb+/rQXyswmzmGakRfkMgoWVjfnLWkRirfLEfuD4ysVW16eXzwt3jHIzKA==", - "license": "MIT", - "dependencies": { - "@floating-ui/core": "^1.7.3", - "@floating-ui/utils": "^0.2.10" - } - }, - "node_modules/@floating-ui/react": { - "version": "0.27.16", - "resolved": "https://registry.npmjs.org/@floating-ui/react/-/react-0.27.16.tgz", - "integrity": "sha512-9O8N4SeG2z++TSM8QA/KTeKFBVCNEz/AGS7gWPJf6KFRzmRWixFRnCnkPHRDwSVZW6QPDO6uT0P2SpWNKCc9/g==", - "license": "MIT", - "dependencies": { - "@floating-ui/react-dom": "^2.1.6", - "@floating-ui/utils": "^0.2.10", - "tabbable": "^6.0.0" - }, - "peerDependencies": { - "react": ">=17.0.0", - "react-dom": ">=17.0.0" - } - }, - "node_modules/@floating-ui/react-dom": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.6.tgz", - "integrity": 
"sha512-4JX6rEatQEvlmgU80wZyq9RT96HZJa88q8hp0pBd+LrczeDI4o6uA2M+uvxngVHo4Ihr8uibXxH6+70zhAFrVw==", - "license": "MIT", - "dependencies": { - "@floating-ui/dom": "^1.7.4" - }, - "peerDependencies": { - "react": ">=16.8.0", - "react-dom": ">=16.8.0" - } - }, - "node_modules/@floating-ui/utils": { - "version": "0.2.10", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", - "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", - "license": "MIT" - }, - "node_modules/@fortawesome/fontawesome-free": { - "version": "6.7.2", - "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.2.tgz", - "integrity": "sha512-JUOtgFW6k9u4Y+xeIaEiLr3+cjoUPiAuLXoyKOJSia6Duzb7pq+A76P9ZdPDoAoxHdHzq6gE9/jKBGXlZT8FbA==", - "license": "(CC-BY-4.0 AND OFL-1.1 AND MIT)", - "engines": { - "node": ">=6" - } - }, - "node_modules/@hono/node-server": { - "version": "1.19.6", - "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.6.tgz", - "integrity": "sha512-Shz/KjlIeAhfiuE93NDKVdZ7HdBVLQAfdbaXEaoAVO3ic9ibRSLGIQGkcBbFyuLr+7/1D5ZCINM8B+6IvXeMtw==", - "license": "MIT", - "engines": { - "node": ">=18.14.1" - }, - "peerDependencies": { - "hono": "^4" - } - }, - "node_modules/@iconify/types": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz", - "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==", - "license": "MIT" - }, - "node_modules/@iconify/utils": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.1.0.tgz", - "integrity": "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==", - "license": "MIT", - "dependencies": { - "@antfu/install-pkg": "^1.1.0", - "@iconify/types": "^2.0.0", - "mlly": "^1.8.0" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": 
"0.3.13", - "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", - "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/remapping": { - "version": "2.3.5", - "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", - "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", - "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", - "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", - "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@mdx-js/mdx": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.1.1.tgz", - "integrity": 
"sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdx": "^2.0.0", - "acorn": "^8.0.0", - "collapse-white-space": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-util-scope": "^1.0.0", - "estree-walker": "^3.0.0", - "hast-util-to-jsx-runtime": "^2.0.0", - "markdown-extensions": "^2.0.0", - "recma-build-jsx": "^1.0.0", - "recma-jsx": "^1.0.0", - "recma-stringify": "^1.0.0", - "rehype-recma": "^1.0.0", - "remark-mdx": "^3.0.0", - "remark-parse": "^11.0.0", - "remark-rehype": "^11.0.0", - "source-map": "^0.7.0", - "unified": "^11.0.0", - "unist-util-position-from-estree": "^2.0.0", - "unist-util-stringify-position": "^4.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/@mdx-js/react": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.1.1.tgz", - "integrity": "sha512-f++rKLQgUVYDAtECQ6fn/is15GkEH9+nZPM3MS0RcxVqoTfawHvDlSCH7JbMhAM6uJ32v3eXLvLmLvjGu7PTQw==", - "license": "MIT", - "dependencies": { - "@types/mdx": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "peerDependencies": { - "@types/react": ">=16", - "react": ">=16" - } - }, - "node_modules/@mdx-js/rollup": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@mdx-js/rollup/-/rollup-3.1.1.tgz", - "integrity": "sha512-v8satFmBB+DqDzYohnm1u2JOvxx6Hl3pUvqzJvfs2Zk/ngZ1aRUhsWpXvwPkNeGN9c2NCm/38H29ZqXQUjf8dw==", - "license": "MIT", - "dependencies": { - "@mdx-js/mdx": "^3.0.0", - "@rollup/pluginutils": "^5.0.0", - "source-map": "^0.7.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - 
"peerDependencies": { - "rollup": ">=2" - } - }, - "node_modules/@mermaid-js/parser": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.3.tgz", - "integrity": "sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==", - "license": "MIT", - "dependencies": { - "langium": "3.3.1" - } - }, - "node_modules/@noble/hashes": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", - "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", - "license": "MIT", - "engines": { - "node": "^14.21.3 || >=16" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - } - }, - "node_modules/@radix-ui/colors": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@radix-ui/colors/-/colors-3.0.0.tgz", - "integrity": "sha512-FUOsGBkHrYJwCSEtWRCIfQbZG7q1e6DgxCIOe1SUQzDe/7rXXeA47s8yCn6fuTNQAj1Zq4oTFi9Yjp3wzElcxg==", - "license": "MIT" - }, - "node_modules/@radix-ui/number": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", - "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", - "license": "MIT" - }, - "node_modules/@radix-ui/primitive": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", - "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", - "license": "MIT" - }, - "node_modules/@radix-ui/react-accessible-icon": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-accessible-icon/-/react-accessible-icon-1.1.7.tgz", - "integrity": "sha512-XM+E4WXl0OqUJFovy6GjmxxFyx9opfCAIUku4dlKRd5YEPqt4kALOkQOp0Of6reHuUkJuiPBEc5k0o4z4lTC8A==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-visually-hidden": "1.2.3" - }, - 
"peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-accordion": { - "version": "1.2.12", - "resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-1.2.12.tgz", - "integrity": "sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collapsible": "1.1.12", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-alert-dialog": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.15.tgz", - "integrity": "sha512-oTVLkEw5GpdRe29BqJ0LSDFWI3qu0vR1M0mUkOQWDIUnY/QIkLpgDMWuKxP94c2NAC2LGcgVhG1ImF3jkZ5wXw==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dialog": "1.1.15", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - 
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-arrow": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", - "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-aspect-ratio": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-aspect-ratio/-/react-aspect-ratio-1.1.7.tgz", - "integrity": "sha512-Yq6lvO9HQyPwev1onK1daHCHqXVLzPhSVjmsNjCa2Zcxy2f7uJD2itDtxknv6FzAKCwD1qQkeVDmX/cev13n/g==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-avatar": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/@radix-ui/react-avatar/-/react-avatar-1.1.10.tgz", - "integrity": "sha512-V8piFfWapM5OmNCXTzVQY+E1rDa53zY+MQ4Y7356v4fFz6vqCyUtIz2rUD44ZEdwg78/jKmMJHj07+C/Z/rcog==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-context": "1.1.2", - 
"@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-is-hydrated": "0.1.0", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-checkbox": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.3.3.tgz", - "integrity": "sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-use-size": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-collapsible": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz", - "integrity": "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-id": "1.1.1", - 
"@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-collection": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", - "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-compose-refs": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", - "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-context": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", - 
"integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-context-menu": { - "version": "2.2.16", - "resolved": "https://registry.npmjs.org/@radix-ui/react-context-menu/-/react-context-menu-2.2.16.tgz", - "integrity": "sha512-O8morBEW+HsVG28gYDZPTrT9UUovQUlJue5YO836tiTJhuIWBm/zQHc7j388sHWtdH/xUZurK9olD2+pcqx5ww==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-menu": "2.1.16", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dialog": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", - "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-focus-guards": "1.1.3", - "@radix-ui/react-focus-scope": "1.1.7", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3", - 
"@radix-ui/react-use-controllable-state": "1.2.2", - "aria-hidden": "^1.2.4", - "react-remove-scroll": "^2.6.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-direction": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", - "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dismissable-layer": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", - "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-escape-keydown": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-dropdown-menu": { - "version": "2.1.16", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", - "integrity": "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-menu": "2.1.16", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-guards": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", - "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-focus-scope": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", - "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || 
^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-form": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-form/-/react-form-0.1.8.tgz", - "integrity": "sha512-QM70k4Zwjttifr5a4sZFts9fn8FzHYvQ5PiB19O2HsYibaHSVt9fH9rzB0XZo/YcM+b7t/p7lYCT/F5eOeF5yQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-label": "2.1.7", - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-form/node_modules/@radix-ui/react-label": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", - "integrity": "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-hover-card": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@radix-ui/react-hover-card/-/react-hover-card-1.1.15.tgz", - "integrity": 
"sha512-qgTkjNT1CfKMoP0rcasmlH2r1DAiYicWsDsufxl940sT2wHNEWWv6FMWIQXWhVdmC1d/HYfbhQx60KYyAtKxjg==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-icons": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-icons/-/react-icons-1.3.2.tgz", - "integrity": "sha512-fyQIhGDhzfc9pK2kH6Pl9c4BDJGfMkPqkyIgYDthyNYoNg3wVhoJMMh19WS4Up/1KMPFVpNsT2q3WmXn2N1m6g==", - "license": "MIT", - "peerDependencies": { - "react": "^16.x || ^17.x || ^18.x || ^19.0.0 || ^19.0.0-rc" - } - }, - "node_modules/@radix-ui/react-id": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", - "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-label": { - "version": "2.1.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.8.tgz", - "integrity": 
"sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.4" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-label/node_modules/@radix-ui/react-primitive": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", - "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-slot": "1.2.4" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-label/node_modules/@radix-ui/react-slot": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", - "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-compose-refs": "1.1.2" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-menu": { - "version": "2.1.16", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", - "integrity": 
"sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-focus-guards": "1.1.3", - "@radix-ui/react-focus-scope": "1.1.7", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "aria-hidden": "^1.2.4", - "react-remove-scroll": "^2.6.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-menubar": { - "version": "1.1.16", - "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.1.16.tgz", - "integrity": "sha512-EB1FktTz5xRRi2Er974AUQZWg2yVBb1yjip38/lgwtCVRd3a+maUoGHN/xs9Yv8SY8QwbSEb+YrxGadVWbEutA==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-menu": "2.1.16", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || 
^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-navigation-menu": { - "version": "1.2.14", - "resolved": "https://registry.npmjs.org/@radix-ui/react-navigation-menu/-/react-navigation-menu-1.2.14.tgz", - "integrity": "sha512-YB9mTFQvCOAQMHU+C/jVl96WmuWeltyUEpRJJky51huhds5W2FQr1J8D/16sQlf0ozxkPK8uF3niQMdUwZPv5w==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-visually-hidden": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-one-time-password-field": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-one-time-password-field/-/react-one-time-password-field-0.1.8.tgz", - "integrity": "sha512-ycS4rbwURavDPVjCb5iS3aG4lURFDILi6sKI/WITUMZ13gMmn/xGjpLoqBAalhJaDk8I3UbCM5GzKHrnzwHbvg==", - "license": "MIT", - "dependencies": { - "@radix-ui/number": "1.1.1", - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - 
"@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-is-hydrated": "0.1.0", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-password-toggle-field": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-password-toggle-field/-/react-password-toggle-field-0.1.3.tgz", - "integrity": "sha512-/UuCrDBWravcaMix4TdT+qlNdVwOM1Nck9kWx/vafXsdfj1ChfhOdfi3cy9SGBpWgTXwYCuboT/oYpJy3clqfw==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-is-hydrated": "0.1.0" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-popover": { - "version": "1.1.15", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz", - "integrity": 
"sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-focus-guards": "1.1.3", - "@radix-ui/react-focus-scope": "1.1.7", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "aria-hidden": "^1.2.4", - "react-remove-scroll": "^2.6.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-popper": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", - "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", - "license": "MIT", - "dependencies": { - "@floating-ui/react-dom": "^2.0.0", - "@radix-ui/react-arrow": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-use-rect": "1.1.1", - "@radix-ui/react-use-size": "1.1.1", - "@radix-ui/rect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - 
"peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-portal": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", - "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-presence": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", - "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-primitive": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", - "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-slot": "1.2.3" - }, - "peerDependencies": { - 
"@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-progress": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.7.tgz", - "integrity": "sha512-vPdg/tF6YC/ynuBIJlk1mm7Le0VgW6ub6J2UWnTQ7/D23KXcPI1qy+0vBkgKgd38RCMJavBXpB83HPNFMTb0Fg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-radio-group": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz", - "integrity": "sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-use-size": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - 
"peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-roving-focus": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", - "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-scroll-area": { - "version": "1.2.10", - "resolved": "https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.10.tgz", - "integrity": "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==", - "license": "MIT", - "dependencies": { - "@radix-ui/number": "1.1.1", - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - 
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-select": { - "version": "2.2.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.6.tgz", - "integrity": "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/number": "1.1.1", - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-focus-guards": "1.1.3", - "@radix-ui/react-focus-scope": "1.1.7", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-visually-hidden": "1.2.3", - "aria-hidden": "^1.2.4", - "react-remove-scroll": "^2.6.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-separator": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.7.tgz", - "integrity": "sha512-0HEb8R9E8A+jZjvmFCy/J4xhbXy3TV+9XSnGJ3KvTtjlIUy/YQ/p6UYZvi7YbeoeXdyU9+Y3scizK6hkY37baA==", - "license": "MIT", - 
"dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-slider": { - "version": "1.3.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slider/-/react-slider-1.3.6.tgz", - "integrity": "sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw==", - "license": "MIT", - "dependencies": { - "@radix-ui/number": "1.1.1", - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-use-size": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-slot": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", - "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-compose-refs": "1.1.2" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - 
"@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-switch": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz", - "integrity": "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-previous": "1.1.1", - "@radix-ui/react-use-size": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-tabs": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.13.tgz", - "integrity": "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toast": { - 
"version": "1.2.15", - "resolved": "https://registry.npmjs.org/@radix-ui/react-toast/-/react-toast-1.2.15.tgz", - "integrity": "sha512-3OSz3TacUWy4WtOXV38DggwxoqJK4+eDkNMl5Z/MJZaoUPaP4/9lf81xXMe1I2ReTAptverZUpbPY4wWwWyL5g==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-callback-ref": "1.1.1", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-visually-hidden": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toggle": { - "version": "1.1.10", - "resolved": "https://registry.npmjs.org/@radix-ui/react-toggle/-/react-toggle-1.1.10.tgz", - "integrity": "sha512-lS1odchhFTeZv3xwHH31YPObmJn8gOg7Lq12inrr0+BH/l3Tsq32VfjqH1oh80ARM3mlkfMic15n0kg4sD1poQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toggle-group": { - "version": "1.1.11", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-toggle-group/-/react-toggle-group-1.1.11.tgz", - "integrity": "sha512-5umnS0T8JQzQT6HbPyO7Hh9dgd82NmS36DQr+X/YJ9ctFNCiiQd6IJAYYZ33LUwm8M+taCz5t2ui29fHZc4Y6Q==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-toggle": "1.1.10", - "@radix-ui/react-use-controllable-state": "1.2.2" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-toolbar": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/@radix-ui/react-toolbar/-/react-toolbar-1.1.11.tgz", - "integrity": "sha512-4ol06/1bLoFu1nwUqzdD4Y5RZ9oDdKeiHIsntug54Hcr1pgaHiPqHFEaXI1IFP/EsOfROQZ8Mig9VTIRza6Tjg==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-separator": "1.1.7", - "@radix-ui/react-toggle-group": "1.1.11" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-tooltip": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", - "integrity": 
"sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", - "license": "MIT", - "dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-id": "1.1.1", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-visually-hidden": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-callback-ref": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", - "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-controllable-state": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", - "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || 
^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-effect-event": { - "version": "0.0.2", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", - "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-escape-keydown": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", - "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-callback-ref": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-is-hydrated": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-is-hydrated/-/react-use-is-hydrated-0.1.0.tgz", - "integrity": "sha512-U+UORVEq+cTnRIaostJv9AGdV3G6Y+zbVd+12e18jQ5A3c0xL03IhnHuiU4UV69wolOQp5GfR58NW/EgdQhwOA==", - "license": "MIT", - "dependencies": { - "use-sync-external-store": "^1.5.0" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-layout-effect": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", - "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-previous": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", - "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", - "license": "MIT", - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-rect": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", - "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", - "license": "MIT", - "dependencies": { - "@radix-ui/rect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/@radix-ui/react-use-size": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", - "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-use-layout-effect": "1.1.1" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - 
"optional": true - } - } - }, - "node_modules/@radix-ui/react-visually-hidden": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", - "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/@radix-ui/rect": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", - "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", - "license": "MIT" - }, - "node_modules/@rolldown/pluginutils": { - "version": "1.0.0-beta.47", - "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.47.tgz", - "integrity": "sha512-8QagwMH3kNCuzD8EWL8R2YPW5e4OrHNSAHRFDdmFqEwEaD/KcNKjVoumo+gP2vW5eKB2UPbM6vTYiGZX0ixLnw==", - "license": "MIT" - }, - "node_modules/@rollup/pluginutils": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", - "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-walker": "^2.0.2", - "picomatch": "^4.0.2" - }, - "engines": { - "node": ">=14.0.0" - }, - "peerDependencies": { - "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" - }, - "peerDependenciesMeta": { - "rollup": { - "optional": true - } - } - }, - "node_modules/@rollup/pluginutils/node_modules/estree-walker": { - "version": "2.0.2", - 
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", - "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", - "license": "MIT" - }, - "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.53.3.tgz", - "integrity": "sha512-mRSi+4cBjrRLoaal2PnqH82Wqyb+d3HsPUN/W+WslCXsZsyHa9ZeQQX/pQsZaVIWDkPcpV6jJ+3KLbTbgnwv8w==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-android-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.53.3.tgz", - "integrity": "sha512-CbDGaMpdE9sh7sCmTrTUyllhrg65t6SwhjlMJsLr+J8YjFuPmCEjbBSx4Z/e4SmDyH3aB5hGaJUP2ltV/vcs4w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.53.3.tgz", - "integrity": "sha512-Nr7SlQeqIBpOV6BHHGZgYBuSdanCXuw09hon14MGOLGmXAFYjx1wNvquVPmpZnl0tLjg25dEdr4IQ6GgyToCUA==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.53.3.tgz", - "integrity": "sha512-DZ8N4CSNfl965CmPktJ8oBnfYr3F8dTTNBQkRlffnUarJ2ohudQD17sZBa097J8xhQ26AwhHJ5mvUyQW8ddTsQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.53.3.tgz", - "integrity": 
"sha512-yMTrCrK92aGyi7GuDNtGn2sNW+Gdb4vErx4t3Gv/Tr+1zRb8ax4z8GWVRfr3Jw8zJWvpGHNpss3vVlbF58DZ4w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.53.3.tgz", - "integrity": "sha512-lMfF8X7QhdQzseM6XaX0vbno2m3hlyZFhwcndRMw8fbAGUGL3WFMBdK0hbUBIUYcEcMhVLr1SIamDeuLBnXS+Q==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.53.3.tgz", - "integrity": "sha512-k9oD15soC/Ln6d2Wv/JOFPzZXIAIFLp6B+i14KhxAfnq76ajt0EhYc5YPeX6W1xJkAdItcVT+JhKl1QZh44/qw==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.53.3.tgz", - "integrity": "sha512-vTNlKq+N6CK/8UktsrFuc+/7NlEYVxgaEgRXVUVK258Z5ymho29skzW1sutgYjqNnquGwVUObAaxae8rZ6YMhg==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.53.3.tgz", - "integrity": "sha512-RGrFLWgMhSxRs/EWJMIFM1O5Mzuz3Xy3/mnxJp/5cVhZ2XoCAxJnmNsEyeMJtpK+wu0FJFWz+QF4mjCA7AUQ3w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.53.3.tgz", - "integrity": 
"sha512-kASyvfBEWYPEwe0Qv4nfu6pNkITLTb32p4yTgzFCocHnJLAHs+9LjUu9ONIhvfT/5lv4YS5muBHyuV84epBo/A==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.53.3.tgz", - "integrity": "sha512-JiuKcp2teLJwQ7vkJ95EwESWkNRFJD7TQgYmCnrPtlu50b4XvT5MOmurWNrCj3IFdyjBQ5p9vnrX4JM6I8OE7g==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.53.3.tgz", - "integrity": "sha512-EoGSa8nd6d3T7zLuqdojxC20oBfNT8nexBbB/rkxgKj5T5vhpAQKKnD+h3UkoMuTyXkP5jTjK/ccNRmQrPNDuw==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.53.3.tgz", - "integrity": "sha512-4s+Wped2IHXHPnAEbIB0YWBv7SDohqxobiiPA1FIWZpX+w9o2i4LezzH/NkFUl8LRci/8udci6cLq+jJQlh+0g==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.53.3.tgz", - "integrity": "sha512-68k2g7+0vs2u9CxDt5ktXTngsxOQkSEV/xBbwlqYcUrAVh6P9EgMZvFsnHy4SEiUl46Xf0IObWVbMvPrr2gw8A==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.53.3.tgz", - "integrity": 
"sha512-VYsFMpULAz87ZW6BVYw3I6sWesGpsP9OPcyKe8ofdg9LHxSbRMd7zrVrr5xi/3kMZtpWL/wC+UIJWJYVX5uTKg==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.53.3.tgz", - "integrity": "sha512-3EhFi1FU6YL8HTUJZ51imGJWEX//ajQPfqWLI3BQq4TlvHy4X0MOr5q3D2Zof/ka0d5FNdPwZXm3Yyib/UEd+w==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.53.3.tgz", - "integrity": "sha512-eoROhjcc6HbZCJr+tvVT8X4fW3/5g/WkGvvmwz/88sDtSJzO7r/blvoBDgISDiCjDRZmHpwud7h+6Q9JxFwq1Q==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.53.3.tgz", - "integrity": "sha512-OueLAWgrNSPGAdUdIjSWXw+u/02BRTcnfw9PN41D2vq/JSEPnJnVuBgw18VkN8wcd4fjUs+jFHVM4t9+kBSNLw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ] - }, - "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.53.3.tgz", - "integrity": "sha512-GOFuKpsxR/whszbF/bzydebLiXIHSgsEUp6M0JI8dWvi+fFa1TD6YQa4aSZHtpmh2/uAlj/Dy+nmby3TJ3pkTw==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.53.3.tgz", - "integrity": 
"sha512-iah+THLcBJdpfZ1TstDFbKNznlzoxa8fmnFYK4V67HvmuNYkVdAywJSoteUszvBQ9/HqN2+9AZghbajMsFT+oA==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.53.3.tgz", - "integrity": "sha512-J9QDiOIZlZLdcot5NXEepDkstocktoVjkaKUtqzgzpt2yWjGlbYiKyp05rWwk4nypbYUNoFAztEgixoLaSETkg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.53.3.tgz", - "integrity": "sha512-UhTd8u31dXadv0MopwGgNOBpUVROFKWVQgAg5N1ESyCz8AuBcMqm4AuTjrwgQKGDfoFuz02EuMRHQIw/frmYKQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@shikijs/core": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.29.2.tgz", - "integrity": "sha512-vju0lY9r27jJfOY4Z7+Rt/nIOjzJpZ3y+nYpqtUZInVoXQ/TJZcfGnNOGnKjFdVZb8qexiCuSlZRKcGfhhTTZQ==", - "license": "MIT", - "dependencies": { - "@shikijs/engine-javascript": "1.29.2", - "@shikijs/engine-oniguruma": "1.29.2", - "@shikijs/types": "1.29.2", - "@shikijs/vscode-textmate": "^10.0.1", - "@types/hast": "^3.0.4", - "hast-util-to-html": "^9.0.4" - } - }, - "node_modules/@shikijs/engine-javascript": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.29.2.tgz", - "integrity": "sha512-iNEZv4IrLYPv64Q6k7EPpOCE/nuvGiKl7zxdq0WFuRPF5PAE9PRo2JGq/d8crLusM59BRemJ4eOqrFrC4wiQ+A==", - "license": "MIT", - "dependencies": { - "@shikijs/types": "1.29.2", - "@shikijs/vscode-textmate": "^10.0.1", - "oniguruma-to-es": "^2.2.0" - } - }, - "node_modules/@shikijs/engine-oniguruma": { - "version": "1.29.2", - "resolved": 
"https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.29.2.tgz", - "integrity": "sha512-7iiOx3SG8+g1MnlzZVDYiaeHe7Ez2Kf2HrJzdmGwkRisT7r4rak0e655AcM/tF9JG/kg5fMNYlLLKglbN7gBqA==", - "license": "MIT", - "dependencies": { - "@shikijs/types": "1.29.2", - "@shikijs/vscode-textmate": "^10.0.1" - } - }, - "node_modules/@shikijs/langs": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-1.29.2.tgz", - "integrity": "sha512-FIBA7N3LZ+223U7cJDUYd5shmciFQlYkFXlkKVaHsCPgfVLiO+e12FmQE6Tf9vuyEsFe3dIl8qGWKXgEHL9wmQ==", - "license": "MIT", - "dependencies": { - "@shikijs/types": "1.29.2" - } - }, - "node_modules/@shikijs/rehype": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/rehype/-/rehype-1.29.2.tgz", - "integrity": "sha512-sxi53HZe5XDz0s2UqF+BVN/kgHPMS9l6dcacM4Ra3ZDzCJa5rDGJ+Ukpk4LxdD1+MITBM6hoLbPfGv9StV8a5Q==", - "license": "MIT", - "dependencies": { - "@shikijs/types": "1.29.2", - "@types/hast": "^3.0.4", - "hast-util-to-string": "^3.0.1", - "shiki": "1.29.2", - "unified": "^11.0.5", - "unist-util-visit": "^5.0.0" - } - }, - "node_modules/@shikijs/themes": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-1.29.2.tgz", - "integrity": "sha512-i9TNZlsq4uoyqSbluIcZkmPL9Bfi3djVxRnofUHwvx/h6SRW3cwgBC5SML7vsDcWyukY0eCzVN980rqP6qNl9g==", - "license": "MIT", - "dependencies": { - "@shikijs/types": "1.29.2" - } - }, - "node_modules/@shikijs/transformers": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.29.2.tgz", - "integrity": "sha512-NHQuA+gM7zGuxGWP9/Ub4vpbwrYCrho9nQCLcCPfOe3Yc7LOYwmSuhElI688oiqIXk9dlZwDiyAG9vPBTuPJMA==", - "license": "MIT", - "dependencies": { - "@shikijs/core": "1.29.2", - "@shikijs/types": "1.29.2" - } - }, - "node_modules/@shikijs/twoslash": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/twoslash/-/twoslash-1.29.2.tgz", - "integrity": 
"sha512-2S04ppAEa477tiaLfGEn1QJWbZUmbk8UoPbAEw4PifsrxkBXtAtOflIZJNtuCwz8ptc/TPxy7CO7gW4Uoi6o/g==", - "license": "MIT", - "dependencies": { - "@shikijs/core": "1.29.2", - "@shikijs/types": "1.29.2", - "twoslash": "^0.2.12" - } - }, - "node_modules/@shikijs/twoslash/node_modules/twoslash": { - "version": "0.2.12", - "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.2.12.tgz", - "integrity": "sha512-tEHPASMqi7kqwfJbkk7hc/4EhlrKCSLcur+TcvYki3vhIfaRMXnXjaYFgXpoZRbT6GdprD4tGuVBEmTpUgLBsw==", - "license": "MIT", - "dependencies": { - "@typescript/vfs": "^1.6.0", - "twoslash-protocol": "0.2.12" - }, - "peerDependencies": { - "typescript": "*" - } - }, - "node_modules/@shikijs/twoslash/node_modules/twoslash-protocol": { - "version": "0.2.12", - "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.2.12.tgz", - "integrity": "sha512-5qZLXVYfZ9ABdjqbvPc4RWMr7PrpPaaDSeaYY55vl/w1j6H6kzsWK/urAEIXlzYlyrFmyz1UbwIt+AA0ck+wbg==", - "license": "MIT" - }, - "node_modules/@shikijs/types": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.29.2.tgz", - "integrity": "sha512-VJjK0eIijTZf0QSTODEXCqinjBn0joAHQ+aPSBzrv4O2d/QSbsMw+ZeSRx03kV34Hy7NzUvV/7NqfYGRLrASmw==", - "license": "MIT", - "dependencies": { - "@shikijs/vscode-textmate": "^10.0.1", - "@types/hast": "^3.0.4" - } - }, - "node_modules/@shikijs/vscode-textmate": { - "version": "10.0.2", - "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", - "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", - "license": "MIT" - }, - "node_modules/@standard-schema/spec": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", - "license": "MIT" - }, - "node_modules/@tailwindcss/node": { - "version": 
"4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.15.tgz", - "integrity": "sha512-HF4+7QxATZWY3Jr8OlZrBSXmwT3Watj0OogeDvdUY/ByXJHQ+LBtqA2brDb3sBxYslIFx6UP94BJ4X6a4L9Bmw==", - "license": "MIT", - "dependencies": { - "@jridgewell/remapping": "^2.3.4", - "enhanced-resolve": "^5.18.3", - "jiti": "^2.6.0", - "lightningcss": "1.30.2", - "magic-string": "^0.30.19", - "source-map-js": "^1.2.1", - "tailwindcss": "4.1.15" - } - }, - "node_modules/@tailwindcss/node/node_modules/tailwindcss": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.15.tgz", - "integrity": "sha512-k2WLnWkYFkdpRv+Oby3EBXIyQC8/s1HOFMBUViwtAh6Z5uAozeUSMQlIsn/c6Q2iJzqG6aJT3wdPaRNj70iYxQ==", - "license": "MIT" - }, - "node_modules/@tailwindcss/oxide": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.15.tgz", - "integrity": "sha512-krhX+UOOgnsUuks2SR7hFafXmLQrKxB4YyRTERuCE59JlYL+FawgaAlSkOYmDRJdf1Q+IFNDMl9iRnBW7QBDfQ==", - "license": "MIT", - "engines": { - "node": ">= 10" - }, - "optionalDependencies": { - "@tailwindcss/oxide-android-arm64": "4.1.15", - "@tailwindcss/oxide-darwin-arm64": "4.1.15", - "@tailwindcss/oxide-darwin-x64": "4.1.15", - "@tailwindcss/oxide-freebsd-x64": "4.1.15", - "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.15", - "@tailwindcss/oxide-linux-arm64-gnu": "4.1.15", - "@tailwindcss/oxide-linux-arm64-musl": "4.1.15", - "@tailwindcss/oxide-linux-x64-gnu": "4.1.15", - "@tailwindcss/oxide-linux-x64-musl": "4.1.15", - "@tailwindcss/oxide-wasm32-wasi": "4.1.15", - "@tailwindcss/oxide-win32-arm64-msvc": "4.1.15", - "@tailwindcss/oxide-win32-x64-msvc": "4.1.15" - } - }, - "node_modules/@tailwindcss/oxide-android-arm64": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.15.tgz", - "integrity": "sha512-TkUkUgAw8At4cBjCeVCRMc/guVLKOU1D+sBPrHt5uVcGhlbVKxrCaCW9OKUIBv1oWkjh4GbunD/u/Mf0ql6kEA==", - 
"cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-darwin-arm64": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.15.tgz", - "integrity": "sha512-xt5XEJpn2piMSfvd1UFN6jrWXyaKCwikP4Pidcf+yfHTSzSpYhG3dcMktjNkQO3JiLCp+0bG0HoWGvz97K162w==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-darwin-x64": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.15.tgz", - "integrity": "sha512-TnWaxP6Bx2CojZEXAV2M01Yl13nYPpp0EtGpUrY+LMciKfIXiLL2r/SiSRpagE5Fp2gX+rflp/Os1VJDAyqymg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-freebsd-x64": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.15.tgz", - "integrity": "sha512-quISQDWqiB6Cqhjc3iWptXVZHNVENsWoI77L1qgGEHNIdLDLFnw3/AfY7DidAiiCIkGX/MjIdB3bbBZR/G2aJg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.15.tgz", - "integrity": "sha512-ObG76+vPlab65xzVUQbExmDU9FIeYLQ5k2LrQdR2Ud6hboR+ZobXpDoKEYXf/uOezOfIYmy2Ta3w0ejkTg9yxg==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { - "version": "4.1.15", - "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.15.tgz", - "integrity": "sha512-4WbBacRmk43pkb8/xts3wnOZMDKsPFyEH/oisCm2q3aLZND25ufvJKcDUpAu0cS+CBOL05dYa8D4U5OWECuH/Q==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-arm64-musl": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.15.tgz", - "integrity": "sha512-AbvmEiteEj1nf42nE8skdHv73NoR+EwXVSgPY6l39X12Ex8pzOwwfi3Kc8GAmjsnsaDEbk+aj9NyL3UeyHcTLg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-x64-gnu": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.15.tgz", - "integrity": "sha512-+rzMVlvVgrXtFiS+ES78yWgKqpThgV19ISKD58Ck+YO5pO5KjyxLt7AWKsWMbY0R9yBDC82w6QVGz837AKQcHg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-linux-x64-musl": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.15.tgz", - "integrity": "sha512-fPdEy7a8eQN9qOIK3Em9D3TO1z41JScJn8yxl/76mp4sAXFDfV4YXxsiptJcOwy6bGR+70ZSwFIZhTXzQeqwQg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-wasm32-wasi": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.15.tgz", - "integrity": "sha512-sJ4yd6iXXdlgIMfIBXuVGp/NvmviEoMVWMOAGxtxhzLPp9LOj5k0pMEMZdjeMCl4C6Up+RM8T3Zgk+BMQ0bGcQ==", - "bundleDependencies": [ - 
"@napi-rs/wasm-runtime", - "@emnapi/core", - "@emnapi/runtime", - "@tybys/wasm-util", - "@emnapi/wasi-threads", - "tslib" - ], - "cpu": [ - "wasm32" - ], - "license": "MIT", - "optional": true, - "dependencies": { - "@emnapi/core": "^1.5.0", - "@emnapi/runtime": "^1.5.0", - "@emnapi/wasi-threads": "^1.1.0", - "@napi-rs/wasm-runtime": "^1.0.7", - "@tybys/wasm-util": "^0.10.1", - "tslib": "^2.4.0" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.15.tgz", - "integrity": "sha512-sJGE5faXnNQ1iXeqmRin7Ds/ru2fgCiaQZQQz3ZGIDtvbkeV85rAZ0QJFMDg0FrqsffZG96H1U9AQlNBRLsHVg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/oxide-win32-x64-msvc": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.15.tgz", - "integrity": "sha512-NLeHE7jUV6HcFKS504bpOohyi01zPXi2PXmjFfkzTph8xRxDdxkRsXm/xDO5uV5K3brrE1cCwbUYmFUSHR3u1w==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 10" - } - }, - "node_modules/@tailwindcss/vite": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.15.tgz", - "integrity": "sha512-B6s60MZRTUil+xKoZoGe6i0Iar5VuW+pmcGlda2FX+guDuQ1G1sjiIy1W0frneVpeL/ZjZ4KEgWZHNrIm++2qA==", - "license": "MIT", - "dependencies": { - "@tailwindcss/node": "4.1.15", - "@tailwindcss/oxide": "4.1.15", - "tailwindcss": "4.1.15" - }, - "peerDependencies": { - "vite": "^5.2.0 || ^6 || ^7" - } - }, - "node_modules/@tailwindcss/vite/node_modules/tailwindcss": { - "version": "4.1.15", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.15.tgz", - "integrity": 
"sha512-k2WLnWkYFkdpRv+Oby3EBXIyQC8/s1HOFMBUViwtAh6Z5uAozeUSMQlIsn/c6Q2iJzqG6aJT3wdPaRNj70iYxQ==", - "license": "MIT" - }, - "node_modules/@types/babel__core": { - "version": "7.20.5", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", - "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.20.7", - "@babel/types": "^7.20.7", - "@types/babel__generator": "*", - "@types/babel__template": "*", - "@types/babel__traverse": "*" - } - }, - "node_modules/@types/babel__generator": { - "version": "7.27.0", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", - "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", - "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", - "license": "MIT", - "dependencies": { - "@babel/parser": "^7.1.0", - "@babel/types": "^7.0.0" - } - }, - "node_modules/@types/babel__traverse": { - "version": "7.28.0", - "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", - "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", - "license": "MIT", - "dependencies": { - "@babel/types": "^7.28.2" - } - }, - "node_modules/@types/d3": { - "version": "7.4.3", - "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", - "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", - "license": "MIT", - "dependencies": { - "@types/d3-array": "*", - 
"@types/d3-axis": "*", - "@types/d3-brush": "*", - "@types/d3-chord": "*", - "@types/d3-color": "*", - "@types/d3-contour": "*", - "@types/d3-delaunay": "*", - "@types/d3-dispatch": "*", - "@types/d3-drag": "*", - "@types/d3-dsv": "*", - "@types/d3-ease": "*", - "@types/d3-fetch": "*", - "@types/d3-force": "*", - "@types/d3-format": "*", - "@types/d3-geo": "*", - "@types/d3-hierarchy": "*", - "@types/d3-interpolate": "*", - "@types/d3-path": "*", - "@types/d3-polygon": "*", - "@types/d3-quadtree": "*", - "@types/d3-random": "*", - "@types/d3-scale": "*", - "@types/d3-scale-chromatic": "*", - "@types/d3-selection": "*", - "@types/d3-shape": "*", - "@types/d3-time": "*", - "@types/d3-time-format": "*", - "@types/d3-timer": "*", - "@types/d3-transition": "*", - "@types/d3-zoom": "*" - } - }, - "node_modules/@types/d3-array": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", - "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", - "license": "MIT" - }, - "node_modules/@types/d3-axis": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", - "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-brush": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", - "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-chord": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", - "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", - 
"license": "MIT" - }, - "node_modules/@types/d3-color": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", - "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", - "license": "MIT" - }, - "node_modules/@types/d3-contour": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", - "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", - "license": "MIT", - "dependencies": { - "@types/d3-array": "*", - "@types/geojson": "*" - } - }, - "node_modules/@types/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", - "license": "MIT" - }, - "node_modules/@types/d3-dispatch": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", - "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", - "license": "MIT" - }, - "node_modules/@types/d3-drag": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", - "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-dsv": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", - "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", - "license": "MIT" - }, - "node_modules/@types/d3-ease": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", - "integrity": 
"sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", - "license": "MIT" - }, - "node_modules/@types/d3-fetch": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", - "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", - "license": "MIT", - "dependencies": { - "@types/d3-dsv": "*" - } - }, - "node_modules/@types/d3-force": { - "version": "3.0.10", - "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", - "integrity": "sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", - "license": "MIT" - }, - "node_modules/@types/d3-format": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", - "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", - "license": "MIT" - }, - "node_modules/@types/d3-geo": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", - "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", - "license": "MIT", - "dependencies": { - "@types/geojson": "*" - } - }, - "node_modules/@types/d3-hierarchy": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", - "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", - "license": "MIT" - }, - "node_modules/@types/d3-interpolate": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", - "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", - "license": "MIT", - "dependencies": { - "@types/d3-color": "*" - } - }, - "node_modules/@types/d3-path": { - 
"version": "3.1.1", - "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", - "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", - "license": "MIT" - }, - "node_modules/@types/d3-polygon": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", - "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", - "license": "MIT" - }, - "node_modules/@types/d3-quadtree": { - "version": "3.0.6", - "resolved": "https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", - "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", - "license": "MIT" - }, - "node_modules/@types/d3-random": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", - "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", - "license": "MIT" - }, - "node_modules/@types/d3-scale": { - "version": "4.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", - "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", - "license": "MIT", - "dependencies": { - "@types/d3-time": "*" - } - }, - "node_modules/@types/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", - "license": "MIT" - }, - "node_modules/@types/d3-selection": { - "version": "3.0.11", - "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", - "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", - "license": 
"MIT" - }, - "node_modules/@types/d3-shape": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.7.tgz", - "integrity": "sha512-VLvUQ33C+3J+8p+Daf+nYSOsjB4GXp19/S/aGo60m9h1v6XaxjiT82lKVWJCfzhtuZ3yD7i/TPeC/fuKLLOSmg==", - "license": "MIT", - "dependencies": { - "@types/d3-path": "*" - } - }, - "node_modules/@types/d3-time": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", - "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", - "license": "MIT" - }, - "node_modules/@types/d3-time-format": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", - "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", - "license": "MIT" - }, - "node_modules/@types/d3-timer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", - "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", - "license": "MIT" - }, - "node_modules/@types/d3-transition": { - "version": "3.0.9", - "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", - "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", - "license": "MIT", - "dependencies": { - "@types/d3-selection": "*" - } - }, - "node_modules/@types/d3-zoom": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", - "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", - "license": "MIT", - "dependencies": { - "@types/d3-interpolate": "*", - "@types/d3-selection": "*" - } - }, - "node_modules/@types/debug": { - "version": "4.1.12", - "resolved": 
"https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", - "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", - "license": "MIT", - "dependencies": { - "@types/ms": "*" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", - "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", - "license": "MIT" - }, - "node_modules/@types/estree-jsx": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", - "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", - "license": "MIT", - "dependencies": { - "@types/estree": "*" - } - }, - "node_modules/@types/geojson": { - "version": "7946.0.16", - "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", - "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", - "license": "MIT" - }, - "node_modules/@types/hast": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", - "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "*" - } - }, - "node_modules/@types/mdast": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", - "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", - "license": "MIT", - "dependencies": { - "@types/unist": "*" - } - }, - "node_modules/@types/mdx": { - "version": "2.0.13", - "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", - "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", - 
"license": "MIT" - }, - "node_modules/@types/ms": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", - "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "24.10.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.10.1.tgz", - "integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==", - "license": "MIT", - "dependencies": { - "undici-types": "~7.16.0" - } - }, - "node_modules/@types/react": { - "version": "19.2.7", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", - "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", - "license": "MIT", - "dependencies": { - "csstype": "^3.2.2" - } - }, - "node_modules/@types/trusted-types": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", - "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", - "license": "MIT", - "optional": true - }, - "node_modules/@types/unist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", - "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", - "license": "MIT" - }, - "node_modules/@typescript/vfs": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.2.tgz", - "integrity": "sha512-hoBwJwcbKHmvd2QVebiytN1aELvpk9B74B4L1mFm/XT1Q/VOYAWl2vQ9AWRFtQq8zmz6enTpfTV8WRc4ATjW/g==", - "license": "MIT", - "dependencies": { - "debug": "^4.1.1" - }, - "peerDependencies": { - "typescript": "*" - } - }, - "node_modules/@ungap/structured-clone": { - "version": "1.3.0", - "resolved": 
"https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", - "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", - "license": "ISC" - }, - "node_modules/@vanilla-extract/babel-plugin-debug-ids": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/@vanilla-extract/babel-plugin-debug-ids/-/babel-plugin-debug-ids-1.2.2.tgz", - "integrity": "sha512-MeDWGICAF9zA/OZLOKwhoRlsUW+fiMwnfuOAqFVohL31Agj7Q/RBWAYweqjHLgFBCsdnr6XIfwjJnmb2znEWxw==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.23.9" - } - }, - "node_modules/@vanilla-extract/compiler": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@vanilla-extract/compiler/-/compiler-0.3.3.tgz", - "integrity": "sha512-y/RCcjhITi/JV/jbH22QN0aDSTtWELOBbkod/rcrUfGTS8bfVrthSsFmH+0ZoL9LJBx3vHrf0Qaf24xZkoiJoQ==", - "license": "MIT", - "dependencies": { - "@vanilla-extract/css": "^1.17.5", - "@vanilla-extract/integration": "^8.0.6", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", - "vite-node": "^3.2.2" - } - }, - "node_modules/@vanilla-extract/css": { - "version": "1.17.5", - "resolved": "https://registry.npmjs.org/@vanilla-extract/css/-/css-1.17.5.tgz", - "integrity": "sha512-u29cUVL5Z2qjJ2Eh8pusT1ToGtTeA4eb/y0ygaw2vWv9XFQSixtkBYEsVkrJExSI/0+SR1g8n5NYas4KlWOdfA==", - "license": "MIT", - "dependencies": { - "@emotion/hash": "^0.9.0", - "@vanilla-extract/private": "^1.0.9", - "css-what": "^6.1.0", - "cssesc": "^3.0.0", - "csstype": "^3.2.3", - "dedent": "^1.5.3", - "deep-object-diff": "^1.1.9", - "deepmerge": "^4.2.2", - "lru-cache": "^10.4.3", - "media-query-parser": "^2.0.2", - "modern-ahocorasick": "^1.0.0", - "picocolors": "^1.0.0" - } - }, - "node_modules/@vanilla-extract/dynamic": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@vanilla-extract/dynamic/-/dynamic-2.1.5.tgz", - "integrity": "sha512-QGIFGb1qyXQkbzx6X6i3+3LMc/iv/ZMBttMBL+Wm/DetQd36KsKsFg5CtH3qy+1hCA/5w93mEIIAiL4fkM8ycw==", - 
"license": "MIT", - "dependencies": { - "@vanilla-extract/private": "^1.0.9" - } - }, - "node_modules/@vanilla-extract/integration": { - "version": "8.0.6", - "resolved": "https://registry.npmjs.org/@vanilla-extract/integration/-/integration-8.0.6.tgz", - "integrity": "sha512-BlDtXtb6Fin8XEGwf4BhsJkKQh0rhj/YiN6ylNNOqXtRU0+DQmzE5WGE056ScKg3p5e0IFaeH7PPxuWJca9aXw==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.23.9", - "@babel/plugin-syntax-typescript": "^7.23.3", - "@vanilla-extract/babel-plugin-debug-ids": "^1.2.2", - "@vanilla-extract/css": "^1.17.5", - "dedent": "^1.5.3", - "esbuild": "npm:esbuild@>=0.17.6 <0.28.0", - "eval": "0.1.8", - "find-up": "^5.0.0", - "javascript-stringify": "^2.0.1", - "mlly": "^1.4.2" - } - }, - "node_modules/@vanilla-extract/private": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@vanilla-extract/private/-/private-1.0.9.tgz", - "integrity": "sha512-gT2jbfZuaaCLrAxwXbRgIhGhcXbRZCG3v4TTUnjw0EJ7ArdBRxkq4msNJkbuRkCgfIK5ATmprB5t9ljvLeFDEA==", - "license": "MIT" - }, - "node_modules/@vanilla-extract/vite-plugin": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/@vanilla-extract/vite-plugin/-/vite-plugin-5.1.3.tgz", - "integrity": "sha512-QKojhn+O4NIjPQsjfF3Lz+DCC9VaGE/P6eNXcZGoWhdCuGXbMOdSX0xogCX9O6ewzwJOiJJ++3NvuSlh7oVkcw==", - "license": "MIT", - "dependencies": { - "@vanilla-extract/compiler": "^0.3.3", - "@vanilla-extract/integration": "^8.0.6" - }, - "peerDependencies": { - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" - } - }, - "node_modules/@vitejs/plugin-react": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.1.tgz", - "integrity": "sha512-WQfkSw0QbQ5aJ2CHYw23ZGkqnRwqKHD/KYsMeTkZzPT4Jcf0DcBxBtwMJxnu6E7oxw5+JC6ZAiePgh28uJ1HBA==", - "license": "MIT", - "dependencies": { - "@babel/core": "^7.28.5", - "@babel/plugin-transform-react-jsx-self": "^7.27.1", - "@babel/plugin-transform-react-jsx-source": "^7.27.1", - "@rolldown/pluginutils": 
"1.0.0-beta.47", - "@types/babel__core": "^7.20.5", - "react-refresh": "^0.18.0" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "peerDependencies": { - "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" - } - }, - "node_modules/acorn": { - "version": "8.15.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", - "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", - "license": "MIT", - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-jsx": { - "version": "5.3.2", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", - "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "license": "MIT", - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/ansi-regex": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", - "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/aria-hidden": { - "version": "1.2.6", - "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", - "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", - "license": "MIT", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/astring": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz", - "integrity": "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==", - "license": "MIT", - "bin": { - "astring": "bin/astring" - } - }, - "node_modules/autoprefixer": { - "version": "10.4.22", - 
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.22.tgz", - "integrity": "sha512-ARe0v/t9gO28Bznv6GgqARmVqcWOV3mfgUPn9becPHMiD3o9BwlRgaeccZnwTpZ7Zwqrm+c1sUSsMxIzQzc8Xg==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "browserslist": "^4.27.0", - "caniuse-lite": "^1.0.30001754", - "fraction.js": "^5.3.4", - "normalize-range": "^0.1.2", - "picocolors": "^1.1.1", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/bail": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", - "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/baseline-browser-mapping": { - "version": "2.9.0", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.0.tgz", - "integrity": "sha512-Mh++g+2LPfzZToywfE1BUzvZbfOY52Nil0rn9H1CPC5DJ7fX+Vir7nToBeoiSbB1zTNeGYbELEvJESujgGrzXw==", - "license": 
"Apache-2.0", - "bin": { - "baseline-browser-mapping": "dist/cli.js" - } - }, - "node_modules/bcp-47-match": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz", - "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/bl": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", - "integrity": "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", - "license": "MIT", - "dependencies": { - "buffer": "^6.0.3", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "license": "ISC" - }, - "node_modules/browserslist": { - "version": "4.28.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", - "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "baseline-browser-mapping": "^2.9.0", - "caniuse-lite": "^1.0.30001759", - "electron-to-chromium": "^1.5.263", - "node-releases": "^2.0.27", - "update-browserslist-db": "^1.2.0" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": 
"https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/cac": { - "version": "6.7.14", - "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", - "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/caniuse-lite": { - "version": "1.0.30001759", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001759.tgz", - "integrity": "sha512-Pzfx9fOKoKvevQf8oCXoyNRQ5QyxJj+3O0Rqx2V5oxT61KGx8+n6hV/IUyJeifUci2clnmmKVpvtiqRzgiWjSw==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "CC-BY-4.0" - }, - "node_modules/ccount": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", - "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - 
"node_modules/chalk": { - "version": "5.6.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", - "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", - "license": "MIT", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/character-entities": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", - "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-entities-html4": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", - "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-entities-legacy": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", - "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/character-reference-invalid": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", - "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/chevrotain": { 
- "version": "11.0.3", - "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz", - "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==", - "license": "Apache-2.0", - "dependencies": { - "@chevrotain/cst-dts-gen": "11.0.3", - "@chevrotain/gast": "11.0.3", - "@chevrotain/regexp-to-ast": "11.0.3", - "@chevrotain/types": "11.0.3", - "@chevrotain/utils": "11.0.3", - "lodash-es": "4.17.21" - } - }, - "node_modules/chevrotain-allstar": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", - "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", - "license": "MIT", - "dependencies": { - "lodash-es": "^4.17.21" - }, - "peerDependencies": { - "chevrotain": "^11.0.0" - } - }, - "node_modules/chroma-js": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/chroma-js/-/chroma-js-3.2.0.tgz", - "integrity": "sha512-os/OippSlX1RlWWr+QDPcGUZs0uoqr32urfxESG9U93lhUfbnlyckte84Q8P1UQY/qth983AS1JONKmLS4T0nw==", - "license": "(BSD-3-Clause AND Apache-2.0)" - }, - "node_modules/cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", - "license": "MIT", - "dependencies": { - "restore-cursor": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", - "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" 
- } - }, - "node_modules/clsx": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", - "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/collapse-white-space": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-2.1.0.tgz", - "integrity": "sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/comma-separated-tokens": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", - "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/compressible": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", - "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", - "license": "MIT", - "dependencies": { - "mime-db": ">= 1.43.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/compression": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", - "integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", - 
"license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "compressible": "~2.0.18", - "debug": "2.6.9", - "negotiator": "~0.6.4", - "on-headers": "~1.1.0", - "safe-buffer": "5.2.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/compression/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/compression/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/confbox": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", - "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", - "license": "MIT" - }, - "node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "license": "MIT" - }, - "node_modules/cookie": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", - "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/cose-base": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", - "integrity": 
"sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", - "license": "MIT", - "dependencies": { - "layout-base": "^1.0.0" - } - }, - "node_modules/create-vocs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/create-vocs/-/create-vocs-1.0.0.tgz", - "integrity": "sha512-Lv1Bd3WZEgwG4nrogkM54m8viW+TWPlGivLyEi7aNb3cuKPsEfMDZ/kTbo87fzOGtsZ2yh7scO54ZmVhhgBgTw==", - "dependencies": { - "@clack/prompts": "^0.7.0", - "cac": "^6.7.14", - "detect-package-manager": "^3.0.2", - "fs-extra": "^11.3.0", - "picocolors": "^1.1.1" - }, - "bin": { - "create-vocs": "_lib/bin.js" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", - "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/css-selector-parser": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-3.2.0.tgz", - "integrity": "sha512-L1bdkNKUP5WYxiW5dW6vA2hd3sL8BdRNLy2FCX0rLVise4eNw9nBdeBuJHxlELieSE2H1f6bYQFfwVUwWCV9rQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/mdevils" - }, - { - "type": "patreon", - "url": "https://patreon.com/mdevils" - } - ], - "license": "MIT" - }, - "node_modules/css-what": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", - "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", - "license": "BSD-2-Clause", - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - 
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "license": "MIT", - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/csstype": { - "version": "3.2.3", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", - "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "license": "MIT" - }, - "node_modules/cytoscape": { - "version": "3.33.1", - "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", - "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==", - "license": "MIT", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/cytoscape-cose-bilkent": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", - "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", - "license": "MIT", - "dependencies": { - "cose-base": "^1.0.0" - }, - "peerDependencies": { - "cytoscape": "^3.2.0" - } - }, - "node_modules/cytoscape-fcose": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", - "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", - "license": "MIT", - "dependencies": { - "cose-base": "^2.2.0" - }, - "peerDependencies": { - "cytoscape": "^3.2.0" - } - }, - "node_modules/cytoscape-fcose/node_modules/cose-base": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", - "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", - "license": "MIT", - "dependencies": { - "layout-base": "^2.0.0" - } - }, - "node_modules/cytoscape-fcose/node_modules/layout-base": { - "version": 
"2.0.1", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", - "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", - "license": "MIT" - }, - "node_modules/d3": { - "version": "7.9.0", - "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", - "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", - "license": "ISC", - "dependencies": { - "d3-array": "3", - "d3-axis": "3", - "d3-brush": "3", - "d3-chord": "3", - "d3-color": "3", - "d3-contour": "4", - "d3-delaunay": "6", - "d3-dispatch": "3", - "d3-drag": "3", - "d3-dsv": "3", - "d3-ease": "3", - "d3-fetch": "3", - "d3-force": "3", - "d3-format": "3", - "d3-geo": "3", - "d3-hierarchy": "3", - "d3-interpolate": "3", - "d3-path": "3", - "d3-polygon": "3", - "d3-quadtree": "3", - "d3-random": "3", - "d3-scale": "4", - "d3-scale-chromatic": "3", - "d3-selection": "3", - "d3-shape": "3", - "d3-time": "3", - "d3-time-format": "4", - "d3-timer": "3", - "d3-transition": "3", - "d3-zoom": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-array": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", - "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", - "license": "ISC", - "dependencies": { - "internmap": "1 - 2" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-axis": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", - "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-brush": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", - "integrity": 
"sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "3", - "d3-transition": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-chord": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", - "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", - "license": "ISC", - "dependencies": { - "d3-path": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-color": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", - "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-contour": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", - "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", - "license": "ISC", - "dependencies": { - "d3-array": "^3.2.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-delaunay": { - "version": "6.0.4", - "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", - "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", - "license": "ISC", - "dependencies": { - "delaunator": "5" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dispatch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", - "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-drag": { 
- "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", - "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-selection": "3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-dsv": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", - "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", - "license": "ISC", - "dependencies": { - "commander": "7", - "iconv-lite": "0.6", - "rw": "1" - }, - "bin": { - "csv2json": "bin/dsv2json.js", - "csv2tsv": "bin/dsv2dsv.js", - "dsv2dsv": "bin/dsv2dsv.js", - "dsv2json": "bin/dsv2json.js", - "json2csv": "bin/json2dsv.js", - "json2dsv": "bin/json2dsv.js", - "json2tsv": "bin/json2dsv.js", - "tsv2csv": "bin/dsv2dsv.js", - "tsv2json": "bin/dsv2json.js" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-ease": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", - "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-fetch": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", - "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", - "license": "ISC", - "dependencies": { - "d3-dsv": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-force": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", - "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-quadtree": "1 - 
3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-format": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", - "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-geo": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", - "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", - "license": "ISC", - "dependencies": { - "d3-array": "2.5.0 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-hierarchy": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", - "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-interpolate": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", - "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-path": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", - "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-polygon": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", - "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, 
- "node_modules/d3-quadtree": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", - "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-random": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", - "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-sankey": { - "version": "0.12.3", - "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", - "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-array": "1 - 2", - "d3-shape": "^1.2.0" - } - }, - "node_modules/d3-sankey/node_modules/d3-array": { - "version": "2.12.1", - "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", - "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", - "license": "BSD-3-Clause", - "dependencies": { - "internmap": "^1.0.0" - } - }, - "node_modules/d3-sankey/node_modules/d3-path": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", - "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", - "license": "BSD-3-Clause" - }, - "node_modules/d3-sankey/node_modules/d3-shape": { - "version": "1.3.7", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", - "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", - "license": "BSD-3-Clause", - "dependencies": { - "d3-path": "1" - } - }, - "node_modules/d3-sankey/node_modules/internmap": { - 
"version": "1.0.1", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", - "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", - "license": "ISC" - }, - "node_modules/d3-scale": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", - "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", - "license": "ISC", - "dependencies": { - "d3-array": "2.10.0 - 3", - "d3-format": "1 - 3", - "d3-interpolate": "1.2.0 - 3", - "d3-time": "2.1.1 - 3", - "d3-time-format": "2 - 4" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-scale-chromatic": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", - "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3", - "d3-interpolate": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-selection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", - "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-shape": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", - "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", - "license": "ISC", - "dependencies": { - "d3-path": "^3.1.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-time": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", - "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", 
- "license": "ISC", - "dependencies": { - "d3-array": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-time-format": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", - "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", - "license": "ISC", - "dependencies": { - "d3-time": "1 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-timer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", - "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/d3-transition": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", - "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", - "license": "ISC", - "dependencies": { - "d3-color": "1 - 3", - "d3-dispatch": "1 - 3", - "d3-ease": "1 - 3", - "d3-interpolate": "1 - 3", - "d3-timer": "1 - 3" - }, - "engines": { - "node": ">=12" - }, - "peerDependencies": { - "d3-selection": "2 - 3" - } - }, - "node_modules/d3-zoom": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", - "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", - "license": "ISC", - "dependencies": { - "d3-dispatch": "1 - 3", - "d3-drag": "2 - 3", - "d3-interpolate": "1 - 3", - "d3-selection": "2 - 3", - "d3-transition": "2 - 3" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/dagre-d3-es": { - "version": "7.0.13", - "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", - "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", - 
"license": "MIT", - "dependencies": { - "d3": "^7.9.0", - "lodash-es": "^4.17.21" - } - }, - "node_modules/dayjs": { - "version": "1.11.19", - "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", - "integrity": "sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", - "license": "MIT" - }, - "node_modules/debug": { - "version": "4.4.3", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", - "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/decode-named-character-reference": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz", - "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==", - "license": "MIT", - "dependencies": { - "character-entities": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/dedent": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.0.tgz", - "integrity": "sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==", - "license": "MIT", - "peerDependencies": { - "babel-plugin-macros": "^3.1.0" - }, - "peerDependenciesMeta": { - "babel-plugin-macros": { - "optional": true - } - } - }, - "node_modules/deep-object-diff": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/deep-object-diff/-/deep-object-diff-1.1.9.tgz", - "integrity": "sha512-Rn+RuwkmkDwCi2/oXOFS9Gsr5lJZu/yTGpK7wAaAIE75CC+LCGEZHpY6VQJa/RoJcrmaA/docWJZvYohlNkWPA==", - "license": "MIT" - }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": 
"https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/delaunator": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", - "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", - "license": "ISC", - "dependencies": { - "robust-predicates": "^3.0.2" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", - "license": "MIT", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-libc": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", - "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=8" - } - }, - "node_modules/detect-node-es": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", - "integrity": 
"sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", - "license": "MIT" - }, - "node_modules/detect-package-manager": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/detect-package-manager/-/detect-package-manager-3.0.2.tgz", - "integrity": "sha512-8JFjJHutStYrfWwzfretQoyNGoZVW1Fsrp4JO9spa7h/fBfwgTMEIy4/LBzRDGsxwVPHU0q+T9YvwLDJoOApLQ==", - "license": "MIT", - "dependencies": { - "execa": "^5.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/devlop": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", - "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", - "license": "MIT", - "dependencies": { - "dequal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/direction": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz", - "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==", - "license": "MIT", - "bin": { - "direction": "cli.js" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/dompurify": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.0.tgz", - "integrity": "sha512-r+f6MYR1gGN1eJv0TVQbhA7if/U7P87cdPl3HN5rikqaBSBxLiCb/b9O+2eG0cxz0ghyU+mU1QkbsOwERMYlWQ==", - "license": "(MPL-2.0 OR Apache-2.0)", - "optionalDependencies": { - "@types/trusted-types": "^2.0.7" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "license": "MIT" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", - "license": "MIT" - }, - "node_modules/electron-to-chromium": { - "version": "1.5.263", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.263.tgz", - "integrity": "sha512-DrqJ11Knd+lo+dv+lltvfMDLU27g14LMdH2b0O3Pio4uk0x+z7OR+JrmyacTPN2M8w3BrZ7/RTwG3R9B7irPlg==", - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "10.6.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", - "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", - "license": "MIT" - }, - "node_modules/emoji-regex-xs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex-xs/-/emoji-regex-xs-1.0.0.tgz", - "integrity": "sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==", - "license": "MIT" - }, - "node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": 
"sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/es-module-lexer": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", - "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", - "license": "MIT" - }, - "node_modules/esast-util-from-estree": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/esast-util-from-estree/-/esast-util-from-estree-2.0.0.tgz", - "integrity": "sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-visit": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/esast-util-from-js": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/esast-util-from-js/-/esast-util-from-js-2.0.1.tgz", - "integrity": "sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "acorn": "^8.0.0", - "esast-util-from-estree": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/esbuild": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.0.tgz", - "integrity": "sha512-jd0f4NHbD6cALCyGElNpGAOtWxSq46l9X/sWB0Nzd5er4Kz2YTm+Vl0qKFT9KUJvD8+fiO8AvoHhFvEatfVixA==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - 
"optionalDependencies": { - "@esbuild/aix-ppc64": "0.27.0", - "@esbuild/android-arm": "0.27.0", - "@esbuild/android-arm64": "0.27.0", - "@esbuild/android-x64": "0.27.0", - "@esbuild/darwin-arm64": "0.27.0", - "@esbuild/darwin-x64": "0.27.0", - "@esbuild/freebsd-arm64": "0.27.0", - "@esbuild/freebsd-x64": "0.27.0", - "@esbuild/linux-arm": "0.27.0", - "@esbuild/linux-arm64": "0.27.0", - "@esbuild/linux-ia32": "0.27.0", - "@esbuild/linux-loong64": "0.27.0", - "@esbuild/linux-mips64el": "0.27.0", - "@esbuild/linux-ppc64": "0.27.0", - "@esbuild/linux-riscv64": "0.27.0", - "@esbuild/linux-s390x": "0.27.0", - "@esbuild/linux-x64": "0.27.0", - "@esbuild/netbsd-arm64": "0.27.0", - "@esbuild/netbsd-x64": "0.27.0", - "@esbuild/openbsd-arm64": "0.27.0", - "@esbuild/openbsd-x64": "0.27.0", - "@esbuild/openharmony-arm64": "0.27.0", - "@esbuild/sunos-x64": "0.27.0", - "@esbuild/win32-arm64": "0.27.0", - "@esbuild/win32-ia32": "0.27.0", - "@esbuild/win32-x64": "0.27.0" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", - "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", - "license": "MIT" - }, - "node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - 
"node_modules/estree-util-attach-comments": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz", - "integrity": "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-build-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz", - "integrity": "sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-walker": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-is-identifier-name": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", - "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", - "license": "MIT", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-scope": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/estree-util-scope/-/estree-util-scope-1.0.0.tgz", - "integrity": "sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-to-js": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz", - "integrity": "sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "astring": "^1.8.0", - "source-map": "^0.7.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-value-to-estree": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.5.0.tgz", - "integrity": "sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/remcohaszing" - } - }, - "node_modules/estree-util-visit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", - "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-walker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0" - } - }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/eval": { - "version": "0.1.8", - 
"resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", - "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", - "dependencies": { - "@types/node": "*", - "require-like": ">= 0.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", - "license": "MIT" - }, - "node_modules/fault": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", - "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", - "license": "MIT", - "dependencies": { - "format": "^0.2.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/fdir": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", - "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", - "license": "MIT", - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "picomatch": "^3 || ^4" - }, - "peerDependenciesMeta": { - "picomatch": { - "optional": true - } - } - 
}, - "node_modules/find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", - "license": "MIT", - "dependencies": { - "locate-path": "^6.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/format": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", - "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", - "engines": { - "node": ">=0.4.x" - } - }, - "node_modules/fraction.js": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", - "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs-extra": { - "version": "11.3.2", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.2.tgz", - "integrity": "sha512-Xr9F6z6up6Ws+NjzMCZc6WXg2YFRlrLP9NQDO3VQrWrfiojdhS56TzueT88ze0uBdCTwEIhQ3ptnmKeWGFAe0A==", - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" - }, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": 
"sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-nonce": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", - "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/github-slugger": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz", - "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==", - "license": "ISC" - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "license": "ISC" - }, - "node_modules/hachure-fill": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", - "integrity": 
"sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", - "license": "MIT" - }, - "node_modules/hast-util-classnames": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-classnames/-/hast-util-classnames-3.0.0.tgz", - "integrity": "sha512-tI3JjoGDEBVorMAWK4jNRsfLMYmih1BUOG3VV36pH36njs1IEl7xkNrVTD2mD2yYHmQCa5R/fj61a8IAF4bRaQ==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "space-separated-tokens": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-dom": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/hast-util-from-dom/-/hast-util-from-dom-5.0.1.tgz", - "integrity": "sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==", - "license": "ISC", - "dependencies": { - "@types/hast": "^3.0.0", - "hastscript": "^9.0.0", - "web-namespaces": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-dom/node_modules/hastscript": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", - "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^4.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-html": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz", - "integrity": "sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==", - "license": "MIT", - 
"dependencies": { - "@types/hast": "^3.0.0", - "devlop": "^1.1.0", - "hast-util-from-parse5": "^8.0.0", - "parse5": "^7.0.0", - "vfile": "^6.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-html-isomorphic": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hast-util-from-html-isomorphic/-/hast-util-from-html-isomorphic-2.0.0.tgz", - "integrity": "sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "hast-util-from-dom": "^5.0.0", - "hast-util-from-html": "^2.0.0", - "unist-util-remove-position": "^5.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-parse5": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz", - "integrity": "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "devlop": "^1.0.0", - "hastscript": "^9.0.0", - "property-information": "^7.0.0", - "vfile": "^6.0.0", - "vfile-location": "^5.0.0", - "web-namespaces": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-from-parse5/node_modules/hastscript": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", - "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^4.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": 
"^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-has-property": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-3.0.0.tgz", - "integrity": "sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-heading-rank": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-3.0.0.tgz", - "integrity": "sha512-EJKb8oMUXVHcWZTDepnr+WNbfnXKFNf9duMesmr4S8SXTJBJ9M4Yok08pu9vxdJwdlGRhVumk9mEhkEvKGifwA==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-is-element": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", - "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-parse-selector": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", - "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-select": { - "version": "6.0.4", - "resolved": 
"https://registry.npmjs.org/hast-util-select/-/hast-util-select-6.0.4.tgz", - "integrity": "sha512-RqGS1ZgI0MwxLaKLDxjprynNzINEkRHY2i8ln4DDjgv9ZhcYVIHN9rlpiYsqtFwrgpYU361SyWDQcGNIBVu3lw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "bcp-47-match": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "css-selector-parser": "^3.0.0", - "devlop": "^1.0.0", - "direction": "^2.0.0", - "hast-util-has-property": "^3.0.0", - "hast-util-to-string": "^3.0.0", - "hast-util-whitespace": "^3.0.0", - "nth-check": "^2.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "unist-util-visit": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-estree": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-3.1.3.tgz", - "integrity": "sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-attach-comments": "^3.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "hast-util-whitespace": "^3.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-js": "^1.0.0", - "unist-util-position": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-html": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", - "integrity": 
"sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-whitespace": "^3.0.0", - "html-void-elements": "^3.0.0", - "mdast-util-to-hast": "^13.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "stringify-entities": "^4.0.0", - "zwitch": "^2.0.4" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-jsx-runtime": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", - "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "hast-util-whitespace": "^3.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "property-information": "^7.0.0", - "space-separated-tokens": "^2.0.0", - "style-to-js": "^1.0.0", - "unist-util-position": "^5.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-string": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz", - "integrity": "sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-to-text": 
{ - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz", - "integrity": "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/unist": "^3.0.0", - "hast-util-is-element": "^3.0.0", - "unist-util-find-after": "^5.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hast-util-whitespace": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", - "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hastscript": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", - "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "comma-separated-tokens": "^2.0.0", - "hast-util-parse-selector": "^4.0.0", - "property-information": "^6.0.0", - "space-separated-tokens": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/hastscript/node_modules/property-information": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", - "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/hono": { - "version": "4.10.7", - "resolved": 
"https://registry.npmjs.org/hono/-/hono-4.10.7.tgz", - "integrity": "sha512-icXIITfw/07Q88nLSkB9aiUrd8rYzSweK681Kjo/TSggaGbOX4RRyxxm71v+3PC8C/j+4rlxGeoTRxQDkaJkUw==", - "license": "MIT", - "engines": { - "node": ">=16.9.0" - } - }, - "node_modules/html-void-elements": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", - "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "license": "MIT", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": 
"sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "license": "ISC" - }, - "node_modules/inline-style-parser": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", - "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", - "license": "MIT" - }, - "node_modules/internmap": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", - "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/is-alphabetical": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", - "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-alphanumerical": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", - "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", - "license": "MIT", - "dependencies": { - "is-alphabetical": "^2.0.0", - "is-decimal": "^2.0.0" - }, - "funding": { - "type": 
"github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-decimal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", - "integrity": "sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-hexadecimal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", - "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/is-interactive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", - "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-plain-obj": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", - "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-unicode-supported": { - "version": "1.3.0", - 
"resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", - "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "license": "ISC" - }, - "node_modules/javascript-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", - "integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", - "license": "MIT" - }, - "node_modules/jiti": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", - "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", - "license": "MIT", - "bin": { - "jiti": "lib/jiti-cli.mjs" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", - "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", - "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": 
"sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonfile": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", - "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", - "license": "MIT", - "dependencies": { - "universalify": "^2.0.0" - }, - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/katex": { - "version": "0.16.25", - "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.25.tgz", - "integrity": "sha512-woHRUZ/iF23GBP1dkDQMh1QBad9dmr8/PAwNA54VrSOVYgI12MAcE14TqnDdQOdzyEonGzMepYnqBMYdsoAr8Q==", - "funding": [ - "https://opencollective.com/katex", - "https://github.com/sponsors/katex" - ], - "license": "MIT", - "dependencies": { - "commander": "^8.3.0" - }, - "bin": { - "katex": "cli.js" - } - }, - "node_modules/katex/node_modules/commander": { - "version": "8.3.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", - "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/khroma": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", - "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" - }, - "node_modules/langium": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz", - "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==", - "license": "MIT", - "dependencies": { - "chevrotain": "~11.0.3", - "chevrotain-allstar": "~0.3.0", - "vscode-languageserver": "~9.0.1", - "vscode-languageserver-textdocument": "~1.0.11", - 
"vscode-uri": "~3.0.8" - }, - "engines": { - "node": ">=16.0.0" - } - }, - "node_modules/layout-base": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", - "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", - "license": "MIT" - }, - "node_modules/lightningcss": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", - "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", - "license": "MPL-2.0", - "dependencies": { - "detect-libc": "^2.0.3" - }, - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - }, - "optionalDependencies": { - "lightningcss-android-arm64": "1.30.2", - "lightningcss-darwin-arm64": "1.30.2", - "lightningcss-darwin-x64": "1.30.2", - "lightningcss-freebsd-x64": "1.30.2", - "lightningcss-linux-arm-gnueabihf": "1.30.2", - "lightningcss-linux-arm64-gnu": "1.30.2", - "lightningcss-linux-arm64-musl": "1.30.2", - "lightningcss-linux-x64-gnu": "1.30.2", - "lightningcss-linux-x64-musl": "1.30.2", - "lightningcss-win32-arm64-msvc": "1.30.2", - "lightningcss-win32-x64-msvc": "1.30.2" - } - }, - "node_modules/lightningcss-android-arm64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", - "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", - "cpu": [ - "arm64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-darwin-arm64": { - "version": "1.30.2", - "resolved": 
"https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", - "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", - "cpu": [ - "arm64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-darwin-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", - "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", - "cpu": [ - "x64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-freebsd-x64": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", - "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", - "cpu": [ - "x64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm-gnueabihf": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", - "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", - "cpu": [ - "arm" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", - "integrity": "sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", - "cpu": [ - "arm64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-arm64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", - "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", - "cpu": [ - "arm64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-x64-gnu": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", - "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", - "cpu": [ - "x64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-linux-x64-musl": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", - "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", - "cpu": [ - "x64" - ], - "license": "MPL-2.0", - 
"optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-win32-arm64-msvc": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", - "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", - "cpu": [ - "arm64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/lightningcss-win32-x64-msvc": { - "version": "1.30.2", - "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", - "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", - "cpu": [ - "x64" - ], - "license": "MPL-2.0", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">= 12.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/parcel" - } - }, - "node_modules/locate-path": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", - "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", - "license": "MIT", - "dependencies": { - "p-locate": "^5.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lodash-es": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", - "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", - "license": "MIT" - }, - "node_modules/log-symbols": { - "version": 
"5.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", - "integrity": "sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==", - "license": "MIT", - "dependencies": { - "chalk": "^5.0.0", - "is-unicode-supported": "^1.1.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/longest-streak": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", - "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "license": "ISC" - }, - "node_modules/magic-string": { - "version": "0.30.21", - "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", - "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.5" - } - }, - "node_modules/mark.js": { - "version": "8.11.1", - "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", - "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==", - "license": "MIT" - }, - "node_modules/markdown-extensions": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz", - "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==", - "license": "MIT", - "engines": { - "node": ">=16" - }, - 
"funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/markdown-table": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", - "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/marked": { - "version": "16.4.2", - "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.2.tgz", - "integrity": "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==", - "license": "MIT", - "bin": { - "marked": "bin/marked.js" - }, - "engines": { - "node": ">= 20" - } - }, - "node_modules/mdast-util-directive": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz", - "integrity": "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-find-and-replace": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", - "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "escape-string-regexp": "^5.0.0", - "unist-util-is": "^6.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-from-markdown": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", - "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "mdast-util-to-string": "^4.0.0", - "micromark": "^4.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-decode-string": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-stringify-position": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-frontmatter": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", - "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "escape-string-regexp": "^5.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "micromark-extension-frontmatter": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", - "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", - "license": "MIT", - "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-gfm-autolink-literal": "^2.0.0", - "mdast-util-gfm-footnote": 
"^2.0.0", - "mdast-util-gfm-strikethrough": "^2.0.0", - "mdast-util-gfm-table": "^2.0.0", - "mdast-util-gfm-task-list-item": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-autolink-literal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", - "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "ccount": "^2.0.0", - "devlop": "^1.0.0", - "mdast-util-find-and-replace": "^3.0.0", - "micromark-util-character": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-footnote": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", - "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-strikethrough": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", - "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", - "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "markdown-table": "^3.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-gfm-task-list-item": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", - "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", - "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", - "license": "MIT", - "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdx-expression": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", - "integrity": 
"sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdx-jsx": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", - "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-stringify-position": "^4.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-mdxjs-esm": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", - "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", - "license": "MIT", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-phrasing": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", - "integrity": 
"sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-hast": { - "version": "13.2.1", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", - "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@ungap/structured-clone": "^1.0.0", - "devlop": "^1.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "trim-lines": "^3.0.0", - "unist-util-position": "^5.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-markdown": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", - "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^4.0.0", - "mdast-util-to-string": "^4.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-decode-string": "^2.0.0", - "unist-util-visit": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/mdast-util-to-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", - "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", - "license": 
"MIT", - "dependencies": { - "@types/mdast": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/media-query-parser": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/media-query-parser/-/media-query-parser-2.0.2.tgz", - "integrity": "sha512-1N4qp+jE0pL5Xv4uEcwVUhIkwdUO3S/9gML90nqKA7v7FcOS5vUtatfzok9S9U1EJU8dHWlcv95WLnKmmxZI9w==", - "license": "MIT", - "dependencies": { - "@babel/runtime": "^7.12.5" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "license": "MIT" - }, - "node_modules/mermaid": { - "version": "11.12.2", - "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.2.tgz", - "integrity": "sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==", - "license": "MIT", - "dependencies": { - "@braintree/sanitize-url": "^7.1.1", - "@iconify/utils": "^3.0.1", - "@mermaid-js/parser": "^0.6.3", - "@types/d3": "^7.4.3", - "cytoscape": "^3.29.3", - "cytoscape-cose-bilkent": "^4.1.0", - "cytoscape-fcose": "^2.2.0", - "d3": "^7.9.0", - "d3-sankey": "^0.12.3", - "dagre-d3-es": "7.0.13", - "dayjs": "^1.11.18", - "dompurify": "^3.2.5", - "katex": "^0.16.22", - "khroma": "^2.1.0", - "lodash-es": "^4.17.21", - "marked": "^16.2.1", - "roughjs": "^4.6.6", - "stylis": "^4.3.6", - "ts-dedent": "^2.2.0", - "uuid": "^11.1.0" - } - }, - "node_modules/mermaid-isomorphic": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/mermaid-isomorphic/-/mermaid-isomorphic-3.0.4.tgz", - "integrity": "sha512-XQTy7H1XwHK3DPEHf+ZNWiqUEd9BwX3Xws38R9Fj2gx718srmgjlZoUzHr+Tca+O+dqJOJsAJaKzCoP65QDfDg==", - "license": "MIT", - "dependencies": { - "@fortawesome/fontawesome-free": "^6.0.0", - "mermaid": "^11.0.0" - }, - "funding": { - 
"url": "https://github.com/sponsors/remcohaszing" - }, - "peerDependencies": { - "playwright": "1" - }, - "peerDependenciesMeta": { - "playwright": { - "optional": true - } - } - }, - "node_modules/micromark": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", - "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/debug": "^4.0.0", - "debug": "^4.0.0", - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-encode": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-subtokenize": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-core-commonmark": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", - "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "decode-named-character-reference": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-destination": "^2.0.0", - 
"micromark-factory-label": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-factory-title": "^2.0.0", - "micromark-factory-whitespace": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-html-tag-name": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-subtokenize": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-directive": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz", - "integrity": "sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-factory-whitespace": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "parse-entities": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-frontmatter": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz", - "integrity": "sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==", - "license": "MIT", - "dependencies": { - "fault": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", - 
"integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", - "license": "MIT", - "dependencies": { - "micromark-extension-gfm-autolink-literal": "^2.0.0", - "micromark-extension-gfm-footnote": "^2.0.0", - "micromark-extension-gfm-strikethrough": "^2.0.0", - "micromark-extension-gfm-table": "^2.0.0", - "micromark-extension-gfm-tagfilter": "^2.0.0", - "micromark-extension-gfm-task-list-item": "^2.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-autolink-literal": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", - "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-footnote": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", - "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": 
"https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-strikethrough": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", - "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-classify-character": "^2.0.0", - "micromark-util-resolve-all": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-table": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", - "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-tagfilter": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", - "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", - "license": "MIT", - "dependencies": { - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-gfm-task-list-item": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", - "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdx-expression": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz", - "integrity": "sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-mdx-expression": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-extension-mdx-jsx": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.2.tgz", - "integrity": "sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "micromark-factory-mdx-expression": "^2.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": 
"^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdx-md": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz", - "integrity": "sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==", - "license": "MIT", - "dependencies": { - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdxjs": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz", - "integrity": "sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==", - "license": "MIT", - "dependencies": { - "acorn": "^8.0.0", - "acorn-jsx": "^5.0.0", - "micromark-extension-mdx-expression": "^3.0.0", - "micromark-extension-mdx-jsx": "^3.0.0", - "micromark-extension-mdx-md": "^2.0.0", - "micromark-extension-mdxjs-esm": "^3.0.0", - "micromark-util-combine-extensions": "^2.0.0", - "micromark-util-types": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-extension-mdxjs-esm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz", - "integrity": "sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-core-commonmark": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - 
"micromark-util-types": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/micromark-factory-destination": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", - "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-label": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", - "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-mdx-expression": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.3.tgz", - "integrity": "sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": 
"https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "devlop": "^1.0.0", - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-events-to-acorn": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-position-from-estree": "^2.0.0", - "vfile-message": "^4.0.0" - } - }, - "node_modules/micromark-factory-space": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", - "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-title": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", - "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-factory-whitespace": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", - "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", - 
"funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-factory-space": "^2.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-character": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", - "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-chunked": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", - "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-classify-character": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", - "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": 
"https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-combine-extensions": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", - "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-chunked": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-numeric-character-reference": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", - "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-decode-string": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", - "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - 
"decode-named-character-reference": "^1.0.0", - "micromark-util-character": "^2.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-encode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", - "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-events-to-acorn": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.3.tgz", - "integrity": "sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/unist": "^3.0.0", - "devlop": "^1.0.0", - "estree-util-visit": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "vfile-message": "^4.0.0" - } - }, - "node_modules/micromark-util-html-tag-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", - "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - 
"node_modules/micromark-util-normalize-identifier": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", - "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-resolve-all": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", - "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-sanitize-uri": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", - "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "micromark-util-character": "^2.0.0", - "micromark-util-encode": "^2.0.0", - "micromark-util-symbol": "^2.0.0" - } - }, - "node_modules/micromark-util-subtokenize": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", - "integrity": 
"sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT", - "dependencies": { - "devlop": "^1.0.0", - "micromark-util-chunked": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" - } - }, - "node_modules/micromark-util-symbol": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", - "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/micromark-util-types": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", - "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ], - "license": "MIT" - }, - "node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mime-db": { - "version": "1.54.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", - "integrity": 
"sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/mini-svg-data-uri": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", - "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==", - "license": "MIT", - "bin": { - "mini-svg-data-uri": "cli.js" - } - }, - "node_modules/minisearch": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.2.0.tgz", - "integrity": "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==", - "license": "MIT" - }, - "node_modules/mlly": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", - "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", - "license": "MIT", - "dependencies": { - "acorn": "^8.15.0", - "pathe": "^2.0.3", - "pkg-types": "^1.3.1", - "ufo": "^1.6.1" - } - }, - "node_modules/modern-ahocorasick": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/modern-ahocorasick/-/modern-ahocorasick-1.1.0.tgz", - "integrity": "sha512-sEKPVl2rM+MNVkGQt3ChdmD8YsigmXdn5NifZn6jiwn9LRJpWm8F3guhaqrJT/JOat6pwpbXEk6kv+b9DMIjsQ==", - "license": "MIT" - }, - "node_modules/ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", - "license": "MIT" - }, - "node_modules/nanoid": { - 
"version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/negotiator": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", - "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-releases": { - "version": "2.0.27", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", - "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", - "license": "MIT" - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/normalize-range/-/normalize-range-0.1.2.tgz", - "integrity": "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "license": "BSD-2-Clause", - "dependencies": { 
- "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/nuqs": { - "version": "2.8.2", - "resolved": "https://registry.npmjs.org/nuqs/-/nuqs-2.8.2.tgz", - "integrity": "sha512-KMb6gmUJaLVRw+SbKUmBTo0IWLGU2s1Z4Iz/N64+EIDcu6Iw51CuppgKmxZR2EW3iXaOz5LF4avGKD2wq45eqg==", - "license": "MIT", - "dependencies": { - "@standard-schema/spec": "1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/franky47" - }, - "peerDependencies": { - "@remix-run/react": ">=2", - "@tanstack/react-router": "^1", - "next": ">=14.2.0", - "react": ">=18.2.0 || ^19.0.0-0", - "react-router": "^5 || ^6 || ^7", - "react-router-dom": "^5 || ^6 || ^7" - }, - "peerDependenciesMeta": { - "@remix-run/react": { - "optional": true - }, - "@tanstack/react-router": { - "optional": true - }, - "next": { - "optional": true - }, - "react-router": { - "optional": true - }, - "react-router-dom": { - "optional": true - } - } - }, - "node_modules/on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", - "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - 
"url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/oniguruma-to-es": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-2.3.0.tgz", - "integrity": "sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g==", - "license": "MIT", - "dependencies": { - "emoji-regex-xs": "^1.0.0", - "regex": "^5.1.1", - "regex-recursion": "^5.1.1" - } - }, - "node_modules/ora": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-7.0.1.tgz", - "integrity": "sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==", - "license": "MIT", - "dependencies": { - "chalk": "^5.3.0", - "cli-cursor": "^4.0.0", - "cli-spinners": "^2.9.0", - "is-interactive": "^2.0.0", - "is-unicode-supported": "^1.3.0", - "log-symbols": "^5.1.0", - "stdin-discarder": "^0.1.0", - "string-width": "^6.1.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-limit": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", - "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", - "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", - "license": "MIT", - "dependencies": { - "p-limit": "^3.0.2" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate/node_modules/p-limit": { - "version": "3.1.0", - 
"resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", - "license": "MIT", - "dependencies": { - "yocto-queue": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate/node_modules/yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/package-manager-detector": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.6.0.tgz", - "integrity": "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==", - "license": "MIT" - }, - "node_modules/parse-entities": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", - "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", - "license": "MIT", - "dependencies": { - "@types/unist": "^2.0.0", - "character-entities-legacy": "^3.0.0", - "character-reference-invalid": "^2.0.0", - "decode-named-character-reference": "^1.0.0", - "is-alphanumerical": "^2.0.0", - "is-decimal": "^2.0.0", - "is-hexadecimal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/parse-entities/node_modules/@types/unist": { - "version": "2.0.11", - "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", - "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", - 
"license": "MIT" - }, - "node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", - "license": "MIT", - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path-data-parser": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz", - "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==", - "license": "MIT" - }, - "node_modules/path-exists": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", - "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/pathe": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", - "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "license": "MIT" - }, - "node_modules/picocolors": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", - "integrity": 
"sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pkg-types": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", - "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", - "license": "MIT", - "dependencies": { - "confbox": "^0.1.8", - "mlly": "^1.7.4", - "pathe": "^2.0.1" - } - }, - "node_modules/playwright": { - "version": "1.57.0", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.57.0.tgz", - "integrity": "sha512-ilYQj1s8sr2ppEJ2YVadYBN0Mb3mdo9J0wQ+UuDhzYqURwSoW4n1Xs5vs7ORwgDGmyEh33tRMeS8KhdkMoLXQw==", - "license": "Apache-2.0", - "dependencies": { - "playwright-core": "1.57.0" - }, - "bin": { - "playwright": "cli.js" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "fsevents": "2.3.2" - } - }, - "node_modules/playwright-core": { - "version": "1.57.0", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.57.0.tgz", - "integrity": "sha512-agTcKlMw/mjBWOnD6kFZttAAGHgi/Nw0CZ2o6JqWSbMlI219lAFLZZCyqByTsvVAJq5XA5H8cA6PrvBRpBWEuQ==", - "license": "Apache-2.0", - "bin": { - "playwright-core": "cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/points-on-curve": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz", - "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==", - "license": "MIT" - }, - 
"node_modules/points-on-path": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz", - "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==", - "license": "MIT", - "dependencies": { - "path-data-parser": "0.1.0", - "points-on-curve": "0.2.0" - } - }, - "node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", - "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", - "license": "MIT" - }, - "node_modules/property-information": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", - "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/radix-ui": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/radix-ui/-/radix-ui-1.4.3.tgz", - "integrity": "sha512-aWizCQiyeAenIdUbqEpXgRA1ya65P13NKn/W8rWkcN0OPkRDxdBVLWnIEDsS2RpwCK2nobI7oMUSmexzTDyAmA==", - "license": "MIT", - 
"dependencies": { - "@radix-ui/primitive": "1.1.3", - "@radix-ui/react-accessible-icon": "1.1.7", - "@radix-ui/react-accordion": "1.2.12", - "@radix-ui/react-alert-dialog": "1.1.15", - "@radix-ui/react-arrow": "1.1.7", - "@radix-ui/react-aspect-ratio": "1.1.7", - "@radix-ui/react-avatar": "1.1.10", - "@radix-ui/react-checkbox": "1.3.3", - "@radix-ui/react-collapsible": "1.1.12", - "@radix-ui/react-collection": "1.1.7", - "@radix-ui/react-compose-refs": "1.1.2", - "@radix-ui/react-context": "1.1.2", - "@radix-ui/react-context-menu": "2.2.16", - "@radix-ui/react-dialog": "1.1.15", - "@radix-ui/react-direction": "1.1.1", - "@radix-ui/react-dismissable-layer": "1.1.11", - "@radix-ui/react-dropdown-menu": "2.1.16", - "@radix-ui/react-focus-guards": "1.1.3", - "@radix-ui/react-focus-scope": "1.1.7", - "@radix-ui/react-form": "0.1.8", - "@radix-ui/react-hover-card": "1.1.15", - "@radix-ui/react-label": "2.1.7", - "@radix-ui/react-menu": "2.1.16", - "@radix-ui/react-menubar": "1.1.16", - "@radix-ui/react-navigation-menu": "1.2.14", - "@radix-ui/react-one-time-password-field": "0.1.8", - "@radix-ui/react-password-toggle-field": "0.1.3", - "@radix-ui/react-popover": "1.1.15", - "@radix-ui/react-popper": "1.2.8", - "@radix-ui/react-portal": "1.1.9", - "@radix-ui/react-presence": "1.1.5", - "@radix-ui/react-primitive": "2.1.3", - "@radix-ui/react-progress": "1.1.7", - "@radix-ui/react-radio-group": "1.3.8", - "@radix-ui/react-roving-focus": "1.1.11", - "@radix-ui/react-scroll-area": "1.2.10", - "@radix-ui/react-select": "2.2.6", - "@radix-ui/react-separator": "1.1.7", - "@radix-ui/react-slider": "1.3.6", - "@radix-ui/react-slot": "1.2.3", - "@radix-ui/react-switch": "1.2.6", - "@radix-ui/react-tabs": "1.1.13", - "@radix-ui/react-toast": "1.2.15", - "@radix-ui/react-toggle": "1.1.10", - "@radix-ui/react-toggle-group": "1.1.11", - "@radix-ui/react-toolbar": "1.1.11", - "@radix-ui/react-tooltip": "1.2.8", - "@radix-ui/react-use-callback-ref": "1.1.1", - 
"@radix-ui/react-use-controllable-state": "1.2.2", - "@radix-ui/react-use-effect-event": "0.0.2", - "@radix-ui/react-use-escape-keydown": "1.1.1", - "@radix-ui/react-use-is-hydrated": "0.1.0", - "@radix-ui/react-use-layout-effect": "1.1.1", - "@radix-ui/react-use-size": "1.1.1", - "@radix-ui/react-visually-hidden": "1.2.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/radix-ui/node_modules/@radix-ui/react-label": { - "version": "2.1.7", - "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", - "integrity": "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", - "license": "MIT", - "dependencies": { - "@radix-ui/react-primitive": "2.1.3" - }, - "peerDependencies": { - "@types/react": "*", - "@types/react-dom": "*", - "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", - "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - }, - "@types/react-dom": { - "optional": true - } - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/react": { - "version": "19.2.1", - "resolved": "https://registry.npmjs.org/react/-/react-19.2.1.tgz", - "integrity": "sha512-DGrYcCWK7tvYMnWh79yrPHt+vdx9tY+1gPZa7nJQtO/p8bLTDaHp4dzwEhQB7pZ4Xe3ok4XKuEPrVuc+wlpkmw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - 
"node_modules/react-dom": { - "version": "19.2.1", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.1.tgz", - "integrity": "sha512-ibrK8llX2a4eOskq1mXKu/TGZj9qzomO+sNfO98M6d9zIPOEhlBkMkBUBLd1vgS0gQsLDBzA+8jJBVXDnfHmJg==", - "license": "MIT", - "dependencies": { - "scheduler": "^0.27.0" - }, - "peerDependencies": { - "react": "^19.2.1" - } - }, - "node_modules/react-intersection-observer": { - "version": "9.16.0", - "resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.16.0.tgz", - "integrity": "sha512-w9nJSEp+DrW9KmQmeWHQyfaP6b03v+TdXynaoA964Wxt7mdR3An11z4NNCQgL4gKSK7y1ver2Fq+JKH6CWEzUA==", - "license": "MIT", - "peerDependencies": { - "react": "^17.0.0 || ^18.0.0 || ^19.0.0", - "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "react-dom": { - "optional": true - } - } - }, - "node_modules/react-refresh": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", - "integrity": "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/react-remove-scroll": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", - "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", - "license": "MIT", - "dependencies": { - "react-remove-scroll-bar": "^2.3.7", - "react-style-singleton": "^2.2.3", - "tslib": "^2.1.0", - "use-callback-ref": "^1.3.3", - "use-sidecar": "^1.1.3" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-remove-scroll-bar": { - "version": "2.3.8", - "resolved": 
"https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", - "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", - "license": "MIT", - "dependencies": { - "react-style-singleton": "^2.2.2", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/react-router": { - "version": "7.10.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.10.0.tgz", - "integrity": "sha512-FVyCOH4IZ0eDDRycODfUqoN8ZSR2LbTvtx6RPsBgzvJ8xAXlMZNCrOFpu+jb8QbtZnpAd/cEki2pwE848pNGxw==", - "license": "MIT", - "dependencies": { - "cookie": "^1.0.1", - "set-cookie-parser": "^2.6.0" - }, - "engines": { - "node": ">=20.0.0" - }, - "peerDependencies": { - "react": ">=18", - "react-dom": ">=18" - }, - "peerDependenciesMeta": { - "react-dom": { - "optional": true - } - } - }, - "node_modules/react-style-singleton": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", - "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", - "license": "MIT", - "dependencies": { - "get-nonce": "^1.0.0", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - 
"string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/recma-build-jsx": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-build-jsx/-/recma-build-jsx-1.0.0.tgz", - "integrity": "sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "estree-util-build-jsx": "^3.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/recma-jsx": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/recma-jsx/-/recma-jsx-1.0.1.tgz", - "integrity": "sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==", - "license": "MIT", - "dependencies": { - "acorn-jsx": "^5.0.0", - "estree-util-to-js": "^2.0.0", - "recma-parse": "^1.0.0", - "recma-stringify": "^1.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - }, - "peerDependencies": { - "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" - } - }, - "node_modules/recma-parse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-parse/-/recma-parse-1.0.0.tgz", - "integrity": "sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "esast-util-from-js": "^2.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/recma-stringify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/recma-stringify/-/recma-stringify-1.0.0.tgz", - "integrity": "sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==", - "license": "MIT", - "dependencies": { - 
"@types/estree": "^1.0.0", - "estree-util-to-js": "^2.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/regex": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/regex/-/regex-5.1.1.tgz", - "integrity": "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw==", - "license": "MIT", - "dependencies": { - "regex-utilities": "^2.3.0" - } - }, - "node_modules/regex-recursion": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-5.1.1.tgz", - "integrity": "sha512-ae7SBCbzVNrIjgSbh7wMznPcQel1DNlDtzensnFxpiNpXt1U2ju/bHugH422r+4LAVS1FpW1YCwilmnNsjum9w==", - "license": "MIT", - "dependencies": { - "regex": "^5.1.1", - "regex-utilities": "^2.3.0" - } - }, - "node_modules/regex-utilities": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/regex-utilities/-/regex-utilities-2.3.0.tgz", - "integrity": "sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==", - "license": "MIT" - }, - "node_modules/rehype-autolink-headings": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/rehype-autolink-headings/-/rehype-autolink-headings-7.1.0.tgz", - "integrity": "sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@ungap/structured-clone": "^1.0.0", - "hast-util-heading-rank": "^3.0.0", - "hast-util-is-element": "^3.0.0", - "unified": "^11.0.0", - "unist-util-visit": "^5.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/rehype-class-names": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/rehype-class-names/-/rehype-class-names-2.0.0.tgz", - "integrity": 
"sha512-jldCIiAEvXKdq8hqr5f5PzNdIDkvHC6zfKhwta9oRoMu7bn0W7qLES/JrrjBvr9rKz3nJ8x4vY1EWI+dhjHVZQ==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "hast-util-classnames": "^3.0.0", - "hast-util-select": "^6.0.0", - "unified": "^11.0.4" - } - }, - "node_modules/rehype-mermaid": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/rehype-mermaid/-/rehype-mermaid-3.0.0.tgz", - "integrity": "sha512-fxrD5E4Fa1WXUjmjNDvLOMT4XB1WaxcfycFIWiYU0yEMQhcTDElc9aDFnbDFRLxG1Cfo1I3mfD5kg4sjlWaB+Q==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "hast-util-from-html-isomorphic": "^2.0.0", - "hast-util-to-text": "^4.0.0", - "mermaid-isomorphic": "^3.0.0", - "mini-svg-data-uri": "^1.0.0", - "space-separated-tokens": "^2.0.0", - "unified": "^11.0.0", - "unist-util-visit-parents": "^6.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/remcohaszing" - }, - "peerDependencies": { - "playwright": "1" - }, - "peerDependenciesMeta": { - "playwright": { - "optional": true - } - } - }, - "node_modules/rehype-recma": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/rehype-recma/-/rehype-recma-1.0.0.tgz", - "integrity": "sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/hast": "^3.0.0", - "hast-util-to-estree": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/rehype-slug": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-6.0.0.tgz", - "integrity": "sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "github-slugger": "^2.0.0", - "hast-util-heading-rank": "^3.0.0", - "hast-util-to-string": "^3.0.0", - "unist-util-visit": "^5.0.0" - }, 
- "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-directive": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/remark-directive/-/remark-directive-3.0.1.tgz", - "integrity": "sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-directive": "^3.0.0", - "micromark-extension-directive": "^3.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-frontmatter": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz", - "integrity": "sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-frontmatter": "^2.0.0", - "micromark-extension-frontmatter": "^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-gfm": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", - "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-gfm": "^3.0.0", - "micromark-extension-gfm": "^3.0.0", - "remark-parse": "^11.0.0", - "remark-stringify": "^11.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-mdx": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.1.1.tgz", - "integrity": "sha512-Pjj2IYlUY3+D8x00UJsIOg5BEvfMyeI+2uLPn9VO9Wg4MEtN/VTIq2NEJQfde9PnX15KgtHyl9S0BcTnWrIuWg==", 
- "license": "MIT", - "dependencies": { - "mdast-util-mdx": "^3.0.0", - "micromark-extension-mdxjs": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-mdx-frontmatter": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/remark-mdx-frontmatter/-/remark-mdx-frontmatter-5.2.0.tgz", - "integrity": "sha512-U/hjUYTkQqNjjMRYyilJgLXSPF65qbLPdoESOkXyrwz2tVyhAnm4GUKhfXqOOS9W34M3545xEMq+aMpHgVjEeQ==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "estree-util-value-to-estree": "^3.0.0", - "toml": "^3.0.0", - "unified": "^11.0.0", - "unist-util-mdx-define": "^1.0.0", - "yaml": "^2.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/remcohaszing" - } - }, - "node_modules/remark-parse": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", - "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-from-markdown": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-rehype": { - "version": "11.1.2", - "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", - "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", - "license": "MIT", - "dependencies": { - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "mdast-util-to-hast": "^13.0.0", - "unified": "^11.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/remark-stringify": { - "version": "11.0.0", - "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", - 
"integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", - "license": "MIT", - "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-to-markdown": "^2.0.0", - "unified": "^11.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/require-like": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", - "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", - "engines": { - "node": "*" - } - }, - "node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", - "license": "MIT", - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/robust-predicates": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", - "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", - "license": "Unlicense" - }, - "node_modules/rollup": { - "version": "4.53.3", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.3.tgz", - "integrity": "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA==", - "license": "MIT", - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.53.3", - "@rollup/rollup-android-arm64": "4.53.3", - "@rollup/rollup-darwin-arm64": "4.53.3", - 
"@rollup/rollup-darwin-x64": "4.53.3", - "@rollup/rollup-freebsd-arm64": "4.53.3", - "@rollup/rollup-freebsd-x64": "4.53.3", - "@rollup/rollup-linux-arm-gnueabihf": "4.53.3", - "@rollup/rollup-linux-arm-musleabihf": "4.53.3", - "@rollup/rollup-linux-arm64-gnu": "4.53.3", - "@rollup/rollup-linux-arm64-musl": "4.53.3", - "@rollup/rollup-linux-loong64-gnu": "4.53.3", - "@rollup/rollup-linux-ppc64-gnu": "4.53.3", - "@rollup/rollup-linux-riscv64-gnu": "4.53.3", - "@rollup/rollup-linux-riscv64-musl": "4.53.3", - "@rollup/rollup-linux-s390x-gnu": "4.53.3", - "@rollup/rollup-linux-x64-gnu": "4.53.3", - "@rollup/rollup-linux-x64-musl": "4.53.3", - "@rollup/rollup-openharmony-arm64": "4.53.3", - "@rollup/rollup-win32-arm64-msvc": "4.53.3", - "@rollup/rollup-win32-ia32-msvc": "4.53.3", - "@rollup/rollup-win32-x64-gnu": "4.53.3", - "@rollup/rollup-win32-x64-msvc": "4.53.3", - "fsevents": "~2.3.2" - } - }, - "node_modules/roughjs": { - "version": "4.6.6", - "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz", - "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==", - "license": "MIT", - "dependencies": { - "hachure-fill": "^0.5.2", - "path-data-parser": "^0.1.0", - "points-on-curve": "^0.2.0", - "points-on-path": "^0.2.1" - } - }, - "node_modules/rw": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", - "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", - "license": "BSD-3-Clause" - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": 
"consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT" - }, - "node_modules/scheduler": { - "version": "0.27.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", - "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", - "license": "MIT" - }, - "node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/send": { - "version": "0.19.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.19.0.tgz", - "integrity": "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw==", - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - 
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "license": "MIT" - }, - "node_modules/send/node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/serve-static": { - "version": "1.16.2", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.2.tgz", - "integrity": "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw==", - "license": "MIT", - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/set-cookie-parser": { - "version": "2.7.2", - "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", - "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", - "license": "MIT" - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", - "license": "ISC" - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shiki": { - "version": "1.29.2", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.29.2.tgz", - "integrity": "sha512-njXuliz/cP+67jU2hukkxCNuH1yUi4QfdZZY+sMr5PPrIyXSu5iTb/qYC4BiWWB0vZ+7TbdvYUCeL23zpwCfbg==", - "license": "MIT", - "dependencies": { - "@shikijs/core": "1.29.2", - "@shikijs/engine-javascript": "1.29.2", - "@shikijs/engine-oniguruma": "1.29.2", - "@shikijs/langs": "1.29.2", - "@shikijs/themes": "1.29.2", - "@shikijs/types": "1.29.2", - "@shikijs/vscode-textmate": "^10.0.1", - "@types/hast": "^3.0.4" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "license": "ISC" - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", - "license": "MIT" - }, - "node_modules/source-map": { - "version": "0.7.6", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", - "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", - "license": "BSD-3-Clause", - "engines": { - "node": ">= 12" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", - "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/space-separated-tokens": { - "version": "2.0.2", - "resolved": 
"https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", - "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/stdin-discarder": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz", - "integrity": "sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==", - "license": "MIT", - "dependencies": { - "bl": "^5.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-6.1.0.tgz", - "integrity": "sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==", - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^10.2.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/stringify-entities": { - "version": "4.0.4", - "resolved": 
"https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", - "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", - "license": "MIT", - "dependencies": { - "character-entities-html4": "^2.0.0", - "character-entities-legacy": "^3.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", - "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/style-to-js": { - "version": "1.1.21", - "resolved": "https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", - "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", - "license": "MIT", - "dependencies": { - "style-to-object": "1.0.14" - } - }, - "node_modules/style-to-object": { - "version": "1.0.14", - "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", - "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", - "license": "MIT", - "dependencies": { - "inline-style-parser": "0.2.7" - } - }, - "node_modules/stylis": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz", - "integrity": 
"sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==", - "license": "MIT" - }, - "node_modules/tabbable": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.3.0.tgz", - "integrity": "sha512-EIHvdY5bPLuWForiR/AN2Bxngzpuwn1is4asboytXtpTgsArc+WmSJKVLlhdh71u7jFcryDqB2A8lQvj78MkyQ==", - "license": "MIT" - }, - "node_modules/tailwindcss": { - "version": "4.1.17", - "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.17.tgz", - "integrity": "sha512-j9Ee2YjuQqYT9bbRTfTZht9W/ytp5H+jJpZKiYdP/bpnXARAuELt9ofP0lPnmHjbga7SNQIxdTAXCmtKVYjN+Q==", - "dev": true, - "license": "MIT" - }, - "node_modules/tapable": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", - "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/tinyexec": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", - "integrity": "sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", - "license": "MIT", - "engines": { - "node": ">=18" - } - }, - "node_modules/tinyglobby": { - "version": "0.2.15", - "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", - "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", - "license": "MIT", - "dependencies": { - "fdir": "^6.5.0", - "picomatch": "^4.0.3" - }, - "engines": { - "node": ">=12.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/SuperchupuDev" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": 
"sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/toml": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", - "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", - "license": "MIT" - }, - "node_modules/trim-lines": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", - "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/trough": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", - "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/ts-dedent": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", - "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", - "license": "MIT", - "engines": { - "node": ">=6.10" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", - "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", - "license": "0BSD" - }, - "node_modules/twoslash": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.3.4.tgz", - "integrity": "sha512-RtJURJlGRxrkJmTcZMjpr7jdYly1rfgpujJr1sBM9ch7SKVht/SjFk23IOAyvwT1NLCk+SJiMrvW4rIAUM2Wug==", - "license": "MIT", - "dependencies": { - "@typescript/vfs": "^1.6.1", - 
"twoslash-protocol": "0.3.4" - }, - "peerDependencies": { - "typescript": "^5.5.0" - } - }, - "node_modules/twoslash-protocol": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.3.4.tgz", - "integrity": "sha512-HHd7lzZNLUvjPzG/IE6js502gEzLC1x7HaO1up/f72d8G8ScWAs9Yfa97igelQRDl5h9tGcdFsRp+lNVre1EeQ==", - "license": "MIT" - }, - "node_modules/typescript": { - "version": "5.9.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", - "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", - "license": "Apache-2.0", - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/ua-parser-js": { - "version": "1.0.41", - "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.41.tgz", - "integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/ua-parser-js" - }, - { - "type": "paypal", - "url": "https://paypal.me/faisalman" - }, - { - "type": "github", - "url": "https://github.com/sponsors/faisalman" - } - ], - "license": "MIT", - "bin": { - "ua-parser-js": "script/cli.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/ufo": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.1.tgz", - "integrity": "sha512-9a4/uxlTWJ4+a5i0ooc1rU7C7YOw3wT+UGqdeNNHWnOF9qcMBgLRS+4IYUqbczewFx4mLEig6gawh7X6mFlEkA==", - "license": "MIT" - }, - "node_modules/undici-types": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", - "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "license": "MIT" - }, - "node_modules/unified": { - "version": "11.0.5", - "resolved": 
"https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", - "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "bail": "^2.0.0", - "devlop": "^1.0.0", - "extend": "^3.0.0", - "is-plain-obj": "^4.0.0", - "trough": "^2.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-find-after": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz", - "integrity": "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-is": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", - "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-mdx-define": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/unist-util-mdx-define/-/unist-util-mdx-define-1.1.2.tgz", - "integrity": "sha512-9ncH7i7TN5Xn7/tzX5bE3rXgz1X/u877gYVAUB3mLeTKYJmQHmqKTDBi6BTGXV7AeolBCI9ErcVsOt2qryoD0g==", - "license": "MIT", - "dependencies": { - "@types/estree": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-util-scope": "^1.0.0", - "estree-walker": "^3.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/remcohaszing" - } - }, - 
"node_modules/unist-util-position": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", - "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-position-from-estree": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz", - "integrity": "sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-remove-position": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz", - "integrity": "sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-visit": "^5.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-stringify-position": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", - "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-visit": { - "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz", - "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-is": "^6.0.0", - "unist-util-visit-parents": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/unist-util-visit-parents": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", - "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/universalify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", - "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", - "license": "MIT", - "engines": { - "node": ">= 10.0.0" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.0.tgz", - "integrity": "sha512-Dn+NlSF/7+0lVSEZ57SYQg6/E44arLzsVOGgrElBn/BlG1B8WKdbLppOocFrXwRNTkNlgdGNaBgH1o0lggDPiw==", - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - 
"node_modules/use-callback-ref": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", - "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", - "license": "MIT", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/use-sidecar": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", - "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", - "license": "MIT", - "dependencies": { - "detect-node-es": "^1.1.0", - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "@types/react": "*", - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" - }, - "peerDependenciesMeta": { - "@types/react": { - "optional": true - } - } - }, - "node_modules/use-sync-external-store": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", - "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", - "license": "MIT", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "license": "MIT" - }, - "node_modules/uuid": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", - "integrity": 
"sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", - "funding": [ - "https://github.com/sponsors/broofa", - "https://github.com/sponsors/ctavan" - ], - "license": "MIT", - "bin": { - "uuid": "dist/esm/bin/uuid" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/vfile": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", - "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vfile-location": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", - "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vfile-matter": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/vfile-matter/-/vfile-matter-5.0.1.tgz", - "integrity": "sha512-o6roP82AiX0XfkyTHyRCMXgHfltUNlXSEqCIS80f+mbAyiQBE2fxtDVMtseyytGx75sihiJFo/zR6r/4LTs2Cw==", - "license": "MIT", - "dependencies": { - "vfile": "^6.0.0", - "yaml": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vfile-message": { - "version": "4.0.3", - "resolved": 
"https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", - "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", - "license": "MIT", - "dependencies": { - "@types/unist": "^3.0.0", - "unist-util-stringify-position": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/vite": { - "version": "7.2.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.2.6.tgz", - "integrity": "sha512-tI2l/nFHC5rLh7+5+o7QjKjSR04ivXDF4jcgV0f/bTQ+OJiITy5S6gaynVsEM+7RqzufMnVbIon6Sr5x1SDYaQ==", - "license": "MIT", - "dependencies": { - "esbuild": "^0.25.0", - "fdir": "^6.5.0", - "picomatch": "^4.0.3", - "postcss": "^8.5.6", - "rollup": "^4.43.0", - "tinyglobby": "^0.2.15" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^20.19.0 || >=22.12.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^20.19.0 || >=22.12.0", - "jiti": ">=1.21.0", - "less": "^4.0.0", - "lightningcss": "^1.21.0", - "sass": "^1.70.0", - "sass-embedded": "^1.70.0", - "stylus": ">=0.54.8", - "sugarss": "^5.0.0", - "terser": "^5.16.0", - "tsx": "^4.8.1", - "yaml": "^2.4.2" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "jiti": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - }, - "tsx": { - "optional": true - }, - "yaml": { - "optional": true - } - } - }, - "node_modules/vite-node": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", - "integrity": 
"sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", - "license": "MIT", - "dependencies": { - "cac": "^6.7.14", - "debug": "^4.4.1", - "es-module-lexer": "^1.7.0", - "pathe": "^2.0.3", - "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" - }, - "bin": { - "vite-node": "vite-node.mjs" - }, - "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" - }, - "funding": { - "url": "https://opencollective.com/vitest" - } - }, - "node_modules/vite/node_modules/@esbuild/aix-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", - "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", - "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", - "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/android-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", - "integrity": 
"sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", - "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/darwin-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", - "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", - "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/freebsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", - "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm": { - "version": "0.25.12", 
- "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", - "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", - "cpu": [ - "arm" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", - "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", - "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-loong64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", - "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", - "cpu": [ - "loong64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-mips64el": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", - "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", - "cpu": [ - "mips64el" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": 
{ - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", - "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", - "cpu": [ - "ppc64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-riscv64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", - "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", - "cpu": [ - "riscv64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-s390x": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", - "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", - "cpu": [ - "s390x" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/linux-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", - "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", - "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", 
- "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/netbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", - "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", - "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/openbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", - "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", - "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "openharmony" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/sunos-x64": { - "version": "0.25.12", - "resolved": 
"https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", - "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", - "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", - "cpu": [ - "arm64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", - "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", - "cpu": [ - "ia32" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/@esbuild/win32-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", - "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", - "cpu": [ - "x64" - ], - "license": "MIT", - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=18" - } - }, - "node_modules/vite/node_modules/esbuild": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", - "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", - "hasInstallScript": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - 
"@esbuild/aix-ppc64": "0.25.12", - "@esbuild/android-arm": "0.25.12", - "@esbuild/android-arm64": "0.25.12", - "@esbuild/android-x64": "0.25.12", - "@esbuild/darwin-arm64": "0.25.12", - "@esbuild/darwin-x64": "0.25.12", - "@esbuild/freebsd-arm64": "0.25.12", - "@esbuild/freebsd-x64": "0.25.12", - "@esbuild/linux-arm": "0.25.12", - "@esbuild/linux-arm64": "0.25.12", - "@esbuild/linux-ia32": "0.25.12", - "@esbuild/linux-loong64": "0.25.12", - "@esbuild/linux-mips64el": "0.25.12", - "@esbuild/linux-ppc64": "0.25.12", - "@esbuild/linux-riscv64": "0.25.12", - "@esbuild/linux-s390x": "0.25.12", - "@esbuild/linux-x64": "0.25.12", - "@esbuild/netbsd-arm64": "0.25.12", - "@esbuild/netbsd-x64": "0.25.12", - "@esbuild/openbsd-arm64": "0.25.12", - "@esbuild/openbsd-x64": "0.25.12", - "@esbuild/openharmony-arm64": "0.25.12", - "@esbuild/sunos-x64": "0.25.12", - "@esbuild/win32-arm64": "0.25.12", - "@esbuild/win32-ia32": "0.25.12", - "@esbuild/win32-x64": "0.25.12" - } - }, - "node_modules/vite/node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/vocs": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/vocs/-/vocs-1.2.1.tgz", - "integrity": "sha512-rQ5aoD68+UJQeJ9G/nPcqcwhbBpMFZnHJ9ZkIsRHaeqBdiA4S86ufplJRKxmX56XZLEpY+wlU+TGz8Qsxtb8Sw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/wevm" - } - ], - "license": "MIT", - "dependencies": { - "@floating-ui/react": "^0.27.16", - "@hono/node-server": "^1.19.5", - "@mdx-js/mdx": "^3.1.1", - "@mdx-js/react": "^3.1.1", - "@mdx-js/rollup": "^3.1.1", - "@noble/hashes": "^1.7.1", - "@radix-ui/colors": "^3.0.0", - "@radix-ui/react-accordion": 
"^1.2.3", - "@radix-ui/react-dialog": "^1.1.6", - "@radix-ui/react-icons": "^1.3.2", - "@radix-ui/react-label": "^2.1.2", - "@radix-ui/react-navigation-menu": "^1.2.5", - "@radix-ui/react-popover": "^1.1.6", - "@radix-ui/react-tabs": "^1.1.3", - "@shikijs/rehype": "^1", - "@shikijs/transformers": "^1", - "@shikijs/twoslash": "^1", - "@tailwindcss/vite": "4.1.15", - "@vanilla-extract/css": "^1.17.4", - "@vanilla-extract/dynamic": "^2.1.5", - "@vanilla-extract/vite-plugin": "^5.1.1", - "@vitejs/plugin-react": "^5.0.4", - "autoprefixer": "^10.4.21", - "cac": "^6.7.14", - "chroma-js": "^3.1.2", - "clsx": "^2.1.1", - "compression": "^1.8.1", - "create-vocs": "^1.0.0-alpha.5", - "cross-spawn": "^7.0.6", - "fs-extra": "^11.3.2", - "hastscript": "^8.0.0", - "hono": "^4.10.3", - "mark.js": "^8.11.1", - "mdast-util-directive": "^3.1.0", - "mdast-util-from-markdown": "^2.0.2", - "mdast-util-frontmatter": "^2.0.1", - "mdast-util-gfm": "^3.1.0", - "mdast-util-mdx": "^3.0.0", - "mdast-util-mdx-jsx": "^3.2.0", - "mdast-util-to-hast": "^13.2.0", - "mdast-util-to-markdown": "^2.1.2", - "minisearch": "^7.2.0", - "nuqs": "^2.7.2", - "ora": "^7.0.1", - "p-limit": "^5.0.0", - "picomatch": "^4.0.3", - "playwright": "^1.52.0", - "postcss": "^8.5.2", - "radix-ui": "^1.1.3", - "react-intersection-observer": "^9.15.1", - "react-router": "^7.9.4", - "rehype-autolink-headings": "^7.1.0", - "rehype-class-names": "^2.0.0", - "rehype-mermaid": "^3.0.0", - "rehype-slug": "^6.0.0", - "remark-directive": "^3.0.1", - "remark-frontmatter": "^5.0.0", - "remark-gfm": "^4.0.1", - "remark-mdx": "^3.1.1", - "remark-mdx-frontmatter": "^5.2.0", - "remark-parse": "^11.0.0", - "serve-static": "^1.16.2", - "shiki": "^1", - "toml": "^3.0.0", - "twoslash": "~0.3.4", - "ua-parser-js": "^1.0.40", - "unified": "^11.0.5", - "unist-util-visit": "^5.0.0", - "vfile-matter": "^5.0.1", - "vite": "^7.1.11", - "yaml": "^2.8.1" - }, - "bin": { - "vocs": "_lib/cli/index.js" - }, - "engines": { - "node": ">=22" - }, - 
"peerDependencies": { - "react": "^19", - "react-dom": "^19" - } - }, - "node_modules/vscode-jsonrpc": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", - "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", - "license": "MIT", - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/vscode-languageserver": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", - "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", - "license": "MIT", - "dependencies": { - "vscode-languageserver-protocol": "3.17.5" - }, - "bin": { - "installServerIntoExtension": "bin/installServerIntoExtension" - } - }, - "node_modules/vscode-languageserver-protocol": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", - "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", - "license": "MIT", - "dependencies": { - "vscode-jsonrpc": "8.2.0", - "vscode-languageserver-types": "3.17.5" - } - }, - "node_modules/vscode-languageserver-textdocument": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", - "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", - "license": "MIT" - }, - "node_modules/vscode-languageserver-types": { - "version": "3.17.5", - "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", - "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", - "license": "MIT" - }, - "node_modules/vscode-uri": { - "version": "3.0.8", - 
"resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", - "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", - "license": "MIT" - }, - "node_modules/web-namespaces": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", - "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - }, - "node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "license": "ISC" - }, - "node_modules/yaml": { - "version": "2.8.2", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", - "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", - "license": "ISC", - "bin": { - "yaml": "bin.mjs" - }, - "engines": { - "node": ">= 14.6" - }, - "funding": { - "url": "https://github.com/sponsors/eemeli" - } - }, - "node_modules/yocto-queue": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz", - "integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==", - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - 
}, - "node_modules/zwitch": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", - "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" - } - } - } -} diff --git a/kona/docs/sidebar.ts b/kona/docs/sidebar.ts deleted file mode 100644 index 54a107226a7..00000000000 --- a/kona/docs/sidebar.ts +++ /dev/null @@ -1,213 +0,0 @@ -import { SidebarItem } from "vocs"; - -export const sidebar: SidebarItem[] = [ - { - text: "Introduction", - items: [ - { text: "Overview", link: "/intro/overview" }, - { text: "Why Kona?", link: "/intro/why" }, - { text: "Contributing", link: "/intro/contributing" }, - { text: "Kona Lore", link: "/intro/lore" } - ] - }, - { - text: "Kona for Node Operators", - items: [ - { text: "System Requirements", link: "/node/requirements" }, - { - text: "Installation", - collapsed: true, - items: [ - { - text: "Prerequisites", - link: "/node/install/overview" - }, - { - text: "Pre-Built Binaries", - link: "/node/install/binaries" - }, - { - text: "Docker", - link: "/node/install/docker" - }, - { - text: "Build from Source", - link: "/node/install/source" - } - ] - }, - { - text: "Run a Node", - items: [ - { - text: "Overview", - link: "/node/run/overview", - }, - { - text: "Binary", - link: "/node/run/binary", - }, - { - text: "Docker", - link: "/node/run/docker", - }, - { - text: "How it Works", - link: "/node/run/mechanics", - } - ] - }, - { - text: "JSON-RPC Reference", - items: [ - { - text: "Overview", - link: "/node/rpc/overview", - }, - { - text: "p2p", - link: "/node/rpc/p2p", - }, - { - text: "rollup", - link: "/node/rpc/rollup", - }, - { - text: "admin", - link: "/node/rpc/admin", - } - ] - }, - { text: "Configuration", link: "/node/configuration" }, - { text: "Kurtosis Integration", link: "/kurtosis/overview" }, - { text: "Monitoring", link: 
"/node/monitoring" }, - { text: "Subcommands", link: "/node/subcommands" }, - { - text: "FAQ", - link: "/node/faq/overview", - collapsed: true, - items: [ - { - text: "Ports", - link: "/node/faq/ports" - }, - { - text: "Profiling", - link: "/node/faq/profiling" - } - ] - } - ] - }, - { - text: "Kona as a Library", - items: [ - { text: "Overview", link: "/sdk/overview" }, - { - text: "Node SDK", - items: [ - { text: "Introduction", link: "/node/design/intro" }, - { text: "Derivation", link: "/node/design/derivation" }, - { text: "Engine", link: "/node/design/engine" }, - { text: "P2P", link: "/node/design/p2p" }, - { text: "Sequencer", link: "/node/design/sequencer" } - ] - }, - { - text: "Proof SDK", - items: [ - { text: "Introduction", link: "/sdk/proof/intro" }, - { text: "FPVM Backend", link: "/sdk/proof/fpvm-backend" }, - { text: "Custom Backend", link: "/sdk/proof/custom-backend" }, - { text: "kona-executor Extensions", link: "/sdk/proof/exec-ext" } - ] - }, - { - text: "Fault Proof Program Development", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/fpp-dev/intro" }, - { text: "Environment", link: "/sdk/fpp-dev/env" }, - { text: "Supported Targets", link: "/sdk/fpp-dev/targets" }, - { text: "Prologue", link: "/sdk/fpp-dev/prologue" }, - { text: "Execution", link: "/sdk/fpp-dev/execution" }, - { text: "Epilogue", link: "/sdk/fpp-dev/epilogue" } - ] - }, - { - text: "Protocol Libraries", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/protocol/intro" }, - { text: "Registry", link: "/sdk/protocol/registry" }, - { text: "Interop", link: "/sdk/protocol/interop" }, - { text: "Hardforks", link: "/sdk/protocol/hardforks" }, - { - text: "Derivation", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/protocol/derive/intro" }, - { text: "Custom Providers", link: "/sdk/protocol/derive/providers" }, - { text: "Stage Swapping", link: "/sdk/protocol/derive/stages" }, - { text: "Signaling", link: 
"/sdk/protocol/derive/signaling" } - ] - }, - { - text: "Genesis", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/protocol/genesis/intro" }, - { text: "Rollup Config", link: "/sdk/protocol/genesis/rollup-config" }, - { text: "System Config", link: "/sdk/protocol/genesis/system-config" } - ] - }, - { - text: "Protocol", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/protocol/protocol/intro" }, - { text: "BlockInfo", link: "/sdk/protocol/protocol/block-info" }, - { text: "L2BlockInfo", link: "/sdk/protocol/protocol/l2-block-info" }, - { text: "Frames", link: "/sdk/protocol/protocol/frames" }, - { text: "Channels", link: "/sdk/protocol/protocol/channels" }, - { text: "Batches", link: "/sdk/protocol/protocol/batches" } - ] - } - ] - }, - { - text: "Examples", - collapsed: true, - items: [ - { text: "Introduction", link: "/sdk/examples/intro" }, - { text: "Load a Rollup Config", link: "/sdk/examples/load-a-rollup-config" }, - { text: "Transform Frames to a Batch", link: "/sdk/examples/frames-to-batch" }, - { text: "Transform a Batch into Frames", link: "/sdk/examples/batch-to-frames" }, - { text: "Create a new L1BlockInfoTx Hardfork Variant", link: "/sdk/examples/new-l1-block-info-tx-hardfork" }, - { text: "Create a new kona-executor test fixture", link: "/sdk/examples/executor-test-fixtures" }, - { text: "Configuring P2P Network Peer Scoring", link: "/sdk/examples/p2p-peer-scoring" }, - { text: "Custom Derivation Pipeline with New Stage", link: "/sdk/examples/custom-derivation-pipeline" }, - { text: "Testing Kona Sequencing with Kurtosis", link: "/sdk/examples/kurtosis-sequencing-test" } - ] - } - ] - }, - { - text: "RFC", - link: "/rfc/active/intro", - items: [ - { - text: "Active RFCs", - items: [ ] - }, - { - text: "Archived RFCs", - collapsed: true, - items: [ - { text: "Umbrellas", link: "/rfc/archived/umbrellas" }, - { text: "Monorepo", link: "/rfc/archived/monorepo" } - ] - } - ] - } -]; diff --git 
a/kona/docs/vocs.config.ts b/kona/docs/vocs.config.ts deleted file mode 100644 index 0338924d974..00000000000 --- a/kona/docs/vocs.config.ts +++ /dev/null @@ -1,57 +0,0 @@ -import { defineConfig } from 'vocs' -import { sidebar } from './sidebar' - -export default defineConfig({ - title: 'Kona', - description: 'Modular, performant, and secure OP Stack infrastructure in Rust', - logoUrl: '/logo.png', - iconUrl: '/logo.png', - ogImageUrl: '/kona-prod.png', - sidebar, - topNav: [ - { text: 'Run', link: '/node/run/overview' }, - { text: 'SDK', link: '/sdk/overview' }, - { text: 'Rustdocs', link: 'https://docs.rs/kona-node/latest/' }, - { text: 'GitHub', link: 'https://github.com/op-rs/kona' }, - { - text: 'v0.1.0', - items: [ - { - text: 'Releases', - link: 'https://github.com/op-rs/kona/releases' - }, - { - text: 'Contributing', - link: 'https://github.com/op-rs/kona/blob/main/CONTRIBUTING.md' - } - ] - } - ], - socials: [ - { - icon: 'github', - link: 'https://github.com/op-rs/kona', - }, - ], - theme: { - accentColor: { - light: '#1f1f1f', - dark: '#ffffff' - } - }, - sponsors: [ - { - name: 'Supporters', - height: 120, - items: [ - [ - { - name: 'OP Labs', - link: 'https://oplabs.co', - image: 'https://avatars.githubusercontent.com/u/109625874?s=200&v=4', - } - ] - ] - } - ] -}) diff --git a/kona/examples/README.md b/kona/examples/README.md deleted file mode 100644 index 0055c80934d..00000000000 --- a/kona/examples/README.md +++ /dev/null @@ -1,17 +0,0 @@ -## Examples - -These examples demonstrate how to work with kona crates. -Some examples are isolated services broken out from OP Stack components. - -To run an example, use the command `cargo run -p <example>`. - -If you have an idea for a new example, [open an issue][issue]. -Otherwise if you already have an example you'd like to add, open a PR! 
- -#### Discovery - - - -<!-- Links --> - -[issue]: https://github.com/op-rs/kona/issues/new diff --git a/kona/justfile b/kona/justfile deleted file mode 100644 index f8ef5b5359c..00000000000 --- a/kona/justfile +++ /dev/null @@ -1,237 +0,0 @@ - -# E2e integration tests for kona. -import "./tests/justfile" -# Builds docker images for kona -import "./docker/apps/justfile" -# Vocs Documentation commands -import "./docs/justfile" - -KONA_ROOT := source_directory() - -set positional-arguments -alias t := tests -alias la := lint-all -alias l := lint-native -alias lint := lint-native -alias f := fmt-native-fix -alias b := build-native -alias h := hack - -# default recipe to display help information -default: - @just --list - -# Build the rollup node in a single command. -build-node: - cargo build --release --bin kona-node - -# Build the supervisor -build-supervisor: - cargo build --release --bin kona-supervisor - -# Run all tests (excluding online tests) -tests: test test-docs - -# Test for the native target with all features. By default, excludes online tests. 
-test *args="-E '!test(test_online)'": - cargo nextest run --release --workspace --all-features {{args}} - just test-custom-embeds - -# Run all online tests -test-online: - just test "-E 'test(test_online)'" - -# Test custom embedded chain configuration functionality -test-custom-embeds: - cargo test --release --package kona-registry custom_chain_is_loaded_when_enabled \ - --config 'env.KONA_CUSTOM_CONFIGS="true"' \ - --config "env.KONA_CUSTOM_CONFIGS_DIR=\"{{justfile_directory()}}/crates/protocol/registry/tests/fixtures/custom\"" \ - --config 'env.KONA_CUSTOM_CONFIGS_TEST="true"' - -# Runs the tests with llvm-cov -llvm-cov-tests: - #!/usr/bin/env bash - # collect coverage of `just test` and `just test-custom-embeds` - cargo llvm-cov nextest --no-report --locked --workspace \ - --all-features \ - --exclude kona-node --exclude kona-p2p --exclude kona-sources \ - --ignore-run-fail --profile ci -E '!test(test_online)' - - cargo llvm-cov nextest --no-report --locked \ - --all-features \ - --ignore-run-fail --profile ci \ - --package kona-registry \ - -E 'test(custom_chain_is_loaded_when_enabled)' \ - --config 'env.KONA_CUSTOM_CONFIGS="true"' \ - --config "env.KONA_CUSTOM_CONFIGS_DIR=\"{{justfile_directory()}}/crates/protocol/registry/tests/fixtures/custom\"" \ - --config 'env.KONA_CUSTOM_CONFIGS_TEST="true"' - - cargo llvm-cov report --lcov --output-path lcov.info - -# Runs benchmarks -benches: - cargo bench --no-run --workspace --features test-utils --exclude example-gossip --exclude example-discovery - -# Lint the workspace for all available targets -lint-all: lint-native lint-cannon lint-asterisc lint-docs lint-typos - -# Check spelling with typos (`cargo install typos-cli`) -lint-typos: - typos - -# Runs `cargo hack check` against the workspace -hack: - cargo hack check --feature-powerset --no-dev-deps - -# Fixes the formatting of the workspace -fmt-native-fix: - cargo +nightly fmt --all - -# Check the formatting of the workspace -fmt-native-check: - cargo +nightly 
fmt --all -- --check - -# Lint the workspace -lint-native: fmt-native-check lint-docs - cargo clippy --workspace --all-features --all-targets -- -D warnings - -# Lint the workspace (mips arch). Currently, only the `kona-std-fpvm` crate is linted for the `cannon` target, as it is the only crate with architecture-specific code. -lint-cannon: - docker run \ - --rm \ - -v {{KONA_ROOT}}/../:/workdir \ - -w="/workdir/kona" \ - ghcr.io/op-rs/kona/cannon-builder:0.3.0 cargo clippy -p kona-std-fpvm --all-features -Zbuild-std=core,alloc -- -D warnings - -# Lint the workspace (risc-v arch). Currently, only the `kona-std-fpvm` crate is linted for the `asterisc` target, as it is the only crate with architecture-specific code. -lint-asterisc: - docker run \ - --rm \ - -v {{KONA_ROOT}}/../:/workdir \ - -w="/workdir/kona" \ - ghcr.io/op-rs/kona/asterisc-builder:0.3.0 cargo clippy -p kona-std-fpvm --all-features -Zbuild-std=core,alloc -- -D warnings - -# Lint the Rust documentation -lint-docs: - RUSTDOCFLAGS="-D warnings" cargo doc --workspace --no-deps --document-private-items - -# Test the Rust documentation -test-docs: - cargo test --doc --workspace --locked - -# Build for the native target -build-native *args='': - #!/usr/bin/env bash - cargo build --workspace $@ - -# Build `kona-client` for the `cannon` target. -build-cannon-client: - docker run \ - --rm \ - -v {{KONA_ROOT}}/../:/workdir \ - -w="/workdir/kona" \ - ghcr.io/op-rs/kona/cannon-builder:0.3.0 cargo build -Zbuild-std=core,alloc -p kona-client --bin kona-client --profile release-client-lto - -# Build `kona-client` for the `asterisc` target. -build-asterisc-client: - docker run \ - --rm \ - -v {{KONA_ROOT}}/../:/workdir \ - -w="/workdir/kona" \ - ghcr.io/op-rs/kona/asterisc-builder:0.3.0 cargo build -Zbuild-std=core,alloc -p kona-client --bin kona-client --profile release-client-lto - -# Check for unused dependencies in the crate graph. 
-check-udeps: - cargo +nightly udeps --release --workspace --all-features --all-targets - - -# Updates the `superchain-registry` git submodule source -source-registry: - @just --justfile ./crates/protocol/registry/justfile source - -# Generate file bindings for super-registry -bind-registry: - @just --justfile ./crates/protocol/registry/justfile bind - -check-no-std: - #!/usr/bin/env bash - no_std_packages=( - # proof crates - kona-executor - kona-mpt - kona-preimage - kona-proof - kona-proof-interop - - # protocol crates - kona-genesis - kona-hardforks - kona-registry - kona-protocol - kona-derive - kona-driver - kona-interop - - # utilities - kona-serde - ) - - for package in "${no_std_packages[@]}"; do - echo "Checking no-std build for: $package" - - cargo build -p $package --target riscv32imac-unknown-none-elf --no-default-features - - if [ $? -ne 0 ]; then - echo "Failed to build no-std for: $package" - exit 1 - fi - - echo "Successfully checked no-std build for: $package" - done - -### TODO(ethereum-optimism/optimism#18654): Remove these recipes once the migration is complete - -build-prestates: build-cannon-prestate build-interop-prestate - -build-cannon-prestate: - @just build-prestate kona-client prestate-artifacts-cannon - -build-interop-prestate: - @just build-prestate kona-client-int prestate-artifacts-cannon-interop - -build-prestate VARIANT OUTPUT_DIR: - #!/usr/bin/env bash - set -euo pipefail - - echo "Building prestate for {{VARIANT}}..." 
- cd "{{KONA_ROOT}}/docker/fpvm-prestates" - CANNON_TAG=$(cat ../../.config/cannon_tag) - just cannon {{VARIANT}} "${CANNON_TAG}" "{{KONA_ROOT}}/{{OUTPUT_DIR}}" - - cd "{{KONA_ROOT}}" - - # Copy with hash-based name for challenger lookup - HASH=$(jq -r .pre "{{OUTPUT_DIR}}/prestate-proof.json") - cp "{{OUTPUT_DIR}}/prestate.bin.gz" "{{OUTPUT_DIR}}/${HASH}.bin.gz" - echo "Prestate for {{VARIANT}}: ${HASH}" - -build-reproducible-prestate: - @just build-prestates - -output-prestate-hash: - @echo "-------------------- Kona Prestates --------------------" - @echo "" - @echo "Cannon Absolute prestate hash:" - @jq -r .pre {{KONA_ROOT}}/prestate-artifacts-cannon/prestate-proof.json - @echo "" - @echo "Cannon Interop Absolute prestate hash:" - @jq -r .pre {{KONA_ROOT}}/prestate-artifacts-cannon-interop/prestate-proof.json - @echo "" - -reproducible-prestate: build-reproducible-prestate output-prestate-hash - -clean: - #!/usr/bin/env bash - set -euo pipefail - rm -rf "{{KONA_ROOT}}/build" - rm -rf "{{KONA_ROOT}}/prestate-artifacts-cannon" "{{KONA_ROOT}}/prestate-artifacts-cannon-interop" diff --git a/kona/rustfmt.toml b/kona/rustfmt.toml deleted file mode 100644 index 68c3c93033d..00000000000 --- a/kona/rustfmt.toml +++ /dev/null @@ -1,11 +0,0 @@ -reorder_imports = true -imports_granularity = "Crate" -use_small_heuristics = "Max" -comment_width = 100 -wrap_comments = true -binop_separator = "Back" -trailing_comma = "Vertical" -trailing_semicolon = false -use_field_init_shorthand = true -format_code_in_doc_comments = true -doc_comment_code_block_width = 100 diff --git a/kona/tests/node/common/init_test.go b/kona/tests/node/common/init_test.go deleted file mode 100644 index af0d44b16b5..00000000000 --- a/kona/tests/node/common/init_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package node - -import ( - "fmt" - "testing" - - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -// TestMain creates 
the test-setups against the shared backend -func TestMain(m *testing.M) { - config := node_utils.ParseL2NodeConfigFromEnv() - - fmt.Printf("Running e2e tests with Config: %d\n", config) - presets.DoMain(m, node_utils.WithMixedOpKona(config)) -} diff --git a/kona/tests/node/long-running/init_test.go b/kona/tests/node/long-running/init_test.go deleted file mode 100644 index 4efdc2319df..00000000000 --- a/kona/tests/node/long-running/init_test.go +++ /dev/null @@ -1,32 +0,0 @@ -package node - -import ( - "flag" - "testing" - - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -var ( - num_threads = flag.Int("num-threads", 10, "number of threads to use for the test") - percentageNewAccounts = flag.Int("percentage-new-accounts", 20, "percentage of new accounts to produce transactions for") - fundAmount = flag.Int("fund-amount", 10, "eth amount to fund each new account with") - initNumAccounts = flag.Int("init-num-accounts", 10, "initial number of accounts to fund") -) - -// TestMain creates the test-setups against the shared backend -func TestMain(m *testing.M) { - flag.Parse() - - presets.DoMain(m, node_utils.WithMixedOpKona(node_utils.L2NodeConfig{ - OpSequencerNodesWithGeth: 0, - OpSequencerNodesWithReth: 0, - KonaSequencerNodesWithGeth: 1, - KonaSequencerNodesWithReth: 0, - OpNodesWithGeth: 1, - OpNodesWithReth: 1, - KonaNodesWithGeth: 1, - KonaNodesWithReth: 1, - })) -} diff --git a/kona/tests/node/reorgs/init_test.go b/kona/tests/node/reorgs/init_test.go deleted file mode 100644 index ec943d524ef..00000000000 --- a/kona/tests/node/reorgs/init_test.go +++ /dev/null @@ -1,18 +0,0 @@ -package reorgs - -import ( - "fmt" - "testing" - - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -// TestMain creates the test-setups against the shared backend -func TestMain(m *testing.M) { - l2Config := 
node_utils.ParseL2NodeConfigFromEnv() - - fmt.Printf("Running e2e reorg tests with Config: %d\n", l2Config) - - presets.DoMain(m, node_utils.WithMixedWithTestSequencer(l2Config)) -} diff --git a/kona/tests/node/restart/init_test.go b/kona/tests/node/restart/init_test.go deleted file mode 100644 index 11763ac7a10..00000000000 --- a/kona/tests/node/restart/init_test.go +++ /dev/null @@ -1,21 +0,0 @@ -package node_restart - -import ( - "fmt" - "testing" - - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -// TestMain creates the test-setups against the shared backend -func TestMain(m *testing.M) { - // Currently, the restart tests only support kona nodes. The op node based configs are not supported (because of req-resp sync incompatibility). - config := node_utils.L2NodeConfig{ - KonaSequencerNodesWithGeth: 1, - KonaNodesWithGeth: 1, - } - - fmt.Printf("Running restart e2e tests with Config: %d\n", config) - presets.DoMain(m, node_utils.WithMixedOpKona(config)) -} diff --git a/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go b/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go deleted file mode 100644 index 4764a31a444..00000000000 --- a/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go +++ /dev/null @@ -1,14 +0,0 @@ -package sysgo - -import ( - "testing" - - spresets "github.com/ethereum-optimism/optimism/kona/tests/supervisor/presets" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -// TestMain creates the test-setups against the shared backend -func TestMain(m *testing.M) { - // Other setups may be added here, hydrated from the same orchestrator - presets.DoMain(m, spresets.WithSimpleInteropMinimal()) -} diff --git a/kona/tests/supervisor/pre_interop/init_test.go b/kona/tests/supervisor/pre_interop/init_test.go deleted file mode 100644 index b32be2f8b75..00000000000 --- a/kona/tests/supervisor/pre_interop/init_test.go +++ /dev/null @@ -1,20 
+0,0 @@ -package preinterop - -// todo: add tests -import ( - "testing" - - spresets "github.com/ethereum-optimism/optimism/kona/tests/supervisor/presets" - "github.com/ethereum-optimism/optimism/op-devstack/presets" -) - -// TestMain creates the test-setups against the shared backend -func TestMain(m *testing.M) { - // sleep to ensure the backend is ready - - presets.DoMain(m, - spresets.WithSimpleInteropMinimal(), - presets.WithSuggestedInteropActivationOffset(30), - presets.WithInteropNotAtGenesis()) - -} diff --git a/kona/typos.toml b/kona/typos.toml deleted file mode 100644 index 85a9527a25a..00000000000 --- a/kona/typos.toml +++ /dev/null @@ -1,82 +0,0 @@ -[files] -extend-exclude = [ - "target", - "Cargo.lock", - "docker/recipes/kona-node-dev/kona-node/bootstores", -] - -[default] -extend-ignore-re = [ - # Base64 encoded strings (common in tests and configs) - "[A-Za-z0-9+/]{20,}={0,2}", -] - -[default.extend-words] -# Valid Rust/Cargo terms -crate = "crate" -crates = "crates" - -# Blockchain/Ethereum specific terms -alloy = "alloy" -anvil = "anvil" -asm = "asm" -asterisc = "asterisc" -batcher = "batcher" -bedrock = "bedrock" -bootnode = "bootnode" -cannon = "cannon" -chainid = "chainid" -codegen = "codegen" -derivation = "derivation" -enr = "enr" -ethereum = "ethereum" -fpvm = "fpvm" -hel = "hel" # Part of hostname bootnode-hetzner-hel -interop = "interop" -kona = "kona" -libmdbx = "libmdbx" -merkle = "merkle" -mips = "mips" -mpsc = "mpsc" -optimism = "optimism" -preimage = "preimage" -revm = "revm" -risc = "risc" -rollup = "rollup" -rpc = "rpc" -sequencer = "sequencer" -ser = "ser" # Serialization abbreviation -serde = "serde" -supervisor = "supervisor" -superchain = "superchain" -trie = "trie" -txs = "txs" # Transactions abbreviation -udeps = "udeps" # Unused dependencies tool -usize = "usize" -workspaces = "workspaces" - -# Technical abbreviations and acronyms -api = "api" -cli = "cli" -cfg = "cfg" -const = "const" -env = "env" -impl = "impl" -io = "io" 
-lru = "lru" -mpt = "mpt" # Merkle Patricia Trie -msg = "msg" -mut = "mut" -nums = "nums" -num = "num" -ok = "ok" -std = "std" -structs = "structs" -ty = "ty" # Type abbreviation -vec = "vec" -typ = "typ" - -# Additional allowed words from typos scan -flate = "flate" # zlib-flate tool name -ratatui = "ratatui" # TUI crate name -superseed = "superseed" # Superseed network name (proper noun) \ No newline at end of file diff --git a/mise.toml b/mise.toml index 6c05be5c0a8..3778a293719 100644 --- a/mise.toml +++ b/mise.toml @@ -1,6 +1,7 @@ [tools] # Core dependencies +bun = "1.2.5" go = "1.24.10" golangci-lint = "2.8.0" gotestsum = "1.12.3" @@ -23,7 +24,7 @@ svm-rs = "0.5.19" # Python dependencies "pipx:slither-analyzer" = "0.10.2" -"pipx:semgrep" = "1.90.0" +"pipx:semgrep" = "1.137.0" "pipx:md_toc" = "9.0.0" # Foundry dependencies @@ -39,7 +40,7 @@ anvil = "1.2.3" codecov-uploader = "0.8.0" goreleaser-pro = "2.11.2" kurtosis = "1.8.1" -op-acceptor = "op-acceptor/v3.8.2" +op-acceptor = "op-acceptor/v3.8.3" git-cliff = "2.12.0" # Fake dependencies @@ -49,7 +50,7 @@ git-cliff = "2.12.0" kontrol = "1.0.90" binary_signer = "1.0.4" -[alias] +[tool_alias] forge = "ubi:foundry-rs/foundry[exe=forge]" cast = "ubi:foundry-rs/foundry[exe=cast]" anvil = "ubi:foundry-rs/foundry[exe=anvil]" diff --git a/op-acceptance-tests/acceptance-tests.yaml b/op-acceptance-tests/acceptance-tests.yaml index cc53f79f22b..0c4652fea05 100644 --- a/op-acceptance-tests/acceptance-tests.yaml +++ b/op-acceptance-tests/acceptance-tests.yaml @@ -49,6 +49,13 @@ gates: - id: flake-shake description: "Quarantine gate for new and potentially flaky tests requiring stability validation." tests: + - package: github.com/ethereum-optimism/optimism/op-acceptance-tests/tests/depreqres/reqressyncdisabled + name: TestUnsafeChainNotStalling_DisabledReqRespSync + timeout: 10m + metadata: + owner: "anton evangelatov" + target_gate: "depreqres" + - id: isthmus description: "Isthmus network tests." 
tests: @@ -114,6 +121,10 @@ gates: - package: github.com/ethereum-optimism/optimism/op-acceptance-tests/tests/interop/loadtest timeout: 10m + - id: depreqres + description: "Deprecate Req/Res CL sync protocol" + tests: + - id: flashblocks inherits: - base diff --git a/op-acceptance-tests/justfile b/op-acceptance-tests/justfile index 3ddb1dcd0f6..14fe5dbbdc6 100644 --- a/op-acceptance-tests/justfile +++ b/op-acceptance-tests/justfile @@ -1,6 +1,6 @@ REPO_ROOT := `realpath ..` # path to the root of the optimism monorepo KURTOSIS_DIR := REPO_ROOT + "/kurtosis-devnet" -ACCEPTOR_VERSION := env_var_or_default("ACCEPTOR_VERSION", "v3.8.2") +ACCEPTOR_VERSION := env_var_or_default("ACCEPTOR_VERSION", "v3.8.3") DOCKER_REGISTRY := env_var_or_default("DOCKER_REGISTRY", "us-docker.pkg.dev/oplabs-tools-artifacts/images") ACCEPTOR_IMAGE := env_var_or_default("ACCEPTOR_IMAGE", DOCKER_REGISTRY + "/op-acceptor:" + ACCEPTOR_VERSION) diff --git a/op-acceptance-tests/tests/batcher/throttling/init_test.go b/op-acceptance-tests/tests/batcher/throttling/init_test.go new file mode 100644 index 00000000000..7bc65344ef9 --- /dev/null +++ b/op-acceptance-tests/tests/batcher/throttling/init_test.go @@ -0,0 +1,33 @@ +package throttling + +import ( + "testing" + "time" + + bss "github.com/ethereum-optimism/optimism/op-batcher/batcher" + batcherConfig "github.com/ethereum-optimism/optimism/op-batcher/config" + "github.com/ethereum-optimism/optimism/op-devstack/compat" + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-devstack/stack" + "github.com/ethereum-optimism/optimism/op-devstack/sysgo" +) + +const blockSizeLimit = 5_000 + +func TestMain(m *testing.M) { + presets.DoMain(m, + presets.WithMinimal(), + presets.WithCompatibleTypes(compat.SysGo), + stack.MakeCommon(sysgo.WithBatcherOption(func(id stack.L2BatcherID, cfg *bss.CLIConfig) { + // Enable throttling with step controller for predictable behavior + cfg.ThrottleConfig.LowerThreshold = 
99 // > 0 enables the throttling loop. + cfg.ThrottleConfig.UpperThreshold = 100 + cfg.ThrottleConfig.ControllerType = batcherConfig.StepControllerType + + cfg.ThrottleConfig.BlockSizeLowerLimit = blockSizeLimit - 1 + cfg.ThrottleConfig.BlockSizeUpperLimit = blockSizeLimit + + cfg.PollInterval = 500 * time.Millisecond // Fast poll for quicker test feedback + })), + ) +} diff --git a/op-acceptance-tests/tests/batcher/throttling/throttling_test.go b/op-acceptance-tests/tests/batcher/throttling/throttling_test.go new file mode 100644 index 00000000000..2aedd64be12 --- /dev/null +++ b/op-acceptance-tests/tests/batcher/throttling/throttling_test.go @@ -0,0 +1,118 @@ +package throttling + +import ( + "context" + "sync" + "testing" + "time" + + "github.com/ethereum-optimism/optimism/op-acceptance-tests/tests/interop/loadtest" + "github.com/ethereum-optimism/optimism/op-core/predeploys" + "github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/dsl" + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-service/bigs" + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-service/txinclude" + "github.com/ethereum-optimism/optimism/op-service/txplan" +) + +// TestDABlockThrottling verifies that the execution client respects the block size limit set via +// miner_setMaxDASize. It spams transactions to saturate block space and asserts that blocks are +// filled to near capacity without exceeding the limit. 
+func TestDABlockThrottling(gt *testing.T) { + t := devtest.SerialT(gt) + sys := presets.NewMinimal(t) + + spamCtx, cancelSpam := context.WithCancel(t.Ctx()) + defer cancelSpam() + spamTxs(spamCtx, sys) + + const minFullSize = blockSizeLimit * 95 / 100 + + l2BlockTime := time.Duration(sys.L2Chain.Escape().RollupConfig().BlockTime) * time.Second + var consecutiveFull uint64 + for { + select { + case <-time.Tick(l2BlockTime): + _, txs, err := sys.L2EL.Escape().EthClient().InfoAndTxsByLabel(t.Ctx(), eth.Unsafe) + t.Require().NoError(err) + + var calldataSize uint64 + for _, tx := range txs { + if tx.IsDepositTx() { + continue + } + calldataSize += bigs.Uint64Strict(tx.RollupCostData().EstimatedDASize()) + } + t.Require().LessOrEqual(calldataSize, uint64(blockSizeLimit)) + + if calldataSize >= minFullSize { + consecutiveFull++ + } else { + consecutiveFull = 0 + } + + if consecutiveFull == 3 { + return + } + case <-t.Ctx().Done(): + t.Require().Fail("Never saw three consecutive blocks near the max size") + } + } +} + +func spamTxs(ctx context.Context, sys *presets.Minimal) { + l2BlockTime := time.Duration(sys.L2Chain.Escape().RollupConfig().BlockTime) * time.Second + + // Fund a lot of spammer EOAs. The funder provided by the devstack isn't very reliable when + // funding lots of different accounts. We fund one account from the faucet and then use that + // account to fund all the others. + const numAccounts = 50 + totalETH := eth.OneEther.Mul(numAccounts) + spammerELClient := txinclude.NewReliableEL(sys.L2EL.Escape().EthClient(), l2BlockTime) + funder := newSyncEOA(sys.FunderL2.NewFundedEOA(totalETH), spammerELClient) + totalETH = totalETH.Sub(totalETH.Div(50)) // Reserve 2% of the balance for gas. 
+ ethPerAccount := totalETH.Div(numAccounts) + var eoas []*loadtest.SyncEOA + var mu sync.Mutex + var wgEOA sync.WaitGroup + for range numAccounts { + wgEOA.Add(1) + go func() { + defer wgEOA.Done() + eoa := sys.Wallet.NewEOA(sys.L2EL) + addr := eoa.Address() + _, err := funder.Include(sys.T, txplan.WithTo(&addr), txplan.WithValue(ethPerAccount)) + sys.T.Require().NoError(err) + + mu.Lock() + defer mu.Unlock() + eoas = append(eoas, newSyncEOA(eoa, spammerELClient)) + }() + } + wgEOA.Wait() + + eoasRR := loadtest.NewRoundRobin(eoas) + spammer := loadtest.SpammerFunc(func(t devtest.T) error { + _, err := eoasRR.Get().Include(t, txplan.WithTo(&predeploys.L1BlockAddr), txplan.WithData(make([]byte, 0)), txplan.WithGasLimit(70_000)) + return err + }) + schedule := loadtest.NewBurst(l2BlockTime, loadtest.WithBaseRPS(50)) + + var wg sync.WaitGroup + wg.Add(1) + sys.T.Cleanup(func() { + wg.Wait() + }) + go func() { + defer wg.Done() + schedule.Run(sys.T.WithCtx(ctx), spammer) + }() +} + +func newSyncEOA(eoa *dsl.EOA, el txinclude.EL) *loadtest.SyncEOA { + signer := txinclude.NewPkSigner(eoa.Key().Priv(), eoa.ChainID().ToBig()) + const maxConcurrentTxs = 16 // Reth's mempool limits the number of txs per account to 16. 
+ return loadtest.NewSyncEOA(txinclude.NewLimit(txinclude.NewPersistent(signer, el), maxConcurrentTxs), eoa.Plan()) +} diff --git a/op-acceptance-tests/tests/interop/proofs/challenger_test.go b/op-acceptance-tests/tests/interop/proofs/challenger_test.go index 084a7733cf2..034538a9833 100644 --- a/op-acceptance-tests/tests/interop/proofs/challenger_test.go +++ b/op-acceptance-tests/tests/interop/proofs/challenger_test.go @@ -25,7 +25,7 @@ func TestChallengerPlaysGame(gt *testing.T) { badClaim := common.HexToHash("0xdeadbeef00000000000000000000000000000000000000000000000000000000") attacker := sys.FunderL1.NewFundedEOA(eth.Ether(15)) dgf := sys.DisputeGameFactory() - game := dgf.StartSuperCannonGame(attacker, proofs.WithSuperRootFrom(eth.Bytes32(badClaim), eth.Bytes32(badClaim))) + game := dgf.StartSuperCannonKonaGame(attacker, proofs.WithSuperRootFrom(eth.Bytes32(badClaim), eth.Bytes32(badClaim))) claim := game.RootClaim() // This is the bad claim from attacker counterClaim := claim.WaitForCounterClaim() // This is the counter-claim from the challenger @@ -47,7 +47,7 @@ func TestChallengerRespondsToMultipleInvalidClaims(gt *testing.T) { attacker := sys.FunderL1.NewFundedEOA(eth.TenEther) dgf := sys.DisputeGameFactory() - game := dgf.StartSuperCannonGame(attacker) + game := dgf.StartSuperCannonKonaGame(attacker) claims := game.PerformMoves(attacker, proofs.Move(0, common.Hash{0x01}, true), proofs.Move(1, common.Hash{0x03}, true), @@ -70,7 +70,7 @@ func TestChallengerRespondsToMultipleInvalidClaimsEOA(gt *testing.T) { dgf := sys.DisputeGameFactory() attacker := dgf.CreateHelperEOA(sys.FunderL1.NewFundedEOA(eth.TenEther)) - game := dgf.StartSuperCannonGame(attacker.EOA) + game := dgf.StartSuperCannonKonaGame(attacker.EOA) claims := attacker.PerformMoves(game.FaultDisputeGame, proofs.Move(0, common.Hash{0x01}, true), proofs.Move(1, common.Hash{0x03}, true), diff --git a/op-acceptance-tests/tests/interop/proofs/fpp/fpp_test.go 
b/op-acceptance-tests/tests/interop/proofs/fpp/fpp_test.go new file mode 100644 index 00000000000..ba4d858b8a7 --- /dev/null +++ b/op-acceptance-tests/tests/interop/proofs/fpp/fpp_test.go @@ -0,0 +1,59 @@ +package fpp + +import ( + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" +) + +func TestFPP(gt *testing.T) { + t := devtest.SerialT(gt) + sys := presets.NewSimpleInterop(t) + + startTimestamp := max(sys.L2ChainA.Escape().RollupConfig().TimestampForBlock(1), sys.L2ChainB.Escape().RollupConfig().TimestampForBlock(1)) + endTimestamp := sys.L2ChainA.Escape().RollupConfig().TimestampForBlock(5) + sys.SuperRoots.AwaitValidatedTimestamp(endTimestamp) + + dgf := sys.DisputeGameFactory() + dgf.RunFPP(startTimestamp, endTimestamp) +} + +func TestNextSuperRootNotFound(gt *testing.T) { + t := devtest.SerialT(gt) + sys := presets.NewSimpleInterop(t) + blockTime := sys.L2ChainA.Escape().RollupConfig().BlockTime + + // Need to setup situation where the next super root is not found but the next block is safe on the first chain, but not safe on the second. + // Wait for at least 1 block to be fully validated on both chains so we have a good starting point. + initTimestamp := max(sys.L2ChainA.Escape().RollupConfig().TimestampForBlock(1), sys.L2ChainB.Escape().RollupConfig().TimestampForBlock(1)) + sys.SuperRoots.AwaitValidatedTimestamp(initTimestamp) + + // Stop the second sequencer so we have a point where new blocks aren't available (and thus no super root is found) + chainBLastBlockHash := sys.L2CLB.StopSequencer() + defer sys.L2CLB.StartSequencer() // Start the sequencer again for other tests. + + chainBLastBlock := sys.L2ELB.BlockRefByHash(chainBLastBlockHash) + + // Wait for data to be fully validated up to the last block on second chain. 
+ sys.SuperRoots.AwaitValidatedTimestamp(chainBLastBlock.Time) + + // Wait for safe head to advance on first chain to be sure the next block is also safe. + sys.L2CLA.Advanced(types.LocalSafe, 1, 10) + + startTimestamp := chainBLastBlock.Time + endTimestamp := startTimestamp + blockTime + + // Verify we have a super root at the last block timestamp + resp := sys.SuperRoots.SuperRootAtTimestamp(startTimestamp) + t.Require().NotNil(resp.Data) + + // But not at the next block + resp = sys.SuperRoots.SuperRootAtTimestamp(endTimestamp) + t.Require().Nil(resp.Data) + + // Run FPP from timestamp of safe head on second chain, to 2 seconds later. + dgf := sys.DisputeGameFactory() + dgf.RunFPP(startTimestamp, endTimestamp) +} diff --git a/op-acceptance-tests/tests/interop/proofs/fpp/init_test.go b/op-acceptance-tests/tests/interop/proofs/fpp/init_test.go new file mode 100644 index 00000000000..5219e3f5cfa --- /dev/null +++ b/op-acceptance-tests/tests/interop/proofs/fpp/init_test.go @@ -0,0 +1,16 @@ +package fpp + +import ( + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-devstack/stack" + "github.com/ethereum-optimism/optimism/op-devstack/sysgo" +) + +func TestMain(m *testing.M) { + presets.DoMain(m, + presets.WithSuperInteropSupernode(), + stack.MakeCommon(sysgo.WithChallengerCannonKonaEnabled()), + ) +} diff --git a/op-acceptance-tests/tests/interop/proofs/init_test.go b/op-acceptance-tests/tests/interop/proofs/init_test.go index f10196b05c0..4ee350a536f 100644 --- a/op-acceptance-tests/tests/interop/proofs/init_test.go +++ b/op-acceptance-tests/tests/interop/proofs/init_test.go @@ -4,8 +4,13 @@ import ( "testing" "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-devstack/stack" + "github.com/ethereum-optimism/optimism/op-devstack/sysgo" ) func TestMain(m *testing.M) { - presets.DoMain(m, presets.WithSuperInteropSupernode()) + presets.DoMain(m, + 
presets.WithSuperInteropSupernode(), + stack.MakeCommon(sysgo.WithChallengerCannonKonaEnabled()), + ) } diff --git a/op-acceptance-tests/tests/interop/proofs/interop_fault_proofs_test.go b/op-acceptance-tests/tests/interop/proofs/interop_fault_proofs_test.go new file mode 100644 index 00000000000..dd26591929f --- /dev/null +++ b/op-acceptance-tests/tests/interop/proofs/interop_fault_proofs_test.go @@ -0,0 +1,15 @@ +package proofs + +import ( + "testing" + + sfp "github.com/ethereum-optimism/optimism/op-acceptance-tests/tests/superfaultproofs" + "github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/presets" +) + +func TestInteropFaultProofs(gt *testing.T) { + t := devtest.SerialT(gt) + sys := presets.NewSimpleInterop(t) + sfp.RunSuperFaultProofTest(t, sys) +} diff --git a/op-acceptance-tests/tests/isthmus/preinterop/challenger_test.go b/op-acceptance-tests/tests/isthmus/preinterop/challenger_test.go index 8612c489d44..30ea9ceb0b0 100644 --- a/op-acceptance-tests/tests/isthmus/preinterop/challenger_test.go +++ b/op-acceptance-tests/tests/isthmus/preinterop/challenger_test.go @@ -25,7 +25,7 @@ func TestChallengerPlaysGame(gt *testing.T) { attacker := sys.FunderL1.NewFundedEOA(eth.Ether(15)) dgf := sys.DisputeGameFactory() - game := dgf.StartSuperCannonGame(attacker, proofs.WithSuperRootFrom(eth.Bytes32(badClaim), eth.Bytes32(badClaim))) + game := dgf.StartSuperCannonKonaGame(attacker, proofs.WithSuperRootFrom(eth.Bytes32(badClaim), eth.Bytes32(badClaim))) claim := game.RootClaim() // This is the bad claim from attacker counterClaim := claim.WaitForCounterClaim() // This is the counter-claim from the challenger diff --git a/op-acceptance-tests/tests/isthmus/preinterop/init_test.go b/op-acceptance-tests/tests/isthmus/preinterop/init_test.go index 9b842c6dadc..6251765104e 100644 --- a/op-acceptance-tests/tests/isthmus/preinterop/init_test.go +++ b/op-acceptance-tests/tests/isthmus/preinterop/init_test.go @@ -4,8 
+4,13 @@ import ( "testing" "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-devstack/stack" + "github.com/ethereum-optimism/optimism/op-devstack/sysgo" ) func TestMain(m *testing.M) { - presets.DoMain(m, presets.WithIsthmusSuperSupernode()) + presets.DoMain(m, + presets.WithIsthmusSuperSupernode(), + stack.MakeCommon(sysgo.WithChallengerCannonKonaEnabled()), + ) } diff --git a/op-acceptance-tests/tests/isthmus/preinterop/interop_fault_proofs_test.go b/op-acceptance-tests/tests/isthmus/preinterop/interop_fault_proofs_test.go new file mode 100644 index 00000000000..e845a3fe0c2 --- /dev/null +++ b/op-acceptance-tests/tests/isthmus/preinterop/interop_fault_proofs_test.go @@ -0,0 +1,15 @@ +package preinterop + +import ( + "testing" + + sfp "github.com/ethereum-optimism/optimism/op-acceptance-tests/tests/superfaultproofs" + "github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/presets" +) + +func TestPreinteropFaultProofs(gt *testing.T) { + t := devtest.SerialT(gt) + sys := presets.NewSimpleInterop(t) + sfp.RunSuperFaultProofTest(t, sys) +} diff --git a/op-acceptance-tests/tests/superfaultproofs/superfaultproofs.go b/op-acceptance-tests/tests/superfaultproofs/superfaultproofs.go new file mode 100644 index 00000000000..47b5e36f0e4 --- /dev/null +++ b/op-acceptance-tests/tests/superfaultproofs/superfaultproofs.go @@ -0,0 +1,440 @@ +package superfaultproofs + +import ( + "math/big" + "os" + "os/exec" + "path/filepath" + "slices" + "time" + + "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/super" + "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/utils" + "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/vm" + challengerTypes "github.com/ethereum-optimism/optimism/op-challenger/game/fault/types" + gameTypes "github.com/ethereum-optimism/optimism/op-challenger/game/types" + 
"github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/dsl" + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-node/rollup" + interopTypes "github.com/ethereum-optimism/optimism/op-program/client/interop/types" + "github.com/ethereum-optimism/optimism/op-service/apis" + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/crypto" +) + +const ( + stepsPerTimestamp = super.StepsPerTimestamp + consolidateStep = stepsPerTimestamp - 1 +) + +// chain bundles the DSL handles for one L2 chain, ordered by chain ID. +type chain struct { + ID eth.ChainID + Cfg *rollup.Config + Rollup apis.RollupClient + EL *dsl.L2ELNode + Batcher *dsl.L2Batcher +} + +// transitionTest describes a single super-root transition test case. +type transitionTest struct { + Name string + AgreedClaim []byte + DisputedClaim []byte + DisputedTraceIndex int64 + L1Head eth.BlockID + ClaimTimestamp uint64 + ExpectValid bool +} + +// orderedChains returns the two interop chains sorted by chain ID. +func orderedChains(sys *presets.SimpleInterop) []*chain { + chains := []*chain{ + {ID: sys.L2ChainA.ChainID(), Cfg: sys.L2ChainA.Escape().RollupConfig(), Rollup: sys.L2CLA.Escape().RollupAPI(), EL: sys.L2ELA, Batcher: sys.L2BatcherA}, + {ID: sys.L2ChainB.ChainID(), Cfg: sys.L2ChainB.Escape().RollupConfig(), Rollup: sys.L2CLB.Escape().RollupAPI(), EL: sys.L2ELB, Batcher: sys.L2BatcherB}, + } + slices.SortFunc(chains, func(a, b *chain) int { return a.ID.Cmp(b.ID) }) + return chains +} + +// nextTimestampAfterSafeHeads returns the next block timestamp after all chains' safe heads. 
+func nextTimestampAfterSafeHeads(t devtest.T, chains []*chain) uint64 { + var ts uint64 + for _, c := range chains { + status, err := c.Rollup.SyncStatus(t.Ctx()) + t.Require().NoError(err) + next := c.Cfg.TimestampForBlock(status.SafeL2.Number + 1) + if next > ts { + ts = next + } + } + t.Require().NotZero(ts, "end timestamp must be non-zero") + return ts +} + +// superRootAtTimestamp constructs a SuperV1 from each chain's output at the given timestamp. +func superRootAtTimestamp(t devtest.T, chains []*chain, timestamp uint64) eth.SuperV1 { + sr := eth.SuperV1{Timestamp: timestamp, Chains: make([]eth.ChainIDAndOutput, len(chains))} + for i, c := range chains { + blockNum, err := c.Cfg.TargetBlockNumber(timestamp) + t.Require().NoError(err) + out, err := c.Rollup.OutputAtBlock(t.Ctx(), blockNum) + t.Require().NoError(err) + sr.Chains[i] = eth.ChainIDAndOutput{ChainID: c.ID, Output: out.OutputRoot} + } + return sr +} + +// optimisticBlockAtTimestamp returns the optimistic block for a single chain at the given timestamp. +func optimisticBlockAtTimestamp(t devtest.T, c *chain, timestamp uint64) interopTypes.OptimisticBlock { + blockNum, err := c.Cfg.TargetBlockNumber(timestamp) + t.Require().NoError(err) + out, err := c.Rollup.OutputAtBlock(t.Ctx(), blockNum) + t.Require().NoError(err) + return interopTypes.OptimisticBlock{BlockHash: out.BlockRef.Hash, OutputRoot: out.OutputRoot} +} + +// marshalTransition serializes a transition state with the given super root, step, and progress. +func marshalTransition(superRoot eth.SuperV1, step uint64, progress ...interopTypes.OptimisticBlock) []byte { + return (&interopTypes.TransitionState{ + SuperRoot: superRoot.Marshal(), + PendingProgress: progress, + Step: step, + }).Marshal() +} + +// latestRequiredL1 returns the latest RequiredL1 across all optimistic outputs, +// i.e. the earliest L1 block at which all chains' data is derivable. 
+func latestRequiredL1(resp eth.SuperRootAtTimestampResponse) eth.BlockID { + var latest eth.BlockID + for _, out := range resp.OptimisticAtTimestamp { + if out.RequiredL1.Number > latest.Number { + latest = out.RequiredL1 + } + } + return latest +} + +// awaitSafeHeadsStalled waits until every node's safe head has stopped advancing +// for at least 10 seconds. +func awaitSafeHeadsStalled(t devtest.T, nodes ...*dsl.L2CLNode) { + var last []eth.BlockID + var stableSince time.Time + t.Require().Eventually(func() bool { + cur := make([]eth.BlockID, len(nodes)) + for i, n := range nodes { + cur[i] = n.SyncStatus().SafeL2.ID() + } + if slices.Equal(cur, last) { + if stableSince.IsZero() { + stableSince = time.Now() + } + return time.Since(stableSince) >= 10*time.Second + } + last = cur + stableSince = time.Time{} + return false + }, 2*time.Minute, 2*time.Second, "safe heads did not stall in time") +} + +// awaitOptimisticPattern polls the supernode until every chain in mustHave has +// optimistic data and every chain in mustMiss does not. +func awaitOptimisticPattern(t devtest.T, sn *dsl.Supernode, timestamp uint64, mustHave, mustMiss []eth.ChainID) eth.SuperRootAtTimestampResponse { + var resp eth.SuperRootAtTimestampResponse + t.Require().Eventually(func() bool { + resp = sn.SuperRootAtTimestamp(timestamp) + for _, id := range mustHave { + if _, has := resp.OptimisticAtTimestamp[id]; !has { + return false + } + } + for _, id := range mustMiss { + if _, has := resp.OptimisticAtTimestamp[id]; has { + return false + } + } + return true + }, 2*time.Minute, 2*time.Second, "timed out waiting for optimistic pattern") + return resp +} + +// runKonaInteropProgram runs the kona interop fault proof program and checks the result. 
+func runKonaInteropProgram(t devtest.T, cfg vm.Config, l1Head common.Hash, agreedPreState []byte, l2Claim common.Hash, claimTimestamp uint64, expectValid bool) { + tmpDir := t.TempDir() + inputs := utils.LocalGameInputs{ + L1Head: l1Head, + AgreedPreState: agreedPreState, + L2Claim: l2Claim, + L2SequenceNumber: new(big.Int).SetUint64(claimTimestamp), + } + + argv, err := vm.NewNativeKonaSuperExecutor().OracleCommand(cfg, tmpDir, inputs) + t.Require().NoError(err) + + exePath, err := filepath.Abs(argv[0]) + t.Require().NoError(err) + cmd := exec.Command(exePath, argv[1:]...) + cmd.Dir = tmpDir + cmd.Env = append(append(cmd.Env, os.Environ()...), "NO_COLOR=1") + + out, runErr := cmd.CombinedOutput() + if expectValid { + t.Require().NoErrorf(runErr, "kona interop program failed:\n%s", string(out)) + return + } + var exitErr *exec.ExitError + t.Require().ErrorAsf(runErr, &exitErr, "expected kona interop program to fail, got: %v\n%s", runErr, string(out)) + t.Require().Equalf(1, exitErr.ExitCode(), "expected exit code 1 for invalid claim, got %d:\n%s", exitErr.ExitCode(), string(out)) +} + +// runChallengerProviderTest verifies the challenger trace provider agrees with the test expectations. 
+func runChallengerProviderTest(t devtest.T, queryAPI apis.SupernodeQueryAPI, gameDepth challengerTypes.Depth, startTimestamp, claimTimestamp uint64, test *transitionTest) { + prestateProvider := super.NewSuperNodePrestateProvider(queryAPI, startTimestamp) + traceProvider := super.NewSuperNodeTraceProvider( + t.Logger().New("role", "challenger-provider"), + prestateProvider, + queryAPI, + test.L1Head, + gameDepth, + startTimestamp, + claimTimestamp, + ) + + var agreedPrestate []byte + var err error + if test.DisputedTraceIndex > 0 { + agreedPrestate, err = traceProvider.GetPreimageBytes(t.Ctx(), challengerTypes.NewPosition(gameDepth, big.NewInt(test.DisputedTraceIndex-1))) + t.Require().NoError(err) + } else { + superRoot, err := traceProvider.AbsolutePreState(t.Ctx()) + t.Require().NoError(err) + agreedPrestate = superRoot.Marshal() + } + t.Require().Equal(test.AgreedClaim, agreedPrestate, "agreed prestate mismatch") + + disputedClaim, err := traceProvider.GetPreimageBytes(t.Ctx(), challengerTypes.NewPosition(gameDepth, big.NewInt(test.DisputedTraceIndex))) + t.Require().NoError(err) + if test.ExpectValid { + t.Require().Equal(test.DisputedClaim, disputedClaim, "valid claim mismatch") + } else { + t.Require().NotEqual(test.DisputedClaim, disputedClaim, "invalid claim unexpectedly matched challenger provider output") + } +} + +// buildTransitionTests constructs the standard set of super-root transition test cases. 
+func buildTransitionTests( + start, end eth.SuperV1, + step1, step2 []byte, + padding func(uint64) []byte, + l1HeadCurrent, l1HeadBefore, l1HeadAfterFirst eth.BlockID, + endTimestamp uint64, +) []*transitionTest { + return []*transitionTest{ + { + Name: "ClaimDirectToNextTimestamp", + AgreedClaim: start.Marshal(), + DisputedClaim: end.Marshal(), + DisputedTraceIndex: 0, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "FirstChainOptimisticBlock", + AgreedClaim: start.Marshal(), + DisputedClaim: step1, + DisputedTraceIndex: 0, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "FirstChainOptimisticBlock-InvalidNoChange", + AgreedClaim: start.Marshal(), + DisputedClaim: start.Marshal(), + DisputedTraceIndex: 0, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "SecondChainOptimisticBlock", + AgreedClaim: step1, + DisputedClaim: step2, + DisputedTraceIndex: 1, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "SecondChainOptimisticBlock-InvalidNoChange", + AgreedClaim: step1, + DisputedClaim: step1, + DisputedTraceIndex: 1, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "FirstPaddingStep", + AgreedClaim: step2, + DisputedClaim: padding(3), + DisputedTraceIndex: 2, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "FirstPaddingStep-InvalidNoChange", + AgreedClaim: step2, + DisputedClaim: step2, + DisputedTraceIndex: 2, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "SecondPaddingStep", + AgreedClaim: padding(3), + DisputedClaim: padding(4), + DisputedTraceIndex: 3, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "SecondPaddingStep-InvalidNoChange", + AgreedClaim: padding(3), + DisputedClaim: padding(3), 
+ DisputedTraceIndex: 3, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "LastPaddingStep", + AgreedClaim: padding(consolidateStep - 1), + DisputedClaim: padding(consolidateStep), + DisputedTraceIndex: consolidateStep - 1, + L1Head: l1HeadCurrent, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "FirstChainReachesL1Head", + AgreedClaim: start.Marshal(), + DisputedClaim: super.InvalidTransition, + DisputedTraceIndex: 0, + L1Head: l1HeadBefore, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "SecondChainReachesL1Head", + AgreedClaim: step1, + DisputedClaim: super.InvalidTransition, + DisputedTraceIndex: 1, + L1Head: l1HeadAfterFirst, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + { + Name: "SuperRootInvalidIfUnsupportedByL1Data", + AgreedClaim: start.Marshal(), + DisputedClaim: step1, + DisputedTraceIndex: 0, + L1Head: l1HeadBefore, + ClaimTimestamp: endTimestamp, + ExpectValid: false, + }, + { + Name: "FromInvalidTransitionHash", + AgreedClaim: super.InvalidTransition, + DisputedClaim: super.InvalidTransition, + DisputedTraceIndex: 2, + L1Head: l1HeadBefore, + ClaimTimestamp: endTimestamp, + ExpectValid: true, + }, + } +} + +// RunSuperFaultProofTest encapsulates the basic super fault proof test flow. +func RunSuperFaultProofTest(t devtest.T, sys *presets.SimpleInterop) { + t.Require().NotNil(sys.SuperRoots, "supernode is required for this test") + + chains := orderedChains(sys) + t.Require().Len(chains, 2, "expected exactly 2 interop chains") + + // -- Stage 1: Freeze batch submission ---------------------------------- + chains[0].Batcher.Stop() + chains[1].Batcher.Stop() + defer func() { + chains[0].Batcher.Start() + chains[1].Batcher.Start() + }() + awaitSafeHeadsStalled(t, sys.L2CLA, sys.L2CLB) + + endTimestamp := nextTimestampAfterSafeHeads(t, chains) + startTimestamp := endTimestamp - 1 + + // Ensure both chains have produced the target blocks as unsafe. 
+ for _, c := range chains { + target, err := c.Cfg.TargetBlockNumber(endTimestamp) + t.Require().NoError(err) + c.EL.Reached(eth.Unsafe, target, 60) + } + + // -- Stage 2: Capture L1 heads at different batch-availability points -- + + // L1 head where neither chain has batch data at endTimestamp. + respBefore := awaitOptimisticPattern(t, sys.SuperRoots, endTimestamp, + nil, []eth.ChainID{chains[0].ID, chains[1].ID}) + l1HeadBefore := respBefore.CurrentL1 + + // L1 head where only the first chain has batch data. + chains[0].Batcher.Start() + respAfterFirst := awaitOptimisticPattern(t, sys.SuperRoots, endTimestamp, + []eth.ChainID{chains[0].ID}, []eth.ChainID{chains[1].ID}) + l1HeadAfterFirst := respAfterFirst.CurrentL1 + chains[0].Batcher.Stop() + + // L1 head where both chains have batch data (fully validated). + chains[1].Batcher.Start() + sys.SuperRoots.AwaitValidatedTimestamp(endTimestamp) + l1HeadCurrent := latestRequiredL1(sys.SuperRoots.SuperRootAtTimestamp(endTimestamp)) + chains[1].Batcher.Stop() + + // --- Stage 3: Build expected transition states -------------------------- + start := superRootAtTimestamp(t, chains, startTimestamp) + end := superRootAtTimestamp(t, chains, endTimestamp) + + firstOptimistic := optimisticBlockAtTimestamp(t, chains[0], endTimestamp) + secondOptimistic := optimisticBlockAtTimestamp(t, chains[1], endTimestamp) + + step1 := marshalTransition(start, 1, firstOptimistic) + step2 := marshalTransition(start, 2, firstOptimistic, secondOptimistic) + padding := func(step uint64) []byte { + return marshalTransition(start, step, firstOptimistic, secondOptimistic) + } + + // --- Stage 4: Transition test cases ------------------------------------ + tests := buildTransitionTests(start, end, step1, step2, padding, + l1HeadCurrent, l1HeadBefore, l1HeadAfterFirst, endTimestamp) + + challengerCfg := sys.L2ChainA.Escape().L2Challengers()[0].Config() + gameDepth := sys.DisputeGameFactory().GameImpl(gameTypes.SuperCannonKonaGameType).SplitDepth() + 
+ for _, test := range tests { + t.Run(test.Name+"-fpp", func(t devtest.T) { + runKonaInteropProgram(t, challengerCfg.CannonKona, test.L1Head.Hash, + test.AgreedClaim, crypto.Keccak256Hash(test.DisputedClaim), + test.ClaimTimestamp, test.ExpectValid) + }) + t.Run(test.Name+"-challenger", func(t devtest.T) { + runChallengerProviderTest(t, sys.SuperRoots.QueryAPI(), gameDepth, startTimestamp, test.ClaimTimestamp, test) + }) + } +} diff --git a/op-acceptance-tests/tests/supernode/interop/activation/activation_after_genesis_test.go b/op-acceptance-tests/tests/supernode/interop/activation/activation_after_genesis_test.go index 4abc6fd1da5..305763e25c2 100644 --- a/op-acceptance-tests/tests/supernode/interop/activation/activation_after_genesis_test.go +++ b/op-acceptance-tests/tests/supernode/interop/activation/activation_after_genesis_test.go @@ -47,7 +47,7 @@ func TestSupernodeInteropActivationAfterGenesis(gt *testing.T) { // Check pre-activation timestamp preActivationResp, err = snClient.SuperRootAtTimestamp(ctx, preActivationTs) if err != nil { - t.Logger().Debug("superroot_atTimestamp error for pre-activation", "timestamp", preActivationTs, "err", err) + t.Logger().Warn("superroot_atTimestamp error for pre-activation", "timestamp", preActivationTs, "err", err) return false } preVerified := preActivationResp.Data != nil @@ -55,12 +55,12 @@ func TestSupernodeInteropActivationAfterGenesis(gt *testing.T) { // Check post-activation timestamp postActivationResp, err = snClient.SuperRootAtTimestamp(ctx, postActivationTs) if err != nil { - t.Logger().Debug("superroot_atTimestamp error for post-activation", "timestamp", postActivationTs, "err", err) + t.Logger().Warn("superroot_atTimestamp error for post-activation", "timestamp", postActivationTs, "err", err) return false } postVerified := postActivationResp.Data != nil - t.Logger().Debug("waiting for both timestamps to be verified", + t.Logger().Info("waiting for both timestamps to be verified", "pre_activation_ts", 
preActivationTs, "pre_verified", preVerified, "post_activation_ts", postActivationTs, diff --git a/op-acceptance-tests/tests/supernode/interop/reorg/init_test.go b/op-acceptance-tests/tests/supernode/interop/reorg/init_test.go new file mode 100644 index 00000000000..8cc9d759996 --- /dev/null +++ b/op-acceptance-tests/tests/supernode/interop/reorg/init_test.go @@ -0,0 +1,15 @@ +package reorg + +import ( + "os" + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" +) + +// TestMain creates an isolated two-L2 setup with a shared supernode that has interop enabled. +// This package tests block invalidation and reorg scenarios that would pollute other tests if run on a shared devnet. +func TestMain(m *testing.M) { + _ = os.Setenv("DEVSTACK_L2CL_KIND", "supernode") + presets.DoMain(m, presets.WithTwoL2SupernodeInterop(0)) +} diff --git a/op-acceptance-tests/tests/supernode/interop/reorg/invalid_message_reorg_test.go b/op-acceptance-tests/tests/supernode/interop/reorg/invalid_message_reorg_test.go new file mode 100644 index 00000000000..8100c53b123 --- /dev/null +++ b/op-acceptance-tests/tests/supernode/interop/reorg/invalid_message_reorg_test.go @@ -0,0 +1,120 @@ +package reorg + +import ( + "errors" + "math/rand" + "testing" + "time" + + "github.com/ethereum/go-ethereum" + "github.com/stretchr/testify/require" + + "github.com/ethereum-optimism/optimism/op-devstack/devtest" + "github.com/ethereum-optimism/optimism/op-devstack/presets" + "github.com/ethereum-optimism/optimism/op-service/bigs" + "github.com/ethereum-optimism/optimism/op-service/eth" +) + +// TestSupernodeInteropInvalidMessageReplacement tests that: +// WHEN: an invalid Executing Message is included in a chain +// THEN: +// - The interop activity detects the invalid block +// - The chain container is told to invalidate the block +// - A reset/rewind is triggered if the chain is using that block +// - A replacement block is built at the same height (deposits-only) +// - The replacement 
block's timestamp eventually becomes verified +func TestSupernodeInteropInvalidMessageReplacement(gt *testing.T) { + + t := devtest.SerialT(gt) + sys := presets.NewTwoL2SupernodeInterop(t, 0) + + ctx := t.Ctx() + + // Create funded EOAs on both chains + alice := sys.FunderA.NewFundedEOA(eth.OneEther) + bob := sys.FunderB.NewFundedEOA(eth.OneEther) + + // Deploy event logger on chain A + eventLoggerA := alice.DeployEventLogger() + + // Sync chains + sys.L2B.CatchUpTo(sys.L2A) + sys.L2A.CatchUpTo(sys.L2B) + + // Pause interop and verify it has stopped + // Uses max local safe timestamp from both chains, pauses at +10, awaits validation at +9 + paused := sys.Supernode.EnsureInteropPaused(sys.L2ACL, sys.L2BCL, 10) + t.Logger().Info("interop paused", "paused", paused) + + rng := rand.New(rand.NewSource(12345)) + + // Send an initiating message on chain A + initTx, initReceipt := alice.SendRandomInitMessage(rng, eventLoggerA, 2, 10) + + t.Logger().Info("initiating message sent on chain A", + "block", initReceipt.BlockNumber, + "hash", initReceipt.BlockHash, + ) + + // Wait for chain B to catch up + sys.L2B.WaitForBlock() + + // Send an INVALID executing message on chain B + _, invalidExecReceipt := bob.SendInvalidExecMessage(initTx, 0) + invalidBlockNumber := bigs.Uint64Strict(invalidExecReceipt.BlockNumber) + invalidBlockHash := invalidExecReceipt.BlockHash + invalidBlockTimestamp := sys.L2B.TimestampForBlockNum(invalidBlockNumber) + t.Logger().Info("invalid executing message sent on chain B", + "block", invalidBlockNumber, + "hash", invalidBlockHash, + "timestamp", invalidBlockTimestamp, + ) + + // Wait for local safety to include the invalid block + require.Eventually(t, func() bool { + numSafe := sys.L2BCL.SyncStatus().LocalSafeL2.Number >= invalidBlockNumber + return numSafe + }, 60*time.Second, time.Second, "invalid block should become locally safe") + + // Resume interop and observe reorg + // Interop activity will proceed and invalidate the block, triggering a 
rewind, and building a replacement block + // We observe resets and replacements, but only proceed on replacement (we may miss reset if it happens quickly) + sys.Supernode.ResumeInterop() + require.Eventually(t, func() bool { + // Check if the block hash at the invalid block number changed or block doesn't exist + // Use the EthClient directly to handle errors (block may not exist after rewind) + currentBlock, err := sys.L2ELB.Escape().EthClient().BlockRefByNumber(ctx, invalidBlockNumber) + if err != nil { + if errors.Is(eth.MaybeAsNotFoundErr(err), ethereum.NotFound) { + t.Logger().Info("RESET DETECTED! Block no longer exists (rewound)", + "block_number", invalidBlockNumber, + ) + } else { + t.Logger().Warn("unexpected error checking block", + "block_number", invalidBlockNumber, + "err", err, + ) + } + } else if currentBlock.Hash != invalidBlockHash { + t.Logger().Info("RESET DETECTED! Block hash changed", + "block_number", invalidBlockNumber, + "old_hash", invalidBlockHash, + "new_hash", currentBlock.Hash, + ) + return true + } + return false + }, 60*time.Second, time.Second, "reset should be detected") + + // Wait for interop to proceed and verify the replacement block at the timestamp + sys.Supernode.AwaitValidatedTimestamp(invalidBlockTimestamp) + + // ASSERTION: The invalid transaction no longer exists in the chain + // The invalid exec message transaction should NOT be in the replacement block + sys.L2ELB.AssertTxNotInBlock(invalidBlockNumber, invalidExecReceipt.TxHash) + + t.Logger().Info("test complete: invalid block was replaced and verified", + "invalid_block_number", invalidBlockNumber, + "invalid_block_hash", invalidBlockHash, + ) +} diff --git a/op-alloy/Cargo.lock b/op-alloy/Cargo.lock deleted file mode 100644 index c79e4f06165..00000000000 --- a/op-alloy/Cargo.lock +++ /dev/null @@ -1,4663 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "alloy-chains" -version = "0.2.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3842d8c52fcd3378039f4703dba392dca8b546b1c8ed6183048f8dab95b2be78" -dependencies = [ - "alloy-primitives", - "num_enum", - "strum", -] - -[[package]] -name = "alloy-consensus" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c3a590d13de3944675987394715f37537b50b856e3b23a0e66e97d963edbf38" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-trie", - "alloy-tx-macros", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "k256", - "once_cell", - "rand 0.8.5", - "secp256k1", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-consensus-any" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f28f769d5ea999f0d8a105e434f483456a15b4e1fcb08edbbbe1650a497ff6d" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "serde", -] - -[[package]] -name = "alloy-eip2124" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "crc", - "rand 0.8.5", - "serde", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-eip2930" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "rand 
0.8.5", - "serde", -] - -[[package]] -name = "alloy-eip7702" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "k256", - "rand 0.8.5", - "serde", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-eips" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09535cbc646b0e0c6fcc12b7597eaed12cf86dff4c4fba9507a61e71b94f30eb" -dependencies = [ - "alloy-eip2124", - "alloy-eip2930", - "alloy-eip7702", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "ethereum_ssz", - "ethereum_ssz_derive", - "serde", - "serde_with", - "sha2", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-json-abi" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84e3cf01219c966f95a460c95f1d4c30e12f6c18150c21a30b768af2a2a29142" -dependencies = [ - "alloy-primitives", - "alloy-sol-type-parser", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-json-rpc" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b626409c98ba43aaaa558361bca21440c88fd30df7542c7484b9c7a1489cdb" -dependencies = [ - "alloy-primitives", - "alloy-sol-types", - "http", - "serde", - "serde_json", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "alloy-network" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89924fdcfeee0e0fa42b1f10af42f92802b5d16be614a70897382565663bf7cf" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-json-rpc", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-any", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-signer", - 
"alloy-sol-types", - "async-trait", - "auto_impl", - "derive_more", - "futures-utils-wasm", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-network-primitives" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0dbe56ff50065713ff8635d8712a0895db3ad7f209db9793ad8fcb6b1734aa" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6a0fb18dd5fb43ec5f0f6a20be1ce0287c79825827de5744afaa6c957737c33" -dependencies = [ - "alloy-rlp", - "arbitrary", - "bytes", - "cfg-if", - "const-hex", - "derive_more", - "foldhash", - "getrandom 0.3.4", - "hashbrown 0.16.1", - "indexmap 2.13.0", - "itoa", - "k256", - "keccak-asm", - "paste", - "proptest", - "proptest-derive 0.6.0", - "rand 0.9.2", - "rapidhash", - "ruint", - "rustc-hash", - "serde", - "sha3", - "tiny-keccak", -] - -[[package]] -name = "alloy-provider" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b56f7a77513308a21a2ba0e9d57785a9d9d2d609e77f4e71a78a1192b83ff2d" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-eth", - "alloy-signer", - "alloy-sol-types", - "alloy-transport", - "async-stream", - "async-trait", - "auto_impl", - "dashmap", - "either", - "futures", - "futures-utils-wasm", - "lru", - "parking_lot", - "pin-project", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rlp" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" -dependencies = [ - 
"alloy-rlp-derive", - "arrayvec", - "bytes", -] - -[[package]] -name = "alloy-rlp-derive" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "alloy-rpc-client" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff01723afc25ec4c5b04de399155bef7b6a96dfde2475492b1b7b4e7a4f46445" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-transport", - "futures", - "pin-project", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rpc-types-any" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212ca1c1dab27f531d3858f8b1a2d6bfb2da664be0c1083971078eb7b71abe4b" -dependencies = [ - "alloy-consensus-any", - "alloy-rpc-types-eth", - "alloy-serde", -] - -[[package]] -name = "alloy-rpc-types-engine" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232f00fcbcd3ee3b9399b96223a8fc884d17742a70a44f9d7cef275f93e6e872" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "rand 0.8.5", - "serde", - "strum", -] - -[[package]] -name = "alloy-rpc-types-eth" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5715d0bf7efbd360873518bd9f6595762136b5327a9b759a8c42ccd9b5e44945" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-sol-types", - "arbitrary", - "itertools 0.14.0", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-serde" -version 
= "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ed8531cae8d21ee1c6571d0995f8c9f0652a6ef6452fde369283edea6ab7138" -dependencies = [ - "alloy-primitives", - "arbitrary", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-signer" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb10ccd49d0248df51063fce6b716f68a315dd912d55b32178c883fd48b4021d" -dependencies = [ - "alloy-primitives", - "async-trait", - "auto_impl", - "either", - "elliptic-curve", - "k256", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-sol-macro" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09eb18ce0df92b4277291bbaa0ed70545d78b02948df756bbd3d6214bf39a218" -dependencies = [ - "alloy-sol-macro-expander", - "alloy-sol-macro-input", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "alloy-sol-macro-expander" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95d9fa2daf21f59aa546d549943f10b5cce1ae59986774019fbedae834ffe01b" -dependencies = [ - "alloy-sol-macro-input", - "const-hex", - "heck", - "indexmap 2.13.0", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", - "syn-solidity", - "tiny-keccak", -] - -[[package]] -name = "alloy-sol-macro-input" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9396007fe69c26ee118a19f4dee1f5d1d6be186ea75b3881adf16d87f8444686" -dependencies = [ - "const-hex", - "dunce", - "heck", - "macro-string", - "proc-macro2", - "quote", - "syn 2.0.114", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-type-parser" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af67a0b0dcebe14244fc92002cd8d96ecbf65db4639d479f5fcd5805755a4c27" -dependencies = [ - "serde", - "winnow", -] - -[[package]] -name = "alloy-sol-types" -version = 
"1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09aeea64f09a7483bdcd4193634c7e5cf9fd7775ee767585270cd8ce2d69dc95" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "alloy-sol-macro", - "serde", -] - -[[package]] -name = "alloy-transport" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f50a9516736d22dd834cc2240e5bf264f338667cc1d9e514b55ec5a78b987ca" -dependencies = [ - "alloy-json-rpc", - "auto_impl", - "base64", - "derive_more", - "futures", - "futures-utils-wasm", - "parking_lot", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tower", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-trie" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "arrayvec", - "derive_arbitrary", - "derive_more", - "nybbles", - "proptest", - "proptest-derive 0.5.1", - "serde", - "smallvec", - "tracing", -] - -[[package]] -name = "alloy-tx-macros" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2289a842d02fe63f8c466db964168bb2c7a9fdfb7b24816dbb17d45520575fb" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" -dependencies = [ - "derive_arbitrary", -] - -[[package]] -name = "arbtest" -version = "0.3.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a3be567977128c0f71ad1462d9624ccda712193d124e944252f0c5789a06d46" -dependencies = [ - "arbitrary", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm 0.4.2", - "ark-ff-macros 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.4.1", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" -dependencies = [ - "ark-ff-asm 0.5.0", - "ark-ff-macros 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "educe", - "itertools 0.13.0", - "num-bigint", - "num-traits", - "paste", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = 
"0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" -dependencies = [ - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-std 0.3.0", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-std 0.4.0", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" -dependencies = [ - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "auto_impl" -version = 
"1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" - -[[package]] -name = "bincode" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" -dependencies = [ - "bincode_derive", - "serde", - "unty", -] - -[[package]] -name = "bincode_derive" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" -dependencies = [ - "virtue", -] - -[[package]] -name = "bit-set" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" - -[[package]] -name = "bitcoin-io" -version = "0.1.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" - -[[package]] -name = "bitcoin_hashes" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" -dependencies = [ - "bitcoin-io", - "hex-conservative", -] - -[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blst" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45" -dependencies = [ - "cc", - "glob", - "threadpool", - "zeroize", -] - -[[package]] -name = "borsh" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" -dependencies = [ - "borsh-derive", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" -dependencies = [ - "once_cell", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "bstr" -version = "1.12.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" -dependencies = [ - "memchr", - "regex-automata", - "serde", -] - -[[package]] -name = "bumpalo" -version = "3.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" - -[[package]] -name = "byte-slice-cast" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" -dependencies = [ - "serde", -] - -[[package]] -name = "c-kzg" -version = "2.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" -dependencies = [ - "arbitrary", - "blst", - "cc", - "glob", - "hex", - "libc", - "once_cell", - "serde", -] - -[[package]] -name = "cc" -version = "1.2.53" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" -dependencies = [ - "find-msvc-tools", - "shlex", -] - -[[package]] -name = "cesu8" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = 
"cfg_aliases" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "chrono" -version = "0.4.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" -dependencies = [ - "iana-time-zone", - "num-traits", - "serde", - "windows-link", -] - -[[package]] -name = "combine" -version = "4.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "console" -version = "0.15.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" -dependencies = [ - "encode_unicode", - "libc", - "once_cell", - "windows-sys 0.59.0", -] - -[[package]] -name = "const-hex" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" -dependencies = [ - "cfg-if", - "cpufeatures", - "proptest", - "serde_core", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "const_format" -version = "0.2.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - "proc-macro2", - "quote", - 
"unicode-xid", -] - -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "core-foundation" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" -dependencies = [ - "generic-array", - "typenum", -] - -[[package]] -name = "darling" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" -dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling_core" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "serde", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" -dependencies = [ - "darling_core 0.20.11", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", - "serde_core", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive_arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_more" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case", - "proc-macro2", - 
"quote", - "rustc_version 0.4.1", - "syn 2.0.114", - "unicode-xid", -] - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest 0.10.7", - "elliptic-curve", - "rfc6979", - "serdect", - "signature", - "spki", -] - -[[package]] -name = "educe" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "either" -version = "1.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -dependencies = [ - "serde", -] - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest 0.10.7", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "encode_unicode" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" - -[[package]] -name = "enum-ordinalize" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" -dependencies = [ - "enum-ordinalize-derive", -] - -[[package]] -name = "enum-ordinalize-derive" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "ethereum_serde_utils" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" -dependencies = [ - "alloy-primitives", - "hex", - "serde", - "serde_derive", - "serde_json", -] - 
-[[package]] -name = "ethereum_ssz" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" -dependencies = [ - "alloy-primitives", - "ethereum_serde_utils", - "itertools 0.13.0", - "serde", - "serde_derive", - "smallvec", - "typenum", -] - -[[package]] -name = "ethereum_ssz_derive" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fastrlp" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "fastrlp" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "ff" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-timer" -version = "3.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" -dependencies = [ - "gloo-timers", - "send_wrapper", -] - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "futures-utils-wasm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" - -[[package]] -name = "generic-array" -version = "0.14.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", - "zeroize", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "libc", - "r-efi", - "wasip2", -] - -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "gloo-net" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" -dependencies = [ - "futures-channel", - "futures-core", - "futures-sink", - "gloo-utils", - "http", - "js-sys", - "pin-project", - "serde", - "serde_json", - "thiserror 1.0.69", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "gloo-utils" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "group" -version = "0.13.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "h2" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http", - "indexmap 2.13.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash", - "serde", - "serde_core", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hex-conservative" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" -dependencies = [ - "arrayvec", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http", - "http-body", - "pin-project-lite", -] - -[[package]] -name = "httparse" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" - -[[package]] -name = "hyper" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" -dependencies = [ - "atomic-waker", - "bytes", - "futures-channel", - "futures-core", - "h2", - "http", - "http-body", - "httparse", - "itoa", - "pin-project-lite", - "pin-utils", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http", - "hyper", - "hyper-util", - "log", - 
"rustls", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" -dependencies = [ - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "hyper", - "libc", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.64" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "icu_collections" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599" -dependencies = [ - "icu_collections", - "icu_normalizer_data", - "icu_properties", - 
"icu_provider", - "smallvec", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a" - -[[package]] -name = "icu_properties" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec" -dependencies = [ - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "arbitrary", - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "jni" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" -dependencies = [ - "cesu8", - "cfg-if", - "combine", - "jni-sys", - "log", - "thiserror 1.0.69", - "walkdir", - "windows-sys 0.45.0", -] - -[[package]] -name = "jni-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" - -[[package]] -name = "js-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "jsonrpsee" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-http-client", - "jsonrpsee-proc-macros", - "jsonrpsee-types", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", - "tracing", -] - -[[package]] -name = "jsonrpsee-client-transport" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" -dependencies = [ - "base64", - "futures-channel", - "futures-util", - "gloo-net", - "http", - "jsonrpsee-core", - "pin-project", - "rustls", - "rustls-pki-types", - "rustls-platform-verifier", - "soketto", - "thiserror 2.0.18", - "tokio", - "tokio-rustls", - "tokio-util", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-core" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" -dependencies = [ - "async-trait", - "bytes", - "futures-timer", - "futures-util", - "http", - "http-body", - "http-body-util", - "jsonrpsee-types", - "parking_lot", - "pin-project", - "rand 0.9.2", - 
"rustc-hash", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tower", - "tracing", - "wasm-bindgen-futures", -] - -[[package]] -name = "jsonrpsee-http-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" -dependencies = [ - "base64", - "http-body", - "hyper", - "hyper-rustls", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "rustls", - "rustls-platform-verifier", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tower", - "url", -] - -[[package]] -name = "jsonrpsee-proc-macros" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da3f8ab5ce1bb124b6d082e62dffe997578ceaf0aeb9f3174a214589dc00f07" -dependencies = [ - "heck", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "jsonrpsee-types" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" -dependencies = [ - "http", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "jsonrpsee-wasm-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower", -] - -[[package]] -name = "jsonrpsee-ws-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" -dependencies = [ - "http", - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower", - "url", -] - -[[package]] -name = "k256" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "once_cell", - "serdect", - "sha2", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "keccak-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "505d1856a39b200489082f90d897c3f07c455563880bc5952e38eabf731c83b6" -dependencies = [ - "digest 0.10.7", - "sha3-asm", -] - -[[package]] -name = "libc" -version = "0.2.180" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" - -[[package]] -name = "libm" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "lru" -version = "0.16.3" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" -dependencies = [ - "hashbrown 0.16.1", -] - -[[package]] -name = "macro-string" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "memchr" -version = "2.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" - -[[package]] -name = "mio" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.61.2", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - 
"hermit-abi", - "libc", -] - -[[package]] -name = "num_enum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "nybbles" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5676b5c379cf5b03da1df2b3061c4a4e2aa691086a56ac923e08c143f53f59" -dependencies = [ - "alloy-rlp", - "arbitrary", - "cfg-if", - "proptest", - "ruint", - "serde", - "smallvec", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" - -[[package]] -name = "op-alloy" -version = "0.23.1" -dependencies = [ - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-jsonrpsee", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-consensus" -version = "0.23.1" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-signer", - "arbitrary", - "bincode", - "derive_more", - "rand 0.9.2", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", - "tokio", -] - -[[package]] -name = "op-alloy-network" -version = "0.23.1" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-eth", - "alloy-signer", - "op-alloy-consensus", - "op-alloy-rpc-types", -] - -[[package]] -name = "op-alloy-provider" -version = "0.23.1" -dependencies 
= [ - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-transport", - "async-trait", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-rpc-jsonrpsee" -version = "0.23.1" -dependencies = [ - "alloy-primitives", - "jsonrpsee", -] - -[[package]] -name = "op-alloy-rpc-types" -version = "0.23.1" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "arbitrary", - "derive_more", - "jsonrpsee", - "op-alloy-consensus", - "rand 0.9.2", - "serde", - "serde_json", - "similar-asserts", - "thiserror 2.0.18", -] - -[[package]] -name = "op-alloy-rpc-types-engine" -version = "0.23.1" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-serde", - "arbitrary", - "arbtest", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "op-alloy-consensus", - "serde", - "serde_json", - "sha2", - "snap", - "thiserror 2.0.18", -] - -[[package]] -name = "openssl-probe" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" - -[[package]] -name = "parity-scale-codec" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" -dependencies = [ - "arrayvec", - "bitvec", - "byte-slice-cast", - "const_format", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "rustversion", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "parking_lot" -version = "0.12.5" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall", - "smallvec", - "windows-link", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pest" -version = "2.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" -dependencies = [ - "memchr", - "ucd-trie", -] - -[[package]] -name = "pin-project" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "potential_utf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint", -] - -[[package]] -name = "proc-macro-crate" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" -dependencies = [ - "toml_edit", -] - -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - 
"proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "proc-macro2" -version = "1.0.105" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "proptest" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" -dependencies = [ - "bit-set", - "bit-vec", - "bitflags", - "num-traits", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", - "unarray", -] - -[[package]] -name = "proptest-derive" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "proptest-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quote" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "serde", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.5", - "serde", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.17", -] - -[[package]] -name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom 0.3.4", - "serde", -] - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core 0.9.5", -] - -[[package]] -name = "rapidhash" -version = "4.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" -dependencies = [ - "rand 0.9.2", - "rustversion", -] - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "regex-automata" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" - -[[package]] -name = "regex-syntax" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "ring" -version = "0.17.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" -dependencies = [ - "cc", - "cfg-if", - "getrandom 0.2.17", - "libc", - "untrusted", - "windows-sys 0.52.0", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "ruint" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" -dependencies = [ - "alloy-rlp", - "arbitrary", - "ark-ff 0.3.0", - "ark-ff 0.4.2", - "ark-ff 0.5.0", - "bytes", - "fastrlp 0.3.1", - "fastrlp 0.4.0", - "num-bigint", - "num-integer", - "num-traits", - "parity-scale-codec", - "primitive-types", - "proptest", - "rand 0.8.5", - "rand 0.9.2", - "rlp", - "ruint-macro", - "serde_core", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" -dependencies = [ - "rand 0.8.5", -] - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver 1.0.27", -] - -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys 0.61.2", -] - -[[package]] -name = "rustls" -version = "0.23.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" -dependencies = [ - "log", - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" -dependencies = [ - "openssl-probe", - "rustls-pki-types", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-pki-types" -version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" -dependencies = [ - "zeroize", -] - -[[package]] -name = "rustls-platform-verifier" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" -dependencies = [ - "core-foundation", - "core-foundation-sys", - "jni", - "log", - "once_cell", - "rustls", - "rustls-native-certs", - "rustls-platform-verifier-android", - "rustls-webpki", - "security-framework", - "security-framework-sys", - "webpki-root-certs 0.26.11", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustls-platform-verifier-android" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" - -[[package]] -name = "rustls-webpki" -version = "0.103.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" -dependencies = [ - 
"ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "rusty-fork" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" -dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - 
"generic-array", - "pkcs8", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "secp256k1" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" -dependencies = [ - "bitcoin_hashes", - "rand 0.8.5", - "secp256k1-sys", - "serde", -] - -[[package]] -name = "secp256k1-sys" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" -dependencies = [ - "cc", -] - -[[package]] -name = "security-framework" -version = "3.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" -dependencies = [ - "bitflags", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser", -] - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" - -[[package]] -name = "semver-parser" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" -dependencies = [ - "pest", -] - -[[package]] -name = "send_wrapper" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "serde_with" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" -dependencies = [ - "base64", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.13.0", - "schemars 0.9.0", - "schemars 1.2.0", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serdect" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" -dependencies = [ - "base16ct", - "serde", -] - -[[package]] -name = "sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "sha3-asm" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c28efc5e327c837aa837c59eae585fc250715ef939ac32881bcc11677cd02d46" -dependencies = [ - "cc", - "cfg-if", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest 0.10.7", - "rand_core 0.6.4", -] - -[[package]] -name = "similar" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" -dependencies = [ - "bstr", - "unicode-segmentation", -] - -[[package]] -name = "similar-asserts" -version = "1.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b5b441962c817e33508847a22bd82f03a30cff43642dc2fae8b050566121eb9a" -dependencies = [ - "console", - "similar", -] - -[[package]] -name = "slab" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -dependencies = [ - "arbitrary", - "serde", -] - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "socket2" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "soketto" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" -dependencies = [ - "base64", - "bytes", - "futures", - "httparse", - "log", - "rand 0.8.5", - "sha1", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = 
"0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn-solidity" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f92d01b5de07eaf324f7fca61cc6bd3d82bbc1de5b6c963e6fe79e86f36580d" -dependencies = [ - "paste", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tempfile" -version = "3.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" -dependencies = [ - "fastrand", - "getrandom 0.3.4", - "once_cell", - "rustix", - "windows-sys 0.61.2", -] - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl 2.0.18", -] - -[[package]] -name = "thiserror-impl" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "threadpool" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" -dependencies = [ - "num_cpus", -] - -[[package]] -name = "time" -version = "0.3.45" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" -dependencies = [ - "deranged", - "itoa", - "num-conv", - "powerfmt", - "serde_core", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" - -[[package]] -name = "time-macros" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tiny-keccak" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9d3793400a45f954c52e73d068316d76b6f4e36977e3fcebb13a2721e80237" -dependencies = [ - "crunchy", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "zerovec", -] - -[[package]] -name = "tokio" -version = "1.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" -dependencies = [ - "bytes", - "libc", - "mio", - "pin-project-lite", - "socket2", - "tokio-macros", - "windows-sys 0.61.2", -] - -[[package]] -name = "tokio-macros" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tokio-rustls" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" -dependencies = 
[ - "rustls", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "tokio-util" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" -dependencies = [ - "serde_core", -] - -[[package]] -name = "toml_edit" -version = "0.23.10+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" -dependencies = [ - "indexmap 2.13.0", - "toml_datetime", - "toml_parser", - "winnow", -] - -[[package]] -name = "toml_parser" -version = "1.0.6+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" -dependencies = [ - "winnow", -] - -[[package]] -name = "tower" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = 
"tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "ucd-trie" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "unty" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", -] - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "valuable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "virtue" -version = 
"0.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" - -[[package]] -name = "wait-timeout" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" -dependencies = [ - "libc", -] - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.58" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" -dependencies = [ - "cfg-if", - "futures-util", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = 
"wasm-bindgen-macro" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn 2.0.114", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasmtimer" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" -dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", -] - -[[package]] -name = "web-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-root-certs" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" -dependencies = [ - "webpki-root-certs 1.0.5", -] - -[[package]] -name = "webpki-root-certs" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "winapi-util" -version = "0.1.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link", - "windows-result", - "windows-strings", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - 
-[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.1", - "windows_aarch64_msvc 0.53.1", - "windows_i686_gnu 0.53.1", - "windows_i686_gnullvm 0.53.1", - "windows_i686_msvc 0.53.1", - "windows_x86_64_gnu 0.53.1", - "windows_x86_64_gnullvm 0.53.1", - "windows_x86_64_msvc 0.53.1", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" -dependencies = [ - "memchr", -] - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "wyz" 
-version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.33" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" 
-dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zmij" -version = "1.0.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65" diff --git a/op-alloy/Cargo.toml b/op-alloy/Cargo.toml deleted file mode 100644 index 8d74193feb5..00000000000 --- a/op-alloy/Cargo.toml +++ /dev/null @@ -1,123 +0,0 @@ -[workspace] -members = ["crates/*"] -resolver = "2" - -[workspace.package] -version = "0.23.1" -edition = "2024" -rust-version = "1.88" -authors = ["Alloy Contributors"] -license = "MIT OR Apache-2.0" -homepage = "https://github.com/alloy-rs/op-alloy" -repository = "https://github.com/alloy-rs/op-alloy" -exclude = ["benches/", "tests/"] - -[workspace.lints.rustdoc] -all = "warn" - -[workspace.lints.rust] -missing-debug-implementations = "warn" -missing-docs = "warn" -unreachable-pub = "warn" -unused-must-use = "deny" 
-rust-2018-idioms = "deny" -unnameable-types = "warn" - -[workspace.lints.clippy] -all = { level = "warn", priority = -1 } -missing-const-for-fn = "warn" -use-self = "warn" -option-if-let-else = "warn" -redundant-clone = "warn" -result_large_err = "allow" - -[workspace.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[workspace.dependencies] -# Workspace -op-alloy-consensus = { version = "0.23.1", path = "crates/consensus", default-features = false } -op-alloy-network = { version = "0.23.1", path = "crates/network", default-features = false } -op-alloy-provider = { version = "0.23.1", path = "crates/provider", default-features = false } -op-alloy-rpc-types = { version = "0.23.1", path = "crates/rpc-types", default-features = false } -op-alloy-rpc-types-engine = { version = "0.23.1", path = "crates/rpc-types-engine", default-features = false } -op-alloy-rpc-jsonrpsee = { version = "0.23.1", path = "crates/rpc-jsonrpsee", default-features = false } - -# Alloy -alloy-eips = { version = "1.1.2", default-features = false } -alloy-serde = { version = "1.1.2", default-features = false } -alloy-signer = { version = "1.1.2", default-features = false } -alloy-network = { version = "1.1.2", default-features = false } -alloy-provider = { version = "1.1.2", default-features = false } -alloy-transport = { version = "1.1.2", default-features = false } -alloy-consensus = { version = "1.1.2", default-features = false } -alloy-rpc-types-eth = { version = "1.1.2", default-features = false } -alloy-rpc-types-engine = { version = "1.1.2", default-features = false } -alloy-network-primitives = { version = "1.1.2", default-features = false } -alloy-json-rpc = { version = "1.1.2", default-features = false } - -# Alloy RLP -alloy-rlp = { version = "0.3", default-features = false } - -# Alloy Core -alloy-sol-types = { version = "1.2.0", default-features = false } -alloy-primitives = { version = "1.2.0", default-features = false } - -# Serde -serde = { version = 
"1.0", default-features = false, features = [ - "derive", - "alloc", -] } -serde_with = "3.12" -serde_json = { version = "1.0", default-features = false, features = ["alloc"] } - -# Encoding -snap = "1.1.1" -bincode = "2.0.1" -ethereum_ssz = "0.9" -ethereum_ssz_derive = "0.9" - -# rpc -jsonrpsee = { version = "0.26", features = [ - "jsonrpsee-core", - "client-core", - "server-core", - "macros", -] } -jsonrpsee-core = "0.26" -jsonrpsee-types = "0.26" - -# misc -async-trait = "0.1.87" -derive_more = { version = "2.0", default-features = false } -thiserror = { version = "2.0", default-features = false } -similar-asserts = "1.7" - -# hashing -sha2 = { version = "0.10", default-features = false } - -# tracing -tracing-subscriber = "0.3.19" -tracing = { version = "0.1.41", default-features = false } - -## misc-testing -arbitrary = { version = "1.4", features = ["derive"] } -arbtest = "0.3" -rand = "0.9" -proptest = "1.6" -proptest-derive = "0.5" -tokio = "1" -rstest = "0.24.0" - -[patch.crates-io] -# alloy-eips = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-serde = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-signer = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-network = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-provider = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-transport = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-consensus = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-rpc-types-eth = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-rpc-types-engine = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } -# alloy-network-primitives = { git = "https://github.com/alloy-rs/alloy", rev = "2390e6cd5" } diff --git a/op-alloy/Justfile b/op-alloy/Justfile deleted file mode 100644 index d9782a7bfde..00000000000 --- 
a/op-alloy/Justfile +++ /dev/null @@ -1,82 +0,0 @@ -set positional-arguments -alias t := tests -alias l := lint -alias f := fmtf -alias b := build -alias h := hack -alias c := check -alias e := examples - -# default recipe to display help information -default: - @just --list - -# Run all tests -tests: test test-docs - -# Test for the native target with optional flags. -test *args='': - cargo nextest run --workspace {{args}} - -# Test the Rust documentation -test-docs: - cargo test --doc --all - -# Lint the workspace for all available targets -lint: lint-native lint-docs - -# Lint the workspace -lint-native: fmt-check lint-docs clippy - -# Checks the workspace with clippy -clippy: - cargo +stable clippy --workspace --all-features --all-targets -- -D warnings - -# Fix clippy warnings across the workspace -clippy-fix: - cargo +stable clippy --workspace --all-features --all-targets --fix --allow-staged --allow-dirty -- -D warnings - -# Check the formatting of the workspace -fmt-check: - cargo +nightly fmt --all -- --check - -# Lint the Rust documentation -lint-docs: - RUSTDOCFLAGS="-D warnings" cargo doc --all --no-deps --document-private-items - -# Fixes the formatting of the workspace -fmtf: - cargo +nightly fmt --all - -# Build for the native target -build *args='': - cargo build --workspace $@ - -# Checks the workspace with a cfg-check -check: - cargo +nightly check -Zcheck-cfg --workspace - -# Runs `cargo hack check` against the workspace -hack: - cargo hack check --feature-powerset --no-dev-deps --exclude op-alloy --workspace - -# Updates the git submodule source -source: - git submodule update --remote - -# Generate file bindings for super-registry -bind: - @just --justfile ./crates/registry/Justfile bind - -# Check no_std compatibility -check-no-std: - rustup target add riscv32imac-unknown-none-elf - cargo check -p op-alloy -p op-alloy-consensus -p op-alloy-rpc-types -p op-alloy-rpc-types-engine --target riscv32imac-unknown-none-elf --no-default-features - -# 
List all available examples and run each one -examples: - example_list=$(cargo build --example 2>&1); \ - example_list=$(echo "$example_list" | tail -n +3 | sed 's/^[ \t]*//;s/[ \t]*$//'); \ - for example in $example_list; do \ - cargo run --example $example; \ - done diff --git a/op-alloy/README.md b/op-alloy/README.md deleted file mode 100644 index 7d9e75dcfcb..00000000000 --- a/op-alloy/README.md +++ /dev/null @@ -1,108 +0,0 @@ -# op-alloy - -<a href="https://github.com/alloy-rs/op-alloy/actions/workflows/ci.yml"><img src="https://github.com/alloy-rs/op-alloy/actions/workflows/ci.yml/badge.svg?label=ci" alt="CI"></a> -<a href="https://github.com/alloy-rs/op-alloy/blob/main/LICENSE-APACHE"><img src="https://img.shields.io/badge/License-APACHE-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://github.com/alloy-rs/op-alloy/blob/main/LICENSE-MIT"><img src="https://img.shields.io/badge/License-MIT-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://github.com/alloy-rs/op-alloy/blob/main/SNAPPY-LICENSE"><img src="https://img.shields.io/badge/License-SNAPPY-d1d1f6.svg?label=license&labelColor=2a2f35" alt="License"></a> -<a href="https://alloy-rs.github.io/op-alloy"><img src="https://img.shields.io/badge/Book-854a15?logo=mdBook&labelColor=2a2f35" alt="Book"></a> - -> [!IMPORTANT] -> **This repository is moving to [ethereum-optimism/optimism](https://github.com/ethereum-optimism/optimism).** -> -> The `alloy-rs/op-alloy` repository will be archived (deprecated). All future development will continue in the new location. Your GitHub contributions will be preserved. - -Built on [Alloy][alloy], op-alloy aggregates the OP stack's unique primitives from [Maili][maili], -to the subset of L1 types used by Optimistic rollups. 
- - -## Usage - -The following crates are provided by `op-alloy`: - -| Crate Name | Description / Purpose | Version | -|-------------|-----------------------------------------|---------| -| [op-alloy-consensus](https://crates.io/crates/op-alloy-consensus) | Handles consensus-related logic | [![version](https://img.shields.io/crates/v/op-alloy-consensus)](https://crates.io/crates/op-alloy-consensus) | -| [op-alloy-network](https://crates.io/crates/op-alloy-network) | Manages networking functionality | [![version](https://img.shields.io/crates/v/op-alloy-network)](https://crates.io/crates/op-alloy-network) | -| [op-alloy-rpc-jsonrpsee](https://crates.io/crates/op-alloy-rpc-jsonrpsee) | RPC implementation using `jsonrpsee` | [![version](https://img.shields.io/crates/v/op-alloy-rpc-jsonrpsee)](https://crates.io/crates/op-alloy-rpc-jsonrpsee) | -| [op-alloy-rpc-types-engine](https://crates.io/crates/op-alloy-rpc-types-engine) | Type definitions specific to RPC engine | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types-engine)](https://crates.io/crates/op-alloy-rpc-types-engine) | -| [op-alloy-rpc-types](https://crates.io/crates/op-alloy-rpc-types) | Shared types used across RPC components | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types)](https://crates.io/crates/op-alloy-rpc-types) | - - - -## Development Status - -`op-alloy` is currently in active development, and is not yet ready for use in production. - - -## Supported Rust Versions (MSRV) - -The current MSRV (minimum supported rust version) is 1.86. - -Unlike Alloy, op-alloy may use the latest stable release, -to benefit from the latest features. - -The MSRV is not increased automatically, and will be updated -only as part of a patch (pre-1.0) or minor (post-1.0) release. - - -## Contributing - -op-alloy is built by open source contributors like you, thank you for improving the project! - -A [contributing guide][contributing] is available that sets guidelines for contributing. 
- -Pull requests will not be merged unless CI passes, so please ensure that your contribution follows the -linting rules and passes clippy. - - -## `no_std` - -op-alloy is intended to be `no_std` compatible, initially for use in [kona][kona]. - -The following crates support `no_std`. -Notice, provider crates do not support `no_std` compatibility. - - -| Crate Name | Description / Purpose | Version | -|----------------------------------------------------------|-----------------------------------------|---------| -| [`op-alloy-consensus`] | Handles consensus-related logic | [![version](https://img.shields.io/crates/v/op-alloy-consensus)](https://crates.io/crates/op-alloy-consensus) | -| [`op-alloy-rpc-types`] | Shared types used across RPC components | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types)](https://crates.io/crates/op-alloy-rpc-types) | -| [`op-alloy-rpc-types-engine`] | RPC types specific to the engine API | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types-engine)](https://crates.io/crates/op-alloy-rpc-types-engine) | - - -If you would like to add no_std support to a crate, -please make sure to update [scripts/check_no_std.sh][check-no-std]. - - -## Credits - -op-alloy is inspired by the work of several teams and projects, most notably [the Alloy project][alloy]. - -This would not be possible without the hard work from open source contributors. Thank you. - - -## License - -Licensed under either of <a href="LICENSE-APACHE">Apache License, Version -2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option. - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in these crates by you, as defined in the Apache-2.0 license, -shall be dual licensed as above, without any additional terms or conditions. 
- - -<!-- Hyperlinks --> - -[check-no-std]: ./scripts/check_no_std.sh - -[maili]: https://github.com/op-rs/maili -[kona]: https://github.com/op-rs/kona -[alloy]: https://github.com/alloy-rs/alloy -[contributing]: https://alloy-rs.github.io/op-alloy - -[`op-alloy-consensus`]: https://crates.io/crates/op-alloy-consensus -[`op-alloy-network`]: https://crates.io/crates/op-alloy-network -[`op-alloy-rpc-jsonrpsee`]: https://crates.io/crates/op-alloy-rpc-jsonrpsee -[`op-alloy-rpc-types-engine`]: https://crates.io/crates/op-alloy-rpc-types-engine -[`op-alloy-rpc-types`]: https://crates.io/crates/op-alloy-rpc-types - diff --git a/op-alloy/book/.gitignore b/op-alloy/book/.gitignore deleted file mode 100644 index 7585238efed..00000000000 --- a/op-alloy/book/.gitignore +++ /dev/null @@ -1 +0,0 @@ -book diff --git a/op-alloy/book/README.md b/op-alloy/book/README.md deleted file mode 100644 index c1614e4ba44..00000000000 --- a/op-alloy/book/README.md +++ /dev/null @@ -1,15 +0,0 @@ -# `op-alloy-book` - -This repository contains the source code for the op-alloy book, which is available at [alloy-rs.github.io/op-alloy](https://alloy-rs.github.io/op-alloy/). 
- -## Contributing - -To build the book locally, a few dependencies are required: -```sh -cargo install mdbook mdbook-mermaid mdbook-template mdbook-alerts -``` - -Then, to run the book locally during development, run: -```sh -mdbook serve -``` diff --git a/op-alloy/book/book.toml b/op-alloy/book/book.toml deleted file mode 100644 index 51fecf7889c..00000000000 --- a/op-alloy/book/book.toml +++ /dev/null @@ -1,21 +0,0 @@ -[book] -authors = ["refcell"] -language = "en" -multilingual = false -src = "src" -title = "The op-alloy Book" - -[preprocessor.mermaid] -command = "mdbook-mermaid" - -[preprocessor.template] - -[preprocessor.alerts] - -[output.html] -default-theme = "ferra" -preferred-dark-theme = "ferra" -git-repository-url = "https://github.com/alloy-rs/op-alloy" -edit-url-template = "https://github.com/alloy-rs/op-alloy/edit/main/book/{path}" -additional-css = ["custom.css"] -additional-js = ["mermaid.min.js", "mermaid-init.js"] diff --git a/op-alloy/book/custom.css b/op-alloy/book/custom.css deleted file mode 100644 index 85ca91df895..00000000000 --- a/op-alloy/book/custom.css +++ /dev/null @@ -1,147 +0,0 @@ -table { - width: 100%; -} - -table thead th { - padding: .75rem; - text-align: left; - font-weight: 500; - line-height: 1.5; - width: auto; -} - -table td { - padding: .75rem; - border: none; -} - -table thead tr { - border: none; - border-bottom: 2px var(--table-border-color) solid; -} - -table tbody tr { - border-bottom: 1px var(--table-border-line) solid; -} - -table tbody tr:nth-child(2n) { - background: unset; -} - -.content h1, -.content h2, -.content h3, -.content h4 { - font-weight: 600; - margin-top: 1.275em; - margin-bottom: .875em; -} - -.ferra { - --bg: #2b292d; - --fg: #fecdb2; - --heading-fg: #fff; - - --sidebar-bg: #383539; - --sidebar-fg: #fecdb2; - --sidebar-non-existent: #feceb454; - --sidebar-active: #ffa07a; - --scrollbar: var(--sidebar-fg); - - --icons: #f6b6c9ba; - --icons-hover: #b7b9cc; - - --links: #ffa07a; - - 
--inline-code-color: #f6b6c9ba; - - --theme-popup-bg: #383539; - --theme-popup-border: #5f5a60; - --theme-hover: rgba(0, 0, 0, .2); - - --quote-bg: #222124; - --quote-border: #2b292d; - - --table-border-color: #383539; - --table-header-bg: hsla(226, 23%, 31%, 0); - --table-alternate-bg: hsl(226, 23%, 14%); - --table-border-line: #383539; - - --searchbar-border-color: #222124; - --searchbar-bg: #222124; - --searchbar-fg: #fecdb2; - --searchbar-shadow-color: #aaa; - --searchresults-header-fg: #fce2d4; - --searchresults-border-color: #feceb454; - --search-mark-bg: #f6b6c9ba; -} - -.ferra .content .header { - color: #fce2d4; -} - -/* highlight.js theme, :where() is used to avoid increasing specificity */ - -:where(.ferra) .hljs { - background: #222124; - color: #feceb4e1; -} - -:where(.ferra) .hljs-comment, -:where(.ferra) .hljs-quote { - color: #6F5D63; -} - -:where(.ferra) .hljs-link, -:where(.ferra) .hljs-meta, -:where(.ferra) .hljs-name, -:where(.ferra) .hljs-regexp, -:where(.ferra) .hljs-selector-class, -:where(.ferra) .hljs-selector-id, -:where(.ferra) .hljs-tag, -:where(.ferra) .hljs-template-variable, -:where(.ferra) .hljs-variable { - color: #fecdb2; -} - -:where(.ferra) .mdbook-callouts-info { - background-color: #f0f0f0; - border-left: 4px solid #3498db; - padding: 1em; - margin: 1em 0; -} - -:where(.ferra) .hljs-built_in, -:where(.ferra) .hljs-deletion, -:where(.ferra) .hljs-literal, -:where(.ferra) .hljs-number, -:where(.ferra) .hljs-params, -:where(.ferra) .hljs-type { - color: #f6b6c9; -} - -:where(.ferra) .hljs-attribute, -:where(.ferra) .hljs-section, -:where(.ferra) .hljs-title { - color: #ffa07a; -} - -:where(.ferra) .hljs-addition, -:where(.ferra) .hljs-bullet, -:where(.ferra) .hljs-string, -:where(.ferra) .hljs-symbol { - color: #b1b695; -} - -:where(.ferra) .hljs-keyword, -:where(.ferra) .hljs-selector-tag { - color: #d1d1e0; -} - -:where(.ferra) .hljs-emphasis { - font-style: italic; -} - -:where(.ferra) .hljs-strong { - font-weight: 700; -} diff 
--git a/op-alloy/book/mermaid-init.js b/op-alloy/book/mermaid-init.js deleted file mode 100644 index 32b53280a12..00000000000 --- a/op-alloy/book/mermaid-init.js +++ /dev/null @@ -1 +0,0 @@ -mermaid.initialize({ startOnLoad: true, theme: 'dark' }); diff --git a/op-alloy/book/mermaid.min.js b/op-alloy/book/mermaid.min.js deleted file mode 100644 index 8c7ea4e4e4e..00000000000 --- a/op-alloy/book/mermaid.min.js +++ /dev/null @@ -1,1282 +0,0 @@ -/* MIT Licensed. Copyright (c) 2014 - 2022 Knut Sveidqvist */ -/* For license information please see https://github.com/mermaid-js/mermaid/blob/v9.2.2/LICENSE */ -(function(jr,wn){typeof exports=="object"&&typeof module<"u"?module.exports=wn():typeof define=="function"&&define.amd?define(wn):(jr=typeof globalThis<"u"?globalThis:jr||self,jr.mermaid=wn())})(this,function(){"use strict";var Ost=Object.defineProperty;var Fst=(jr,wn,fn)=>wn in jr?Ost(jr,wn,{enumerable:!0,configurable:!0,writable:!0,value:fn}):jr[wn]=fn;var vl=(jr,wn,fn)=>(Fst(jr,typeof wn!="symbol"?wn+"":wn,fn),fn);var jr=typeof globalThis<"u"?globalThis:typeof window<"u"?window:typeof global<"u"?global:typeof self<"u"?self:{};function wn(t){var e=t.default;if(typeof e=="function"){var r=function(){return e.apply(this,arguments)};r.prototype=e.prototype}else r={};return Object.defineProperty(r,"__esModule",{value:!0}),Object.keys(t).forEach(function(n){var i=Object.getOwnPropertyDescriptor(t,n);Object.defineProperty(r,n,i.get?i:{enumerable:!0,get:function(){return t[n]}})}),r}function fn(t){throw new Error('Could not dynamically require "'+t+'". 
Please configure the dynamicRequireTargets or/and ignoreDynamicRequires option of @rollup/plugin-commonjs appropriately for this require call to work.')}var y_={exports:{}};(function(t,e){(function(r,n){t.exports=n()})(jr,function(){var r;function n(){return r.apply(null,arguments)}function i(g){return g instanceof Array||Object.prototype.toString.call(g)==="[object Array]"}function a(g){return g!=null&&Object.prototype.toString.call(g)==="[object Object]"}function s(g,E){return Object.prototype.hasOwnProperty.call(g,E)}function o(g){if(Object.getOwnPropertyNames)return Object.getOwnPropertyNames(g).length===0;for(var E in g)if(s(g,E))return;return 1}function l(g){return g===void 0}function u(g){return typeof g=="number"||Object.prototype.toString.call(g)==="[object Number]"}function h(g){return g instanceof Date||Object.prototype.toString.call(g)==="[object Date]"}function d(g,E){for(var I=[],O=g.length,G=0;G<O;++G)I.push(E(g[G],G));return I}function f(g,E){for(var I in E)s(E,I)&&(g[I]=E[I]);return s(E,"toString")&&(g.toString=E.toString),s(E,"valueOf")&&(g.valueOf=E.valueOf),g}function p(g,E,I,O){return Dr(g,E,I,O,!0).utc()}function m(g){return g._pf==null&&(g._pf={empty:!1,unusedTokens:[],unusedInput:[],overflow:-2,charsLeftOver:0,nullInput:!1,invalidEra:null,invalidMonth:null,invalidFormat:!1,userInvalidated:!1,iso:!1,parsedDateParts:[],era:null,meridiem:null,rfc2822:!1,weekdayMismatch:!1}),g._pf}function _(g){if(g._isValid==null){var E=m(g),I=b.call(E.parsedDateParts,function(O){return O!=null}),I=!isNaN(g._d.getTime())&&E.overflow<0&&!E.empty&&!E.invalidEra&&!E.invalidMonth&&!E.invalidWeekday&&!E.weekdayMismatch&&!E.nullInput&&!E.invalidFormat&&!E.userInvalidated&&(!E.meridiem||E.meridiem&&I);if(g._strict&&(I=I&&E.charsLeftOver===0&&E.unusedTokens.length===0&&E.bigHour===void 0),Object.isFrozen!=null&&Object.isFrozen(g))return I;g._isValid=I}return g._isValid}function y(g){var E=p(NaN);return g!=null?f(m(E),g):m(E).userInvalidated=!0,E}var 
b=Array.prototype.some||function(g){for(var E=Object(this),I=E.length>>>0,O=0;O<I;O++)if(O in E&&g.call(this,E[O],O,E))return!0;return!1},x=n.momentProperties=[],k=!1;function T(g,E){var I,O,G,ht=x.length;if(l(E._isAMomentObject)||(g._isAMomentObject=E._isAMomentObject),l(E._i)||(g._i=E._i),l(E._f)||(g._f=E._f),l(E._l)||(g._l=E._l),l(E._strict)||(g._strict=E._strict),l(E._tzm)||(g._tzm=E._tzm),l(E._isUTC)||(g._isUTC=E._isUTC),l(E._offset)||(g._offset=E._offset),l(E._pf)||(g._pf=m(E)),l(E._locale)||(g._locale=E._locale),0<ht)for(I=0;I<ht;I++)l(G=E[O=x[I]])||(g[O]=G);return g}function C(g){T(this,g),this._d=new Date(g._d!=null?g._d.getTime():NaN),this.isValid()||(this._d=new Date(NaN)),k===!1&&(k=!0,n.updateOffset(this),k=!1)}function M(g){return g instanceof C||g!=null&&g._isAMomentObject!=null}function S(g){n.suppressDeprecationWarnings===!1&&typeof console<"u"&&console.warn&&console.warn("Deprecation warning: "+g)}function R(g,E){var I=!0;return f(function(){if(n.deprecationHandler!=null&&n.deprecationHandler(null,g),I){for(var O,G,ht=[],xt=arguments.length,Mt=0;Mt<xt;Mt++){if(O="",typeof arguments[Mt]=="object"){for(G in O+=` -[`+Mt+"] ",arguments[0])s(arguments[0],G)&&(O+=G+": "+arguments[0][G]+", ");O=O.slice(0,-2)}else O=arguments[Mt];ht.push(O)}S(g+` -Arguments: `+Array.prototype.slice.call(ht).join("")+` -`+new Error().stack),I=!1}return E.apply(this,arguments)},E)}var A={};function L(g,E){n.deprecationHandler!=null&&n.deprecationHandler(g,E),A[g]||(S(E),A[g]=!0)}function v(g){return typeof Function<"u"&&g instanceof Function||Object.prototype.toString.call(g)==="[object Function]"}function B(g,E){var I,O=f({},g);for(I in E)s(E,I)&&(a(g[I])&&a(E[I])?(O[I]={},f(O[I],g[I]),f(O[I],E[I])):E[I]!=null?O[I]=E[I]:delete O[I]);for(I in g)s(g,I)&&!s(E,I)&&a(g[I])&&(O[I]=f({},O[I]));return O}function w(g){g!=null&&this.set(g)}n.suppressDeprecationWarnings=!1,n.deprecationHandler=null;var D=Object.keys||function(g){var E,I=[];for(E in g)s(g,E)&&I.push(E);return 
I};function N(g,E,I){var O=""+Math.abs(g);return(0<=g?I?"+":"":"-")+Math.pow(10,Math.max(0,E-O.length)).toString().substr(1)+O}var z=/(\[[^\[]*\])|(\\)?([Hh]mm(ss)?|Mo|MM?M?M?|Do|DDDo|DD?D?D?|ddd?d?|do?|w[o|w]?|W[o|W]?|Qo?|N{1,5}|YYYYYY|YYYYY|YYYY|YY|y{2,4}|yo?|gg(ggg?)?|GG(GGG?)?|e|E|a|A|hh?|HH?|kk?|mm?|ss?|S{1,9}|x|X|zz?|ZZ?|.)/g,X=/(\[[^\[]*\])|(\\)?(LTS|LT|LL?L?L?|l{1,4})/g,ct={},J={};function Y(g,E,I,O){var G=typeof O=="string"?function(){return this[O]()}:O;g&&(J[g]=G),E&&(J[E[0]]=function(){return N(G.apply(this,arguments),E[1],E[2])}),I&&(J[I]=function(){return this.localeData().ordinal(G.apply(this,arguments),g)})}function $(g,E){return g.isValid()?(E=lt(E,g.localeData()),ct[E]=ct[E]||function(I){for(var O,G=I.match(z),ht=0,xt=G.length;ht<xt;ht++)J[G[ht]]?G[ht]=J[G[ht]]:G[ht]=(O=G[ht]).match(/\[[\s\S]/)?O.replace(/^\[|\]$/g,""):O.replace(/\\/g,"");return function(Mt){for(var Vt="",Ot=0;Ot<xt;Ot++)Vt+=v(G[Ot])?G[Ot].call(Mt,I):G[Ot];return Vt}}(E),ct[E](g)):g.localeData().invalidDate()}function lt(g,E){var I=5;function O(G){return E.longDateFormat(G)||G}for(X.lastIndex=0;0<=I&&X.test(g);)g=g.replace(X,O),X.lastIndex=0,--I;return g}var ut={};function W(g,E){var I=g.toLowerCase();ut[I]=ut[I+"s"]=ut[E]=g}function tt(g){return typeof g=="string"?ut[g]||ut[g.toLowerCase()]:void 0}function K(g){var E,I,O={};for(I in g)s(g,I)&&(E=tt(I))&&(O[E]=g[I]);return O}var it={};function Z(g,E){it[g]=E}function V(g){return g%4==0&&g%100!=0||g%400==0}function Q(g){return g<0?Math.ceil(g)||0:Math.floor(g)}function q(E){var E=+E,I=0;return I=E!=0&&isFinite(E)?Q(E):I}function U(g,E){return function(I){return I!=null?(j(this,g,I),n.updateOffset(this,E),this):F(this,g)}}function F(g,E){return g.isValid()?g._d["get"+(g._isUTC?"UTC":"")+E]():NaN}function j(g,E,I){g.isValid()&&!isNaN(I)&&(E==="FullYear"&&V(g.year())&&g.month()===1&&g.date()===29?(I=q(I),g._d["set"+(g._isUTC?"UTC":"")+E](I,g.month(),yt(I,g.month()))):g._d["set"+(g._isUTC?"UTC":"")+E](I))}var 
P=/\d/,fe=/\d\d/,et=/\d{3}/,to=/\d{4}/,os=/[+-]?\d{6}/,at=/\d\d?/,It=/\d\d\d\d?/,Lt=/\d\d\d\d\d\d?/,Rt=/\d{1,3}/,ls=/\d{1,4}/,ss=/[+-]?\d{1,6}/,Ct=/\d+/,pt=/[+-]?\d+/,mt=/Z|[+-]\d\d:?\d\d/gi,vt=/Z|[+-]\d\d(?::?\d\d)?/gi,Tt=/[0-9]{0,256}['a-z\u00A0-\u05FF\u0700-\uD7FF\uF900-\uFDCF\uFDF0-\uFF07\uFF10-\uFFEF]{1,256}|[\u0600-\u06FF\/]{1,256}(\s*?[\u0600-\u06FF]{1,256}){1,2}/i;function ft(g,E,I){Gt[g]=v(E)?E:function(O,G){return O&&I?I:E}}function le(g,E){return s(Gt,g)?Gt[g](E._strict,E._locale):new RegExp(Dt(g.replace("\\","").replace(/\\(\[)|\\(\])|\[([^\]\[]*)\]|\\(.)/g,function(I,O,G,ht,xt){return O||G||ht||xt})))}function Dt(g){return g.replace(/[-\/\\^$*+?.()|[\]{}]/g,"\\$&")}var Gt={},$t={};function Qt(g,E){var I,O,G=E;for(typeof g=="string"&&(g=[g]),u(E)&&(G=function(ht,xt){xt[E]=q(ht)}),O=g.length,I=0;I<O;I++)$t[g[I]]=G}function we(g,E){Qt(g,function(I,O,G,ht){G._w=G._w||{},E(I,G._w,G,ht)})}var jt,Ft=0,zt=1,wt=2,bt=3,Et=4,kt=5,Ut=6,gt=7,he=8;function yt(g,E){if(isNaN(g)||isNaN(E))return NaN;var I=(E%(I=12)+I)%I;return g+=(E-I)/12,I==1?V(g)?29:28:31-I%7%2}jt=Array.prototype.indexOf||function(g){for(var E=0;E<this.length;++E)if(this[E]===g)return E;return-1},Y("M",["MM",2],"Mo",function(){return this.month()+1}),Y("MMM",0,0,function(g){return this.localeData().monthsShort(this,g)}),Y("MMMM",0,0,function(g){return this.localeData().months(this,g)}),W("month","M"),Z("month",8),ft("M",at),ft("MM",at,fe),ft("MMM",function(g,E){return E.monthsShortRegex(g)}),ft("MMMM",function(g,E){return E.monthsRegex(g)}),Qt(["M","MM"],function(g,E){E[zt]=q(g)-1}),Qt(["MMM","MMMM"],function(g,E,I,O){O=I._locale.monthsParse(g,O,I._strict),O!=null?E[zt]=O:m(I).invalidMonth=g});var ne="January_February_March_April_May_June_July_August_September_October_November_December".split("_"),ve="Jan_Feb_Mar_Apr_May_Jun_Jul_Aug_Sep_Oct_Nov_Dec".split("_"),ye=/D[oD]?(\[[^\[\]]*\]|\s)+MMMM?/,be=Tt,Te=Tt;function Wt(g,E){var I;if(g.isValid()){if(typeof E=="string"){if(/^\d+$/.test(E))E=q(E);else 
if(!u(E=g.localeData().monthsParse(E)))return}I=Math.min(g.date(),yt(g.year(),E)),g._d["set"+(g._isUTC?"UTC":"")+"Month"](E,I)}}function se(g){return g!=null?(Wt(this,g),n.updateOffset(this,!0),this):F(this,"Month")}function me(){function g(xt,Mt){return Mt.length-xt.length}for(var E,I=[],O=[],G=[],ht=0;ht<12;ht++)E=p([2e3,ht]),I.push(this.monthsShort(E,"")),O.push(this.months(E,"")),G.push(this.months(E,"")),G.push(this.monthsShort(E,""));for(I.sort(g),O.sort(g),G.sort(g),ht=0;ht<12;ht++)I[ht]=Dt(I[ht]),O[ht]=Dt(O[ht]);for(ht=0;ht<24;ht++)G[ht]=Dt(G[ht]);this._monthsRegex=new RegExp("^("+G.join("|")+")","i"),this._monthsShortRegex=this._monthsRegex,this._monthsStrictRegex=new RegExp("^("+O.join("|")+")","i"),this._monthsShortStrictRegex=new RegExp("^("+I.join("|")+")","i")}function ue(g){return V(g)?366:365}Y("Y",0,0,function(){var g=this.year();return g<=9999?N(g,4):"+"+g}),Y(0,["YY",2],0,function(){return this.year()%100}),Y(0,["YYYY",4],0,"year"),Y(0,["YYYYY",5],0,"year"),Y(0,["YYYYYY",6,!0],0,"year"),W("year","y"),Z("year",1),ft("Y",pt),ft("YY",at,fe),ft("YYYY",ls,to),ft("YYYYY",ss,os),ft("YYYYYY",ss,os),Qt(["YYYYY","YYYYYY"],Ft),Qt("YYYY",function(g,E){E[Ft]=g.length===2?n.parseTwoDigitYear(g):q(g)}),Qt("YY",function(g,E){E[Ft]=n.parseTwoDigitYear(g)}),Qt("Y",function(g,E){E[Ft]=parseInt(g,10)}),n.parseTwoDigitYear=function(g){return q(g)+(68<q(g)?1900:2e3)};var N0=U("FullYear",!0);function _a(g,E,I,O,G,ht,xt){var Mt;return g<100&&0<=g?(Mt=new Date(g+400,E,I,O,G,ht,xt),isFinite(Mt.getFullYear())&&Mt.setFullYear(g)):Mt=new Date(g,E,I,O,G,ht,xt),Mt}function Hr(g){var E;return g<100&&0<=g?((E=Array.prototype.slice.call(arguments))[0]=g+400,E=new Date(Date.UTC.apply(null,E)),isFinite(E.getUTCFullYear())&&E.setUTCFullYear(g)):E=new Date(Date.UTC.apply(null,arguments)),E}function Ie(g,E,I){return I=7+E-I,I-(7+Hr(g,0,I).getUTCDay()-E)%7-1}function oe(g,xt,Mt,O,G){var 
ht,xt=1+7*(xt-1)+(7+Mt-O)%7+Ie(g,O,G),Mt=xt<=0?ue(ht=g-1)+xt:xt>ue(g)?(ht=g+1,xt-ue(g)):(ht=g,xt);return{year:ht,dayOfYear:Mt}}function Ke(g,E,I){var O,G,ht=Ie(g.year(),E,I),ht=Math.floor((g.dayOfYear()-ht-1)/7)+1;return ht<1?O=ht+wr(G=g.year()-1,E,I):ht>wr(g.year(),E,I)?(O=ht-wr(g.year(),E,I),G=g.year()+1):(G=g.year(),O=ht),{week:O,year:G}}function wr(g,G,I){var O=Ie(g,G,I),G=Ie(g+1,G,I);return(ue(g)-O+G)/7}Y("w",["ww",2],"wo","week"),Y("W",["WW",2],"Wo","isoWeek"),W("week","w"),W("isoWeek","W"),Z("week",5),Z("isoWeek",5),ft("w",at),ft("ww",at,fe),ft("W",at),ft("WW",at,fe),we(["w","ww","W","WW"],function(g,E,I,O){E[O.substr(0,1)]=q(g)});function Ge(g,E){return g.slice(E,7).concat(g.slice(0,E))}Y("d",0,"do","day"),Y("dd",0,0,function(g){return this.localeData().weekdaysMin(this,g)}),Y("ddd",0,0,function(g){return this.localeData().weekdaysShort(this,g)}),Y("dddd",0,0,function(g){return this.localeData().weekdays(this,g)}),Y("e",0,0,"weekday"),Y("E",0,0,"isoWeekday"),W("day","d"),W("weekday","e"),W("isoWeekday","E"),Z("day",11),Z("weekday",11),Z("isoWeekday",11),ft("d",at),ft("e",at),ft("E",at),ft("dd",function(g,E){return E.weekdaysMinRegex(g)}),ft("ddd",function(g,E){return E.weekdaysShortRegex(g)}),ft("dddd",function(g,E){return E.weekdaysRegex(g)}),we(["dd","ddd","dddd"],function(g,E,I,O){O=I._locale.weekdaysParse(g,O,I._strict),O!=null?E.d=O:m(I).invalidWeekday=g}),we(["d","e","E"],function(g,E,I,O){E[O]=q(g)});var Ze="Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),qt="Sun_Mon_Tue_Wed_Thu_Fri_Sat".split("_"),st="Su_Mo_Tu_We_Th_Fr_Sa".split("_"),At=Tt,Nt=Tt,Jt=Tt;function ze(){function g(Ot,de){return de.length-Ot.length}for(var E,I,O,G=[],ht=[],xt=[],Mt=[],Vt=0;Vt<7;Vt++)O=p([2e3,1]).day(Vt),E=Dt(this.weekdaysMin(O,"")),I=Dt(this.weekdaysShort(O,"")),O=Dt(this.weekdays(O,"")),G.push(E),ht.push(I),xt.push(O),Mt.push(E),Mt.push(I),Mt.push(O);G.sort(g),ht.sort(g),xt.sort(g),Mt.sort(g),this._weekdaysRegex=new 
RegExp("^("+Mt.join("|")+")","i"),this._weekdaysShortRegex=this._weekdaysRegex,this._weekdaysMinRegex=this._weekdaysRegex,this._weekdaysStrictRegex=new RegExp("^("+xt.join("|")+")","i"),this._weekdaysShortStrictRegex=new RegExp("^("+ht.join("|")+")","i"),this._weekdaysMinStrictRegex=new RegExp("^("+G.join("|")+")","i")}function Pe(){return this.hours()%12||12}function qe(g,E){Y(g,0,0,function(){return this.localeData().meridiem(this.hours(),this.minutes(),E)})}function Tr(g,E){return E._meridiemParse}Y("H",["HH",2],0,"hour"),Y("h",["hh",2],0,Pe),Y("k",["kk",2],0,function(){return this.hours()||24}),Y("hmm",0,0,function(){return""+Pe.apply(this)+N(this.minutes(),2)}),Y("hmmss",0,0,function(){return""+Pe.apply(this)+N(this.minutes(),2)+N(this.seconds(),2)}),Y("Hmm",0,0,function(){return""+this.hours()+N(this.minutes(),2)}),Y("Hmmss",0,0,function(){return""+this.hours()+N(this.minutes(),2)+N(this.seconds(),2)}),qe("a",!0),qe("A",!1),W("hour","h"),Z("hour",13),ft("a",Tr),ft("A",Tr),ft("H",at),ft("h",at),ft("k",at),ft("HH",at,fe),ft("hh",at,fe),ft("kk",at,fe),ft("hmm",It),ft("hmmss",Lt),ft("Hmm",It),ft("Hmmss",Lt),Qt(["H","HH"],bt),Qt(["k","kk"],function(g,E,I){g=q(g),E[bt]=g===24?0:g}),Qt(["a","A"],function(g,E,I){I._isPm=I._locale.isPM(g),I._meridiem=g}),Qt(["h","hh"],function(g,E,I){E[bt]=q(g),m(I).bigHour=!0}),Qt("hmm",function(g,E,I){var O=g.length-2;E[bt]=q(g.substr(0,O)),E[Et]=q(g.substr(O)),m(I).bigHour=!0}),Qt("hmmss",function(g,E,I){var O=g.length-4,G=g.length-2;E[bt]=q(g.substr(0,O)),E[Et]=q(g.substr(O,2)),E[kt]=q(g.substr(G)),m(I).bigHour=!0}),Qt("Hmm",function(g,E,I){var O=g.length-2;E[bt]=q(g.substr(0,O)),E[Et]=q(g.substr(O))}),Qt("Hmmss",function(g,E,I){var O=g.length-4,G=g.length-2;E[bt]=q(g.substr(0,O)),E[Et]=q(g.substr(O,2)),E[kt]=q(g.substr(G))}),Tt=U("Hours",!0);var Ve,va={calendar:{sameDay:"[Today at] LT",nextDay:"[Tomorrow at] LT",nextWeek:"dddd [at] LT",lastDay:"[Yesterday at] LT",lastWeek:"[Last] dddd [at] 
LT",sameElse:"L"},longDateFormat:{LTS:"h:mm:ss A",LT:"h:mm A",L:"MM/DD/YYYY",LL:"MMMM D, YYYY",LLL:"MMMM D, YYYY h:mm A",LLLL:"dddd, MMMM D, YYYY h:mm A"},invalidDate:"Invalid date",ordinal:"%d",dayOfMonthOrdinalParse:/\d{1,2}/,relativeTime:{future:"in %s",past:"%s ago",s:"a few seconds",ss:"%d seconds",m:"a minute",mm:"%d minutes",h:"an hour",hh:"%d hours",d:"a day",dd:"%d days",w:"a week",ww:"%d weeks",M:"a month",MM:"%d months",y:"a year",yy:"%d years"},months:ne,monthsShort:ve,week:{dow:0,doy:6},weekdays:Ze,weekdaysMin:st,weekdaysShort:qt,meridiemParse:/[ap]\.?m?\.?/i},Ce={},Wi={};function E0(g){return g&&g.toLowerCase().replace("_","-")}function _u(g){for(var E,I,O,G,ht=0;ht<g.length;){for(E=(G=E0(g[ht]).split("-")).length,I=(I=E0(g[ht+1]))?I.split("-"):null;0<E;){if(O=Ln(G.slice(0,E).join("-")))return O;if(I&&I.length>=E&&function(xt,Mt){for(var Vt=Math.min(xt.length,Mt.length),Ot=0;Ot<Vt;Ot+=1)if(xt[Ot]!==Mt[Ot])return Ot;return Vt}(G,I)>=E-1)break;E--}ht++}return Ve}function Ln(g){var E;if(Ce[g]===void 0&&!0&&t&&t.exports&&g.match("^[^/\\\\]*$")!=null)try{E=Ve._abbr,fn("./locale/"+g),Xt(E)}catch{Ce[g]=null}return Ce[g]}function Xt(g,E){return g&&((E=l(E)?ce(g):ee(g,E))?Ve=E:typeof console<"u"&&console.warn&&console.warn("Locale "+g+" not found. Did you forget to load it?")),Ve._abbr}function ee(g,E){if(E===null)return delete Ce[g],null;var I,O=va;if(E.abbr=g,Ce[g]!=null)L("defineLocaleOverride","use moment.updateLocale(localeName, config) to change an existing locale. 
moment.defineLocale(localeName, config) should only be used for creating a new locale See http://momentjs.com/guides/#/warnings/define-locale/ for more info."),O=Ce[g]._config;else if(E.parentLocale!=null)if(Ce[E.parentLocale]!=null)O=Ce[E.parentLocale]._config;else{if((I=Ln(E.parentLocale))==null)return Wi[E.parentLocale]||(Wi[E.parentLocale]=[]),Wi[E.parentLocale].push({name:g,config:E}),null;O=I._config}return Ce[g]=new w(B(O,E)),Wi[g]&&Wi[g].forEach(function(G){ee(G.name,G.config)}),Xt(g),Ce[g]}function ce(g){var E;if(!(g=g&&g._locale&&g._locale._abbr?g._locale._abbr:g))return Ve;if(!i(g)){if(E=Ln(g))return E;g=[g]}return _u(g)}function Pt(g){var E=g._a;return E&&m(g).overflow===-2&&(E=E[zt]<0||11<E[zt]?zt:E[wt]<1||E[wt]>yt(E[Ft],E[zt])?wt:E[bt]<0||24<E[bt]||E[bt]===24&&(E[Et]!==0||E[kt]!==0||E[Ut]!==0)?bt:E[Et]<0||59<E[Et]?Et:E[kt]<0||59<E[kt]?kt:E[Ut]<0||999<E[Ut]?Ut:-1,m(g)._overflowDayOfYear&&(E<Ft||wt<E)&&(E=wt),m(g)._overflowWeeks&&E===-1&&(E=gt),m(g)._overflowWeekday&&E===-1&&(E=he),m(g).overflow=E),g}var je=/^\s*((?:[+-]\d{6}|\d{4})-(?:\d\d-\d\d|W\d\d-\d|W\d\d|\d\d\d|\d\d))(?:(T| )(\d\d(?::\d\d(?::\d\d(?:[.,]\d+)?)?)?)([+-]\d\d(?::?\d\d)?|\s*Z)?)?$/,rt=/^\s*((?:[+-]\d{6}|\d{4})(?:\d\d\d\d|W\d\d\d|W\d\d|\d\d\d|\d\d|))(?:(T| 
)(\d\d(?:\d\d(?:\d\d(?:[.,]\d+)?)?)?)([+-]\d\d(?::?\d\d)?|\s*Z)?)?$/,Ks=/Z|[+-]\d\d(?::?\d\d)?/,ot=[["YYYYYY-MM-DD",/[+-]\d{6}-\d\d-\d\d/],["YYYY-MM-DD",/\d{4}-\d\d-\d\d/],["GGGG-[W]WW-E",/\d{4}-W\d\d-\d/],["GGGG-[W]WW",/\d{4}-W\d\d/,!1],["YYYY-DDD",/\d{4}-\d{3}/],["YYYY-MM",/\d{4}-\d\d/,!1],["YYYYYYMMDD",/[+-]\d{10}/],["YYYYMMDD",/\d{8}/],["GGGG[W]WWE",/\d{4}W\d{3}/],["GGGG[W]WW",/\d{4}W\d{2}/,!1],["YYYYDDD",/\d{7}/],["YYYYMM",/\d{6}/,!1],["YYYY",/\d{4}/,!1]],Gr=[["HH:mm:ss.SSSS",/\d\d:\d\d:\d\d\.\d+/],["HH:mm:ss,SSSS",/\d\d:\d\d:\d\d,\d+/],["HH:mm:ss",/\d\d:\d\d:\d\d/],["HH:mm",/\d\d:\d\d/],["HHmmss.SSSS",/\d\d\d\d\d\d\.\d+/],["HHmmss,SSSS",/\d\d\d\d\d\d,\d+/],["HHmmss",/\d\d\d\d\d\d/],["HHmm",/\d\d\d\d/],["HH",/\d\d/]],C0=/^\/?Date\((-?\d+)/i,l_=/^(?:(Mon|Tue|Wed|Thu|Fri|Sat|Sun),?\s)?(\d{1,2})\s(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s(\d{2,4})\s(\d\d):(\d\d)(?::(\d\d))?\s(?:(UT|GMT|[ECMP][SD]T)|([Zz])|([+-]\d{4}))$/,S0={UT:0,GMT:0,EDT:-240,EST:-300,CDT:-300,CST:-360,MDT:-360,MST:-420,PDT:-420,PST:-480};function A0(g){var E,I,O,G,ht,xt,Vt=g._i,Mt=je.exec(Vt)||rt.exec(Vt),Vt=ot.length,Ot=Gr.length;if(Mt){for(m(g).iso=!0,E=0,I=Vt;E<I;E++)if(ot[E][1].exec(Mt[1])){G=ot[E][0],O=ot[E][2]!==!1;break}if(G==null)g._isValid=!1;else{if(Mt[3]){for(E=0,I=Ot;E<I;E++)if(Gr[E][1].exec(Mt[3])){ht=(Mt[2]||" ")+Gr[E][0];break}if(ht==null)return void(g._isValid=!1)}if(O||ht==null){if(Mt[4]){if(!Ks.exec(Mt[4]))return void(g._isValid=!1);xt="Z"}g._f=G+(ht||"")+(xt||""),vu(g)}else g._isValid=!1}}else g._isValid=!1}function mr(g,E,I,O,G,ht){return g=[function(xt){xt=parseInt(xt,10);{if(xt<=49)return 2e3+xt;if(xt<=999)return 1900+xt}return xt}(g),ve.indexOf(E),parseInt(I,10),parseInt(O,10),parseInt(G,10)],ht&&g.push(parseInt(ht,10)),g}function Hi(g){var E,I,O,G,ht=l_.exec(g._i.replace(/\([^()]*\)|[\n\t]/g," ").replace(/(\s\s+)/g," ").replace(/^\s\s*/,"").replace(/\s\s*$/,""));ht?(E=mr(ht[4],ht[3],ht[2],ht[5],ht[6],ht[7]),I=ht[1],O=E,G=g,I&&qt.indexOf(I)!==new 
Date(O[0],O[1],O[2]).getDay()?(m(G).weekdayMismatch=!0,G._isValid=!1):(g._a=E,g._tzm=(I=ht[8],O=ht[9],G=ht[10],I?S0[I]:O?0:60*(((I=parseInt(G,10))-(O=I%100))/100)+O),g._d=Hr.apply(null,g._a),g._d.setUTCMinutes(g._d.getUTCMinutes()-g._tzm),m(g).rfc2822=!0)):g._isValid=!1}function Gi(g,E,I){return g!=null?g:E!=null?E:I}function Zs(g){var E,I,O,G,ht,xt,Mt,Vt,Ot,de,ie,er=[];if(!g._d){for(O=g,G=new Date(n.now()),I=O._useUTC?[G.getUTCFullYear(),G.getUTCMonth(),G.getUTCDate()]:[G.getFullYear(),G.getMonth(),G.getDate()],g._w&&g._a[wt]==null&&g._a[zt]==null&&((G=(O=g)._w).GG!=null||G.W!=null||G.E!=null?(Vt=1,Ot=4,ht=Gi(G.GG,O._a[Ft],Ke(De(),1,4).year),xt=Gi(G.W,1),((Mt=Gi(G.E,1))<1||7<Mt)&&(de=!0)):(Vt=O._locale._week.dow,Ot=O._locale._week.doy,ie=Ke(De(),Vt,Ot),ht=Gi(G.gg,O._a[Ft],ie.year),xt=Gi(G.w,ie.week),G.d!=null?((Mt=G.d)<0||6<Mt)&&(de=!0):G.e!=null?(Mt=G.e+Vt,(G.e<0||6<G.e)&&(de=!0)):Mt=Vt),xt<1||xt>wr(ht,Vt,Ot)?m(O)._overflowWeeks=!0:de!=null?m(O)._overflowWeekday=!0:(ie=oe(ht,xt,Mt,Vt,Ot),O._a[Ft]=ie.year,O._dayOfYear=ie.dayOfYear)),g._dayOfYear!=null&&(G=Gi(g._a[Ft],I[Ft]),(g._dayOfYear>ue(G)||g._dayOfYear===0)&&(m(g)._overflowDayOfYear=!0),de=Hr(G,0,g._dayOfYear),g._a[zt]=de.getUTCMonth(),g._a[wt]=de.getUTCDate()),E=0;E<3&&g._a[E]==null;++E)g._a[E]=er[E]=I[E];for(;E<7;E++)g._a[E]=er[E]=g._a[E]==null?E===2?1:0:g._a[E];g._a[bt]===24&&g._a[Et]===0&&g._a[kt]===0&&g._a[Ut]===0&&(g._nextDay=!0,g._a[bt]=0),g._d=(g._useUTC?Hr:_a).apply(null,er),ht=g._useUTC?g._d.getUTCDay():g._d.getDay(),g._tzm!=null&&g._d.setUTCMinutes(g._d.getUTCMinutes()-g._tzm),g._nextDay&&(g._a[bt]=24),g._w&&g._w.d!==void 0&&g._w.d!==ht&&(m(g).weekdayMismatch=!0)}}function vu(g){if(g._f===n.ISO_8601)A0(g);else if(g._f===n.RFC_2822)Hi(g);else{g._a=[],m(g).empty=!0;for(var 
E,I,O,G,ht,xt=""+g._i,Mt=xt.length,Vt=0,Ot=lt(g._f,g._locale).match(z)||[],de=Ot.length,ie=0;ie<de;ie++)I=Ot[ie],(E=(xt.match(le(I,g))||[])[0])&&(0<(O=xt.substr(0,xt.indexOf(E))).length&&m(g).unusedInput.push(O),xt=xt.slice(xt.indexOf(E)+E.length),Vt+=E.length),J[I]?(E?m(g).empty=!1:m(g).unusedTokens.push(I),O=I,ht=g,(G=E)!=null&&s($t,O)&&$t[O](G,ht._a,ht,O)):g._strict&&!E&&m(g).unusedTokens.push(I);m(g).charsLeftOver=Mt-Vt,0<xt.length&&m(g).unusedInput.push(xt),g._a[bt]<=12&&m(g).bigHour===!0&&0<g._a[bt]&&(m(g).bigHour=void 0),m(g).parsedDateParts=g._a.slice(0),m(g).meridiem=g._meridiem,g._a[bt]=function(er,br,xi){return xi==null?br:er.meridiemHour!=null?er.meridiemHour(br,xi):er.isPM!=null?((er=er.isPM(xi))&&br<12&&(br+=12),br=er||br!==12?br:0):br}(g._locale,g._a[bt],g._meridiem),(Mt=m(g).era)!==null&&(g._a[Ft]=g._locale.erasConvertYear(Mt,g._a[Ft])),Zs(g),Pt(g)}}function M0(g){var E,I,O,G=g._i,ht=g._f;return g._locale=g._locale||ce(g._l),G===null||ht===void 0&&G===""?y({nullInput:!0}):(typeof G=="string"&&(g._i=G=g._locale.preparse(G)),M(G)?new C(Pt(G)):(h(G)?g._d=G:i(ht)?function(xt){var Mt,Vt,Ot,de,ie,er,br=!1,xi=xt._f.length;if(xi===0)return m(xt).invalidFormat=!0,xt._d=new Date(NaN);for(de=0;de<xi;de++)ie=0,er=!1,Mt=T({},xt),xt._useUTC!=null&&(Mt._useUTC=xt._useUTC),Mt._f=xt._f[de],vu(Mt),_(Mt)&&(er=!0),ie=(ie+=m(Mt).charsLeftOver)+10*m(Mt).unusedTokens.length,m(Mt).score=ie,br?ie<Ot&&(Ot=ie,Vt=Mt):(Ot==null||ie<Ot||er)&&(Ot=ie,Vt=Mt,er&&(br=!0));f(xt,Vt||Mt)}(g):ht?vu(g):l(ht=(G=g)._i)?G._d=new Date(n.now()):h(ht)?G._d=new Date(ht.valueOf()):typeof ht=="string"?(I=G,(E=C0.exec(I._i))!==null?I._d=new Date(+E[1]):(A0(I),I._isValid===!1&&(delete I._isValid,Hi(I),I._isValid===!1&&(delete I._isValid,I._strict?I._isValid=!1:n.createFromInputFallback(I))))):i(ht)?(G._a=d(ht.slice(0),function(xt){return parseInt(xt,10)}),Zs(G)):a(ht)?(E=G)._d||(O=(I=K(E._i)).day===void 
0?I.date:I.day,E._a=d([I.year,I.month,O,I.hour,I.minute,I.second,I.millisecond],function(xt){return xt&&parseInt(xt,10)}),Zs(E)):u(ht)?G._d=new Date(ht):n.createFromInputFallback(G),_(g)||(g._d=null),g))}function Dr(g,E,I,O,G){var ht={};return E!==!0&&E!==!1||(O=E,E=void 0),I!==!0&&I!==!1||(O=I,I=void 0),(a(g)&&o(g)||i(g)&&g.length===0)&&(g=void 0),ht._isAMomentObject=!0,ht._useUTC=ht._isUTC=G,ht._l=I,ht._i=g,ht._f=E,ht._strict=O,(G=new C(Pt(M0(G=ht))))._nextDay&&(G.add(1,"d"),G._nextDay=void 0),G}function De(g,E,I,O){return Dr(g,E,I,O,!1)}n.createFromInputFallback=R("value provided is not in a recognized RFC2822 or ISO format. moment construction falls back to js Date(), which is not reliable across all browsers and versions. Non RFC2822/ISO date formats are discouraged. Please refer to http://momentjs.com/guides/#/warnings/js-date/ for more info.",function(g){g._d=new Date(g._i+(g._useUTC?" UTC":""))}),n.ISO_8601=function(){},n.RFC_2822=function(){},It=R("moment().min is deprecated, use moment.max instead. http://momentjs.com/guides/#/warnings/min-max/",function(){var g=De.apply(null,arguments);return this.isValid()&&g.isValid()?g<this?this:g:y()}),Lt=R("moment().max is deprecated, use moment.min instead. 
http://momentjs.com/guides/#/warnings/min-max/",function(){var g=De.apply(null,arguments);return this.isValid()&&g.isValid()?this<g?this:g:y()});function hn(g,E){var I,O;if(!(E=E.length===1&&i(E[0])?E[0]:E).length)return De();for(I=E[0],O=1;O<E.length;++O)E[O].isValid()&&!E[O][g](I)||(I=E[O]);return I}var xa=["year","quarter","month","week","day","hour","minute","second","millisecond"];function _i(E){var E=K(E),I=E.year||0,O=E.quarter||0,G=E.month||0,ht=E.week||E.isoWeek||0,xt=E.day||0,Mt=E.hour||0,Vt=E.minute||0,Ot=E.second||0,de=E.millisecond||0;this._isValid=function(ie){var er,br,xi=!1,g_=xa.length;for(er in ie)if(s(ie,er)&&(jt.call(xa,er)===-1||ie[er]!=null&&isNaN(ie[er])))return!1;for(br=0;br<g_;++br)if(ie[xa[br]]){if(xi)return!1;parseFloat(ie[xa[br]])!==q(ie[xa[br]])&&(xi=!0)}return!0}(E),this._milliseconds=+de+1e3*Ot+6e4*Vt+1e3*Mt*60*60,this._days=+xt+7*ht,this._months=+G+3*O+12*I,this._data={},this._locale=ce(),this._bubble()}function ka(g){return g instanceof _i}function Rn(g){return g<0?-1*Math.round(-1*g):Math.round(g)}function xu(g,E){Y(g,0,0,function(){var I=this.utcOffset(),O="+";return I<0&&(I=-I,O="-"),O+N(~~(I/60),2)+E+N(~~I%60,2)})}xu("Z",":"),xu("ZZ",""),ft("Z",vt),ft("ZZ",vt),Qt(["Z","ZZ"],function(g,E,I){I._useUTC=!0,I._tzm=Qs(vt,g)});var yl=/([\+\-]|\d\d)/gi;function Qs(g,I){var I=(I||"").match(g);return I===null?null:(I=60*(g=((I[I.length-1]||[])+"").match(yl)||["-",0,0])[1]+q(g[2]))===0?0:g[0]==="+"?I:-I}function In(g,E){var I;return E._isUTC?(E=E.clone(),I=(M(g)||h(g)?g:De(g)).valueOf()-E.valueOf(),E._d.setTime(E._d.valueOf()+I),n.updateOffset(E,!1),E):De(g).local()}function c_(g){return-Math.round(g._d.getTimezoneOffset())}function rR(){return!!this.isValid()&&this._isUTC&&this._offset===0}n.updateOffset=function(){};var Ast=/^(-|\+)?(?:(\d*)[. 
])?(\d+):(\d+)(?::(\d+)(\.\d*)?)?$/,Mst=/^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/;function vi(g,E){var I,O=g,G=null;return ka(g)?O={ms:g._milliseconds,d:g._days,M:g._months}:u(g)||!isNaN(+g)?(O={},E?O[E]=+g:O.milliseconds=+g):(G=Ast.exec(g))?(I=G[1]==="-"?-1:1,O={y:0,d:q(G[wt])*I,h:q(G[bt])*I,m:q(G[Et])*I,s:q(G[kt])*I,ms:q(Rn(1e3*G[Ut]))*I}):(G=Mst.exec(g))?(I=G[1]==="-"?-1:1,O={y:Js(G[2],I),M:Js(G[3],I),w:Js(G[4],I),d:Js(G[5],I),h:Js(G[6],I),m:Js(G[7],I),s:Js(G[8],I)}):O==null?O={}:typeof O=="object"&&("from"in O||"to"in O)&&(E=function(ht,xt){var Mt;return!ht.isValid()||!xt.isValid()?{milliseconds:0,months:0}:(xt=In(xt,ht),ht.isBefore(xt)?Mt=nR(ht,xt):((Mt=nR(xt,ht)).milliseconds=-Mt.milliseconds,Mt.months=-Mt.months),Mt)}(De(O.from),De(O.to)),(O={}).ms=E.milliseconds,O.M=E.months),G=new _i(O),ka(g)&&s(g,"_locale")&&(G._locale=g._locale),ka(g)&&s(g,"_isValid")&&(G._isValid=g._isValid),G}function Js(g,E){return g=g&&parseFloat(g.replace(",",".")),(isNaN(g)?0:g)*E}function nR(g,E){var I={};return I.months=E.month()-g.month()+12*(E.year()-g.year()),g.clone().add(I.months,"M").isAfter(E)&&--I.months,I.milliseconds=+E-+g.clone().add(I.months,"M"),I}function iR(g,E){return function(I,O){var G;return O===null||isNaN(+O)||(L(E,"moment()."+E+"(period, number) is deprecated. Please use moment()."+E+"(number, period). 
See http://momentjs.com/guides/#/warnings/add-inverted-param/ for more info."),G=I,I=O,O=G),aR(this,vi(I,O),g),this}}function aR(g,xt,I,O){var G=xt._milliseconds,ht=Rn(xt._days),xt=Rn(xt._months);g.isValid()&&(O=O==null||O,xt&&Wt(g,F(g,"Month")+xt*I),ht&&j(g,"Date",F(g,"Date")+ht*I),G&&g._d.setTime(g._d.valueOf()+G*I),O&&n.updateOffset(g,ht||xt))}vi.fn=_i.prototype,vi.invalid=function(){return vi(NaN)},ne=iR(1,"add"),Ze=iR(-1,"subtract");function sR(g){return typeof g=="string"||g instanceof String}function Lst(g){return M(g)||h(g)||sR(g)||u(g)||function(E){var I=i(E),O=!1;return I&&(O=E.filter(function(G){return!u(G)&&sR(E)}).length===0),I&&O}(g)||function(E){var I,O,G=a(E)&&!o(E),ht=!1,xt=["years","year","y","months","month","M","days","day","d","dates","date","D","hours","hour","h","minutes","minute","m","seconds","second","s","milliseconds","millisecond","ms"],Mt=xt.length;for(I=0;I<Mt;I+=1)O=xt[I],ht=ht||s(E,O);return G&&ht}(g)||g==null}function L0(g,G){if(g.date()<G.date())return-L0(G,g);var I=12*(G.year()-g.year())+(G.month()-g.month()),O=g.clone().add(I,"months"),G=G-O<0?(G-O)/(O-g.clone().add(I-1,"months")):(G-O)/(g.clone().add(1+I,"months")-O);return-(I+G)||0}function oR(g){return g===void 0?this._locale._abbr:((g=ce(g))!=null&&(this._locale=g),this)}n.defaultFormat="YYYY-MM-DDTHH:mm:ssZ",n.defaultFormatUtc="YYYY-MM-DDTHH:mm:ss[Z]",st=R("moment().lang() is deprecated. Instead, use moment().localeData() to get the language configuration. 
Use moment().locale() to change languages.",function(g){return g===void 0?this.localeData():this.locale(g)});function lR(){return this._locale}var cR=126227808e5;function ml(g,E){return(g%E+E)%E}function uR(g,E,I){return g<100&&0<=g?new Date(g+400,E,I)-cR:new Date(g,E,I).valueOf()}function hR(g,E,I){return g<100&&0<=g?Date.UTC(g+400,E,I)-cR:Date.UTC(g,E,I)}function u_(g,E){return E.erasAbbrRegex(g)}function h_(){for(var g=[],E=[],I=[],O=[],G=this.eras(),ht=0,xt=G.length;ht<xt;++ht)E.push(Dt(G[ht].name)),g.push(Dt(G[ht].abbr)),I.push(Dt(G[ht].narrow)),O.push(Dt(G[ht].name)),O.push(Dt(G[ht].abbr)),O.push(Dt(G[ht].narrow));this._erasRegex=new RegExp("^("+O.join("|")+")","i"),this._erasNameRegex=new RegExp("^("+E.join("|")+")","i"),this._erasAbbrRegex=new RegExp("^("+g.join("|")+")","i"),this._erasNarrowRegex=new RegExp("^("+I.join("|")+")","i")}function R0(g,E){Y(0,[g,g.length],0,E)}function fR(g,E,I,O,G){var ht;return g==null?Ke(this,O,G).year:(ht=wr(g,O,G),function(xt,Mt,Vt,Ot,de){return xt=oe(xt,Mt,Vt,Ot,de),Mt=Hr(xt.year,0,xt.dayOfYear),this.year(Mt.getUTCFullYear()),this.month(Mt.getUTCMonth()),this.date(Mt.getUTCDate()),this}.call(this,g,E=ht<E?ht:E,I,O,G))}Y("N",0,0,"eraAbbr"),Y("NN",0,0,"eraAbbr"),Y("NNN",0,0,"eraAbbr"),Y("NNNN",0,0,"eraName"),Y("NNNNN",0,0,"eraNarrow"),Y("y",["y",1],"yo","eraYear"),Y("y",["yy",2],0,"eraYear"),Y("y",["yyy",3],0,"eraYear"),Y("y",["yyyy",4],0,"eraYear"),ft("N",u_),ft("NN",u_),ft("NNN",u_),ft("NNNN",function(g,E){return E.erasNameRegex(g)}),ft("NNNNN",function(g,E){return E.erasNarrowRegex(g)}),Qt(["N","NN","NNN","NNNN","NNNNN"],function(g,E,I,O){O=I._locale.erasParse(g,O,I._strict),O?m(I).era=O:m(I).invalidEra=g}),ft("y",Ct),ft("yy",Ct),ft("yyy",Ct),ft("yyyy",Ct),ft("yo",function(g,E){return E._eraYearOrdinalRegex||Ct}),Qt(["y","yy","yyy","yyyy"],Ft),Qt(["yo"],function(g,E,I,O){var 
G;I._locale._eraYearOrdinalRegex&&(G=g.match(I._locale._eraYearOrdinalRegex)),I._locale.eraYearOrdinalParse?E[Ft]=I._locale.eraYearOrdinalParse(g,G):E[Ft]=parseInt(g,10)}),Y(0,["gg",2],0,function(){return this.weekYear()%100}),Y(0,["GG",2],0,function(){return this.isoWeekYear()%100}),R0("gggg","weekYear"),R0("ggggg","weekYear"),R0("GGGG","isoWeekYear"),R0("GGGGG","isoWeekYear"),W("weekYear","gg"),W("isoWeekYear","GG"),Z("weekYear",1),Z("isoWeekYear",1),ft("G",pt),ft("g",pt),ft("GG",at,fe),ft("gg",at,fe),ft("GGGG",ls,to),ft("gggg",ls,to),ft("GGGGG",ss,os),ft("ggggg",ss,os),we(["gggg","ggggg","GGGG","GGGGG"],function(g,E,I,O){E[O.substr(0,2)]=q(g)}),we(["gg","GG"],function(g,E,I,O){E[O]=n.parseTwoDigitYear(g)}),Y("Q",0,"Qo","quarter"),W("quarter","Q"),Z("quarter",7),ft("Q",P),Qt("Q",function(g,E){E[zt]=3*(q(g)-1)}),Y("D",["DD",2],"Do","date"),W("date","D"),Z("date",9),ft("D",at),ft("DD",at,fe),ft("Do",function(g,E){return g?E._dayOfMonthOrdinalParse||E._ordinalParse:E._dayOfMonthOrdinalParseLenient}),Qt(["D","DD"],wt),Qt("Do",function(g,E){E[wt]=q(g.match(at)[0])}),ls=U("Date",!0),Y("DDD",["DDDD",3],"DDDo","dayOfYear"),W("dayOfYear","DDD"),Z("dayOfYear",4),ft("DDD",Rt),ft("DDDD",et),Qt(["DDD","DDDD"],function(g,E,I){I._dayOfYear=q(g)}),Y("m",["mm",2],0,"minute"),W("minute","m"),Z("minute",14),ft("m",at),ft("mm",at,fe),Qt(["m","mm"],Et);var as,to=U("Minutes",!1),ss=(Y("s",["ss",2],0,"second"),W("second","s"),Z("second",15),ft("s",at),ft("ss",at,fe),Qt(["s","ss"],kt),U("Seconds",!1));for(Y("S",0,0,function(){return~~(this.millisecond()/100)}),Y(0,["SS",2],0,function(){return~~(this.millisecond()/10)}),Y(0,["SSS",3],0,"millisecond"),Y(0,["SSSS",4],0,function(){return 10*this.millisecond()}),Y(0,["SSSSS",5],0,function(){return 100*this.millisecond()}),Y(0,["SSSSSS",6],0,function(){return 1e3*this.millisecond()}),Y(0,["SSSSSSS",7],0,function(){return 1e4*this.millisecond()}),Y(0,["SSSSSSSS",8],0,function(){return 
1e5*this.millisecond()}),Y(0,["SSSSSSSSS",9],0,function(){return 1e6*this.millisecond()}),W("millisecond","ms"),Z("millisecond",16),ft("S",Rt,P),ft("SS",Rt,fe),ft("SSS",Rt,et),as="SSSS";as.length<=9;as+="S")ft(as,Ct);function Rst(g,E){E[Ut]=q(1e3*("0."+g))}for(as="S";as.length<=9;as+="S")Qt(as,Rst);os=U("Milliseconds",!1),Y("z",0,0,"zoneAbbr"),Y("zz",0,0,"zoneName"),P=C.prototype;function dR(g){return g}P.add=ne,P.calendar=function(I,G){arguments.length===1&&(arguments[0]?Lst(arguments[0])?(I=arguments[0],G=void 0):function(ht){for(var xt=a(ht)&&!o(ht),Mt=!1,Vt=["sameDay","nextDay","lastDay","nextWeek","lastWeek","sameElse"],Ot=0;Ot<Vt.length;Ot+=1)Mt=Mt||s(ht,Vt[Ot]);return xt&&Mt}(arguments[0])&&(G=arguments[0],I=void 0):G=I=void 0);var I=I||De(),O=In(I,this).startOf("day"),O=n.calendarFormat(this,O)||"sameElse",G=G&&(v(G[O])?G[O].call(this,I):G[O]);return this.format(G||this.localeData().calendar(O,this,De(I)))},P.clone=function(){return new C(this)},P.diff=function(g,E,I){var O,G,ht;if(!this.isValid())return NaN;if(!(O=In(g,this)).isValid())return NaN;switch(G=6e4*(O.utcOffset()-this.utcOffset()),E=tt(E)){case"year":ht=L0(this,O)/12;break;case"month":ht=L0(this,O);break;case"quarter":ht=L0(this,O)/3;break;case"second":ht=(this-O)/1e3;break;case"minute":ht=(this-O)/6e4;break;case"hour":ht=(this-O)/36e5;break;case"day":ht=(this-O-G)/864e5;break;case"week":ht=(this-O-G)/6048e5;break;default:ht=this-O}return I?ht:Q(ht)},P.endOf=function(g){var E,I;if((g=tt(g))===void 0||g==="millisecond"||!this.isValid())return 
this;switch(I=this._isUTC?hR:uR,g){case"year":E=I(this.year()+1,0,1)-1;break;case"quarter":E=I(this.year(),this.month()-this.month()%3+3,1)-1;break;case"month":E=I(this.year(),this.month()+1,1)-1;break;case"week":E=I(this.year(),this.month(),this.date()-this.weekday()+7)-1;break;case"isoWeek":E=I(this.year(),this.month(),this.date()-(this.isoWeekday()-1)+7)-1;break;case"day":case"date":E=I(this.year(),this.month(),this.date()+1)-1;break;case"hour":E=this._d.valueOf(),E+=36e5-ml(E+(this._isUTC?0:6e4*this.utcOffset()),36e5)-1;break;case"minute":E=this._d.valueOf(),E+=6e4-ml(E,6e4)-1;break;case"second":E=this._d.valueOf(),E+=1e3-ml(E,1e3)-1;break}return this._d.setTime(E),n.updateOffset(this,!0),this},P.format=function(g){return g=g||(this.isUtc()?n.defaultFormatUtc:n.defaultFormat),g=$(this,g),this.localeData().postformat(g)},P.from=function(g,E){return this.isValid()&&(M(g)&&g.isValid()||De(g).isValid())?vi({to:this,from:g}).locale(this.locale()).humanize(!E):this.localeData().invalidDate()},P.fromNow=function(g){return this.from(De(),g)},P.to=function(g,E){return this.isValid()&&(M(g)&&g.isValid()||De(g).isValid())?vi({from:this,to:g}).locale(this.locale()).humanize(!E):this.localeData().invalidDate()},P.toNow=function(g){return this.to(De(),g)},P.get=function(g){return v(this[g=tt(g)])?this[g]():this},P.invalidAt=function(){return m(this).overflow},P.isAfter=function(g,E){return g=M(g)?g:De(g),!(!this.isValid()||!g.isValid())&&((E=tt(E)||"millisecond")==="millisecond"?this.valueOf()>g.valueOf():g.valueOf()<this.clone().startOf(E).valueOf())},P.isBefore=function(g,E){return g=M(g)?g:De(g),!(!this.isValid()||!g.isValid())&&((E=tt(E)||"millisecond")==="millisecond"?this.valueOf()<g.valueOf():this.clone().endOf(E).valueOf()<g.valueOf())},P.isBetween=function(g,E,I,O){return 
g=M(g)?g:De(g),E=M(E)?E:De(E),!!(this.isValid()&&g.isValid()&&E.isValid())&&((O=O||"()")[0]==="("?this.isAfter(g,I):!this.isBefore(g,I))&&(O[1]===")"?this.isBefore(E,I):!this.isAfter(E,I))},P.isSame=function(I,E){var I=M(I)?I:De(I);return!(!this.isValid()||!I.isValid())&&((E=tt(E)||"millisecond")==="millisecond"?this.valueOf()===I.valueOf():(I=I.valueOf(),this.clone().startOf(E).valueOf()<=I&&I<=this.clone().endOf(E).valueOf()))},P.isSameOrAfter=function(g,E){return this.isSame(g,E)||this.isAfter(g,E)},P.isSameOrBefore=function(g,E){return this.isSame(g,E)||this.isBefore(g,E)},P.isValid=function(){return _(this)},P.lang=st,P.locale=oR,P.localeData=lR,P.max=Lt,P.min=It,P.parsingFlags=function(){return f({},m(this))},P.set=function(g,E){if(typeof g=="object")for(var I=function(ht){var xt,Mt=[];for(xt in ht)s(ht,xt)&&Mt.push({unit:xt,priority:it[xt]});return Mt.sort(function(Vt,Ot){return Vt.priority-Ot.priority}),Mt}(g=K(g)),O=I.length,G=0;G<O;G++)this[I[G].unit](g[I[G].unit]);else if(v(this[g=tt(g)]))return this[g](E);return this},P.startOf=function(g){var E,I;if((g=tt(g))===void 0||g==="millisecond"||!this.isValid())return this;switch(I=this._isUTC?hR:uR,g){case"year":E=I(this.year(),0,1);break;case"quarter":E=I(this.year(),this.month()-this.month()%3,1);break;case"month":E=I(this.year(),this.month(),1);break;case"week":E=I(this.year(),this.month(),this.date()-this.weekday());break;case"isoWeek":E=I(this.year(),this.month(),this.date()-(this.isoWeekday()-1));break;case"day":case"date":E=I(this.year(),this.month(),this.date());break;case"hour":E=this._d.valueOf(),E-=ml(E+(this._isUTC?0:6e4*this.utcOffset()),36e5);break;case"minute":E=this._d.valueOf(),E-=ml(E,6e4);break;case"second":E=this._d.valueOf(),E-=ml(E,1e3);break}return this._d.setTime(E),n.updateOffset(this,!0),this},P.subtract=Ze,P.toArray=function(){var g=this;return[g.year(),g.month(),g.date(),g.hour(),g.minute(),g.second(),g.millisecond()]},P.toObject=function(){var 
g=this;return{years:g.year(),months:g.month(),date:g.date(),hours:g.hours(),minutes:g.minutes(),seconds:g.seconds(),milliseconds:g.milliseconds()}},P.toDate=function(){return new Date(this.valueOf())},P.toISOString=function(g){if(!this.isValid())return null;var E=(g=g!==!0)?this.clone().utc():this;return E.year()<0||9999<E.year()?$(E,g?"YYYYYY-MM-DD[T]HH:mm:ss.SSS[Z]":"YYYYYY-MM-DD[T]HH:mm:ss.SSSZ"):v(Date.prototype.toISOString)?g?this.toDate().toISOString():new Date(this.valueOf()+60*this.utcOffset()*1e3).toISOString().replace("Z",$(E,"Z")):$(E,g?"YYYY-MM-DD[T]HH:mm:ss.SSS[Z]":"YYYY-MM-DD[T]HH:mm:ss.SSSZ")},P.inspect=function(){if(!this.isValid())return"moment.invalid(/* "+this._i+" */)";var g,E="moment",I="";return this.isLocal()||(E=this.utcOffset()===0?"moment.utc":"moment.parseZone",I="Z"),E="["+E+'("]',g=0<=this.year()&&this.year()<=9999?"YYYY":"YYYYYY",this.format(E+g+"-MM-DD[T]HH:mm:ss.SSS"+(I+'[")]'))},typeof Symbol<"u"&&Symbol.for!=null&&(P[Symbol.for("nodejs.util.inspect.custom")]=function(){return"Moment<"+this.format()+">"}),P.toJSON=function(){return this.isValid()?this.toISOString():null},P.toString=function(){return this.clone().locale("en").format("ddd MMM DD YYYY HH:mm:ss [GMT]ZZ")},P.unix=function(){return Math.floor(this.valueOf()/1e3)},P.valueOf=function(){return this._d.valueOf()-6e4*(this._offset||0)},P.creationData=function(){return{input:this._i,format:this._f,locale:this._locale,isUTC:this._isUTC,strict:this._strict}},P.eraName=function(){for(var g,E=this.localeData().eras(),I=0,O=E.length;I<O;++I)if(g=this.clone().startOf("day").valueOf(),E[I].since<=g&&g<=E[I].until||E[I].until<=g&&g<=E[I].since)return E[I].name;return""},P.eraNarrow=function(){for(var g,E=this.localeData().eras(),I=0,O=E.length;I<O;++I)if(g=this.clone().startOf("day").valueOf(),E[I].since<=g&&g<=E[I].until||E[I].until<=g&&g<=E[I].since)return E[I].narrow;return""},P.eraAbbr=function(){for(var 
g,E=this.localeData().eras(),I=0,O=E.length;I<O;++I)if(g=this.clone().startOf("day").valueOf(),E[I].since<=g&&g<=E[I].until||E[I].until<=g&&g<=E[I].since)return E[I].abbr;return""},P.eraYear=function(){for(var g,E,I=this.localeData().eras(),O=0,G=I.length;O<G;++O)if(g=I[O].since<=I[O].until?1:-1,E=this.clone().startOf("day").valueOf(),I[O].since<=E&&E<=I[O].until||I[O].until<=E&&E<=I[O].since)return(this.year()-n(I[O].since).year())*g+I[O].offset;return this.year()},P.year=N0,P.isLeapYear=function(){return V(this.year())},P.weekYear=function(g){return fR.call(this,g,this.week(),this.weekday(),this.localeData()._week.dow,this.localeData()._week.doy)},P.isoWeekYear=function(g){return fR.call(this,g,this.isoWeek(),this.isoWeekday(),1,4)},P.quarter=P.quarters=function(g){return g==null?Math.ceil((this.month()+1)/3):this.month(3*(g-1)+this.month()%3)},P.month=se,P.daysInMonth=function(){return yt(this.year(),this.month())},P.week=P.weeks=function(g){var E=this.localeData().week(this);return g==null?E:this.add(7*(g-E),"d")},P.isoWeek=P.isoWeeks=function(g){var E=Ke(this,1,4).week;return g==null?E:this.add(7*(g-E),"d")},P.weeksInYear=function(){var g=this.localeData()._week;return wr(this.year(),g.dow,g.doy)},P.weeksInWeekYear=function(){var g=this.localeData()._week;return wr(this.weekYear(),g.dow,g.doy)},P.isoWeeksInYear=function(){return wr(this.year(),1,4)},P.isoWeeksInISOWeekYear=function(){return wr(this.isoWeekYear(),1,4)},P.date=ls,P.day=P.days=function(g){if(!this.isValid())return g!=null?this:NaN;var E,I,O=this._isUTC?this._d.getUTCDay():this._d.getDay();return g!=null?(E=g,I=this.localeData(),g=typeof E!="string"?E:isNaN(E)?typeof(E=I.weekdaysParse(E))=="number"?E:null:parseInt(E,10),this.add(g-O,"d")):O},P.weekday=function(g){if(!this.isValid())return g!=null?this:NaN;var E=(this.day()+7-this.localeData()._week.dow)%7;return g==null?E:this.add(g-E,"d")},P.isoWeekday=function(g){return this.isValid()?g!=null?(E=g,I=this.localeData(),I=typeof 
E=="string"?I.weekdaysParse(E)%7||7:isNaN(E)?null:E,this.day(this.day()%7?I:I-7)):this.day()||7:g!=null?this:NaN;var E,I},P.dayOfYear=function(g){var E=Math.round((this.clone().startOf("day")-this.clone().startOf("year"))/864e5)+1;return g==null?E:this.add(g-E,"d")},P.hour=P.hours=Tt,P.minute=P.minutes=to,P.second=P.seconds=ss,P.millisecond=P.milliseconds=os,P.utcOffset=function(g,E,I){var O,G=this._offset||0;if(!this.isValid())return g!=null?this:NaN;if(g==null)return this._isUTC?G:c_(this);if(typeof g=="string"){if((g=Qs(vt,g))===null)return this}else Math.abs(g)<16&&!I&&(g*=60);return!this._isUTC&&E&&(O=c_(this)),this._offset=g,this._isUTC=!0,O!=null&&this.add(O,"m"),G!==g&&(!E||this._changeInProgress?aR(this,vi(g-G,"m"),1,!1):this._changeInProgress||(this._changeInProgress=!0,n.updateOffset(this,!0),this._changeInProgress=null)),this},P.utc=function(g){return this.utcOffset(0,g)},P.local=function(g){return this._isUTC&&(this.utcOffset(0,g),this._isUTC=!1,g&&this.subtract(c_(this),"m")),this},P.parseZone=function(){var g;return this._tzm!=null?this.utcOffset(this._tzm,!1,!0):typeof this._i=="string"&&((g=Qs(mt,this._i))!=null?this.utcOffset(g):this.utcOffset(0,!0)),this},P.hasAlignedHourOffset=function(g){return!!this.isValid()&&(g=g?De(g).utcOffset():0,(this.utcOffset()-g)%60==0)},P.isDST=function(){return this.utcOffset()>this.clone().month(0).utcOffset()||this.utcOffset()>this.clone().month(5).utcOffset()},P.isLocal=function(){return!!this.isValid()&&!this._isUTC},P.isUtcOffset=function(){return!!this.isValid()&&this._isUTC},P.isUtc=rR,P.isUTC=rR,P.zoneAbbr=function(){return this._isUTC?"UTC":""},P.zoneName=function(){return this._isUTC?"Coordinated Universal Time":""},P.dates=R("dates accessor is deprecated. Use date instead.",ls),P.months=R("months accessor is deprecated. Use month instead",se),P.years=R("years accessor is deprecated. Use year instead",N0),P.zone=R("moment().zone is deprecated, use moment().utcOffset instead. 
http://momentjs.com/guides/#/warnings/zone/",function(g,E){return g!=null?(this.utcOffset(g=typeof g!="string"?-g:g,E),this):-this.utcOffset()}),P.isDSTShifted=R("isDSTShifted is deprecated. See http://momentjs.com/guides/#/warnings/dst-shifted/ for more information",function(){if(!l(this._isDSTShifted))return this._isDSTShifted;var g,E={};return T(E,this),(E=M0(E))._a?(g=(E._isUTC?p:De)(E._a),this._isDSTShifted=this.isValid()&&0<function(I,O,G){for(var ht=Math.min(I.length,O.length),xt=Math.abs(I.length-O.length),Mt=0,Vt=0;Vt<ht;Vt++)(G&&I[Vt]!==O[Vt]||!G&&q(I[Vt])!==q(O[Vt]))&&Mt++;return Mt+xt}(E._a,g.toArray())):this._isDSTShifted=!1,this._isDSTShifted}),fe=w.prototype;function I0(g,E,I,ht){var G=ce(),ht=p().set(ht,E);return G[I](ht,g)}function pR(g,E,I){if(u(g)&&(E=g,g=void 0),g=g||"",E!=null)return I0(g,E,I,"month");for(var O=[],G=0;G<12;G++)O[G]=I0(g,G,I,"month");return O}function f_(g,E,I,O){E=(typeof g=="boolean"?u(E)&&(I=E,E=void 0):(E=g,g=!1,u(I=E)&&(I=E,E=void 0)),E||"");var G,ht=ce(),xt=g?ht._week.dow:0,Mt=[];if(I!=null)return I0(E,(I+xt)%7,O,"day");for(G=0;G<7;G++)Mt[G]=I0(E,(G+xt)%7,O,"day");return Mt}fe.calendar=function(g,E,I){return v(g=this._calendar[g]||this._calendar.sameElse)?g.call(E,I):g},fe.longDateFormat=function(g){var E=this._longDateFormat[g],I=this._longDateFormat[g.toUpperCase()];return E||!I?E:(this._longDateFormat[g]=I.match(z).map(function(O){return O==="MMMM"||O==="MM"||O==="DD"||O==="dddd"?O.slice(1):O}).join(""),this._longDateFormat[g])},fe.invalidDate=function(){return this._invalidDate},fe.ordinal=function(g){return this._ordinal.replace("%d",g)},fe.preparse=dR,fe.postformat=dR,fe.relativeTime=function(g,E,I,O){var G=this._relativeTime[I];return v(G)?G(g,E,I,O):G.replace(/%d/i,g)},fe.pastFuture=function(g,E){return v(g=this._relativeTime[0<g?"future":"past"])?g(E):g.replace(/%s/i,E)},fe.set=function(g){var E,I;for(I in g)s(g,I)&&(v(E=g[I])?this[I]=E:this["_"+I]=E);this._config=g,this._dayOfMonthOrdinalParseLenient=new 
RegExp((this._dayOfMonthOrdinalParse.source||this._ordinalParse.source)+"|"+/\d{1,2}/.source)},fe.eras=function(g,E){for(var I,O=this._eras||ce("en")._eras,G=0,ht=O.length;G<ht;++G){switch(typeof O[G].since){case"string":I=n(O[G].since).startOf("day"),O[G].since=I.valueOf();break}switch(typeof O[G].until){case"undefined":O[G].until=1/0;break;case"string":I=n(O[G].until).startOf("day").valueOf(),O[G].until=I.valueOf();break}}return O},fe.erasParse=function(g,E,I){var O,G,ht,xt,Mt,Vt=this.eras();for(g=g.toUpperCase(),O=0,G=Vt.length;O<G;++O)if(ht=Vt[O].name.toUpperCase(),xt=Vt[O].abbr.toUpperCase(),Mt=Vt[O].narrow.toUpperCase(),I)switch(E){case"N":case"NN":case"NNN":if(xt===g)return Vt[O];break;case"NNNN":if(ht===g)return Vt[O];break;case"NNNNN":if(Mt===g)return Vt[O];break}else if(0<=[ht,xt,Mt].indexOf(g))return Vt[O]},fe.erasConvertYear=function(g,E){var I=g.since<=g.until?1:-1;return E===void 0?n(g.since).year():n(g.since).year()+(E-g.offset)*I},fe.erasAbbrRegex=function(g){return s(this,"_erasAbbrRegex")||h_.call(this),g?this._erasAbbrRegex:this._erasRegex},fe.erasNameRegex=function(g){return s(this,"_erasNameRegex")||h_.call(this),g?this._erasNameRegex:this._erasRegex},fe.erasNarrowRegex=function(g){return s(this,"_erasNarrowRegex")||h_.call(this),g?this._erasNarrowRegex:this._erasRegex},fe.months=function(g,E){return g?(i(this._months)?this._months:this._months[(this._months.isFormat||ye).test(E)?"format":"standalone"])[g.month()]:i(this._months)?this._months:this._months.standalone},fe.monthsShort=function(g,E){return g?(i(this._monthsShort)?this._monthsShort:this._monthsShort[ye.test(E)?"format":"standalone"])[g.month()]:i(this._monthsShort)?this._monthsShort:this._monthsShort.standalone},fe.monthsParse=function(g,E,I){var O,G;if(this._monthsParseExact)return function(ie,xt,Mt){var 
Vt,Ot,de,ie=ie.toLocaleLowerCase();if(!this._monthsParse)for(this._monthsParse=[],this._longMonthsParse=[],this._shortMonthsParse=[],Vt=0;Vt<12;++Vt)de=p([2e3,Vt]),this._shortMonthsParse[Vt]=this.monthsShort(de,"").toLocaleLowerCase(),this._longMonthsParse[Vt]=this.months(de,"").toLocaleLowerCase();return Mt?xt==="MMM"?(Ot=jt.call(this._shortMonthsParse,ie))!==-1?Ot:null:(Ot=jt.call(this._longMonthsParse,ie))!==-1?Ot:null:xt==="MMM"?(Ot=jt.call(this._shortMonthsParse,ie))!==-1||(Ot=jt.call(this._longMonthsParse,ie))!==-1?Ot:null:(Ot=jt.call(this._longMonthsParse,ie))!==-1||(Ot=jt.call(this._shortMonthsParse,ie))!==-1?Ot:null}.call(this,g,E,I);for(this._monthsParse||(this._monthsParse=[],this._longMonthsParse=[],this._shortMonthsParse=[]),O=0;O<12;O++)if(G=p([2e3,O]),I&&!this._longMonthsParse[O]&&(this._longMonthsParse[O]=new RegExp("^"+this.months(G,"").replace(".","")+"$","i"),this._shortMonthsParse[O]=new RegExp("^"+this.monthsShort(G,"").replace(".","")+"$","i")),I||this._monthsParse[O]||(G="^"+this.months(G,"")+"|^"+this.monthsShort(G,""),this._monthsParse[O]=new RegExp(G.replace(".",""),"i")),I&&E==="MMMM"&&this._longMonthsParse[O].test(g)||I&&E==="MMM"&&this._shortMonthsParse[O].test(g)||!I&&this._monthsParse[O].test(g))return O},fe.monthsRegex=function(g){return this._monthsParseExact?(s(this,"_monthsRegex")||me.call(this),g?this._monthsStrictRegex:this._monthsRegex):(s(this,"_monthsRegex")||(this._monthsRegex=Te),this._monthsStrictRegex&&g?this._monthsStrictRegex:this._monthsRegex)},fe.monthsShortRegex=function(g){return this._monthsParseExact?(s(this,"_monthsRegex")||me.call(this),g?this._monthsShortStrictRegex:this._monthsShortRegex):(s(this,"_monthsShortRegex")||(this._monthsShortRegex=be),this._monthsShortStrictRegex&&g?this._monthsShortStrictRegex:this._monthsShortRegex)},fe.week=function(g){return Ke(g,this._week.dow,this._week.doy).week},fe.firstDayOfYear=function(){return this._week.doy},fe.firstDayOfWeek=function(){return 
this._week.dow},fe.weekdays=function(g,E){return E=i(this._weekdays)?this._weekdays:this._weekdays[g&&g!==!0&&this._weekdays.isFormat.test(E)?"format":"standalone"],g===!0?Ge(E,this._week.dow):g?E[g.day()]:E},fe.weekdaysMin=function(g){return g===!0?Ge(this._weekdaysMin,this._week.dow):g?this._weekdaysMin[g.day()]:this._weekdaysMin},fe.weekdaysShort=function(g){return g===!0?Ge(this._weekdaysShort,this._week.dow):g?this._weekdaysShort[g.day()]:this._weekdaysShort},fe.weekdaysParse=function(g,E,I){var O,G;if(this._weekdaysParseExact)return function(ie,xt,Mt){var Vt,Ot,de,ie=ie.toLocaleLowerCase();if(!this._weekdaysParse)for(this._weekdaysParse=[],this._shortWeekdaysParse=[],this._minWeekdaysParse=[],Vt=0;Vt<7;++Vt)de=p([2e3,1]).day(Vt),this._minWeekdaysParse[Vt]=this.weekdaysMin(de,"").toLocaleLowerCase(),this._shortWeekdaysParse[Vt]=this.weekdaysShort(de,"").toLocaleLowerCase(),this._weekdaysParse[Vt]=this.weekdays(de,"").toLocaleLowerCase();return Mt?xt==="dddd"?(Ot=jt.call(this._weekdaysParse,ie))!==-1?Ot:null:xt==="ddd"?(Ot=jt.call(this._shortWeekdaysParse,ie))!==-1?Ot:null:(Ot=jt.call(this._minWeekdaysParse,ie))!==-1?Ot:null:xt==="dddd"?(Ot=jt.call(this._weekdaysParse,ie))!==-1||(Ot=jt.call(this._shortWeekdaysParse,ie))!==-1||(Ot=jt.call(this._minWeekdaysParse,ie))!==-1?Ot:null:xt==="ddd"?(Ot=jt.call(this._shortWeekdaysParse,ie))!==-1||(Ot=jt.call(this._weekdaysParse,ie))!==-1||(Ot=jt.call(this._minWeekdaysParse,ie))!==-1?Ot:null:(Ot=jt.call(this._minWeekdaysParse,ie))!==-1||(Ot=jt.call(this._weekdaysParse,ie))!==-1||(Ot=jt.call(this._shortWeekdaysParse,ie))!==-1?Ot:null}.call(this,g,E,I);for(this._weekdaysParse||(this._weekdaysParse=[],this._minWeekdaysParse=[],this._shortWeekdaysParse=[],this._fullWeekdaysParse=[]),O=0;O<7;O++)if(G=p([2e3,1]).day(O),I&&!this._fullWeekdaysParse[O]&&(this._fullWeekdaysParse[O]=new RegExp("^"+this.weekdays(G,"").replace(".","\\.?")+"$","i"),this._shortWeekdaysParse[O]=new 
RegExp("^"+this.weekdaysShort(G,"").replace(".","\\.?")+"$","i"),this._minWeekdaysParse[O]=new RegExp("^"+this.weekdaysMin(G,"").replace(".","\\.?")+"$","i")),this._weekdaysParse[O]||(G="^"+this.weekdays(G,"")+"|^"+this.weekdaysShort(G,"")+"|^"+this.weekdaysMin(G,""),this._weekdaysParse[O]=new RegExp(G.replace(".",""),"i")),I&&E==="dddd"&&this._fullWeekdaysParse[O].test(g)||I&&E==="ddd"&&this._shortWeekdaysParse[O].test(g)||I&&E==="dd"&&this._minWeekdaysParse[O].test(g)||!I&&this._weekdaysParse[O].test(g))return O},fe.weekdaysRegex=function(g){return this._weekdaysParseExact?(s(this,"_weekdaysRegex")||ze.call(this),g?this._weekdaysStrictRegex:this._weekdaysRegex):(s(this,"_weekdaysRegex")||(this._weekdaysRegex=At),this._weekdaysStrictRegex&&g?this._weekdaysStrictRegex:this._weekdaysRegex)},fe.weekdaysShortRegex=function(g){return this._weekdaysParseExact?(s(this,"_weekdaysRegex")||ze.call(this),g?this._weekdaysShortStrictRegex:this._weekdaysShortRegex):(s(this,"_weekdaysShortRegex")||(this._weekdaysShortRegex=Nt),this._weekdaysShortStrictRegex&&g?this._weekdaysShortStrictRegex:this._weekdaysShortRegex)},fe.weekdaysMinRegex=function(g){return this._weekdaysParseExact?(s(this,"_weekdaysRegex")||ze.call(this),g?this._weekdaysMinStrictRegex:this._weekdaysMinRegex):(s(this,"_weekdaysMinRegex")||(this._weekdaysMinRegex=Jt),this._weekdaysMinStrictRegex&&g?this._weekdaysMinStrictRegex:this._weekdaysMinRegex)},fe.isPM=function(g){return(g+"").toLowerCase().charAt(0)==="p"},fe.meridiem=function(g,E,I){return 11<g?I?"pm":"PM":I?"am":"AM"},Xt("en",{eras:[{since:"0001-01-01",until:1/0,offset:1,name:"Anno Domini",narrow:"AD",abbr:"AD"},{since:"0000-12-31",until:-1/0,offset:1,name:"Before Christ",narrow:"BC",abbr:"BC"}],dayOfMonthOrdinalParse:/\d{1,2}(th|st|nd|rd)/,ordinal:function(g){var E=g%10;return g+(q(g%100/10)===1?"th":E==1?"st":E==2?"nd":E==3?"rd":"th")}}),n.lang=R("moment.lang is deprecated. Use moment.locale instead.",Xt),n.langData=R("moment.langData is deprecated. 
Use moment.localeData instead.",ce);var wa=Math.abs;function gR(g,E,I,O){return E=vi(E,I),g._milliseconds+=O*E._milliseconds,g._days+=O*E._days,g._months+=O*E._months,g._bubble()}function yR(g){return g<0?Math.floor(g):Math.ceil(g)}function mR(g){return 4800*g/146097}function d_(g){return 146097*g/4800}function Ta(g){return function(){return this.as(g)}}Rt=Ta("ms"),et=Ta("s"),ne=Ta("m"),Lt=Ta("h"),It=Ta("d"),Ze=Ta("w"),Tt=Ta("M"),to=Ta("Q"),ss=Ta("y");function eo(g){return function(){return this.isValid()?this._data[g]:NaN}}var os=eo("milliseconds"),ls=eo("seconds"),N0=eo("minutes"),fe=eo("hours"),Ist=eo("days"),Nst=eo("months"),Bst=eo("years"),Ea=Math.round,bl={ss:44,s:45,m:45,h:22,d:26,w:null,M:11};function Dst(g,E,I,O){var Ot=vi(g).abs(),de=Ea(Ot.as("s")),G=Ea(Ot.as("m")),ht=Ea(Ot.as("h")),xt=Ea(Ot.as("d")),Mt=Ea(Ot.as("M")),Vt=Ea(Ot.as("w")),Ot=Ea(Ot.as("y")),de=(de<=I.ss?["s",de]:de<I.s&&["ss",de])||G<=1&&["m"]||G<I.m&&["mm",G]||ht<=1&&["h"]||ht<I.h&&["hh",ht]||xt<=1&&["d"]||xt<I.d&&["dd",xt];return(de=(de=I.w!=null?de||Vt<=1&&["w"]||Vt<I.w&&["ww",Vt]:de)||Mt<=1&&["M"]||Mt<I.M&&["MM",Mt]||Ot<=1&&["y"]||["yy",Ot])[2]=E,de[3]=0<+g,de[4]=O,function(ie,er,br,xi,g_){return g_.relativeTime(er||1,!!br,ie,xi)}.apply(null,de)}var p_=Math.abs;function _l(g){return(0<g)-(g<0)||+g}function B0(){if(!this.isValid())return this.localeData().invalidDate();var g,E,I,O,G,ht,xt,Mt=p_(this._milliseconds)/1e3,Vt=p_(this._days),Ot=p_(this._months),de=this.asSeconds();return de?(g=Q(Mt/60),E=Q(g/60),Mt%=60,g%=60,I=Q(Ot/12),Ot%=12,O=Mt?Mt.toFixed(3).replace(/\.?0+$/,""):"",G=_l(this._months)!==_l(de)?"-":"",ht=_l(this._days)!==_l(de)?"-":"",xt=_l(this._milliseconds)!==_l(de)?"-":"",(de<0?"-":"")+"P"+(I?G+I+"Y":"")+(Ot?G+Ot+"M":"")+(Vt?ht+Vt+"D":"")+(E||g||Mt?"T":"")+(E?xt+E+"H":"")+(g?xt+g+"M":"")+(Mt?xt+O+"S":"")):"P0D"}var Be=_i.prototype;return Be.isValid=function(){return this._isValid},Be.abs=function(){var g=this._data;return 
this._milliseconds=wa(this._milliseconds),this._days=wa(this._days),this._months=wa(this._months),g.milliseconds=wa(g.milliseconds),g.seconds=wa(g.seconds),g.minutes=wa(g.minutes),g.hours=wa(g.hours),g.months=wa(g.months),g.years=wa(g.years),this},Be.add=function(g,E){return gR(this,g,E,1)},Be.subtract=function(g,E){return gR(this,g,E,-1)},Be.as=function(g){if(!this.isValid())return NaN;var E,I,O=this._milliseconds;if((g=tt(g))==="month"||g==="quarter"||g==="year")switch(E=this._days+O/864e5,I=this._months+mR(E),g){case"month":return I;case"quarter":return I/3;case"year":return I/12}else switch(E=this._days+Math.round(d_(this._months)),g){case"week":return E/7+O/6048e5;case"day":return E+O/864e5;case"hour":return 24*E+O/36e5;case"minute":return 1440*E+O/6e4;case"second":return 86400*E+O/1e3;case"millisecond":return Math.floor(864e5*E)+O;default:throw new Error("Unknown unit "+g)}},Be.asMilliseconds=Rt,Be.asSeconds=et,Be.asMinutes=ne,Be.asHours=Lt,Be.asDays=It,Be.asWeeks=Ze,Be.asMonths=Tt,Be.asQuarters=to,Be.asYears=ss,Be.valueOf=function(){return this.isValid()?this._milliseconds+864e5*this._days+this._months%12*2592e6+31536e6*q(this._months/12):NaN},Be._bubble=function(){var g=this._milliseconds,E=this._days,I=this._months,O=this._data;return 0<=g&&0<=E&&0<=I||g<=0&&E<=0&&I<=0||(g+=864e5*yR(d_(I)+E),I=E=0),O.milliseconds=g%1e3,g=Q(g/1e3),O.seconds=g%60,g=Q(g/60),O.minutes=g%60,g=Q(g/60),O.hours=g%24,E+=Q(g/24),I+=g=Q(mR(E)),E-=yR(d_(g)),g=Q(I/12),I%=12,O.days=E,O.months=I,O.years=g,this},Be.clone=function(){return vi(this)},Be.get=function(g){return g=tt(g),this.isValid()?this[g+"s"]():NaN},Be.milliseconds=os,Be.seconds=ls,Be.minutes=N0,Be.hours=fe,Be.days=Ist,Be.weeks=function(){return Q(this.days()/7)},Be.months=Nst,Be.years=Bst,Be.humanize=function(g,E){if(!this.isValid())return this.localeData().invalidDate();var I=!1,O=bl;return typeof g=="object"&&(E=g,g=!1),typeof g=="boolean"&&(I=g),typeof 
E=="object"&&(O=Object.assign({},bl,E),E.s!=null&&E.ss==null&&(O.ss=E.s-1)),g=this.localeData(),E=Dst(this,!I,O,g),I&&(E=g.pastFuture(+this,E)),g.postformat(E)},Be.toISOString=B0,Be.toString=B0,Be.toJSON=B0,Be.locale=oR,Be.localeData=lR,Be.toIsoString=R("toIsoString() is deprecated. Please use toISOString() instead (notice the capitals)",B0),Be.lang=st,Y("X",0,0,"unix"),Y("x",0,0,"valueOf"),ft("x",pt),ft("X",/[+-]?\d+(\.\d{1,3})?/),Qt("X",function(g,E,I){I._d=new Date(1e3*parseFloat(g))}),Qt("x",function(g,E,I){I._d=new Date(q(g))}),n.version="2.29.4",r=De,n.fn=P,n.min=function(){return hn("isBefore",[].slice.call(arguments,0))},n.max=function(){return hn("isAfter",[].slice.call(arguments,0))},n.now=function(){return Date.now?Date.now():+new Date},n.utc=p,n.unix=function(g){return De(1e3*g)},n.months=function(g,E){return pR(g,E,"months")},n.isDate=h,n.locale=Xt,n.invalid=y,n.duration=vi,n.isMoment=M,n.weekdays=function(g,E,I){return f_(g,E,I,"weekdays")},n.parseZone=function(){return De.apply(null,arguments).parseZone()},n.localeData=ce,n.isDuration=ka,n.monthsShort=function(g,E){return pR(g,E,"monthsShort")},n.weekdaysMin=function(g,E,I){return f_(g,E,I,"weekdaysMin")},n.defineLocale=ee,n.updateLocale=function(g,E){var I,O;return E!=null?(O=va,Ce[g]!=null&&Ce[g].parentLocale!=null?Ce[g].set(B(Ce[g]._config,E)):(E=B(O=(I=Ln(g))!=null?I._config:O,E),I==null&&(E.abbr=g),(O=new w(E)).parentLocale=Ce[g],Ce[g]=O),Xt(g)):Ce[g]!=null&&(Ce[g].parentLocale!=null?(Ce[g]=Ce[g].parentLocale,g===Xt()&&Xt(g)):Ce[g]!=null&&delete Ce[g]),Ce[g]},n.locales=function(){return D(Ce)},n.weekdaysShort=function(g,E,I){return f_(g,E,I,"weekdaysShort")},n.normalizeUnits=tt,n.relativeTimeRounding=function(g){return g===void 0?Ea:typeof g=="function"&&(Ea=g,!0)},n.relativeTimeThreshold=function(g,E){return bl[g]!==void 0&&(E===void 
0?bl[g]:(bl[g]=E,g==="s"&&(bl.ss=E-1),!0))},n.calendarFormat=function(g,E){return(g=g.diff(E,"days",!0))<-6?"sameElse":g<-1?"lastWeek":g<0?"lastDay":g<1?"sameDay":g<2?"nextDay":g<7?"nextWeek":"sameElse"},n.prototype=P,n.HTML5_FMT={DATETIME_LOCAL:"YYYY-MM-DDTHH:mm",DATETIME_LOCAL_SECONDS:"YYYY-MM-DDTHH:mm:ss",DATETIME_LOCAL_MS:"YYYY-MM-DDTHH:mm:ss.SSS",DATE:"YYYY-MM-DD",TIME:"HH:mm",TIME_SECONDS:"HH:mm:ss",TIME_MS:"HH:mm:ss.SSS",WEEK:"GGGG-[W]WW",MONTH:"YYYY-MM"},n})})(y_);const Xn=y_.exports,ji={trace:0,debug:1,info:2,warn:3,error:4,fatal:5},H={trace:(...t)=>{},debug:(...t)=>{},info:(...t)=>{},warn:(...t)=>{},error:(...t)=>{},fatal:(...t)=>{}},D0=function(t="fatal"){let e=ji.fatal;typeof t=="string"?(t=t.toLowerCase(),t in ji&&(e=ji[t])):typeof t=="number"&&(e=t),H.trace=()=>{},H.debug=()=>{},H.info=()=>{},H.warn=()=>{},H.error=()=>{},H.fatal=()=>{},e<=ji.fatal&&(H.fatal=console.error?console.error.bind(console,Nn("FATAL"),"color: orange"):console.log.bind(console,"\x1B[35m",Nn("FATAL"))),e<=ji.error&&(H.error=console.error?console.error.bind(console,Nn("ERROR"),"color: orange"):console.log.bind(console,"\x1B[31m",Nn("ERROR"))),e<=ji.warn&&(H.warn=console.warn?console.warn.bind(console,Nn("WARN"),"color: orange"):console.log.bind(console,"\x1B[33m",Nn("WARN"))),e<=ji.info&&(H.info=console.info?console.info.bind(console,Nn("INFO"),"color: lightblue"):console.log.bind(console,"\x1B[34m",Nn("INFO"))),e<=ji.debug&&(H.debug=console.debug?console.debug.bind(console,Nn("DEBUG"),"color: lightgreen"):console.log.bind(console,"\x1B[32m",Nn("DEBUG"))),e<=ji.trace&&(H.trace=console.debug?console.debug.bind(console,Nn("TRACE"),"color: lightgreen"):console.log.bind(console,"\x1B[32m",Nn("TRACE")))},Nn=t=>`%c${Xn().format("ss.SSS")} : ${t} : `;var O0={};Object.defineProperty(O0,"__esModule",{value:!0});var ki=O0.sanitizeUrl=void 
0,bR=/^([^\w]*)(javascript|data|vbscript)/im,_R=/&#(\w+)(^\w|;)?/g,vR=/[\u0000-\u001F\u007F-\u009F\u2000-\u200D\uFEFF]/gim,xR=/^([^:]+):/gm,kR=[".","/"];function wR(t){return kR.indexOf(t[0])>-1}function TR(t){return t.replace(_R,function(e,r){return String.fromCharCode(r)})}function ER(t){var e=TR(t||"").replace(vR,"").trim();if(!e)return"about:blank";if(wR(e))return e;var r=e.match(xR);if(!r)return e;var n=r[0];return bR.test(n)?"about:blank":e}ki=O0.sanitizeUrl=ER;function Qe(t,e){return t==null||e==null?NaN:t<e?-1:t>e?1:t>=e?0:NaN}function m_(t,e){return t==null||e==null?NaN:e<t?-1:e>t?1:e>=t?0:NaN}function ku(t){let e,r,n;t.length!==2?(e=Qe,r=(o,l)=>Qe(t(o),l),n=(o,l)=>t(o)-l):(e=t===Qe||t===m_?t:CR,r=t,n=t);function i(o,l,u=0,h=o.length){if(u<h){if(e(l,l)!==0)return h;do{const d=u+h>>>1;r(o[d],l)<0?u=d+1:h=d}while(u<h)}return u}function a(o,l,u=0,h=o.length){if(u<h){if(e(l,l)!==0)return h;do{const d=u+h>>>1;r(o[d],l)<=0?u=d+1:h=d}while(u<h)}return u}function s(o,l,u=0,h=o.length){const d=i(o,l,u,h-1);return d>u&&n(o[d-1],l)>-n(o[d],l)?d-1:d}return{left:i,center:s,right:a}}function CR(){return 0}function b_(t){return t===null?NaN:+t}function*__(t,e){if(e===void 0)for(let r of t)r!=null&&(r=+r)>=r&&(yield r);else{let r=-1;for(let n of t)(n=e(n,++r,t))!=null&&(n=+n)>=n&&(yield n)}}const v_=ku(Qe),x_=v_.right,SR=v_.left,AR=ku(b_).center,cs=x_;function MR(t,e){if(!((e=+e)>=0))throw new RangeError("invalid r");let r=t.length;if(!((r=Math.floor(r))>=0))throw new RangeError("invalid length");if(!r||!e)return t;const n=F0(e),i=t.slice();return n(t,i,0,r,1),n(i,t,0,r,1),n(t,i,0,r,1),t}const k_=w_(F0),LR=w_(RR);function w_(t){return function(e,r,n=r){if(!((r=+r)>=0))throw new RangeError("invalid rx");if(!((n=+n)>=0))throw new RangeError("invalid ry");let{data:i,width:a,height:s}=e;if(!((a=Math.floor(a))>=0))throw new RangeError("invalid width");if(!((s=Math.floor(s!==void 0?s:i.length/a))>=0))throw new RangeError("invalid height");if(!a||!s||!r&&!n)return e;const 
o=r&&t(r),l=n&&t(n),u=i.slice();return o&&l?(ro(o,u,i,a,s),ro(o,i,u,a,s),ro(o,u,i,a,s),no(l,i,u,a,s),no(l,u,i,a,s),no(l,i,u,a,s)):o?(ro(o,i,u,a,s),ro(o,u,i,a,s),ro(o,i,u,a,s)):l&&(no(l,i,u,a,s),no(l,u,i,a,s),no(l,i,u,a,s)),e}}function ro(t,e,r,n,i){for(let a=0,s=n*i;a<s;)t(e,r,a,a+=n,1)}function no(t,e,r,n,i){for(let a=0,s=n*i;a<n;++a)t(e,r,a,a+s,n)}function RR(t){const e=F0(t);return(r,n,i,a,s)=>{i<<=2,a<<=2,s<<=2,e(r,n,i+0,a+0,s),e(r,n,i+1,a+1,s),e(r,n,i+2,a+2,s),e(r,n,i+3,a+3,s)}}function F0(t){const e=Math.floor(t);if(e===t)return IR(t);const r=t-e,n=2*t+1;return(i,a,s,o,l)=>{if(!((o-=l)>=s))return;let u=e*a[s];const h=l*e,d=h+l;for(let f=s,p=s+h;f<p;f+=l)u+=a[Math.min(o,f)];for(let f=s,p=o;f<=p;f+=l)u+=a[Math.min(o,f+h)],i[f]=(u+r*(a[Math.max(s,f-d)]+a[Math.min(o,f+d)]))/n,u-=a[Math.max(s,f-h)]}}function IR(t){const e=2*t+1;return(r,n,i,a,s)=>{if(!((a-=s)>=i))return;let o=t*n[i];const l=s*t;for(let u=i,h=i+l;u<h;u+=s)o+=n[Math.min(a,u)];for(let u=i,h=a;u<=h;u+=s)o+=n[Math.min(a,u+l)],r[u]=o/e,o-=n[Math.max(i,u-l)]}}function wu(t,e){let r=0;if(e===void 0)for(let n of t)n!=null&&(n=+n)>=n&&++r;else{let n=-1;for(let i of t)(i=e(i,++n,t))!=null&&(i=+i)>=i&&++r}return r}function NR(t){return t.length|0}function BR(t){return!(t>0)}function DR(t){return typeof t!="object"||"length"in t?t:Array.from(t)}function OR(t){return e=>t(...e)}function FR(...t){const e=typeof t[t.length-1]=="function"&&OR(t.pop());t=t.map(DR);const r=t.map(NR),n=t.length-1,i=new Array(n+1).fill(0),a=[];if(n<0||r.some(BR))return a;for(;;){a.push(i.map((o,l)=>t[l][o]));let s=n;for(;++i[s]===r[s];){if(s===0)return e?a.map(e):a;i[s--]=0}}}function PR(t,e){var r=0,n=0;return Float64Array.from(t,e===void 0?i=>r+=+i||0:i=>r+=+e(i,n++,t)||0)}function T_(t,e){let r=0,n,i=0,a=0;if(e===void 0)for(let s of t)s!=null&&(s=+s)>=s&&(n=s-i,i+=n/++r,a+=n*(s-i));else{let s=-1;for(let o of t)(o=e(o,++s,t))!=null&&(o=+o)>=o&&(n=o-i,i+=n/++r,a+=n*(o-i))}if(r>1)return a/(r-1)}function E_(t,e){const r=T_(t,e);return 
r&&Math.sqrt(r)}function xl(t,e){let r,n;if(e===void 0)for(const i of t)i!=null&&(r===void 0?i>=i&&(r=n=i):(r>i&&(r=i),n<i&&(n=i)));else{let i=-1;for(let a of t)(a=e(a,++i,t))!=null&&(r===void 0?a>=a&&(r=n=a):(r>a&&(r=a),n<a&&(n=a)))}return[r,n]}class _r{constructor(){this._partials=new Float64Array(32),this._n=0}add(e){const r=this._partials;let n=0;for(let i=0;i<this._n&&i<32;i++){const a=r[i],s=e+a,o=Math.abs(e)<Math.abs(a)?e-(s-a):a-(s-e);o&&(r[n++]=o),e=s}return r[n]=e,this._n=n+1,this}valueOf(){const e=this._partials;let r=this._n,n,i,a,s=0;if(r>0){for(s=e[--r];r>0&&(n=s,i=e[--r],s=n+i,a=i-(s-n),!a););r>0&&(a<0&&e[r-1]<0||a>0&&e[r-1]>0)&&(i=a*2,n=s+i,i==n-s&&(s=n))}return s}}function qR(t,e){const r=new _r;if(e===void 0)for(let n of t)(n=+n)&&r.add(n);else{let n=-1;for(let i of t)(i=+e(i,++n,t))&&r.add(i)}return+r}function VR(t,e){const r=new _r;let n=-1;return Float64Array.from(t,e===void 0?i=>r.add(+i||0):i=>r.add(+e(i,++n,t)||0))}class kl extends Map{constructor(e,r=A_){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:r}}),e!=null)for(const[n,i]of e)this.set(n,i)}get(e){return super.get(P0(this,e))}has(e){return super.has(P0(this,e))}set(e,r){return super.set(C_(this,e),r)}delete(e){return super.delete(S_(this,e))}}class us extends Set{constructor(e,r=A_){if(super(),Object.defineProperties(this,{_intern:{value:new Map},_key:{value:r}}),e!=null)for(const n of e)this.add(n)}has(e){return super.has(P0(this,e))}add(e){return super.add(C_(this,e))}delete(e){return super.delete(S_(this,e))}}function P0({_intern:t,_key:e},r){const n=e(r);return t.has(n)?t.get(n):r}function C_({_intern:t,_key:e},r){const n=e(r);return t.has(n)?t.get(n):(t.set(n,r),r)}function S_({_intern:t,_key:e},r){const n=e(r);return t.has(n)&&(r=t.get(n),t.delete(n)),r}function A_(t){return t!==null&&typeof t=="object"?t.valueOf():t}function io(t){return t}function M_(t,...e){return ao(t,io,io,e)}function L_(t,...e){return ao(t,Array.from,io,e)}function 
R_(t,e){for(let r=1,n=e.length;r<n;++r)t=t.flatMap(i=>i.pop().map(([a,s])=>[...i,a,s]));return t}function zR(t,...e){return R_(L_(t,...e),e)}function YR(t,e,...r){return R_(N_(t,e,...r),r)}function I_(t,e,...r){return ao(t,io,e,r)}function N_(t,e,...r){return ao(t,Array.from,e,r)}function UR(t,...e){return ao(t,io,B_,e)}function WR(t,...e){return ao(t,Array.from,B_,e)}function B_(t){if(t.length!==1)throw new Error("duplicate key");return t[0]}function ao(t,e,r,n){return function i(a,s){if(s>=n.length)return r(a);const o=new kl,l=n[s++];let u=-1;for(const h of a){const d=l(h,++u,a),f=o.get(d);f?f.push(h):o.set(d,[h])}for(const[h,d]of o)o.set(h,i(d,s));return e(o)}(t,0)}function D_(t,e){return Array.from(e,r=>t[r])}function q0(t,...e){if(typeof t[Symbol.iterator]!="function")throw new TypeError("values is not iterable");t=Array.from(t);let[r]=e;if(r&&r.length!==2||e.length>1){const n=Uint32Array.from(t,(i,a)=>a);return e.length>1?(e=e.map(i=>t.map(i)),n.sort((i,a)=>{for(const s of e){const o=so(s[i],s[a]);if(o)return o}})):(r=t.map(r),n.sort((i,a)=>so(r[i],r[a]))),D_(t,n)}return t.sort(V0(r))}function V0(t=Qe){if(t===Qe)return so;if(typeof t!="function")throw new TypeError("compare is not a function");return(e,r)=>{const n=t(e,r);return n||n===0?n:(t(r,r)===0)-(t(e,e)===0)}}function so(t,e){return(t==null||!(t>=t))-(e==null||!(e>=e))||(t<e?-1:t>e?1:0)}function HR(t,e,r){return(e.length!==2?q0(I_(t,e,r),([n,i],[a,s])=>Qe(i,s)||Qe(n,a)):q0(M_(t,r),([n,i],[a,s])=>e(i,s)||Qe(n,a))).map(([n])=>n)}var GR=Array.prototype,jR=GR.slice;function Tu(t){return()=>t}var z0=Math.sqrt(50),Y0=Math.sqrt(10),U0=Math.sqrt(2);function hs(t,e,r){var n,i=-1,a,s,o;if(e=+e,t=+t,r=+r,t===e&&r>0)return[t];if((n=e<t)&&(a=t,t=e,e=a),(o=oo(t,e,r))===0||!isFinite(o))return[];if(o>0){let l=Math.round(t/o),u=Math.round(e/o);for(l*o<t&&++l,u*o>e&&--u,s=new Array(a=u-l+1);++i<a;)s[i]=(l+i)*o}else{o=-o;let l=Math.round(t*o),u=Math.round(e*o);for(l/o<t&&++l,u/o>e&&--u,s=new 
Array(a=u-l+1);++i<a;)s[i]=(l+i)/o}return n&&s.reverse(),s}function oo(t,e,r){var n=(e-t)/Math.max(0,r),i=Math.floor(Math.log(n)/Math.LN10),a=n/Math.pow(10,i);return i>=0?(a>=z0?10:a>=Y0?5:a>=U0?2:1)*Math.pow(10,i):-Math.pow(10,-i)/(a>=z0?10:a>=Y0?5:a>=U0?2:1)}function wl(t,e,r){var n=Math.abs(e-t)/Math.max(0,r),i=Math.pow(10,Math.floor(Math.log(n)/Math.LN10)),a=n/i;return a>=z0?i*=10:a>=Y0?i*=5:a>=U0&&(i*=2),e<t?-i:i}function O_(t,e,r){let n;for(;;){const i=oo(t,e,r);if(i===n||i===0||!isFinite(i))return[t,e];i>0?(t=Math.floor(t/i)*i,e=Math.ceil(e/i)*i):i<0&&(t=Math.ceil(t*i)/i,e=Math.floor(e*i)/i),n=i}}function W0(t){return Math.ceil(Math.log(wu(t))/Math.LN2)+1}function F_(){var t=io,e=xl,r=W0;function n(i){Array.isArray(i)||(i=Array.from(i));var a,s=i.length,o,l,u=new Array(s);for(a=0;a<s;++a)u[a]=t(i[a],a,i);var h=e(u),d=h[0],f=h[1],p=r(u,d,f);if(!Array.isArray(p)){const b=f,x=+p;if(e===xl&&([d,f]=O_(d,f,x)),p=hs(d,f,x),p[0]<=d&&(l=oo(d,f,x)),p[p.length-1]>=f)if(b>=f&&e===xl){const k=oo(d,f,x);isFinite(k)&&(k>0?f=(Math.floor(f/k)+1)*k:k<0&&(f=(Math.ceil(f*-k)+1)/-k))}else p.pop()}for(var m=p.length;p[0]<=d;)p.shift(),--m;for(;p[m-1]>f;)p.pop(),--m;var _=new Array(m+1),y;for(a=0;a<=m;++a)y=_[a]=[],y.x0=a>0?p[a-1]:d,y.x1=a<m?p[a]:f;if(isFinite(l)){if(l>0)for(a=0;a<s;++a)(o=u[a])!=null&&d<=o&&o<=f&&_[Math.min(m,Math.floor((o-d)/l))].push(i[a]);else if(l<0){for(a=0;a<s;++a)if((o=u[a])!=null&&d<=o&&o<=f){const b=Math.floor((d-o)*l);_[Math.min(m,b+(p[b]<=o))].push(i[a])}}}else for(a=0;a<s;++a)(o=u[a])!=null&&d<=o&&o<=f&&_[cs(p,o,0,m)].push(i[a]);return _}return n.value=function(i){return arguments.length?(t=typeof i=="function"?i:Tu(i),n):t},n.domain=function(i){return arguments.length?(e=typeof i=="function"?i:Tu([i[0],i[1]]),n):e},n.thresholds=function(i){return arguments.length?(r=typeof i=="function"?i:Array.isArray(i)?Tu(jR.call(i)):Tu(i),n):r},n}function lo(t,e){let r;if(e===void 0)for(const n of t)n!=null&&(r<n||r===void 0&&n>=n)&&(r=n);else{let n=-1;for(let i 
of t)(i=e(i,++n,t))!=null&&(r<i||r===void 0&&i>=i)&&(r=i)}return r}function H0(t,e){let r,n=-1,i=-1;if(e===void 0)for(const a of t)++i,a!=null&&(r<a||r===void 0&&a>=a)&&(r=a,n=i);else for(let a of t)(a=e(a,++i,t))!=null&&(r<a||r===void 0&&a>=a)&&(r=a,n=i);return n}function Tl(t,e){let r;if(e===void 0)for(const n of t)n!=null&&(r>n||r===void 0&&n>=n)&&(r=n);else{let n=-1;for(let i of t)(i=e(i,++n,t))!=null&&(r>i||r===void 0&&i>=i)&&(r=i)}return r}function G0(t,e){let r,n=-1,i=-1;if(e===void 0)for(const a of t)++i,a!=null&&(r>a||r===void 0&&a>=a)&&(r=a,n=i);else for(let a of t)(a=e(a,++i,t))!=null&&(r>a||r===void 0&&a>=a)&&(r=a,n=i);return n}function Eu(t,e,r=0,n=t.length-1,i){for(i=i===void 0?so:V0(i);n>r;){if(n-r>600){const l=n-r+1,u=e-r+1,h=Math.log(l),d=.5*Math.exp(2*h/3),f=.5*Math.sqrt(h*d*(l-d)/l)*(u-l/2<0?-1:1),p=Math.max(r,Math.floor(e-u*d/l+f)),m=Math.min(n,Math.floor(e+(l-u)*d/l+f));Eu(t,e,p,m,i)}const a=t[e];let s=r,o=n;for(El(t,r,e),i(t[n],a)>0&&El(t,r,n);s<o;){for(El(t,s,o),++s,--o;i(t[s],a)<0;)++s;for(;i(t[o],a)>0;)--o}i(t[r],a)===0?El(t,r,o):(++o,El(t,o,n)),o<=e&&(r=o+1),e<=o&&(n=o-1)}return t}function El(t,e,r){const n=t[e];t[e]=t[r],t[r]=n}function P_(t,e=Qe){let r,n=!1;if(e.length===1){let i;for(const a of t){const s=e(a);(n?Qe(s,i)>0:Qe(s,s)===0)&&(r=a,i=s,n=!0)}}else for(const i of t)(n?e(i,r)>0:e(i,i)===0)&&(r=i,n=!0);return r}function Cl(t,e,r){if(t=Float64Array.from(__(t,r)),!!(n=t.length)){if((e=+e)<=0||n<2)return Tl(t);if(e>=1)return lo(t);var n,i=(n-1)*e,a=Math.floor(i),s=lo(Eu(t,a).subarray(0,a+1)),o=Tl(t.subarray(a+1));return s+(o-s)*(i-a)}}function q_(t,e,r=b_){if(!!(n=t.length)){if((e=+e)<=0||n<2)return+r(t[0],0,t);if(e>=1)return+r(t[n-1],n-1,t);var n,i=(n-1)*e,a=Math.floor(i),s=+r(t[a],a,t),o=+r(t[a+1],a+1,t);return s+(o-s)*(i-a)}}function V_(t,e,r){if(t=Float64Array.from(__(t,r)),!!(n=t.length)){if((e=+e)<=0||n<2)return G0(t);if(e>=1)return H0(t);var 
n,i=Math.floor((n-1)*e),a=(o,l)=>so(t[o],t[l]),s=Eu(Uint32Array.from(t,(o,l)=>l),i,0,n-1,a);return P_(s.subarray(0,i+1),o=>t[o])}}function $R(t,e,r){return Math.ceil((r-e)/(2*(Cl(t,.75)-Cl(t,.25))*Math.pow(wu(t),-1/3)))}function XR(t,e,r){return Math.ceil((r-e)*Math.cbrt(wu(t))/(3.49*E_(t)))}function KR(t,e){let r=0,n=0;if(e===void 0)for(let i of t)i!=null&&(i=+i)>=i&&(++r,n+=i);else{let i=-1;for(let a of t)(a=e(a,++i,t))!=null&&(a=+a)>=a&&(++r,n+=a)}if(r)return n/r}function ZR(t,e){return Cl(t,.5,e)}function QR(t,e){return V_(t,.5,e)}function*JR(t){for(const e of t)yield*e}function j0(t){return Array.from(JR(t))}function tI(t,e){const r=new kl;if(e===void 0)for(let a of t)a!=null&&a>=a&&r.set(a,(r.get(a)||0)+1);else{let a=-1;for(let s of t)(s=e(s,++a,t))!=null&&s>=s&&r.set(s,(r.get(s)||0)+1)}let n,i=0;for(const[a,s]of r)s>i&&(i=s,n=a);return n}function eI(t,e=rI){const r=[];let n,i=!1;for(const a of t)i&&r.push(e(n,a)),n=a,i=!0;return r}function rI(t,e){return[t,e]}function Ca(t,e,r){t=+t,e=+e,r=(i=arguments.length)<2?(e=t,t=0,1):i<3?1:+r;for(var n=-1,i=Math.max(0,Math.ceil((e-t)/r))|0,a=new Array(i);++n<i;)a[n]=t+n*r;return a}function nI(t,e=Qe){if(typeof t[Symbol.iterator]!="function")throw new TypeError("values is not iterable");let r=Array.from(t);const n=new Float64Array(r.length);e.length!==2&&(r=r.map(e),e=Qe);const i=(o,l)=>e(r[o],r[l]);let a,s;return Uint32Array.from(r,(o,l)=>l).sort(e===Qe?(o,l)=>so(r[o],r[l]):V0(i)).forEach((o,l)=>{const u=i(o,a===void 0?o:a);u>=0?((a===void 0||u>0)&&(a=o,s=l),n[o]=s):n[o]=NaN}),n}function iI(t,e=Qe){let r,n=!1;if(e.length===1){let i;for(const a of t){const s=e(a);(n?Qe(s,i)<0:Qe(s,s)===0)&&(r=a,i=s,n=!0)}}else for(const i of t)(n?e(i,r)<0:e(i,i)===0)&&(r=i,n=!0);return r}function z_(t,e=Qe){if(e.length===1)return G0(t,e);let r,n=-1,i=-1;for(const a of t)++i,(n<0?e(a,a)===0:e(a,r)<0)&&(r=a,n=i);return n}function aI(t,e=Qe){if(e.length===1)return H0(t,e);let r,n=-1,i=-1;for(const a of 
t)++i,(n<0?e(a,a)===0:e(a,r)>0)&&(r=a,n=i);return n}function sI(t,e){const r=z_(t,e);return r<0?void 0:r}const oI=Y_(Math.random);function Y_(t){return function(r,n=0,i=r.length){let a=i-(n=+n);for(;a;){const s=t()*a--|0,o=r[a+n];r[a+n]=r[s+n],r[s+n]=o}return r}}function lI(t,e){let r=0;if(e===void 0)for(let n of t)(n=+n)&&(r+=n);else{let n=-1;for(let i of t)(i=+e(i,++n,t))&&(r+=i)}return r}function U_(t){if(!(a=t.length))return[];for(var e=-1,r=Tl(t,cI),n=new Array(r);++e<r;)for(var i=-1,a,s=n[e]=new Array(a);++i<a;)s[i]=t[i][e];return n}function cI(t){return t.length}function uI(){return U_(arguments)}function hI(t,e){if(typeof e!="function")throw new TypeError("test is not a function");let r=-1;for(const n of t)if(!e(n,++r,t))return!1;return!0}function fI(t,e){if(typeof e!="function")throw new TypeError("test is not a function");let r=-1;for(const n of t)if(e(n,++r,t))return!0;return!1}function dI(t,e){if(typeof e!="function")throw new TypeError("test is not a function");const r=[];let n=-1;for(const i of t)e(i,++n,t)&&r.push(i);return r}function pI(t,e){if(typeof t[Symbol.iterator]!="function")throw new TypeError("values is not iterable");if(typeof e!="function")throw new TypeError("mapper is not a function");return Array.from(t,(r,n)=>e(r,n,t))}function gI(t,e,r){if(typeof e!="function")throw new TypeError("reducer is not a function");const n=t[Symbol.iterator]();let i,a,s=-1;if(arguments.length<3){if({done:i,value:r}=n.next(),i)return;++s}for(;{done:i,value:a}=n.next(),!i;)r=e(r,a,++s,t);return r}function yI(t){if(typeof t[Symbol.iterator]!="function")throw new TypeError("values is not iterable");return Array.from(t).reverse()}function mI(t,...e){t=new us(t);for(const r of e)for(const n of r)t.delete(n);return t}function bI(t,e){const r=e[Symbol.iterator](),n=new us;for(const i of t){if(n.has(i))return!1;let a,s;for(;({value:a,done:s}=r.next())&&!s;){if(Object.is(i,a))return!1;n.add(a)}}return!0}function _I(t,...e){t=new us(t),e=e.map(vI);t:for(const r of 
t)for(const n of e)if(!n.has(r)){t.delete(r);continue t}return t}function vI(t){return t instanceof us?t:new us(t)}function W_(t,e){const r=t[Symbol.iterator](),n=new Set;for(const i of e){const a=H_(i);if(n.has(a))continue;let s,o;for(;{value:s,done:o}=r.next();){if(o)return!1;const l=H_(s);if(n.add(l),Object.is(a,l))break}}return!0}function H_(t){return t!==null&&typeof t=="object"?t.valueOf():t}function xI(t,e){return W_(e,t)}function kI(...t){const e=new us;for(const r of t)for(const n of r)e.add(n);return e}function wI(t){return t}var Cu=1,Su=2,$0=3,Sl=4,G_=1e-6;function TI(t){return"translate("+t+",0)"}function EI(t){return"translate(0,"+t+")"}function CI(t){return e=>+t(e)}function SI(t,e){return e=Math.max(0,t.bandwidth()-e*2)/2,t.round()&&(e=Math.round(e)),r=>+t(r)+e}function AI(){return!this.__axis}function Au(t,e){var r=[],n=null,i=null,a=6,s=6,o=3,l=typeof window<"u"&&window.devicePixelRatio>1?0:.5,u=t===Cu||t===Sl?-1:1,h=t===Sl||t===Su?"x":"y",d=t===Cu||t===$0?TI:EI;function f(p){var m=n==null?e.ticks?e.ticks.apply(e,r):e.domain():n,_=i==null?e.tickFormat?e.tickFormat.apply(e,r):wI:i,y=Math.max(a,0)+o,b=e.range(),x=+b[0]+l,k=+b[b.length-1]+l,T=(e.bandwidth?SI:CI)(e.copy(),l),C=p.selection?p.selection():p,M=C.selectAll(".domain").data([null]),S=C.selectAll(".tick").data(m,e).order(),R=S.exit(),A=S.enter().append("g").attr("class","tick"),L=S.select("line"),v=S.select("text");M=M.merge(M.enter().insert("path",".tick").attr("class","domain").attr("stroke","currentColor")),S=S.merge(A),L=L.merge(A.append("line").attr("stroke","currentColor").attr(h+"2",u*a)),v=v.merge(A.append("text").attr("fill","currentColor").attr(h,u*y).attr("dy",t===Cu?"0em":t===$0?"0.71em":"0.32em")),p!==C&&(M=M.transition(p),S=S.transition(p),L=L.transition(p),v=v.transition(p),R=R.transition(p).attr("opacity",G_).attr("transform",function(B){return isFinite(B=T(B))?d(B+l):this.getAttribute("transform")}),A.attr("opacity",G_).attr("transform",function(B){var 
w=this.parentNode.__axis;return d((w&&isFinite(w=w(B))?w:T(B))+l)})),R.remove(),M.attr("d",t===Sl||t===Su?s?"M"+u*s+","+x+"H"+l+"V"+k+"H"+u*s:"M"+l+","+x+"V"+k:s?"M"+x+","+u*s+"V"+l+"H"+k+"V"+u*s:"M"+x+","+l+"H"+k),S.attr("opacity",1).attr("transform",function(B){return d(T(B)+l)}),L.attr(h+"2",u*a),v.attr(h,u*y).text(_),C.filter(AI).attr("fill","none").attr("font-size",10).attr("font-family","sans-serif").attr("text-anchor",t===Su?"start":t===Sl?"end":"middle"),C.each(function(){this.__axis=T})}return f.scale=function(p){return arguments.length?(e=p,f):e},f.ticks=function(){return r=Array.from(arguments),f},f.tickArguments=function(p){return arguments.length?(r=p==null?[]:Array.from(p),f):r.slice()},f.tickValues=function(p){return arguments.length?(n=p==null?null:Array.from(p),f):n&&n.slice()},f.tickFormat=function(p){return arguments.length?(i=p,f):i},f.tickSize=function(p){return arguments.length?(a=s=+p,f):a},f.tickSizeInner=function(p){return arguments.length?(a=+p,f):a},f.tickSizeOuter=function(p){return arguments.length?(s=+p,f):s},f.tickPadding=function(p){return arguments.length?(o=+p,f):o},f.offset=function(p){return arguments.length?(l=+p,f):l},f}function j_(t){return Au(Cu,t)}function MI(t){return Au(Su,t)}function $_(t){return Au($0,t)}function LI(t){return Au(Sl,t)}var RI={value:()=>{}};function fs(){for(var t=0,e=arguments.length,r={},n;t<e;++t){if(!(n=arguments[t]+"")||n in r||/[\s.]/.test(n))throw new Error("illegal type: "+n);r[n]=[]}return new Mu(r)}function Mu(t){this._=t}function II(t,e){return t.trim().split(/^|\s+/).map(function(r){var n="",i=r.indexOf(".");if(i>=0&&(n=r.slice(i+1),r=r.slice(0,i)),r&&!e.hasOwnProperty(r))throw new Error("unknown type: "+r);return{type:r,name:n}})}Mu.prototype=fs.prototype={constructor:Mu,on:function(t,e){var r=this._,n=II(t+"",r),i,a=-1,s=n.length;if(arguments.length<2){for(;++a<s;)if((i=(t=n[a]).type)&&(i=NI(r[i],t.name)))return i;return}if(e!=null&&typeof e!="function")throw new Error("invalid callback: 
"+e);for(;++a<s;)if(i=(t=n[a]).type)r[i]=X_(r[i],t.name,e);else if(e==null)for(i in r)r[i]=X_(r[i],t.name,null);return this},copy:function(){var t={},e=this._;for(var r in e)t[r]=e[r].slice();return new Mu(t)},call:function(t,e){if((i=arguments.length-2)>0)for(var r=new Array(i),n=0,i,a;n<i;++n)r[n]=arguments[n+2];if(!this._.hasOwnProperty(t))throw new Error("unknown type: "+t);for(a=this._[t],n=0,i=a.length;n<i;++n)a[n].value.apply(e,r)},apply:function(t,e,r){if(!this._.hasOwnProperty(t))throw new Error("unknown type: "+t);for(var n=this._[t],i=0,a=n.length;i<a;++i)n[i].value.apply(e,r)}};function NI(t,e){for(var r=0,n=t.length,i;r<n;++r)if((i=t[r]).name===e)return i.value}function X_(t,e,r){for(var n=0,i=t.length;n<i;++n)if(t[n].name===e){t[n]=RI,t=t.slice(0,n).concat(t.slice(n+1));break}return r!=null&&t.push({name:e,value:r}),t}var X0="http://www.w3.org/1999/xhtml";const K0={svg:"http://www.w3.org/2000/svg",xhtml:X0,xlink:"http://www.w3.org/1999/xlink",xml:"http://www.w3.org/XML/1998/namespace",xmlns:"http://www.w3.org/2000/xmlns/"};function Al(t){var e=t+="",r=e.indexOf(":");return r>=0&&(e=t.slice(0,r))!=="xmlns"&&(t=t.slice(r+1)),K0.hasOwnProperty(e)?{space:K0[e],local:t}:t}function BI(t){return function(){var e=this.ownerDocument,r=this.namespaceURI;return r===X0&&e.documentElement.namespaceURI===X0?e.createElement(t):e.createElementNS(r,t)}}function DI(t){return function(){return this.ownerDocument.createElementNS(t.space,t.local)}}function Lu(t){var e=Al(t);return(e.local?DI:BI)(e)}function OI(){}function Ru(t){return t==null?OI:function(){return this.querySelector(t)}}function FI(t){typeof t!="function"&&(t=Ru(t));for(var e=this._groups,r=e.length,n=new Array(r),i=0;i<r;++i)for(var a=e[i],s=a.length,o=n[i]=new Array(s),l,u,h=0;h<s;++h)(l=a[h])&&(u=t.call(l,l.__data__,h,a))&&("__data__"in l&&(u.__data__=l.__data__),o[h]=u);return new $r(n,this._parents)}function K_(t){return t==null?[]:Array.isArray(t)?t:Array.from(t)}function PI(){return[]}function 
Z0(t){return t==null?PI:function(){return this.querySelectorAll(t)}}function qI(t){return function(){return K_(t.apply(this,arguments))}}function VI(t){typeof t=="function"?t=qI(t):t=Z0(t);for(var e=this._groups,r=e.length,n=[],i=[],a=0;a<r;++a)for(var s=e[a],o=s.length,l,u=0;u<o;++u)(l=s[u])&&(n.push(t.call(l,l.__data__,u,s)),i.push(l));return new $r(n,i)}function Q0(t){return function(){return this.matches(t)}}function Z_(t){return function(e){return e.matches(t)}}var zI=Array.prototype.find;function YI(t){return function(){return zI.call(this.children,t)}}function UI(){return this.firstElementChild}function WI(t){return this.select(t==null?UI:YI(typeof t=="function"?t:Z_(t)))}var HI=Array.prototype.filter;function GI(){return Array.from(this.children)}function jI(t){return function(){return HI.call(this.children,t)}}function $I(t){return this.selectAll(t==null?GI:jI(typeof t=="function"?t:Z_(t)))}function XI(t){typeof t!="function"&&(t=Q0(t));for(var e=this._groups,r=e.length,n=new Array(r),i=0;i<r;++i)for(var a=e[i],s=a.length,o=n[i]=[],l,u=0;u<s;++u)(l=a[u])&&t.call(l,l.__data__,u,a)&&o.push(l);return new $r(n,this._parents)}function Q_(t){return new Array(t.length)}function KI(){return new $r(this._enter||this._groups.map(Q_),this._parents)}function Iu(t,e){this.ownerDocument=t.ownerDocument,this.namespaceURI=t.namespaceURI,this._next=null,this._parent=t,this.__data__=e}Iu.prototype={constructor:Iu,appendChild:function(t){return this._parent.insertBefore(t,this._next)},insertBefore:function(t,e){return this._parent.insertBefore(t,e)},querySelector:function(t){return this._parent.querySelector(t)},querySelectorAll:function(t){return this._parent.querySelectorAll(t)}};function ZI(t){return function(){return t}}function QI(t,e,r,n,i,a){for(var s=0,o,l=e.length,u=a.length;s<u;++s)(o=e[s])?(o.__data__=a[s],n[s]=o):r[s]=new Iu(t,a[s]);for(;s<l;++s)(o=e[s])&&(i[s]=o)}function JI(t,e,r,n,i,a,s){var o,l,u=new Map,h=e.length,d=a.length,f=new 
Array(h),p;for(o=0;o<h;++o)(l=e[o])&&(f[o]=p=s.call(l,l.__data__,o,e)+"",u.has(p)?i[o]=l:u.set(p,l));for(o=0;o<d;++o)p=s.call(t,a[o],o,a)+"",(l=u.get(p))?(n[o]=l,l.__data__=a[o],u.delete(p)):r[o]=new Iu(t,a[o]);for(o=0;o<h;++o)(l=e[o])&&u.get(f[o])===l&&(i[o]=l)}function tN(t){return t.__data__}function eN(t,e){if(!arguments.length)return Array.from(this,tN);var r=e?JI:QI,n=this._parents,i=this._groups;typeof t!="function"&&(t=ZI(t));for(var a=i.length,s=new Array(a),o=new Array(a),l=new Array(a),u=0;u<a;++u){var h=n[u],d=i[u],f=d.length,p=rN(t.call(h,h&&h.__data__,u,n)),m=p.length,_=o[u]=new Array(m),y=s[u]=new Array(m),b=l[u]=new Array(f);r(h,d,_,y,b,p,e);for(var x=0,k=0,T,C;x<m;++x)if(T=_[x]){for(x>=k&&(k=x+1);!(C=y[k])&&++k<m;);T._next=C||null}}return s=new $r(s,n),s._enter=o,s._exit=l,s}function rN(t){return typeof t=="object"&&"length"in t?t:Array.from(t)}function nN(){return new $r(this._exit||this._groups.map(Q_),this._parents)}function iN(t,e,r){var n=this.enter(),i=this,a=this.exit();return typeof t=="function"?(n=t(n),n&&(n=n.selection())):n=n.append(t+""),e!=null&&(i=e(i),i&&(i=i.selection())),r==null?a.remove():r(a),n&&i?n.merge(i).order():i}function aN(t){for(var e=t.selection?t.selection():t,r=this._groups,n=e._groups,i=r.length,a=n.length,s=Math.min(i,a),o=new Array(i),l=0;l<s;++l)for(var u=r[l],h=n[l],d=u.length,f=o[l]=new Array(d),p,m=0;m<d;++m)(p=u[m]||h[m])&&(f[m]=p);for(;l<i;++l)o[l]=r[l];return new $r(o,this._parents)}function sN(){for(var t=this._groups,e=-1,r=t.length;++e<r;)for(var n=t[e],i=n.length-1,a=n[i],s;--i>=0;)(s=n[i])&&(a&&s.compareDocumentPosition(a)^4&&a.parentNode.insertBefore(s,a),a=s);return this}function oN(t){t||(t=lN);function e(d,f){return d&&f?t(d.__data__,f.__data__):!d-!f}for(var r=this._groups,n=r.length,i=new Array(n),a=0;a<n;++a){for(var s=r[a],o=s.length,l=i[a]=new Array(o),u,h=0;h<o;++h)(u=s[h])&&(l[h]=u);l.sort(e)}return new $r(i,this._parents).order()}function lN(t,e){return t<e?-1:t>e?1:t>=e?0:NaN}function 
cN(){var t=arguments[0];return arguments[0]=this,t.apply(null,arguments),this}function uN(){return Array.from(this)}function hN(){for(var t=this._groups,e=0,r=t.length;e<r;++e)for(var n=t[e],i=0,a=n.length;i<a;++i){var s=n[i];if(s)return s}return null}function fN(){let t=0;for(const e of this)++t;return t}function dN(){return!this.node()}function pN(t){for(var e=this._groups,r=0,n=e.length;r<n;++r)for(var i=e[r],a=0,s=i.length,o;a<s;++a)(o=i[a])&&t.call(o,o.__data__,a,i);return this}function gN(t){return function(){this.removeAttribute(t)}}function yN(t){return function(){this.removeAttributeNS(t.space,t.local)}}function mN(t,e){return function(){this.setAttribute(t,e)}}function bN(t,e){return function(){this.setAttributeNS(t.space,t.local,e)}}function _N(t,e){return function(){var r=e.apply(this,arguments);r==null?this.removeAttribute(t):this.setAttribute(t,r)}}function vN(t,e){return function(){var r=e.apply(this,arguments);r==null?this.removeAttributeNS(t.space,t.local):this.setAttributeNS(t.space,t.local,r)}}function xN(t,e){var r=Al(t);if(arguments.length<2){var n=this.node();return r.local?n.getAttributeNS(r.space,r.local):n.getAttribute(r)}return this.each((e==null?r.local?yN:gN:typeof e=="function"?r.local?vN:_N:r.local?bN:mN)(r,e))}function J0(t){return t.ownerDocument&&t.ownerDocument.defaultView||t.document&&t||t.defaultView}function kN(t){return function(){this.style.removeProperty(t)}}function wN(t,e,r){return function(){this.style.setProperty(t,e,r)}}function TN(t,e,r){return function(){var n=e.apply(this,arguments);n==null?this.style.removeProperty(t):this.style.setProperty(t,n,r)}}function EN(t,e,r){return arguments.length>1?this.each((e==null?kN:typeof e=="function"?TN:wN)(t,e,r==null?"":r)):ds(this.node(),t)}function ds(t,e){return t.style.getPropertyValue(e)||J0(t).getComputedStyle(t,null).getPropertyValue(e)}function CN(t){return function(){delete this[t]}}function SN(t,e){return function(){this[t]=e}}function AN(t,e){return function(){var 
r=e.apply(this,arguments);r==null?delete this[t]:this[t]=r}}function MN(t,e){return arguments.length>1?this.each((e==null?CN:typeof e=="function"?AN:SN)(t,e)):this.node()[t]}function J_(t){return t.trim().split(/^|\s+/)}function td(t){return t.classList||new t5(t)}function t5(t){this._node=t,this._names=J_(t.getAttribute("class")||"")}t5.prototype={add:function(t){var e=this._names.indexOf(t);e<0&&(this._names.push(t),this._node.setAttribute("class",this._names.join(" ")))},remove:function(t){var e=this._names.indexOf(t);e>=0&&(this._names.splice(e,1),this._node.setAttribute("class",this._names.join(" ")))},contains:function(t){return this._names.indexOf(t)>=0}};function e5(t,e){for(var r=td(t),n=-1,i=e.length;++n<i;)r.add(e[n])}function r5(t,e){for(var r=td(t),n=-1,i=e.length;++n<i;)r.remove(e[n])}function LN(t){return function(){e5(this,t)}}function RN(t){return function(){r5(this,t)}}function IN(t,e){return function(){(e.apply(this,arguments)?e5:r5)(this,t)}}function NN(t,e){var r=J_(t+"");if(arguments.length<2){for(var n=td(this.node()),i=-1,a=r.length;++i<a;)if(!n.contains(r[i]))return!1;return!0}return this.each((typeof e=="function"?IN:e?LN:RN)(r,e))}function BN(){this.textContent=""}function DN(t){return function(){this.textContent=t}}function ON(t){return function(){var e=t.apply(this,arguments);this.textContent=e==null?"":e}}function FN(t){return arguments.length?this.each(t==null?BN:(typeof t=="function"?ON:DN)(t)):this.node().textContent}function PN(){this.innerHTML=""}function qN(t){return function(){this.innerHTML=t}}function VN(t){return function(){var e=t.apply(this,arguments);this.innerHTML=e==null?"":e}}function zN(t){return arguments.length?this.each(t==null?PN:(typeof t=="function"?VN:qN)(t)):this.node().innerHTML}function YN(){this.nextSibling&&this.parentNode.appendChild(this)}function UN(){return this.each(YN)}function WN(){this.previousSibling&&this.parentNode.insertBefore(this,this.parentNode.firstChild)}function HN(){return 
this.each(WN)}function GN(t){var e=typeof t=="function"?t:Lu(t);return this.select(function(){return this.appendChild(e.apply(this,arguments))})}function jN(){return null}function $N(t,e){var r=typeof t=="function"?t:Lu(t),n=e==null?jN:typeof e=="function"?e:Ru(e);return this.select(function(){return this.insertBefore(r.apply(this,arguments),n.apply(this,arguments)||null)})}function XN(){var t=this.parentNode;t&&t.removeChild(this)}function KN(){return this.each(XN)}function ZN(){var t=this.cloneNode(!1),e=this.parentNode;return e?e.insertBefore(t,this.nextSibling):t}function QN(){var t=this.cloneNode(!0),e=this.parentNode;return e?e.insertBefore(t,this.nextSibling):t}function JN(t){return this.select(t?QN:ZN)}function tB(t){return arguments.length?this.property("__data__",t):this.node().__data__}function eB(t){return function(e){t.call(this,e,this.__data__)}}function rB(t){return t.trim().split(/^|\s+/).map(function(e){var r="",n=e.indexOf(".");return n>=0&&(r=e.slice(n+1),e=e.slice(0,n)),{type:e,name:r}})}function nB(t){return function(){var e=this.__on;if(!!e){for(var r=0,n=-1,i=e.length,a;r<i;++r)a=e[r],(!t.type||a.type===t.type)&&a.name===t.name?this.removeEventListener(a.type,a.listener,a.options):e[++n]=a;++n?e.length=n:delete this.__on}}}function iB(t,e,r){return function(){var n=this.__on,i,a=eB(e);if(n){for(var s=0,o=n.length;s<o;++s)if((i=n[s]).type===t.type&&i.name===t.name){this.removeEventListener(i.type,i.listener,i.options),this.addEventListener(i.type,i.listener=a,i.options=r),i.value=e;return}}this.addEventListener(t.type,a,r),i={type:t.type,name:t.name,value:e,listener:a,options:r},n?n.push(i):this.__on=[i]}}function aB(t,e,r){var n=rB(t+""),i,a=n.length,s;if(arguments.length<2){var o=this.node().__on;if(o){for(var l=0,u=o.length,h;l<u;++l)for(i=0,h=o[l];i<a;++i)if((s=n[i]).type===h.type&&s.name===h.name)return h.value}return}for(o=e?iB:nB,i=0;i<a;++i)this.each(o(n[i],e,r));return this}function n5(t,e,r){var n=J0(t),i=n.CustomEvent;typeof 
i=="function"?i=new i(e,r):(i=n.document.createEvent("Event"),r?(i.initEvent(e,r.bubbles,r.cancelable),i.detail=r.detail):i.initEvent(e,!1,!1)),t.dispatchEvent(i)}function sB(t,e){return function(){return n5(this,t,e)}}function oB(t,e){return function(){return n5(this,t,e.apply(this,arguments))}}function lB(t,e){return this.each((typeof e=="function"?oB:sB)(t,e))}function*cB(){for(var t=this._groups,e=0,r=t.length;e<r;++e)for(var n=t[e],i=0,a=n.length,s;i<a;++i)(s=n[i])&&(yield s)}var ed=[null];function $r(t,e){this._groups=t,this._parents=e}function ps(){return new $r([[document.documentElement]],ed)}function uB(){return this}$r.prototype=ps.prototype={constructor:$r,select:FI,selectAll:VI,selectChild:WI,selectChildren:$I,filter:XI,data:eN,enter:KI,exit:nN,join:iN,merge:aN,selection:uB,order:sN,sort:oN,call:cN,nodes:uN,node:hN,size:fN,empty:dN,each:pN,attr:xN,style:EN,property:MN,classed:NN,text:FN,html:zN,raise:UN,lower:HN,append:GN,insert:$N,remove:KN,clone:JN,datum:tB,on:aB,dispatch:lB,[Symbol.iterator]:cB};function St(t){return typeof t=="string"?new $r([[document.querySelector(t)]],[document.documentElement]):new $r([[t]],ed)}function hB(t){return St(Lu(t).call(document.documentElement))}var fB=0;function i5(){return new rd}function rd(){this._="@"+(++fB).toString(36)}rd.prototype=i5.prototype={constructor:rd,get:function(t){for(var e=this._;!(e in t);)if(!(t=t.parentNode))return;return t[e]},set:function(t,e){return t[this._]=e},remove:function(t){return this._ in t&&delete t[this._]},toString:function(){return this._}};function a5(t){let e;for(;e=t.sourceEvent;)t=e;return t}function Tn(t,e){if(t=a5(t),e===void 0&&(e=t.currentTarget),e){var r=e.ownerSVGElement||e;if(r.createSVGPoint){var n=r.createSVGPoint();return n.x=t.clientX,n.y=t.clientY,n=n.matrixTransform(e.getScreenCTM().inverse()),[n.x,n.y]}if(e.getBoundingClientRect){var i=e.getBoundingClientRect();return[t.clientX-i.left-e.clientLeft,t.clientY-i.top-e.clientTop]}}return[t.pageX,t.pageY]}function 
dB(t,e){return t.target&&(t=a5(t),e===void 0&&(e=t.currentTarget),t=t.touches||[t]),Array.from(t,r=>Tn(r,e))}function Nu(t){return typeof t=="string"?new $r([document.querySelectorAll(t)],[document.documentElement]):new $r([K_(t)],ed)}const pB={passive:!1},Ml={capture:!0,passive:!1};function nd(t){t.stopImmediatePropagation()}function co(t){t.preventDefault(),t.stopImmediatePropagation()}function Bu(t){var e=t.document.documentElement,r=St(t).on("dragstart.drag",co,Ml);"onselectstart"in e?r.on("selectstart.drag",co,Ml):(e.__noselect=e.style.MozUserSelect,e.style.MozUserSelect="none")}function Du(t,e){var r=t.document.documentElement,n=St(t).on("dragstart.drag",null);e&&(n.on("click.drag",co,Ml),setTimeout(function(){n.on("click.drag",null)},0)),"onselectstart"in r?n.on("selectstart.drag",null):(r.style.MozUserSelect=r.__noselect,delete r.__noselect)}const Ou=t=>()=>t;function id(t,{sourceEvent:e,subject:r,target:n,identifier:i,active:a,x:s,y:o,dx:l,dy:u,dispatch:h}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:e,enumerable:!0,configurable:!0},subject:{value:r,enumerable:!0,configurable:!0},target:{value:n,enumerable:!0,configurable:!0},identifier:{value:i,enumerable:!0,configurable:!0},active:{value:a,enumerable:!0,configurable:!0},x:{value:s,enumerable:!0,configurable:!0},y:{value:o,enumerable:!0,configurable:!0},dx:{value:l,enumerable:!0,configurable:!0},dy:{value:u,enumerable:!0,configurable:!0},_:{value:h}})}id.prototype.on=function(){var t=this._.on.apply(this._,arguments);return t===this._?this:t};function gB(t){return!t.ctrlKey&&!t.button}function yB(){return this.parentNode}function mB(t,e){return e==null?{x:t.x,y:t.y}:e}function bB(){return navigator.maxTouchPoints||"ontouchstart"in this}function _B(){var t=gB,e=yB,r=mB,n=bB,i={},a=fs("start","drag","end"),s=0,o,l,u,h,d=0;function f(T){T.on("mousedown.drag",p).filter(n).on("touchstart.drag",y).on("touchmove.drag",b,pB).on("touchend.drag 
touchcancel.drag",x).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}function p(T,C){if(!(h||!t.call(this,T,C))){var M=k(this,e.call(this,T,C),T,C,"mouse");!M||(St(T.view).on("mousemove.drag",m,Ml).on("mouseup.drag",_,Ml),Bu(T.view),nd(T),u=!1,o=T.clientX,l=T.clientY,M("start",T))}}function m(T){if(co(T),!u){var C=T.clientX-o,M=T.clientY-l;u=C*C+M*M>d}i.mouse("drag",T)}function _(T){St(T.view).on("mousemove.drag mouseup.drag",null),Du(T.view,u),co(T),i.mouse("end",T)}function y(T,C){if(!!t.call(this,T,C)){var M=T.changedTouches,S=e.call(this,T,C),R=M.length,A,L;for(A=0;A<R;++A)(L=k(this,S,T,C,M[A].identifier,M[A]))&&(nd(T),L("start",T,M[A]))}}function b(T){var C=T.changedTouches,M=C.length,S,R;for(S=0;S<M;++S)(R=i[C[S].identifier])&&(co(T),R("drag",T,C[S]))}function x(T){var C=T.changedTouches,M=C.length,S,R;for(h&&clearTimeout(h),h=setTimeout(function(){h=null},500),S=0;S<M;++S)(R=i[C[S].identifier])&&(nd(T),R("end",T,C[S]))}function k(T,C,M,S,R,A){var L=a.copy(),v=Tn(A||M,C),B,w,D;if((D=r.call(T,new id("beforestart",{sourceEvent:M,target:f,identifier:R,active:s,x:v[0],y:v[1],dx:0,dy:0,dispatch:L}),S))!=null)return B=D.x-v[0]||0,w=D.y-v[1]||0,function N(z,X,ct){var J=v,Y;switch(z){case"start":i[R]=N,Y=s++;break;case"end":delete i[R],--s;case"drag":v=Tn(ct||X,C),Y=s;break}L.call(z,T,new id(z,{sourceEvent:X,subject:D,target:f,identifier:R,active:Y,x:v[0]+B,y:v[1]+w,dx:v[0]-J[0],dy:v[1]-J[1],dispatch:L}),S)}}return f.filter=function(T){return arguments.length?(t=typeof T=="function"?T:Ou(!!T),f):t},f.container=function(T){return arguments.length?(e=typeof T=="function"?T:Ou(T),f):e},f.subject=function(T){return arguments.length?(r=typeof T=="function"?T:Ou(T),f):r},f.touchable=function(T){return arguments.length?(n=typeof T=="function"?T:Ou(!!T),f):n},f.on=function(){var T=a.on.apply(a,arguments);return T===a?f:T},f.clickDistance=function(T){return arguments.length?(d=(T=+T)*T,f):Math.sqrt(d)},f}function 
uo(t,e,r){t.prototype=e.prototype=r,r.constructor=t}function Ll(t,e){var r=Object.create(t.prototype);for(var n in e)r[n]=e[n];return r}function Sa(){}var gs=.7,ho=1/gs,fo="\\s*([+-]?\\d+)\\s*",Rl="\\s*([+-]?(?:\\d*\\.)?\\d+(?:[eE][+-]?\\d+)?)\\s*",wi="\\s*([+-]?(?:\\d*\\.)?\\d+(?:[eE][+-]?\\d+)?)%\\s*",vB=/^#([0-9a-f]{3,8})$/,xB=new RegExp(`^rgb\\(${fo},${fo},${fo}\\)$`),kB=new RegExp(`^rgb\\(${wi},${wi},${wi}\\)$`),wB=new RegExp(`^rgba\\(${fo},${fo},${fo},${Rl}\\)$`),TB=new RegExp(`^rgba\\(${wi},${wi},${wi},${Rl}\\)$`),EB=new RegExp(`^hsl\\(${Rl},${wi},${wi}\\)$`),CB=new RegExp(`^hsla\\(${Rl},${wi},${wi},${Rl}\\)$`),s5={aliceblue:15792383,antiquewhite:16444375,aqua:65535,aquamarine:8388564,azure:15794175,beige:16119260,bisque:16770244,black:0,blanchedalmond:16772045,blue:255,blueviolet:9055202,brown:10824234,burlywood:14596231,cadetblue:6266528,chartreuse:8388352,chocolate:13789470,coral:16744272,cornflowerblue:6591981,cornsilk:16775388,crimson:14423100,cyan:65535,darkblue:139,darkcyan:35723,darkgoldenrod:12092939,darkgray:11119017,darkgreen:25600,darkgrey:11119017,darkkhaki:12433259,darkmagenta:9109643,darkolivegreen:5597999,darkorange:16747520,darkorchid:10040012,darkred:9109504,darksalmon:15308410,darkseagreen:9419919,darkslateblue:4734347,darkslategray:3100495,darkslategrey:3100495,darkturquoise:52945,darkviolet:9699539,deeppink:16716947,deepskyblue:49151,dimgray:6908265,dimgrey:6908265,dodgerblue:2003199,firebrick:11674146,floralwhite:16775920,forestgreen:2263842,fuchsia:16711935,gainsboro:14474460,ghostwhite:16316671,gold:16766720,goldenrod:14329120,gray:8421504,green:32768,greenyellow:11403055,grey:8421504,honeydew:15794160,hotpink:16738740,indianred:13458524,indigo:4915330,ivory:16777200,khaki:15787660,lavender:15132410,lavenderblush:16773365,lawngreen:8190976,lemonchiffon:16775885,lightblue:11393254,lightcoral:15761536,lightcyan:14745599,lightgoldenrodyellow:16448210,lightgray:13882323,lightgreen:9498256,lightgrey:13882323,lightpink:16758465,lightsalmon:1
6752762,lightseagreen:2142890,lightskyblue:8900346,lightslategray:7833753,lightslategrey:7833753,lightsteelblue:11584734,lightyellow:16777184,lime:65280,limegreen:3329330,linen:16445670,magenta:16711935,maroon:8388608,mediumaquamarine:6737322,mediumblue:205,mediumorchid:12211667,mediumpurple:9662683,mediumseagreen:3978097,mediumslateblue:8087790,mediumspringgreen:64154,mediumturquoise:4772300,mediumvioletred:13047173,midnightblue:1644912,mintcream:16121850,mistyrose:16770273,moccasin:16770229,navajowhite:16768685,navy:128,oldlace:16643558,olive:8421376,olivedrab:7048739,orange:16753920,orangered:16729344,orchid:14315734,palegoldenrod:15657130,palegreen:10025880,paleturquoise:11529966,palevioletred:14381203,papayawhip:16773077,peachpuff:16767673,peru:13468991,pink:16761035,plum:14524637,powderblue:11591910,purple:8388736,rebeccapurple:6697881,red:16711680,rosybrown:12357519,royalblue:4286945,saddlebrown:9127187,salmon:16416882,sandybrown:16032864,seagreen:3050327,seashell:16774638,sienna:10506797,silver:12632256,skyblue:8900331,slateblue:6970061,slategray:7372944,slategrey:7372944,snow:16775930,springgreen:65407,steelblue:4620980,tan:13808780,teal:32896,thistle:14204888,tomato:16737095,turquoise:4251856,violet:15631086,wheat:16113331,white:16777215,whitesmoke:16119285,yellow:16776960,yellowgreen:10145074};uo(Sa,Aa,{copy(t){return Object.assign(new this.constructor,this,t)},displayable(){return this.rgb().displayable()},hex:o5,formatHex:o5,formatHex8:SB,formatHsl:AB,formatRgb:l5,toString:l5});function o5(){return this.rgb().formatHex()}function SB(){return this.rgb().formatHex8()}function AB(){return d5(this).formatHsl()}function l5(){return this.rgb().formatRgb()}function Aa(t){var e,r;return t=(t+"").trim().toLowerCase(),(e=vB.exec(t))?(r=e[1].length,e=parseInt(e[1],16),r===6?c5(e):r===3?new 
Er(e>>8&15|e>>4&240,e>>4&15|e&240,(e&15)<<4|e&15,1):r===8?Fu(e>>24&255,e>>16&255,e>>8&255,(e&255)/255):r===4?Fu(e>>12&15|e>>8&240,e>>8&15|e>>4&240,e>>4&15|e&240,((e&15)<<4|e&15)/255):null):(e=xB.exec(t))?new Er(e[1],e[2],e[3],1):(e=kB.exec(t))?new Er(e[1]*255/100,e[2]*255/100,e[3]*255/100,1):(e=wB.exec(t))?Fu(e[1],e[2],e[3],e[4]):(e=TB.exec(t))?Fu(e[1]*255/100,e[2]*255/100,e[3]*255/100,e[4]):(e=EB.exec(t))?f5(e[1],e[2]/100,e[3]/100,1):(e=CB.exec(t))?f5(e[1],e[2]/100,e[3]/100,e[4]):s5.hasOwnProperty(t)?c5(s5[t]):t==="transparent"?new Er(NaN,NaN,NaN,0):null}function c5(t){return new Er(t>>16&255,t>>8&255,t&255,1)}function Fu(t,e,r,n){return n<=0&&(t=e=r=NaN),new Er(t,e,r,n)}function ad(t){return t instanceof Sa||(t=Aa(t)),t?(t=t.rgb(),new Er(t.r,t.g,t.b,t.opacity)):new Er}function po(t,e,r,n){return arguments.length===1?ad(t):new Er(t,e,r,n==null?1:n)}function Er(t,e,r,n){this.r=+t,this.g=+e,this.b=+r,this.opacity=+n}uo(Er,po,Ll(Sa,{brighter(t){return t=t==null?ho:Math.pow(ho,t),new Er(this.r*t,this.g*t,this.b*t,this.opacity)},darker(t){return t=t==null?gs:Math.pow(gs,t),new Er(this.r*t,this.g*t,this.b*t,this.opacity)},rgb(){return this},clamp(){return new Er(ys(this.r),ys(this.g),ys(this.b),Pu(this.opacity))},displayable(){return-.5<=this.r&&this.r<255.5&&-.5<=this.g&&this.g<255.5&&-.5<=this.b&&this.b<255.5&&0<=this.opacity&&this.opacity<=1},hex:u5,formatHex:u5,formatHex8:MB,formatRgb:h5,toString:h5}));function u5(){return`#${ms(this.r)}${ms(this.g)}${ms(this.b)}`}function MB(){return`#${ms(this.r)}${ms(this.g)}${ms(this.b)}${ms((isNaN(this.opacity)?1:this.opacity)*255)}`}function h5(){const t=Pu(this.opacity);return`${t===1?"rgb(":"rgba("}${ys(this.r)}, ${ys(this.g)}, ${ys(this.b)}${t===1?")":`, ${t})`}`}function Pu(t){return isNaN(t)?1:Math.max(0,Math.min(1,t))}function ys(t){return Math.max(0,Math.min(255,Math.round(t)||0))}function ms(t){return t=ys(t),(t<16?"0":"")+t.toString(16)}function f5(t,e,r,n){return n<=0?t=e=r=NaN:r<=0||r>=1?t=e=NaN:e<=0&&(t=NaN),new 
Kn(t,e,r,n)}function d5(t){if(t instanceof Kn)return new Kn(t.h,t.s,t.l,t.opacity);if(t instanceof Sa||(t=Aa(t)),!t)return new Kn;if(t instanceof Kn)return t;t=t.rgb();var e=t.r/255,r=t.g/255,n=t.b/255,i=Math.min(e,r,n),a=Math.max(e,r,n),s=NaN,o=a-i,l=(a+i)/2;return o?(e===a?s=(r-n)/o+(r<n)*6:r===a?s=(n-e)/o+2:s=(e-r)/o+4,o/=l<.5?a+i:2-a-i,s*=60):o=l>0&&l<1?0:s,new Kn(s,o,l,t.opacity)}function qu(t,e,r,n){return arguments.length===1?d5(t):new Kn(t,e,r,n==null?1:n)}function Kn(t,e,r,n){this.h=+t,this.s=+e,this.l=+r,this.opacity=+n}uo(Kn,qu,Ll(Sa,{brighter(t){return t=t==null?ho:Math.pow(ho,t),new Kn(this.h,this.s,this.l*t,this.opacity)},darker(t){return t=t==null?gs:Math.pow(gs,t),new Kn(this.h,this.s,this.l*t,this.opacity)},rgb(){var t=this.h%360+(this.h<0)*360,e=isNaN(t)||isNaN(this.s)?0:this.s,r=this.l,n=r+(r<.5?r:1-r)*e,i=2*r-n;return new Er(sd(t>=240?t-240:t+120,i,n),sd(t,i,n),sd(t<120?t+240:t-120,i,n),this.opacity)},clamp(){return new Kn(p5(this.h),Vu(this.s),Vu(this.l),Pu(this.opacity))},displayable(){return(0<=this.s&&this.s<=1||isNaN(this.s))&&0<=this.l&&this.l<=1&&0<=this.opacity&&this.opacity<=1},formatHsl(){const t=Pu(this.opacity);return`${t===1?"hsl(":"hsla("}${p5(this.h)}, ${Vu(this.s)*100}%, ${Vu(this.l)*100}%${t===1?")":`, ${t})`}`}}));function p5(t){return t=(t||0)%360,t<0?t+360:t}function Vu(t){return Math.max(0,Math.min(1,t||0))}function sd(t,e,r){return(t<60?e+(r-e)*t/60:t<180?r:t<240?e+(r-e)*(240-t)/60:e)*255}const g5=Math.PI/180,y5=180/Math.PI,zu=18,m5=.96422,b5=1,_5=.82521,v5=4/29,go=6/29,x5=3*go*go,LB=go*go*go;function k5(t){if(t instanceof Zn)return new Zn(t.l,t.a,t.b,t.opacity);if(t instanceof Ti)return T5(t);t instanceof Er||(t=ad(t));var e=ud(t.r),r=ud(t.g),n=ud(t.b),i=od((.2225045*e+.7168786*r+.0606169*n)/b5),a,s;return e===r&&r===n?a=s=i:(a=od((.4360747*e+.3850649*r+.1430804*n)/m5),s=od((.0139322*e+.0971045*r+.7141733*n)/_5)),new Zn(116*i-16,500*(a-i),200*(i-s),t.opacity)}function RB(t,e){return new Zn(t,0,0,e==null?1:e)}function 
Yu(t,e,r,n){return arguments.length===1?k5(t):new Zn(t,e,r,n==null?1:n)}function Zn(t,e,r,n){this.l=+t,this.a=+e,this.b=+r,this.opacity=+n}uo(Zn,Yu,Ll(Sa,{brighter(t){return new Zn(this.l+zu*(t==null?1:t),this.a,this.b,this.opacity)},darker(t){return new Zn(this.l-zu*(t==null?1:t),this.a,this.b,this.opacity)},rgb(){var t=(this.l+16)/116,e=isNaN(this.a)?t:t+this.a/500,r=isNaN(this.b)?t:t-this.b/200;return e=m5*ld(e),t=b5*ld(t),r=_5*ld(r),new Er(cd(3.1338561*e-1.6168667*t-.4906146*r),cd(-.9787684*e+1.9161415*t+.033454*r),cd(.0719453*e-.2289914*t+1.4052427*r),this.opacity)}}));function od(t){return t>LB?Math.pow(t,1/3):t/x5+v5}function ld(t){return t>go?t*t*t:x5*(t-v5)}function cd(t){return 255*(t<=.0031308?12.92*t:1.055*Math.pow(t,1/2.4)-.055)}function ud(t){return(t/=255)<=.04045?t/12.92:Math.pow((t+.055)/1.055,2.4)}function w5(t){if(t instanceof Ti)return new Ti(t.h,t.c,t.l,t.opacity);if(t instanceof Zn||(t=k5(t)),t.a===0&&t.b===0)return new Ti(NaN,0<t.l&&t.l<100?0:NaN,t.l,t.opacity);var e=Math.atan2(t.b,t.a)*y5;return new Ti(e<0?e+360:e,Math.sqrt(t.a*t.a+t.b*t.b),t.l,t.opacity)}function IB(t,e,r,n){return arguments.length===1?w5(t):new Ti(r,e,t,n==null?1:n)}function Uu(t,e,r,n){return arguments.length===1?w5(t):new Ti(t,e,r,n==null?1:n)}function Ti(t,e,r,n){this.h=+t,this.c=+e,this.l=+r,this.opacity=+n}function T5(t){if(isNaN(t.h))return new Zn(t.l,0,0,t.opacity);var e=t.h*g5;return new Zn(t.l,Math.cos(e)*t.c,Math.sin(e)*t.c,t.opacity)}uo(Ti,Uu,Ll(Sa,{brighter(t){return new Ti(this.h,this.c,this.l+zu*(t==null?1:t),this.opacity)},darker(t){return new Ti(this.h,this.c,this.l-zu*(t==null?1:t),this.opacity)},rgb(){return T5(this).rgb()}}));var E5=-.14861,hd=1.78277,fd=-.29227,Wu=-.90649,Il=1.97294,C5=Il*Wu,S5=Il*hd,A5=hd*fd-Wu*E5;function NB(t){if(t instanceof bs)return new bs(t.h,t.s,t.l,t.opacity);t instanceof Er||(t=ad(t));var 
e=t.r/255,r=t.g/255,n=t.b/255,i=(A5*n+C5*e-S5*r)/(A5+C5-S5),a=n-i,s=(Il*(r-i)-fd*a)/Wu,o=Math.sqrt(s*s+a*a)/(Il*i*(1-i)),l=o?Math.atan2(s,a)*y5-120:NaN;return new bs(l<0?l+360:l,o,i,t.opacity)}function Qn(t,e,r,n){return arguments.length===1?NB(t):new bs(t,e,r,n==null?1:n)}function bs(t,e,r,n){this.h=+t,this.s=+e,this.l=+r,this.opacity=+n}uo(bs,Qn,Ll(Sa,{brighter(t){return t=t==null?ho:Math.pow(ho,t),new bs(this.h,this.s,this.l*t,this.opacity)},darker(t){return t=t==null?gs:Math.pow(gs,t),new bs(this.h,this.s,this.l*t,this.opacity)},rgb(){var t=isNaN(this.h)?0:(this.h+120)*g5,e=+this.l,r=isNaN(this.s)?0:this.s*e*(1-e),n=Math.cos(t),i=Math.sin(t);return new Er(255*(e+r*(E5*n+hd*i)),255*(e+r*(fd*n+Wu*i)),255*(e+r*(Il*n)),this.opacity)}}));function M5(t,e,r,n,i){var a=t*t,s=a*t;return((1-3*t+3*a-s)*e+(4-6*a+3*s)*r+(1+3*t+3*a-3*s)*n+s*i)/6}function L5(t){var e=t.length-1;return function(r){var n=r<=0?r=0:r>=1?(r=1,e-1):Math.floor(r*e),i=t[n],a=t[n+1],s=n>0?t[n-1]:2*i-a,o=n<e-1?t[n+2]:2*a-i;return M5((r-n/e)*e,s,i,a,o)}}function R5(t){var e=t.length;return function(r){var n=Math.floor(((r%=1)<0?++r:r)*e),i=t[(n+e-1)%e],a=t[n%e],s=t[(n+1)%e],o=t[(n+2)%e];return M5((r-n/e)*e,i,a,s,o)}}const Hu=t=>()=>t;function I5(t,e){return function(r){return t+r*e}}function BB(t,e,r){return t=Math.pow(t,r),e=Math.pow(e,r)-t,r=1/r,function(n){return Math.pow(t+n*e,r)}}function Gu(t,e){var r=e-t;return r?I5(t,r>180||r<-180?r-360*Math.round(r/360):r):Hu(isNaN(t)?e:t)}function DB(t){return(t=+t)==1?Cr:function(e,r){return r-e?BB(e,r,t):Hu(isNaN(e)?r:e)}}function Cr(t,e){var r=e-t;return r?I5(t,r):Hu(isNaN(t)?e:t)}const Nl=function t(e){var r=DB(e);function n(i,a){var s=r((i=po(i)).r,(a=po(a)).r),o=r(i.g,a.g),l=r(i.b,a.b),u=Cr(i.opacity,a.opacity);return function(h){return i.r=s(h),i.g=o(h),i.b=l(h),i.opacity=u(h),i+""}}return n.gamma=t,n}(1);function N5(t){return function(e){var r=e.length,n=new Array(r),i=new Array(r),a=new 
Array(r),s,o;for(s=0;s<r;++s)o=po(e[s]),n[s]=o.r||0,i[s]=o.g||0,a[s]=o.b||0;return n=t(n),i=t(i),a=t(a),o.opacity=1,function(l){return o.r=n(l),o.g=i(l),o.b=a(l),o+""}}}var B5=N5(L5),OB=N5(R5);function dd(t,e){e||(e=[]);var r=t?Math.min(e.length,t.length):0,n=e.slice(),i;return function(a){for(i=0;i<r;++i)n[i]=t[i]*(1-a)+e[i]*a;return n}}function D5(t){return ArrayBuffer.isView(t)&&!(t instanceof DataView)}function FB(t,e){return(D5(e)?dd:O5)(t,e)}function O5(t,e){var r=e?e.length:0,n=t?Math.min(r,t.length):0,i=new Array(n),a=new Array(r),s;for(s=0;s<n;++s)i[s]=Ma(t[s],e[s]);for(;s<r;++s)a[s]=e[s];return function(o){for(s=0;s<n;++s)a[s]=i[s](o);return a}}function F5(t,e){var r=new Date;return t=+t,e=+e,function(n){return r.setTime(t*(1-n)+e*n),r}}function Bn(t,e){return t=+t,e=+e,function(r){return t*(1-r)+e*r}}function P5(t,e){var r={},n={},i;(t===null||typeof t!="object")&&(t={}),(e===null||typeof e!="object")&&(e={});for(i in e)i in t?r[i]=Ma(t[i],e[i]):n[i]=e[i];return function(a){for(i in r)n[i]=r[i](a);return n}}var pd=/[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g,gd=new RegExp(pd.source,"g");function PB(t){return function(){return t}}function qB(t){return function(e){return t(e)+""}}function yd(t,e){var r=pd.lastIndex=gd.lastIndex=0,n,i,a,s=-1,o=[],l=[];for(t=t+"",e=e+"";(n=pd.exec(t))&&(i=gd.exec(e));)(a=i.index)>r&&(a=e.slice(r,a),o[s]?o[s]+=a:o[++s]=a),(n=n[0])===(i=i[0])?o[s]?o[s]+=i:o[++s]=i:(o[++s]=null,l.push({i:s,x:Bn(n,i)})),r=gd.lastIndex;return r<e.length&&(a=e.slice(r),o[s]?o[s]+=a:o[++s]=a),o.length<2?l[0]?qB(l[0].x):PB(e):(e=l.length,function(u){for(var h=0,d;h<e;++h)o[(d=l[h]).i]=d.x(u);return o.join("")})}function Ma(t,e){var r=typeof e,n;return e==null||r==="boolean"?Hu(e):(r==="number"?Bn:r==="string"?(n=Aa(e))?(e=n,Nl):yd:e instanceof Aa?Nl:e instanceof Date?F5:D5(e)?dd:Array.isArray(e)?O5:typeof e.valueOf!="function"&&typeof e.toString!="function"||isNaN(e)?P5:Bn)(t,e)}function VB(t){var e=t.length;return function(r){return 
t[Math.max(0,Math.min(e-1,Math.floor(r*e)))]}}function zB(t,e){var r=Gu(+t,+e);return function(n){var i=r(n);return i-360*Math.floor(i/360)}}function ju(t,e){return t=+t,e=+e,function(r){return Math.round(t*(1-r)+e*r)}}var q5=180/Math.PI,md={translateX:0,translateY:0,rotate:0,skewX:0,scaleX:1,scaleY:1};function V5(t,e,r,n,i,a){var s,o,l;return(s=Math.sqrt(t*t+e*e))&&(t/=s,e/=s),(l=t*r+e*n)&&(r-=t*l,n-=e*l),(o=Math.sqrt(r*r+n*n))&&(r/=o,n/=o,l/=o),t*n<e*r&&(t=-t,e=-e,l=-l,s=-s),{translateX:i,translateY:a,rotate:Math.atan2(e,t)*q5,skewX:Math.atan(l)*q5,scaleX:s,scaleY:o}}var $u;function YB(t){const e=new(typeof DOMMatrix=="function"?DOMMatrix:WebKitCSSMatrix)(t+"");return e.isIdentity?md:V5(e.a,e.b,e.c,e.d,e.e,e.f)}function UB(t){return t==null||($u||($u=document.createElementNS("http://www.w3.org/2000/svg","g")),$u.setAttribute("transform",t),!(t=$u.transform.baseVal.consolidate()))?md:(t=t.matrix,V5(t.a,t.b,t.c,t.d,t.e,t.f))}function z5(t,e,r,n){function i(u){return u.length?u.pop()+" ":""}function a(u,h,d,f,p,m){if(u!==d||h!==f){var _=p.push("translate(",null,e,null,r);m.push({i:_-4,x:Bn(u,d)},{i:_-2,x:Bn(h,f)})}else(d||f)&&p.push("translate("+d+e+f+r)}function s(u,h,d,f){u!==h?(u-h>180?h+=360:h-u>180&&(u+=360),f.push({i:d.push(i(d)+"rotate(",null,n)-2,x:Bn(u,h)})):h&&d.push(i(d)+"rotate("+h+n)}function o(u,h,d,f){u!==h?f.push({i:d.push(i(d)+"skewX(",null,n)-2,x:Bn(u,h)}):h&&d.push(i(d)+"skewX("+h+n)}function l(u,h,d,f,p,m){if(u!==d||h!==f){var _=p.push(i(p)+"scale(",null,",",null,")");m.push({i:_-4,x:Bn(u,d)},{i:_-2,x:Bn(h,f)})}else(d!==1||f!==1)&&p.push(i(p)+"scale("+d+","+f+")")}return function(u,h){var d=[],f=[];return u=t(u),h=t(h),a(u.translateX,u.translateY,h.translateX,h.translateY,d,f),s(u.rotate,h.rotate,d,f),o(u.skewX,h.skewX,d,f),l(u.scaleX,u.scaleY,h.scaleX,h.scaleY,d,f),u=h=null,function(p){for(var m=-1,_=f.length,y;++m<_;)d[(y=f[m]).i]=y.x(p);return d.join("")}}}var Y5=z5(YB,"px, ","px)","deg)"),U5=z5(UB,", ",")",")"),WB=1e-12;function 
W5(t){return((t=Math.exp(t))+1/t)/2}function HB(t){return((t=Math.exp(t))-1/t)/2}function GB(t){return((t=Math.exp(2*t))-1)/(t+1)}const H5=function t(e,r,n){function i(a,s){var o=a[0],l=a[1],u=a[2],h=s[0],d=s[1],f=s[2],p=h-o,m=d-l,_=p*p+m*m,y,b;if(_<WB)b=Math.log(f/u)/e,y=function(S){return[o+S*p,l+S*m,u*Math.exp(e*S*b)]};else{var x=Math.sqrt(_),k=(f*f-u*u+n*_)/(2*u*r*x),T=(f*f-u*u-n*_)/(2*f*r*x),C=Math.log(Math.sqrt(k*k+1)-k),M=Math.log(Math.sqrt(T*T+1)-T);b=(M-C)/e,y=function(S){var R=S*b,A=W5(C),L=u/(r*x)*(A*GB(e*R+C)-HB(C));return[o+L*p,l+L*m,u*A/W5(e*R+C)]}}return y.duration=b*1e3*e/Math.SQRT2,y}return i.rho=function(a){var s=Math.max(.001,+a),o=s*s,l=o*o;return t(s,o,l)},i}(Math.SQRT2,2,4);function G5(t){return function(e,r){var n=t((e=qu(e)).h,(r=qu(r)).h),i=Cr(e.s,r.s),a=Cr(e.l,r.l),s=Cr(e.opacity,r.opacity);return function(o){return e.h=n(o),e.s=i(o),e.l=a(o),e.opacity=s(o),e+""}}}const jB=G5(Gu);var $B=G5(Cr);function XB(t,e){var r=Cr((t=Yu(t)).l,(e=Yu(e)).l),n=Cr(t.a,e.a),i=Cr(t.b,e.b),a=Cr(t.opacity,e.opacity);return function(s){return t.l=r(s),t.a=n(s),t.b=i(s),t.opacity=a(s),t+""}}function j5(t){return function(e,r){var n=t((e=Uu(e)).h,(r=Uu(r)).h),i=Cr(e.c,r.c),a=Cr(e.l,r.l),s=Cr(e.opacity,r.opacity);return function(o){return e.h=n(o),e.c=i(o),e.l=a(o),e.opacity=s(o),e+""}}}const $5=j5(Gu);var KB=j5(Cr);function X5(t){return function e(r){r=+r;function n(i,a){var s=t((i=Qn(i)).h,(a=Qn(a)).h),o=Cr(i.s,a.s),l=Cr(i.l,a.l),u=Cr(i.opacity,a.opacity);return function(h){return i.h=s(h),i.s=o(h),i.l=l(Math.pow(h,r)),i.opacity=u(h),i+""}}return n.gamma=e,n}(1)}const ZB=X5(Gu);var Xu=X5(Cr);function K5(t,e){e===void 0&&(e=t,t=Ma);for(var r=0,n=e.length-1,i=e[0],a=new Array(n<0?0:n);r<n;)a[r]=t(i,i=e[++r]);return function(s){var o=Math.max(0,Math.min(n-1,Math.floor(s*=n)));return a[o](s-o)}}function QB(t,e){for(var r=new Array(e),n=0;n<e;++n)r[n]=t(n/(e-1));return r}var yo=0,Bl=0,Dl=0,Z5=1e3,Ku,Ol,Zu=0,_s=0,Qu=0,Fl=typeof 
performance=="object"&&performance.now?performance:Date,Q5=typeof window=="object"&&window.requestAnimationFrame?window.requestAnimationFrame.bind(window):function(t){setTimeout(t,17)};function Pl(){return _s||(Q5(JB),_s=Fl.now()+Qu)}function JB(){_s=0}function ql(){this._call=this._time=this._next=null}ql.prototype=Ju.prototype={constructor:ql,restart:function(t,e,r){if(typeof t!="function")throw new TypeError("callback is not a function");r=(r==null?Pl():+r)+(e==null?0:+e),!this._next&&Ol!==this&&(Ol?Ol._next=this:Ku=this,Ol=this),this._call=t,this._time=r,bd()},stop:function(){this._call&&(this._call=null,this._time=1/0,bd())}};function Ju(t,e,r){var n=new ql;return n.restart(t,e,r),n}function J5(){Pl(),++yo;for(var t=Ku,e;t;)(e=_s-t._time)>=0&&t._call.call(void 0,e),t=t._next;--yo}function tv(){_s=(Zu=Fl.now())+Qu,yo=Bl=0;try{J5()}finally{yo=0,eD(),_s=0}}function tD(){var t=Fl.now(),e=t-Zu;e>Z5&&(Qu-=e,Zu=t)}function eD(){for(var t,e=Ku,r,n=1/0;e;)e._call?(n>e._time&&(n=e._time),t=e,e=e._next):(r=e._next,e._next=null,e=t?t._next=r:Ku=r);Ol=t,bd(n)}function bd(t){if(!yo){Bl&&(Bl=clearTimeout(Bl));var e=t-_s;e>24?(t<1/0&&(Bl=setTimeout(tv,t-Fl.now()-Qu)),Dl&&(Dl=clearInterval(Dl))):(Dl||(Zu=Fl.now(),Dl=setInterval(tD,Z5)),yo=1,Q5(tv))}}function _d(t,e,r){var n=new ql;return e=e==null?0:+e,n.restart(i=>{n.stop(),t(i+e)},e,r),n}function rD(t,e,r){var n=new ql,i=e;return e==null?(n.restart(t,e,r),n):(n._restart=n.restart,n.restart=function(a,s,o){s=+s,o=o==null?Pl():+o,n._restart(function l(u){u+=i,n._restart(l,i+=s,o),a(u)},s,o)},n.restart(t,e,r),n)}var nD=fs("start","end","cancel","interrupt"),iD=[],ev=0,vd=1,xd=2,th=3,rv=4,kd=5,eh=6;function rh(t,e,r,n,i,a){var s=t.__transition;if(!s)t.__transition={};else if(r in s)return;aD(t,r,{name:e,index:n,group:i,on:nD,tween:iD,time:a.time,delay:a.delay,duration:a.duration,ease:a.ease,timer:null,state:ev})}function wd(t,e){var r=Jn(t,e);if(r.state>ev)throw new Error("too late; already scheduled");return r}function 
Ei(t,e){var r=Jn(t,e);if(r.state>th)throw new Error("too late; already running");return r}function Jn(t,e){var r=t.__transition;if(!r||!(r=r[e]))throw new Error("transition not found");return r}function aD(t,e,r){var n=t.__transition,i;n[e]=r,r.timer=Ju(a,0,r.time);function a(u){r.state=vd,r.timer.restart(s,r.delay,r.time),r.delay<=u&&s(u-r.delay)}function s(u){var h,d,f,p;if(r.state!==vd)return l();for(h in n)if(p=n[h],p.name===r.name){if(p.state===th)return _d(s);p.state===rv?(p.state=eh,p.timer.stop(),p.on.call("interrupt",t,t.__data__,p.index,p.group),delete n[h]):+h<e&&(p.state=eh,p.timer.stop(),p.on.call("cancel",t,t.__data__,p.index,p.group),delete n[h])}if(_d(function(){r.state===th&&(r.state=rv,r.timer.restart(o,r.delay,r.time),o(u))}),r.state=xd,r.on.call("start",t,t.__data__,r.index,r.group),r.state===xd){for(r.state=th,i=new Array(f=r.tween.length),h=0,d=-1;h<f;++h)(p=r.tween[h].value.call(t,t.__data__,r.index,r.group))&&(i[++d]=p);i.length=d+1}}function o(u){for(var h=u<r.duration?r.ease.call(null,u/r.duration):(r.timer.restart(l),r.state=kd,1),d=-1,f=i.length;++d<f;)i[d].call(t,h);r.state===kd&&(r.on.call("end",t,t.__data__,r.index,r.group),l())}function l(){r.state=eh,r.timer.stop(),delete n[e];for(var u in n)return;delete t.__transition}}function vs(t,e){var r=t.__transition,n,i,a=!0,s;if(!!r){e=e==null?null:e+"";for(s in r){if((n=r[s]).name!==e){a=!1;continue}i=n.state>xd&&n.state<kd,n.state=eh,n.timer.stop(),n.on.call(i?"interrupt":"cancel",t,t.__data__,n.index,n.group),delete r[s]}a&&delete t.__transition}}function sD(t){return this.each(function(){vs(this,t)})}function oD(t,e){var r,n;return function(){var i=Ei(this,t),a=i.tween;if(a!==r){n=r=a;for(var s=0,o=n.length;s<o;++s)if(n[s].name===e){n=n.slice(),n.splice(s,1);break}}i.tween=n}}function lD(t,e,r){var n,i;if(typeof r!="function")throw new Error;return function(){var a=Ei(this,t),s=a.tween;if(s!==n){i=(n=s).slice();for(var 
o={name:e,value:r},l=0,u=i.length;l<u;++l)if(i[l].name===e){i[l]=o;break}l===u&&i.push(o)}a.tween=i}}function cD(t,e){var r=this._id;if(t+="",arguments.length<2){for(var n=Jn(this.node(),r).tween,i=0,a=n.length,s;i<a;++i)if((s=n[i]).name===t)return s.value;return null}return this.each((e==null?oD:lD)(r,t,e))}function Td(t,e,r){var n=t._id;return t.each(function(){var i=Ei(this,n);(i.value||(i.value={}))[e]=r.apply(this,arguments)}),function(i){return Jn(i,n).value[e]}}function nv(t,e){var r;return(typeof e=="number"?Bn:e instanceof Aa?Nl:(r=Aa(e))?(e=r,Nl):yd)(t,e)}function uD(t){return function(){this.removeAttribute(t)}}function hD(t){return function(){this.removeAttributeNS(t.space,t.local)}}function fD(t,e,r){var n,i=r+"",a;return function(){var s=this.getAttribute(t);return s===i?null:s===n?a:a=e(n=s,r)}}function dD(t,e,r){var n,i=r+"",a;return function(){var s=this.getAttributeNS(t.space,t.local);return s===i?null:s===n?a:a=e(n=s,r)}}function pD(t,e,r){var n,i,a;return function(){var s,o=r(this),l;return o==null?void this.removeAttribute(t):(s=this.getAttribute(t),l=o+"",s===l?null:s===n&&l===i?a:(i=l,a=e(n=s,o)))}}function gD(t,e,r){var n,i,a;return function(){var s,o=r(this),l;return o==null?void this.removeAttributeNS(t.space,t.local):(s=this.getAttributeNS(t.space,t.local),l=o+"",s===l?null:s===n&&l===i?a:(i=l,a=e(n=s,o)))}}function yD(t,e){var r=Al(t),n=r==="transform"?U5:nv;return this.attrTween(t,typeof e=="function"?(r.local?gD:pD)(r,n,Td(this,"attr."+t,e)):e==null?(r.local?hD:uD)(r):(r.local?dD:fD)(r,n,e))}function mD(t,e){return function(r){this.setAttribute(t,e.call(this,r))}}function bD(t,e){return function(r){this.setAttributeNS(t.space,t.local,e.call(this,r))}}function _D(t,e){var r,n;function i(){var a=e.apply(this,arguments);return a!==n&&(r=(n=a)&&bD(t,a)),r}return i._value=e,i}function vD(t,e){var r,n;function i(){var a=e.apply(this,arguments);return a!==n&&(r=(n=a)&&mD(t,a)),r}return i._value=e,i}function xD(t,e){var 
r="attr."+t;if(arguments.length<2)return(r=this.tween(r))&&r._value;if(e==null)return this.tween(r,null);if(typeof e!="function")throw new Error;var n=Al(t);return this.tween(r,(n.local?_D:vD)(n,e))}function kD(t,e){return function(){wd(this,t).delay=+e.apply(this,arguments)}}function wD(t,e){return e=+e,function(){wd(this,t).delay=e}}function TD(t){var e=this._id;return arguments.length?this.each((typeof t=="function"?kD:wD)(e,t)):Jn(this.node(),e).delay}function ED(t,e){return function(){Ei(this,t).duration=+e.apply(this,arguments)}}function CD(t,e){return e=+e,function(){Ei(this,t).duration=e}}function SD(t){var e=this._id;return arguments.length?this.each((typeof t=="function"?ED:CD)(e,t)):Jn(this.node(),e).duration}function AD(t,e){if(typeof e!="function")throw new Error;return function(){Ei(this,t).ease=e}}function MD(t){var e=this._id;return arguments.length?this.each(AD(e,t)):Jn(this.node(),e).ease}function LD(t,e){return function(){var r=e.apply(this,arguments);if(typeof r!="function")throw new Error;Ei(this,t).ease=r}}function RD(t){if(typeof t!="function")throw new Error;return this.each(LD(this._id,t))}function ID(t){typeof t!="function"&&(t=Q0(t));for(var e=this._groups,r=e.length,n=new Array(r),i=0;i<r;++i)for(var a=e[i],s=a.length,o=n[i]=[],l,u=0;u<s;++u)(l=a[u])&&t.call(l,l.__data__,u,a)&&o.push(l);return new Ci(n,this._parents,this._name,this._id)}function ND(t){if(t._id!==this._id)throw new Error;for(var e=this._groups,r=t._groups,n=e.length,i=r.length,a=Math.min(n,i),s=new Array(n),o=0;o<a;++o)for(var l=e[o],u=r[o],h=l.length,d=s[o]=new Array(h),f,p=0;p<h;++p)(f=l[p]||u[p])&&(d[p]=f);for(;o<n;++o)s[o]=e[o];return new Ci(s,this._parents,this._name,this._id)}function BD(t){return(t+"").trim().split(/^|\s+/).every(function(e){var r=e.indexOf(".");return r>=0&&(e=e.slice(0,r)),!e||e==="start"})}function DD(t,e,r){var n,i,a=BD(e)?wd:Ei;return function(){var s=a(this,t),o=s.on;o!==n&&(i=(n=o).copy()).on(e,r),s.on=i}}function OD(t,e){var 
r=this._id;return arguments.length<2?Jn(this.node(),r).on.on(t):this.each(DD(r,t,e))}function FD(t){return function(){var e=this.parentNode;for(var r in this.__transition)if(+r!==t)return;e&&e.removeChild(this)}}function PD(){return this.on("end.remove",FD(this._id))}function qD(t){var e=this._name,r=this._id;typeof t!="function"&&(t=Ru(t));for(var n=this._groups,i=n.length,a=new Array(i),s=0;s<i;++s)for(var o=n[s],l=o.length,u=a[s]=new Array(l),h,d,f=0;f<l;++f)(h=o[f])&&(d=t.call(h,h.__data__,f,o))&&("__data__"in h&&(d.__data__=h.__data__),u[f]=d,rh(u[f],e,r,f,u,Jn(h,r)));return new Ci(a,this._parents,e,r)}function VD(t){var e=this._name,r=this._id;typeof t!="function"&&(t=Z0(t));for(var n=this._groups,i=n.length,a=[],s=[],o=0;o<i;++o)for(var l=n[o],u=l.length,h,d=0;d<u;++d)if(h=l[d]){for(var f=t.call(h,h.__data__,d,l),p,m=Jn(h,r),_=0,y=f.length;_<y;++_)(p=f[_])&&rh(p,e,r,_,f,m);a.push(f),s.push(h)}return new Ci(a,s,e,r)}var zD=ps.prototype.constructor;function YD(){return new zD(this._groups,this._parents)}function UD(t,e){var r,n,i;return function(){var a=ds(this,t),s=(this.style.removeProperty(t),ds(this,t));return a===s?null:a===r&&s===n?i:i=e(r=a,n=s)}}function iv(t){return function(){this.style.removeProperty(t)}}function WD(t,e,r){var n,i=r+"",a;return function(){var s=ds(this,t);return s===i?null:s===n?a:a=e(n=s,r)}}function HD(t,e,r){var n,i,a;return function(){var s=ds(this,t),o=r(this),l=o+"";return o==null&&(l=o=(this.style.removeProperty(t),ds(this,t))),s===l?null:s===n&&l===i?a:(i=l,a=e(n=s,o))}}function GD(t,e){var r,n,i,a="style."+e,s="end."+a,o;return function(){var l=Ei(this,t),u=l.on,h=l.value[a]==null?o||(o=iv(e)):void 0;(u!==r||i!==h)&&(n=(r=u).copy()).on(s,i=h),l.on=n}}function jD(t,e,r){var n=(t+="")=="transform"?Y5:nv;return e==null?this.styleTween(t,UD(t,n)).on("end.style."+t,iv(t)):typeof e=="function"?this.styleTween(t,HD(t,n,Td(this,"style."+t,e))).each(GD(this._id,t)):this.styleTween(t,WD(t,n,e),r).on("end.style."+t,null)}function 
$D(t,e,r){return function(n){this.style.setProperty(t,e.call(this,n),r)}}function XD(t,e,r){var n,i;function a(){var s=e.apply(this,arguments);return s!==i&&(n=(i=s)&&$D(t,s,r)),n}return a._value=e,a}function KD(t,e,r){var n="style."+(t+="");if(arguments.length<2)return(n=this.tween(n))&&n._value;if(e==null)return this.tween(n,null);if(typeof e!="function")throw new Error;return this.tween(n,XD(t,e,r==null?"":r))}function ZD(t){return function(){this.textContent=t}}function QD(t){return function(){var e=t(this);this.textContent=e==null?"":e}}function JD(t){return this.tween("text",typeof t=="function"?QD(Td(this,"text",t)):ZD(t==null?"":t+""))}function tO(t){return function(e){this.textContent=t.call(this,e)}}function eO(t){var e,r;function n(){var i=t.apply(this,arguments);return i!==r&&(e=(r=i)&&tO(i)),e}return n._value=t,n}function rO(t){var e="text";if(arguments.length<1)return(e=this.tween(e))&&e._value;if(t==null)return this.tween(e,null);if(typeof t!="function")throw new Error;return this.tween(e,eO(t))}function nO(){for(var t=this._name,e=this._id,r=sv(),n=this._groups,i=n.length,a=0;a<i;++a)for(var s=n[a],o=s.length,l,u=0;u<o;++u)if(l=s[u]){var h=Jn(l,e);rh(l,t,r,u,s,{time:h.time+h.delay+h.duration,delay:0,duration:h.duration,ease:h.ease})}return new Ci(n,this._parents,t,r)}function iO(){var t,e,r=this,n=r._id,i=r.size();return new Promise(function(a,s){var o={value:s},l={value:function(){--i===0&&a()}};r.each(function(){var u=Ei(this,n),h=u.on;h!==t&&(e=(t=h).copy(),e._.cancel.push(o),e._.interrupt.push(o),e._.end.push(l)),u.on=e}),i===0&&a()})}var aO=0;function Ci(t,e,r,n){this._groups=t,this._parents=e,this._name=r,this._id=n}function av(t){return ps().transition(t)}function sv(){return++aO}var 
$i=ps.prototype;Ci.prototype=av.prototype={constructor:Ci,select:qD,selectAll:VD,selectChild:$i.selectChild,selectChildren:$i.selectChildren,filter:ID,merge:ND,selection:YD,transition:nO,call:$i.call,nodes:$i.nodes,node:$i.node,size:$i.size,empty:$i.empty,each:$i.each,on:OD,attr:yD,attrTween:xD,style:jD,styleTween:KD,text:JD,textTween:rO,remove:PD,tween:cD,delay:TD,duration:SD,ease:MD,easeVarying:RD,end:iO,[Symbol.iterator]:$i[Symbol.iterator]};const sO=t=>+t;function oO(t){return t*t}function lO(t){return t*(2-t)}function ov(t){return((t*=2)<=1?t*t:--t*(2-t)+1)/2}function cO(t){return t*t*t}function uO(t){return--t*t*t+1}function Ed(t){return((t*=2)<=1?t*t*t:(t-=2)*t*t+2)/2}var Cd=3,hO=function t(e){e=+e;function r(n){return Math.pow(n,e)}return r.exponent=t,r}(Cd),fO=function t(e){e=+e;function r(n){return 1-Math.pow(1-n,e)}return r.exponent=t,r}(Cd),lv=function t(e){e=+e;function r(n){return((n*=2)<=1?Math.pow(n,e):2-Math.pow(2-n,e))/2}return r.exponent=t,r}(Cd),cv=Math.PI,uv=cv/2;function dO(t){return+t==1?1:1-Math.cos(t*uv)}function pO(t){return Math.sin(t*uv)}function hv(t){return(1-Math.cos(cv*t))/2}function La(t){return(Math.pow(2,-10*t)-.0009765625)*1.0009775171065494}function gO(t){return La(1-+t)}function yO(t){return 1-La(t)}function fv(t){return((t*=2)<=1?La(1-t):2-La(t-1))/2}function mO(t){return 1-Math.sqrt(1-t*t)}function bO(t){return Math.sqrt(1- --t*t)}function dv(t){return((t*=2)<=1?1-Math.sqrt(1-t*t):Math.sqrt(1-(t-=2)*t)+1)/2}var Sd=4/11,_O=6/11,vO=8/11,xO=3/4,kO=9/11,wO=10/11,TO=15/16,EO=21/22,CO=63/64,nh=1/Sd/Sd;function SO(t){return 1-Vl(1-t)}function Vl(t){return(t=+t)<Sd?nh*t*t:t<vO?nh*(t-=_O)*t+xO:t<wO?nh*(t-=kO)*t+TO:nh*(t-=EO)*t+CO}function AO(t){return((t*=2)<=1?1-Vl(1-t):Vl(t-1)+1)/2}var Ad=1.70158,MO=function t(e){e=+e;function r(n){return(n=+n)*n*(e*(n-1)+n)}return r.overshoot=t,r}(Ad),LO=function t(e){e=+e;function r(n){return--n*n*((n+1)*e+n)+1}return r.overshoot=t,r}(Ad),pv=function t(e){e=+e;function 
r(n){return((n*=2)<1?n*n*((e+1)*n-e):(n-=2)*n*((e+1)*n+e)+2)/2}return r.overshoot=t,r}(Ad),mo=2*Math.PI,Md=1,Ld=.3,RO=function t(e,r){var n=Math.asin(1/(e=Math.max(1,e)))*(r/=mo);function i(a){return e*La(- --a)*Math.sin((n-a)/r)}return i.amplitude=function(a){return t(a,r*mo)},i.period=function(a){return t(e,a)},i}(Md,Ld),gv=function t(e,r){var n=Math.asin(1/(e=Math.max(1,e)))*(r/=mo);function i(a){return 1-e*La(a=+a)*Math.sin((a+n)/r)}return i.amplitude=function(a){return t(a,r*mo)},i.period=function(a){return t(e,a)},i}(Md,Ld),IO=function t(e,r){var n=Math.asin(1/(e=Math.max(1,e)))*(r/=mo);function i(a){return((a=a*2-1)<0?e*La(-a)*Math.sin((n-a)/r):2-e*La(a)*Math.sin((n+a)/r))/2}return i.amplitude=function(a){return t(a,r*mo)},i.period=function(a){return t(e,a)},i}(Md,Ld),NO={time:null,delay:0,duration:250,ease:Ed};function BO(t,e){for(var r;!(r=t.__transition)||!(r=r[e]);)if(!(t=t.parentNode))throw new Error(`transition ${e} not found`);return r}function DO(t){var e,r;t instanceof Ci?(e=t._id,t=t._name):(e=sv(),(r=NO).time=Pl(),t=t==null?null:t+"");for(var n=this._groups,i=n.length,a=0;a<i;++a)for(var s=n[a],o=s.length,l,u=0;u<o;++u)(l=s[u])&&rh(l,t,e,u,s,r||BO(l,e));return new Ci(n,this._parents,t,e)}ps.prototype.interrupt=sD,ps.prototype.transition=DO;var OO=[null];function FO(t,e){var r=t.__transition,n,i;if(r){e=e==null?null:e+"";for(i in r)if((n=r[i]).state>vd&&n.name===e)return new Ci([[t]],OO,e,+i)}return null}const Rd=t=>()=>t;function PO(t,{sourceEvent:e,target:r,selection:n,mode:i,dispatch:a}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:e,enumerable:!0,configurable:!0},target:{value:r,enumerable:!0,configurable:!0},selection:{value:n,enumerable:!0,configurable:!0},mode:{value:i,enumerable:!0,configurable:!0},_:{value:a}})}function qO(t){t.stopImmediatePropagation()}function Id(t){t.preventDefault(),t.stopImmediatePropagation()}var 
yv={name:"drag"},Nd={name:"space"},bo={name:"handle"},_o={name:"center"};const{abs:mv,max:Or,min:Fr}=Math;function bv(t){return[+t[0],+t[1]]}function Bd(t){return[bv(t[0]),bv(t[1])]}var ih={name:"x",handles:["w","e"].map(zl),input:function(t,e){return t==null?null:[[+t[0],e[0][1]],[+t[1],e[1][1]]]},output:function(t){return t&&[t[0][0],t[1][0]]}},ah={name:"y",handles:["n","s"].map(zl),input:function(t,e){return t==null?null:[[e[0][0],+t[0]],[e[1][0],+t[1]]]},output:function(t){return t&&[t[0][1],t[1][1]]}},VO={name:"xy",handles:["n","w","e","s","nw","ne","sw","se"].map(zl),input:function(t){return t==null?null:Bd(t)},output:function(t){return t}},Xi={overlay:"crosshair",selection:"move",n:"ns-resize",e:"ew-resize",s:"ns-resize",w:"ew-resize",nw:"nwse-resize",ne:"nesw-resize",se:"nwse-resize",sw:"nesw-resize"},_v={e:"w",w:"e",nw:"ne",ne:"nw",se:"sw",sw:"se"},vv={n:"s",s:"n",nw:"sw",ne:"se",se:"ne",sw:"nw"},zO={overlay:1,selection:1,n:null,e:1,s:null,w:-1,nw:-1,ne:1,se:1,sw:-1},YO={overlay:1,selection:1,n:-1,e:null,s:1,w:null,nw:-1,ne:-1,se:1,sw:1};function zl(t){return{type:t}}function UO(t){return!t.ctrlKey&&!t.button}function WO(){var t=this.ownerSVGElement||this;return t.hasAttribute("viewBox")?(t=t.viewBox.baseVal,[[t.x,t.y],[t.x+t.width,t.y+t.height]]):[[0,0],[t.width.baseVal.value,t.height.baseVal.value]]}function HO(){return navigator.maxTouchPoints||"ontouchstart"in this}function Dd(t){for(;!t.__brush;)if(!(t=t.parentNode))return;return t.__brush}function GO(t){return t[0][0]===t[1][0]||t[0][1]===t[1][1]}function jO(t){var e=t.__brush;return e?e.dim.output(e.selection):null}function $O(){return Od(ih)}function XO(){return Od(ah)}function KO(){return Od(VO)}function Od(t){var e=WO,r=UO,n=HO,i=!0,a=fs("start","brush","end"),s=6,o;function l(y){var b=y.property("__brush",_).selectAll(".overlay").data([zl("overlay")]);b.enter().append("rect").attr("class","overlay").attr("pointer-events","all").attr("cursor",Xi.overlay).merge(b).each(function(){var 
k=Dd(this).extent;St(this).attr("x",k[0][0]).attr("y",k[0][1]).attr("width",k[1][0]-k[0][0]).attr("height",k[1][1]-k[0][1])}),y.selectAll(".selection").data([zl("selection")]).enter().append("rect").attr("class","selection").attr("cursor",Xi.selection).attr("fill","#777").attr("fill-opacity",.3).attr("stroke","#fff").attr("shape-rendering","crispEdges");var x=y.selectAll(".handle").data(t.handles,function(k){return k.type});x.exit().remove(),x.enter().append("rect").attr("class",function(k){return"handle handle--"+k.type}).attr("cursor",function(k){return Xi[k.type]}),y.each(u).attr("fill","none").attr("pointer-events","all").on("mousedown.brush",f).filter(n).on("touchstart.brush",f).on("touchmove.brush",p).on("touchend.brush touchcancel.brush",m).style("touch-action","none").style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}l.move=function(y,b,x){y.tween?y.on("start.brush",function(k){h(this,arguments).beforestart().start(k)}).on("interrupt.brush end.brush",function(k){h(this,arguments).end(k)}).tween("brush",function(){var k=this,T=k.__brush,C=h(k,arguments),M=T.selection,S=t.input(typeof b=="function"?b.apply(this,arguments):b,T.extent),R=Ma(M,S);function A(L){T.selection=L===1&&S===null?null:R(L),u.call(k),C.brush()}return M!==null&&S!==null?A:A(1)}):y.each(function(){var k=this,T=arguments,C=k.__brush,M=t.input(typeof b=="function"?b.apply(k,T):b,C.extent),S=h(k,T).beforestart();vs(k),C.selection=M===null?null:M,u.call(k),S.start(x).brush(x).end(x)})},l.clear=function(y,b){l.move(y,null,b)};function u(){var y=St(this),b=Dd(this).selection;b?(y.selectAll(".selection").style("display",null).attr("x",b[0][0]).attr("y",b[0][1]).attr("width",b[1][0]-b[0][0]).attr("height",b[1][1]-b[0][1]),y.selectAll(".handle").style("display",null).attr("x",function(x){return x.type[x.type.length-1]==="e"?b[1][0]-s/2:b[0][0]-s/2}).attr("y",function(x){return x.type[0]==="s"?b[1][1]-s/2:b[0][1]-s/2}).attr("width",function(x){return 
x.type==="n"||x.type==="s"?b[1][0]-b[0][0]+s:s}).attr("height",function(x){return x.type==="e"||x.type==="w"?b[1][1]-b[0][1]+s:s})):y.selectAll(".selection,.handle").style("display","none").attr("x",null).attr("y",null).attr("width",null).attr("height",null)}function h(y,b,x){var k=y.__brush.emitter;return k&&(!x||!k.clean)?k:new d(y,b,x)}function d(y,b,x){this.that=y,this.args=b,this.state=y.__brush,this.active=0,this.clean=x}d.prototype={beforestart:function(){return++this.active===1&&(this.state.emitter=this,this.starting=!0),this},start:function(y,b){return this.starting?(this.starting=!1,this.emit("start",y,b)):this.emit("brush",y),this},brush:function(y,b){return this.emit("brush",y,b),this},end:function(y,b){return--this.active===0&&(delete this.state.emitter,this.emit("end",y,b)),this},emit:function(y,b,x){var k=St(this.that).datum();a.call(y,this.that,new PO(y,{sourceEvent:b,target:l,selection:t.output(this.state.selection),mode:x,dispatch:a}),k)}};function f(y){if(o&&!y.touches||!r.apply(this,arguments))return;var b=this,x=y.target.__data__.type,k=(i&&y.metaKey?x="overlay":x)==="selection"?yv:i&&y.altKey?_o:bo,T=t===ah?null:zO[x],C=t===ih?null:YO[x],M=Dd(b),S=M.extent,R=M.selection,A=S[0][0],L,v,B=S[0][1],w,D,N=S[1][0],z,X,ct=S[1][1],J,Y,$=0,lt=0,ut,W=T&&C&&i&&y.shiftKey,tt,K,it=Array.from(y.touches||[y],at=>{const It=at.identifier;return at=Tn(at,b),at.point0=at.slice(),at.identifier=It,at});vs(b);var Z=h(b,arguments,!0).beforestart();if(x==="overlay"){R&&(ut=!0);const at=[it[0],it[1]||it[0]];M.selection=R=[[L=t===ah?A:Fr(at[0][0],at[1][0]),w=t===ih?B:Fr(at[0][1],at[1][1])],[z=t===ah?N:Or(at[0][0],at[1][0]),J=t===ih?ct:Or(at[0][1],at[1][1])]],it.length>1&&F(y)}else L=R[0][0],w=R[0][1],z=R[1][0],J=R[1][1];v=L,D=w,X=z,Y=J;var V=St(b).attr("pointer-events","none"),Q=V.selectAll(".overlay").attr("cursor",Xi[x]);if(y.touches)Z.moved=U,Z.ended=j;else{var 
q=St(y.view).on("mousemove.brush",U,!0).on("mouseup.brush",j,!0);i&&q.on("keydown.brush",P,!0).on("keyup.brush",et,!0),Bu(y.view)}u.call(b),Z.start(y,k.name);function U(at){for(const It of at.changedTouches||[at])for(const Lt of it)Lt.identifier===It.identifier&&(Lt.cur=Tn(It,b));if(W&&!tt&&!K&&it.length===1){const It=it[0];mv(It.cur[0]-It[0])>mv(It.cur[1]-It[1])?K=!0:tt=!0}for(const It of it)It.cur&&(It[0]=It.cur[0],It[1]=It.cur[1]);ut=!0,Id(at),F(at)}function F(at){const It=it[0],Lt=It.point0;var Rt;switch($=It[0]-Lt[0],lt=It[1]-Lt[1],k){case Nd:case yv:{T&&($=Or(A-L,Fr(N-z,$)),v=L+$,X=z+$),C&&(lt=Or(B-w,Fr(ct-J,lt)),D=w+lt,Y=J+lt);break}case bo:{it[1]?(T&&(v=Or(A,Fr(N,it[0][0])),X=Or(A,Fr(N,it[1][0])),T=1),C&&(D=Or(B,Fr(ct,it[0][1])),Y=Or(B,Fr(ct,it[1][1])),C=1)):(T<0?($=Or(A-L,Fr(N-L,$)),v=L+$,X=z):T>0&&($=Or(A-z,Fr(N-z,$)),v=L,X=z+$),C<0?(lt=Or(B-w,Fr(ct-w,lt)),D=w+lt,Y=J):C>0&&(lt=Or(B-J,Fr(ct-J,lt)),D=w,Y=J+lt));break}case _o:{T&&(v=Or(A,Fr(N,L-$*T)),X=Or(A,Fr(N,z+$*T))),C&&(D=Or(B,Fr(ct,w-lt*C)),Y=Or(B,Fr(ct,J+lt*C)));break}}X<v&&(T*=-1,Rt=L,L=z,z=Rt,Rt=v,v=X,X=Rt,x in _v&&Q.attr("cursor",Xi[x=_v[x]])),Y<D&&(C*=-1,Rt=w,w=J,J=Rt,Rt=D,D=Y,Y=Rt,x in vv&&Q.attr("cursor",Xi[x=vv[x]])),M.selection&&(R=M.selection),tt&&(v=R[0][0],X=R[1][0]),K&&(D=R[0][1],Y=R[1][1]),(R[0][0]!==v||R[0][1]!==D||R[1][0]!==X||R[1][1]!==Y)&&(M.selection=[[v,D],[X,Y]],u.call(b),Z.brush(at,k.name))}function j(at){if(qO(at),at.touches){if(at.touches.length)return;o&&clearTimeout(o),o=setTimeout(function(){o=null},500)}else Du(at.view,ut),q.on("keydown.brush keyup.brush mousemove.brush mouseup.brush",null);V.attr("pointer-events","all"),Q.attr("cursor",Xi.overlay),M.selection&&(R=M.selection),GO(R)&&(M.selection=null,u.call(b)),Z.end(at,k.name)}function P(at){switch(at.keyCode){case 16:{W=T&&C;break}case 18:{k===bo&&(T&&(z=X-$*T,L=v+$*T),C&&(J=Y-lt*C,w=D+lt*C),k=_o,F(at));break}case 
32:{(k===bo||k===_o)&&(T<0?z=X-$:T>0&&(L=v-$),C<0?J=Y-lt:C>0&&(w=D-lt),k=Nd,Q.attr("cursor",Xi.selection),F(at));break}default:return}Id(at)}function et(at){switch(at.keyCode){case 16:{W&&(tt=K=W=!1,F(at));break}case 18:{k===_o&&(T<0?z=X:T>0&&(L=v),C<0?J=Y:C>0&&(w=D),k=bo,F(at));break}case 32:{k===Nd&&(at.altKey?(T&&(z=X-$*T,L=v+$*T),C&&(J=Y-lt*C,w=D+lt*C),k=_o):(T<0?z=X:T>0&&(L=v),C<0?J=Y:C>0&&(w=D),k=bo),Q.attr("cursor",Xi[x]),F(at));break}default:return}Id(at)}}function p(y){h(this,arguments).moved(y)}function m(y){h(this,arguments).ended(y)}function _(){var y=this.__brush||{selection:null};return y.extent=Bd(e.apply(this,arguments)),y.dim=t,y}return l.extent=function(y){return arguments.length?(e=typeof y=="function"?y:Rd(Bd(y)),l):e},l.filter=function(y){return arguments.length?(r=typeof y=="function"?y:Rd(!!y),l):r},l.touchable=function(y){return arguments.length?(n=typeof y=="function"?y:Rd(!!y),l):n},l.handleSize=function(y){return arguments.length?(s=+y,l):s},l.keyModifiers=function(y){return arguments.length?(i=!!y,l):i},l.on=function(){var y=a.on.apply(a,arguments);return y===a?l:y},l}var xv=Math.abs,vo=Math.cos,xo=Math.sin,kv=Math.PI,sh=kv/2,wv=kv*2,Tv=Math.max,Fd=1e-12;function Pd(t,e){return Array.from({length:e-t},(r,n)=>t+n)}function ZO(t){return function(e,r){return t(e.source.value+e.target.value,r.source.value+r.target.value)}}function QO(){return qd(!1,!1)}function JO(){return qd(!1,!0)}function tF(){return qd(!0,!1)}function qd(t,e){var r=0,n=null,i=null,a=null;function s(o){var l=o.length,u=new Array(l),h=Pd(0,l),d=new Array(l*l),f=new Array(l),p=0,m;o=Float64Array.from({length:l*l},e?(_,y)=>o[y%l][y/l|0]:(_,y)=>o[y/l|0][y%l]);for(let _=0;_<l;++_){let y=0;for(let b=0;b<l;++b)y+=o[_*l+b]+t*o[b*l+_];p+=u[_]=y}p=Tv(0,wv-r*l)/p,m=p?r:wv/l;{let _=0;n&&h.sort((y,b)=>n(u[y],u[b]));for(const y of h){const b=_;if(t){const x=Pd(~l+1,l).filter(k=>k<0?o[~k*l+y]:o[y*l+k]);i&&x.sort((k,T)=>i(k<0?-o[~k*l+y]:o[y*l+k],T<0?-o[~T*l+y]:o[y*l+T]));for(const k of 
x)if(k<0){const T=d[~k*l+y]||(d[~k*l+y]={source:null,target:null});T.target={index:y,startAngle:_,endAngle:_+=o[~k*l+y]*p,value:o[~k*l+y]}}else{const T=d[y*l+k]||(d[y*l+k]={source:null,target:null});T.source={index:y,startAngle:_,endAngle:_+=o[y*l+k]*p,value:o[y*l+k]}}f[y]={index:y,startAngle:b,endAngle:_,value:u[y]}}else{const x=Pd(0,l).filter(k=>o[y*l+k]||o[k*l+y]);i&&x.sort((k,T)=>i(o[y*l+k],o[y*l+T]));for(const k of x){let T;if(y<k?(T=d[y*l+k]||(d[y*l+k]={source:null,target:null}),T.source={index:y,startAngle:_,endAngle:_+=o[y*l+k]*p,value:o[y*l+k]}):(T=d[k*l+y]||(d[k*l+y]={source:null,target:null}),T.target={index:y,startAngle:_,endAngle:_+=o[y*l+k]*p,value:o[y*l+k]},y===k&&(T.source=T.target)),T.source&&T.target&&T.source.value<T.target.value){const C=T.source;T.source=T.target,T.target=C}}f[y]={index:y,startAngle:b,endAngle:_,value:u[y]}}_+=m}}return d=Object.values(d),d.groups=f,a?d.sort(a):d}return s.padAngle=function(o){return arguments.length?(r=Tv(0,o),s):r},s.sortGroups=function(o){return arguments.length?(n=o,s):n},s.sortSubgroups=function(o){return arguments.length?(i=o,s):i},s.sortChords=function(o){return arguments.length?(o==null?a=null:(a=ZO(o))._=o,s):a&&a._},s}const Vd=Math.PI,zd=2*Vd,xs=1e-6,eF=zd-xs;function Yd(){this._x0=this._y0=this._x1=this._y1=null,this._=""}function Ra(){return new Yd}Yd.prototype=Ra.prototype={constructor:Yd,moveTo:function(t,e){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+e)},closePath:function(){this._x1!==null&&(this._x1=this._x0,this._y1=this._y0,this._+="Z")},lineTo:function(t,e){this._+="L"+(this._x1=+t)+","+(this._y1=+e)},quadraticCurveTo:function(t,e,r,n){this._+="Q"+ +t+","+ +e+","+(this._x1=+r)+","+(this._y1=+n)},bezierCurveTo:function(t,e,r,n,i,a){this._+="C"+ +t+","+ +e+","+ +r+","+ +n+","+(this._x1=+i)+","+(this._y1=+a)},arcTo:function(t,e,r,n,i){t=+t,e=+e,r=+r,n=+n,i=+i;var a=this._x1,s=this._y1,o=r-t,l=n-e,u=a-t,h=s-e,d=u*u+h*h;if(i<0)throw new Error("negative radius: 
"+i);if(this._x1===null)this._+="M"+(this._x1=t)+","+(this._y1=e);else if(d>xs)if(!(Math.abs(h*o-l*u)>xs)||!i)this._+="L"+(this._x1=t)+","+(this._y1=e);else{var f=r-a,p=n-s,m=o*o+l*l,_=f*f+p*p,y=Math.sqrt(m),b=Math.sqrt(d),x=i*Math.tan((Vd-Math.acos((m+d-_)/(2*y*b)))/2),k=x/b,T=x/y;Math.abs(k-1)>xs&&(this._+="L"+(t+k*u)+","+(e+k*h)),this._+="A"+i+","+i+",0,0,"+ +(h*f>u*p)+","+(this._x1=t+T*o)+","+(this._y1=e+T*l)}},arc:function(t,e,r,n,i,a){t=+t,e=+e,r=+r,a=!!a;var s=r*Math.cos(n),o=r*Math.sin(n),l=t+s,u=e+o,h=1^a,d=a?n-i:i-n;if(r<0)throw new Error("negative radius: "+r);this._x1===null?this._+="M"+l+","+u:(Math.abs(this._x1-l)>xs||Math.abs(this._y1-u)>xs)&&(this._+="L"+l+","+u),r&&(d<0&&(d=d%zd+zd),d>eF?this._+="A"+r+","+r+",0,1,"+h+","+(t-s)+","+(e-o)+"A"+r+","+r+",0,1,"+h+","+(this._x1=l)+","+(this._y1=u):d>xs&&(this._+="A"+r+","+r+",0,"+ +(d>=Vd)+","+h+","+(this._x1=t+r*Math.cos(i))+","+(this._y1=e+r*Math.sin(i))))},rect:function(t,e,r,n){this._+="M"+(this._x0=this._x1=+t)+","+(this._y0=this._y1=+e)+"h"+ +r+"v"+ +n+"h"+-r+"Z"},toString:function(){return this._}};var rF=Array.prototype.slice;function ks(t){return function(){return t}}function nF(t){return t.source}function iF(t){return t.target}function Ev(t){return t.radius}function aF(t){return t.startAngle}function sF(t){return t.endAngle}function oF(){return 0}function lF(){return 10}function Cv(t){var e=nF,r=iF,n=Ev,i=Ev,a=aF,s=sF,o=oF,l=null;function u(){var h,d=e.apply(this,arguments),f=r.apply(this,arguments),p=o.apply(this,arguments)/2,m=rF.call(arguments),_=+n.apply(this,(m[0]=d,m)),y=a.apply(this,m)-sh,b=s.apply(this,m)-sh,x=+i.apply(this,(m[0]=f,m)),k=a.apply(this,m)-sh,T=s.apply(this,m)-sh;if(l||(l=h=Ra()),p>Fd&&(xv(b-y)>p*2+Fd?b>y?(y+=p,b-=p):(y-=p,b+=p):y=b=(y+b)/2,xv(T-k)>p*2+Fd?T>k?(k+=p,T-=p):(k-=p,T+=p):k=T=(k+T)/2),l.moveTo(_*vo(y),_*xo(y)),l.arc(0,0,_,y,b),y!==k||b!==T)if(t){var 
C=+t.apply(this,arguments),M=x-C,S=(k+T)/2;l.quadraticCurveTo(0,0,M*vo(k),M*xo(k)),l.lineTo(x*vo(S),x*xo(S)),l.lineTo(M*vo(T),M*xo(T))}else l.quadraticCurveTo(0,0,x*vo(k),x*xo(k)),l.arc(0,0,x,k,T);if(l.quadraticCurveTo(0,0,_*vo(y),_*xo(y)),l.closePath(),h)return l=null,h+""||null}return t&&(u.headRadius=function(h){return arguments.length?(t=typeof h=="function"?h:ks(+h),u):t}),u.radius=function(h){return arguments.length?(n=i=typeof h=="function"?h:ks(+h),u):n},u.sourceRadius=function(h){return arguments.length?(n=typeof h=="function"?h:ks(+h),u):n},u.targetRadius=function(h){return arguments.length?(i=typeof h=="function"?h:ks(+h),u):i},u.startAngle=function(h){return arguments.length?(a=typeof h=="function"?h:ks(+h),u):a},u.endAngle=function(h){return arguments.length?(s=typeof h=="function"?h:ks(+h),u):s},u.padAngle=function(h){return arguments.length?(o=typeof h=="function"?h:ks(+h),u):o},u.source=function(h){return arguments.length?(e=h,u):e},u.target=function(h){return arguments.length?(r=h,u):r},u.context=function(h){return arguments.length?(l=h==null?null:h,u):l},u}function cF(){return Cv()}function uF(){return Cv(lF)}var hF=Array.prototype,Sv=hF.slice;function fF(t,e){return t-e}function dF(t){for(var e=0,r=t.length,n=t[r-1][1]*t[0][0]-t[r-1][0]*t[0][1];++e<r;)n+=t[e-1][1]*t[e][0]-t[e-1][0]*t[e][1];return n}const Ia=t=>()=>t;function pF(t,e){for(var r=-1,n=e.length,i;++r<n;)if(i=gF(t,e[r]))return i;return 0}function gF(t,e){for(var r=e[0],n=e[1],i=-1,a=0,s=t.length,o=s-1;a<s;o=a++){var l=t[a],u=l[0],h=l[1],d=t[o],f=d[0],p=d[1];if(yF(l,d,e))return 0;h>n!=p>n&&r<(f-u)*(n-h)/(p-h)+u&&(i=-i)}return i}function yF(t,e,r){var n;return mF(t,e,r)&&bF(t[n=+(t[0]===e[0])],r[n],e[n])}function mF(t,e,r){return(e[0]-t[0])*(r[1]-t[1])===(r[0]-t[0])*(e[1]-t[1])}function bF(t,e,r){return t<=e&&e<=r||r<=e&&e<=t}function _F(){}var 
Ki=[[],[[[1,1.5],[.5,1]]],[[[1.5,1],[1,1.5]]],[[[1.5,1],[.5,1]]],[[[1,.5],[1.5,1]]],[[[1,1.5],[.5,1]],[[1,.5],[1.5,1]]],[[[1,.5],[1,1.5]]],[[[1,.5],[.5,1]]],[[[.5,1],[1,.5]]],[[[1,1.5],[1,.5]]],[[[.5,1],[1,.5]],[[1.5,1],[1,1.5]]],[[[1.5,1],[1,.5]]],[[[.5,1],[1.5,1]]],[[[1,1.5],[1.5,1]]],[[[.5,1],[1,1.5]]],[]];function Ud(){var t=1,e=1,r=W0,n=l;function i(u){var h=r(u);if(Array.isArray(h))h=h.slice().sort(fF);else{const d=xl(u),f=wl(d[0],d[1],h);h=hs(Math.floor(d[0]/f)*f,Math.floor(d[1]/f-1)*f,h)}return h.map(d=>a(u,d))}function a(u,h){var d=[],f=[];return s(u,h,function(p){n(p,u,h),dF(p)>0?d.push([p]):f.push(p)}),f.forEach(function(p){for(var m=0,_=d.length,y;m<_;++m)if(pF((y=d[m])[0],p)!==-1){y.push(p);return}}),{type:"MultiPolygon",value:h,coordinates:d}}function s(u,h,d){var f=new Array,p=new Array,m,_,y,b,x,k;for(m=_=-1,b=u[0]>=h,Ki[b<<1].forEach(T);++m<t-1;)y=b,b=u[m+1]>=h,Ki[y|b<<1].forEach(T);for(Ki[b<<0].forEach(T);++_<e-1;){for(m=-1,b=u[_*t+t]>=h,x=u[_*t]>=h,Ki[b<<1|x<<2].forEach(T);++m<t-1;)y=b,b=u[_*t+t+m+1]>=h,k=x,x=u[_*t+m+1]>=h,Ki[y|b<<1|x<<2|k<<3].forEach(T);Ki[b|x<<3].forEach(T)}for(m=-1,x=u[_*t]>=h,Ki[x<<2].forEach(T);++m<t-1;)k=x,x=u[_*t+m+1]>=h,Ki[x<<2|k<<3].forEach(T);Ki[x<<3].forEach(T);function T(C){var M=[C[0][0]+m,C[0][1]+_],S=[C[1][0]+m,C[1][1]+_],R=o(M),A=o(S),L,v;(L=p[R])?(v=f[A])?(delete p[L.end],delete f[v.start],L===v?(L.ring.push(S),d(L.ring)):f[L.start]=p[v.end]={start:L.start,end:v.end,ring:L.ring.concat(v.ring)}):(delete p[L.end],L.ring.push(S),p[L.end=A]=L):(L=f[A])?(v=p[R])?(delete f[L.start],delete p[v.end],L===v?(L.ring.push(S),d(L.ring)):f[v.start]=p[L.end]={start:v.start,end:L.end,ring:v.ring.concat(L.ring)}):(delete f[L.start],L.ring.unshift(M),f[L.start=R]=L):f[R]=p[A]={start:R,end:A,ring:[M,S]}}}function o(u){return u[0]*2+u[1]*(t+1)*4}function l(u,h,d){u.forEach(function(f){var 
p=f[0],m=f[1],_=p|0,y=m|0,b,x=h[y*t+_];p>0&&p<t&&_===p&&(b=h[y*t+_-1],f[0]=p+(d-b)/(x-b)-.5),m>0&&m<e&&y===m&&(b=h[(y-1)*t+_],f[1]=m+(d-b)/(x-b)-.5)})}return i.contour=a,i.size=function(u){if(!arguments.length)return[t,e];var h=Math.floor(u[0]),d=Math.floor(u[1]);if(!(h>=0&&d>=0))throw new Error("invalid size");return t=h,e=d,i},i.thresholds=function(u){return arguments.length?(r=typeof u=="function"?u:Array.isArray(u)?Ia(Sv.call(u)):Ia(u),i):r},i.smooth=function(u){return arguments.length?(n=u?l:_F,i):n===l},i}function vF(t){return t[0]}function xF(t){return t[1]}function kF(){return 1}function wF(){var t=vF,e=xF,r=kF,n=960,i=500,a=20,s=2,o=a*3,l=n+o*2>>s,u=i+o*2>>s,h=Ia(20);function d(x){var k=new Float32Array(l*u),T=Math.pow(2,-s),C=-1;for(const w of x){var M=(t(w,++C,x)+o)*T,S=(e(w,C,x)+o)*T,R=+r(w,C,x);if(M>=0&&M<l&&S>=0&&S<u){var A=Math.floor(M),L=Math.floor(S),v=M-A-.5,B=S-L-.5;k[A+L*l]+=(1-v)*(1-B)*R,k[A+1+L*l]+=v*(1-B)*R,k[A+1+(L+1)*l]+=v*B*R,k[A+(L+1)*l]+=(1-v)*B*R}}return k_({data:k,width:l,height:u},a*T),k}function f(x){var k=d(x),T=h(k),C=Math.pow(2,2*s);return Array.isArray(T)||(T=hs(Number.MIN_VALUE,lo(k)/C,T)),Ud().size([l,u]).thresholds(T.map(M=>M*C))(k).map((M,S)=>(M.value=+T[S],p(M)))}f.contours=function(x){var k=d(x),T=Ud().size([l,u]),C=Math.pow(2,2*s),M=S=>{S=+S;var R=p(T.contour(k,S*C));return R.value=S,R};return Object.defineProperty(M,"max",{get:()=>lo(k)/C}),M};function p(x){return x.coordinates.forEach(m),x}function m(x){x.forEach(_)}function _(x){x.forEach(y)}function y(x){x[0]=x[0]*Math.pow(2,s)-o,x[1]=x[1]*Math.pow(2,s)-o}function b(){return o=a*3,l=n+o*2>>s,u=i+o*2>>s,f}return f.x=function(x){return arguments.length?(t=typeof x=="function"?x:Ia(+x),f):t},f.y=function(x){return arguments.length?(e=typeof x=="function"?x:Ia(+x),f):e},f.weight=function(x){return arguments.length?(r=typeof x=="function"?x:Ia(+x),f):r},f.size=function(x){if(!arguments.length)return[n,i];var k=+x[0],T=+x[1];if(!(k>=0&&T>=0))throw new Error("invalid 
size");return n=k,i=T,b()},f.cellSize=function(x){if(!arguments.length)return 1<<s;if(!((x=+x)>=1))throw new Error("invalid cell size");return s=Math.floor(Math.log(x)/Math.LN2),b()},f.thresholds=function(x){return arguments.length?(h=typeof x=="function"?x:Array.isArray(x)?Ia(Sv.call(x)):Ia(x),f):h},f.bandwidth=function(x){if(!arguments.length)return Math.sqrt(a*(a+1));if(!((x=+x)>=0))throw new Error("invalid bandwidth");return a=(Math.sqrt(4*x*x+1)-1)/2,b()},f}const Zi=11102230246251565e-32,Pr=134217729,TF=(3+8*Zi)*Zi;function Wd(t,e,r,n,i){let a,s,o,l,u=e[0],h=n[0],d=0,f=0;h>u==h>-u?(a=u,u=e[++d]):(a=h,h=n[++f]);let p=0;if(d<t&&f<r)for(h>u==h>-u?(s=u+a,o=a-(s-u),u=e[++d]):(s=h+a,o=a-(s-h),h=n[++f]),a=s,o!==0&&(i[p++]=o);d<t&&f<r;)h>u==h>-u?(s=a+u,l=s-a,o=a-(s-l)+(u-l),u=e[++d]):(s=a+h,l=s-a,o=a-(s-l)+(h-l),h=n[++f]),a=s,o!==0&&(i[p++]=o);for(;d<t;)s=a+u,l=s-a,o=a-(s-l)+(u-l),u=e[++d],a=s,o!==0&&(i[p++]=o);for(;f<r;)s=a+h,l=s-a,o=a-(s-l)+(h-l),h=n[++f],a=s,o!==0&&(i[p++]=o);return(a!==0||p===0)&&(i[p++]=a),p}function EF(t,e){let r=e[0];for(let n=1;n<t;n++)r+=e[n];return r}function Yl(t){return new Float64Array(t)}const CF=(3+16*Zi)*Zi,SF=(2+12*Zi)*Zi,AF=(9+64*Zi)*Zi*Zi,ko=Yl(4),Av=Yl(8),Mv=Yl(12),Lv=Yl(16),Xr=Yl(4);function MF(t,e,r,n,i,a,s){let o,l,u,h,d,f,p,m,_,y,b,x,k,T,C,M,S,R;const A=t-i,L=r-i,v=e-a,B=n-a;T=A*B,f=Pr*A,p=f-(f-A),m=A-p,f=Pr*B,_=f-(f-B),y=B-_,C=m*y-(T-p*_-m*_-p*y),M=v*L,f=Pr*v,p=f-(f-v),m=v-p,f=Pr*L,_=f-(f-L),y=L-_,S=m*y-(M-p*_-m*_-p*y),b=C-S,d=C-b,ko[0]=C-(b+d)+(d-S),x=T+b,d=x-T,k=T-(x-d)+(b-d),b=k-M,d=k-b,ko[1]=k-(b+d)+(d-M),R=x+b,d=R-x,ko[2]=x-(R-d)+(b-d),ko[3]=R;let w=EF(4,ko),D=SF*s;if(w>=D||-w>=D||(d=t-A,o=t-(A+d)+(d-i),d=r-L,u=r-(L+d)+(d-i),d=e-v,l=e-(v+d)+(d-a),d=n-B,h=n-(B+d)+(d-a),o===0&&l===0&&u===0&&h===0)||(D=AF*s+TF*Math.abs(w),w+=A*h+B*o-(v*u+L*l),w>=D||-w>=D))return 
w;T=o*B,f=Pr*o,p=f-(f-o),m=o-p,f=Pr*B,_=f-(f-B),y=B-_,C=m*y-(T-p*_-m*_-p*y),M=l*L,f=Pr*l,p=f-(f-l),m=l-p,f=Pr*L,_=f-(f-L),y=L-_,S=m*y-(M-p*_-m*_-p*y),b=C-S,d=C-b,Xr[0]=C-(b+d)+(d-S),x=T+b,d=x-T,k=T-(x-d)+(b-d),b=k-M,d=k-b,Xr[1]=k-(b+d)+(d-M),R=x+b,d=R-x,Xr[2]=x-(R-d)+(b-d),Xr[3]=R;const N=Wd(4,ko,4,Xr,Av);T=A*h,f=Pr*A,p=f-(f-A),m=A-p,f=Pr*h,_=f-(f-h),y=h-_,C=m*y-(T-p*_-m*_-p*y),M=v*u,f=Pr*v,p=f-(f-v),m=v-p,f=Pr*u,_=f-(f-u),y=u-_,S=m*y-(M-p*_-m*_-p*y),b=C-S,d=C-b,Xr[0]=C-(b+d)+(d-S),x=T+b,d=x-T,k=T-(x-d)+(b-d),b=k-M,d=k-b,Xr[1]=k-(b+d)+(d-M),R=x+b,d=R-x,Xr[2]=x-(R-d)+(b-d),Xr[3]=R;const z=Wd(N,Av,4,Xr,Mv);T=o*h,f=Pr*o,p=f-(f-o),m=o-p,f=Pr*h,_=f-(f-h),y=h-_,C=m*y-(T-p*_-m*_-p*y),M=l*u,f=Pr*l,p=f-(f-l),m=l-p,f=Pr*u,_=f-(f-u),y=u-_,S=m*y-(M-p*_-m*_-p*y),b=C-S,d=C-b,Xr[0]=C-(b+d)+(d-S),x=T+b,d=x-T,k=T-(x-d)+(b-d),b=k-M,d=k-b,Xr[1]=k-(b+d)+(d-M),R=x+b,d=R-x,Xr[2]=x-(R-d)+(b-d),Xr[3]=R;const X=Wd(z,Mv,4,Xr,Lv);return Lv[X-1]}function oh(t,e,r,n,i,a){const s=(e-a)*(r-i),o=(t-i)*(n-a),l=s-o;if(s===0||o===0||s>0!=o>0)return l;const u=Math.abs(s+o);return Math.abs(l)>=CF*u?l:-MF(t,e,r,n,i,a,u)}const Rv=Math.pow(2,-52),lh=new Uint32Array(512);class ch{static from(e,r=BF,n=DF){const i=e.length,a=new Float64Array(i*2);for(let s=0;s<i;s++){const o=e[s];a[2*s]=r(o),a[2*s+1]=n(o)}return new ch(a)}constructor(e){const r=e.length>>1;if(r>0&&typeof e[0]!="number")throw new Error("Expected coords to contain numbers.");this.coords=e;const n=Math.max(2*r-5,0);this._triangles=new Uint32Array(n*3),this._halfedges=new Int32Array(n*3),this._hashSize=Math.ceil(Math.sqrt(r)),this._hullPrev=new Uint32Array(r),this._hullNext=new Uint32Array(r),this._hullTri=new Uint32Array(r),this._hullHash=new Int32Array(this._hashSize).fill(-1),this._ids=new Uint32Array(r),this._dists=new Float64Array(r),this.update()}update(){const{coords:e,_hullPrev:r,_hullNext:n,_hullTri:i,_hullHash:a}=this,s=e.length>>1;let o=1/0,l=1/0,u=-1/0,h=-1/0;for(let L=0;L<s;L++){const 
v=e[2*L],B=e[2*L+1];v<o&&(o=v),B<l&&(l=B),v>u&&(u=v),B>h&&(h=B),this._ids[L]=L}const d=(o+u)/2,f=(l+h)/2;let p=1/0,m,_,y;for(let L=0;L<s;L++){const v=Hd(d,f,e[2*L],e[2*L+1]);v<p&&(m=L,p=v)}const b=e[2*m],x=e[2*m+1];p=1/0;for(let L=0;L<s;L++){if(L===m)continue;const v=Hd(b,x,e[2*L],e[2*L+1]);v<p&&v>0&&(_=L,p=v)}let k=e[2*_],T=e[2*_+1],C=1/0;for(let L=0;L<s;L++){if(L===m||L===_)continue;const v=IF(b,x,k,T,e[2*L],e[2*L+1]);v<C&&(y=L,C=v)}let M=e[2*y],S=e[2*y+1];if(C===1/0){for(let B=0;B<s;B++)this._dists[B]=e[2*B]-e[0]||e[2*B+1]-e[1];wo(this._ids,this._dists,0,s-1);const L=new Uint32Array(s);let v=0;for(let B=0,w=-1/0;B<s;B++){const D=this._ids[B];this._dists[D]>w&&(L[v++]=D,w=this._dists[D])}this.hull=L.subarray(0,v),this.triangles=new Uint32Array(0),this.halfedges=new Uint32Array(0);return}if(oh(b,x,k,T,M,S)<0){const L=_,v=k,B=T;_=y,k=M,T=S,y=L,M=v,S=B}const R=NF(b,x,k,T,M,S);this._cx=R.x,this._cy=R.y;for(let L=0;L<s;L++)this._dists[L]=Hd(e[2*L],e[2*L+1],R.x,R.y);wo(this._ids,this._dists,0,s-1),this._hullStart=m;let A=3;n[m]=r[y]=_,n[_]=r[m]=y,n[y]=r[_]=m,i[m]=0,i[_]=1,i[y]=2,a.fill(-1),a[this._hashKey(b,x)]=m,a[this._hashKey(k,T)]=_,a[this._hashKey(M,S)]=y,this.trianglesLen=0,this._addTriangle(m,_,y,-1,-1,-1);for(let L=0,v,B;L<this._ids.length;L++){const w=this._ids[L],D=e[2*w],N=e[2*w+1];if(L>0&&Math.abs(D-v)<=Rv&&Math.abs(N-B)<=Rv||(v=D,B=N,w===m||w===_||w===y))continue;let z=0;for(let $=0,lt=this._hashKey(D,N);$<this._hashSize&&(z=a[(lt+$)%this._hashSize],!(z!==-1&&z!==n[z]));$++);z=r[z];let X=z,ct;for(;ct=n[X],oh(D,N,e[2*X],e[2*X+1],e[2*ct],e[2*ct+1])>=0;)if(X=ct,X===z){X=-1;break}if(X===-1)continue;let J=this._addTriangle(X,w,n[X],-1,-1,i[X]);i[w]=this._legalize(J+2),i[X]=J,A++;let 
Y=n[X];for(;ct=n[Y],oh(D,N,e[2*Y],e[2*Y+1],e[2*ct],e[2*ct+1])<0;)J=this._addTriangle(Y,w,ct,i[w],-1,i[Y]),i[w]=this._legalize(J+2),n[Y]=Y,A--,Y=ct;if(X===z)for(;ct=r[X],oh(D,N,e[2*ct],e[2*ct+1],e[2*X],e[2*X+1])<0;)J=this._addTriangle(ct,w,X,-1,i[X],i[ct]),this._legalize(J+2),i[ct]=J,n[X]=X,A--,X=ct;this._hullStart=r[w]=X,n[X]=r[Y]=w,n[w]=Y,a[this._hashKey(D,N)]=w,a[this._hashKey(e[2*X],e[2*X+1])]=X}this.hull=new Uint32Array(A);for(let L=0,v=this._hullStart;L<A;L++)this.hull[L]=v,v=n[v];this.triangles=this._triangles.subarray(0,this.trianglesLen),this.halfedges=this._halfedges.subarray(0,this.trianglesLen)}_hashKey(e,r){return Math.floor(LF(e-this._cx,r-this._cy)*this._hashSize)%this._hashSize}_legalize(e){const{_triangles:r,_halfedges:n,coords:i}=this;let a=0,s=0;for(;;){const o=n[e],l=e-e%3;if(s=l+(e+2)%3,o===-1){if(a===0)break;e=lh[--a];continue}const u=o-o%3,h=l+(e+1)%3,d=u+(o+2)%3,f=r[s],p=r[e],m=r[h],_=r[d];if(RF(i[2*f],i[2*f+1],i[2*p],i[2*p+1],i[2*m],i[2*m+1],i[2*_],i[2*_+1])){r[e]=_,r[o]=f;const b=n[d];if(b===-1){let k=this._hullStart;do{if(this._hullTri[k]===d){this._hullTri[k]=e;break}k=this._hullPrev[k]}while(k!==this._hullStart)}this._link(e,b),this._link(o,n[s]),this._link(s,d);const x=u+(o+1)%3;a<lh.length&&(lh[a++]=x)}else{if(a===0)break;e=lh[--a]}}return s}_link(e,r){this._halfedges[e]=r,r!==-1&&(this._halfedges[r]=e)}_addTriangle(e,r,n,i,a,s){const o=this.trianglesLen;return this._triangles[o]=e,this._triangles[o+1]=r,this._triangles[o+2]=n,this._link(o,i),this._link(o+1,a),this._link(o+2,s),this.trianglesLen+=3,o}}function LF(t,e){const r=t/(Math.abs(t)+Math.abs(e));return(e>0?3-r:1+r)/4}function Hd(t,e,r,n){const i=t-r,a=e-n;return i*i+a*a}function RF(t,e,r,n,i,a,s,o){const l=t-s,u=e-o,h=r-s,d=n-o,f=i-s,p=a-o,m=l*l+u*u,_=h*h+d*d,y=f*f+p*p;return l*(d*y-_*p)-u*(h*y-_*f)+m*(h*p-d*f)<0}function IF(t,e,r,n,i,a){const s=r-t,o=n-e,l=i-t,u=a-e,h=s*s+o*o,d=l*l+u*u,f=.5/(s*u-o*l),p=(u*h-o*d)*f,m=(s*d-l*h)*f;return p*p+m*m}function NF(t,e,r,n,i,a){const 
s=r-t,o=n-e,l=i-t,u=a-e,h=s*s+o*o,d=l*l+u*u,f=.5/(s*u-o*l),p=t+(u*h-o*d)*f,m=e+(s*d-l*h)*f;return{x:p,y:m}}function wo(t,e,r,n){if(n-r<=20)for(let i=r+1;i<=n;i++){const a=t[i],s=e[a];let o=i-1;for(;o>=r&&e[t[o]]>s;)t[o+1]=t[o--];t[o+1]=a}else{const i=r+n>>1;let a=r+1,s=n;Ul(t,i,a),e[t[r]]>e[t[n]]&&Ul(t,r,n),e[t[a]]>e[t[n]]&&Ul(t,a,n),e[t[r]]>e[t[a]]&&Ul(t,r,a);const o=t[a],l=e[o];for(;;){do a++;while(e[t[a]]<l);do s--;while(e[t[s]]>l);if(s<a)break;Ul(t,a,s)}t[r+1]=t[s],t[s]=o,n-a+1>=s-r?(wo(t,e,a,n),wo(t,e,r,s-1)):(wo(t,e,r,s-1),wo(t,e,a,n))}}function Ul(t,e,r){const n=t[e];t[e]=t[r],t[r]=n}function BF(t){return t[0]}function DF(t){return t[1]}const Iv=1e-6;class ws{constructor(){this._x0=this._y0=this._x1=this._y1=null,this._=""}moveTo(e,r){this._+=`M${this._x0=this._x1=+e},${this._y0=this._y1=+r}`}closePath(){this._x1!==null&&(this._x1=this._x0,this._y1=this._y0,this._+="Z")}lineTo(e,r){this._+=`L${this._x1=+e},${this._y1=+r}`}arc(e,r,n){e=+e,r=+r,n=+n;const i=e+n,a=r;if(n<0)throw new Error("negative radius");this._x1===null?this._+=`M${i},${a}`:(Math.abs(this._x1-i)>Iv||Math.abs(this._y1-a)>Iv)&&(this._+="L"+i+","+a),n&&(this._+=`A${n},${n},0,1,1,${e-n},${r}A${n},${n},0,1,1,${this._x1=i},${this._y1=a}`)}rect(e,r,n,i){this._+=`M${this._x0=this._x1=+e},${this._y0=this._y1=+r}h${+n}v${+i}h${-n}Z`}value(){return this._||null}}class Gd{constructor(){this._=[]}moveTo(e,r){this._.push([e,r])}closePath(){this._.push(this._[0].slice())}lineTo(e,r){this._.push([e,r])}value(){return this._.length?this._:null}}class Nv{constructor(e,[r,n,i,a]=[0,0,960,500]){if(!((i=+i)>=(r=+r))||!((a=+a)>=(n=+n)))throw new Error("invalid bounds");this.delaunay=e,this._circumcenters=new Float64Array(e.points.length*2),this.vectors=new Float64Array(e.points.length*2),this.xmax=i,this.xmin=r,this.ymax=a,this.ymin=n,this._init()}update(){return 
this.delaunay.update(),this._init(),this}_init(){const{delaunay:{points:e,hull:r,triangles:n},vectors:i}=this,a=this.circumcenters=this._circumcenters.subarray(0,n.length/3*2);for(let p=0,m=0,_=n.length,y,b;p<_;p+=3,m+=2){const x=n[p]*2,k=n[p+1]*2,T=n[p+2]*2,C=e[x],M=e[x+1],S=e[k],R=e[k+1],A=e[T],L=e[T+1],v=S-C,B=R-M,w=A-C,D=L-M,N=(v*D-B*w)*2;if(Math.abs(N)<1e-9){let z=1e9;const X=n[0]*2;z*=Math.sign((e[X]-C)*D-(e[X+1]-M)*w),y=(C+A)/2-z*D,b=(M+L)/2+z*w}else{const z=1/N,X=v*v+B*B,ct=w*w+D*D;y=C+(D*X-B*ct)*z,b=M+(v*ct-w*X)*z}a[m]=y,a[m+1]=b}let s=r[r.length-1],o,l=s*4,u,h=e[2*s],d,f=e[2*s+1];i.fill(0);for(let p=0;p<r.length;++p)s=r[p],o=l,u=h,d=f,l=s*4,h=e[2*s],f=e[2*s+1],i[o+2]=i[l]=d-f,i[o+3]=i[l+1]=h-u}render(e){const r=e==null?e=new ws:void 0,{delaunay:{halfedges:n,inedges:i,hull:a},circumcenters:s,vectors:o}=this;if(a.length<=1)return null;for(let h=0,d=n.length;h<d;++h){const f=n[h];if(f<h)continue;const p=Math.floor(h/3)*2,m=Math.floor(f/3)*2,_=s[p],y=s[p+1],b=s[m],x=s[m+1];this._renderSegment(_,y,b,x,e)}let l,u=a[a.length-1];for(let h=0;h<a.length;++h){l=u,u=a[h];const d=Math.floor(i[u]/3)*2,f=s[d],p=s[d+1],m=l*4,_=this._project(f,p,o[m+2],o[m+3]);_&&this._renderSegment(f,p,_[0],_[1],e)}return r&&r.value()}renderBounds(e){const r=e==null?e=new ws:void 0;return e.rect(this.xmin,this.ymin,this.xmax-this.xmin,this.ymax-this.ymin),r&&r.value()}renderCell(e,r){const n=r==null?r=new ws:void 0,i=this._clip(e);if(i===null||!i.length)return;r.moveTo(i[0],i[1]);let a=i.length;for(;i[0]===i[a-2]&&i[1]===i[a-1]&&a>1;)a-=2;for(let s=2;s<a;s+=2)(i[s]!==i[s-2]||i[s+1]!==i[s-1])&&r.lineTo(i[s],i[s+1]);return r.closePath(),n&&n.value()}*cellPolygons(){const{delaunay:{points:e}}=this;for(let r=0,n=e.length/2;r<n;++r){const i=this.cellPolygon(r);i&&(i.index=r,yield i)}}cellPolygon(e){const r=new Gd;return this.renderCell(e,r),r.value()}_renderSegment(e,r,n,i,a){let s;const 
o=this._regioncode(e,r),l=this._regioncode(n,i);o===0&&l===0?(a.moveTo(e,r),a.lineTo(n,i)):(s=this._clipSegment(e,r,n,i,o,l))&&(a.moveTo(s[0],s[1]),a.lineTo(s[2],s[3]))}contains(e,r,n){return r=+r,r!==r||(n=+n,n!==n)?!1:this.delaunay._step(e,r,n)===e}*neighbors(e){const r=this._clip(e);if(r)for(const n of this.delaunay.neighbors(e)){const i=this._clip(n);if(i){t:for(let a=0,s=r.length;a<s;a+=2)for(let o=0,l=i.length;o<l;o+=2)if(r[a]==i[o]&&r[a+1]==i[o+1]&&r[(a+2)%s]==i[(o+l-2)%l]&&r[(a+3)%s]==i[(o+l-1)%l]){yield n;break t}}}}_cell(e){const{circumcenters:r,delaunay:{inedges:n,halfedges:i,triangles:a}}=this,s=n[e];if(s===-1)return null;const o=[];let l=s;do{const u=Math.floor(l/3);if(o.push(r[u*2],r[u*2+1]),l=l%3===2?l-2:l+1,a[l]!==e)break;l=i[l]}while(l!==s&&l!==-1);return o}_clip(e){if(e===0&&this.delaunay.hull.length===1)return[this.xmax,this.ymin,this.xmax,this.ymax,this.xmin,this.ymax,this.xmin,this.ymin];const r=this._cell(e);if(r===null)return null;const{vectors:n}=this,i=e*4;return n[i]||n[i+1]?this._clipInfinite(e,r,n[i],n[i+1],n[i+2],n[i+3]):this._clipFinite(e,r)}_clipFinite(e,r){const n=r.length;let i=null,a,s,o=r[n-2],l=r[n-1],u,h=this._regioncode(o,l),d,f=0;for(let p=0;p<n;p+=2)if(a=o,s=l,o=r[p],l=r[p+1],u=h,h=this._regioncode(o,l),u===0&&h===0)d=f,f=0,i?i.push(o,l):i=[o,l];else{let m,_,y,b,x;if(u===0){if((m=this._clipSegment(a,s,o,l,u,h))===null)continue;[_,y,b,x]=m}else{if((m=this._clipSegment(o,l,a,s,h,u))===null)continue;[b,x,_,y]=m,d=f,f=this._edgecode(_,y),d&&f&&this._edge(e,d,f,i,i.length),i?i.push(_,y):i=[_,y]}d=f,f=this._edgecode(b,x),d&&f&&this._edge(e,d,f,i,i.length),i?i.push(b,x):i=[b,x]}if(i)d=f,f=this._edgecode(i[0],i[1]),d&&f&&this._edge(e,d,f,i,i.length);else if(this.contains(e,(this.xmin+this.xmax)/2,(this.ymin+this.ymax)/2))return[this.xmax,this.ymin,this.xmax,this.ymax,this.xmin,this.ymax,this.xmin,this.ymin];return i}_clipSegment(e,r,n,i,a,s){for(;;){if(a===0&&s===0)return[e,r,n,i];if(a&s)return null;let 
o,l,u=a||s;u&8?(o=e+(n-e)*(this.ymax-r)/(i-r),l=this.ymax):u&4?(o=e+(n-e)*(this.ymin-r)/(i-r),l=this.ymin):u&2?(l=r+(i-r)*(this.xmax-e)/(n-e),o=this.xmax):(l=r+(i-r)*(this.xmin-e)/(n-e),o=this.xmin),a?(e=o,r=l,a=this._regioncode(e,r)):(n=o,i=l,s=this._regioncode(n,i))}}_clipInfinite(e,r,n,i,a,s){let o=Array.from(r),l;if((l=this._project(o[0],o[1],n,i))&&o.unshift(l[0],l[1]),(l=this._project(o[o.length-2],o[o.length-1],a,s))&&o.push(l[0],l[1]),o=this._clipFinite(e,o))for(let u=0,h=o.length,d,f=this._edgecode(o[h-2],o[h-1]);u<h;u+=2)d=f,f=this._edgecode(o[u],o[u+1]),d&&f&&(u=this._edge(e,d,f,o,u),h=o.length);else this.contains(e,(this.xmin+this.xmax)/2,(this.ymin+this.ymax)/2)&&(o=[this.xmin,this.ymin,this.xmax,this.ymin,this.xmax,this.ymax,this.xmin,this.ymax]);return o}_edge(e,r,n,i,a){for(;r!==n;){let s,o;switch(r){case 5:r=4;continue;case 4:r=6,s=this.xmax,o=this.ymin;break;case 6:r=2;continue;case 2:r=10,s=this.xmax,o=this.ymax;break;case 10:r=8;continue;case 8:r=9,s=this.xmin,o=this.ymax;break;case 9:r=1;continue;case 1:r=5,s=this.xmin,o=this.ymin;break}(i[a]!==s||i[a+1]!==o)&&this.contains(e,s,o)&&(i.splice(a,0,s,o),a+=2)}if(i.length>4)for(let s=0;s<i.length;s+=2){const o=(s+2)%i.length,l=(s+4)%i.length;(i[s]===i[o]&&i[o]===i[l]||i[s+1]===i[o+1]&&i[o+1]===i[l+1])&&(i.splice(o,2),s-=2)}return a}_project(e,r,n,i){let a=1/0,s,o,l;if(i<0){if(r<=this.ymin)return null;(s=(this.ymin-r)/i)<a&&(l=this.ymin,o=e+(a=s)*n)}else if(i>0){if(r>=this.ymax)return null;(s=(this.ymax-r)/i)<a&&(l=this.ymax,o=e+(a=s)*n)}if(n>0){if(e>=this.xmax)return null;(s=(this.xmax-e)/n)<a&&(o=this.xmax,l=r+(a=s)*i)}else if(n<0){if(e<=this.xmin)return null;(s=(this.xmin-e)/n)<a&&(o=this.xmin,l=r+(a=s)*i)}return[o,l]}_edgecode(e,r){return(e===this.xmin?1:e===this.xmax?2:0)|(r===this.ymin?4:r===this.ymax?8:0)}_regioncode(e,r){return(e<this.xmin?1:e>this.xmax?2:0)|(r<this.ymin?4:r>this.ymax?8:0)}}const OF=2*Math.PI,To=Math.pow;function FF(t){return t[0]}function PF(t){return t[1]}function 
qF(t){const{triangles:e,coords:r}=t;for(let n=0;n<e.length;n+=3){const i=2*e[n],a=2*e[n+1],s=2*e[n+2];if((r[s]-r[i])*(r[a+1]-r[i+1])-(r[a]-r[i])*(r[s+1]-r[i+1])>1e-10)return!1}return!0}function VF(t,e,r){return[t+Math.sin(t+e)*r,e+Math.cos(t-e)*r]}class jd{static from(e,r=FF,n=PF,i){return new jd("length"in e?zF(e,r,n,i):Float64Array.from(YF(e,r,n,i)))}constructor(e){this._delaunator=new ch(e),this.inedges=new Int32Array(e.length/2),this._hullIndex=new Int32Array(e.length/2),this.points=this._delaunator.coords,this._init()}update(){return this._delaunator.update(),this._init(),this}_init(){const e=this._delaunator,r=this.points;if(e.hull&&e.hull.length>2&&qF(e)){this.collinear=Int32Array.from({length:r.length/2},(f,p)=>p).sort((f,p)=>r[2*f]-r[2*p]||r[2*f+1]-r[2*p+1]);const l=this.collinear[0],u=this.collinear[this.collinear.length-1],h=[r[2*l],r[2*l+1],r[2*u],r[2*u+1]],d=1e-8*Math.hypot(h[3]-h[1],h[2]-h[0]);for(let f=0,p=r.length/2;f<p;++f){const m=VF(r[2*f],r[2*f+1],d);r[2*f]=m[0],r[2*f+1]=m[1]}this._delaunator=new ch(r)}else delete this.collinear;const n=this.halfedges=this._delaunator.halfedges,i=this.hull=this._delaunator.hull,a=this.triangles=this._delaunator.triangles,s=this.inedges.fill(-1),o=this._hullIndex.fill(-1);for(let l=0,u=n.length;l<u;++l){const h=a[l%3===2?l-2:l+1];(n[l]===-1||s[h]===-1)&&(s[h]=l)}for(let l=0,u=i.length;l<u;++l)o[i[l]]=l;i.length<=2&&i.length>0&&(this.triangles=new Int32Array(3).fill(-1),this.halfedges=new Int32Array(3).fill(-1),this.triangles[0]=i[0],s[i[0]]=1,i.length===2&&(s[i[1]]=0,this.triangles[1]=i[1],this.triangles[2]=i[1]))}voronoi(e){return new Nv(this,e)}*neighbors(e){const{inedges:r,hull:n,_hullIndex:i,halfedges:a,triangles:s,collinear:o}=this;if(o){const d=o.indexOf(e);d>0&&(yield o[d-1]),d<o.length-1&&(yield o[d+1]);return}const l=r[e];if(l===-1)return;let u=l,h=-1;do{if(yield h=s[u],u=u%3===2?u-2:u+1,s[u]!==e)return;if(u=a[u],u===-1){const d=n[(i[e]+1)%n.length];d!==h&&(yield 
d);return}}while(u!==l)}find(e,r,n=0){if(e=+e,e!==e||(r=+r,r!==r))return-1;const i=n;let a;for(;(a=this._step(n,e,r))>=0&&a!==n&&a!==i;)n=a;return a}_step(e,r,n){const{inedges:i,hull:a,_hullIndex:s,halfedges:o,triangles:l,points:u}=this;if(i[e]===-1||!u.length)return(e+1)%(u.length>>1);let h=e,d=To(r-u[e*2],2)+To(n-u[e*2+1],2);const f=i[e];let p=f;do{let m=l[p];const _=To(r-u[m*2],2)+To(n-u[m*2+1],2);if(_<d&&(d=_,h=m),p=p%3===2?p-2:p+1,l[p]!==e)break;if(p=o[p],p===-1){if(p=a[(s[e]+1)%a.length],p!==m&&To(r-u[p*2],2)+To(n-u[p*2+1],2)<d)return p;break}}while(p!==f);return h}render(e){const r=e==null?e=new ws:void 0,{points:n,halfedges:i,triangles:a}=this;for(let s=0,o=i.length;s<o;++s){const l=i[s];if(l<s)continue;const u=a[s]*2,h=a[l]*2;e.moveTo(n[u],n[u+1]),e.lineTo(n[h],n[h+1])}return this.renderHull(e),r&&r.value()}renderPoints(e,r){r===void 0&&(!e||typeof e.moveTo!="function")&&(r=e,e=null),r=r==null?2:+r;const n=e==null?e=new ws:void 0,{points:i}=this;for(let a=0,s=i.length;a<s;a+=2){const o=i[a],l=i[a+1];e.moveTo(o+r,l),e.arc(o,l,r,0,OF)}return n&&n.value()}renderHull(e){const r=e==null?e=new ws:void 0,{hull:n,points:i}=this,a=n[0]*2,s=n.length;e.moveTo(i[a],i[a+1]);for(let o=1;o<s;++o){const l=2*n[o];e.lineTo(i[l],i[l+1])}return e.closePath(),r&&r.value()}hullPolygon(){const e=new Gd;return this.renderHull(e),e.value()}renderTriangle(e,r){const n=r==null?r=new ws:void 0,{points:i,triangles:a}=this,s=a[e*=3]*2,o=a[e+1]*2,l=a[e+2]*2;return r.moveTo(i[s],i[s+1]),r.lineTo(i[o],i[o+1]),r.lineTo(i[l],i[l+1]),r.closePath(),n&&n.value()}*trianglePolygons(){const{triangles:e}=this;for(let r=0,n=e.length/3;r<n;++r)yield this.trianglePolygon(r)}trianglePolygon(e){const r=new Gd;return this.renderTriangle(e,r),r.value()}}function zF(t,e,r,n){const i=t.length,a=new Float64Array(i*2);for(let s=0;s<i;++s){const o=t[s];a[s*2]=e.call(n,o,s,t),a[s*2+1]=r.call(n,o,s,t)}return a}function*YF(t,e,r,n){let i=0;for(const a of t)yield e.call(n,a,i,t),yield r.call(n,a,i,t),++i}var 
Bv={},$d={},Xd=34,Wl=10,Kd=13;function Dv(t){return new Function("d","return {"+t.map(function(e,r){return JSON.stringify(e)+": d["+r+'] || ""'}).join(",")+"}")}function UF(t,e){var r=Dv(t);return function(n,i){return e(r(n),i,t)}}function Ov(t){var e=Object.create(null),r=[];return t.forEach(function(n){for(var i in n)i in e||r.push(e[i]=i)}),r}function dn(t,e){var r=t+"",n=r.length;return n<e?new Array(e-n+1).join(0)+r:r}function WF(t){return t<0?"-"+dn(-t,6):t>9999?"+"+dn(t,6):dn(t,4)}function HF(t){var e=t.getUTCHours(),r=t.getUTCMinutes(),n=t.getUTCSeconds(),i=t.getUTCMilliseconds();return isNaN(t)?"Invalid Date":WF(t.getUTCFullYear())+"-"+dn(t.getUTCMonth()+1,2)+"-"+dn(t.getUTCDate(),2)+(i?"T"+dn(e,2)+":"+dn(r,2)+":"+dn(n,2)+"."+dn(i,3)+"Z":n?"T"+dn(e,2)+":"+dn(r,2)+":"+dn(n,2)+"Z":r||e?"T"+dn(e,2)+":"+dn(r,2)+"Z":"")}function uh(t){var e=new RegExp('["'+t+` -\r]`),r=t.charCodeAt(0);function n(d,f){var p,m,_=i(d,function(y,b){if(p)return p(y,b-1);m=y,p=f?UF(y,f):Dv(y)});return _.columns=m||[],_}function i(d,f){var p=[],m=d.length,_=0,y=0,b,x=m<=0,k=!1;d.charCodeAt(m-1)===Wl&&--m,d.charCodeAt(m-1)===Kd&&--m;function T(){if(x)return $d;if(k)return k=!1,Bv;var M,S=_,R;if(d.charCodeAt(S)===Xd){for(;_++<m&&d.charCodeAt(_)!==Xd||d.charCodeAt(++_)===Xd;);return(M=_)>=m?x=!0:(R=d.charCodeAt(_++))===Wl?k=!0:R===Kd&&(k=!0,d.charCodeAt(_)===Wl&&++_),d.slice(S+1,M-1).replace(/""/g,'"')}for(;_<m;){if((R=d.charCodeAt(M=_++))===Wl)k=!0;else if(R===Kd)k=!0,d.charCodeAt(_)===Wl&&++_;else if(R!==r)continue;return d.slice(S,M)}return x=!0,d.slice(S,m)}for(;(b=T())!==$d;){for(var C=[];b!==Bv&&b!==$d;)C.push(b),b=T();f&&(C=f(C,y++))==null||p.push(C)}return p}function a(d,f){return d.map(function(p){return f.map(function(m){return h(p[m])}).join(t)})}function s(d,f){return f==null&&(f=Ov(d)),[f.map(h).join(t)].concat(a(d,f)).join(` -`)}function o(d,f){return f==null&&(f=Ov(d)),a(d,f).join(` -`)}function l(d){return d.map(u).join(` -`)}function u(d){return d.map(h).join(t)}function 
h(d){return d==null?"":d instanceof Date?HF(d):e.test(d+="")?'"'+d.replace(/"/g,'""')+'"':d}return{parse:n,parseRows:i,format:s,formatBody:o,formatRows:l,formatRow:u,formatValue:h}}var Ts=uh(","),Fv=Ts.parse,GF=Ts.parseRows,jF=Ts.format,$F=Ts.formatBody,XF=Ts.formatRows,KF=Ts.formatRow,ZF=Ts.formatValue,Es=uh(" "),Pv=Es.parse,QF=Es.parseRows,JF=Es.format,tP=Es.formatBody,eP=Es.formatRows,rP=Es.formatRow,nP=Es.formatValue;function iP(t){for(var e in t){var r=t[e].trim(),n,i;if(!r)r=null;else if(r==="true")r=!0;else if(r==="false")r=!1;else if(r==="NaN")r=NaN;else if(!isNaN(n=+r))r=n;else if(i=r.match(/^([-+]\d{2})?\d{4}(-\d{2}(-\d{2})?)?(T\d{2}:\d{2}(:\d{2}(\.\d{3})?)?(Z|[-+]\d{2}:\d{2})?)?$/))aP&&!!i[4]&&!i[7]&&(r=r.replace(/-/g,"/").replace(/T/," ")),r=new Date(r);else continue;t[e]=r}return t}const aP=new Date("2019-01-01T00:00").getHours()||new Date("2019-07-01T00:00").getHours();function sP(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.blob()}function oP(t,e){return fetch(t,e).then(sP)}function lP(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.arrayBuffer()}function cP(t,e){return fetch(t,e).then(lP)}function uP(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);return t.text()}function hh(t,e){return fetch(t,e).then(uP)}function qv(t){return function(e,r,n){return arguments.length===2&&typeof r=="function"&&(n=r,r=void 0),hh(e,r).then(function(i){return t(i,n)})}}function hP(t,e,r,n){arguments.length===3&&typeof r=="function"&&(n=r,r=void 0);var i=uh(t);return hh(e,r).then(function(a){return i.parse(a,n)})}var fP=qv(Fv),dP=qv(Pv);function pP(t,e){return new Promise(function(r,n){var i=new Image;for(var a in e)i[a]=e[a];i.onerror=n,i.onload=function(){r(i)},i.src=t})}function gP(t){if(!t.ok)throw new Error(t.status+" "+t.statusText);if(!(t.status===204||t.status===205))return t.json()}function yP(t,e){return fetch(t,e).then(gP)}function Zd(t){return(e,r)=>hh(e,r).then(n=>new DOMParser().parseFromString(n,t))}const 
mP=Zd("application/xml");var bP=Zd("text/html"),_P=Zd("image/svg+xml");function vP(t,e){var r,n=1;t==null&&(t=0),e==null&&(e=0);function i(){var a,s=r.length,o,l=0,u=0;for(a=0;a<s;++a)o=r[a],l+=o.x,u+=o.y;for(l=(l/s-t)*n,u=(u/s-e)*n,a=0;a<s;++a)o=r[a],o.x-=l,o.y-=u}return i.initialize=function(a){r=a},i.x=function(a){return arguments.length?(t=+a,i):t},i.y=function(a){return arguments.length?(e=+a,i):e},i.strength=function(a){return arguments.length?(n=+a,i):n},i}function xP(t){const e=+this._x.call(null,t),r=+this._y.call(null,t);return Vv(this.cover(e,r),e,r,t)}function Vv(t,e,r,n){if(isNaN(e)||isNaN(r))return t;var i,a=t._root,s={data:n},o=t._x0,l=t._y0,u=t._x1,h=t._y1,d,f,p,m,_,y,b,x;if(!a)return t._root=s,t;for(;a.length;)if((_=e>=(d=(o+u)/2))?o=d:u=d,(y=r>=(f=(l+h)/2))?l=f:h=f,i=a,!(a=a[b=y<<1|_]))return i[b]=s,t;if(p=+t._x.call(null,a.data),m=+t._y.call(null,a.data),e===p&&r===m)return s.next=a,i?i[b]=s:t._root=s,t;do i=i?i[b]=new Array(4):t._root=new Array(4),(_=e>=(d=(o+u)/2))?o=d:u=d,(y=r>=(f=(l+h)/2))?l=f:h=f;while((b=y<<1|_)===(x=(m>=f)<<1|p>=d));return i[x]=a,i[b]=s,t}function kP(t){var e,r,n=t.length,i,a,s=new Array(n),o=new Array(n),l=1/0,u=1/0,h=-1/0,d=-1/0;for(r=0;r<n;++r)isNaN(i=+this._x.call(null,e=t[r]))||isNaN(a=+this._y.call(null,e))||(s[r]=i,o[r]=a,i<l&&(l=i),i>h&&(h=i),a<u&&(u=a),a>d&&(d=a));if(l>h||u>d)return this;for(this.cover(l,u).cover(h,d),r=0;r<n;++r)Vv(this,s[r],o[r],t[r]);return this}function wP(t,e){if(isNaN(t=+t)||isNaN(e=+e))return this;var r=this._x0,n=this._y0,i=this._x1,a=this._y1;if(isNaN(r))i=(r=Math.floor(t))+1,a=(n=Math.floor(e))+1;else{for(var s=i-r||1,o=this._root,l,u;r>t||t>=i||n>e||e>=a;)switch(u=(e<n)<<1|t<r,l=new Array(4),l[u]=o,o=l,s*=2,u){case 0:i=r+s,a=n+s;break;case 1:r=i-s,a=n+s;break;case 2:i=r+s,n=a-s;break;case 3:r=i-s,n=a-s;break}this._root&&this._root.length&&(this._root=o)}return this._x0=r,this._y0=n,this._x1=i,this._y1=a,this}function TP(){var t=[];return this.visit(function(e){if(!e.length)do 
t.push(e.data);while(e=e.next)}),t}function EP(t){return arguments.length?this.cover(+t[0][0],+t[0][1]).cover(+t[1][0],+t[1][1]):isNaN(this._x0)?void 0:[[this._x0,this._y0],[this._x1,this._y1]]}function Kr(t,e,r,n,i){this.node=t,this.x0=e,this.y0=r,this.x1=n,this.y1=i}function CP(t,e,r){var n,i=this._x0,a=this._y0,s,o,l,u,h=this._x1,d=this._y1,f=[],p=this._root,m,_;for(p&&f.push(new Kr(p,i,a,h,d)),r==null?r=1/0:(i=t-r,a=e-r,h=t+r,d=e+r,r*=r);m=f.pop();)if(!(!(p=m.node)||(s=m.x0)>h||(o=m.y0)>d||(l=m.x1)<i||(u=m.y1)<a))if(p.length){var y=(s+l)/2,b=(o+u)/2;f.push(new Kr(p[3],y,b,l,u),new Kr(p[2],s,b,y,u),new Kr(p[1],y,o,l,b),new Kr(p[0],s,o,y,b)),(_=(e>=b)<<1|t>=y)&&(m=f[f.length-1],f[f.length-1]=f[f.length-1-_],f[f.length-1-_]=m)}else{var x=t-+this._x.call(null,p.data),k=e-+this._y.call(null,p.data),T=x*x+k*k;if(T<r){var C=Math.sqrt(r=T);i=t-C,a=e-C,h=t+C,d=e+C,n=p.data}}return n}function SP(t){if(isNaN(h=+this._x.call(null,t))||isNaN(d=+this._y.call(null,t)))return this;var e,r=this._root,n,i,a,s=this._x0,o=this._y0,l=this._x1,u=this._y1,h,d,f,p,m,_,y,b;if(!r)return this;if(r.length)for(;;){if((m=h>=(f=(s+l)/2))?s=f:l=f,(_=d>=(p=(o+u)/2))?o=p:u=p,e=r,!(r=r[y=_<<1|m]))return this;if(!r.length)break;(e[y+1&3]||e[y+2&3]||e[y+3&3])&&(n=e,b=y)}for(;r.data!==t;)if(i=r,!(r=r.next))return this;return(a=r.next)&&delete r.next,i?(a?i.next=a:delete i.next,this):e?(a?e[y]=a:delete e[y],(r=e[0]||e[1]||e[2]||e[3])&&r===(e[3]||e[2]||e[1]||e[0])&&!r.length&&(n?n[b]=r:this._root=r),this):(this._root=a,this)}function AP(t){for(var e=0,r=t.length;e<r;++e)this.remove(t[e]);return this}function MP(){return this._root}function LP(){var t=0;return this.visit(function(e){if(!e.length)do++t;while(e=e.next)}),t}function RP(t){var e=[],r,n=this._root,i,a,s,o,l;for(n&&e.push(new Kr(n,this._x0,this._y0,this._x1,this._y1));r=e.pop();)if(!t(n=r.node,a=r.x0,s=r.y0,o=r.x1,l=r.y1)&&n.length){var u=(a+o)/2,h=(s+l)/2;(i=n[3])&&e.push(new Kr(i,u,h,o,l)),(i=n[2])&&e.push(new 
Kr(i,a,h,u,l)),(i=n[1])&&e.push(new Kr(i,u,s,o,h)),(i=n[0])&&e.push(new Kr(i,a,s,u,h))}return this}function IP(t){var e=[],r=[],n;for(this._root&&e.push(new Kr(this._root,this._x0,this._y0,this._x1,this._y1));n=e.pop();){var i=n.node;if(i.length){var a,s=n.x0,o=n.y0,l=n.x1,u=n.y1,h=(s+l)/2,d=(o+u)/2;(a=i[0])&&e.push(new Kr(a,s,o,h,d)),(a=i[1])&&e.push(new Kr(a,h,o,l,d)),(a=i[2])&&e.push(new Kr(a,s,d,h,u)),(a=i[3])&&e.push(new Kr(a,h,d,l,u))}r.push(n)}for(;n=r.pop();)t(n.node,n.x0,n.y0,n.x1,n.y1);return this}function NP(t){return t[0]}function BP(t){return arguments.length?(this._x=t,this):this._x}function DP(t){return t[1]}function OP(t){return arguments.length?(this._y=t,this):this._y}function fh(t,e,r){var n=new Qd(e==null?NP:e,r==null?DP:r,NaN,NaN,NaN,NaN);return t==null?n:n.addAll(t)}function Qd(t,e,r,n,i,a){this._x=t,this._y=e,this._x0=r,this._y0=n,this._x1=i,this._y1=a,this._root=void 0}function zv(t){for(var e={data:t.data},r=e;t=t.next;)r=r.next={data:t.data};return e}var Zr=fh.prototype=Qd.prototype;Zr.copy=function(){var t=new Qd(this._x,this._y,this._x0,this._y0,this._x1,this._y1),e=this._root,r,n;if(!e)return t;if(!e.length)return t._root=zv(e),t;for(r=[{source:e,target:t._root=new Array(4)}];e=r.pop();)for(var i=0;i<4;++i)(n=e.source[i])&&(n.length?r.push({source:n,target:e.target[i]=new Array(4)}):e.target[i]=zv(n));return t},Zr.add=xP,Zr.addAll=kP,Zr.cover=wP,Zr.data=TP,Zr.extent=EP,Zr.find=CP,Zr.remove=SP,Zr.removeAll=AP,Zr.root=MP,Zr.size=LP,Zr.visit=RP,Zr.visitAfter=IP,Zr.x=BP,Zr.y=OP;function vr(t){return function(){return t}}function Na(t){return(t()-.5)*1e-6}function FP(t){return t.x+t.vx}function PP(t){return t.y+t.vy}function qP(t){var e,r,n,i=1,a=1;typeof t!="function"&&(t=vr(t==null?1:+t));function s(){for(var u,h=e.length,d,f,p,m,_,y,b=0;b<a;++b)for(d=fh(e,FP,PP).visitAfter(o),u=0;u<h;++u)f=e[u],_=r[f.index],y=_*_,p=f.x+f.vx,m=f.y+f.vy,d.visit(x);function x(k,T,C,M,S){var R=k.data,A=k.r,L=_+A;if(R){if(R.index>f.index){var 
v=p-R.x-R.vx,B=m-R.y-R.vy,w=v*v+B*B;w<L*L&&(v===0&&(v=Na(n),w+=v*v),B===0&&(B=Na(n),w+=B*B),w=(L-(w=Math.sqrt(w)))/w*i,f.vx+=(v*=w)*(L=(A*=A)/(y+A)),f.vy+=(B*=w)*L,R.vx-=v*(L=1-L),R.vy-=B*L)}return}return T>p+L||M<p-L||C>m+L||S<m-L}}function o(u){if(u.data)return u.r=r[u.data.index];for(var h=u.r=0;h<4;++h)u[h]&&u[h].r>u.r&&(u.r=u[h].r)}function l(){if(!!e){var u,h=e.length,d;for(r=new Array(h),u=0;u<h;++u)d=e[u],r[d.index]=+t(d,u,e)}}return s.initialize=function(u,h){e=u,n=h,l()},s.iterations=function(u){return arguments.length?(a=+u,s):a},s.strength=function(u){return arguments.length?(i=+u,s):i},s.radius=function(u){return arguments.length?(t=typeof u=="function"?u:vr(+u),l(),s):t},s}function VP(t){return t.index}function Yv(t,e){var r=t.get(e);if(!r)throw new Error("node not found: "+e);return r}function zP(t){var e=VP,r=d,n,i=vr(30),a,s,o,l,u,h=1;t==null&&(t=[]);function d(y){return 1/Math.min(o[y.source.index],o[y.target.index])}function f(y){for(var b=0,x=t.length;b<h;++b)for(var k=0,T,C,M,S,R,A,L;k<x;++k)T=t[k],C=T.source,M=T.target,S=M.x+M.vx-C.x-C.vx||Na(u),R=M.y+M.vy-C.y-C.vy||Na(u),A=Math.sqrt(S*S+R*R),A=(A-a[k])/A*y*n[k],S*=A,R*=A,M.vx-=S*(L=l[k]),M.vy-=R*L,C.vx+=S*(L=1-L),C.vy+=R*L}function p(){if(!!s){var y,b=s.length,x=t.length,k=new Map(s.map((C,M)=>[e(C,M,s),C])),T;for(y=0,o=new Array(b);y<x;++y)T=t[y],T.index=y,typeof T.source!="object"&&(T.source=Yv(k,T.source)),typeof T.target!="object"&&(T.target=Yv(k,T.target)),o[T.source.index]=(o[T.source.index]||0)+1,o[T.target.index]=(o[T.target.index]||0)+1;for(y=0,l=new Array(x);y<x;++y)T=t[y],l[y]=o[T.source.index]/(o[T.source.index]+o[T.target.index]);n=new Array(x),m(),a=new Array(x),_()}}function m(){if(!!s)for(var y=0,b=t.length;y<b;++y)n[y]=+r(t[y],y,t)}function _(){if(!!s)for(var y=0,b=t.length;y<b;++y)a[y]=+i(t[y],y,t)}return f.initialize=function(y,b){s=y,u=b,p()},f.links=function(y){return arguments.length?(t=y,p(),f):t},f.id=function(y){return 
arguments.length?(e=y,f):e},f.iterations=function(y){return arguments.length?(h=+y,f):h},f.strength=function(y){return arguments.length?(r=typeof y=="function"?y:vr(+y),m(),f):r},f.distance=function(y){return arguments.length?(i=typeof y=="function"?y:vr(+y),_(),f):i},f}const YP=1664525,UP=1013904223,Uv=4294967296;function WP(){let t=1;return()=>(t=(YP*t+UP)%Uv)/Uv}function HP(t){return t.x}function GP(t){return t.y}var jP=10,$P=Math.PI*(3-Math.sqrt(5));function XP(t){var e,r=1,n=.001,i=1-Math.pow(n,1/300),a=0,s=.6,o=new Map,l=Ju(d),u=fs("tick","end"),h=WP();t==null&&(t=[]);function d(){f(),u.call("tick",e),r<n&&(l.stop(),u.call("end",e))}function f(_){var y,b=t.length,x;_===void 0&&(_=1);for(var k=0;k<_;++k)for(r+=(a-r)*i,o.forEach(function(T){T(r)}),y=0;y<b;++y)x=t[y],x.fx==null?x.x+=x.vx*=s:(x.x=x.fx,x.vx=0),x.fy==null?x.y+=x.vy*=s:(x.y=x.fy,x.vy=0);return e}function p(){for(var _=0,y=t.length,b;_<y;++_){if(b=t[_],b.index=_,b.fx!=null&&(b.x=b.fx),b.fy!=null&&(b.y=b.fy),isNaN(b.x)||isNaN(b.y)){var x=jP*Math.sqrt(.5+_),k=_*$P;b.x=x*Math.cos(k),b.y=x*Math.sin(k)}(isNaN(b.vx)||isNaN(b.vy))&&(b.vx=b.vy=0)}}function m(_){return _.initialize&&_.initialize(t,h),_}return p(),e={tick:f,restart:function(){return l.restart(d),e},stop:function(){return l.stop(),e},nodes:function(_){return arguments.length?(t=_,p(),o.forEach(m),e):t},alpha:function(_){return arguments.length?(r=+_,e):r},alphaMin:function(_){return arguments.length?(n=+_,e):n},alphaDecay:function(_){return arguments.length?(i=+_,e):+i},alphaTarget:function(_){return arguments.length?(a=+_,e):a},velocityDecay:function(_){return arguments.length?(s=1-_,e):1-s},randomSource:function(_){return arguments.length?(h=_,o.forEach(m),e):h},force:function(_,y){return arguments.length>1?(y==null?o.delete(_):o.set(_,m(y)),e):o.get(_)},find:function(_,y,b){var x=0,k=t.length,T,C,M,S,R;for(b==null?b=1/0:b*=b,x=0;x<k;++x)S=t[x],T=_-S.x,C=y-S.y,M=T*T+C*C,M<b&&(R=S,b=M);return R},on:function(_,y){return 
arguments.length>1?(u.on(_,y),e):u.on(_)}}}function KP(){var t,e,r,n,i=vr(-30),a,s=1,o=1/0,l=.81;function u(p){var m,_=t.length,y=fh(t,HP,GP).visitAfter(d);for(n=p,m=0;m<_;++m)e=t[m],y.visit(f)}function h(){if(!!t){var p,m=t.length,_;for(a=new Array(m),p=0;p<m;++p)_=t[p],a[_.index]=+i(_,p,t)}}function d(p){var m=0,_,y,b=0,x,k,T;if(p.length){for(x=k=T=0;T<4;++T)(_=p[T])&&(y=Math.abs(_.value))&&(m+=_.value,b+=y,x+=y*_.x,k+=y*_.y);p.x=x/b,p.y=k/b}else{_=p,_.x=_.data.x,_.y=_.data.y;do m+=a[_.data.index];while(_=_.next)}p.value=m}function f(p,m,_,y){if(!p.value)return!0;var b=p.x-e.x,x=p.y-e.y,k=y-m,T=b*b+x*x;if(k*k/l<T)return T<o&&(b===0&&(b=Na(r),T+=b*b),x===0&&(x=Na(r),T+=x*x),T<s&&(T=Math.sqrt(s*T)),e.vx+=b*p.value*n/T,e.vy+=x*p.value*n/T),!0;if(p.length||T>=o)return;(p.data!==e||p.next)&&(b===0&&(b=Na(r),T+=b*b),x===0&&(x=Na(r),T+=x*x),T<s&&(T=Math.sqrt(s*T)));do p.data!==e&&(k=a[p.data.index]*n/T,e.vx+=b*k,e.vy+=x*k);while(p=p.next)}return u.initialize=function(p,m){t=p,r=m,h()},u.strength=function(p){return arguments.length?(i=typeof p=="function"?p:vr(+p),h(),u):i},u.distanceMin=function(p){return arguments.length?(s=p*p,u):Math.sqrt(s)},u.distanceMax=function(p){return arguments.length?(o=p*p,u):Math.sqrt(o)},u.theta=function(p){return arguments.length?(l=p*p,u):Math.sqrt(l)},u}function ZP(t,e,r){var n,i=vr(.1),a,s;typeof t!="function"&&(t=vr(+t)),e==null&&(e=0),r==null&&(r=0);function o(u){for(var h=0,d=n.length;h<d;++h){var f=n[h],p=f.x-e||1e-6,m=f.y-r||1e-6,_=Math.sqrt(p*p+m*m),y=(s[h]-_)*a[h]*u/_;f.vx+=p*y,f.vy+=m*y}}function l(){if(!!n){var u,h=n.length;for(a=new Array(h),s=new Array(h),u=0;u<h;++u)s[u]=+t(n[u],u,n),a[u]=isNaN(s[u])?0:+i(n[u],u,n)}}return o.initialize=function(u){n=u,l()},o.strength=function(u){return arguments.length?(i=typeof u=="function"?u:vr(+u),l(),o):i},o.radius=function(u){return arguments.length?(t=typeof u=="function"?u:vr(+u),l(),o):t},o.x=function(u){return arguments.length?(e=+u,o):e},o.y=function(u){return 
arguments.length?(r=+u,o):r},o}function QP(t){var e=vr(.1),r,n,i;typeof t!="function"&&(t=vr(t==null?0:+t));function a(o){for(var l=0,u=r.length,h;l<u;++l)h=r[l],h.vx+=(i[l]-h.x)*n[l]*o}function s(){if(!!r){var o,l=r.length;for(n=new Array(l),i=new Array(l),o=0;o<l;++o)n[o]=isNaN(i[o]=+t(r[o],o,r))?0:+e(r[o],o,r)}}return a.initialize=function(o){r=o,s()},a.strength=function(o){return arguments.length?(e=typeof o=="function"?o:vr(+o),s(),a):e},a.x=function(o){return arguments.length?(t=typeof o=="function"?o:vr(+o),s(),a):t},a}function JP(t){var e=vr(.1),r,n,i;typeof t!="function"&&(t=vr(t==null?0:+t));function a(o){for(var l=0,u=r.length,h;l<u;++l)h=r[l],h.vy+=(i[l]-h.y)*n[l]*o}function s(){if(!!r){var o,l=r.length;for(n=new Array(l),i=new Array(l),o=0;o<l;++o)n[o]=isNaN(i[o]=+t(r[o],o,r))?0:+e(r[o],o,r)}}return a.initialize=function(o){r=o,s()},a.strength=function(o){return arguments.length?(e=typeof o=="function"?o:vr(+o),s(),a):e},a.y=function(o){return arguments.length?(t=typeof o=="function"?o:vr(+o),s(),a):t},a}function tq(t){return Math.abs(t=Math.round(t))>=1e21?t.toLocaleString("en").replace(/,/g,""):t.toString(10)}function dh(t,e){if((r=(t=e?t.toExponential(e-1):t.toExponential()).indexOf("e"))<0)return null;var r,n=t.slice(0,r);return[n.length>1?n[0]+n.slice(2):n,+t.slice(r+1)]}function Eo(t){return t=dh(Math.abs(t)),t?t[1]:NaN}function eq(t,e){return function(r,n){for(var i=r.length,a=[],s=0,o=t[0],l=0;i>0&&o>0&&(l+o+1>n&&(o=Math.max(1,n-l)),a.push(r.substring(i-=o,i+o)),!((l+=o+1)>n));)o=t[s=(s+1)%t.length];return a.reverse().join(e)}}function rq(t){return function(e){return e.replace(/[0-9]/g,function(r){return t[+r]})}}var nq=/^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i;function Co(t){if(!(e=nq.exec(t)))throw new Error("invalid format: "+t);var e;return new ph({fill:e[1],align:e[2],sign:e[3],symbol:e[4],zero:e[5],width:e[6],comma:e[7],precision:e[8]&&e[8].slice(1),trim:e[9],type:e[10]})}Co.prototype=ph.prototype;function 
ph(t){this.fill=t.fill===void 0?" ":t.fill+"",this.align=t.align===void 0?">":t.align+"",this.sign=t.sign===void 0?"-":t.sign+"",this.symbol=t.symbol===void 0?"":t.symbol+"",this.zero=!!t.zero,this.width=t.width===void 0?void 0:+t.width,this.comma=!!t.comma,this.precision=t.precision===void 0?void 0:+t.precision,this.trim=!!t.trim,this.type=t.type===void 0?"":t.type+""}ph.prototype.toString=function(){return this.fill+this.align+this.sign+this.symbol+(this.zero?"0":"")+(this.width===void 0?"":Math.max(1,this.width|0))+(this.comma?",":"")+(this.precision===void 0?"":"."+Math.max(0,this.precision|0))+(this.trim?"~":"")+this.type};function iq(t){t:for(var e=t.length,r=1,n=-1,i;r<e;++r)switch(t[r]){case".":n=i=r;break;case"0":n===0&&(n=r),i=r;break;default:if(!+t[r])break t;n>0&&(n=0);break}return n>0?t.slice(0,n)+t.slice(i+1):t}var Wv;function aq(t,e){var r=dh(t,e);if(!r)return t+"";var n=r[0],i=r[1],a=i-(Wv=Math.max(-8,Math.min(8,Math.floor(i/3)))*3)+1,s=n.length;return a===s?n:a>s?n+new Array(a-s+1).join("0"):a>0?n.slice(0,a)+"."+n.slice(a):"0."+new Array(1-a).join("0")+dh(t,Math.max(0,e+a-1))[0]}function Hv(t,e){var r=dh(t,e);if(!r)return t+"";var n=r[0],i=r[1];return i<0?"0."+new Array(-i).join("0")+n:n.length>i+1?n.slice(0,i+1)+"."+n.slice(i+1):n+new Array(i-n.length+2).join("0")}const Gv={"%":(t,e)=>(t*100).toFixed(e),b:t=>Math.round(t).toString(2),c:t=>t+"",d:tq,e:(t,e)=>t.toExponential(e),f:(t,e)=>t.toFixed(e),g:(t,e)=>t.toPrecision(e),o:t=>Math.round(t).toString(8),p:(t,e)=>Hv(t*100,e),r:Hv,s:aq,X:t=>Math.round(t).toString(16).toUpperCase(),x:t=>Math.round(t).toString(16)};function jv(t){return t}var $v=Array.prototype.map,Xv=["y","z","a","f","p","n","\xB5","m","","k","M","G","T","P","E","Z","Y"];function Kv(t){var e=t.grouping===void 0||t.thousands===void 0?jv:eq($v.call(t.grouping,Number),t.thousands+""),r=t.currency===void 0?"":t.currency[0]+"",n=t.currency===void 0?"":t.currency[1]+"",i=t.decimal===void 0?".":t.decimal+"",a=t.numerals===void 
0?jv:rq($v.call(t.numerals,String)),s=t.percent===void 0?"%":t.percent+"",o=t.minus===void 0?"\u2212":t.minus+"",l=t.nan===void 0?"NaN":t.nan+"";function u(d){d=Co(d);var f=d.fill,p=d.align,m=d.sign,_=d.symbol,y=d.zero,b=d.width,x=d.comma,k=d.precision,T=d.trim,C=d.type;C==="n"?(x=!0,C="g"):Gv[C]||(k===void 0&&(k=12),T=!0,C="g"),(y||f==="0"&&p==="=")&&(y=!0,f="0",p="=");var M=_==="$"?r:_==="#"&&/[boxX]/.test(C)?"0"+C.toLowerCase():"",S=_==="$"?n:/[%p]/.test(C)?s:"",R=Gv[C],A=/[defgprs%]/.test(C);k=k===void 0?6:/[gprs]/.test(C)?Math.max(1,Math.min(21,k)):Math.max(0,Math.min(20,k));function L(v){var B=M,w=S,D,N,z;if(C==="c")w=R(v)+w,v="";else{v=+v;var X=v<0||1/v<0;if(v=isNaN(v)?l:R(Math.abs(v),k),T&&(v=iq(v)),X&&+v==0&&m!=="+"&&(X=!1),B=(X?m==="("?m:o:m==="-"||m==="("?"":m)+B,w=(C==="s"?Xv[8+Wv/3]:"")+w+(X&&m==="("?")":""),A){for(D=-1,N=v.length;++D<N;)if(z=v.charCodeAt(D),48>z||z>57){w=(z===46?i+v.slice(D+1):v.slice(D))+w,v=v.slice(0,D);break}}}x&&!y&&(v=e(v,1/0));var ct=B.length+v.length+w.length,J=ct<b?new Array(b-ct+1).join(f):"";switch(x&&y&&(v=e(J+v,J.length?b-w.length:1/0),J=""),p){case"<":v=B+v+w+J;break;case"=":v=B+J+v+w;break;case"^":v=J.slice(0,ct=J.length>>1)+B+v+w+J.slice(ct);break;default:v=J+B+v+w;break}return a(v)}return L.toString=function(){return d+""},L}function h(d,f){var p=u((d=Co(d),d.type="f",d)),m=Math.max(-8,Math.min(8,Math.floor(Eo(f)/3)))*3,_=Math.pow(10,-m),y=Xv[8+m/3];return function(b){return p(_*b)+y}}return{format:u,formatPrefix:h}}var gh,yh,Jd;Zv({thousands:",",grouping:[3],currency:["$",""]});function Zv(t){return gh=Kv(t),yh=gh.format,Jd=gh.formatPrefix,gh}function Qv(t){return Math.max(0,-Eo(Math.abs(t)))}function Jv(t,e){return Math.max(0,Math.max(-8,Math.min(8,Math.floor(Eo(e)/3)))*3-Eo(Math.abs(t)))}function t6(t,e){return t=Math.abs(t),e=Math.abs(e)-t,Math.max(0,Eo(e)-Eo(t))+1}var 
te=1e-6,Hl=1e-12,Ae=Math.PI,rr=Ae/2,mh=Ae/4,Qr=Ae*2,Ue=180/Ae,re=Ae/180,Ne=Math.abs,So=Math.atan,Jr=Math.atan2,Kt=Math.cos,bh=Math.ceil,e6=Math.exp,t2=Math.hypot,_h=Math.log,e2=Math.pow,Ht=Math.sin,Dn=Math.sign||function(t){return t>0?1:t<0?-1:0},Sr=Math.sqrt,r2=Math.tan;function r6(t){return t>1?0:t<-1?Ae:Math.acos(t)}function tn(t){return t>1?rr:t<-1?-rr:Math.asin(t)}function n6(t){return(t=Ht(t/2))*t}function Je(){}function vh(t,e){t&&a6.hasOwnProperty(t.type)&&a6[t.type](t,e)}var i6={Feature:function(t,e){vh(t.geometry,e)},FeatureCollection:function(t,e){for(var r=t.features,n=-1,i=r.length;++n<i;)vh(r[n].geometry,e)}},a6={Sphere:function(t,e){e.sphere()},Point:function(t,e){t=t.coordinates,e.point(t[0],t[1],t[2])},MultiPoint:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)t=r[n],e.point(t[0],t[1],t[2])},LineString:function(t,e){n2(t.coordinates,e,0)},MultiLineString:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)n2(r[n],e,0)},Polygon:function(t,e){s6(t.coordinates,e)},MultiPolygon:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)s6(r[n],e)},GeometryCollection:function(t,e){for(var r=t.geometries,n=-1,i=r.length;++n<i;)vh(r[n],e)}};function n2(t,e,r){var n=-1,i=t.length-r,a;for(e.lineStart();++n<i;)a=t[n],e.point(a[0],a[1],a[2]);e.lineEnd()}function s6(t,e){var r=-1,n=t.length;for(e.polygonStart();++r<n;)n2(t[r],e,1);e.polygonEnd()}function ti(t,e){t&&i6.hasOwnProperty(t.type)?i6[t.type](t,e):vh(t,e)}var xh=new _r,kh=new _r,o6,l6,i2,a2,s2,Si={point:Je,lineStart:Je,lineEnd:Je,polygonStart:function(){xh=new _r,Si.lineStart=sq,Si.lineEnd=oq},polygonEnd:function(){var t=+xh;kh.add(t<0?Qr+t:t),this.lineStart=this.lineEnd=this.point=Je},sphere:function(){kh.add(Qr)}};function sq(){Si.point=lq}function oq(){c6(o6,l6)}function lq(t,e){Si.point=c6,o6=t,l6=e,t*=re,e*=re,i2=t,a2=Kt(e=e/2+mh),s2=Ht(e)}function c6(t,e){t*=re,e*=re,e=e/2+mh;var 
r=t-i2,n=r>=0?1:-1,i=n*r,a=Kt(e),s=Ht(e),o=s2*s,l=a2*a+o*Kt(i),u=o*n*Ht(i);xh.add(Jr(u,l)),i2=t,a2=a,s2=s}function cq(t){return kh=new _r,ti(t,Si),kh*2}function wh(t){return[Jr(t[1],t[0]),tn(t[2])]}function Cs(t){var e=t[0],r=t[1],n=Kt(r);return[n*Kt(e),n*Ht(e),Ht(r)]}function Th(t,e){return t[0]*e[0]+t[1]*e[1]+t[2]*e[2]}function Ao(t,e){return[t[1]*e[2]-t[2]*e[1],t[2]*e[0]-t[0]*e[2],t[0]*e[1]-t[1]*e[0]]}function o2(t,e){t[0]+=e[0],t[1]+=e[1],t[2]+=e[2]}function Eh(t,e){return[t[0]*e,t[1]*e,t[2]*e]}function Ch(t){var e=Sr(t[0]*t[0]+t[1]*t[1]+t[2]*t[2]);t[0]/=e,t[1]/=e,t[2]/=e}var tr,pn,nr,En,Ss,u6,h6,Mo,Gl,Ba,Qi,Ji={point:l2,lineStart:d6,lineEnd:p6,polygonStart:function(){Ji.point=g6,Ji.lineStart=uq,Ji.lineEnd=hq,Gl=new _r,Si.polygonStart()},polygonEnd:function(){Si.polygonEnd(),Ji.point=l2,Ji.lineStart=d6,Ji.lineEnd=p6,xh<0?(tr=-(nr=180),pn=-(En=90)):Gl>te?En=90:Gl<-te&&(pn=-90),Qi[0]=tr,Qi[1]=nr},sphere:function(){tr=-(nr=180),pn=-(En=90)}};function l2(t,e){Ba.push(Qi=[tr=t,nr=t]),e<pn&&(pn=e),e>En&&(En=e)}function f6(t,e){var r=Cs([t*re,e*re]);if(Mo){var n=Ao(Mo,r),i=[n[1],-n[0],0],a=Ao(i,n);Ch(a),a=wh(a);var s=t-Ss,o=s>0?1:-1,l=a[0]*Ue*o,u,h=Ne(s)>180;h^(o*Ss<l&&l<o*t)?(u=a[1]*Ue,u>En&&(En=u)):(l=(l+360)%360-180,h^(o*Ss<l&&l<o*t)?(u=-a[1]*Ue,u<pn&&(pn=u)):(e<pn&&(pn=e),e>En&&(En=e))),h?t<Ss?Cn(tr,t)>Cn(tr,nr)&&(nr=t):Cn(t,nr)>Cn(tr,nr)&&(tr=t):nr>=tr?(t<tr&&(tr=t),t>nr&&(nr=t)):t>Ss?Cn(tr,t)>Cn(tr,nr)&&(nr=t):Cn(t,nr)>Cn(tr,nr)&&(tr=t)}else Ba.push(Qi=[tr=t,nr=t]);e<pn&&(pn=e),e>En&&(En=e),Mo=r,Ss=t}function d6(){Ji.point=f6}function p6(){Qi[0]=tr,Qi[1]=nr,Ji.point=l2,Mo=null}function g6(t,e){if(Mo){var r=t-Ss;Gl.add(Ne(r)>180?r+(r>0?360:-360):r)}else u6=t,h6=e;Si.point(t,e),f6(t,e)}function uq(){Si.lineStart()}function hq(){g6(u6,h6),Si.lineEnd(),Ne(Gl)>te&&(tr=-(nr=180)),Qi[0]=tr,Qi[1]=nr,Mo=null}function Cn(t,e){return(e-=t)<0?e+360:e}function fq(t,e){return t[0]-e[0]}function y6(t,e){return t[0]<=t[1]?t[0]<=e&&e<=t[1]:e<t[0]||t[1]<e}function dq(t){var 
e,r,n,i,a,s,o;if(En=nr=-(tr=pn=1/0),Ba=[],ti(t,Ji),r=Ba.length){for(Ba.sort(fq),e=1,n=Ba[0],a=[n];e<r;++e)i=Ba[e],y6(n,i[0])||y6(n,i[1])?(Cn(n[0],i[1])>Cn(n[0],n[1])&&(n[1]=i[1]),Cn(i[0],n[1])>Cn(n[0],n[1])&&(n[0]=i[0])):a.push(n=i);for(s=-1/0,r=a.length-1,e=0,n=a[r];e<=r;n=i,++e)i=a[e],(o=Cn(n[1],i[0]))>s&&(s=o,tr=i[0],nr=n[1])}return Ba=Qi=null,tr===1/0||pn===1/0?[[NaN,NaN],[NaN,NaN]]:[[tr,pn],[nr,En]]}var jl,Sh,Ah,Mh,Lh,Rh,Ih,Nh,c2,u2,h2,m6,b6,en,rn,nn,ei={sphere:Je,point:f2,lineStart:_6,lineEnd:v6,polygonStart:function(){ei.lineStart=yq,ei.lineEnd=mq},polygonEnd:function(){ei.lineStart=_6,ei.lineEnd=v6}};function f2(t,e){t*=re,e*=re;var r=Kt(e);$l(r*Kt(t),r*Ht(t),Ht(e))}function $l(t,e,r){++jl,Ah+=(t-Ah)/jl,Mh+=(e-Mh)/jl,Lh+=(r-Lh)/jl}function _6(){ei.point=pq}function pq(t,e){t*=re,e*=re;var r=Kt(e);en=r*Kt(t),rn=r*Ht(t),nn=Ht(e),ei.point=gq,$l(en,rn,nn)}function gq(t,e){t*=re,e*=re;var r=Kt(e),n=r*Kt(t),i=r*Ht(t),a=Ht(e),s=Jr(Sr((s=rn*a-nn*i)*s+(s=nn*n-en*a)*s+(s=en*i-rn*n)*s),en*n+rn*i+nn*a);Sh+=s,Rh+=s*(en+(en=n)),Ih+=s*(rn+(rn=i)),Nh+=s*(nn+(nn=a)),$l(en,rn,nn)}function v6(){ei.point=f2}function yq(){ei.point=bq}function mq(){x6(m6,b6),ei.point=f2}function bq(t,e){m6=t,b6=e,t*=re,e*=re,ei.point=x6;var r=Kt(e);en=r*Kt(t),rn=r*Ht(t),nn=Ht(e),$l(en,rn,nn)}function x6(t,e){t*=re,e*=re;var r=Kt(e),n=r*Kt(t),i=r*Ht(t),a=Ht(e),s=rn*a-nn*i,o=nn*n-en*a,l=en*i-rn*n,u=t2(s,o,l),h=tn(u),d=u&&-h/u;c2.add(d*s),u2.add(d*o),h2.add(d*l),Sh+=h,Rh+=h*(en+(en=n)),Ih+=h*(rn+(rn=i)),Nh+=h*(nn+(nn=a)),$l(en,rn,nn)}function _q(t){jl=Sh=Ah=Mh=Lh=Rh=Ih=Nh=0,c2=new _r,u2=new _r,h2=new _r,ti(t,ei);var e=+c2,r=+u2,n=+h2,i=t2(e,r,n);return i<Hl&&(e=Rh,r=Ih,n=Nh,Sh<te&&(e=Ah,r=Mh,n=Lh),i=t2(e,r,n),i<Hl)?[NaN,NaN]:[Jr(r,e)*Ue,tn(n/i)*Ue]}function Lo(t){return function(){return t}}function d2(t,e){function r(n,i){return n=t(n,i),e(n[0],n[1])}return t.invert&&e.invert&&(r.invert=function(n,i){return n=e.invert(n,i),n&&t.invert(n[0],n[1])}),r}function 
p2(t,e){return[Ne(t)>Ae?t+Math.round(-t/Qr)*Qr:t,e]}p2.invert=p2;function g2(t,e,r){return(t%=Qr)?e||r?d2(w6(t),T6(e,r)):w6(t):e||r?T6(e,r):p2}function k6(t){return function(e,r){return e+=t,[e>Ae?e-Qr:e<-Ae?e+Qr:e,r]}}function w6(t){var e=k6(t);return e.invert=k6(-t),e}function T6(t,e){var r=Kt(t),n=Ht(t),i=Kt(e),a=Ht(e);function s(o,l){var u=Kt(l),h=Kt(o)*u,d=Ht(o)*u,f=Ht(l),p=f*r+h*n;return[Jr(d*i-p*a,h*r-f*n),tn(p*i+d*a)]}return s.invert=function(o,l){var u=Kt(l),h=Kt(o)*u,d=Ht(o)*u,f=Ht(l),p=f*i-d*a;return[Jr(d*i+f*a,h*r+p*n),tn(p*r-h*n)]},s}function E6(t){t=g2(t[0]*re,t[1]*re,t.length>2?t[2]*re:0);function e(r){return r=t(r[0]*re,r[1]*re),r[0]*=Ue,r[1]*=Ue,r}return e.invert=function(r){return r=t.invert(r[0]*re,r[1]*re),r[0]*=Ue,r[1]*=Ue,r},e}function C6(t,e,r,n,i,a){if(!!r){var s=Kt(e),o=Ht(e),l=n*r;i==null?(i=e+n*Qr,a=e-l/2):(i=S6(s,i),a=S6(s,a),(n>0?i<a:i>a)&&(i+=n*Qr));for(var u,h=i;n>0?h>a:h<a;h-=l)u=wh([s,-o*Kt(h),-o*Ht(h)]),t.point(u[0],u[1])}}function S6(t,e){e=Cs(e),e[0]-=t,Ch(e);var r=r6(-e[1]);return((-e[2]<0?-r:r)+Qr-te)%Qr}function vq(){var t=Lo([0,0]),e=Lo(90),r=Lo(6),n,i,a={point:s};function s(l,u){n.push(l=i(l,u)),l[0]*=Ue,l[1]*=Ue}function o(){var l=t.apply(this,arguments),u=e.apply(this,arguments)*re,h=r.apply(this,arguments)*re;return n=[],i=g2(-l[0]*re,-l[1]*re,0).invert,C6(a,u,h,1),l={type:"Polygon",coordinates:[n]},n=i=null,l}return o.center=function(l){return arguments.length?(t=typeof l=="function"?l:Lo([+l[0],+l[1]]),o):t},o.radius=function(l){return arguments.length?(e=typeof l=="function"?l:Lo(+l),o):e},o.precision=function(l){return arguments.length?(r=typeof l=="function"?l:Lo(+l),o):r},o}function A6(){var t=[],e;return{point:function(r,n,i){e.push([r,n,i])},lineStart:function(){t.push(e=[])},lineEnd:Je,rejoin:function(){t.length>1&&t.push(t.pop().concat(t.shift()))},result:function(){var r=t;return t=[],e=null,r}}}function Bh(t,e){return Ne(t[0]-e[0])<te&&Ne(t[1]-e[1])<te}function 
Dh(t,e,r,n){this.x=t,this.z=e,this.o=r,this.e=n,this.v=!1,this.n=this.p=null}function M6(t,e,r,n,i){var a=[],s=[],o,l;if(t.forEach(function(m){if(!((_=m.length-1)<=0)){var _,y=m[0],b=m[_],x;if(Bh(y,b)){if(!y[2]&&!b[2]){for(i.lineStart(),o=0;o<_;++o)i.point((y=m[o])[0],y[1]);i.lineEnd();return}b[0]+=2*te}a.push(x=new Dh(y,m,null,!0)),s.push(x.o=new Dh(y,null,x,!1)),a.push(x=new Dh(b,m,null,!1)),s.push(x.o=new Dh(b,null,x,!0))}}),!!a.length){for(s.sort(e),L6(a),L6(s),o=0,l=s.length;o<l;++o)s[o].e=r=!r;for(var u=a[0],h,d;;){for(var f=u,p=!0;f.v;)if((f=f.n)===u)return;h=f.z,i.lineStart();do{if(f.v=f.o.v=!0,f.e){if(p)for(o=0,l=h.length;o<l;++o)i.point((d=h[o])[0],d[1]);else n(f.x,f.n.x,1,i);f=f.n}else{if(p)for(h=f.p.z,o=h.length-1;o>=0;--o)i.point((d=h[o])[0],d[1]);else n(f.x,f.p.x,-1,i);f=f.p}f=f.o,h=f.z,p=!p}while(!f.v);i.lineEnd()}}}function L6(t){if(!!(e=t.length)){for(var e,r=0,n=t[0],i;++r<e;)n.n=i=t[r],i.p=n,n=i;n.n=i=t[0],i.p=n}}function y2(t){return Ne(t[0])<=Ae?t[0]:Dn(t[0])*((Ne(t[0])+Ae)%Qr-Ae)}function R6(t,e){var r=y2(e),n=e[1],i=Ht(n),a=[Ht(r),-Kt(r),0],s=0,o=0,l=new _r;i===1?n=rr+te:i===-1&&(n=-rr-te);for(var u=0,h=t.length;u<h;++u)if(!!(f=(d=t[u]).length))for(var d,f,p=d[f-1],m=y2(p),_=p[1]/2+mh,y=Ht(_),b=Kt(_),x=0;x<f;++x,m=T,y=M,b=S,p=k){var k=d[x],T=y2(k),C=k[1]/2+mh,M=Ht(C),S=Kt(C),R=T-m,A=R>=0?1:-1,L=A*R,v=L>Ae,B=y*M;if(l.add(Jr(B*A*Ht(L),b*S+B*Kt(L))),s+=v?R+A*Qr:R,v^m>=r^T>=r){var w=Ao(Cs(p),Cs(k));Ch(w);var D=Ao(a,w);Ch(D);var N=(v^R>=0?-1:1)*tn(D[2]);(n>N||n===N&&(w[0]||w[1]))&&(o+=v^R>=0?1:-1)}}return(s<-te||s<te&&l<-Hl)^o&1}function I6(t,e,r,n){return function(i){var a=e(i),s=A6(),o=e(s),l=!1,u,h,d,f={point:p,lineStart:_,lineEnd:y,polygonStart:function(){f.point=b,f.lineStart=x,f.lineEnd=k,h=[],u=[]},polygonEnd:function(){f.point=p,f.lineStart=_,f.lineEnd=y,h=j0(h);var 
T=R6(u,n);h.length?(l||(i.polygonStart(),l=!0),M6(h,kq,T,r,i)):T&&(l||(i.polygonStart(),l=!0),i.lineStart(),r(null,null,1,i),i.lineEnd()),l&&(i.polygonEnd(),l=!1),h=u=null},sphere:function(){i.polygonStart(),i.lineStart(),r(null,null,1,i),i.lineEnd(),i.polygonEnd()}};function p(T,C){t(T,C)&&i.point(T,C)}function m(T,C){a.point(T,C)}function _(){f.point=m,a.lineStart()}function y(){f.point=p,a.lineEnd()}function b(T,C){d.push([T,C]),o.point(T,C)}function x(){o.lineStart(),d=[]}function k(){b(d[0][0],d[0][1]),o.lineEnd();var T=o.clean(),C=s.result(),M,S=C.length,R,A,L;if(d.pop(),u.push(d),d=null,!!S){if(T&1){if(A=C[0],(R=A.length-1)>0){for(l||(i.polygonStart(),l=!0),i.lineStart(),M=0;M<R;++M)i.point((L=A[M])[0],L[1]);i.lineEnd()}return}S>1&&T&2&&C.push(C.pop().concat(C.shift())),h.push(C.filter(xq))}}return f}}function xq(t){return t.length>1}function kq(t,e){return((t=t.x)[0]<0?t[1]-rr-te:rr-t[1])-((e=e.x)[0]<0?e[1]-rr-te:rr-e[1])}const m2=I6(function(){return!0},wq,Eq,[-Ae,-rr]);function wq(t){var e=NaN,r=NaN,n=NaN,i;return{lineStart:function(){t.lineStart(),i=1},point:function(a,s){var o=a>0?Ae:-Ae,l=Ne(a-e);Ne(l-Ae)<te?(t.point(e,r=(r+s)/2>0?rr:-rr),t.point(n,r),t.lineEnd(),t.lineStart(),t.point(o,r),t.point(a,r),i=0):n!==o&&l>=Ae&&(Ne(e-n)<te&&(e-=n*te),Ne(a-o)<te&&(a-=o*te),r=Tq(e,r,a,s),t.point(n,r),t.lineEnd(),t.lineStart(),t.point(o,r),i=0),t.point(e=a,r=s),n=o},lineEnd:function(){t.lineEnd(),e=r=NaN},clean:function(){return 2-i}}}function Tq(t,e,r,n){var i,a,s=Ht(t-r);return Ne(s)>te?So((Ht(e)*(a=Kt(n))*Ht(r)-Ht(n)*(i=Kt(e))*Ht(t))/(i*a*s)):(e+n)/2}function Eq(t,e,r,n){var i;if(t==null)i=r*rr,n.point(-Ae,i),n.point(0,i),n.point(Ae,i),n.point(Ae,0),n.point(Ae,-i),n.point(0,-i),n.point(-Ae,-i),n.point(-Ae,0),n.point(-Ae,i);else if(Ne(t[0]-e[0])>te){var a=t[0]<e[0]?Ae:-Ae;i=r*a/2,n.point(-a,i),n.point(0,i),n.point(a,i)}else n.point(e[0],e[1])}function N6(t){var e=Kt(t),r=6*re,n=e>0,i=Ne(e)>te;function a(h,d,f,p){C6(p,t,r,f,h,d)}function s(h,d){return 
Kt(h)*Kt(d)>e}function o(h){var d,f,p,m,_;return{lineStart:function(){m=p=!1,_=1},point:function(y,b){var x=[y,b],k,T=s(y,b),C=n?T?0:u(y,b):T?u(y+(y<0?Ae:-Ae),b):0;if(!d&&(m=p=T)&&h.lineStart(),T!==p&&(k=l(d,x),(!k||Bh(d,k)||Bh(x,k))&&(x[2]=1)),T!==p)_=0,T?(h.lineStart(),k=l(x,d),h.point(k[0],k[1])):(k=l(d,x),h.point(k[0],k[1],2),h.lineEnd()),d=k;else if(i&&d&&n^T){var M;!(C&f)&&(M=l(x,d,!0))&&(_=0,n?(h.lineStart(),h.point(M[0][0],M[0][1]),h.point(M[1][0],M[1][1]),h.lineEnd()):(h.point(M[1][0],M[1][1]),h.lineEnd(),h.lineStart(),h.point(M[0][0],M[0][1],3)))}T&&(!d||!Bh(d,x))&&h.point(x[0],x[1]),d=x,p=T,f=C},lineEnd:function(){p&&h.lineEnd(),d=null},clean:function(){return _|(m&&p)<<1}}}function l(h,d,f){var p=Cs(h),m=Cs(d),_=[1,0,0],y=Ao(p,m),b=Th(y,y),x=y[0],k=b-x*x;if(!k)return!f&&h;var T=e*b/k,C=-e*x/k,M=Ao(_,y),S=Eh(_,T),R=Eh(y,C);o2(S,R);var A=M,L=Th(S,A),v=Th(A,A),B=L*L-v*(Th(S,S)-1);if(!(B<0)){var w=Sr(B),D=Eh(A,(-L-w)/v);if(o2(D,S),D=wh(D),!f)return D;var N=h[0],z=d[0],X=h[1],ct=d[1],J;z<N&&(J=N,N=z,z=J);var Y=z-N,$=Ne(Y-Ae)<te,lt=$||Y<te;if(!$&&ct<X&&(J=X,X=ct,ct=J),lt?$?X+ct>0^D[1]<(Ne(D[0]-N)<te?X:ct):X<=D[1]&&D[1]<=ct:Y>Ae^(N<=D[0]&&D[0]<=z)){var ut=Eh(A,(-L+w)/v);return o2(ut,S),[D,wh(ut)]}}}function u(h,d){var f=n?t:Ae-t,p=0;return h<-f?p|=1:h>f&&(p|=2),d<-f?p|=4:d>f&&(p|=8),p}return I6(s,o,a,n?[0,-t]:[-Ae,t-Ae])}function Cq(t,e,r,n,i,a){var s=t[0],o=t[1],l=e[0],u=e[1],h=0,d=1,f=l-s,p=u-o,m;if(m=r-s,!(!f&&m>0)){if(m/=f,f<0){if(m<h)return;m<d&&(d=m)}else if(f>0){if(m>d)return;m>h&&(h=m)}if(m=i-s,!(!f&&m<0)){if(m/=f,f<0){if(m>d)return;m>h&&(h=m)}else if(f>0){if(m<h)return;m<d&&(d=m)}if(m=n-o,!(!p&&m>0)){if(m/=p,p<0){if(m<h)return;m<d&&(d=m)}else if(p>0){if(m>d)return;m>h&&(h=m)}if(m=a-o,!(!p&&m<0)){if(m/=p,p<0){if(m>d)return;m>h&&(h=m)}else if(p>0){if(m<h)return;m<d&&(d=m)}return h>0&&(t[0]=s+h*f,t[1]=o+h*p),d<1&&(e[0]=s+d*f,e[1]=o+d*p),!0}}}}}var Xl=1e9,Oh=-Xl;function Fh(t,e,r,n){function i(u,h){return t<=u&&u<=r&&e<=h&&h<=n}function a(u,h,d,f){var 
p=0,m=0;if(u==null||(p=s(u,d))!==(m=s(h,d))||l(u,h)<0^d>0)do f.point(p===0||p===3?t:r,p>1?n:e);while((p=(p+d+4)%4)!==m);else f.point(h[0],h[1])}function s(u,h){return Ne(u[0]-t)<te?h>0?0:3:Ne(u[0]-r)<te?h>0?2:1:Ne(u[1]-e)<te?h>0?1:0:h>0?3:2}function o(u,h){return l(u.x,h.x)}function l(u,h){var d=s(u,1),f=s(h,1);return d!==f?d-f:d===0?h[1]-u[1]:d===1?u[0]-h[0]:d===2?u[1]-h[1]:h[0]-u[0]}return function(u){var h=u,d=A6(),f,p,m,_,y,b,x,k,T,C,M,S={point:R,lineStart:B,lineEnd:w,polygonStart:L,polygonEnd:v};function R(N,z){i(N,z)&&h.point(N,z)}function A(){for(var N=0,z=0,X=p.length;z<X;++z)for(var ct=p[z],J=1,Y=ct.length,$=ct[0],lt,ut,W=$[0],tt=$[1];J<Y;++J)lt=W,ut=tt,$=ct[J],W=$[0],tt=$[1],ut<=n?tt>n&&(W-lt)*(n-ut)>(tt-ut)*(t-lt)&&++N:tt<=n&&(W-lt)*(n-ut)<(tt-ut)*(t-lt)&&--N;return N}function L(){h=d,f=[],p=[],M=!0}function v(){var N=A(),z=M&&N,X=(f=j0(f)).length;(z||X)&&(u.polygonStart(),z&&(u.lineStart(),a(null,null,1,u),u.lineEnd()),X&&M6(f,o,N,a,u),u.polygonEnd()),h=u,f=p=m=null}function B(){S.point=D,p&&p.push(m=[]),C=!0,T=!1,x=k=NaN}function w(){f&&(D(_,y),b&&T&&d.rejoin(),f.push(d.result())),S.point=R,T&&h.lineEnd()}function D(N,z){var X=i(N,z);if(p&&m.push([N,z]),C)_=N,y=z,b=X,C=!1,X&&(h.lineStart(),h.point(N,z));else if(X&&T)h.point(N,z);else{var ct=[x=Math.max(Oh,Math.min(Xl,x)),k=Math.max(Oh,Math.min(Xl,k))],J=[N=Math.max(Oh,Math.min(Xl,N)),z=Math.max(Oh,Math.min(Xl,z))];Cq(ct,J,t,e,r,n)?(T||(h.lineStart(),h.point(ct[0],ct[1])),h.point(J[0],J[1]),X||h.lineEnd(),M=!1):X&&(h.lineStart(),h.point(N,z),M=!1)}x=N,k=z,T=X}return S}}function Sq(){var t=0,e=0,r=960,n=500,i,a,s;return s={stream:function(o){return i&&a===o?i:i=Fh(t,e,r,n)(a=o)},extent:function(o){return arguments.length?(t=+o[0][0],e=+o[0][1],r=+o[1][0],n=+o[1][1],i=a=null,s):[[t,e],[r,n]]}}}var b2,_2,Ph,qh,Ro={sphere:Je,point:Je,lineStart:Aq,lineEnd:Je,polygonStart:Je,polygonEnd:Je};function Aq(){Ro.point=Lq,Ro.lineEnd=Mq}function Mq(){Ro.point=Ro.lineEnd=Je}function 
Lq(t,e){t*=re,e*=re,_2=t,Ph=Ht(e),qh=Kt(e),Ro.point=Rq}function Rq(t,e){t*=re,e*=re;var r=Ht(e),n=Kt(e),i=Ne(t-_2),a=Kt(i),s=Ht(i),o=n*s,l=qh*r-Ph*n*a,u=Ph*r+qh*n*a;b2.add(Jr(Sr(o*o+l*l),u)),_2=t,Ph=r,qh=n}function B6(t){return b2=new _r,ti(t,Ro),+b2}var v2=[null,null],Iq={type:"LineString",coordinates:v2};function Vh(t,e){return v2[0]=t,v2[1]=e,B6(Iq)}var D6={Feature:function(t,e){return zh(t.geometry,e)},FeatureCollection:function(t,e){for(var r=t.features,n=-1,i=r.length;++n<i;)if(zh(r[n].geometry,e))return!0;return!1}},O6={Sphere:function(){return!0},Point:function(t,e){return F6(t.coordinates,e)},MultiPoint:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)if(F6(r[n],e))return!0;return!1},LineString:function(t,e){return P6(t.coordinates,e)},MultiLineString:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)if(P6(r[n],e))return!0;return!1},Polygon:function(t,e){return q6(t.coordinates,e)},MultiPolygon:function(t,e){for(var r=t.coordinates,n=-1,i=r.length;++n<i;)if(q6(r[n],e))return!0;return!1},GeometryCollection:function(t,e){for(var r=t.geometries,n=-1,i=r.length;++n<i;)if(zh(r[n],e))return!0;return!1}};function zh(t,e){return t&&O6.hasOwnProperty(t.type)?O6[t.type](t,e):!1}function F6(t,e){return Vh(t,e)===0}function P6(t,e){for(var r,n,i,a=0,s=t.length;a<s;a++){if(n=Vh(t[a],e),n===0||a>0&&(i=Vh(t[a],t[a-1]),i>0&&r<=i&&n<=i&&(r+n-i)*(1-Math.pow((r-n)/i,2))<Hl*i))return!0;r=n}return!1}function q6(t,e){return!!R6(t.map(Nq),V6(e))}function Nq(t){return t=t.map(V6),t.pop(),t}function V6(t){return[t[0]*re,t[1]*re]}function Bq(t,e){return(t&&D6.hasOwnProperty(t.type)?D6[t.type]:zh)(t,e)}function z6(t,e,r){var n=Ca(t,e-te,r).concat(e);return function(i){return n.map(function(a){return[i,a]})}}function Y6(t,e,r){var n=Ca(t,e-te,r).concat(e);return function(i){return n.map(function(a){return[a,i]})}}function U6(){var t,e,r,n,i,a,s,o,l=10,u=l,h=90,d=360,f,p,m,_,y=2.5;function b(){return{type:"MultiLineString",coordinates:x()}}function x(){return 
Ca(bh(n/h)*h,r,h).map(m).concat(Ca(bh(o/d)*d,s,d).map(_)).concat(Ca(bh(e/l)*l,t,l).filter(function(k){return Ne(k%h)>te}).map(f)).concat(Ca(bh(a/u)*u,i,u).filter(function(k){return Ne(k%d)>te}).map(p))}return b.lines=function(){return x().map(function(k){return{type:"LineString",coordinates:k}})},b.outline=function(){return{type:"Polygon",coordinates:[m(n).concat(_(s).slice(1),m(r).reverse().slice(1),_(o).reverse().slice(1))]}},b.extent=function(k){return arguments.length?b.extentMajor(k).extentMinor(k):b.extentMinor()},b.extentMajor=function(k){return arguments.length?(n=+k[0][0],r=+k[1][0],o=+k[0][1],s=+k[1][1],n>r&&(k=n,n=r,r=k),o>s&&(k=o,o=s,s=k),b.precision(y)):[[n,o],[r,s]]},b.extentMinor=function(k){return arguments.length?(e=+k[0][0],t=+k[1][0],a=+k[0][1],i=+k[1][1],e>t&&(k=e,e=t,t=k),a>i&&(k=a,a=i,i=k),b.precision(y)):[[e,a],[t,i]]},b.step=function(k){return arguments.length?b.stepMajor(k).stepMinor(k):b.stepMinor()},b.stepMajor=function(k){return arguments.length?(h=+k[0],d=+k[1],b):[h,d]},b.stepMinor=function(k){return arguments.length?(l=+k[0],u=+k[1],b):[l,u]},b.precision=function(k){return arguments.length?(y=+k,f=z6(a,i,90),p=Y6(e,t,y),m=z6(o,s,90),_=Y6(n,r,y),b):y},b.extentMajor([[-180,-90+te],[180,90-te]]).extentMinor([[-180,-80-te],[180,80+te]])}function Dq(){return U6()()}function Oq(t,e){var r=t[0]*re,n=t[1]*re,i=e[0]*re,a=e[1]*re,s=Kt(n),o=Ht(n),l=Kt(a),u=Ht(a),h=s*Kt(r),d=s*Ht(r),f=l*Kt(i),p=l*Ht(i),m=2*tn(Sr(n6(a-n)+s*l*n6(i-r))),_=Ht(m),y=m?function(b){var x=Ht(b*=m)/_,k=Ht(m-b)/_,T=k*h+x*f,C=k*d+x*p,M=k*o+x*u;return[Jr(C,T)*Ue,Jr(M,Sr(T*T+C*C))*Ue]}:function(){return[r*Ue,n*Ue]};return y.distance=m,y}const Kl=t=>t;var x2=new _r,k2=new _r,W6,H6,w2,T2,Da={point:Je,lineStart:Je,lineEnd:Je,polygonStart:function(){Da.lineStart=Fq,Da.lineEnd=qq},polygonEnd:function(){Da.lineStart=Da.lineEnd=Da.point=Je,x2.add(Ne(k2)),k2=new _r},result:function(){var t=x2/2;return x2=new _r,t}};function Fq(){Da.point=Pq}function 
Pq(t,e){Da.point=G6,W6=w2=t,H6=T2=e}function G6(t,e){k2.add(T2*t-w2*e),w2=t,T2=e}function qq(){G6(W6,H6)}const j6=Da;var Io=1/0,Yh=Io,Zl=-Io,Uh=Zl,Vq={point:zq,lineStart:Je,lineEnd:Je,polygonStart:Je,polygonEnd:Je,result:function(){var t=[[Io,Yh],[Zl,Uh]];return Zl=Uh=-(Yh=Io=1/0),t}};function zq(t,e){t<Io&&(Io=t),t>Zl&&(Zl=t),e<Yh&&(Yh=e),e>Uh&&(Uh=e)}const Wh=Vq;var E2=0,C2=0,Ql=0,Hh=0,Gh=0,No=0,S2=0,A2=0,Jl=0,$6,X6,Ai,Mi,ri={point:As,lineStart:K6,lineEnd:Z6,polygonStart:function(){ri.lineStart=Wq,ri.lineEnd=Hq},polygonEnd:function(){ri.point=As,ri.lineStart=K6,ri.lineEnd=Z6},result:function(){var t=Jl?[S2/Jl,A2/Jl]:No?[Hh/No,Gh/No]:Ql?[E2/Ql,C2/Ql]:[NaN,NaN];return E2=C2=Ql=Hh=Gh=No=S2=A2=Jl=0,t}};function As(t,e){E2+=t,C2+=e,++Ql}function K6(){ri.point=Yq}function Yq(t,e){ri.point=Uq,As(Ai=t,Mi=e)}function Uq(t,e){var r=t-Ai,n=e-Mi,i=Sr(r*r+n*n);Hh+=i*(Ai+t)/2,Gh+=i*(Mi+e)/2,No+=i,As(Ai=t,Mi=e)}function Z6(){ri.point=As}function Wq(){ri.point=Gq}function Hq(){Q6($6,X6)}function Gq(t,e){ri.point=Q6,As($6=Ai=t,X6=Mi=e)}function Q6(t,e){var r=t-Ai,n=e-Mi,i=Sr(r*r+n*n);Hh+=i*(Ai+t)/2,Gh+=i*(Mi+e)/2,No+=i,i=Mi*t-Ai*e,S2+=i*(Ai+t),A2+=i*(Mi+e),Jl+=i*3,As(Ai=t,Mi=e)}const J6=ri;function tx(t){this._context=t}tx.prototype={_radius:4.5,pointRadius:function(t){return this._radius=t,this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){this._line===0&&this._context.closePath(),this._point=NaN},point:function(t,e){switch(this._point){case 0:{this._context.moveTo(t,e),this._point=1;break}case 1:{this._context.lineTo(t,e);break}default:{this._context.moveTo(t+this._radius,e),this._context.arc(t,e,this._radius,0,Qr);break}}},result:Je};var M2=new _r,L2,ex,rx,tc,ec,jh={point:Je,lineStart:function(){jh.point=jq},lineEnd:function(){L2&&nx(ex,rx),jh.point=Je},polygonStart:function(){L2=!0},polygonEnd:function(){L2=null},result:function(){var t=+M2;return M2=new _r,t}};function 
jq(t,e){jh.point=nx,ex=tc=t,rx=ec=e}function nx(t,e){tc-=t,ec-=e,M2.add(Sr(tc*tc+ec*ec)),tc=t,ec=e}const ix=jh;function ax(){this._string=[]}ax.prototype={_radius:4.5,_circle:sx(4.5),pointRadius:function(t){return(t=+t)!==this._radius&&(this._radius=t,this._circle=null),this},polygonStart:function(){this._line=0},polygonEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){this._line===0&&this._string.push("Z"),this._point=NaN},point:function(t,e){switch(this._point){case 0:{this._string.push("M",t,",",e),this._point=1;break}case 1:{this._string.push("L",t,",",e);break}default:{this._circle==null&&(this._circle=sx(this._radius)),this._string.push("M",t,",",e,this._circle);break}}},result:function(){if(this._string.length){var t=this._string.join("");return this._string=[],t}else return null}};function sx(t){return"m0,"+t+"a"+t+","+t+" 0 1,1 0,"+-2*t+"a"+t+","+t+" 0 1,1 0,"+2*t+"z"}function $q(t,e){var r=4.5,n,i;function a(s){return s&&(typeof r=="function"&&i.pointRadius(+r.apply(this,arguments)),ti(s,n(i))),i.result()}return a.area=function(s){return ti(s,n(j6)),j6.result()},a.measure=function(s){return ti(s,n(ix)),ix.result()},a.bounds=function(s){return ti(s,n(Wh)),Wh.result()},a.centroid=function(s){return ti(s,n(J6)),J6.result()},a.projection=function(s){return arguments.length?(n=s==null?(t=null,Kl):(t=s).stream,a):t},a.context=function(s){return arguments.length?(i=s==null?(e=null,new ax):new tx(e=s),typeof r!="function"&&i.pointRadius(r),a):e},a.pointRadius=function(s){return arguments.length?(r=typeof s=="function"?s:(i.pointRadius(+s),+s),a):r},a.projection(t).context(e)}function Xq(t){return{stream:rc(t)}}function rc(t){return function(e){var r=new R2;for(var n in t)r[n]=t[n];return r.stream=e,r}}function 
R2(){}R2.prototype={constructor:R2,point:function(t,e){this.stream.point(t,e)},sphere:function(){this.stream.sphere()},lineStart:function(){this.stream.lineStart()},lineEnd:function(){this.stream.lineEnd()},polygonStart:function(){this.stream.polygonStart()},polygonEnd:function(){this.stream.polygonEnd()}};function I2(t,e,r){var n=t.clipExtent&&t.clipExtent();return t.scale(150).translate([0,0]),n!=null&&t.clipExtent(null),ti(r,t.stream(Wh)),e(Wh.result()),n!=null&&t.clipExtent(n),t}function $h(t,e,r){return I2(t,function(n){var i=e[1][0]-e[0][0],a=e[1][1]-e[0][1],s=Math.min(i/(n[1][0]-n[0][0]),a/(n[1][1]-n[0][1])),o=+e[0][0]+(i-s*(n[1][0]+n[0][0]))/2,l=+e[0][1]+(a-s*(n[1][1]+n[0][1]))/2;t.scale(150*s).translate([o,l])},r)}function N2(t,e,r){return $h(t,[[0,0],e],r)}function B2(t,e,r){return I2(t,function(n){var i=+e,a=i/(n[1][0]-n[0][0]),s=(i-a*(n[1][0]+n[0][0]))/2,o=-a*n[0][1];t.scale(150*a).translate([s,o])},r)}function D2(t,e,r){return I2(t,function(n){var i=+e,a=i/(n[1][1]-n[0][1]),s=-a*n[0][0],o=(i-a*(n[1][1]+n[0][1]))/2;t.scale(150*a).translate([s,o])},r)}var ox=16,Kq=Kt(30*re);function lx(t,e){return+e?Qq(t,e):Zq(t)}function Zq(t){return rc({point:function(e,r){e=t(e,r),this.stream.point(e[0],e[1])}})}function Qq(t,e){function r(n,i,a,s,o,l,u,h,d,f,p,m,_,y){var b=u-n,x=h-i,k=b*b+x*x;if(k>4*e&&_--){var T=s+f,C=o+p,M=l+m,S=Sr(T*T+C*C+M*M),R=tn(M/=S),A=Ne(Ne(M)-1)<te||Ne(a-d)<te?(a+d)/2:Jr(C,T),L=t(A,R),v=L[0],B=L[1],w=v-n,D=B-i,N=x*w-b*D;(N*N/k>e||Ne((b*w+x*D)/k-.5)>.3||s*f+o*p+l*m<Kq)&&(r(n,i,a,s,o,l,v,B,A,T/=S,C/=S,M,_,y),y.point(v,B),r(v,B,A,T,C,M,u,h,d,f,p,m,_,y))}}return function(n){var i,a,s,o,l,u,h,d,f,p,m,_,y={point:b,lineStart:x,lineEnd:T,polygonStart:function(){n.polygonStart(),y.lineStart=C},polygonEnd:function(){n.polygonEnd(),y.lineStart=x}};function b(R,A){R=t(R,A),n.point(R[0],R[1])}function x(){d=NaN,y.point=k,n.lineStart()}function k(R,A){var L=Cs([R,A]),v=t(R,A);r(d,f,h,p,m,_,d=v[0],f=v[1],h=R,p=L[0],m=L[1],_=L[2],ox,n),n.point(d,f)}function 
T(){y.point=b,n.lineEnd()}function C(){x(),y.point=M,y.lineEnd=S}function M(R,A){k(i=R,A),a=d,s=f,o=p,l=m,u=_,y.point=k}function S(){r(d,f,h,p,m,_,a,s,i,o,l,u,ox,n),y.lineEnd=T,T()}return y}}var Jq=rc({point:function(t,e){this.stream.point(t*re,e*re)}});function tV(t){return rc({point:function(e,r){var n=t(e,r);return this.stream.point(n[0],n[1])}})}function eV(t,e,r,n,i){function a(s,o){return s*=n,o*=i,[e+t*s,r-t*o]}return a.invert=function(s,o){return[(s-e)/t*n,(r-o)/t*i]},a}function cx(t,e,r,n,i,a){if(!a)return eV(t,e,r,n,i);var s=Kt(a),o=Ht(a),l=s*t,u=o*t,h=s/t,d=o/t,f=(o*r-s*e)/t,p=(o*e+s*r)/t;function m(_,y){return _*=n,y*=i,[l*_-u*y+e,r-u*_-l*y]}return m.invert=function(_,y){return[n*(h*_-d*y+f),i*(p-d*_-h*y)]},m}function Li(t){return O2(function(){return t})()}function O2(t){var e,r=150,n=480,i=250,a=0,s=0,o=0,l=0,u=0,h,d=0,f=1,p=1,m=null,_=m2,y=null,b,x,k,T=Kl,C=.5,M,S,R,A,L;function v(N){return R(N[0]*re,N[1]*re)}function B(N){return N=R.invert(N[0],N[1]),N&&[N[0]*Ue,N[1]*Ue]}v.stream=function(N){return A&&L===N?A:A=Jq(tV(h)(_(M(T(L=N)))))},v.preclip=function(N){return arguments.length?(_=N,m=void 0,D()):_},v.postclip=function(N){return arguments.length?(T=N,y=b=x=k=null,D()):T},v.clipAngle=function(N){return arguments.length?(_=+N?N6(m=N*re):(m=null,m2),D()):m*Ue},v.clipExtent=function(N){return arguments.length?(T=N==null?(y=b=x=k=null,Kl):Fh(y=+N[0][0],b=+N[0][1],x=+N[1][0],k=+N[1][1]),D()):y==null?null:[[y,b],[x,k]]},v.scale=function(N){return arguments.length?(r=+N,w()):r},v.translate=function(N){return arguments.length?(n=+N[0],i=+N[1],w()):[n,i]},v.center=function(N){return arguments.length?(a=N[0]%360*re,s=N[1]%360*re,w()):[a*Ue,s*Ue]},v.rotate=function(N){return arguments.length?(o=N[0]%360*re,l=N[1]%360*re,u=N.length>2?N[2]%360*re:0,w()):[o*Ue,l*Ue,u*Ue]},v.angle=function(N){return arguments.length?(d=N%360*re,w()):d*Ue},v.reflectX=function(N){return arguments.length?(f=N?-1:1,w()):f<0},v.reflectY=function(N){return 
arguments.length?(p=N?-1:1,w()):p<0},v.precision=function(N){return arguments.length?(M=lx(S,C=N*N),D()):Sr(C)},v.fitExtent=function(N,z){return $h(v,N,z)},v.fitSize=function(N,z){return N2(v,N,z)},v.fitWidth=function(N,z){return B2(v,N,z)},v.fitHeight=function(N,z){return D2(v,N,z)};function w(){var N=cx(r,0,0,f,p,d).apply(null,e(a,s)),z=cx(r,n-N[0],i-N[1],f,p,d);return h=g2(o,l,u),S=d2(e,z),R=d2(h,S),M=lx(S,C),D()}function D(){return A=L=null,v}return function(){return e=t.apply(this,arguments),v.invert=e.invert&&B,w()}}function F2(t){var e=0,r=Ae/3,n=O2(t),i=n(e,r);return i.parallels=function(a){return arguments.length?n(e=a[0]*re,r=a[1]*re):[e*Ue,r*Ue]},i}function rV(t){var e=Kt(t);function r(n,i){return[n*e,Ht(i)/e]}return r.invert=function(n,i){return[n/e,tn(i*e)]},r}function ux(t,e){var r=Ht(t),n=(r+Ht(e))/2;if(Ne(n)<te)return rV(t);var i=1+r*(2*n-r),a=Sr(i)/n;function s(o,l){var u=Sr(i-2*n*Ht(l))/n;return[u*Ht(o*=n),a-u*Kt(o)]}return s.invert=function(o,l){var u=a-l,h=Jr(o,Ne(u))*Dn(u);return u*n<0&&(h-=Ae*Dn(o)*Dn(u)),[h/n,tn((i-(o*o+u*u)*n*n)/(2*n))]},s}function Xh(){return F2(ux).scale(155.424).center([0,33.6442])}function hx(){return Xh().parallels([29.5,45.5]).scale(1070).translate([480,250]).rotate([96,0]).center([-.6,38.7])}function nV(t){var e=t.length;return{point:function(r,n){for(var i=-1;++i<e;)t[i].point(r,n)},sphere:function(){for(var r=-1;++r<e;)t[r].sphere()},lineStart:function(){for(var r=-1;++r<e;)t[r].lineStart()},lineEnd:function(){for(var r=-1;++r<e;)t[r].lineEnd()},polygonStart:function(){for(var r=-1;++r<e;)t[r].polygonStart()},polygonEnd:function(){for(var r=-1;++r<e;)t[r].polygonEnd()}}}function iV(){var t,e,r=hx(),n,i=Xh().rotate([154,0]).center([-2,58.5]).parallels([55,65]),a,s=Xh().rotate([157,0]).center([-3,19.9]).parallels([8,18]),o,l,u={point:function(f,p){l=[f,p]}};function h(f){var p=f[0],m=f[1];return l=null,n.point(p,m),l||(a.point(p,m),l)||(o.point(p,m),l)}h.invert=function(f){var 
p=r.scale(),m=r.translate(),_=(f[0]-m[0])/p,y=(f[1]-m[1])/p;return(y>=.12&&y<.234&&_>=-.425&&_<-.214?i:y>=.166&&y<.234&&_>=-.214&&_<-.115?s:r).invert(f)},h.stream=function(f){return t&&e===f?t:t=nV([r.stream(e=f),i.stream(f),s.stream(f)])},h.precision=function(f){return arguments.length?(r.precision(f),i.precision(f),s.precision(f),d()):r.precision()},h.scale=function(f){return arguments.length?(r.scale(f),i.scale(f*.35),s.scale(f),h.translate(r.translate())):r.scale()},h.translate=function(f){if(!arguments.length)return r.translate();var p=r.scale(),m=+f[0],_=+f[1];return n=r.translate(f).clipExtent([[m-.455*p,_-.238*p],[m+.455*p,_+.238*p]]).stream(u),a=i.translate([m-.307*p,_+.201*p]).clipExtent([[m-.425*p+te,_+.12*p+te],[m-.214*p-te,_+.234*p-te]]).stream(u),o=s.translate([m-.205*p,_+.212*p]).clipExtent([[m-.214*p+te,_+.166*p+te],[m-.115*p-te,_+.234*p-te]]).stream(u),d()},h.fitExtent=function(f,p){return $h(h,f,p)},h.fitSize=function(f,p){return N2(h,f,p)},h.fitWidth=function(f,p){return B2(h,f,p)},h.fitHeight=function(f,p){return D2(h,f,p)};function d(){return t=e=null,h}return h.scale(1070)}function fx(t){return function(e,r){var n=Kt(e),i=Kt(r),a=t(n*i);return a===1/0?[2,0]:[a*i*Ht(e),a*Ht(r)]}}function nc(t){return function(e,r){var n=Sr(e*e+r*r),i=t(n),a=Ht(i),s=Kt(i);return[Jr(e*a,n*s),tn(n&&r*a/n)]}}var P2=fx(function(t){return Sr(2/(1+t))});P2.invert=nc(function(t){return 2*tn(t/2)});function aV(){return Li(P2).scale(124.75).clipAngle(180-.001)}var q2=fx(function(t){return(t=r6(t))&&t/Ht(t)});q2.invert=nc(function(t){return t});function sV(){return Li(q2).scale(79.4188).clipAngle(180-.001)}function ic(t,e){return[t,_h(r2((rr+e)/2))]}ic.invert=function(t,e){return[t,2*So(e6(e))-rr]};function oV(){return dx(ic).scale(961/Qr)}function dx(t){var e=Li(t),r=e.center,n=e.scale,i=e.translate,a=e.clipExtent,s=null,o,l,u;e.scale=function(d){return arguments.length?(n(d),h()):n()},e.translate=function(d){return 
arguments.length?(i(d),h()):i()},e.center=function(d){return arguments.length?(r(d),h()):r()},e.clipExtent=function(d){return arguments.length?(d==null?s=o=l=u=null:(s=+d[0][0],o=+d[0][1],l=+d[1][0],u=+d[1][1]),h()):s==null?null:[[s,o],[l,u]]};function h(){var d=Ae*n(),f=e(E6(e.rotate()).invert([0,0]));return a(s==null?[[f[0]-d,f[1]-d],[f[0]+d,f[1]+d]]:t===ic?[[Math.max(f[0]-d,s),o],[Math.min(f[0]+d,l),u]]:[[s,Math.max(f[1]-d,o)],[l,Math.min(f[1]+d,u)]])}return h()}function Kh(t){return r2((rr+t)/2)}function px(t,e){var r=Kt(t),n=t===e?Ht(t):_h(r/Kt(e))/_h(Kh(e)/Kh(t)),i=r*e2(Kh(t),n)/n;if(!n)return ic;function a(s,o){i>0?o<-rr+te&&(o=-rr+te):o>rr-te&&(o=rr-te);var l=i/e2(Kh(o),n);return[l*Ht(n*s),i-l*Kt(n*s)]}return a.invert=function(s,o){var l=i-o,u=Dn(n)*Sr(s*s+l*l),h=Jr(s,Ne(l))*Dn(l);return l*n<0&&(h-=Ae*Dn(s)*Dn(l)),[h/n,2*So(e2(i/u,1/n))-rr]},a}function lV(){return F2(px).scale(109.5).parallels([30,30])}function ac(t,e){return[t,e]}ac.invert=ac;function cV(){return Li(ac).scale(152.63)}function gx(t,e){var r=Kt(t),n=t===e?Ht(t):(r-Kt(e))/(e-t),i=r/n+t;if(Ne(n)<te)return ac;function a(s,o){var l=i-o,u=n*s;return[l*Ht(u),i-l*Kt(u)]}return a.invert=function(s,o){var l=i-o,u=Jr(s,Ne(l))*Dn(l);return l*n<0&&(u-=Ae*Dn(s)*Dn(l)),[u/n,i-Dn(n)*Sr(s*s+l*l)]},a}function uV(){return F2(gx).scale(131.154).center([0,13.9389])}var sc=1.340264,oc=-.081106,lc=893e-6,cc=.003796,Zh=Sr(3)/2,hV=12;function V2(t,e){var r=tn(Zh*Ht(e)),n=r*r,i=n*n*n;return[t*Kt(r)/(Zh*(sc+3*oc*n+i*(7*lc+9*cc*n))),r*(sc+oc*n+i*(lc+cc*n))]}V2.invert=function(t,e){for(var r=e,n=r*r,i=n*n*n,a=0,s,o,l;a<hV&&(o=r*(sc+oc*n+i*(lc+cc*n))-e,l=sc+3*oc*n+i*(7*lc+9*cc*n),r-=s=o/l,n=r*r,i=n*n*n,!(Ne(s)<Hl));++a);return[Zh*t*(sc+3*oc*n+i*(7*lc+9*cc*n))/Kt(r),tn(Ht(r)/Zh)]};function fV(){return Li(V2).scale(177.158)}function z2(t,e){var r=Kt(e),n=Kt(t)*r;return[r*Ht(t)/n,Ht(e)/n]}z2.invert=nc(So);function dV(){return Li(z2).scale(144.049).clipAngle(60)}function pV(){var 
t=1,e=0,r=0,n=1,i=1,a=0,s,o,l=null,u,h,d,f=1,p=1,m=rc({point:function(T,C){var M=k([T,C]);this.stream.point(M[0],M[1])}}),_=Kl,y,b;function x(){return f=t*n,p=t*i,y=b=null,k}function k(T){var C=T[0]*f,M=T[1]*p;if(a){var S=M*s-C*o;C=C*s+M*o,M=S}return[C+e,M+r]}return k.invert=function(T){var C=T[0]-e,M=T[1]-r;if(a){var S=M*s+C*o;C=C*s-M*o,M=S}return[C/f,M/p]},k.stream=function(T){return y&&b===T?y:y=m(_(b=T))},k.postclip=function(T){return arguments.length?(_=T,l=u=h=d=null,x()):_},k.clipExtent=function(T){return arguments.length?(_=T==null?(l=u=h=d=null,Kl):Fh(l=+T[0][0],u=+T[0][1],h=+T[1][0],d=+T[1][1]),x()):l==null?null:[[l,u],[h,d]]},k.scale=function(T){return arguments.length?(t=+T,x()):t},k.translate=function(T){return arguments.length?(e=+T[0],r=+T[1],x()):[e,r]},k.angle=function(T){return arguments.length?(a=T%360*re,o=Ht(a),s=Kt(a),x()):a*Ue},k.reflectX=function(T){return arguments.length?(n=T?-1:1,x()):n<0},k.reflectY=function(T){return arguments.length?(i=T?-1:1,x()):i<0},k.fitExtent=function(T,C){return $h(k,T,C)},k.fitSize=function(T,C){return N2(k,T,C)},k.fitWidth=function(T,C){return B2(k,T,C)},k.fitHeight=function(T,C){return D2(k,T,C)},k}function Y2(t,e){var r=e*e,n=r*r;return[t*(.8707-.131979*r+n*(-.013791+n*(.003971*r-.001529*n))),e*(1.007226+r*(.015085+n*(-.044475+.028874*r-.005916*n)))]}Y2.invert=function(t,e){var r=e,n=25,i;do{var a=r*r,s=a*a;r-=i=(r*(1.007226+a*(.015085+s*(-.044475+.028874*a-.005916*s)))-e)/(1.007226+a*(.015085*3+s*(-.044475*7+.028874*9*a-.005916*11*s)))}while(Ne(i)>te&&--n>0);return[t/(.8707+(a=r*r)*(-.131979+a*(-.013791+a*a*a*(.003971-.001529*a)))),r]};function gV(){return Li(Y2).scale(175.295)}function U2(t,e){return[Kt(e)*Ht(t),Ht(e)]}U2.invert=nc(tn);function yV(){return Li(U2).scale(249.5).clipAngle(90+te)}function W2(t,e){var r=Kt(e),n=1+Kt(t)*r;return[r*Ht(t)/n,Ht(e)/n]}W2.invert=nc(function(t){return 2*So(t)});function mV(){return Li(W2).scale(250).clipAngle(142)}function 
H2(t,e){return[_h(r2((rr+e)/2)),-t]}H2.invert=function(t,e){return[-e,2*So(e6(t))-rr]};function bV(){var t=dx(H2),e=t.center,r=t.rotate;return t.center=function(n){return arguments.length?e([-n[1],n[0]]):(n=e(),[n[1],-n[0]])},t.rotate=function(n){return arguments.length?r([n[0],n[1],n.length>2?n[2]+90:90]):(n=r(),[n[0],n[1],n[2]-90])},r([0,0,90]).scale(159.155)}function _V(t,e){return t.parent===e.parent?1:2}function vV(t){return t.reduce(xV,0)/t.length}function xV(t,e){return t+e.x}function kV(t){return 1+t.reduce(wV,0)}function wV(t,e){return Math.max(t,e.y)}function TV(t){for(var e;e=t.children;)t=e[0];return t}function EV(t){for(var e;e=t.children;)t=e[e.length-1];return t}function CV(){var t=_V,e=1,r=1,n=!1;function i(a){var s,o=0;a.eachAfter(function(f){var p=f.children;p?(f.x=vV(p),f.y=kV(p)):(f.x=s?o+=t(f,s):0,f.y=0,s=f)});var l=TV(a),u=EV(a),h=l.x-t(l,u)/2,d=u.x+t(u,l)/2;return a.eachAfter(n?function(f){f.x=(f.x-a.x)*e,f.y=(a.y-f.y)*r}:function(f){f.x=(f.x-h)/(d-h)*e,f.y=(1-(a.y?f.y/a.y:1))*r})}return i.separation=function(a){return arguments.length?(t=a,i):t},i.size=function(a){return arguments.length?(n=!1,e=+a[0],r=+a[1],i):n?null:[e,r]},i.nodeSize=function(a){return arguments.length?(n=!0,e=+a[0],r=+a[1],i):n?[e,r]:null},i}function SV(t){var e=0,r=t.children,n=r&&r.length;if(!n)e=1;else for(;--n>=0;)e+=r[n].value;t.value=e}function AV(){return this.eachAfter(SV)}function MV(t,e){let r=-1;for(const n of this)t.call(e,n,++r,this);return this}function LV(t,e){for(var r=this,n=[r],i,a,s=-1;r=n.pop();)if(t.call(e,r,++s,this),i=r.children)for(a=i.length-1;a>=0;--a)n.push(i[a]);return this}function RV(t,e){for(var r=this,n=[r],i=[],a,s,o,l=-1;r=n.pop();)if(i.push(r),a=r.children)for(s=0,o=a.length;s<o;++s)n.push(a[s]);for(;r=i.pop();)t.call(e,r,++l,this);return this}function IV(t,e){let r=-1;for(const n of this)if(t.call(e,n,++r,this))return n}function NV(t){return this.eachAfter(function(e){for(var 
r=+t(e.data)||0,n=e.children,i=n&&n.length;--i>=0;)r+=n[i].value;e.value=r})}function BV(t){return this.eachBefore(function(e){e.children&&e.children.sort(t)})}function DV(t){for(var e=this,r=OV(e,t),n=[e];e!==r;)e=e.parent,n.push(e);for(var i=n.length;t!==r;)n.splice(i,0,t),t=t.parent;return n}function OV(t,e){if(t===e)return t;var r=t.ancestors(),n=e.ancestors(),i=null;for(t=r.pop(),e=n.pop();t===e;)i=t,t=r.pop(),e=n.pop();return i}function FV(){for(var t=this,e=[t];t=t.parent;)e.push(t);return e}function PV(){return Array.from(this)}function qV(){var t=[];return this.eachBefore(function(e){e.children||t.push(e)}),t}function VV(){var t=this,e=[];return t.each(function(r){r!==t&&e.push({source:r.parent,target:r})}),e}function*zV(){var t=this,e,r=[t],n,i,a;do for(e=r.reverse(),r=[];t=e.pop();)if(yield t,n=t.children)for(i=0,a=n.length;i<a;++i)r.push(n[i]);while(r.length)}function G2(t,e){t instanceof Map?(t=[void 0,t],e===void 0&&(e=WV)):e===void 0&&(e=UV);for(var r=new Ms(t),n,i=[r],a,s,o,l;n=i.pop();)if((s=e(n.data))&&(l=(s=Array.from(s)).length))for(n.children=s,o=l-1;o>=0;--o)i.push(a=s[o]=new Ms(s[o])),a.parent=n,a.depth=n.depth+1;return r.eachBefore(yx)}function YV(){return G2(this).eachBefore(HV)}function UV(t){return t.children}function WV(t){return Array.isArray(t)?t[1]:null}function HV(t){t.data.value!==void 0&&(t.value=t.data.value),t.data=t.data.data}function yx(t){var e=0;do t.height=e;while((t=t.parent)&&t.height<++e)}function Ms(t){this.data=t,this.depth=this.height=0,this.parent=null}Ms.prototype=G2.prototype={constructor:Ms,count:AV,each:MV,eachAfter:RV,eachBefore:LV,find:IV,sum:NV,sort:BV,path:DV,ancestors:FV,descendants:PV,leaves:qV,links:VV,copy:YV,[Symbol.iterator]:zV};function Qh(t){return t==null?null:mx(t)}function mx(t){if(typeof t!="function")throw new Error;return t}function Ls(){return 0}function Bo(t){return function(){return t}}const GV=1664525,jV=1013904223,bx=4294967296;function j2(){let t=1;return()=>(t=(GV*t+jV)%bx)/bx}function 
$V(t){return typeof t=="object"&&"length"in t?t:Array.from(t)}function XV(t,e){let r=t.length,n,i;for(;r;)i=e()*r--|0,n=t[r],t[r]=t[i],t[i]=n;return t}function KV(t){return _x(t,j2())}function _x(t,e){for(var r=0,n=(t=XV(Array.from(t),e)).length,i=[],a,s;r<n;)a=t[r],s&&vx(s,a)?++r:(s=QV(i=ZV(i,a)),r=0);return s}function ZV(t,e){var r,n;if($2(e,t))return[e];for(r=0;r<t.length;++r)if(Jh(e,t[r])&&$2(uc(t[r],e),t))return[t[r],e];for(r=0;r<t.length-1;++r)for(n=r+1;n<t.length;++n)if(Jh(uc(t[r],t[n]),e)&&Jh(uc(t[r],e),t[n])&&Jh(uc(t[n],e),t[r])&&$2(xx(t[r],t[n],e),t))return[t[r],t[n],e];throw new Error}function Jh(t,e){var r=t.r-e.r,n=e.x-t.x,i=e.y-t.y;return r<0||r*r<n*n+i*i}function vx(t,e){var r=t.r-e.r+Math.max(t.r,e.r,1)*1e-9,n=e.x-t.x,i=e.y-t.y;return r>0&&r*r>n*n+i*i}function $2(t,e){for(var r=0;r<e.length;++r)if(!vx(t,e[r]))return!1;return!0}function QV(t){switch(t.length){case 1:return JV(t[0]);case 2:return uc(t[0],t[1]);case 3:return xx(t[0],t[1],t[2])}}function JV(t){return{x:t.x,y:t.y,r:t.r}}function uc(t,e){var r=t.x,n=t.y,i=t.r,a=e.x,s=e.y,o=e.r,l=a-r,u=s-n,h=o-i,d=Math.sqrt(l*l+u*u);return{x:(r+a+l/d*h)/2,y:(n+s+u/d*h)/2,r:(d+i+o)/2}}function xx(t,e,r){var n=t.x,i=t.y,a=t.r,s=e.x,o=e.y,l=e.r,u=r.x,h=r.y,d=r.r,f=n-s,p=n-u,m=i-o,_=i-h,y=l-a,b=d-a,x=n*n+i*i-a*a,k=x-s*s-o*o+l*l,T=x-u*u-h*h+d*d,C=p*m-f*_,M=(m*T-_*k)/(C*2)-n,S=(_*y-m*b)/C,R=(p*k-f*T)/(C*2)-i,A=(f*b-p*y)/C,L=S*S+A*A-1,v=2*(a+M*S+R*A),B=M*M+R*R-a*a,w=-(Math.abs(L)>1e-6?(v+Math.sqrt(v*v-4*L*B))/(2*L):B/v);return{x:n+M+S*w,y:i+R+A*w,r:w}}function kx(t,e,r){var n=t.x-e.x,i,a,s=t.y-e.y,o,l,u=n*n+s*s;u?(a=e.r+r.r,a*=a,l=t.r+r.r,l*=l,a>l?(i=(u+l-a)/(2*u),o=Math.sqrt(Math.max(0,l/u-i*i)),r.x=t.x-i*n-o*s,r.y=t.y-i*s+o*n):(i=(u+a-l)/(2*u),o=Math.sqrt(Math.max(0,a/u-i*i)),r.x=e.x+i*n-o*s,r.y=e.y+i*s+o*n)):(r.x=e.x+r.r,r.y=e.y)}function wx(t,e){var r=t.r+e.r-1e-6,n=e.x-t.x,i=e.y-t.y;return r>0&&r*r>n*n+i*i}function Tx(t){var e=t._,r=t.next._,n=e.r+r.r,i=(e.x*r.r+r.x*e.r)/n,a=(e.y*r.r+r.y*e.r)/n;return 
i*i+a*a}function tf(t){this._=t,this.next=null,this.previous=null}function Ex(t,e){if(!(a=(t=$V(t)).length))return 0;var r,n,i,a,s,o,l,u,h,d,f;if(r=t[0],r.x=0,r.y=0,!(a>1))return r.r;if(n=t[1],r.x=-n.r,n.x=r.r,n.y=0,!(a>2))return r.r+n.r;kx(n,r,i=t[2]),r=new tf(r),n=new tf(n),i=new tf(i),r.next=i.previous=n,n.next=r.previous=i,i.next=n.previous=r;t:for(l=3;l<a;++l){kx(r._,n._,i=t[l]),i=new tf(i),u=n.next,h=r.previous,d=n._.r,f=r._.r;do if(d<=f){if(wx(u._,i._)){n=u,r.next=n,n.previous=r,--l;continue t}d+=u._.r,u=u.next}else{if(wx(h._,i._)){r=h,r.next=n,n.previous=r,--l;continue t}f+=h._.r,h=h.previous}while(u!==h.next);for(i.previous=r,i.next=n,r.next=n.previous=n=i,s=Tx(r);(i=i.next)!==n;)(o=Tx(i))<s&&(r=i,s=o);n=r.next}for(r=[n._],i=n;(i=i.next)!==n;)r.push(i._);for(i=_x(r,e),l=0;l<a;++l)r=t[l],r.x-=i.x,r.y-=i.y;return i.r}function tz(t){return Ex(t,j2()),t}function ez(t){return Math.sqrt(t.value)}function rz(){var t=null,e=1,r=1,n=Ls;function i(a){const s=j2();return a.x=e/2,a.y=r/2,t?a.eachBefore(Cx(t)).eachAfter(X2(n,.5,s)).eachBefore(Sx(1)):a.eachBefore(Cx(ez)).eachAfter(X2(Ls,1,s)).eachAfter(X2(n,a.r/Math.min(e,r),s)).eachBefore(Sx(Math.min(e,r)/(2*a.r))),a}return i.radius=function(a){return arguments.length?(t=Qh(a),i):t},i.size=function(a){return arguments.length?(e=+a[0],r=+a[1],i):[e,r]},i.padding=function(a){return arguments.length?(n=typeof a=="function"?a:Bo(+a),i):n},i}function Cx(t){return function(e){e.children||(e.r=Math.max(0,+t(e)||0))}}function X2(t,e,r){return function(n){if(i=n.children){var i,a,s=i.length,o=t(n)*e||0,l;if(o)for(a=0;a<s;++a)i[a].r+=o;if(l=Ex(i,r),o)for(a=0;a<s;++a)i[a].r-=o;n.r=l+o}}}function Sx(t){return function(e){var r=e.parent;e.r*=t,r&&(e.x=r.x+t*e.x,e.y=r.y+t*e.y)}}function Ax(t){t.x0=Math.round(t.x0),t.y0=Math.round(t.y0),t.x1=Math.round(t.x1),t.y1=Math.round(t.y1)}function hc(t,e,r,n,i){for(var a=t.children,s,o=-1,l=a.length,u=t.value&&(n-e)/t.value;++o<l;)s=a[o],s.y0=r,s.y1=i,s.x0=e,s.x1=e+=s.value*u}function 
nz(){var t=1,e=1,r=0,n=!1;function i(s){var o=s.height+1;return s.x0=s.y0=r,s.x1=t,s.y1=e/o,s.eachBefore(a(e,o)),n&&s.eachBefore(Ax),s}function a(s,o){return function(l){l.children&&hc(l,l.x0,s*(l.depth+1)/o,l.x1,s*(l.depth+2)/o);var u=l.x0,h=l.y0,d=l.x1-r,f=l.y1-r;d<u&&(u=d=(u+d)/2),f<h&&(h=f=(h+f)/2),l.x0=u,l.y0=h,l.x1=d,l.y1=f}}return i.round=function(s){return arguments.length?(n=!!s,i):n},i.size=function(s){return arguments.length?(t=+s[0],e=+s[1],i):[t,e]},i.padding=function(s){return arguments.length?(r=+s,i):r},i}var iz={depth:-1},Mx={},K2={};function az(t){return t.id}function sz(t){return t.parentId}function oz(){var t=az,e=sz,r;function n(i){var a=Array.from(i),s=t,o=e,l,u,h,d,f,p,m,_,y=new Map;if(r!=null){const b=a.map((T,C)=>lz(r(T,C,i))),x=b.map(Lx),k=new Set(b).add("");for(const T of x)k.has(T)||(k.add(T),b.push(T),x.push(Lx(T)),a.push(K2));s=(T,C)=>b[C],o=(T,C)=>x[C]}for(h=0,l=a.length;h<l;++h)u=a[h],p=a[h]=new Ms(u),(m=s(u,h,i))!=null&&(m+="")&&(_=p.id=m,y.set(_,y.has(_)?Mx:p)),(m=o(u,h,i))!=null&&(m+="")&&(p.parent=m);for(h=0;h<l;++h)if(p=a[h],m=p.parent){if(f=y.get(m),!f)throw new Error("missing: "+m);if(f===Mx)throw new Error("ambiguous: "+m);f.children?f.children.push(p):f.children=[p],p.parent=f}else{if(d)throw new Error("multiple roots");d=p}if(!d)throw new Error("no root");if(r!=null){for(;d.data===K2&&d.children.length===1;)d=d.children[0],--l;for(let b=a.length-1;b>=0&&(p=a[b],p.data===K2);--b)p.data=null}if(d.parent=iz,d.eachBefore(function(b){b.depth=b.parent.depth+1,--l}).eachBefore(yx),d.parent=null,l>0)throw new Error("cycle");return d}return n.id=function(i){return arguments.length?(t=Qh(i),n):t},n.parentId=function(i){return arguments.length?(e=Qh(i),n):e},n.path=function(i){return arguments.length?(r=Qh(i),n):r},n}function lz(t){t=`${t}`;let e=t.length;return Z2(t,e-1)&&!Z2(t,e-2)&&(t=t.slice(0,-1)),t[0]==="/"?t:`/${t}`}function Lx(t){let e=t.length;if(e<2)return"";for(;--e>1&&!Z2(t,e););return t.slice(0,e)}function 
Z2(t,e){if(t[e]==="/"){let r=0;for(;e>0&&t[--e]==="\\";)++r;if((r&1)===0)return!0}return!1}function cz(t,e){return t.parent===e.parent?1:2}function Q2(t){var e=t.children;return e?e[0]:t.t}function J2(t){var e=t.children;return e?e[e.length-1]:t.t}function uz(t,e,r){var n=r/(e.i-t.i);e.c-=n,e.s+=r,t.c+=n,e.z+=r,e.m+=r}function hz(t){for(var e=0,r=0,n=t.children,i=n.length,a;--i>=0;)a=n[i],a.z+=e,a.m+=e,e+=a.s+(r+=a.c)}function fz(t,e,r){return t.a.parent===e.parent?t.a:r}function ef(t,e){this._=t,this.parent=null,this.children=null,this.A=null,this.a=this,this.z=0,this.m=0,this.c=0,this.s=0,this.t=null,this.i=e}ef.prototype=Object.create(Ms.prototype);function dz(t){for(var e=new ef(t,0),r,n=[e],i,a,s,o;r=n.pop();)if(a=r._.children)for(r.children=new Array(o=a.length),s=o-1;s>=0;--s)n.push(i=r.children[s]=new ef(a[s],s)),i.parent=r;return(e.parent=new ef(null,0)).children=[e],e}function pz(){var t=cz,e=1,r=1,n=null;function i(u){var h=dz(u);if(h.eachAfter(a),h.parent.m=-h.z,h.eachBefore(s),n)u.eachBefore(l);else{var d=u,f=u,p=u;u.eachBefore(function(x){x.x<d.x&&(d=x),x.x>f.x&&(f=x),x.depth>p.depth&&(p=x)});var m=d===f?1:t(d,f)/2,_=m-d.x,y=e/(f.x+m+_),b=r/(p.depth||1);u.eachBefore(function(x){x.x=(x.x+_)*y,x.y=x.depth*b})}return u}function a(u){var h=u.children,d=u.parent.children,f=u.i?d[u.i-1]:null;if(h){hz(u);var p=(h[0].z+h[h.length-1].z)/2;f?(u.z=f.z+t(u._,f._),u.m=u.z-p):u.z=p}else f&&(u.z=f.z+t(u._,f._));u.parent.A=o(u,f,u.parent.A||d[0])}function s(u){u._.x=u.z+u.parent.m,u.m+=u.parent.m}function o(u,h,d){if(h){for(var f=u,p=u,m=h,_=f.parent.children[0],y=f.m,b=p.m,x=m.m,k=_.m,T;m=J2(m),f=Q2(f),m&&f;)_=Q2(_),p=J2(p),p.a=u,T=m.z+x-f.z-y+t(m._,f._),T>0&&(uz(fz(m,u,d),u,T),y+=T,b+=T),x+=m.m,y+=f.m,k+=_.m,b+=p.m;m&&!J2(p)&&(p.t=m,p.m+=x-b),f&&!Q2(_)&&(_.t=f,_.m+=y-k,d=u)}return d}function l(u){u.x*=e,u.y=u.depth*r}return i.separation=function(u){return arguments.length?(t=u,i):t},i.size=function(u){return 
arguments.length?(n=!1,e=+u[0],r=+u[1],i):n?null:[e,r]},i.nodeSize=function(u){return arguments.length?(n=!0,e=+u[0],r=+u[1],i):n?[e,r]:null},i}function rf(t,e,r,n,i){for(var a=t.children,s,o=-1,l=a.length,u=t.value&&(i-r)/t.value;++o<l;)s=a[o],s.x0=e,s.x1=n,s.y0=r,s.y1=r+=s.value*u}var Rx=(1+Math.sqrt(5))/2;function Ix(t,e,r,n,i,a){for(var s=[],o=e.children,l,u,h=0,d=0,f=o.length,p,m,_=e.value,y,b,x,k,T,C,M;h<f;){p=i-r,m=a-n;do y=o[d++].value;while(!y&&d<f);for(b=x=y,C=Math.max(m/p,p/m)/(_*t),M=y*y*C,T=Math.max(x/M,M/b);d<f;++d){if(y+=u=o[d].value,u<b&&(b=u),u>x&&(x=u),M=y*y*C,k=Math.max(x/M,M/b),k>T){y-=u;break}T=k}s.push(l={value:y,dice:p<m,children:o.slice(h,d)}),l.dice?hc(l,r,n,i,_?n+=m*y/_:a):rf(l,r,n,_?r+=p*y/_:i,a),_-=y,h=d}return s}const Nx=function t(e){function r(n,i,a,s,o){Ix(e,n,i,a,s,o)}return r.ratio=function(n){return t((n=+n)>1?n:1)},r}(Rx);function gz(){var t=Nx,e=!1,r=1,n=1,i=[0],a=Ls,s=Ls,o=Ls,l=Ls,u=Ls;function h(f){return f.x0=f.y0=0,f.x1=r,f.y1=n,f.eachBefore(d),i=[0],e&&f.eachBefore(Ax),f}function d(f){var p=i[f.depth],m=f.x0+p,_=f.y0+p,y=f.x1-p,b=f.y1-p;y<m&&(m=y=(m+y)/2),b<_&&(_=b=(_+b)/2),f.x0=m,f.y0=_,f.x1=y,f.y1=b,f.children&&(p=i[f.depth+1]=a(f)/2,m+=u(f)-p,_+=s(f)-p,y-=o(f)-p,b-=l(f)-p,y<m&&(m=y=(m+y)/2),b<_&&(_=b=(_+b)/2),t(f,m,_,y,b))}return h.round=function(f){return arguments.length?(e=!!f,h):e},h.size=function(f){return arguments.length?(r=+f[0],n=+f[1],h):[r,n]},h.tile=function(f){return arguments.length?(t=mx(f),h):t},h.padding=function(f){return arguments.length?h.paddingInner(f).paddingOuter(f):h.paddingInner()},h.paddingInner=function(f){return arguments.length?(a=typeof f=="function"?f:Bo(+f),h):a},h.paddingOuter=function(f){return arguments.length?h.paddingTop(f).paddingRight(f).paddingBottom(f).paddingLeft(f):h.paddingTop()},h.paddingTop=function(f){return arguments.length?(s=typeof f=="function"?f:Bo(+f),h):s},h.paddingRight=function(f){return arguments.length?(o=typeof 
f=="function"?f:Bo(+f),h):o},h.paddingBottom=function(f){return arguments.length?(l=typeof f=="function"?f:Bo(+f),h):l},h.paddingLeft=function(f){return arguments.length?(u=typeof f=="function"?f:Bo(+f),h):u},h}function yz(t,e,r,n,i){var a=t.children,s,o=a.length,l,u=new Array(o+1);for(u[0]=l=s=0;s<o;++s)u[s+1]=l+=a[s].value;h(0,o,t.value,e,r,n,i);function h(d,f,p,m,_,y,b){if(d>=f-1){var x=a[d];x.x0=m,x.y0=_,x.x1=y,x.y1=b;return}for(var k=u[d],T=p/2+k,C=d+1,M=f-1;C<M;){var S=C+M>>>1;u[S]<T?C=S+1:M=S}T-u[C-1]<u[C]-T&&d+1<C&&--C;var R=u[C]-k,A=p-R;if(y-m>b-_){var L=p?(m*A+y*R)/p:y;h(d,C,R,m,_,L,b),h(C,f,A,L,_,y,b)}else{var v=p?(_*A+b*R)/p:b;h(d,C,R,m,_,y,v),h(C,f,A,m,v,y,b)}}}function mz(t,e,r,n,i){(t.depth&1?rf:hc)(t,e,r,n,i)}const bz=function t(e){function r(n,i,a,s,o){if((l=n._squarify)&&l.ratio===e)for(var l,u,h,d,f=-1,p,m=l.length,_=n.value;++f<m;){for(u=l[f],h=u.children,d=u.value=0,p=h.length;d<p;++d)u.value+=h[d].value;u.dice?hc(u,i,a,s,_?a+=(o-a)*u.value/_:o):rf(u,i,a,_?i+=(s-i)*u.value/_:s,o),_-=u.value}else n._squarify=l=Ix(e,n,i,a,s,o),l.ratio=e}return r.ratio=function(n){return t((n=+n)>1?n:1)},r}(Rx);function _z(t){for(var e=-1,r=t.length,n,i=t[r-1],a=0;++e<r;)n=i,i=t[e],a+=n[1]*i[0]-n[0]*i[1];return a/2}function vz(t){for(var e=-1,r=t.length,n=0,i=0,a,s=t[r-1],o,l=0;++e<r;)a=s,s=t[e],l+=o=a[0]*s[1]-s[0]*a[1],n+=(a[0]+s[0])*o,i+=(a[1]+s[1])*o;return l*=3,[n/l,i/l]}function xz(t,e,r){return(e[0]-t[0])*(r[1]-t[1])-(e[1]-t[1])*(r[0]-t[0])}function kz(t,e){return t[0]-e[0]||t[1]-e[1]}function Bx(t){const e=t.length,r=[0,1];let n=2,i;for(i=2;i<e;++i){for(;n>1&&xz(t[r[n-2]],t[r[n-1]],t[i])<=0;)--n;r[n++]=i}return r.slice(0,n)}function wz(t){if((r=t.length)<3)return null;var e,r,n=new Array(r),i=new Array(r);for(e=0;e<r;++e)n[e]=[+t[e][0],+t[e][1],e];for(n.sort(kz),e=0;e<r;++e)i[e]=[n[e][0],-n[e][1]];var 
a=Bx(n),s=Bx(i),o=s[0]===a[0],l=s[s.length-1]===a[a.length-1],u=[];for(e=a.length-1;e>=0;--e)u.push(t[n[a[e]][2]]);for(e=+o;e<s.length-l;++e)u.push(t[n[s[e]][2]]);return u}function Tz(t,e){for(var r=t.length,n=t[r-1],i=e[0],a=e[1],s=n[0],o=n[1],l,u,h=!1,d=0;d<r;++d)n=t[d],l=n[0],u=n[1],u>a!=o>a&&i<(s-l)*(a-u)/(o-u)+l&&(h=!h),s=l,o=u;return h}function Ez(t){for(var e=-1,r=t.length,n=t[r-1],i,a,s=n[0],o=n[1],l=0;++e<r;)i=s,a=o,n=t[e],s=n[0],o=n[1],i-=s,a-=o,l+=Math.hypot(i,a);return l}const Ir=Math.random,Cz=function t(e){function r(n,i){return n=n==null?0:+n,i=i==null?1:+i,arguments.length===1?(i=n,n=0):i-=n,function(){return e()*i+n}}return r.source=t,r}(Ir),Sz=function t(e){function r(n,i){return arguments.length<2&&(i=n,n=0),n=Math.floor(n),i=Math.floor(i)-n,function(){return Math.floor(e()*i+n)}}return r.source=t,r}(Ir),tp=function t(e){function r(n,i){var a,s;return n=n==null?0:+n,i=i==null?1:+i,function(){var o;if(a!=null)o=a,a=null;else do a=e()*2-1,o=e()*2-1,s=a*a+o*o;while(!s||s>1);return n+i*o*Math.sqrt(-2*Math.log(s)/s)}}return r.source=t,r}(Ir),Az=function t(e){var r=tp.source(e);function n(){var i=r.apply(this,arguments);return function(){return Math.exp(i())}}return n.source=t,n}(Ir),Dx=function t(e){function r(n){return(n=+n)<=0?()=>0:function(){for(var i=0,a=n;a>1;--a)i+=e();return i+a*e()}}return r.source=t,r}(Ir),Mz=function t(e){var r=Dx.source(e);function n(i){if((i=+i)==0)return e;var a=r(i);return function(){return a()/i}}return n.source=t,n}(Ir),Lz=function t(e){function r(n){return function(){return-Math.log1p(-e())/n}}return r.source=t,r}(Ir),Rz=function t(e){function r(n){if((n=+n)<0)throw new RangeError("invalid alpha");return n=1/-n,function(){return Math.pow(1-e(),n)}}return r.source=t,r}(Ir),Iz=function t(e){function r(n){if((n=+n)<0||n>1)throw new RangeError("invalid p");return function(){return Math.floor(e()+n)}}return r.source=t,r}(Ir),Ox=function t(e){function r(n){if((n=+n)<0||n>1)throw new RangeError("invalid p");return 
n===0?()=>1/0:n===1?()=>1:(n=Math.log1p(-n),function(){return 1+Math.floor(Math.log1p(-e())/n)})}return r.source=t,r}(Ir),ep=function t(e){var r=tp.source(e)();function n(i,a){if((i=+i)<0)throw new RangeError("invalid k");if(i===0)return()=>0;if(a=a==null?1:+a,i===1)return()=>-Math.log1p(-e())*a;var s=(i<1?i+1:i)-1/3,o=1/(3*Math.sqrt(s)),l=i<1?()=>Math.pow(e(),1/i):()=>1;return function(){do{do var u=r(),h=1+o*u;while(h<=0);h*=h*h;var d=1-e()}while(d>=1-.0331*u*u*u*u&&Math.log(d)>=.5*u*u+s*(1-h+Math.log(h)));return s*h*l()*a}}return n.source=t,n}(Ir),Fx=function t(e){var r=ep.source(e);function n(i,a){var s=r(i),o=r(a);return function(){var l=s();return l===0?0:l/(l+o())}}return n.source=t,n}(Ir),Px=function t(e){var r=Ox.source(e),n=Fx.source(e);function i(a,s){return a=+a,(s=+s)>=1?()=>a:s<=0?()=>0:function(){for(var o=0,l=a,u=s;l*u>16&&l*(1-u)>16;){var h=Math.floor((l+1)*u),d=n(h,l-h+1)();d<=u?(o+=h,l-=h,u=(u-d)/(1-d)):(l=h-1,u/=d)}for(var f=u<.5,p=f?u:1-u,m=r(p),_=m(),y=0;_<=l;++y)_+=m();return o+(f?y:l-y)}}return i.source=t,i}(Ir),Nz=function t(e){function r(n,i,a){var s;return(n=+n)==0?s=o=>-Math.log(o):(n=1/n,s=o=>Math.pow(o,n)),i=i==null?0:+i,a=a==null?1:+a,function(){return i+a*s(-Math.log1p(-e()))}}return r.source=t,r}(Ir),Bz=function t(e){function r(n,i){return n=n==null?0:+n,i=i==null?1:+i,function(){return n+i*Math.tan(Math.PI*e())}}return r.source=t,r}(Ir),Dz=function t(e){function r(n,i){return n=n==null?0:+n,i=i==null?1:+i,function(){var a=e();return n+i*Math.log(a/(1-a))}}return r.source=t,r}(Ir),Oz=function t(e){var r=ep.source(e),n=Px.source(e);function i(a){return function(){for(var s=0,o=a;o>16;){var l=Math.floor(.875*o),u=r(l)();if(u>o)return s+n(l-1,o/u)();s+=l,o-=u}for(var h=-Math.log1p(-e()),d=0;h<=o;++d)h-=Math.log1p(-e());return s+d}}return i.source=t,i}(Ir),Fz=1664525,Pz=1013904223,qx=1/4294967296;function qz(t=Math.random()){let e=(0<=t&&t<1?t/qx:Math.abs(t))|0;return()=>(e=Fz*e+Pz|0,qx*(e>>>0))}function 
On(t,e){switch(arguments.length){case 0:break;case 1:this.range(t);break;default:this.range(e).domain(t);break}return this}function ta(t,e){switch(arguments.length){case 0:break;case 1:{typeof t=="function"?this.interpolator(t):this.range(t);break}default:{this.domain(t),typeof e=="function"?this.interpolator(e):this.range(e);break}}return this}const rp=Symbol("implicit");function nf(){var t=new kl,e=[],r=[],n=rp;function i(a){let s=t.get(a);if(s===void 0){if(n!==rp)return n;t.set(a,s=e.push(a)-1)}return r[s%r.length]}return i.domain=function(a){if(!arguments.length)return e.slice();e=[],t=new kl;for(const s of a)t.has(s)||t.set(s,e.push(s)-1);return i},i.range=function(a){return arguments.length?(r=Array.from(a),i):r.slice()},i.unknown=function(a){return arguments.length?(n=a,i):n},i.copy=function(){return nf(e,r).unknown(n)},On.apply(i,arguments),i}function np(){var t=nf().unknown(void 0),e=t.domain,r=t.range,n=0,i=1,a,s,o=!1,l=0,u=0,h=.5;delete t.unknown;function d(){var f=e().length,p=i<n,m=p?i:n,_=p?n:i;a=(_-m)/Math.max(1,f-l+u*2),o&&(a=Math.floor(a)),m+=(_-m-a*(f-l))*h,s=a*(1-l),o&&(m=Math.round(m),s=Math.round(s));var y=Ca(f).map(function(b){return m+a*b});return r(p?y.reverse():y)}return t.domain=function(f){return arguments.length?(e(f),d()):e()},t.range=function(f){return arguments.length?([n,i]=f,n=+n,i=+i,d()):[n,i]},t.rangeRound=function(f){return[n,i]=f,n=+n,i=+i,o=!0,d()},t.bandwidth=function(){return s},t.step=function(){return a},t.round=function(f){return arguments.length?(o=!!f,d()):o},t.padding=function(f){return arguments.length?(l=Math.min(1,u=+f),d()):l},t.paddingInner=function(f){return arguments.length?(l=Math.min(1,f),d()):l},t.paddingOuter=function(f){return arguments.length?(u=+f,d()):u},t.align=function(f){return arguments.length?(h=Math.max(0,Math.min(1,f)),d()):h},t.copy=function(){return np(e(),[n,i]).round(o).paddingInner(l).paddingOuter(u).align(h)},On.apply(d(),arguments)}function Vx(t){var e=t.copy;return 
t.padding=t.paddingOuter,delete t.paddingInner,delete t.paddingOuter,t.copy=function(){return Vx(e())},t}function Vz(){return Vx(np.apply(null,arguments).paddingInner(1))}function zz(t){return function(){return t}}function af(t){return+t}var zx=[0,1];function an(t){return t}function ip(t,e){return(e-=t=+t)?function(r){return(r-t)/e}:zz(isNaN(e)?NaN:.5)}function Yz(t,e){var r;return t>e&&(r=t,t=e,e=r),function(n){return Math.max(t,Math.min(e,n))}}function Uz(t,e,r){var n=t[0],i=t[1],a=e[0],s=e[1];return i<n?(n=ip(i,n),a=r(s,a)):(n=ip(n,i),a=r(a,s)),function(o){return a(n(o))}}function Wz(t,e,r){var n=Math.min(t.length,e.length)-1,i=new Array(n),a=new Array(n),s=-1;for(t[n]<t[0]&&(t=t.slice().reverse(),e=e.slice().reverse());++s<n;)i[s]=ip(t[s],t[s+1]),a[s]=r(e[s],e[s+1]);return function(o){var l=cs(t,o,1,n)-1;return a[l](i[l](o))}}function fc(t,e){return e.domain(t.domain()).range(t.range()).interpolate(t.interpolate()).clamp(t.clamp()).unknown(t.unknown())}function sf(){var t=zx,e=zx,r=Ma,n,i,a,s=an,o,l,u;function h(){var f=Math.min(t.length,e.length);return s!==an&&(s=Yz(t[0],t[f-1])),o=f>2?Wz:Uz,l=u=null,d}function d(f){return f==null||isNaN(f=+f)?a:(l||(l=o(t.map(n),e,r)))(n(s(f)))}return d.invert=function(f){return s(i((u||(u=o(e,t.map(n),Bn)))(f)))},d.domain=function(f){return arguments.length?(t=Array.from(f,af),h()):t.slice()},d.range=function(f){return arguments.length?(e=Array.from(f),h()):e.slice()},d.rangeRound=function(f){return e=Array.from(f),r=ju,h()},d.clamp=function(f){return arguments.length?(s=f?!0:an,h()):s!==an},d.interpolate=function(f){return arguments.length?(r=f,h()):r},d.unknown=function(f){return arguments.length?(a=f,d):a},function(f,p){return n=f,i=p,h()}}function ap(){return sf()(an,an)}function Yx(t,e,r,n){var i=wl(t,e,r),a;switch(n=Co(n==null?",f":n),n.type){case"s":{var s=Math.max(Math.abs(t),Math.abs(e));return 
n.precision==null&&!isNaN(a=Jv(i,s))&&(n.precision=a),Jd(n,s)}case"":case"e":case"g":case"p":case"r":{n.precision==null&&!isNaN(a=t6(i,Math.max(Math.abs(t),Math.abs(e))))&&(n.precision=a-(n.type==="e"));break}case"f":case"%":{n.precision==null&&!isNaN(a=Qv(i))&&(n.precision=a-(n.type==="%")*2);break}}return yh(n)}function Oa(t){var e=t.domain;return t.ticks=function(r){var n=e();return hs(n[0],n[n.length-1],r==null?10:r)},t.tickFormat=function(r,n){var i=e();return Yx(i[0],i[i.length-1],r==null?10:r,n)},t.nice=function(r){r==null&&(r=10);var n=e(),i=0,a=n.length-1,s=n[i],o=n[a],l,u,h=10;for(o<s&&(u=s,s=o,o=u,u=i,i=a,a=u);h-- >0;){if(u=oo(s,o,r),u===l)return n[i]=s,n[a]=o,e(n);if(u>0)s=Math.floor(s/u)*u,o=Math.ceil(o/u)*u;else if(u<0)s=Math.ceil(s*u)/u,o=Math.floor(o*u)/u;else break;l=u}return t},t}function sp(){var t=ap();return t.copy=function(){return fc(t,sp())},On.apply(t,arguments),Oa(t)}function Ux(t){var e;function r(n){return n==null||isNaN(n=+n)?e:n}return r.invert=r,r.domain=r.range=function(n){return arguments.length?(t=Array.from(n,af),r):t.slice()},r.unknown=function(n){return arguments.length?(e=n,r):e},r.copy=function(){return Ux(t).unknown(e)},t=arguments.length?Array.from(t,af):[0,1],Oa(r)}function Wx(t,e){t=t.slice();var r=0,n=t.length-1,i=t[r],a=t[n],s;return a<i&&(s=r,r=n,n=s,s=i,i=a,a=s),t[r]=e.floor(i),t[n]=e.ceil(a),t}function Hx(t){return Math.log(t)}function Gx(t){return Math.exp(t)}function Hz(t){return-Math.log(-t)}function Gz(t){return-Math.exp(-t)}function jz(t){return isFinite(t)?+("1e"+t):t<0?0:t}function $z(t){return t===10?jz:t===Math.E?Math.exp:e=>Math.pow(t,e)}function Xz(t){return t===Math.E?Math.log:t===10&&Math.log10||t===2&&Math.log2||(t=Math.log(t),e=>Math.log(e)/t)}function jx(t){return(e,r)=>-t(-e,r)}function op(t){const e=t(Hx,Gx),r=e.domain;let n=10,i,a;function s(){return i=Xz(n),a=$z(n),r()[0]<0?(i=jx(i),a=jx(a),t(Hz,Gz)):t(Hx,Gx),e}return e.base=function(o){return 
arguments.length?(n=+o,s()):n},e.domain=function(o){return arguments.length?(r(o),s()):r()},e.ticks=o=>{const l=r();let u=l[0],h=l[l.length-1];const d=h<u;d&&([u,h]=[h,u]);let f=i(u),p=i(h),m,_;const y=o==null?10:+o;let b=[];if(!(n%1)&&p-f<y){if(f=Math.floor(f),p=Math.ceil(p),u>0){for(;f<=p;++f)for(m=1;m<n;++m)if(_=f<0?m/a(-f):m*a(f),!(_<u)){if(_>h)break;b.push(_)}}else for(;f<=p;++f)for(m=n-1;m>=1;--m)if(_=f>0?m/a(-f):m*a(f),!(_<u)){if(_>h)break;b.push(_)}b.length*2<y&&(b=hs(u,h,y))}else b=hs(f,p,Math.min(p-f,y)).map(a);return d?b.reverse():b},e.tickFormat=(o,l)=>{if(o==null&&(o=10),l==null&&(l=n===10?"s":","),typeof l!="function"&&(!(n%1)&&(l=Co(l)).precision==null&&(l.trim=!0),l=yh(l)),o===1/0)return l;const u=Math.max(1,n*o/e.ticks().length);return h=>{let d=h/a(Math.round(i(h)));return d*n<n-.5&&(d*=n),d<=u?l(h):""}},e.nice=()=>r(Wx(r(),{floor:o=>a(Math.floor(i(o))),ceil:o=>a(Math.ceil(i(o)))})),e}function $x(){const t=op(sf()).domain([1,10]);return t.copy=()=>fc(t,$x()).base(t.base()),On.apply(t,arguments),t}function Xx(t){return function(e){return Math.sign(e)*Math.log1p(Math.abs(e/t))}}function Kx(t){return function(e){return Math.sign(e)*Math.expm1(Math.abs(e))*t}}function lp(t){var e=1,r=t(Xx(e),Kx(e));return r.constant=function(n){return arguments.length?t(Xx(e=+n),Kx(e)):e},Oa(r)}function Zx(){var t=lp(sf());return t.copy=function(){return fc(t,Zx()).constant(t.constant())},On.apply(t,arguments)}function Qx(t){return function(e){return e<0?-Math.pow(-e,t):Math.pow(e,t)}}function Kz(t){return t<0?-Math.sqrt(-t):Math.sqrt(t)}function Zz(t){return t<0?-t*t:t*t}function cp(t){var e=t(an,an),r=1;function n(){return r===1?t(an,an):r===.5?t(Kz,Zz):t(Qx(r),Qx(1/r))}return e.exponent=function(i){return arguments.length?(r=+i,n()):r},Oa(e)}function up(){var t=cp(sf());return t.copy=function(){return fc(t,up()).exponent(t.exponent())},On.apply(t,arguments),t}function Qz(){return up.apply(null,arguments).exponent(.5)}function Jx(t){return Math.sign(t)*t*t}function 
Jz(t){return Math.sign(t)*Math.sqrt(Math.abs(t))}function t8(){var t=ap(),e=[0,1],r=!1,n;function i(a){var s=Jz(t(a));return isNaN(s)?n:r?Math.round(s):s}return i.invert=function(a){return t.invert(Jx(a))},i.domain=function(a){return arguments.length?(t.domain(a),i):t.domain()},i.range=function(a){return arguments.length?(t.range((e=Array.from(a,af)).map(Jx)),i):e.slice()},i.rangeRound=function(a){return i.range(a).round(!0)},i.round=function(a){return arguments.length?(r=!!a,i):r},i.clamp=function(a){return arguments.length?(t.clamp(a),i):t.clamp()},i.unknown=function(a){return arguments.length?(n=a,i):n},i.copy=function(){return t8(t.domain(),e).round(r).clamp(t.clamp()).unknown(n)},On.apply(i,arguments),Oa(i)}function e8(){var t=[],e=[],r=[],n;function i(){var s=0,o=Math.max(1,e.length);for(r=new Array(o-1);++s<o;)r[s-1]=q_(t,s/o);return a}function a(s){return s==null||isNaN(s=+s)?n:e[cs(r,s)]}return a.invertExtent=function(s){var o=e.indexOf(s);return o<0?[NaN,NaN]:[o>0?r[o-1]:t[0],o<r.length?r[o]:t[t.length-1]]},a.domain=function(s){if(!arguments.length)return t.slice();t=[];for(let o of s)o!=null&&!isNaN(o=+o)&&t.push(o);return t.sort(Qe),i()},a.range=function(s){return arguments.length?(e=Array.from(s),i()):e.slice()},a.unknown=function(s){return arguments.length?(n=s,a):n},a.quantiles=function(){return r.slice()},a.copy=function(){return e8().domain(t).range(e).unknown(n)},On.apply(a,arguments)}function r8(){var t=0,e=1,r=1,n=[.5],i=[0,1],a;function s(l){return l!=null&&l<=l?i[cs(n,l,0,r)]:a}function o(){var l=-1;for(n=new Array(r);++l<r;)n[l]=((l+1)*e-(l-r)*t)/(r+1);return s}return s.domain=function(l){return arguments.length?([t,e]=l,t=+t,e=+e,o()):[t,e]},s.range=function(l){return arguments.length?(r=(i=Array.from(l)).length-1,o()):i.slice()},s.invertExtent=function(l){var u=i.indexOf(l);return u<0?[NaN,NaN]:u<1?[t,n[0]]:u>=r?[n[r-1],e]:[n[u-1],n[u]]},s.unknown=function(l){return arguments.length&&(a=l),s},s.thresholds=function(){return 
n.slice()},s.copy=function(){return r8().domain([t,e]).range(i).unknown(a)},On.apply(Oa(s),arguments)}function n8(){var t=[.5],e=[0,1],r,n=1;function i(a){return a!=null&&a<=a?e[cs(t,a,0,n)]:r}return i.domain=function(a){return arguments.length?(t=Array.from(a),n=Math.min(t.length,e.length-1),i):t.slice()},i.range=function(a){return arguments.length?(e=Array.from(a),n=Math.min(t.length,e.length-1),i):e.slice()},i.invertExtent=function(a){var s=e.indexOf(a);return[t[s-1],t[s]]},i.unknown=function(a){return arguments.length?(r=a,i):r},i.copy=function(){return n8().domain(t).range(e).unknown(r)},On.apply(i,arguments)}var hp=new Date,fp=new Date;function xr(t,e,r,n){function i(a){return t(a=arguments.length===0?new Date:new Date(+a)),a}return i.floor=function(a){return t(a=new Date(+a)),a},i.ceil=function(a){return t(a=new Date(a-1)),e(a,1),t(a),a},i.round=function(a){var s=i(a),o=i.ceil(a);return a-s<o-a?s:o},i.offset=function(a,s){return e(a=new Date(+a),s==null?1:Math.floor(s)),a},i.range=function(a,s,o){var l=[],u;if(a=i.ceil(a),o=o==null?1:Math.floor(o),!(a<s)||!(o>0))return l;do l.push(u=new Date(+a)),e(a,o),t(a);while(u<a&&a<s);return l},i.filter=function(a){return xr(function(s){if(s>=s)for(;t(s),!a(s);)s.setTime(s-1)},function(s,o){if(s>=s)if(o<0)for(;++o<=0;)for(;e(s,-1),!a(s););else for(;--o>=0;)for(;e(s,1),!a(s););})},r&&(i.count=function(a,s){return hp.setTime(+a),fp.setTime(+s),t(hp),t(fp),Math.floor(r(hp,fp))},i.every=function(a){return a=Math.floor(a),!isFinite(a)||!(a>0)?null:a>1?i.filter(n?function(s){return n(s)%a===0}:function(s){return i.count(0,s)%a===0}):i}),i}var of=xr(function(){},function(t,e){t.setTime(+t+e)},function(t,e){return e-t});of.every=function(t){return t=Math.floor(t),!isFinite(t)||!(t>0)?null:t>1?xr(function(e){e.setTime(Math.floor(e/t)*t)},function(e,r){e.setTime(+e+r*t)},function(e,r){return(r-e)/t}):of};const dp=of;var i8=of.range;const ea=1e3,Fn=ea*60,ra=Fn*60,Rs=ra*24,pp=Rs*7,a8=Rs*30,gp=Rs*365;var 
s8=xr(function(t){t.setTime(t-t.getMilliseconds())},function(t,e){t.setTime(+t+e*ea)},function(t,e){return(e-t)/ea},function(t){return t.getUTCSeconds()});const Fa=s8;var o8=s8.range,l8=xr(function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*ea)},function(t,e){t.setTime(+t+e*Fn)},function(t,e){return(e-t)/Fn},function(t){return t.getMinutes()});const yp=l8;var tY=l8.range,c8=xr(function(t){t.setTime(t-t.getMilliseconds()-t.getSeconds()*ea-t.getMinutes()*Fn)},function(t,e){t.setTime(+t+e*ra)},function(t,e){return(e-t)/ra},function(t){return t.getHours()});const mp=c8;var eY=c8.range,u8=xr(t=>t.setHours(0,0,0,0),(t,e)=>t.setDate(t.getDate()+e),(t,e)=>(e-t-(e.getTimezoneOffset()-t.getTimezoneOffset())*Fn)/Rs,t=>t.getDate()-1);const dc=u8;var rY=u8.range;function Is(t){return xr(function(e){e.setDate(e.getDate()-(e.getDay()+7-t)%7),e.setHours(0,0,0,0)},function(e,r){e.setDate(e.getDate()+r*7)},function(e,r){return(r-e-(r.getTimezoneOffset()-e.getTimezoneOffset())*Fn)/pp})}var Do=Is(0),pc=Is(1),h8=Is(2),f8=Is(3),Ns=Is(4),d8=Is(5),p8=Is(6),g8=Do.range,nY=pc.range,iY=h8.range,aY=f8.range,sY=Ns.range,oY=d8.range,lY=p8.range,y8=xr(function(t){t.setDate(1),t.setHours(0,0,0,0)},function(t,e){t.setMonth(t.getMonth()+e)},function(t,e){return e.getMonth()-t.getMonth()+(e.getFullYear()-t.getFullYear())*12},function(t){return t.getMonth()});const bp=y8;var cY=y8.range,_p=xr(function(t){t.setMonth(0,1),t.setHours(0,0,0,0)},function(t,e){t.setFullYear(t.getFullYear()+e)},function(t,e){return e.getFullYear()-t.getFullYear()},function(t){return t.getFullYear()});_p.every=function(t){return!isFinite(t=Math.floor(t))||!(t>0)?null:xr(function(e){e.setFullYear(Math.floor(e.getFullYear()/t)*t),e.setMonth(0,1),e.setHours(0,0,0,0)},function(e,r){e.setFullYear(e.getFullYear()+r*t)})};const Pa=_p;var uY=_p.range,m8=xr(function(t){t.setUTCSeconds(0,0)},function(t,e){t.setTime(+t+e*Fn)},function(t,e){return(e-t)/Fn},function(t){return t.getUTCMinutes()});const vp=m8;var 
hY=m8.range,b8=xr(function(t){t.setUTCMinutes(0,0,0)},function(t,e){t.setTime(+t+e*ra)},function(t,e){return(e-t)/ra},function(t){return t.getUTCHours()});const xp=b8;var fY=b8.range,_8=xr(function(t){t.setUTCHours(0,0,0,0)},function(t,e){t.setUTCDate(t.getUTCDate()+e)},function(t,e){return(e-t)/Rs},function(t){return t.getUTCDate()-1});const gc=_8;var dY=_8.range;function Bs(t){return xr(function(e){e.setUTCDate(e.getUTCDate()-(e.getUTCDay()+7-t)%7),e.setUTCHours(0,0,0,0)},function(e,r){e.setUTCDate(e.getUTCDate()+r*7)},function(e,r){return(r-e)/pp})}var Oo=Bs(0),yc=Bs(1),v8=Bs(2),x8=Bs(3),Ds=Bs(4),k8=Bs(5),w8=Bs(6),T8=Oo.range,pY=yc.range,gY=v8.range,yY=x8.range,mY=Ds.range,bY=k8.range,_Y=w8.range,E8=xr(function(t){t.setUTCDate(1),t.setUTCHours(0,0,0,0)},function(t,e){t.setUTCMonth(t.getUTCMonth()+e)},function(t,e){return e.getUTCMonth()-t.getUTCMonth()+(e.getUTCFullYear()-t.getUTCFullYear())*12},function(t){return t.getUTCMonth()});const kp=E8;var vY=E8.range,wp=xr(function(t){t.setUTCMonth(0,1),t.setUTCHours(0,0,0,0)},function(t,e){t.setUTCFullYear(t.getUTCFullYear()+e)},function(t,e){return e.getUTCFullYear()-t.getUTCFullYear()},function(t){return t.getUTCFullYear()});wp.every=function(t){return!isFinite(t=Math.floor(t))||!(t>0)?null:xr(function(e){e.setUTCFullYear(Math.floor(e.getUTCFullYear()/t)*t),e.setUTCMonth(0,1),e.setUTCHours(0,0,0,0)},function(e,r){e.setUTCFullYear(e.getUTCFullYear()+r*t)})};const qa=wp;var xY=wp.range;function C8(t,e,r,n,i,a){const s=[[Fa,1,ea],[Fa,5,5*ea],[Fa,15,15*ea],[Fa,30,30*ea],[a,1,Fn],[a,5,5*Fn],[a,15,15*Fn],[a,30,30*Fn],[i,1,ra],[i,3,3*ra],[i,6,6*ra],[i,12,12*ra],[n,1,Rs],[n,2,2*Rs],[r,1,pp],[e,1,a8],[e,3,3*a8],[t,1,gp]];function o(u,h,d){const f=h<u;f&&([u,h]=[h,u]);const p=d&&typeof d.range=="function"?d:l(u,h,d),m=p?p.range(u,+h+1):[];return f?m.reverse():m}function l(u,h,d){const f=Math.abs(h-u)/d,p=ku(([,,y])=>y).right(s,f);if(p===s.length)return t.every(wl(u/gp,h/gp,d));if(p===0)return 
dp.every(Math.max(wl(u,h,d),1));const[m,_]=s[f/s[p-1][2]<s[p][2]/f?p-1:p];return m.every(_)}return[o,l]}const[S8,A8]=C8(qa,kp,Oo,gc,xp,vp),[M8,L8]=C8(Pa,bp,Do,dc,mp,yp);function Tp(t){if(0<=t.y&&t.y<100){var e=new Date(-1,t.m,t.d,t.H,t.M,t.S,t.L);return e.setFullYear(t.y),e}return new Date(t.y,t.m,t.d,t.H,t.M,t.S,t.L)}function Ep(t){if(0<=t.y&&t.y<100){var e=new Date(Date.UTC(-1,t.m,t.d,t.H,t.M,t.S,t.L));return e.setUTCFullYear(t.y),e}return new Date(Date.UTC(t.y,t.m,t.d,t.H,t.M,t.S,t.L))}function mc(t,e,r){return{y:t,m:e,d:r,H:0,M:0,S:0,L:0}}function R8(t){var e=t.dateTime,r=t.date,n=t.time,i=t.periods,a=t.days,s=t.shortDays,o=t.months,l=t.shortMonths,u=bc(i),h=_c(i),d=bc(a),f=_c(a),p=bc(s),m=_c(s),_=bc(o),y=_c(o),b=bc(l),x=_c(l),k={a:X,A:ct,b:J,B:Y,c:null,d:F8,e:F8,f:WY,g:tU,G:rU,H:zY,I:YY,j:UY,L:P8,m:HY,M:GY,p:$,q:lt,Q:W8,s:H8,S:jY,u:$Y,U:XY,V:KY,w:ZY,W:QY,x:null,X:null,y:JY,Y:eU,Z:nU,"%":U8},T={a:ut,A:W,b:tt,B:K,c:null,d:V8,e:V8,f:oU,g:mU,G:_U,H:iU,I:aU,j:sU,L:z8,m:lU,M:cU,p:it,q:Z,Q:W8,s:H8,S:uU,u:hU,U:fU,V:dU,w:pU,W:gU,x:null,X:null,y:yU,Y:bU,Z:vU,"%":U8},C={a:L,A:v,b:B,B:w,c:D,d:D8,e:D8,f:FY,g:B8,G:N8,H:O8,I:O8,j:NY,L:OY,m:IY,M:BY,p:A,q:RY,Q:qY,s:VY,S:DY,u:CY,U:SY,V:AY,w:EY,W:MY,x:N,X:z,y:B8,Y:N8,Z:LY,"%":PY};k.x=M(r,k),k.X=M(n,k),k.c=M(e,k),T.x=M(r,T),T.X=M(n,T),T.c=M(e,T);function M(V,Q){return function(q){var U=[],F=-1,j=0,P=V.length,et,at,It;for(q instanceof Date||(q=new Date(+q));++F<P;)V.charCodeAt(F)===37&&(U.push(V.slice(j,F)),(at=I8[et=V.charAt(++F)])!=null?et=V.charAt(++F):at=et==="e"?" 
":"0",(It=Q[et])&&(et=It(q,at)),U.push(et),j=F+1);return U.push(V.slice(j,F)),U.join("")}}function S(V,Q){return function(q){var U=mc(1900,void 0,1),F=R(U,V,q+="",0),j,P;if(F!=q.length)return null;if("Q"in U)return new Date(U.Q);if("s"in U)return new Date(U.s*1e3+("L"in U?U.L:0));if(Q&&!("Z"in U)&&(U.Z=0),"p"in U&&(U.H=U.H%12+U.p*12),U.m===void 0&&(U.m="q"in U?U.q:0),"V"in U){if(U.V<1||U.V>53)return null;"w"in U||(U.w=1),"Z"in U?(j=Ep(mc(U.y,0,1)),P=j.getUTCDay(),j=P>4||P===0?yc.ceil(j):yc(j),j=gc.offset(j,(U.V-1)*7),U.y=j.getUTCFullYear(),U.m=j.getUTCMonth(),U.d=j.getUTCDate()+(U.w+6)%7):(j=Tp(mc(U.y,0,1)),P=j.getDay(),j=P>4||P===0?pc.ceil(j):pc(j),j=dc.offset(j,(U.V-1)*7),U.y=j.getFullYear(),U.m=j.getMonth(),U.d=j.getDate()+(U.w+6)%7)}else("W"in U||"U"in U)&&("w"in U||(U.w="u"in U?U.u%7:"W"in U?1:0),P="Z"in U?Ep(mc(U.y,0,1)).getUTCDay():Tp(mc(U.y,0,1)).getDay(),U.m=0,U.d="W"in U?(U.w+6)%7+U.W*7-(P+5)%7:U.w+U.U*7-(P+6)%7);return"Z"in U?(U.H+=U.Z/100|0,U.M+=U.Z%100,Ep(U)):Tp(U)}}function R(V,Q,q,U){for(var F=0,j=Q.length,P=q.length,et,at;F<j;){if(U>=P)return-1;if(et=Q.charCodeAt(F++),et===37){if(et=Q.charAt(F++),at=C[et in I8?Q.charAt(F++):et],!at||(U=at(V,q,U))<0)return-1}else if(et!=q.charCodeAt(U++))return-1}return U}function A(V,Q,q){var U=u.exec(Q.slice(q));return U?(V.p=h.get(U[0].toLowerCase()),q+U[0].length):-1}function L(V,Q,q){var U=p.exec(Q.slice(q));return U?(V.w=m.get(U[0].toLowerCase()),q+U[0].length):-1}function v(V,Q,q){var U=d.exec(Q.slice(q));return U?(V.w=f.get(U[0].toLowerCase()),q+U[0].length):-1}function B(V,Q,q){var U=b.exec(Q.slice(q));return U?(V.m=x.get(U[0].toLowerCase()),q+U[0].length):-1}function w(V,Q,q){var U=_.exec(Q.slice(q));return U?(V.m=y.get(U[0].toLowerCase()),q+U[0].length):-1}function D(V,Q,q){return R(V,e,Q,q)}function N(V,Q,q){return R(V,r,Q,q)}function z(V,Q,q){return R(V,n,Q,q)}function X(V){return s[V.getDay()]}function ct(V){return a[V.getDay()]}function J(V){return l[V.getMonth()]}function Y(V){return 
o[V.getMonth()]}function $(V){return i[+(V.getHours()>=12)]}function lt(V){return 1+~~(V.getMonth()/3)}function ut(V){return s[V.getUTCDay()]}function W(V){return a[V.getUTCDay()]}function tt(V){return l[V.getUTCMonth()]}function K(V){return o[V.getUTCMonth()]}function it(V){return i[+(V.getUTCHours()>=12)]}function Z(V){return 1+~~(V.getUTCMonth()/3)}return{format:function(V){var Q=M(V+="",k);return Q.toString=function(){return V},Q},parse:function(V){var Q=S(V+="",!1);return Q.toString=function(){return V},Q},utcFormat:function(V){var Q=M(V+="",T);return Q.toString=function(){return V},Q},utcParse:function(V){var Q=S(V+="",!0);return Q.toString=function(){return V},Q}}}var I8={"-":"",_:" ",0:"0"},Ar=/^\s*\d+/,kY=/^%/,wY=/[\\^$*+?|[\]().{}]/g;function Oe(t,e,r){var n=t<0?"-":"",i=(n?-t:t)+"",a=i.length;return n+(a<r?new Array(r-a+1).join(e)+i:i)}function TY(t){return t.replace(wY,"\\$&")}function bc(t){return new RegExp("^(?:"+t.map(TY).join("|")+")","i")}function _c(t){return new Map(t.map((e,r)=>[e.toLowerCase(),r]))}function EY(t,e,r){var n=Ar.exec(e.slice(r,r+1));return n?(t.w=+n[0],r+n[0].length):-1}function CY(t,e,r){var n=Ar.exec(e.slice(r,r+1));return n?(t.u=+n[0],r+n[0].length):-1}function SY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.U=+n[0],r+n[0].length):-1}function AY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.V=+n[0],r+n[0].length):-1}function MY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.W=+n[0],r+n[0].length):-1}function N8(t,e,r){var n=Ar.exec(e.slice(r,r+4));return n?(t.y=+n[0],r+n[0].length):-1}function B8(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.y=+n[0]+(+n[0]>68?1900:2e3),r+n[0].length):-1}function LY(t,e,r){var n=/^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(e.slice(r,r+6));return n?(t.Z=n[1]?0:-(n[2]+(n[3]||"00")),r+n[0].length):-1}function RY(t,e,r){var n=Ar.exec(e.slice(r,r+1));return n?(t.q=n[0]*3-3,r+n[0].length):-1}function IY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.m=n[0]-1,r+n[0].length):-1}function 
D8(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.d=+n[0],r+n[0].length):-1}function NY(t,e,r){var n=Ar.exec(e.slice(r,r+3));return n?(t.m=0,t.d=+n[0],r+n[0].length):-1}function O8(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.H=+n[0],r+n[0].length):-1}function BY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.M=+n[0],r+n[0].length):-1}function DY(t,e,r){var n=Ar.exec(e.slice(r,r+2));return n?(t.S=+n[0],r+n[0].length):-1}function OY(t,e,r){var n=Ar.exec(e.slice(r,r+3));return n?(t.L=+n[0],r+n[0].length):-1}function FY(t,e,r){var n=Ar.exec(e.slice(r,r+6));return n?(t.L=Math.floor(n[0]/1e3),r+n[0].length):-1}function PY(t,e,r){var n=kY.exec(e.slice(r,r+1));return n?r+n[0].length:-1}function qY(t,e,r){var n=Ar.exec(e.slice(r));return n?(t.Q=+n[0],r+n[0].length):-1}function VY(t,e,r){var n=Ar.exec(e.slice(r));return n?(t.s=+n[0],r+n[0].length):-1}function F8(t,e){return Oe(t.getDate(),e,2)}function zY(t,e){return Oe(t.getHours(),e,2)}function YY(t,e){return Oe(t.getHours()%12||12,e,2)}function UY(t,e){return Oe(1+dc.count(Pa(t),t),e,3)}function P8(t,e){return Oe(t.getMilliseconds(),e,3)}function WY(t,e){return P8(t,e)+"000"}function HY(t,e){return Oe(t.getMonth()+1,e,2)}function GY(t,e){return Oe(t.getMinutes(),e,2)}function jY(t,e){return Oe(t.getSeconds(),e,2)}function $Y(t){var e=t.getDay();return e===0?7:e}function XY(t,e){return Oe(Do.count(Pa(t)-1,t),e,2)}function q8(t){var e=t.getDay();return e>=4||e===0?Ns(t):Ns.ceil(t)}function KY(t,e){return t=q8(t),Oe(Ns.count(Pa(t),t)+(Pa(t).getDay()===4),e,2)}function ZY(t){return t.getDay()}function QY(t,e){return Oe(pc.count(Pa(t)-1,t),e,2)}function JY(t,e){return Oe(t.getFullYear()%100,e,2)}function tU(t,e){return t=q8(t),Oe(t.getFullYear()%100,e,2)}function eU(t,e){return Oe(t.getFullYear()%1e4,e,4)}function rU(t,e){var r=t.getDay();return t=r>=4||r===0?Ns(t):Ns.ceil(t),Oe(t.getFullYear()%1e4,e,4)}function nU(t){var e=t.getTimezoneOffset();return(e>0?"-":(e*=-1,"+"))+Oe(e/60|0,"0",2)+Oe(e%60,"0",2)}function 
V8(t,e){return Oe(t.getUTCDate(),e,2)}function iU(t,e){return Oe(t.getUTCHours(),e,2)}function aU(t,e){return Oe(t.getUTCHours()%12||12,e,2)}function sU(t,e){return Oe(1+gc.count(qa(t),t),e,3)}function z8(t,e){return Oe(t.getUTCMilliseconds(),e,3)}function oU(t,e){return z8(t,e)+"000"}function lU(t,e){return Oe(t.getUTCMonth()+1,e,2)}function cU(t,e){return Oe(t.getUTCMinutes(),e,2)}function uU(t,e){return Oe(t.getUTCSeconds(),e,2)}function hU(t){var e=t.getUTCDay();return e===0?7:e}function fU(t,e){return Oe(Oo.count(qa(t)-1,t),e,2)}function Y8(t){var e=t.getUTCDay();return e>=4||e===0?Ds(t):Ds.ceil(t)}function dU(t,e){return t=Y8(t),Oe(Ds.count(qa(t),t)+(qa(t).getUTCDay()===4),e,2)}function pU(t){return t.getUTCDay()}function gU(t,e){return Oe(yc.count(qa(t)-1,t),e,2)}function yU(t,e){return Oe(t.getUTCFullYear()%100,e,2)}function mU(t,e){return t=Y8(t),Oe(t.getUTCFullYear()%100,e,2)}function bU(t,e){return Oe(t.getUTCFullYear()%1e4,e,4)}function _U(t,e){var r=t.getUTCDay();return t=r>=4||r===0?Ds(t):Ds.ceil(t),Oe(t.getUTCFullYear()%1e4,e,4)}function vU(){return"+0000"}function U8(){return"%"}function W8(t){return+t}function H8(t){return Math.floor(+t/1e3)}var Fo,vc,G8,lf,Cp;j8({dateTime:"%x, %X",date:"%-m/%-d/%Y",time:"%-I:%M:%S %p",periods:["AM","PM"],days:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],shortDays:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],months:["January","February","March","April","May","June","July","August","September","October","November","December"],shortMonths:["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"]});function j8(t){return Fo=R8(t),vc=Fo.format,G8=Fo.parse,lf=Fo.utcFormat,Cp=Fo.utcParse,Fo}var $8="%Y-%m-%dT%H:%M:%S.%LZ";function xU(t){return t.toISOString()}var kU=Date.prototype.toISOString?xU:lf($8);const wU=kU;function TU(t){var e=new Date(t);return isNaN(e)?null:e}var EU=+new Date("2000-01-01T00:00:00.000Z")?TU:Cp($8);const CU=EU;function SU(t){return new Date(t)}function 
AU(t){return t instanceof Date?+t:+new Date(+t)}function Sp(t,e,r,n,i,a,s,o,l,u){var h=ap(),d=h.invert,f=h.domain,p=u(".%L"),m=u(":%S"),_=u("%I:%M"),y=u("%I %p"),b=u("%a %d"),x=u("%b %d"),k=u("%B"),T=u("%Y");function C(M){return(l(M)<M?p:o(M)<M?m:s(M)<M?_:a(M)<M?y:n(M)<M?i(M)<M?b:x:r(M)<M?k:T)(M)}return h.invert=function(M){return new Date(d(M))},h.domain=function(M){return arguments.length?f(Array.from(M,AU)):f().map(SU)},h.ticks=function(M){var S=f();return t(S[0],S[S.length-1],M==null?10:M)},h.tickFormat=function(M,S){return S==null?C:u(S)},h.nice=function(M){var S=f();return(!M||typeof M.range!="function")&&(M=e(S[0],S[S.length-1],M==null?10:M)),M?f(Wx(S,M)):h},h.copy=function(){return fc(h,Sp(t,e,r,n,i,a,s,o,l,u))},h}function X8(){return On.apply(Sp(M8,L8,Pa,bp,Do,dc,mp,yp,Fa,vc).domain([new Date(2e3,0,1),new Date(2e3,0,2)]),arguments)}function MU(){return On.apply(Sp(S8,A8,qa,kp,Oo,gc,xp,vp,Fa,lf).domain([Date.UTC(2e3,0,1),Date.UTC(2e3,0,2)]),arguments)}function cf(){var t=0,e=1,r,n,i,a,s=an,o=!1,l;function u(d){return d==null||isNaN(d=+d)?l:s(i===0?.5:(d=(a(d)-r)*i,o?Math.max(0,Math.min(1,d)):d))}u.domain=function(d){return arguments.length?([t,e]=d,r=a(t=+t),n=a(e=+e),i=r===n?0:1/(n-r),u):[t,e]},u.clamp=function(d){return arguments.length?(o=!!d,u):o},u.interpolator=function(d){return arguments.length?(s=d,u):s};function h(d){return function(f){var p,m;return arguments.length?([p,m]=f,s=d(p,m),u):[s(0),s(1)]}}return u.range=h(Ma),u.rangeRound=h(ju),u.unknown=function(d){return arguments.length?(l=d,u):l},function(d){return a=d,r=d(t),n=d(e),i=r===n?0:1/(n-r),u}}function Va(t,e){return e.domain(t.domain()).interpolator(t.interpolator()).clamp(t.clamp()).unknown(t.unknown())}function K8(){var t=Oa(cf()(an));return t.copy=function(){return Va(t,K8())},ta.apply(t,arguments)}function Z8(){var t=op(cf()).domain([1,10]);return t.copy=function(){return Va(t,Z8()).base(t.base())},ta.apply(t,arguments)}function Q8(){var t=lp(cf());return t.copy=function(){return 
Va(t,Q8()).constant(t.constant())},ta.apply(t,arguments)}function Ap(){var t=cp(cf());return t.copy=function(){return Va(t,Ap()).exponent(t.exponent())},ta.apply(t,arguments)}function LU(){return Ap.apply(null,arguments).exponent(.5)}function J8(){var t=[],e=an;function r(n){if(n!=null&&!isNaN(n=+n))return e((cs(t,n,1)-1)/(t.length-1))}return r.domain=function(n){if(!arguments.length)return t.slice();t=[];for(let i of n)i!=null&&!isNaN(i=+i)&&t.push(i);return t.sort(Qe),r},r.interpolator=function(n){return arguments.length?(e=n,r):e},r.range=function(){return t.map((n,i)=>e(i/(t.length-1)))},r.quantiles=function(n){return Array.from({length:n+1},(i,a)=>Cl(t,a/n))},r.copy=function(){return J8(e).domain(t)},ta.apply(r,arguments)}function uf(){var t=0,e=.5,r=1,n=1,i,a,s,o,l,u=an,h,d=!1,f;function p(_){return isNaN(_=+_)?f:(_=.5+((_=+h(_))-a)*(n*_<n*a?o:l),u(d?Math.max(0,Math.min(1,_)):_))}p.domain=function(_){return arguments.length?([t,e,r]=_,i=h(t=+t),a=h(e=+e),s=h(r=+r),o=i===a?0:.5/(a-i),l=a===s?0:.5/(s-a),n=a<i?-1:1,p):[t,e,r]},p.clamp=function(_){return arguments.length?(d=!!_,p):d},p.interpolator=function(_){return arguments.length?(u=_,p):u};function m(_){return function(y){var b,x,k;return arguments.length?([b,x,k]=y,u=K5(_,[b,x,k]),p):[u(0),u(.5),u(1)]}}return p.range=m(Ma),p.rangeRound=m(ju),p.unknown=function(_){return arguments.length?(f=_,p):f},function(_){return h=_,i=_(t),a=_(e),s=_(r),o=i===a?0:.5/(a-i),l=a===s?0:.5/(s-a),n=a<i?-1:1,p}}function t7(){var t=Oa(uf()(an));return t.copy=function(){return Va(t,t7())},ta.apply(t,arguments)}function e7(){var t=op(uf()).domain([.1,1,10]);return t.copy=function(){return Va(t,e7()).base(t.base())},ta.apply(t,arguments)}function r7(){var t=lp(uf());return t.copy=function(){return Va(t,r7()).constant(t.constant())},ta.apply(t,arguments)}function Mp(){var t=cp(uf());return t.copy=function(){return Va(t,Mp()).exponent(t.exponent())},ta.apply(t,arguments)}function RU(){return 
Mp.apply(null,arguments).exponent(.5)}function Ee(t){for(var e=t.length/6|0,r=new Array(e),n=0;n<e;)r[n]="#"+t.slice(n*6,++n*6);return r}const IU=Ee("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"),NU=Ee("7fc97fbeaed4fdc086ffff99386cb0f0027fbf5b17666666"),BU=Ee("1b9e77d95f027570b3e7298a66a61ee6ab02a6761d666666"),DU=Ee("a6cee31f78b4b2df8a33a02cfb9a99e31a1cfdbf6fff7f00cab2d66a3d9affff99b15928"),OU=Ee("fbb4aeb3cde3ccebc5decbe4fed9a6ffffcce5d8bdfddaecf2f2f2"),FU=Ee("b3e2cdfdcdaccbd5e8f4cae4e6f5c9fff2aef1e2cccccccc"),PU=Ee("e41a1c377eb84daf4a984ea3ff7f00ffff33a65628f781bf999999"),qU=Ee("66c2a5fc8d628da0cbe78ac3a6d854ffd92fe5c494b3b3b3"),VU=Ee("8dd3c7ffffb3bebadafb807280b1d3fdb462b3de69fccde5d9d9d9bc80bdccebc5ffed6f"),zU=Ee("4e79a7f28e2ce1575976b7b259a14fedc949af7aa1ff9da79c755fbab0ab"),We=t=>B5(t[t.length-1]);var n7=new Array(3).concat("d8b365f5f5f55ab4ac","a6611adfc27d80cdc1018571","a6611adfc27df5f5f580cdc1018571","8c510ad8b365f6e8c3c7eae55ab4ac01665e","8c510ad8b365f6e8c3f5f5f5c7eae55ab4ac01665e","8c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e","8c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e","5430058c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e003c30","5430058c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e003c30").map(Ee);const YU=We(n7);var i7=new Array(3).concat("af8dc3f7f7f77fbf7b","7b3294c2a5cfa6dba0008837","7b3294c2a5cff7f7f7a6dba0008837","762a83af8dc3e7d4e8d9f0d37fbf7b1b7837","762a83af8dc3e7d4e8f7f7f7d9f0d37fbf7b1b7837","762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b7837","762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b7837","40004b762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b783700441b","40004b762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b783700441b").map(Ee);const UU=We(i7);var a7=new 
Array(3).concat("e9a3c9f7f7f7a1d76a","d01c8bf1b6dab8e1864dac26","d01c8bf1b6daf7f7f7b8e1864dac26","c51b7de9a3c9fde0efe6f5d0a1d76a4d9221","c51b7de9a3c9fde0eff7f7f7e6f5d0a1d76a4d9221","c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221","c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221","8e0152c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221276419","8e0152c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221276419").map(Ee);const WU=We(a7);var s7=new Array(3).concat("998ec3f7f7f7f1a340","5e3c99b2abd2fdb863e66101","5e3c99b2abd2f7f7f7fdb863e66101","542788998ec3d8daebfee0b6f1a340b35806","542788998ec3d8daebf7f7f7fee0b6f1a340b35806","5427888073acb2abd2d8daebfee0b6fdb863e08214b35806","5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b35806","2d004b5427888073acb2abd2d8daebfee0b6fdb863e08214b358067f3b08","2d004b5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b358067f3b08").map(Ee);const HU=We(s7);var o7=new Array(3).concat("ef8a62f7f7f767a9cf","ca0020f4a58292c5de0571b0","ca0020f4a582f7f7f792c5de0571b0","b2182bef8a62fddbc7d1e5f067a9cf2166ac","b2182bef8a62fddbc7f7f7f7d1e5f067a9cf2166ac","b2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac","b2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac","67001fb2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac053061","67001fb2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac053061").map(Ee);const GU=We(o7);var l7=new Array(3).concat("ef8a62ffffff999999","ca0020f4a582bababa404040","ca0020f4a582ffffffbababa404040","b2182bef8a62fddbc7e0e0e09999994d4d4d","b2182bef8a62fddbc7ffffffe0e0e09999994d4d4d","b2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d","b2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d","67001fb2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d1a1a1a","67001fb2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d1a1a1a").map(Ee);const jU=We(l7);var c7=new 
Array(3).concat("fc8d59ffffbf91bfdb","d7191cfdae61abd9e92c7bb6","d7191cfdae61ffffbfabd9e92c7bb6","d73027fc8d59fee090e0f3f891bfdb4575b4","d73027fc8d59fee090ffffbfe0f3f891bfdb4575b4","d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4","d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4","a50026d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4313695","a50026d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4313695").map(Ee);const $U=We(c7);var u7=new Array(3).concat("fc8d59ffffbf91cf60","d7191cfdae61a6d96a1a9641","d7191cfdae61ffffbfa6d96a1a9641","d73027fc8d59fee08bd9ef8b91cf601a9850","d73027fc8d59fee08bffffbfd9ef8b91cf601a9850","d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850","d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850","a50026d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850006837","a50026d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850006837").map(Ee);const XU=We(u7);var h7=new Array(3).concat("fc8d59ffffbf99d594","d7191cfdae61abdda42b83ba","d7191cfdae61ffffbfabdda42b83ba","d53e4ffc8d59fee08be6f59899d5943288bd","d53e4ffc8d59fee08bffffbfe6f59899d5943288bd","d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd","d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd","9e0142d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd5e4fa2","9e0142d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd5e4fa2").map(Ee);const KU=We(h7);var f7=new Array(3).concat("e5f5f999d8c92ca25f","edf8fbb2e2e266c2a4238b45","edf8fbb2e2e266c2a42ca25f006d2c","edf8fbccece699d8c966c2a42ca25f006d2c","edf8fbccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45005824","f7fcfde5f5f9ccece699d8c966c2a441ae76238b45006d2c00441b").map(Ee);const ZU=We(f7);var d7=new Array(3).concat("e0ecf49ebcda8856a7","edf8fbb3cde38c96c688419d","edf8fbb3cde38c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68856a7810f7c","edf8fbbfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d6e016b","f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d810f7c4d004b").map(Ee);const 
QU=We(d7);var p7=new Array(3).concat("e0f3dba8ddb543a2ca","f0f9e8bae4bc7bccc42b8cbe","f0f9e8bae4bc7bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc443a2ca0868ac","f0f9e8ccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe08589e","f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe0868ac084081").map(Ee);const JU=We(p7);var g7=new Array(3).concat("fee8c8fdbb84e34a33","fef0d9fdcc8afc8d59d7301f","fef0d9fdcc8afc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59e34a33b30000","fef0d9fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301f990000","fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301fb300007f0000").map(Ee);const tW=We(g7);var y7=new Array(3).concat("ece2f0a6bddb1c9099","f6eff7bdc9e167a9cf02818a","f6eff7bdc9e167a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf1c9099016c59","f6eff7d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016450","fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016c59014636").map(Ee);const eW=We(y7);var m7=new Array(3).concat("ece7f2a6bddb2b8cbe","f1eef6bdc9e174a9cf0570b0","f1eef6bdc9e174a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf2b8cbe045a8d","f1eef6d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0034e7b","fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0045a8d023858").map(Ee);const rW=We(m7);var b7=new Array(3).concat("e7e1efc994c7dd1c77","f1eef6d7b5d8df65b0ce1256","f1eef6d7b5d8df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0dd1c77980043","f1eef6d4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125691003f","f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125698004367001f").map(Ee);const nW=We(b7);var _7=new Array(3).concat("fde0ddfa9fb5c51b8a","feebe2fbb4b9f768a1ae017e","feebe2fbb4b9f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1c51b8a7a0177","feebe2fcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a0177","fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a017749006a").map(Ee);const iW=We(_7);var v7=new 
Array(3).concat("edf8b17fcdbb2c7fb8","ffffcca1dab441b6c4225ea8","ffffcca1dab441b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c42c7fb8253494","ffffccc7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea80c2c84","ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea8253494081d58").map(Ee);const aW=We(v7);var x7=new Array(3).concat("f7fcb9addd8e31a354","ffffccc2e69978c679238443","ffffccc2e69978c67931a354006837","ffffccd9f0a3addd8e78c67931a354006837","ffffccd9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443005a32","ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443006837004529").map(Ee);const sW=We(x7);var k7=new Array(3).concat("fff7bcfec44fd95f0e","ffffd4fed98efe9929cc4c02","ffffd4fed98efe9929d95f0e993404","ffffd4fee391fec44ffe9929d95f0e993404","ffffd4fee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c028c2d04","ffffe5fff7bcfee391fec44ffe9929ec7014cc4c02993404662506").map(Ee);const oW=We(k7);var w7=new Array(3).concat("ffeda0feb24cf03b20","ffffb2fecc5cfd8d3ce31a1c","ffffb2fecc5cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cf03b20bd0026","ffffb2fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cb10026","ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cbd0026800026").map(Ee);const lW=We(w7);var T7=new Array(3).concat("deebf79ecae13182bd","eff3ffbdd7e76baed62171b5","eff3ffbdd7e76baed63182bd08519c","eff3ffc6dbef9ecae16baed63182bd08519c","eff3ffc6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b5084594","f7fbffdeebf7c6dbef9ecae16baed64292c62171b508519c08306b").map(Ee);const cW=We(T7);var E7=new Array(3).concat("e5f5e0a1d99b31a354","edf8e9bae4b374c476238b45","edf8e9bae4b374c47631a354006d2c","edf8e9c7e9c0a1d99b74c47631a354006d2c","edf8e9c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45005a32","f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45006d2c00441b").map(Ee);const uW=We(E7);var C7=new 
Array(3).concat("f0f0f0bdbdbd636363","f7f7f7cccccc969696525252","f7f7f7cccccc969696636363252525","f7f7f7d9d9d9bdbdbd969696636363252525","f7f7f7d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525","fffffff0f0f0d9d9d9bdbdbd969696737373525252252525000000").map(Ee);const hW=We(C7);var S7=new Array(3).concat("efedf5bcbddc756bb1","f2f0f7cbc9e29e9ac86a51a3","f2f0f7cbc9e29e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8756bb154278f","f2f0f7dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a34a1486","fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a354278f3f007d").map(Ee);const fW=We(S7);var A7=new Array(3).concat("fee0d2fc9272de2d26","fee5d9fcae91fb6a4acb181d","fee5d9fcae91fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4ade2d26a50f15","fee5d9fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181d99000d","fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181da50f1567000d").map(Ee);const dW=We(A7);var M7=new Array(3).concat("fee6cefdae6be6550d","feeddefdbe85fd8d3cd94701","feeddefdbe85fd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3ce6550da63603","feeddefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d948018c2d04","fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d94801a636037f2704").map(Ee);const pW=We(M7);function gW(t){return t=Math.max(0,Math.min(1,t)),"rgb("+Math.max(0,Math.min(255,Math.round(-4.54-t*(35.34-t*(2381.73-t*(6402.7-t*(7024.72-t*2710.57)))))))+", "+Math.max(0,Math.min(255,Math.round(32.49+t*(170.73+t*(52.82-t*(131.46-t*(176.58-t*67.37)))))))+", "+Math.max(0,Math.min(255,Math.round(81.24+t*(442.36-t*(2482.43-t*(6167.24-t*(6614.94-t*2475.67)))))))+")"}const yW=Xu(Qn(300,.5,0),Qn(-240,.5,1));var mW=Xu(Qn(-100,.75,.35),Qn(80,1.5,.8)),bW=Xu(Qn(260,.75,.35),Qn(80,1.5,.8)),hf=Qn();function _W(t){(t<0||t>1)&&(t-=Math.floor(t));var e=Math.abs(t-.5);return hf.h=360*t-100,hf.s=1.5-1.5*e,hf.l=.8-.9*e,hf+""}var ff=po(),vW=Math.PI/3,xW=Math.PI*2/3;function kW(t){var e;return 
t=(.5-t)*Math.PI,ff.r=255*(e=Math.sin(t))*e,ff.g=255*(e=Math.sin(t+vW))*e,ff.b=255*(e=Math.sin(t+xW))*e,ff+""}function wW(t){return t=Math.max(0,Math.min(1,t)),"rgb("+Math.max(0,Math.min(255,Math.round(34.61+t*(1172.33-t*(10793.56-t*(33300.12-t*(38394.49-t*14825.05)))))))+", "+Math.max(0,Math.min(255,Math.round(23.31+t*(557.33+t*(1225.33-t*(3574.96-t*(1073.77+t*707.56)))))))+", "+Math.max(0,Math.min(255,Math.round(27.2+t*(3211.1-t*(15327.97-t*(27814-t*(22569.18-t*6838.66)))))))+")"}function df(t){var e=t.length;return function(r){return t[Math.max(0,Math.min(e-1,Math.floor(r*e)))]}}const TW=df(Ee("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2d
f23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725"));var EW=df(Ee("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")),CW=df(Ee("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d0
9653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")),SW=df(Ee("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc74
27cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921"));function xe(t){return function(){return t}}const L7=Math.abs,qr=Math.atan2,na=Math.cos,AW=Math.max,Po=Math.min,gn=Math.sin,He=Math.sqrt,Vr=1e-12,za=Math.PI,pf=za/2,Ya=2*za;function MW(t){return t>1?0:t<-1?za:Math.acos(t)}function R7(t){return t>=1?pf:t<=-1?-pf:Math.asin(t)}function LW(t){return t.innerRadius}function RW(t){return t.outerRadius}function IW(t){return t.startAngle}function NW(t){return t.endAngle}function BW(t){return t&&t.padAngle}function DW(t,e,r,n,i,a,s,o){var l=r-t,u=n-e,h=s-i,d=o-a,f=d*l-h*u;if(!(f*f<Vr))return f=(h*(e-a)-d*(t-i))/f,[t+f*l,e+f*u]}function gf(t,e,r,n,i,a,s){var o=t-r,l=e-n,u=(s?a:-a)/He(o*o+l*l),h=u*l,d=-u*o,f=t+h,p=e+d,m=r+h,_=n+d,y=(f+m)/2,b=(p+_)/2,x=m-f,k=_-p,T=x*x+k*k,C=i-a,M=f*_-m*p,S=(k<0?-1:1)*He(AW(0,C*C*T-M*M)),R=(M*k-x*S)/T,A=(-M*x-k*S)/T,L=(M*k+x*S)/T,v=(-M*x+k*S)/T,B=R-y,w=A-b,D=L-y,N=v-b;return B*B+w*w>D*D+N*N&&(R=L,A=v),{cx:R,cy:A,x01:-h,y01:-d,x11:R*(i/C-1),y11:A*(i/C-1)}}function yf(){var t=LW,e=RW,r=xe(0),n=null,i=IW,a=NW,s=BW,o=null;function l(){var 
u,h,d=+t.apply(this,arguments),f=+e.apply(this,arguments),p=i.apply(this,arguments)-pf,m=a.apply(this,arguments)-pf,_=L7(m-p),y=m>p;if(o||(o=u=Ra()),f<d&&(h=f,f=d,d=h),!(f>Vr))o.moveTo(0,0);else if(_>Ya-Vr)o.moveTo(f*na(p),f*gn(p)),o.arc(0,0,f,p,m,!y),d>Vr&&(o.moveTo(d*na(m),d*gn(m)),o.arc(0,0,d,m,p,y));else{var b=p,x=m,k=p,T=m,C=_,M=_,S=s.apply(this,arguments)/2,R=S>Vr&&(n?+n.apply(this,arguments):He(d*d+f*f)),A=Po(L7(f-d)/2,+r.apply(this,arguments)),L=A,v=A,B,w;if(R>Vr){var D=R7(R/d*gn(S)),N=R7(R/f*gn(S));(C-=D*2)>Vr?(D*=y?1:-1,k+=D,T-=D):(C=0,k=T=(p+m)/2),(M-=N*2)>Vr?(N*=y?1:-1,b+=N,x-=N):(M=0,b=x=(p+m)/2)}var z=f*na(b),X=f*gn(b),ct=d*na(T),J=d*gn(T);if(A>Vr){var Y=f*na(x),$=f*gn(x),lt=d*na(k),ut=d*gn(k),W;if(_<za&&(W=DW(z,X,lt,ut,Y,$,ct,J))){var tt=z-W[0],K=X-W[1],it=Y-W[0],Z=$-W[1],V=1/gn(MW((tt*it+K*Z)/(He(tt*tt+K*K)*He(it*it+Z*Z)))/2),Q=He(W[0]*W[0]+W[1]*W[1]);L=Po(A,(d-Q)/(V-1)),v=Po(A,(f-Q)/(V+1))}}M>Vr?v>Vr?(B=gf(lt,ut,z,X,f,v,y),w=gf(Y,$,ct,J,f,v,y),o.moveTo(B.cx+B.x01,B.cy+B.y01),v<A?o.arc(B.cx,B.cy,v,qr(B.y01,B.x01),qr(w.y01,w.x01),!y):(o.arc(B.cx,B.cy,v,qr(B.y01,B.x01),qr(B.y11,B.x11),!y),o.arc(0,0,f,qr(B.cy+B.y11,B.cx+B.x11),qr(w.cy+w.y11,w.cx+w.x11),!y),o.arc(w.cx,w.cy,v,qr(w.y11,w.x11),qr(w.y01,w.x01),!y))):(o.moveTo(z,X),o.arc(0,0,f,b,x,!y)):o.moveTo(z,X),!(d>Vr)||!(C>Vr)?o.lineTo(ct,J):L>Vr?(B=gf(ct,J,Y,$,d,-L,y),w=gf(z,X,lt,ut,d,-L,y),o.lineTo(B.cx+B.x01,B.cy+B.y01),L<A?o.arc(B.cx,B.cy,L,qr(B.y01,B.x01),qr(w.y01,w.x01),!y):(o.arc(B.cx,B.cy,L,qr(B.y01,B.x01),qr(B.y11,B.x11),!y),o.arc(0,0,d,qr(B.cy+B.y11,B.cx+B.x11),qr(w.cy+w.y11,w.cx+w.x11),y),o.arc(w.cx,w.cy,L,qr(w.y11,w.x11),qr(w.y01,w.x01),!y))):o.arc(0,0,d,T,k,y)}if(o.closePath(),u)return o=null,u+""||null}return l.centroid=function(){var u=(+t.apply(this,arguments)+ +e.apply(this,arguments))/2,h=(+i.apply(this,arguments)+ +a.apply(this,arguments))/2-za/2;return[na(h)*u,gn(h)*u]},l.innerRadius=function(u){return arguments.length?(t=typeof 
u=="function"?u:xe(+u),l):t},l.outerRadius=function(u){return arguments.length?(e=typeof u=="function"?u:xe(+u),l):e},l.cornerRadius=function(u){return arguments.length?(r=typeof u=="function"?u:xe(+u),l):r},l.padRadius=function(u){return arguments.length?(n=u==null?null:typeof u=="function"?u:xe(+u),l):n},l.startAngle=function(u){return arguments.length?(i=typeof u=="function"?u:xe(+u),l):i},l.endAngle=function(u){return arguments.length?(a=typeof u=="function"?u:xe(+u),l):a},l.padAngle=function(u){return arguments.length?(s=typeof u=="function"?u:xe(+u),l):s},l.context=function(u){return arguments.length?(o=u==null?null:u,l):o},l}var OW=Array.prototype.slice;function mf(t){return typeof t=="object"&&"length"in t?t:Array.from(t)}function I7(t){this._context=t}I7.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._point=0},lineEnd:function(){(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;default:this._context.lineTo(t,e);break}}};function yn(t){return new I7(t)}function Lp(t){return t[0]}function Rp(t){return t[1]}function Ua(t,e){var r=xe(!0),n=null,i=yn,a=null;t=typeof t=="function"?t:t===void 0?Lp:xe(t),e=typeof e=="function"?e:e===void 0?Rp:xe(e);function s(o){var l,u=(o=mf(o)).length,h,d=!1,f;for(n==null&&(a=i(f=Ra())),l=0;l<=u;++l)!(l<u&&r(h=o[l],l,o))===d&&((d=!d)?a.lineStart():a.lineEnd()),d&&a.point(+t(h,l,o),+e(h,l,o));if(f)return a=null,f+""||null}return s.x=function(o){return arguments.length?(t=typeof o=="function"?o:xe(+o),s):t},s.y=function(o){return arguments.length?(e=typeof o=="function"?o:xe(+o),s):e},s.defined=function(o){return arguments.length?(r=typeof o=="function"?o:xe(!!o),s):r},s.curve=function(o){return 
arguments.length?(i=o,n!=null&&(a=i(n)),s):i},s.context=function(o){return arguments.length?(o==null?n=a=null:a=i(n=o),s):n},s}function N7(t,e,r){var n=null,i=xe(!0),a=null,s=yn,o=null;t=typeof t=="function"?t:t===void 0?Lp:xe(+t),e=typeof e=="function"?e:xe(e===void 0?0:+e),r=typeof r=="function"?r:r===void 0?Rp:xe(+r);function l(h){var d,f,p,m=(h=mf(h)).length,_,y=!1,b,x=new Array(m),k=new Array(m);for(a==null&&(o=s(b=Ra())),d=0;d<=m;++d){if(!(d<m&&i(_=h[d],d,h))===y)if(y=!y)f=d,o.areaStart(),o.lineStart();else{for(o.lineEnd(),o.lineStart(),p=d-1;p>=f;--p)o.point(x[p],k[p]);o.lineEnd(),o.areaEnd()}y&&(x[d]=+t(_,d,h),k[d]=+e(_,d,h),o.point(n?+n(_,d,h):x[d],r?+r(_,d,h):k[d]))}if(b)return o=null,b+""||null}function u(){return Ua().defined(i).curve(s).context(a)}return l.x=function(h){return arguments.length?(t=typeof h=="function"?h:xe(+h),n=null,l):t},l.x0=function(h){return arguments.length?(t=typeof h=="function"?h:xe(+h),l):t},l.x1=function(h){return arguments.length?(n=h==null?null:typeof h=="function"?h:xe(+h),l):n},l.y=function(h){return arguments.length?(e=typeof h=="function"?h:xe(+h),r=null,l):e},l.y0=function(h){return arguments.length?(e=typeof h=="function"?h:xe(+h),l):e},l.y1=function(h){return arguments.length?(r=h==null?null:typeof h=="function"?h:xe(+h),l):r},l.lineX0=l.lineY0=function(){return u().x(t).y(e)},l.lineY1=function(){return u().x(t).y(r)},l.lineX1=function(){return u().x(n).y(e)},l.defined=function(h){return arguments.length?(i=typeof h=="function"?h:xe(!!h),l):i},l.curve=function(h){return arguments.length?(s=h,a!=null&&(o=s(a)),l):s},l.context=function(h){return arguments.length?(h==null?a=o=null:o=s(a=h),l):a},l}function FW(t,e){return e<t?-1:e>t?1:e>=t?0:NaN}function PW(t){return t}function B7(){var t=PW,e=FW,r=null,n=xe(0),i=xe(Ya),a=xe(0);function s(o){var l,u=(o=mf(o)).length,h,d,f=0,p=new Array(u),m=new 
Array(u),_=+n.apply(this,arguments),y=Math.min(Ya,Math.max(-Ya,i.apply(this,arguments)-_)),b,x=Math.min(Math.abs(y)/u,a.apply(this,arguments)),k=x*(y<0?-1:1),T;for(l=0;l<u;++l)(T=m[p[l]=l]=+t(o[l],l,o))>0&&(f+=T);for(e!=null?p.sort(function(C,M){return e(m[C],m[M])}):r!=null&&p.sort(function(C,M){return r(o[C],o[M])}),l=0,d=f?(y-u*k)/f:0;l<u;++l,_=b)h=p[l],T=m[h],b=_+(T>0?T*d:0)+k,m[h]={data:o[h],index:l,value:T,startAngle:_,endAngle:b,padAngle:x};return m}return s.value=function(o){return arguments.length?(t=typeof o=="function"?o:xe(+o),s):t},s.sortValues=function(o){return arguments.length?(e=o,r=null,s):e},s.sort=function(o){return arguments.length?(r=o,e=null,s):r},s.startAngle=function(o){return arguments.length?(n=typeof o=="function"?o:xe(+o),s):n},s.endAngle=function(o){return arguments.length?(i=typeof o=="function"?o:xe(+o),s):i},s.padAngle=function(o){return arguments.length?(a=typeof o=="function"?o:xe(+o),s):a},s}var D7=Ip(yn);function O7(t){this._curve=t}O7.prototype={areaStart:function(){this._curve.areaStart()},areaEnd:function(){this._curve.areaEnd()},lineStart:function(){this._curve.lineStart()},lineEnd:function(){this._curve.lineEnd()},point:function(t,e){this._curve.point(e*Math.sin(t),e*-Math.cos(t))}};function Ip(t){function e(r){return new O7(t(r))}return e._curve=t,e}function xc(t){var e=t.curve;return t.angle=t.x,delete t.x,t.radius=t.y,delete t.y,t.curve=function(r){return arguments.length?e(Ip(r)):e()._curve},t}function F7(){return xc(Ua().curve(D7))}function P7(){var t=N7().curve(D7),e=t.curve,r=t.lineX0,n=t.lineX1,i=t.lineY0,a=t.lineY1;return t.angle=t.x,delete t.x,t.startAngle=t.x0,delete t.x0,t.endAngle=t.x1,delete t.x1,t.radius=t.y,delete t.y,t.innerRadius=t.y0,delete t.y0,t.outerRadius=t.y1,delete t.y1,t.lineStartAngle=function(){return xc(r())},delete t.lineX0,t.lineEndAngle=function(){return xc(n())},delete t.lineX1,t.lineInnerRadius=function(){return xc(i())},delete t.lineY0,t.lineOuterRadius=function(){return xc(a())},delete 
t.lineY1,t.curve=function(s){return arguments.length?e(Ip(s)):e()._curve},t}function kc(t,e){return[(e=+e)*Math.cos(t-=Math.PI/2),e*Math.sin(t)]}class q7{constructor(e,r){this._context=e,this._x=r}areaStart(){this._line=0}areaEnd(){this._line=NaN}lineStart(){this._point=0}lineEnd(){(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line}point(e,r){switch(e=+e,r=+r,this._point){case 0:{this._point=1,this._line?this._context.lineTo(e,r):this._context.moveTo(e,r);break}case 1:this._point=2;default:{this._x?this._context.bezierCurveTo(this._x0=(this._x0+e)/2,this._y0,this._x0,r,e,r):this._context.bezierCurveTo(this._x0,this._y0=(this._y0+r)/2,e,this._y0,e,r);break}}this._x0=e,this._y0=r}}class qW{constructor(e){this._context=e}lineStart(){this._point=0}lineEnd(){}point(e,r){if(e=+e,r=+r,this._point++===0)this._x0=e,this._y0=r;else{const n=kc(this._x0,this._y0),i=kc(this._x0,this._y0=(this._y0+r)/2),a=kc(e,this._y0),s=kc(e,r);this._context.moveTo(...n),this._context.bezierCurveTo(...i,...a,...s)}}}function V7(t){return new q7(t,!0)}function z7(t){return new q7(t,!1)}function VW(t){return new qW(t)}function zW(t){return t.source}function YW(t){return t.target}function bf(t){let e=zW,r=YW,n=Lp,i=Rp,a=null,s=null;function o(){let l;const u=OW.call(arguments),h=e.apply(this,u),d=r.apply(this,u);if(a==null&&(s=t(l=Ra())),s.lineStart(),u[0]=h,s.point(+n.apply(this,u),+i.apply(this,u)),u[0]=d,s.point(+n.apply(this,u),+i.apply(this,u)),s.lineEnd(),l)return s=null,l+""||null}return o.source=function(l){return arguments.length?(e=l,o):e},o.target=function(l){return arguments.length?(r=l,o):r},o.x=function(l){return arguments.length?(n=typeof l=="function"?l:xe(+l),o):n},o.y=function(l){return arguments.length?(i=typeof l=="function"?l:xe(+l),o):i},o.context=function(l){return arguments.length?(l==null?a=s=null:s=t(a=l),o):a},o}function UW(){return bf(V7)}function WW(){return bf(z7)}function HW(){const t=bf(VW);return t.angle=t.x,delete 
t.x,t.radius=t.y,delete t.y,t}const GW=He(3),Y7={draw(t,e){const r=He(e+Po(e/28,.75))*.59436,n=r/2,i=n*GW;t.moveTo(0,r),t.lineTo(0,-r),t.moveTo(-i,-n),t.lineTo(i,n),t.moveTo(-i,n),t.lineTo(i,-n)}},_f={draw(t,e){const r=He(e/za);t.moveTo(r,0),t.arc(0,0,r,0,Ya)}},U7={draw(t,e){const r=He(e/5)/2;t.moveTo(-3*r,-r),t.lineTo(-r,-r),t.lineTo(-r,-3*r),t.lineTo(r,-3*r),t.lineTo(r,-r),t.lineTo(3*r,-r),t.lineTo(3*r,r),t.lineTo(r,r),t.lineTo(r,3*r),t.lineTo(-r,3*r),t.lineTo(-r,r),t.lineTo(-3*r,r),t.closePath()}},W7=He(1/3),jW=W7*2,H7={draw(t,e){const r=He(e/jW),n=r*W7;t.moveTo(0,-r),t.lineTo(n,0),t.lineTo(0,r),t.lineTo(-n,0),t.closePath()}},G7={draw(t,e){const r=He(e)*.62625;t.moveTo(0,-r),t.lineTo(r,0),t.lineTo(0,r),t.lineTo(-r,0),t.closePath()}},j7={draw(t,e){const r=He(e-Po(e/7,2))*.87559;t.moveTo(-r,0),t.lineTo(r,0),t.moveTo(0,r),t.lineTo(0,-r)}},$7={draw(t,e){const r=He(e),n=-r/2;t.rect(n,n,r,r)}},X7={draw(t,e){const r=He(e)*.4431;t.moveTo(r,r),t.lineTo(r,-r),t.lineTo(-r,-r),t.lineTo(-r,r),t.closePath()}},$W=.8908130915292852,K7=gn(za/10)/gn(7*za/10),XW=gn(Ya/10)*K7,KW=-na(Ya/10)*K7,Z7={draw(t,e){const r=He(e*$W),n=XW*r,i=KW*r;t.moveTo(0,-r),t.lineTo(n,i);for(let a=1;a<5;++a){const s=Ya*a/5,o=na(s),l=gn(s);t.lineTo(l*r,-o*r),t.lineTo(o*n-l*i,l*n+o*i)}t.closePath()}},Np=He(3),Q7={draw(t,e){const r=-He(e/(Np*3));t.moveTo(0,r*2),t.lineTo(-Np*r,-r),t.lineTo(Np*r,-r),t.closePath()}},ZW=He(3),J7={draw(t,e){const r=He(e)*.6824,n=r/2,i=r*ZW/2;t.moveTo(0,-r),t.lineTo(i,n),t.lineTo(-i,n),t.closePath()}},Pn=-.5,qn=He(3)/2,Bp=1/He(12),QW=(Bp/2+1)*3,tk={draw(t,e){const r=He(e/QW),n=r/2,i=r*Bp,a=n,s=r*Bp+r,o=-a,l=s;t.moveTo(n,i),t.lineTo(a,s),t.lineTo(o,l),t.lineTo(Pn*n-qn*i,qn*n+Pn*i),t.lineTo(Pn*a-qn*s,qn*a+Pn*s),t.lineTo(Pn*o-qn*l,qn*o+Pn*l),t.lineTo(Pn*n+qn*i,Pn*i-qn*n),t.lineTo(Pn*a+qn*s,Pn*s-qn*a),t.lineTo(Pn*o+qn*l,Pn*l-qn*o),t.closePath()}},ek={draw(t,e){const 
r=He(e-Po(e/6,1.7))*.6189;t.moveTo(-r,-r),t.lineTo(r,r),t.moveTo(-r,r),t.lineTo(r,-r)}},rk=[_f,U7,H7,$7,Z7,Q7,tk],JW=[_f,j7,ek,J7,Y7,X7,G7];function tH(t,e){let r=null;t=typeof t=="function"?t:xe(t||_f),e=typeof e=="function"?e:xe(e===void 0?64:+e);function n(){let i;if(r||(r=i=Ra()),t.apply(this,arguments).draw(r,+e.apply(this,arguments)),i)return r=null,i+""||null}return n.type=function(i){return arguments.length?(t=typeof i=="function"?i:xe(i),n):t},n.size=function(i){return arguments.length?(e=typeof i=="function"?i:xe(+i),n):e},n.context=function(i){return arguments.length?(r=i==null?null:i,n):r},n}function Wa(){}function vf(t,e,r){t._context.bezierCurveTo((2*t._x0+t._x1)/3,(2*t._y0+t._y1)/3,(t._x0+2*t._x1)/3,(t._y0+2*t._y1)/3,(t._x0+4*t._x1+e)/6,(t._y0+4*t._y1+r)/6)}function xf(t){this._context=t}xf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){switch(this._point){case 3:vf(this,this._x1,this._y1);case 2:this._context.lineTo(this._x1,this._y1);break}(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;break;case 2:this._point=3,this._context.lineTo((5*this._x0+this._x1)/6,(5*this._y0+this._y1)/6);default:vf(this,t,e);break}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}};function Os(t){return new xf(t)}function nk(t){this._context=t}nk.prototype={areaStart:Wa,areaEnd:Wa,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._y0=this._y1=this._y2=this._y3=this._y4=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:{this._context.moveTo(this._x2,this._y2),this._context.closePath();break}case 
2:{this._context.moveTo((this._x2+2*this._x3)/3,(this._y2+2*this._y3)/3),this._context.lineTo((this._x3+2*this._x2)/3,(this._y3+2*this._y2)/3),this._context.closePath();break}case 3:{this.point(this._x2,this._y2),this.point(this._x3,this._y3),this.point(this._x4,this._y4);break}}},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._x2=t,this._y2=e;break;case 1:this._point=2,this._x3=t,this._y3=e;break;case 2:this._point=3,this._x4=t,this._y4=e,this._context.moveTo((this._x0+4*this._x1+t)/6,(this._y0+4*this._y1+e)/6);break;default:vf(this,t,e);break}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}};function ik(t){return new nk(t)}function ak(t){this._context=t}ak.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=NaN,this._point=0},lineEnd:function(){(this._line||this._line!==0&&this._point===3)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3;var r=(this._x0+4*this._x1+t)/6,n=(this._y0+4*this._y1+e)/6;this._line?this._context.lineTo(r,n):this._context.moveTo(r,n);break;case 3:this._point=4;default:vf(this,t,e);break}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e}};function sk(t){return new ak(t)}function ok(t,e){this._basis=new xf(t),this._beta=e}ok.prototype={lineStart:function(){this._x=[],this._y=[],this._basis.lineStart()},lineEnd:function(){var t=this._x,e=this._y,r=t.length-1;if(r>0)for(var n=t[0],i=e[0],a=t[r]-n,s=e[r]-i,o=-1,l;++o<=r;)l=o/r,this._basis.point(this._beta*t[o]+(1-this._beta)*(n+l*a),this._beta*e[o]+(1-this._beta)*(i+l*s));this._x=this._y=null,this._basis.lineEnd()},point:function(t,e){this._x.push(+t),this._y.push(+e)}};const eH=function t(e){function r(n){return e===1?new xf(n):new ok(n,e)}return r.beta=function(n){return t(+n)},r}(.85);function 
kf(t,e,r){t._context.bezierCurveTo(t._x1+t._k*(t._x2-t._x0),t._y1+t._k*(t._y2-t._y0),t._x2+t._k*(t._x1-e),t._y2+t._k*(t._y1-r),t._x2,t._y2)}function Dp(t,e){this._context=t,this._k=(1-e)/6}Dp.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:kf(this,this._x1,this._y1);break}(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2,this._x1=t,this._y1=e;break;case 2:this._point=3;default:kf(this,t,e);break}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const rH=function t(e){function r(n){return new Dp(n,e)}return r.tension=function(n){return t(+n)},r}(0);function Op(t,e){this._context=t,this._k=(1-e)/6}Op.prototype={areaStart:Wa,areaEnd:Wa,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._point=0},lineEnd:function(){switch(this._point){case 1:{this._context.moveTo(this._x3,this._y3),this._context.closePath();break}case 2:{this._context.lineTo(this._x3,this._y3),this._context.closePath();break}case 3:{this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5);break}}},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._x3=t,this._y3=e;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=e);break;case 2:this._point=3,this._x5=t,this._y5=e;break;default:kf(this,t,e);break}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const nH=function t(e){function r(n){return new Op(n,e)}return 
r.tension=function(n){return t(+n)},r}(0);function Fp(t,e){this._context=t,this._k=(1-e)/6}Fp.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._point=0},lineEnd:function(){(this._line||this._line!==0&&this._point===3)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:kf(this,t,e);break}this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const iH=function t(e){function r(n){return new Fp(n,e)}return r.tension=function(n){return t(+n)},r}(0);function Pp(t,e,r){var n=t._x1,i=t._y1,a=t._x2,s=t._y2;if(t._l01_a>Vr){var o=2*t._l01_2a+3*t._l01_a*t._l12_a+t._l12_2a,l=3*t._l01_a*(t._l01_a+t._l12_a);n=(n*o-t._x0*t._l12_2a+t._x2*t._l01_2a)/l,i=(i*o-t._y0*t._l12_2a+t._y2*t._l01_2a)/l}if(t._l23_a>Vr){var u=2*t._l23_2a+3*t._l23_a*t._l12_a+t._l12_2a,h=3*t._l23_a*(t._l23_a+t._l12_a);a=(a*u+t._x1*t._l23_2a-e*t._l12_2a)/h,s=(s*u+t._y1*t._l23_2a-r*t._l12_2a)/h}t._context.bezierCurveTo(n,i,a,s,t._x2,t._y2)}function lk(t,e){this._context=t,this._alpha=e}lk.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x2,this._y2);break;case 3:this.point(this._x2,this._y2);break}(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){if(t=+t,e=+e,this._point){var 
r=this._x2-t,n=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(r*r+n*n,this._alpha))}switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;break;case 2:this._point=3;default:Pp(this,t,e);break}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const aH=function t(e){function r(n){return e?new lk(n,e):new Dp(n,0)}return r.alpha=function(n){return t(+n)},r}(.5);function ck(t,e){this._context=t,this._alpha=e}ck.prototype={areaStart:Wa,areaEnd:Wa,lineStart:function(){this._x0=this._x1=this._x2=this._x3=this._x4=this._x5=this._y0=this._y1=this._y2=this._y3=this._y4=this._y5=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){switch(this._point){case 1:{this._context.moveTo(this._x3,this._y3),this._context.closePath();break}case 2:{this._context.lineTo(this._x3,this._y3),this._context.closePath();break}case 3:{this.point(this._x3,this._y3),this.point(this._x4,this._y4),this.point(this._x5,this._y5);break}}},point:function(t,e){if(t=+t,e=+e,this._point){var r=this._x2-t,n=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(r*r+n*n,this._alpha))}switch(this._point){case 0:this._point=1,this._x3=t,this._y3=e;break;case 1:this._point=2,this._context.moveTo(this._x4=t,this._y4=e);break;case 2:this._point=3,this._x5=t,this._y5=e;break;default:Pp(this,t,e);break}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const sH=function t(e){function r(n){return e?new ck(n,e):new Op(n,0)}return r.alpha=function(n){return t(+n)},r}(.5);function 
uk(t,e){this._context=t,this._alpha=e}uk.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._x2=this._y0=this._y1=this._y2=NaN,this._l01_a=this._l12_a=this._l23_a=this._l01_2a=this._l12_2a=this._l23_2a=this._point=0},lineEnd:function(){(this._line||this._line!==0&&this._point===3)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){if(t=+t,e=+e,this._point){var r=this._x2-t,n=this._y2-e;this._l23_a=Math.sqrt(this._l23_2a=Math.pow(r*r+n*n,this._alpha))}switch(this._point){case 0:this._point=1;break;case 1:this._point=2;break;case 2:this._point=3,this._line?this._context.lineTo(this._x2,this._y2):this._context.moveTo(this._x2,this._y2);break;case 3:this._point=4;default:Pp(this,t,e);break}this._l01_a=this._l12_a,this._l12_a=this._l23_a,this._l01_2a=this._l12_2a,this._l12_2a=this._l23_2a,this._x0=this._x1,this._x1=this._x2,this._x2=t,this._y0=this._y1,this._y1=this._y2,this._y2=e}};const oH=function t(e){function r(n){return e?new uk(n,e):new Fp(n,0)}return r.alpha=function(n){return t(+n)},r}(.5);function hk(t){this._context=t}hk.prototype={areaStart:Wa,areaEnd:Wa,lineStart:function(){this._point=0},lineEnd:function(){this._point&&this._context.closePath()},point:function(t,e){t=+t,e=+e,this._point?this._context.lineTo(t,e):(this._point=1,this._context.moveTo(t,e))}};function fk(t){return new hk(t)}function dk(t){return t<0?-1:1}function pk(t,e,r){var n=t._x1-t._x0,i=e-t._x1,a=(t._y1-t._y0)/(n||i<0&&-0),s=(r-t._y1)/(i||n<0&&-0),o=(a*i+s*n)/(n+i);return(dk(a)+dk(s))*Math.min(Math.abs(a),Math.abs(s),.5*Math.abs(o))||0}function gk(t,e){var r=t._x1-t._x0;return r?(3*(t._y1-t._y0)/r-e)/2:e}function qp(t,e,r){var n=t._x0,i=t._y0,a=t._x1,s=t._y1,o=(a-n)/3;t._context.bezierCurveTo(n+o,i+o*e,a-o,s-o*r,a,s)}function 
wf(t){this._context=t}wf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x0=this._x1=this._y0=this._y1=this._t0=NaN,this._point=0},lineEnd:function(){switch(this._point){case 2:this._context.lineTo(this._x1,this._y1);break;case 3:qp(this,this._t0,gk(this,this._t0));break}(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line=1-this._line},point:function(t,e){var r=NaN;if(t=+t,e=+e,!(t===this._x1&&e===this._y1)){switch(this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;break;case 2:this._point=3,qp(this,gk(this,r=pk(this,t,e)),r);break;default:qp(this,this._t0,r=pk(this,t,e));break}this._x0=this._x1,this._x1=t,this._y0=this._y1,this._y1=e,this._t0=r}}};function yk(t){this._context=new mk(t)}(yk.prototype=Object.create(wf.prototype)).point=function(t,e){wf.prototype.point.call(this,e,t)};function mk(t){this._context=t}mk.prototype={moveTo:function(t,e){this._context.moveTo(e,t)},closePath:function(){this._context.closePath()},lineTo:function(t,e){this._context.lineTo(e,t)},bezierCurveTo:function(t,e,r,n,i,a){this._context.bezierCurveTo(e,t,n,r,a,i)}};function bk(t){return new wf(t)}function _k(t){return new yk(t)}function vk(t){this._context=t}vk.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=[],this._y=[]},lineEnd:function(){var t=this._x,e=this._y,r=t.length;if(r)if(this._line?this._context.lineTo(t[0],e[0]):this._context.moveTo(t[0],e[0]),r===2)this._context.lineTo(t[1],e[1]);else for(var n=xk(t),i=xk(e),a=0,s=1;s<r;++a,++s)this._context.bezierCurveTo(n[0][a],i[0][a],n[1][a],i[1][a],t[s],e[s]);(this._line||this._line!==0&&r===1)&&this._context.closePath(),this._line=1-this._line,this._x=this._y=null},point:function(t,e){this._x.push(+t),this._y.push(+e)}};function xk(t){var e,r=t.length-1,n,i=new Array(r),a=new Array(r),s=new 
Array(r);for(i[0]=0,a[0]=2,s[0]=t[0]+2*t[1],e=1;e<r-1;++e)i[e]=1,a[e]=4,s[e]=4*t[e]+2*t[e+1];for(i[r-1]=2,a[r-1]=7,s[r-1]=8*t[r-1]+t[r],e=1;e<r;++e)n=i[e]/a[e-1],a[e]-=n,s[e]-=n*s[e-1];for(i[r-1]=s[r-1]/a[r-1],e=r-2;e>=0;--e)i[e]=(s[e]-i[e+1])/a[e];for(a[r-1]=(t[r]+i[r-1])/2,e=0;e<r-1;++e)a[e]=2*t[e+1]-i[e+1];return[i,a]}function kk(t){return new vk(t)}function Tf(t,e){this._context=t,this._t=e}Tf.prototype={areaStart:function(){this._line=0},areaEnd:function(){this._line=NaN},lineStart:function(){this._x=this._y=NaN,this._point=0},lineEnd:function(){0<this._t&&this._t<1&&this._point===2&&this._context.lineTo(this._x,this._y),(this._line||this._line!==0&&this._point===1)&&this._context.closePath(),this._line>=0&&(this._t=1-this._t,this._line=1-this._line)},point:function(t,e){switch(t=+t,e=+e,this._point){case 0:this._point=1,this._line?this._context.lineTo(t,e):this._context.moveTo(t,e);break;case 1:this._point=2;default:{if(this._t<=0)this._context.lineTo(this._x,e),this._context.lineTo(t,e);else{var r=this._x*(1-this._t)+t*this._t;this._context.lineTo(r,this._y),this._context.lineTo(r,e)}break}}this._x=t,this._y=e}};function wk(t){return new Tf(t,.5)}function Tk(t){return new Tf(t,0)}function Ek(t){return new Tf(t,1)}function qo(t,e){if((s=t.length)>1)for(var r=1,n,i,a=t[e[0]],s,o=a.length;r<s;++r)for(i=a,a=t[e[r]],n=0;n<o;++n)a[n][1]+=a[n][0]=isNaN(i[n][1])?i[n][0]:i[n][1]}function Vo(t){for(var e=t.length,r=new Array(e);--e>=0;)r[e]=e;return r}function lH(t,e){return t[e]}function cH(t){const e=[];return e.key=t,e}function uH(){var t=xe([]),e=Vo,r=qo,n=lH;function i(a){var s=Array.from(t.apply(this,arguments),cH),o,l=s.length,u=-1,h;for(const d of a)for(o=0,++u;o<l;++o)(s[o][u]=[0,+n(d,s[o].key,u,a)]).data=d;for(o=0,h=mf(e(s));o<l;++o)s[h[o]].index=o;return r(s,h),s}return i.keys=function(a){return arguments.length?(t=typeof a=="function"?a:xe(Array.from(a)),i):t},i.value=function(a){return arguments.length?(n=typeof 
a=="function"?a:xe(+a),i):n},i.order=function(a){return arguments.length?(e=a==null?Vo:typeof a=="function"?a:xe(Array.from(a)),i):e},i.offset=function(a){return arguments.length?(r=a==null?qo:a,i):r},i}function hH(t,e){if((n=t.length)>0){for(var r,n,i=0,a=t[0].length,s;i<a;++i){for(s=r=0;r<n;++r)s+=t[r][i][1]||0;if(s)for(r=0;r<n;++r)t[r][i][1]/=s}qo(t,e)}}function fH(t,e){if((l=t.length)>0)for(var r,n=0,i,a,s,o,l,u=t[e[0]].length;n<u;++n)for(s=o=0,r=0;r<l;++r)(a=(i=t[e[r]][n])[1]-i[0])>0?(i[0]=s,i[1]=s+=a):a<0?(i[1]=o,i[0]=o+=a):(i[0]=0,i[1]=a)}function dH(t,e){if((i=t.length)>0){for(var r=0,n=t[e[0]],i,a=n.length;r<a;++r){for(var s=0,o=0;s<i;++s)o+=t[s][r][1]||0;n[r][1]+=n[r][0]=-o/2}qo(t,e)}}function pH(t,e){if(!(!((s=t.length)>0)||!((a=(i=t[e[0]]).length)>0))){for(var r=0,n=1,i,a,s;n<a;++n){for(var o=0,l=0,u=0;o<s;++o){for(var h=t[e[o]],d=h[n][1]||0,f=h[n-1][1]||0,p=(d-f)/2,m=0;m<o;++m){var _=t[e[m]],y=_[n][1]||0,b=_[n-1][1]||0;p+=y-b}l+=d,u+=p*d}i[n-1][1]+=i[n-1][0]=r,l&&(r-=u/l)}i[n-1][1]+=i[n-1][0]=r,qo(t,e)}}function Ck(t){var e=t.map(gH);return Vo(t).sort(function(r,n){return e[r]-e[n]})}function gH(t){for(var e=-1,r=0,n=t.length,i,a=-1/0;++e<n;)(i=+t[e][1])>a&&(a=i,r=e);return r}function Sk(t){var e=t.map(Ak);return Vo(t).sort(function(r,n){return e[r]-e[n]})}function Ak(t){for(var e=0,r=-1,n=t.length,i;++r<n;)(i=+t[r][1])&&(e+=i);return e}function yH(t){return Sk(t).reverse()}function mH(t){var e=t.length,r,n,i=t.map(Ak),a=Ck(t),s=0,o=0,l=[],u=[];for(r=0;r<e;++r)n=a[r],s<o?(s+=i[n],l.push(n)):(o+=i[n],u.push(n));return u.reverse().concat(l)}function bH(t){return Vo(t).reverse()}const Ef=t=>()=>t;function _H(t,{sourceEvent:e,target:r,transform:n,dispatch:i}){Object.defineProperties(this,{type:{value:t,enumerable:!0,configurable:!0},sourceEvent:{value:e,enumerable:!0,configurable:!0},target:{value:r,enumerable:!0,configurable:!0},transform:{value:n,enumerable:!0,configurable:!0},_:{value:i}})}function 
Ri(t,e,r){this.k=t,this.x=e,this.y=r}Ri.prototype={constructor:Ri,scale:function(t){return t===1?this:new Ri(this.k*t,this.x,this.y)},translate:function(t,e){return t===0&e===0?this:new Ri(this.k,this.x+this.k*t,this.y+this.k*e)},apply:function(t){return[t[0]*this.k+this.x,t[1]*this.k+this.y]},applyX:function(t){return t*this.k+this.x},applyY:function(t){return t*this.k+this.y},invert:function(t){return[(t[0]-this.x)/this.k,(t[1]-this.y)/this.k]},invertX:function(t){return(t-this.x)/this.k},invertY:function(t){return(t-this.y)/this.k},rescaleX:function(t){return t.copy().domain(t.range().map(this.invertX,this).map(t.invert,t))},rescaleY:function(t){return t.copy().domain(t.range().map(this.invertY,this).map(t.invert,t))},toString:function(){return"translate("+this.x+","+this.y+") scale("+this.k+")"}};var Cf=new Ri(1,0,0);Mk.prototype=Ri.prototype;function Mk(t){for(;!t.__zoom;)if(!(t=t.parentNode))return Cf;return t.__zoom}function Vp(t){t.stopImmediatePropagation()}function wc(t){t.preventDefault(),t.stopImmediatePropagation()}function vH(t){return(!t.ctrlKey||t.type==="wheel")&&!t.button}function xH(){var t=this;return t instanceof SVGElement?(t=t.ownerSVGElement||t,t.hasAttribute("viewBox")?(t=t.viewBox.baseVal,[[t.x,t.y],[t.x+t.width,t.y+t.height]]):[[0,0],[t.width.baseVal.value,t.height.baseVal.value]]):[[0,0],[t.clientWidth,t.clientHeight]]}function Lk(){return this.__zoom||Cf}function kH(t){return-t.deltaY*(t.deltaMode===1?.05:t.deltaMode?1:.002)*(t.ctrlKey?10:1)}function wH(){return navigator.maxTouchPoints||"ontouchstart"in this}function TH(t,e,r){var n=t.invertX(e[0][0])-r[0][0],i=t.invertX(e[1][0])-r[1][0],a=t.invertY(e[0][1])-r[0][1],s=t.invertY(e[1][1])-r[1][1];return t.translate(i>n?(n+i)/2:Math.min(0,n)||Math.max(0,i),s>a?(a+s)/2:Math.min(0,a)||Math.max(0,s))}function EH(){var t=vH,e=xH,r=TH,n=kH,i=wH,a=[0,1/0],s=[[-1/0,-1/0],[1/0,1/0]],o=250,l=H5,u=fs("start","zoom","end"),h,d,f,p=500,m=150,_=0,y=10;function 
b(D){D.property("__zoom",Lk).on("wheel.zoom",R,{passive:!1}).on("mousedown.zoom",A).on("dblclick.zoom",L).filter(i).on("touchstart.zoom",v).on("touchmove.zoom",B).on("touchend.zoom touchcancel.zoom",w).style("-webkit-tap-highlight-color","rgba(0,0,0,0)")}b.transform=function(D,N,z,X){var ct=D.selection?D.selection():D;ct.property("__zoom",Lk),D!==ct?C(D,N,z,X):ct.interrupt().each(function(){M(this,arguments).event(X).start().zoom(null,typeof N=="function"?N.apply(this,arguments):N).end()})},b.scaleBy=function(D,N,z,X){b.scaleTo(D,function(){var ct=this.__zoom.k,J=typeof N=="function"?N.apply(this,arguments):N;return ct*J},z,X)},b.scaleTo=function(D,N,z,X){b.transform(D,function(){var ct=e.apply(this,arguments),J=this.__zoom,Y=z==null?T(ct):typeof z=="function"?z.apply(this,arguments):z,$=J.invert(Y),lt=typeof N=="function"?N.apply(this,arguments):N;return r(k(x(J,lt),Y,$),ct,s)},z,X)},b.translateBy=function(D,N,z,X){b.transform(D,function(){return r(this.__zoom.translate(typeof N=="function"?N.apply(this,arguments):N,typeof z=="function"?z.apply(this,arguments):z),e.apply(this,arguments),s)},null,X)},b.translateTo=function(D,N,z,X,ct){b.transform(D,function(){var J=e.apply(this,arguments),Y=this.__zoom,$=X==null?T(J):typeof X=="function"?X.apply(this,arguments):X;return r(Cf.translate($[0],$[1]).scale(Y.k).translate(typeof N=="function"?-N.apply(this,arguments):-N,typeof z=="function"?-z.apply(this,arguments):-z),J,s)},X,ct)};function x(D,N){return N=Math.max(a[0],Math.min(a[1],N)),N===D.k?D:new Ri(N,D.x,D.y)}function k(D,N,z){var X=N[0]-z[0]*D.k,ct=N[1]-z[1]*D.k;return X===D.x&&ct===D.y?D:new Ri(D.k,X,ct)}function T(D){return[(+D[0][0]+ +D[1][0])/2,(+D[0][1]+ +D[1][1])/2]}function C(D,N,z,X){D.on("start.zoom",function(){M(this,arguments).event(X).start()}).on("interrupt.zoom end.zoom",function(){M(this,arguments).event(X).end()}).tween("zoom",function(){var ct=this,J=arguments,Y=M(ct,J).event(X),$=e.apply(ct,J),lt=z==null?T($):typeof 
z=="function"?z.apply(ct,J):z,ut=Math.max($[1][0]-$[0][0],$[1][1]-$[0][1]),W=ct.__zoom,tt=typeof N=="function"?N.apply(ct,J):N,K=l(W.invert(lt).concat(ut/W.k),tt.invert(lt).concat(ut/tt.k));return function(it){if(it===1)it=tt;else{var Z=K(it),V=ut/Z[2];it=new Ri(V,lt[0]-Z[0]*V,lt[1]-Z[1]*V)}Y.zoom(null,it)}})}function M(D,N,z){return!z&&D.__zooming||new S(D,N)}function S(D,N){this.that=D,this.args=N,this.active=0,this.sourceEvent=null,this.extent=e.apply(D,N),this.taps=0}S.prototype={event:function(D){return D&&(this.sourceEvent=D),this},start:function(){return++this.active===1&&(this.that.__zooming=this,this.emit("start")),this},zoom:function(D,N){return this.mouse&&D!=="mouse"&&(this.mouse[1]=N.invert(this.mouse[0])),this.touch0&&D!=="touch"&&(this.touch0[1]=N.invert(this.touch0[0])),this.touch1&&D!=="touch"&&(this.touch1[1]=N.invert(this.touch1[0])),this.that.__zoom=N,this.emit("zoom"),this},end:function(){return--this.active===0&&(delete this.that.__zooming,this.emit("end")),this},emit:function(D){var N=St(this.that).datum();u.call(D,this.that,new _H(D,{sourceEvent:this.sourceEvent,target:b,type:D,transform:this.that.__zoom,dispatch:u}),N)}};function R(D,...N){if(!t.apply(this,arguments))return;var z=M(this,N).event(D),X=this.__zoom,ct=Math.max(a[0],Math.min(a[1],X.k*Math.pow(2,n.apply(this,arguments)))),J=Tn(D);if(z.wheel)(z.mouse[0][0]!==J[0]||z.mouse[0][1]!==J[1])&&(z.mouse[1]=X.invert(z.mouse[0]=J)),clearTimeout(z.wheel);else{if(X.k===ct)return;z.mouse=[J,X.invert(J)],vs(this),z.start()}wc(D),z.wheel=setTimeout(Y,m),z.zoom("mouse",r(k(x(X,ct),z.mouse[0],z.mouse[1]),z.extent,s));function Y(){z.wheel=null,z.end()}}function A(D,...N){if(f||!t.apply(this,arguments))return;var z=D.currentTarget,X=M(this,N,!0).event(D),ct=St(D.view).on("mousemove.zoom",lt,!0).on("mouseup.zoom",ut,!0),J=Tn(D,z),Y=D.clientX,$=D.clientY;Bu(D.view),Vp(D),X.mouse=[J,this.__zoom.invert(J)],vs(this),X.start();function lt(W){if(wc(W),!X.moved){var 
tt=W.clientX-Y,K=W.clientY-$;X.moved=tt*tt+K*K>_}X.event(W).zoom("mouse",r(k(X.that.__zoom,X.mouse[0]=Tn(W,z),X.mouse[1]),X.extent,s))}function ut(W){ct.on("mousemove.zoom mouseup.zoom",null),Du(W.view,X.moved),wc(W),X.event(W).end()}}function L(D,...N){if(!!t.apply(this,arguments)){var z=this.__zoom,X=Tn(D.changedTouches?D.changedTouches[0]:D,this),ct=z.invert(X),J=z.k*(D.shiftKey?.5:2),Y=r(k(x(z,J),X,ct),e.apply(this,N),s);wc(D),o>0?St(this).transition().duration(o).call(C,Y,X,D):St(this).call(b.transform,Y,X,D)}}function v(D,...N){if(!!t.apply(this,arguments)){var z=D.touches,X=z.length,ct=M(this,N,D.changedTouches.length===X).event(D),J,Y,$,lt;for(Vp(D),Y=0;Y<X;++Y)$=z[Y],lt=Tn($,this),lt=[lt,this.__zoom.invert(lt),$.identifier],ct.touch0?!ct.touch1&&ct.touch0[2]!==lt[2]&&(ct.touch1=lt,ct.taps=0):(ct.touch0=lt,J=!0,ct.taps=1+!!h);h&&(h=clearTimeout(h)),J&&(ct.taps<2&&(d=lt[0],h=setTimeout(function(){h=null},p)),vs(this),ct.start())}}function B(D,...N){if(!!this.__zooming){var z=M(this,N).event(D),X=D.changedTouches,ct=X.length,J,Y,$,lt;for(wc(D),J=0;J<ct;++J)Y=X[J],$=Tn(Y,this),z.touch0&&z.touch0[2]===Y.identifier?z.touch0[0]=$:z.touch1&&z.touch1[2]===Y.identifier&&(z.touch1[0]=$);if(Y=z.that.__zoom,z.touch1){var ut=z.touch0[0],W=z.touch0[1],tt=z.touch1[0],K=z.touch1[1],it=(it=tt[0]-ut[0])*it+(it=tt[1]-ut[1])*it,Z=(Z=K[0]-W[0])*Z+(Z=K[1]-W[1])*Z;Y=x(Y,Math.sqrt(it/Z)),$=[(ut[0]+tt[0])/2,(ut[1]+tt[1])/2],lt=[(W[0]+K[0])/2,(W[1]+K[1])/2]}else if(z.touch0)$=z.touch0[0],lt=z.touch0[1];else return;z.zoom("touch",r(k(Y,$,lt),z.extent,s))}}function w(D,...N){if(!!this.__zooming){var z=M(this,N).event(D),X=D.changedTouches,ct=X.length,J,Y;for(Vp(D),f&&clearTimeout(f),f=setTimeout(function(){f=null},p),J=0;J<ct;++J)Y=X[J],z.touch0&&z.touch0[2]===Y.identifier?delete z.touch0:z.touch1&&z.touch1[2]===Y.identifier&&delete z.touch1;if(z.touch1&&!z.touch0&&(z.touch0=z.touch1,delete z.touch1),z.touch0)z.touch0[1]=this.__zoom.invert(z.touch0[0]);else 
if(z.end(),z.taps===2&&(Y=Tn(Y,this),Math.hypot(d[0]-Y[0],d[1]-Y[1])<y)){var $=St(this).on("dblclick.zoom");$&&$.apply(this,arguments)}}}return b.wheelDelta=function(D){return arguments.length?(n=typeof D=="function"?D:Ef(+D),b):n},b.filter=function(D){return arguments.length?(t=typeof D=="function"?D:Ef(!!D),b):t},b.touchable=function(D){return arguments.length?(i=typeof D=="function"?D:Ef(!!D),b):i},b.extent=function(D){return arguments.length?(e=typeof D=="function"?D:Ef([[+D[0][0],+D[0][1]],[+D[1][0],+D[1][1]]]),b):e},b.scaleExtent=function(D){return arguments.length?(a[0]=+D[0],a[1]=+D[1],b):[a[0],a[1]]},b.translateExtent=function(D){return arguments.length?(s[0][0]=+D[0][0],s[1][0]=+D[1][0],s[0][1]=+D[0][1],s[1][1]=+D[1][1],b):[[s[0][0],s[0][1]],[s[1][0],s[1][1]]]},b.constrain=function(D){return arguments.length?(r=D,b):r},b.duration=function(D){return arguments.length?(o=+D,b):o},b.interpolate=function(D){return arguments.length?(l=D,b):l},b.on=function(){var D=u.on.apply(u,arguments);return D===u?b:D},b.clickDistance=function(D){return arguments.length?(_=(D=+D)*D,b):Math.sqrt(_)},b.tapDistance=function(D){return arguments.length?(y=+D,b):y},b}const 
CH=Object.freeze(Object.defineProperty({__proto__:null,bisect:cs,bisectRight:x_,bisectLeft:SR,bisectCenter:AR,ascending:Qe,bisector:ku,blur:MR,blur2:k_,blurImage:LR,count:wu,cross:FR,cumsum:PR,descending:m_,deviation:E_,extent:xl,Adder:_r,fsum:qR,fcumsum:VR,group:M_,flatGroup:zR,flatRollup:YR,groups:L_,index:UR,indexes:WR,rollup:I_,rollups:N_,groupSort:HR,bin:F_,histogram:F_,thresholdFreedmanDiaconis:$R,thresholdScott:XR,thresholdSturges:W0,max:lo,maxIndex:H0,mean:KR,median:ZR,medianIndex:QR,merge:j0,min:Tl,minIndex:G0,mode:tI,nice:O_,pairs:eI,permute:D_,quantile:Cl,quantileIndex:V_,quantileSorted:q_,quickselect:Eu,range:Ca,rank:nI,least:iI,leastIndex:z_,greatest:P_,greatestIndex:aI,scan:sI,shuffle:oI,shuffler:Y_,sum:lI,ticks:hs,tickIncrement:oo,tickStep:wl,transpose:U_,variance:T_,zip:uI,every:hI,some:fI,filter:dI,map:pI,reduce:gI,reverse:yI,sort:q0,difference:mI,disjoint:bI,intersection:_I,subset:xI,superset:W_,union:kI,InternMap:kl,InternSet:us,axisTop:j_,axisRight:MI,axisBottom:$_,axisLeft:LI,brush:KO,brushX:$O,brushY:XO,brushSelection:jO,chord:QO,chordTranspose:JO,chordDirected:tF,ribbon:cF,ribbonArrow:uF,color:Aa,rgb:po,hsl:qu,lab:Yu,hcl:Uu,lch:IB,gray:RB,cubehelix:Qn,contours:Ud,contourDensity:wF,Delaunay:jd,Voronoi:Nv,dispatch:fs,drag:_B,dragDisable:Bu,dragEnable:Du,dsvFormat:uh,csvParse:Fv,csvParseRows:GF,csvFormat:jF,csvFormatBody:$F,csvFormatRows:XF,csvFormatRow:KF,csvFormatValue:ZF,tsvParse:Pv,tsvParseRows:QF,tsvFormat:JF,tsvFormatBody:tP,tsvFormatRows:eP,tsvFormatRow:rP,tsvFormatValue:nP,autoType:iP,easeLinear:sO,easeQuad:ov,easeQuadIn:oO,easeQuadOut:lO,easeQuadInOut:ov,easeCubic:Ed,easeCubicIn:cO,easeCubicOut:uO,easeCubicInOut:Ed,easePoly:lv,easePolyIn:hO,easePolyOut:fO,easePolyInOut:lv,easeSin:hv,easeSinIn:dO,easeSinOut:pO,easeSinInOut:hv,easeExp:fv,easeExpIn:gO,easeExpOut:yO,easeExpInOut:fv,easeCircle:dv,easeCircleIn:mO,easeCircleOut:bO,easeCircleInOut:dv,easeBounce:Vl,easeBounceIn:SO,easeBounceOut:Vl,easeBounceInOut:AO,easeBack:pv,easeBackIn:MO,ease
BackOut:LO,easeBackInOut:pv,easeElastic:gv,easeElasticIn:RO,easeElasticOut:gv,easeElasticInOut:IO,blob:oP,buffer:cP,dsv:hP,csv:fP,tsv:dP,image:pP,json:yP,text:hh,xml:mP,html:bP,svg:_P,forceCenter:vP,forceCollide:qP,forceLink:zP,forceManyBody:KP,forceRadial:ZP,forceSimulation:XP,forceX:QP,forceY:JP,formatDefaultLocale:Zv,get format(){return yh},get formatPrefix(){return Jd},formatLocale:Kv,formatSpecifier:Co,FormatSpecifier:ph,precisionFixed:Qv,precisionPrefix:Jv,precisionRound:t6,geoArea:cq,geoBounds:dq,geoCentroid:_q,geoCircle:vq,geoClipAntimeridian:m2,geoClipCircle:N6,geoClipExtent:Sq,geoClipRectangle:Fh,geoContains:Bq,geoDistance:Vh,geoGraticule:U6,geoGraticule10:Dq,geoInterpolate:Oq,geoLength:B6,geoPath:$q,geoAlbers:hx,geoAlbersUsa:iV,geoAzimuthalEqualArea:aV,geoAzimuthalEqualAreaRaw:P2,geoAzimuthalEquidistant:sV,geoAzimuthalEquidistantRaw:q2,geoConicConformal:lV,geoConicConformalRaw:px,geoConicEqualArea:Xh,geoConicEqualAreaRaw:ux,geoConicEquidistant:uV,geoConicEquidistantRaw:gx,geoEqualEarth:fV,geoEqualEarthRaw:V2,geoEquirectangular:cV,geoEquirectangularRaw:ac,geoGnomonic:dV,geoGnomonicRaw:z2,geoIdentity:pV,geoProjection:Li,geoProjectionMutator:O2,geoMercator:oV,geoMercatorRaw:ic,geoNaturalEarth1:gV,geoNaturalEarth1Raw:Y2,geoOrthographic:yV,geoOrthographicRaw:U2,geoStereographic:mV,geoStereographicRaw:W2,geoTransverseMercator:bV,geoTransverseMercatorRaw:H2,geoRotation:E6,geoStream:ti,geoTransform:Xq,cluster:CV,hierarchy:G2,Node:Ms,pack:rz,packSiblings:tz,packEnclose:KV,partition:nz,stratify:oz,tree:pz,treemap:gz,treemapBinary:yz,treemapDice:hc,treemapSlice:rf,treemapSliceDice:mz,treemapSquarify:Nx,treemapResquarify:bz,interpolate:Ma,interpolateArray:FB,interpolateBasis:L5,interpolateBasisClosed:R5,interpolateDate:F5,interpolateDiscrete:VB,interpolateHue:zB,interpolateNumber:Bn,interpolateNumberArray:dd,interpolateObject:P5,interpolateRound:ju,interpolateString:yd,interpolateTransformCss:Y5,interpolateTransformSvg:U5,interpolateZoom:H5,interpolateRgb:Nl,interpol
ateRgbBasis:B5,interpolateRgbBasisClosed:OB,interpolateHsl:jB,interpolateHslLong:$B,interpolateLab:XB,interpolateHcl:$5,interpolateHclLong:KB,interpolateCubehelix:ZB,interpolateCubehelixLong:Xu,piecewise:K5,quantize:QB,path:Ra,polygonArea:_z,polygonCentroid:vz,polygonHull:wz,polygonContains:Tz,polygonLength:Ez,quadtree:fh,randomUniform:Cz,randomInt:Sz,randomNormal:tp,randomLogNormal:Az,randomBates:Mz,randomIrwinHall:Dx,randomExponential:Lz,randomPareto:Rz,randomBernoulli:Iz,randomGeometric:Ox,randomBinomial:Px,randomGamma:ep,randomBeta:Fx,randomWeibull:Nz,randomCauchy:Bz,randomLogistic:Dz,randomPoisson:Oz,randomLcg:qz,scaleBand:np,scalePoint:Vz,scaleIdentity:Ux,scaleLinear:sp,scaleLog:$x,scaleSymlog:Zx,scaleOrdinal:nf,scaleImplicit:rp,scalePow:up,scaleSqrt:Qz,scaleRadial:t8,scaleQuantile:e8,scaleQuantize:r8,scaleThreshold:n8,scaleTime:X8,scaleUtc:MU,scaleSequential:K8,scaleSequentialLog:Z8,scaleSequentialPow:Ap,scaleSequentialSqrt:LU,scaleSequentialSymlog:Q8,scaleSequentialQuantile:J8,scaleDiverging:t7,scaleDivergingLog:e7,scaleDivergingPow:Mp,scaleDivergingSqrt:RU,scaleDivergingSymlog:r7,tickFormat:Yx,schemeCategory10:IU,schemeAccent:NU,schemeDark2:BU,schemePaired:DU,schemePastel1:OU,schemePastel2:FU,schemeSet1:PU,schemeSet2:qU,schemeSet3:VU,schemeTableau10:zU,interpolateBrBG:YU,schemeBrBG:n7,interpolatePRGn:UU,schemePRGn:i7,interpolatePiYG:WU,schemePiYG:a7,interpolatePuOr:HU,schemePuOr:s7,interpolateRdBu:GU,schemeRdBu:o7,interpolateRdGy:jU,schemeRdGy:l7,interpolateRdYlBu:$U,schemeRdYlBu:c7,interpolateRdYlGn:XU,schemeRdYlGn:u7,interpolateSpectral:KU,schemeSpectral:h7,interpolateBuGn:ZU,schemeBuGn:f7,interpolateBuPu:QU,schemeBuPu:d7,interpolateGnBu:JU,schemeGnBu:p7,interpolateOrRd:tW,schemeOrRd:g7,interpolatePuBuGn:eW,schemePuBuGn:y7,interpolatePuBu:rW,schemePuBu:m7,interpolatePuRd:nW,schemePuRd:b7,interpolateRdPu:iW,schemeRdPu:_7,interpolateYlGnBu:aW,schemeYlGnBu:v7,interpolateYlGn:sW,schemeYlGn:x7,interpolateYlOrBr:oW,schemeYlOrBr:k7,interpolateYlOrRd:lW,schemeYlO
rRd:w7,interpolateBlues:cW,schemeBlues:T7,interpolateGreens:uW,schemeGreens:E7,interpolateGreys:hW,schemeGreys:C7,interpolatePurples:fW,schemePurples:S7,interpolateReds:dW,schemeReds:A7,interpolateOranges:pW,schemeOranges:M7,interpolateCividis:gW,interpolateCubehelixDefault:yW,interpolateRainbow:_W,interpolateWarm:mW,interpolateCool:bW,interpolateSinebow:kW,interpolateTurbo:wW,interpolateViridis:TW,interpolateMagma:EW,interpolateInferno:CW,interpolatePlasma:SW,create:hB,creator:Lu,local:i5,matcher:Q0,namespace:Al,namespaces:K0,pointer:Tn,pointers:dB,select:St,selectAll:Nu,selection:ps,selector:Ru,selectorAll:Z0,style:ds,window:J0,arc:yf,area:N7,line:Ua,pie:B7,areaRadial:P7,radialArea:P7,lineRadial:F7,radialLine:F7,pointRadial:kc,link:bf,linkHorizontal:UW,linkVertical:WW,linkRadial:HW,symbol:tH,symbolsStroke:JW,symbolsFill:rk,symbols:rk,symbolAsterisk:Y7,symbolCircle:_f,symbolCross:U7,symbolDiamond:H7,symbolDiamond2:G7,symbolPlus:j7,symbolSquare:$7,symbolSquare2:X7,symbolStar:Z7,symbolTriangle:Q7,symbolTriangle2:J7,symbolWye:tk,symbolX:ek,curveBasisClosed:ik,curveBasisOpen:sk,curveBasis:Os,curveBumpX:V7,curveBumpY:z7,curveBundle:eH,curveCardinalClosed:nH,curveCardinalOpen:iH,curveCardinal:rH,curveCatmullRomClosed:sH,curveCatmullRomOpen:oH,curveCatmullRom:aH,curveLinearClosed:fk,curveLinear:yn,curveMonotoneX:bk,curveMonotoneY:_k,curveNatural:kk,curveStep:wk,curveStepAfter:Ek,curveStepBefore:Tk,stack:uH,stackOffsetExpand:hH,stackOffsetDiverging:fH,stackOffsetNone:qo,stackOffsetSilhouette:dH,stackOffsetWiggle:pH,stackOrderAppearance:Ck,stackOrderAscending:Sk,stackOrderDescending:yH,stackOrderInsideOut:mH,stackOrderNone:Vo,stackOrderReverse:bH,timeInterval:xr,timeMillisecond:dp,timeMilliseconds:i8,utcMillisecond:dp,utcMilliseconds:i8,timeSecond:Fa,timeSeconds:o8,utcSecond:Fa,utcSeconds:o8,timeMinute:yp,timeMinutes:tY,timeHour:mp,timeHours:eY,timeDay:dc,timeDays:rY,timeWeek:Do,timeWeeks:g8,timeSunday:Do,timeSundays:g8,timeMonday:pc,timeMondays:nY,timeTuesday:h8,timeTuesda
ys:iY,timeWednesday:f8,timeWednesdays:aY,timeThursday:Ns,timeThursdays:sY,timeFriday:d8,timeFridays:oY,timeSaturday:p8,timeSaturdays:lY,timeMonth:bp,timeMonths:cY,timeYear:Pa,timeYears:uY,utcMinute:vp,utcMinutes:hY,utcHour:xp,utcHours:fY,utcDay:gc,utcDays:dY,utcWeek:Oo,utcWeeks:T8,utcSunday:Oo,utcSundays:T8,utcMonday:yc,utcMondays:pY,utcTuesday:v8,utcTuesdays:gY,utcWednesday:x8,utcWednesdays:yY,utcThursday:Ds,utcThursdays:mY,utcFriday:k8,utcFridays:bY,utcSaturday:w8,utcSaturdays:_Y,utcMonth:kp,utcMonths:vY,utcYear:qa,utcYears:xY,utcTicks:S8,utcTickInterval:A8,timeTicks:M8,timeTickInterval:L8,timeFormatDefaultLocale:j8,get timeFormat(){return vc},get timeParse(){return G8},get utcFormat(){return lf},get utcParse(){return Cp},timeFormatLocale:R8,isoFormat:wU,isoParse:CU,now:Pl,timer:Ju,timerFlush:J5,timeout:_d,interval:rD,transition:av,active:FO,interrupt:vs,zoom:EH,zoomTransform:Mk,zoomIdentity:Cf,ZoomTransform:Ri},Symbol.toStringTag,{value:"Module"}));/*! @license DOMPurify 2.4.0 | (c) Cure53 and other contributors | Released under the Apache license 2.0 and Mozilla Public License 2.0 | github.com/cure53/DOMPurify/blob/2.4.0/LICENSE */function Ha(t){return Ha=typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?function(e){return typeof e}:function(e){return e&&typeof Symbol=="function"&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},Ha(t)}function zp(t,e){return zp=Object.setPrototypeOf||function(n,i){return n.__proto__=i,n},zp(t,e)}function SH(){if(typeof Reflect>"u"||!Reflect.construct||Reflect.construct.sham)return!1;if(typeof Proxy=="function")return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch{return!1}}function Sf(t,e,r){return SH()?Sf=Reflect.construct:Sf=function(i,a,s){var o=[null];o.push.apply(o,a);var l=Function.bind.apply(i,o),u=new l;return s&&zp(u,s.prototype),u},Sf.apply(null,arguments)}function ni(t){return AH(t)||MH(t)||LH(t)||RH()}function AH(t){if(Array.isArray(t))return 
Yp(t)}function MH(t){if(typeof Symbol<"u"&&t[Symbol.iterator]!=null||t["@@iterator"]!=null)return Array.from(t)}function LH(t,e){if(!!t){if(typeof t=="string")return Yp(t,e);var r=Object.prototype.toString.call(t).slice(8,-1);if(r==="Object"&&t.constructor&&(r=t.constructor.name),r==="Map"||r==="Set")return Array.from(t);if(r==="Arguments"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(r))return Yp(t,e)}}function Yp(t,e){(e==null||e>t.length)&&(e=t.length);for(var r=0,n=new Array(e);r<e;r++)n[r]=t[r];return n}function RH(){throw new TypeError(`Invalid attempt to spread non-iterable instance. -In order to be iterable, non-array objects must have a [Symbol.iterator]() method.`)}var IH=Object.hasOwnProperty,Rk=Object.setPrototypeOf,NH=Object.isFrozen,BH=Object.getPrototypeOf,DH=Object.getOwnPropertyDescriptor,sn=Object.freeze,Ii=Object.seal,OH=Object.create,Ik=typeof Reflect<"u"&&Reflect,Af=Ik.apply,Up=Ik.construct;Af||(Af=function(e,r,n){return e.apply(r,n)}),sn||(sn=function(e){return e}),Ii||(Ii=function(e){return e}),Up||(Up=function(e,r){return Sf(e,ni(r))});var FH=ii(Array.prototype.forEach),Nk=ii(Array.prototype.pop),Tc=ii(Array.prototype.push),Mf=ii(String.prototype.toLowerCase),PH=ii(String.prototype.match),Ga=ii(String.prototype.replace),qH=ii(String.prototype.indexOf),VH=ii(String.prototype.trim),on=ii(RegExp.prototype.test),Wp=zH(TypeError);function ii(t){return function(e){for(var r=arguments.length,n=new Array(r>1?r-1:0),i=1;i<r;i++)n[i-1]=arguments[i];return Af(t,e,n)}}function zH(t){return function(){for(var e=arguments.length,r=new Array(e),n=0;n<e;n++)r[n]=arguments[n];return Up(t,r)}}function Me(t,e,r){r=r||Mf,Rk&&Rk(t,null);for(var n=e.length;n--;){var i=e[n];if(typeof i=="string"){var a=r(i);a!==i&&(NH(e)||(e[n]=a),i=a)}t[i]=!0}return t}function Fs(t){var e=OH(null),r;for(r in t)Af(IH,t,[r])&&(e[r]=t[r]);return e}function Lf(t,e){for(;t!==null;){var r=DH(t,e);if(r){if(r.get)return ii(r.get);if(typeof r.value=="function")return 
ii(r.value)}t=BH(t)}function n(i){return console.warn("fallback value for",i),null}return n}var Bk=sn(["a","abbr","acronym","address","area","article","aside","audio","b","bdi","bdo","big","blink","blockquote","body","br","button","canvas","caption","center","cite","code","col","colgroup","content","data","datalist","dd","decorator","del","details","dfn","dialog","dir","div","dl","dt","element","em","fieldset","figcaption","figure","font","footer","form","h1","h2","h3","h4","h5","h6","head","header","hgroup","hr","html","i","img","input","ins","kbd","label","legend","li","main","map","mark","marquee","menu","menuitem","meter","nav","nobr","ol","optgroup","option","output","p","picture","pre","progress","q","rp","rt","ruby","s","samp","section","select","shadow","small","source","spacer","span","strike","strong","style","sub","summary","sup","table","tbody","td","template","textarea","tfoot","th","thead","time","tr","track","tt","u","ul","var","video","wbr"]),Hp=sn(["svg","a","altglyph","altglyphdef","altglyphitem","animatecolor","animatemotion","animatetransform","circle","clippath","defs","desc","ellipse","filter","font","g","glyph","glyphref","hkern","image","line","lineargradient","marker","mask","metadata","mpath","path","pattern","polygon","polyline","radialgradient","rect","stop","style","switch","symbol","text","textpath","title","tref","tspan","view","vkern"]),Gp=sn(["feBlend","feColorMatrix","feComponentTransfer","feComposite","feConvolveMatrix","feDiffuseLighting","feDisplacementMap","feDistantLight","feFlood","feFuncA","feFuncB","feFuncG","feFuncR","feGaussianBlur","feImage","feMerge","feMergeNode","feMorphology","feOffset","fePointLight","feSpecularLighting","feSpotLight","feTile","feTurbulence"]),YH=sn(["animate","color-profile","cursor","discard","fedropshadow","font-face","font-face-format","font-face-name","font-face-src","font-face-uri","foreignobject","hatch","hatchpath","mesh","meshgradient","meshpatch","meshrow","missing-glyph","script","set","so
lidcolor","unknown","use"]),jp=sn(["math","menclose","merror","mfenced","mfrac","mglyph","mi","mlabeledtr","mmultiscripts","mn","mo","mover","mpadded","mphantom","mroot","mrow","ms","mspace","msqrt","mstyle","msub","msup","msubsup","mtable","mtd","mtext","mtr","munder","munderover"]),UH=sn(["maction","maligngroup","malignmark","mlongdiv","mscarries","mscarry","msgroup","mstack","msline","msrow","semantics","annotation","annotation-xml","mprescripts","none"]),Dk=sn(["#text"]),Ok=sn(["accept","action","align","alt","autocapitalize","autocomplete","autopictureinpicture","autoplay","background","bgcolor","border","capture","cellpadding","cellspacing","checked","cite","class","clear","color","cols","colspan","controls","controlslist","coords","crossorigin","datetime","decoding","default","dir","disabled","disablepictureinpicture","disableremoteplayback","download","draggable","enctype","enterkeyhint","face","for","headers","height","hidden","high","href","hreflang","id","inputmode","integrity","ismap","kind","label","lang","list","loading","loop","low","max","maxlength","media","method","min","minlength","multiple","muted","name","nonce","noshade","novalidate","nowrap","open","optimum","pattern","placeholder","playsinline","poster","preload","pubdate","radiogroup","readonly","rel","required","rev","reversed","role","rows","rowspan","spellcheck","scope","selected","shape","size","sizes","span","srclang","start","src","srcset","step","style","summary","tabindex","title","translate","type","usemap","valign","value","width","xmlns","slot"]),$p=sn(["accent-height","accumulate","additive","alignment-baseline","ascent","attributename","attributetype","azimuth","basefrequency","baseline-shift","begin","bias","by","class","clip","clippathunits","clip-path","clip-rule","color","color-interpolation","color-interpolation-filters","color-profile","color-rendering","cx","cy","d","dx","dy","diffuseconstant","direction","display","divisor","dur","edgemode","elevation","end","fill","fill
-opacity","fill-rule","filter","filterunits","flood-color","flood-opacity","font-family","font-size","font-size-adjust","font-stretch","font-style","font-variant","font-weight","fx","fy","g1","g2","glyph-name","glyphref","gradientunits","gradienttransform","height","href","id","image-rendering","in","in2","k","k1","k2","k3","k4","kerning","keypoints","keysplines","keytimes","lang","lengthadjust","letter-spacing","kernelmatrix","kernelunitlength","lighting-color","local","marker-end","marker-mid","marker-start","markerheight","markerunits","markerwidth","maskcontentunits","maskunits","max","mask","media","method","mode","min","name","numoctaves","offset","operator","opacity","order","orient","orientation","origin","overflow","paint-order","path","pathlength","patterncontentunits","patterntransform","patternunits","points","preservealpha","preserveaspectratio","primitiveunits","r","rx","ry","radius","refx","refy","repeatcount","repeatdur","restart","result","rotate","scale","seed","shape-rendering","specularconstant","specularexponent","spreadmethod","startoffset","stddeviation","stitchtiles","stop-color","stop-opacity","stroke-dasharray","stroke-dashoffset","stroke-linecap","stroke-linejoin","stroke-miterlimit","stroke-opacity","stroke","stroke-width","style","surfacescale","systemlanguage","tabindex","targetx","targety","transform","transform-origin","text-anchor","text-decoration","text-rendering","textlength","type","u1","u2","unicode","values","viewbox","visibility","version","vert-adv-y","vert-origin-x","vert-origin-y","width","word-spacing","wrap","writing-mode","xchannelselector","ychannelselector","x","x1","x2","xmlns","y","y1","y2","z","zoomandpan"]),Fk=sn(["accent","accentunder","align","bevelled","close","columnsalign","columnlines","columnspan","denomalign","depth","dir","display","displaystyle","encoding","fence","frame","height","href","id","largeop","length","linethickness","lspace","lquote","mathbackground","mathcolor","mathsize","mathvariant","maxsiz
e","minsize","movablelimits","notation","numalign","open","rowalign","rowlines","rowspacing","rowspan","rspace","rquote","scriptlevel","scriptminsize","scriptsizemultiplier","selection","separator","separators","stretchy","subscriptshift","supscriptshift","symmetric","voffset","width","xmlns"]),Rf=sn(["xlink:href","xml:id","xlink:title","xml:space","xmlns:xlink"]),WH=Ii(/\{\{[\w\W]*|[\w\W]*\}\}/gm),HH=Ii(/<%[\w\W]*|[\w\W]*%>/gm),GH=Ii(/^data-[\-\w.\u00B7-\uFFFF]/),jH=Ii(/^aria-[\-\w]+$/),$H=Ii(/^(?:(?:(?:f|ht)tps?|mailto|tel|callto|cid|xmpp):|[^a-z]|[a-z+.\-]+(?:[^a-z+.\-:]|$))/i),XH=Ii(/^(?:\w+script|data):/i),KH=Ii(/[\u0000-\u0020\u00A0\u1680\u180E\u2000-\u2029\u205F\u3000]/g),ZH=Ii(/^html$/i),QH=function(){return typeof window>"u"?null:window},JH=function(e,r){if(Ha(e)!=="object"||typeof e.createPolicy!="function")return null;var n=null,i="data-tt-policy-suffix";r.currentScript&&r.currentScript.hasAttribute(i)&&(n=r.currentScript.getAttribute(i));var a="dompurify"+(n?"#"+n:"");try{return e.createPolicy(a,{createHTML:function(o){return o},createScriptURL:function(o){return o}})}catch{return console.warn("TrustedTypes policy "+a+" could not be created."),null}};function Pk(){var t=arguments.length>0&&arguments[0]!==void 0?arguments[0]:QH(),e=function(st){return Pk(st)};if(e.version="2.4.0",e.removed=[],!t||!t.document||t.document.nodeType!==9)return e.isSupported=!1,e;var r=t.document,n=t.document,i=t.DocumentFragment,a=t.HTMLTemplateElement,s=t.Node,o=t.Element,l=t.NodeFilter,u=t.NamedNodeMap,h=u===void 0?t.NamedNodeMap||t.MozNamedAttrMap:u,d=t.HTMLFormElement,f=t.DOMParser,p=t.trustedTypes,m=o.prototype,_=Lf(m,"cloneNode"),y=Lf(m,"nextSibling"),b=Lf(m,"childNodes"),x=Lf(m,"parentNode");if(typeof a=="function"){var k=n.createElement("template");k.content&&k.content.ownerDocument&&(n=k.content.ownerDocument)}var 
T=JH(p,r),C=T?T.createHTML(""):"",M=n,S=M.implementation,R=M.createNodeIterator,A=M.createDocumentFragment,L=M.getElementsByTagName,v=r.importNode,B={};try{B=Fs(n).documentMode?n.documentMode:{}}catch{}var w={};e.isSupported=typeof x=="function"&&S&&typeof S.createHTMLDocument<"u"&&B!==9;var D=WH,N=HH,z=GH,X=jH,ct=XH,J=KH,Y=$H,$=null,lt=Me({},[].concat(ni(Bk),ni(Hp),ni(Gp),ni(jp),ni(Dk))),ut=null,W=Me({},[].concat(ni(Ok),ni($p),ni(Fk),ni(Rf))),tt=Object.seal(Object.create(null,{tagNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},attributeNameCheck:{writable:!0,configurable:!1,enumerable:!0,value:null},allowCustomizedBuiltInElements:{writable:!0,configurable:!1,enumerable:!0,value:!1}})),K=null,it=null,Z=!0,V=!0,Q=!1,q=!1,U=!1,F=!1,j=!1,P=!1,et=!1,at=!1,It=!0,Lt=!1,Rt="user-content-",Ct=!0,pt=!1,mt={},vt=null,Tt=Me({},["annotation-xml","audio","colgroup","desc","foreignobject","head","iframe","math","mi","mn","mo","ms","mtext","noembed","noframes","noscript","plaintext","script","style","svg","template","thead","title","video","xmp"]),ft=null,le=Me({},["audio","video","img","source","image","track"]),Dt=null,Gt=Me({},["alt","class","for","id","label","name","pattern","placeholder","role","summary","title","value","style","xmlns"]),$t="http://www.w3.org/1998/Math/MathML",Qt="http://www.w3.org/2000/svg",we="http://www.w3.org/1999/xhtml",jt=we,Ft=!1,zt,wt=["application/xhtml+xml","text/html"],bt="text/html",Et,kt=null,Ut=n.createElement("form"),gt=function(st){return st instanceof RegExp||st instanceof Function},he=function(st){kt&&kt===st||((!st||Ha(st)!=="object")&&(st={}),st=Fs(st),zt=wt.indexOf(st.PARSER_MEDIA_TYPE)===-1?zt=bt:zt=st.PARSER_MEDIA_TYPE,Et=zt==="application/xhtml+xml"?function(At){return At}:Mf,$="ALLOWED_TAGS"in st?Me({},st.ALLOWED_TAGS,Et):lt,ut="ALLOWED_ATTR"in st?Me({},st.ALLOWED_ATTR,Et):W,Dt="ADD_URI_SAFE_ATTR"in st?Me(Fs(Gt),st.ADD_URI_SAFE_ATTR,Et):Gt,ft="ADD_DATA_URI_TAGS"in 
st?Me(Fs(le),st.ADD_DATA_URI_TAGS,Et):le,vt="FORBID_CONTENTS"in st?Me({},st.FORBID_CONTENTS,Et):Tt,K="FORBID_TAGS"in st?Me({},st.FORBID_TAGS,Et):{},it="FORBID_ATTR"in st?Me({},st.FORBID_ATTR,Et):{},mt="USE_PROFILES"in st?st.USE_PROFILES:!1,Z=st.ALLOW_ARIA_ATTR!==!1,V=st.ALLOW_DATA_ATTR!==!1,Q=st.ALLOW_UNKNOWN_PROTOCOLS||!1,q=st.SAFE_FOR_TEMPLATES||!1,U=st.WHOLE_DOCUMENT||!1,P=st.RETURN_DOM||!1,et=st.RETURN_DOM_FRAGMENT||!1,at=st.RETURN_TRUSTED_TYPE||!1,j=st.FORCE_BODY||!1,It=st.SANITIZE_DOM!==!1,Lt=st.SANITIZE_NAMED_PROPS||!1,Ct=st.KEEP_CONTENT!==!1,pt=st.IN_PLACE||!1,Y=st.ALLOWED_URI_REGEXP||Y,jt=st.NAMESPACE||we,st.CUSTOM_ELEMENT_HANDLING&>(st.CUSTOM_ELEMENT_HANDLING.tagNameCheck)&&(tt.tagNameCheck=st.CUSTOM_ELEMENT_HANDLING.tagNameCheck),st.CUSTOM_ELEMENT_HANDLING&>(st.CUSTOM_ELEMENT_HANDLING.attributeNameCheck)&&(tt.attributeNameCheck=st.CUSTOM_ELEMENT_HANDLING.attributeNameCheck),st.CUSTOM_ELEMENT_HANDLING&&typeof st.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements=="boolean"&&(tt.allowCustomizedBuiltInElements=st.CUSTOM_ELEMENT_HANDLING.allowCustomizedBuiltInElements),q&&(V=!1),et&&(P=!0),mt&&($=Me({},ni(Dk)),ut=[],mt.html===!0&&(Me($,Bk),Me(ut,Ok)),mt.svg===!0&&(Me($,Hp),Me(ut,$p),Me(ut,Rf)),mt.svgFilters===!0&&(Me($,Gp),Me(ut,$p),Me(ut,Rf)),mt.mathMl===!0&&(Me($,jp),Me(ut,Fk),Me(ut,Rf))),st.ADD_TAGS&&($===lt&&($=Fs($)),Me($,st.ADD_TAGS,Et)),st.ADD_ATTR&&(ut===W&&(ut=Fs(ut)),Me(ut,st.ADD_ATTR,Et)),st.ADD_URI_SAFE_ATTR&&Me(Dt,st.ADD_URI_SAFE_ATTR,Et),st.FORBID_CONTENTS&&(vt===Tt&&(vt=Fs(vt)),Me(vt,st.FORBID_CONTENTS,Et)),Ct&&($["#text"]=!0),U&&Me($,["html","head","body"]),$.table&&(Me($,["tbody"]),delete K.tbody),sn&&sn(st),kt=st)},yt=Me({},["mi","mo","mn","ms","mtext"]),ne=Me({},["foreignobject","desc","title","annotation-xml"]),ve=Me({},["title","style","font","a","script"]),ye=Me({},Hp);Me(ye,Gp),Me(ye,YH);var be=Me({},jp);Me(be,UH);var Te=function(st){var At=x(st);(!At||!At.tagName)&&(At={namespaceURI:we,tagName:"template"});var 
Nt=Mf(st.tagName),Jt=Mf(At.tagName);return st.namespaceURI===Qt?At.namespaceURI===we?Nt==="svg":At.namespaceURI===$t?Nt==="svg"&&(Jt==="annotation-xml"||yt[Jt]):Boolean(ye[Nt]):st.namespaceURI===$t?At.namespaceURI===we?Nt==="math":At.namespaceURI===Qt?Nt==="math"&&ne[Jt]:Boolean(be[Nt]):st.namespaceURI===we?At.namespaceURI===Qt&&!ne[Jt]||At.namespaceURI===$t&&!yt[Jt]?!1:!be[Nt]&&(ve[Nt]||!ye[Nt]):!1},Wt=function(st){Tc(e.removed,{element:st});try{st.parentNode.removeChild(st)}catch{try{st.outerHTML=C}catch{st.remove()}}},se=function(st,At){try{Tc(e.removed,{attribute:At.getAttributeNode(st),from:At})}catch{Tc(e.removed,{attribute:null,from:At})}if(At.removeAttribute(st),st==="is"&&!ut[st])if(P||et)try{Wt(At)}catch{}else try{At.setAttribute(st,"")}catch{}},me=function(st){var At,Nt;if(j)st="<remove></remove>"+st;else{var Jt=PH(st,/^[\r\n\t ]+/);Nt=Jt&&Jt[0]}zt==="application/xhtml+xml"&&(st='<html xmlns="http://www.w3.org/1999/xhtml"><head></head><body>'+st+"</body></html>");var ze=T?T.createHTML(st):st;if(jt===we)try{At=new f().parseFromString(ze,zt)}catch{}if(!At||!At.documentElement){At=S.createDocument(jt,"template",null);try{At.documentElement.innerHTML=Ft?"":ze}catch{}}var Pe=At.body||At.documentElement;return st&&Nt&&Pe.insertBefore(n.createTextNode(Nt),Pe.childNodes[0]||null),jt===we?L.call(At,U?"html":"body")[0]:U?At.documentElement:Pe},ue=function(st){return R.call(st.ownerDocument||st,st,l.SHOW_ELEMENT|l.SHOW_COMMENT|l.SHOW_TEXT,null,!1)},_a=function(st){return st instanceof d&&(typeof st.nodeName!="string"||typeof st.textContent!="string"||typeof st.removeChild!="function"||!(st.attributes instanceof h)||typeof st.removeAttribute!="function"||typeof st.setAttribute!="function"||typeof st.namespaceURI!="string"||typeof st.insertBefore!="function")},Hr=function(st){return Ha(s)==="object"?st instanceof s:st&&Ha(st)==="object"&&typeof st.nodeType=="number"&&typeof 
st.nodeName=="string"},Ie=function(st,At,Nt){!w[st]||FH(w[st],function(Jt){Jt.call(e,At,Nt,kt)})},oe=function(st){var At;if(Ie("beforeSanitizeElements",st,null),_a(st)||on(/[\u0080-\uFFFF]/,st.nodeName))return Wt(st),!0;var Nt=Et(st.nodeName);if(Ie("uponSanitizeElement",st,{tagName:Nt,allowedTags:$}),st.hasChildNodes()&&!Hr(st.firstElementChild)&&(!Hr(st.content)||!Hr(st.content.firstElementChild))&&on(/<[/\w]/g,st.innerHTML)&&on(/<[/\w]/g,st.textContent)||Nt==="select"&&on(/<template/i,st.innerHTML))return Wt(st),!0;if(!$[Nt]||K[Nt]){if(!K[Nt]&&wr(Nt)&&(tt.tagNameCheck instanceof RegExp&&on(tt.tagNameCheck,Nt)||tt.tagNameCheck instanceof Function&&tt.tagNameCheck(Nt)))return!1;if(Ct&&!vt[Nt]){var Jt=x(st)||st.parentNode,ze=b(st)||st.childNodes;if(ze&&Jt)for(var Pe=ze.length,qe=Pe-1;qe>=0;--qe)Jt.insertBefore(_(ze[qe],!0),y(st))}return Wt(st),!0}return st instanceof o&&!Te(st)||(Nt==="noscript"||Nt==="noembed")&&on(/<\/no(script|embed)/i,st.innerHTML)?(Wt(st),!0):(q&&st.nodeType===3&&(At=st.textContent,At=Ga(At,D," "),At=Ga(At,N," "),st.textContent!==At&&(Tc(e.removed,{element:st.cloneNode()}),st.textContent=At)),Ie("afterSanitizeElements",st,null),!1)},Ke=function(st,At,Nt){if(It&&(At==="id"||At==="name")&&(Nt in n||Nt in Ut))return!1;if(!(V&&!it[At]&&on(z,At))){if(!(Z&&on(X,At))){if(!ut[At]||it[At]){if(!(wr(st)&&(tt.tagNameCheck instanceof RegExp&&on(tt.tagNameCheck,st)||tt.tagNameCheck instanceof Function&&tt.tagNameCheck(st))&&(tt.attributeNameCheck instanceof RegExp&&on(tt.attributeNameCheck,At)||tt.attributeNameCheck instanceof Function&&tt.attributeNameCheck(At))||At==="is"&&tt.allowCustomizedBuiltInElements&&(tt.tagNameCheck instanceof RegExp&&on(tt.tagNameCheck,Nt)||tt.tagNameCheck instanceof Function&&tt.tagNameCheck(Nt))))return!1}else if(!Dt[At]){if(!on(Y,Ga(Nt,J,""))){if(!((At==="src"||At==="xlink:href"||At==="href")&&st!=="script"&&qH(Nt,"data:")===0&&ft[st])){if(!(Q&&!on(ct,Ga(Nt,J,"")))){if(Nt)return!1}}}}}}return!0},wr=function(st){return 
st.indexOf("-")>0},Ge=function(st){var At,Nt,Jt,ze;Ie("beforeSanitizeAttributes",st,null);var Pe=st.attributes;if(!!Pe){var qe={attrName:"",attrValue:"",keepAttr:!0,allowedAttributes:ut};for(ze=Pe.length;ze--;){At=Pe[ze];var Tr=At,Ve=Tr.name,va=Tr.namespaceURI;if(Nt=Ve==="value"?At.value:VH(At.value),Jt=Et(Ve),qe.attrName=Jt,qe.attrValue=Nt,qe.keepAttr=!0,qe.forceKeepAttr=void 0,Ie("uponSanitizeAttribute",st,qe),Nt=qe.attrValue,!qe.forceKeepAttr&&(se(Ve,st),!!qe.keepAttr)){if(on(/\/>/i,Nt)){se(Ve,st);continue}q&&(Nt=Ga(Nt,D," "),Nt=Ga(Nt,N," "));var Ce=Et(st.nodeName);if(!!Ke(Ce,Jt,Nt)){if(Lt&&(Jt==="id"||Jt==="name")&&(se(Ve,st),Nt=Rt+Nt),T&&Ha(p)==="object"&&typeof p.getAttributeType=="function"&&!va)switch(p.getAttributeType(Ce,Jt)){case"TrustedHTML":Nt=T.createHTML(Nt);break;case"TrustedScriptURL":Nt=T.createScriptURL(Nt);break}try{va?st.setAttributeNS(va,Ve,Nt):st.setAttribute(Ve,Nt),Nk(e.removed)}catch{}}}}Ie("afterSanitizeAttributes",st,null)}},Ze=function qt(st){var At,Nt=ue(st);for(Ie("beforeSanitizeShadowDOM",st,null);At=Nt.nextNode();)Ie("uponSanitizeShadowNode",At,null),!oe(At)&&(At.content instanceof i&&qt(At.content),Ge(At));Ie("afterSanitizeShadowDOM",st,null)};return e.sanitize=function(qt){var st=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{},At,Nt,Jt,ze,Pe;if(Ft=!qt,Ft&&(qt="<!-->"),typeof qt!="string"&&!Hr(qt)){if(typeof qt.toString!="function")throw Wp("toString is not a function");if(qt=qt.toString(),typeof qt!="string")throw Wp("dirty is not a string, aborting")}if(!e.isSupported){if(Ha(t.toStaticHTML)==="object"||typeof t.toStaticHTML=="function"){if(typeof qt=="string")return t.toStaticHTML(qt);if(Hr(qt))return t.toStaticHTML(qt.outerHTML)}return qt}if(F||he(st),e.removed=[],typeof qt=="string"&&(pt=!1),pt){if(qt.nodeName){var qe=Et(qt.nodeName);if(!$[qe]||K[qe])throw Wp("root node is forbidden and cannot be sanitized in-place")}}else if(qt instanceof 
s)At=me("<!---->"),Nt=At.ownerDocument.importNode(qt,!0),Nt.nodeType===1&&Nt.nodeName==="BODY"||Nt.nodeName==="HTML"?At=Nt:At.appendChild(Nt);else{if(!P&&!q&&!U&&qt.indexOf("<")===-1)return T&&at?T.createHTML(qt):qt;if(At=me(qt),!At)return P?null:at?C:""}At&&j&&Wt(At.firstChild);for(var Tr=ue(pt?qt:At);Jt=Tr.nextNode();)Jt.nodeType===3&&Jt===ze||oe(Jt)||(Jt.content instanceof i&&Ze(Jt.content),Ge(Jt),ze=Jt);if(ze=null,pt)return qt;if(P){if(et)for(Pe=A.call(At.ownerDocument);At.firstChild;)Pe.appendChild(At.firstChild);else Pe=At;return ut.shadowroot&&(Pe=v.call(r,Pe,!0)),Pe}var Ve=U?At.outerHTML:At.innerHTML;return U&&$["!doctype"]&&At.ownerDocument&&At.ownerDocument.doctype&&At.ownerDocument.doctype.name&&on(ZH,At.ownerDocument.doctype.name)&&(Ve="<!DOCTYPE "+At.ownerDocument.doctype.name+`> -`+Ve),q&&(Ve=Ga(Ve,D," "),Ve=Ga(Ve,N," ")),T&&at?T.createHTML(Ve):Ve},e.setConfig=function(qt){he(qt),F=!0},e.clearConfig=function(){kt=null,F=!1},e.isValidAttribute=function(qt,st,At){kt||he({});var Nt=Et(qt),Jt=Et(st);return Ke(Nt,Jt,At)},e.addHook=function(qt,st){typeof st=="function"&&(w[qt]=w[qt]||[],Tc(w[qt],st))},e.removeHook=function(qt){if(w[qt])return Nk(w[qt])},e.removeHooks=function(qt){w[qt]&&(w[qt]=[])},e.removeAllHooks=function(){w={}},e}var Ec=Pk();const tG=t=>t?zk(t).replace(/\\n/g,"#br#").split("#br#"):[""],qk=t=>Ec.sanitize(t),Vk=(t,e)=>{var r;if(((r=e.flowchart)==null?void 0:r.htmlLabels)!==!1){const n=e.securityLevel;n==="antiscript"||n==="strict"?t=qk(t):n!=="loose"&&(t=zk(t),t=t.replace(/</g,"<").replace(/>/g,">"),t=t.replace(/=/g,"="),t=iG(t))}return t},ai=(t,e)=>t&&(e.dompurifyConfig?t=Ec.sanitize(Vk(t,e),e.dompurifyConfig).toString():t=Ec.sanitize(Vk(t,e)),t),eG=(t,e)=>typeof t=="string"?ai(t,e):t.flat().map(r=>ai(r,e)),If=/<br\s*\/?>/gi,rG=t=>If.test(t),nG=t=>t.split(If),iG=t=>t.replace(/#br#/g,"<br/>"),zk=t=>t.replace(If,"#br#"),aG=t=>{let e="";return 
t&&(e=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,e=e.replaceAll(/\(/g,"\\("),e=e.replaceAll(/\)/g,"\\)")),e},Mr=t=>!(t===!1||["false","null","0"].includes(String(t).trim().toLowerCase())),ja=function(t){let e=t;return t.indexOf("~")!==-1?(e=e.replace(/~([^~].*)/,"<$1"),e=e.replace(/~([^~]*)$/,">$1"),ja(e)):e},pe={getRows:tG,sanitizeText:ai,sanitizeTextOrArray:eG,hasBreaks:rG,splitBreaks:nG,lineBreakRegex:If,removeScript:qk,getUrl:aG,evaluate:Mr},Nf={min:{r:0,g:0,b:0,s:0,l:0,a:0},max:{r:255,g:255,b:255,h:360,s:100,l:100,a:1},clamp:{r:t=>t>=255?255:t<0?0:t,g:t=>t>=255?255:t<0?0:t,b:t=>t>=255?255:t<0?0:t,h:t=>t%360,s:t=>t>=100?100:t<0?0:t,l:t=>t>=100?100:t<0?0:t,a:t=>t>=1?1:t<0?0:t},toLinear:t=>{const e=t/255;return t>.03928?Math.pow((e+.055)/1.055,2.4):e/12.92},hue2rgb:(t,e,r)=>(r<0&&(r+=1),r>1&&(r-=1),r<1/6?t+(e-t)*6*r:r<1/2?e:r<2/3?t+(e-t)*(2/3-r)*6:t),hsl2rgb:({h:t,s:e,l:r},n)=>{if(!e)return r*2.55;t/=360,e/=100,r/=100;const i=r<.5?r*(1+e):r+e-r*e,a=2*r-i;switch(n){case"r":return Nf.hue2rgb(a,i,t+1/3)*255;case"g":return Nf.hue2rgb(a,i,t)*255;case"b":return Nf.hue2rgb(a,i,t-1/3)*255}},rgb2hsl:({r:t,g:e,b:r},n)=>{t/=255,e/=255,r/=255;const i=Math.max(t,e,r),a=Math.min(t,e,r),s=(i+a)/2;if(n==="l")return s*100;if(i===a)return 0;const o=i-a,l=s>.5?o/(2-i-a):o/(i+a);if(n==="s")return l*100;switch(i){case t:return((e-r)/o+(e<r?6:0))*60;case e:return((r-t)/o+2)*60;case r:return((t-e)/o+4)*60;default:return-1}}},ke={channel:Nf,lang:{clamp:(t,e,r)=>e>r?Math.min(e,Math.max(r,t)):Math.min(r,Math.max(e,t)),round:t=>Math.round(t*1e10)/1e10},unit:{dec2hex:t=>{const e=Math.round(t).toString(16);return e.length>1?e:`0${e}`}}},$a={};for(let t=0;t<=255;t++)$a[t]=ke.unit.dec2hex(t);const zr={ALL:0,RGB:1,HSL:2};class sG{constructor(){this.type=zr.ALL}get(){return this.type}set(e){if(this.type&&this.type!==e)throw new Error("Cannot change both RGB and HSL channels at the same 
time");this.type=e}reset(){this.type=zr.ALL}is(e){return this.type===e}}const oG=sG;class lG{constructor(e,r){this.color=r,this.changed=!1,this.data=e,this.type=new oG}set(e,r){return this.color=r,this.changed=!1,this.data=e,this.type.type=zr.ALL,this}_ensureHSL(){const e=this.data,{h:r,s:n,l:i}=e;r===void 0&&(e.h=ke.channel.rgb2hsl(e,"h")),n===void 0&&(e.s=ke.channel.rgb2hsl(e,"s")),i===void 0&&(e.l=ke.channel.rgb2hsl(e,"l"))}_ensureRGB(){const e=this.data,{r,g:n,b:i}=e;r===void 0&&(e.r=ke.channel.hsl2rgb(e,"r")),n===void 0&&(e.g=ke.channel.hsl2rgb(e,"g")),i===void 0&&(e.b=ke.channel.hsl2rgb(e,"b"))}get r(){const e=this.data,r=e.r;return!this.type.is(zr.HSL)&&r!==void 0?r:(this._ensureHSL(),ke.channel.hsl2rgb(e,"r"))}get g(){const e=this.data,r=e.g;return!this.type.is(zr.HSL)&&r!==void 0?r:(this._ensureHSL(),ke.channel.hsl2rgb(e,"g"))}get b(){const e=this.data,r=e.b;return!this.type.is(zr.HSL)&&r!==void 0?r:(this._ensureHSL(),ke.channel.hsl2rgb(e,"b"))}get h(){const e=this.data,r=e.h;return!this.type.is(zr.RGB)&&r!==void 0?r:(this._ensureRGB(),ke.channel.rgb2hsl(e,"h"))}get s(){const e=this.data,r=e.s;return!this.type.is(zr.RGB)&&r!==void 0?r:(this._ensureRGB(),ke.channel.rgb2hsl(e,"s"))}get l(){const e=this.data,r=e.l;return!this.type.is(zr.RGB)&&r!==void 0?r:(this._ensureRGB(),ke.channel.rgb2hsl(e,"l"))}get a(){return this.data.a}set r(e){this.type.set(zr.RGB),this.changed=!0,this.data.r=e}set g(e){this.type.set(zr.RGB),this.changed=!0,this.data.g=e}set b(e){this.type.set(zr.RGB),this.changed=!0,this.data.b=e}set h(e){this.type.set(zr.HSL),this.changed=!0,this.data.h=e}set s(e){this.type.set(zr.HSL),this.changed=!0,this.data.s=e}set l(e){this.type.set(zr.HSL),this.changed=!0,this.data.l=e}set a(e){this.changed=!0,this.data.a=e}}const cG=lG,Bf=new cG({r:0,g:0,b:0,a:0},"transparent"),Yk={re:/^#((?:[a-f0-9]{2}){2,4}|[a-f0-9]{3})$/i,parse:t=>{if(t.charCodeAt(0)!==35)return;const e=t.match(Yk.re);if(!e)return;const 
r=e[1],n=parseInt(r,16),i=r.length,a=i%4===0,s=i>4,o=s?1:17,l=s?8:4,u=a?0:-1,h=s?255:15;return Bf.set({r:(n>>l*(u+3)&h)*o,g:(n>>l*(u+2)&h)*o,b:(n>>l*(u+1)&h)*o,a:a?(n&h)*o/255:1},t)},stringify:t=>{const{r:e,g:r,b:n,a:i}=t;return i<1?`#${$a[Math.round(e)]}${$a[Math.round(r)]}${$a[Math.round(n)]}${$a[Math.round(i*255)]}`:`#${$a[Math.round(e)]}${$a[Math.round(r)]}${$a[Math.round(n)]}`}},Cc=Yk,Df={re:/^hsla?\(\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e-?\d+)?(?:deg|grad|rad|turn)?)\s*?(?:,|\s)\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e-?\d+)?%)\s*?(?:,|\s)\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e-?\d+)?%)(?:\s*?(?:,|\/)\s*?\+?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e-?\d+)?(%)?))?\s*?\)$/i,hueRe:/^(.+?)(deg|grad|rad|turn)$/i,_hue2deg:t=>{const e=t.match(Df.hueRe);if(e){const[,r,n]=e;switch(n){case"grad":return ke.channel.clamp.h(parseFloat(r)*.9);case"rad":return ke.channel.clamp.h(parseFloat(r)*180/Math.PI);case"turn":return ke.channel.clamp.h(parseFloat(r)*360)}}return ke.channel.clamp.h(parseFloat(t))},parse:t=>{const e=t.charCodeAt(0);if(e!==104&&e!==72)return;const r=t.match(Df.re);if(!r)return;const[,n,i,a,s,o]=r;return Bf.set({h:Df._hue2deg(n),s:ke.channel.clamp.s(parseFloat(i)),l:ke.channel.clamp.l(parseFloat(a)),a:s?ke.channel.clamp.a(o?parseFloat(s)/100:parseFloat(s)):1},t)},stringify:t=>{const{h:e,s:r,l:n,a:i}=t;return i<1?`hsla(${ke.lang.round(e)}, ${ke.lang.round(r)}%, ${ke.lang.round(n)}%, ${i})`:`hsl(${ke.lang.round(e)}, ${ke.lang.round(r)}%, 
${ke.lang.round(n)}%)`}},Of=Df,Ff={colors:{aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyanaqua:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400",darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc",ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a",lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumviol
etred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1",moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",rebeccapurple:"#663399",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57",seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",transparent:"#00000000",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32"},parse:t=>{t=t.toLowerCase();const e=Ff.colors[t];if(!!e)return Cc.parse(e)},stringify:t=>{const e=Cc.stringify(t);for(const r in Ff.colors)if(Ff.colors[r]===e)return r}},Uk=Ff,Wk={re:/^rgba?\(\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e\d+)?(%?))\s*?(?:,|\s)\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e\d+)?(%?))\s*?(?:,|\s)\s*?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e\d+)?(%?))(?:\s*?(?:,|\/)\s*?\+?(-?(?:\d+(?:\.\d+)?|(?:\.\d+))(?:e\d+)?(%?)))?\s*?\)$/i,parse:t=>{const e=t.charCodeAt(0);if(e!==114&&e!==82)return;const r=t.match(Wk.re);if(!r)return;const[,n,i,a,s,o,l,u,h]=r;return Bf.set({r:ke.channel.clamp.r(i?parseFloat(n)*2.55:parseFloat(n)),g:ke.channel.clamp.g(s?parseFloat(a)*2.55:parseFloat(a)),b:ke.channel.clamp.b(l?parseFloat(o)*2.55:parseFloat(o)),a:u?ke.channel.clamp.a(h?parseFloat(u)/100:parseFloat(u)):1},t)},stringify:t=>{const{r:e,g:r,b:n,a:i}=t;return i<1?`rgba(${ke.lang.round(e)}, ${ke.lang.round(r)}, ${ke.lang.round(n)}, ${ke.lang.round(i)})`:`rgb(${ke.lang.round(e)}, ${ke.lang.round(r)}, 
${ke.lang.round(n)})`}},Pf=Wk,ia={format:{keyword:Uk,hex:Cc,rgb:Pf,rgba:Pf,hsl:Of,hsla:Of},parse:t=>{if(typeof t!="string")return t;const e=Cc.parse(t)||Pf.parse(t)||Of.parse(t)||Uk.parse(t);if(e)return e;throw new Error(`Unsupported color format: "${t}"`)},stringify:t=>!t.changed&&t.color?t.color:t.type.is(zr.HSL)||t.data.r===void 0?Of.stringify(t):t.a<1||!Number.isInteger(t.r)||!Number.isInteger(t.g)||!Number.isInteger(t.b)?Pf.stringify(t):Cc.stringify(t)},Hk=(t,e)=>{const r=ia.parse(t);for(const n in e)r[n]=ke.channel.clamp[n](e[n]);return ia.stringify(r)},Sc=(t,e,r=0,n=1)=>{if(typeof t!="number")return Hk(t,{a:e});const i=Bf.set({r:ke.channel.clamp.r(t),g:ke.channel.clamp.g(e),b:ke.channel.clamp.b(r),a:ke.channel.clamp.a(n)});return ia.stringify(i)},Gk=(t,e,r)=>{const n=ia.parse(t),i=n[e],a=ke.channel.clamp[e](i+r);return i!==a&&(n[e]=a),ia.stringify(n)},ae=(t,e)=>Gk(t,"l",e),ge=(t,e)=>Gk(t,"l",-e),_t=(t,e)=>{const r=ia.parse(t),n={};for(const i in e)!e[i]||(n[i]=r[i]+e[i]);return Hk(t,n)},uG=(t,e,r=50)=>{const{r:n,g:i,b:a,a:s}=ia.parse(t),{r:o,g:l,b:u,a:h}=ia.parse(e),d=r/100,f=d*2-1,p=s-h,_=((f*p===-1?f:(f+p)/(1+f*p))+1)/2,y=1-_,b=n*_+o*y,x=i*_+l*y,k=a*_+u*y,T=s*d+h*(1-d);return Sc(b,x,k,T)},Yt=(t,e=100)=>{const r=ia.parse(t);return r.r=255-r.r,r.g=255-r.g,r.b=255-r.b,uG(r,t,e)},ln=(t,e)=>e?_t(t,{s:-40,l:10}):_t(t,{s:-40,l:-10}),qf="#ffffff",Vf="#f2f2f2";class hG{constructor(){this.background="#f4f4f4",this.darkMode=!1,this.primaryColor="#fff4dd",this.noteBkgColor="#fff5ad",this.noteTextColor="#333",this.THEME_COLOR_LIMIT=12,this.fontFamily='"trebuchet ms", verdana, arial, 
sans-serif',this.fontSize="16px"}updateColors(){if(this.primaryTextColor=this.primaryTextColor||(this.darkMode?"#eee":"#333"),this.secondaryColor=this.secondaryColor||_t(this.primaryColor,{h:-120}),this.tertiaryColor=this.tertiaryColor||_t(this.primaryColor,{h:180,l:5}),this.primaryBorderColor=this.primaryBorderColor||ln(this.primaryColor,this.darkMode),this.secondaryBorderColor=this.secondaryBorderColor||ln(this.secondaryColor,this.darkMode),this.tertiaryBorderColor=this.tertiaryBorderColor||ln(this.tertiaryColor,this.darkMode),this.noteBorderColor=this.noteBorderColor||ln(this.noteBkgColor,this.darkMode),this.noteBkgColor=this.noteBkgColor||"#fff5ad",this.noteTextColor=this.noteTextColor||"#333",this.secondaryTextColor=this.secondaryTextColor||Yt(this.secondaryColor),this.tertiaryTextColor=this.tertiaryTextColor||Yt(this.tertiaryColor),this.lineColor=this.lineColor||Yt(this.background),this.textColor=this.textColor||this.primaryTextColor,this.nodeBkg=this.nodeBkg||this.primaryColor,this.mainBkg=this.mainBkg||this.primaryColor,this.nodeBorder=this.nodeBorder||this.primaryBorderColor,this.clusterBkg=this.clusterBkg||this.tertiaryColor,this.clusterBorder=this.clusterBorder||this.tertiaryBorderColor,this.defaultLinkColor=this.defaultLinkColor||this.lineColor,this.titleColor=this.titleColor||this.tertiaryTextColor,this.edgeLabelBackground=this.edgeLabelBackground||(this.darkMode?ge(this.secondaryColor,30):this.secondaryColor),this.nodeTextColor=this.nodeTextColor||this.primaryTextColor,this.actorBorder=this.actorBorder||this.primaryBorderColor,this.actorBkg=this.actorBkg||this.mainBkg,this.actorTextColor=this.actorTextColor||this.primaryTextColor,this.actorLineColor=this.actorLineColor||"grey",this.labelBoxBkgColor=this.labelBoxBkgColor||this.actorBkg,this.signalColor=this.signalColor||this.textColor,this.signalTextColor=this.signalTextColor||this.textColor,this.labelBoxBorderColor=this.labelBoxBorderColor||this.actorBorder,this.labelTextColor=this.labelTextColor||this
.actorTextColor,this.loopTextColor=this.loopTextColor||this.actorTextColor,this.activationBorderColor=this.activationBorderColor||ge(this.secondaryColor,10),this.activationBkgColor=this.activationBkgColor||this.secondaryColor,this.sequenceNumberColor=this.sequenceNumberColor||Yt(this.lineColor),this.sectionBkgColor=this.sectionBkgColor||this.tertiaryColor,this.altSectionBkgColor=this.altSectionBkgColor||"white",this.sectionBkgColor=this.sectionBkgColor||this.secondaryColor,this.sectionBkgColor2=this.sectionBkgColor2||this.primaryColor,this.excludeBkgColor=this.excludeBkgColor||"#eeeeee",this.taskBorderColor=this.taskBorderColor||this.primaryBorderColor,this.taskBkgColor=this.taskBkgColor||this.primaryColor,this.activeTaskBorderColor=this.activeTaskBorderColor||this.primaryColor,this.activeTaskBkgColor=this.activeTaskBkgColor||ae(this.primaryColor,23),this.gridColor=this.gridColor||"lightgrey",this.doneTaskBkgColor=this.doneTaskBkgColor||"lightgrey",this.doneTaskBorderColor=this.doneTaskBorderColor||"grey",this.critBorderColor=this.critBorderColor||"#ff8888",this.critBkgColor=this.critBkgColor||"red",this.todayLineColor=this.todayLineColor||"red",this.taskTextColor=this.taskTextColor||this.textColor,this.taskTextOutsideColor=this.taskTextOutsideColor||this.textColor,this.taskTextLightColor=this.taskTextLightColor||this.textColor,this.taskTextColor=this.taskTextColor||this.primaryTextColor,this.taskTextDarkColor=this.taskTextDarkColor||this.textColor,this.taskTextClickableColor=this.taskTextClickableColor||"#003163",this.personBorder=this.personBorder||this.primaryBorderColor,this.personBkg=this.personBkg||this.mainBkg,this.transitionColor=this.transitionColor||this.lineColor,this.transitionLabelColor=this.transitionLabelColor||this.textColor,this.stateLabelColor=this.stateLabelColor||this.stateBkg||this.primaryTextColor,this.stateBkg=this.stateBkg||this.mainBkg,this.labelBackgroundColor=this.labelBackgroundColor||this.stateBkg,this.compositeBackground=this.compositeB
ackground||this.background||this.tertiaryColor,this.altBackground=this.altBackground||this.tertiaryColor,this.compositeTitleBackground=this.compositeTitleBackground||this.mainBkg,this.compositeBorder=this.compositeBorder||this.nodeBorder,this.innerEndBackground=this.nodeBorder,this.errorBkgColor=this.errorBkgColor||this.tertiaryColor,this.errorTextColor=this.errorTextColor||this.tertiaryTextColor,this.transitionColor=this.transitionColor||this.lineColor,this.specialStateColor=this.lineColor,this.cScale0=this.cScale0||this.primaryColor,this.cScale1=this.cScale1||this.secondaryColor,this.cScale2=this.cScale2||this.tertiaryColor,this.cScale3=this.cScale3||_t(this.primaryColor,{h:30}),this.cScale4=this.cScale4||_t(this.primaryColor,{h:60}),this.cScale5=this.cScale5||_t(this.primaryColor,{h:90}),this.cScale6=this.cScale6||_t(this.primaryColor,{h:120}),this.cScale7=this.cScale7||_t(this.primaryColor,{h:150}),this.cScale8=this.cScale8||_t(this.primaryColor,{h:210,l:150}),this.cScale9=this.cScale9||_t(this.primaryColor,{h:270}),this.cScale10=this.cScale10||_t(this.primaryColor,{h:300}),this.cScale11=this.cScale11||_t(this.primaryColor,{h:330}),this.darkMode)for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScale"+e]=ge(this["cScale"+e],75);else for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScale"+e]=ge(this["cScale"+e],25);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleInv"+e]=this["cScaleInv"+e]||Yt(this["cScale"+e]);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this.darkMode?this["cScalePeer"+e]=this["cScalePeer"+e]||ae(this["cScale"+e],10):this["cScalePeer"+e]=this["cScalePeer"+e]||ge(this["cScale"+e],10);this.scaleLabelColor=this.scaleLabelColor||this.labelTextColor;for(let 
e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleLabel"+e]=this["cScaleLabel"+e]||this.scaleLabelColor;this.classText=this.classText||this.textColor,this.fillType0=this.fillType0||this.primaryColor,this.fillType1=this.fillType1||this.secondaryColor,this.fillType2=this.fillType2||_t(this.primaryColor,{h:64}),this.fillType3=this.fillType3||_t(this.secondaryColor,{h:64}),this.fillType4=this.fillType4||_t(this.primaryColor,{h:-64}),this.fillType5=this.fillType5||_t(this.secondaryColor,{h:-64}),this.fillType6=this.fillType6||_t(this.primaryColor,{h:128}),this.fillType7=this.fillType7||_t(this.secondaryColor,{h:128}),this.pie1=this.pie1||this.primaryColor,this.pie2=this.pie2||this.secondaryColor,this.pie3=this.pie3||this.tertiaryColor,this.pie4=this.pie4||_t(this.primaryColor,{l:-10}),this.pie5=this.pie5||_t(this.secondaryColor,{l:-10}),this.pie6=this.pie6||_t(this.tertiaryColor,{l:-10}),this.pie7=this.pie7||_t(this.primaryColor,{h:60,l:-10}),this.pie8=this.pie8||_t(this.primaryColor,{h:-60,l:-10}),this.pie9=this.pie9||_t(this.primaryColor,{h:120,l:0}),this.pie10=this.pie10||_t(this.primaryColor,{h:60,l:-20}),this.pie11=this.pie11||_t(this.primaryColor,{h:-60,l:-20}),this.pie12=this.pie12||_t(this.primaryColor,{h:120,l:-10}),this.pieTitleTextSize=this.pieTitleTextSize||"25px",this.pieTitleTextColor=this.pieTitleTextColor||this.taskTextDarkColor,this.pieSectionTextSize=this.pieSectionTextSize||"17px",this.pieSectionTextColor=this.pieSectionTextColor||this.textColor,this.pieLegendTextSize=this.pieLegendTextSize||"17px",this.pieLegendTextColor=this.pieLegendTextColor||this.taskTextDarkColor,this.pieStrokeColor=this.pieStrokeColor||"black",this.pieStrokeWidth=this.pieStrokeWidth||"2px",this.pieOpacity=this.pieOpacity||"0.7",this.requirementBackground=this.requirementBackground||this.primaryColor,this.requirementBorderColor=this.requirementBorderColor||this.primaryBorderColor,this.requirementBorderSize=this.requirementBorderSize||this.primaryBorderColor,this.requirementTextColor=t
his.requirementTextColor||this.primaryTextColor,this.relationColor=this.relationColor||this.lineColor,this.relationLabelBackground=this.relationLabelBackground||(this.darkMode?ge(this.secondaryColor,30):this.secondaryColor),this.relationLabelColor=this.relationLabelColor||this.actorTextColor,this.git0=this.git0||this.primaryColor,this.git1=this.git1||this.secondaryColor,this.git2=this.git2||this.tertiaryColor,this.git3=this.git3||_t(this.primaryColor,{h:-30}),this.git4=this.git4||_t(this.primaryColor,{h:-60}),this.git5=this.git5||_t(this.primaryColor,{h:-90}),this.git6=this.git6||_t(this.primaryColor,{h:60}),this.git7=this.git7||_t(this.primaryColor,{h:120}),this.darkMode?(this.git0=ae(this.git0,25),this.git1=ae(this.git1,25),this.git2=ae(this.git2,25),this.git3=ae(this.git3,25),this.git4=ae(this.git4,25),this.git5=ae(this.git5,25),this.git6=ae(this.git6,25),this.git7=ae(this.git7,25)):(this.git0=ge(this.git0,25),this.git1=ge(this.git1,25),this.git2=ge(this.git2,25),this.git3=ge(this.git3,25),this.git4=ge(this.git4,25),this.git5=ge(this.git5,25),this.git6=ge(this.git6,25),this.git7=ge(this.git7,25)),this.gitInv0=this.gitInv0||Yt(this.git0),this.gitInv1=this.gitInv1||Yt(this.git1),this.gitInv2=this.gitInv2||Yt(this.git2),this.gitInv3=this.gitInv3||Yt(this.git3),this.gitInv4=this.gitInv4||Yt(this.git4),this.gitInv5=this.gitInv5||Yt(this.git5),this.gitInv6=this.gitInv6||Yt(this.git6),this.gitInv7=this.gitInv7||Yt(this.git7),this.branchLabelColor=this.branchLabelColor||(this.darkMode?"black":this.labelTextColor),this.gitBranchLabel0=this.gitBranchLabel0||this.branchLabelColor,this.gitBranchLabel1=this.gitBranchLabel1||this.branchLabelColor,this.gitBranchLabel2=this.gitBranchLabel2||this.branchLabelColor,this.gitBranchLabel3=this.gitBranchLabel3||this.branchLabelColor,this.gitBranchLabel4=this.gitBranchLabel4||this.branchLabelColor,this.gitBranchLabel5=this.gitBranchLabel5||this.branchLabelColor,this.gitBranchLabel6=this.gitBranchLabel6||this.branchLabelColor,this.gitBra
nchLabel7=this.gitBranchLabel7||this.branchLabelColor,this.tagLabelColor=this.tagLabelColor||this.primaryTextColor,this.tagLabelBackground=this.tagLabelBackground||this.primaryColor,this.tagLabelBorder=this.tagBorder||this.primaryBorderColor,this.tagLabelFontSize=this.tagLabelFontSize||"10px",this.commitLabelColor=this.commitLabelColor||this.secondaryTextColor,this.commitLabelBackground=this.commitLabelBackground||this.secondaryColor,this.commitLabelFontSize=this.commitLabelFontSize||"10px",this.attributeBackgroundColorOdd=this.attributeBackgroundColorOdd||qf,this.attributeBackgroundColorEven=this.attributeBackgroundColorEven||Vf}calculate(e){if(typeof e!="object"){this.updateColors();return}const r=Object.keys(e);r.forEach(n=>{this[n]=e[n]}),this.updateColors(),r.forEach(n=>{this[n]=e[n]})}}const fG=t=>{const e=new hG;return e.calculate(t),e};class dG{constructor(){this.background="#333",this.primaryColor="#1f2020",this.secondaryColor=ae(this.primaryColor,16),this.tertiaryColor=_t(this.primaryColor,{h:-160}),this.primaryBorderColor=Yt(this.background),this.secondaryBorderColor=ln(this.secondaryColor,this.darkMode),this.tertiaryBorderColor=ln(this.tertiaryColor,this.darkMode),this.primaryTextColor=Yt(this.primaryColor),this.secondaryTextColor=Yt(this.secondaryColor),this.tertiaryTextColor=Yt(this.tertiaryColor),this.lineColor=Yt(this.background),this.textColor=Yt(this.background),this.mainBkg="#1f2020",this.secondBkg="calculated",this.mainContrastColor="lightgrey",this.darkTextColor=ae(Yt("#323D47"),10),this.lineColor="calculated",this.border1="#81B1DB",this.border2=Sc(255,255,255,.25),this.arrowheadColor="calculated",this.fontFamily='"trebuchet ms", verdana, arial, 
sans-serif',this.fontSize="16px",this.labelBackground="#181818",this.textColor="#ccc",this.THEME_COLOR_LIMIT=12,this.nodeBkg="calculated",this.nodeBorder="calculated",this.clusterBkg="calculated",this.clusterBorder="calculated",this.defaultLinkColor="calculated",this.titleColor="#F9FFFE",this.edgeLabelBackground="calculated",this.actorBorder="calculated",this.actorBkg="calculated",this.actorTextColor="calculated",this.actorLineColor="calculated",this.signalColor="calculated",this.signalTextColor="calculated",this.labelBoxBkgColor="calculated",this.labelBoxBorderColor="calculated",this.labelTextColor="calculated",this.loopTextColor="calculated",this.noteBorderColor="calculated",this.noteBkgColor="#fff5ad",this.noteTextColor="calculated",this.activationBorderColor="calculated",this.activationBkgColor="calculated",this.sequenceNumberColor="black",this.sectionBkgColor=ge("#EAE8D9",30),this.altSectionBkgColor="calculated",this.sectionBkgColor2="#EAE8D9",this.taskBorderColor=Sc(255,255,255,70),this.taskBkgColor="calculated",this.taskTextColor="calculated",this.taskTextLightColor="calculated",this.taskTextOutsideColor="calculated",this.taskTextClickableColor="#003163",this.activeTaskBorderColor=Sc(255,255,255,50),this.activeTaskBkgColor="#81B1DB",this.gridColor="calculated",this.doneTaskBkgColor="calculated",this.doneTaskBorderColor="grey",this.critBorderColor="#E83737",this.critBkgColor="#E83737",this.taskTextDarkColor="calculated",this.todayLineColor="#DB5757",this.personBorder="calculated",this.personBkg="calculated",this.labelColor="calculated",this.errorBkgColor="#a44141",this.errorTextColor="#ddd"}updateColors(){this.secondBkg=ae(this.mainBkg,16),this.lineColor=this.mainContrastColor,this.arrowheadColor=this.mainContrastColor,this.nodeBkg=this.mainBkg,this.nodeBorder=this.border1,this.clusterBkg=this.secondBkg,this.clusterBorder=this.border2,this.defaultLinkColor=this.lineColor,this.edgeLabelBackground=ae(this.labelBackground,25),this.actorBorder=this.border1,this.ac
torBkg=this.mainBkg,this.actorTextColor=this.mainContrastColor,this.actorLineColor=this.mainContrastColor,this.signalColor=this.mainContrastColor,this.signalTextColor=this.mainContrastColor,this.labelBoxBkgColor=this.actorBkg,this.labelBoxBorderColor=this.actorBorder,this.labelTextColor=this.mainContrastColor,this.loopTextColor=this.mainContrastColor,this.noteBorderColor=this.secondaryBorderColor,this.noteBkgColor=this.secondBkg,this.noteTextColor=this.secondaryTextColor,this.activationBorderColor=this.border1,this.activationBkgColor=this.secondBkg,this.altSectionBkgColor=this.background,this.taskBkgColor=ae(this.mainBkg,23),this.taskTextColor=this.darkTextColor,this.taskTextLightColor=this.mainContrastColor,this.taskTextOutsideColor=this.taskTextLightColor,this.gridColor=this.mainContrastColor,this.doneTaskBkgColor=this.mainContrastColor,this.taskTextDarkColor=this.darkTextColor,this.transitionColor=this.transitionColor||this.lineColor,this.transitionLabelColor=this.transitionLabelColor||this.textColor,this.stateLabelColor=this.stateLabelColor||this.stateBkg||this.primaryTextColor,this.stateBkg=this.stateBkg||this.mainBkg,this.labelBackgroundColor=this.labelBackgroundColor||this.stateBkg,this.compositeBackground=this.compositeBackground||this.background||this.tertiaryColor,this.altBackground=this.altBackground||"#555",this.compositeTitleBackground=this.compositeTitleBackground||this.mainBkg,this.compositeBorder=this.compositeBorder||this.nodeBorder,this.innerEndBackground=this.primaryBorderColor,this.specialStateColor="#f4f4f4",this.errorBkgColor=this.errorBkgColor||this.tertiaryColor,this.errorTextColor=this.errorTextColor||this.tertiaryTextColor,this.fillType0=this.primaryColor,this.fillType1=this.secondaryColor,this.fillType2=_t(this.primaryColor,{h:64}),this.fillType3=_t(this.secondaryColor,{h:64}),this.fillType4=_t(this.primaryColor,{h:-64}),this.fillType5=_t(this.secondaryColor,{h:-64}),this.fillType6=_t(this.primaryColor,{h:128}),this.fillType7=_t(this.secon
daryColor,{h:128}),this.cScale1=this.cScale1||"#0b0000",this.cScale2=this.cScale2||"#4d1037",this.cScale3=this.cScale3||"#3f5258",this.cScale4=this.cScale4||"#4f2f1b",this.cScale5=this.cScale5||"#6e0a0a",this.cScale6=this.cScale6||"#3b0048",this.cScale7=this.cScale7||"#995a01",this.cScale8=this.cScale8||"#154706",this.cScale9=this.cScale9||"#161722",this.cScale10=this.cScale10||"#00296f",this.cScale11=this.cScale11||"#01629c",this.cScale12=this.cScale12||"#010029",this.cScale0=this.cScale0||this.primaryColor,this.cScale1=this.cScale1||this.secondaryColor,this.cScale2=this.cScale2||this.tertiaryColor,this.cScale3=this.cScale3||_t(this.primaryColor,{h:30}),this.cScale4=this.cScale4||_t(this.primaryColor,{h:60}),this.cScale5=this.cScale5||_t(this.primaryColor,{h:90}),this.cScale6=this.cScale6||_t(this.primaryColor,{h:120}),this.cScale7=this.cScale7||_t(this.primaryColor,{h:150}),this.cScale8=this.cScale8||_t(this.primaryColor,{h:210}),this.cScale9=this.cScale9||_t(this.primaryColor,{h:270}),this.cScale10=this.cScale10||_t(this.primaryColor,{h:300}),this.cScale11=this.cScale11||_t(this.primaryColor,{h:330});for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleInv"+e]=this["cScaleInv"+e]||Yt(this["cScale"+e]);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScalePeer"+e]=this["cScalePeer"+e]||ae(this["cScale"+e],10);this.scaleLabelColor=this.scaleLabelColor||(this.darkMode?"black":this.labelTextColor);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleLabel"+e]=this["cScaleLabel"+e]||this.scaleLabelColor;for(let 
e=0;e<this.THEME_COLOR_LIMIT;e++)this["pie"+e]=this["cScale"+e];this.pieTitleTextSize=this.pieTitleTextSize||"25px",this.pieTitleTextColor=this.pieTitleTextColor||this.taskTextDarkColor,this.pieSectionTextSize=this.pieSectionTextSize||"17px",this.pieSectionTextColor=this.pieSectionTextColor||this.textColor,this.pieLegendTextSize=this.pieLegendTextSize||"17px",this.pieLegendTextColor=this.pieLegendTextColor||this.taskTextDarkColor,this.pieStrokeColor=this.pieStrokeColor||"black",this.pieStrokeWidth=this.pieStrokeWidth||"2px",this.pieOpacity=this.pieOpacity||"0.7",this.classText=this.primaryTextColor,this.requirementBackground=this.requirementBackground||this.primaryColor,this.requirementBorderColor=this.requirementBorderColor||this.primaryBorderColor,this.requirementBorderSize=this.requirementBorderSize||this.primaryBorderColor,this.requirementTextColor=this.requirementTextColor||this.primaryTextColor,this.relationColor=this.relationColor||this.lineColor,this.relationLabelBackground=this.relationLabelBackground||(this.darkMode?ge(this.secondaryColor,30):this.secondaryColor),this.relationLabelColor=this.relationLabelColor||this.actorTextColor,this.git0=ae(this.secondaryColor,20),this.git1=ae(this.pie2||this.secondaryColor,20),this.git2=ae(this.pie3||this.tertiaryColor,20),this.git3=ae(this.pie4||_t(this.primaryColor,{h:-30}),20),this.git4=ae(this.pie5||_t(this.primaryColor,{h:-60}),20),this.git5=ae(this.pie6||_t(this.primaryColor,{h:-90}),10),this.git6=ae(this.pie7||_t(this.primaryColor,{h:60}),10),this.git7=ae(this.pie8||_t(this.primaryColor,{h:120}),20),this.gitInv0=this.gitInv0||Yt(this.git0),this.gitInv1=this.gitInv1||Yt(this.git1),this.gitInv2=this.gitInv2||Yt(this.git2),this.gitInv3=this.gitInv3||Yt(this.git3),this.gitInv4=this.gitInv4||Yt(this.git4),this.gitInv5=this.gitInv5||Yt(this.git5),this.gitInv6=this.gitInv6||Yt(this.git6),this.gitInv7=this.gitInv7||Yt(this.git7),this.tagLabelColor=this.tagLabelColor||this.primaryTextColor,this.tagLabelBackground=this.ta
gLabelBackground||this.primaryColor,this.tagLabelBorder=this.tagBorder||this.primaryBorderColor,this.tagLabelFontSize=this.tagLabelFontSize||"10px",this.commitLabelColor=this.commitLabelColor||this.secondaryTextColor,this.commitLabelBackground=this.commitLabelBackground||this.secondaryColor,this.commitLabelFontSize=this.commitLabelFontSize||"10px",this.attributeBackgroundColorOdd=this.attributeBackgroundColorOdd||ae(this.background,12),this.attributeBackgroundColorEven=this.attributeBackgroundColorEven||ae(this.background,2)}calculate(e){if(typeof e!="object"){this.updateColors();return}const r=Object.keys(e);r.forEach(n=>{this[n]=e[n]}),this.updateColors(),r.forEach(n=>{this[n]=e[n]})}}const pG=t=>{const e=new dG;return e.calculate(t),e};class gG{constructor(){this.background="#f4f4f4",this.primaryColor="#ECECFF",this.secondaryColor=_t(this.primaryColor,{h:120}),this.secondaryColor="#ffffde",this.tertiaryColor=_t(this.primaryColor,{h:-160}),this.primaryBorderColor=ln(this.primaryColor,this.darkMode),this.secondaryBorderColor=ln(this.secondaryColor,this.darkMode),this.tertiaryBorderColor=ln(this.tertiaryColor,this.darkMode),this.primaryTextColor=Yt(this.primaryColor),this.secondaryTextColor=Yt(this.secondaryColor),this.tertiaryTextColor=Yt(this.tertiaryColor),this.lineColor=Yt(this.background),this.textColor=Yt(this.background),this.background="white",this.mainBkg="#ECECFF",this.secondBkg="#ffffde",this.lineColor="#333333",this.border1="#9370DB",this.border2="#aaaa33",this.arrowheadColor="#333333",this.fontFamily='"trebuchet ms", verdana, arial, 
sans-serif',this.fontSize="16px",this.labelBackground="#e8e8e8",this.textColor="#333",this.THEME_COLOR_LIMIT=12,this.nodeBkg="calculated",this.nodeBorder="calculated",this.clusterBkg="calculated",this.clusterBorder="calculated",this.defaultLinkColor="calculated",this.titleColor="calculated",this.edgeLabelBackground="calculated",this.actorBorder="calculated",this.actorBkg="calculated",this.actorTextColor="black",this.actorLineColor="grey",this.signalColor="calculated",this.signalTextColor="calculated",this.labelBoxBkgColor="calculated",this.labelBoxBorderColor="calculated",this.labelTextColor="calculated",this.loopTextColor="calculated",this.noteBorderColor="calculated",this.noteBkgColor="#fff5ad",this.noteTextColor="calculated",this.activationBorderColor="#666",this.activationBkgColor="#f4f4f4",this.sequenceNumberColor="white",this.sectionBkgColor="calculated",this.altSectionBkgColor="calculated",this.sectionBkgColor2="calculated",this.excludeBkgColor="#eeeeee",this.taskBorderColor="calculated",this.taskBkgColor="calculated",this.taskTextLightColor="calculated",this.taskTextColor=this.taskTextLightColor,this.taskTextDarkColor="calculated",this.taskTextOutsideColor=this.taskTextDarkColor,this.taskTextClickableColor="calculated",this.activeTaskBorderColor="calculated",this.activeTaskBkgColor="calculated",this.gridColor="calculated",this.doneTaskBkgColor="calculated",this.doneTaskBorderColor="calculated",this.critBorderColor="calculated",this.critBkgColor="calculated",this.todayLineColor="calculated",this.sectionBkgColor=Sc(102,102,255,.49),this.altSectionBkgColor="white",this.sectionBkgColor2="#fff400",this.taskBorderColor="#534fbc",this.taskBkgColor="#8a90dd",this.taskTextLightColor="white",this.taskTextColor="calculated",this.taskTextDarkColor="black",this.taskTextOutsideColor="calculated",this.taskTextClickableColor="#003163",this.activeTaskBorderColor="#534fbc",this.activeTaskBkgColor="#bfc7ff",this.gridColor="lightgrey",this.doneTaskBkgColor="lightgrey",this.done
TaskBorderColor="grey",this.critBorderColor="#ff8888",this.critBkgColor="red",this.todayLineColor="red",this.personBorder="calculated",this.personBkg="calculated",this.labelColor="black",this.errorBkgColor="#552222",this.errorTextColor="#552222",this.updateColors()}updateColors(){this.cScale0=this.cScale0||this.primaryColor,this.cScale1=this.cScale1||this.secondaryColor,this.cScale2=this.cScale2||this.tertiaryColor,this.cScale3=this.cScale3||_t(this.primaryColor,{h:30}),this.cScale4=this.cScale4||_t(this.primaryColor,{h:60}),this.cScale5=this.cScale5||_t(this.primaryColor,{h:90}),this.cScale6=this.cScale6||_t(this.primaryColor,{h:120}),this.cScale7=this.cScale7||_t(this.primaryColor,{h:150}),this.cScale8=this.cScale8||_t(this.primaryColor,{h:210}),this.cScale9=this.cScale9||_t(this.primaryColor,{h:270}),this.cScale10=this.cScale10||_t(this.primaryColor,{h:300}),this.cScale11=this.cScale11||_t(this.primaryColor,{h:330}),this["cScalePeer"+1]=this["cScalePeer"+1]||ge(this.secondaryColor,45),this["cScalePeer"+2]=this["cScalePeer"+2]||ge(this.tertiaryColor,40);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScale"+e]=ge(this["cScale"+e],10),this["cScalePeer"+e]=this["cScalePeer"+e]||ge(this["cScale"+e],25);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleInv"+e]=this["cScaleInv"+e]||_t(this["cScale"+e],{h:180});if(this.scaleLabelColor=this.scaleLabelColor!=="calculated"&&this.scaleLabelColor?this.scaleLabelColor:this.labelTextColor,this.labelTextColor!=="calculated"){this.cScaleLabel0=this.cScaleLabel0||Yt(this.labelTextColor),this.cScaleLabel3=this.cScaleLabel3||Yt(this.labelTextColor);for(let 
e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleLabel"+e]=this["cScaleLabel"+e]||this.labelTextColor}this.nodeBkg=this.mainBkg,this.nodeBorder=this.border1,this.clusterBkg=this.secondBkg,this.clusterBorder=this.border2,this.defaultLinkColor=this.lineColor,this.titleColor=this.textColor,this.edgeLabelBackground=this.labelBackground,this.actorBorder=ae(this.border1,23),this.actorBkg=this.mainBkg,this.labelBoxBkgColor=this.actorBkg,this.signalColor=this.textColor,this.signalTextColor=this.textColor,this.labelBoxBorderColor=this.actorBorder,this.labelTextColor=this.actorTextColor,this.loopTextColor=this.actorTextColor,this.noteBorderColor=this.border2,this.noteTextColor=this.actorTextColor,this.taskTextColor=this.taskTextLightColor,this.taskTextOutsideColor=this.taskTextDarkColor,this.transitionColor=this.transitionColor||this.lineColor,this.transitionLabelColor=this.transitionLabelColor||this.textColor,this.stateLabelColor=this.stateLabelColor||this.stateBkg||this.primaryTextColor,this.stateBkg=this.stateBkg||this.mainBkg,this.labelBackgroundColor=this.labelBackgroundColor||this.stateBkg,this.compositeBackground=this.compositeBackground||this.background||this.tertiaryColor,this.altBackground=this.altBackground||"#f0f0f0",this.compositeTitleBackground=this.compositeTitleBackground||this.mainBkg,this.compositeBorder=this.compositeBorder||this.nodeBorder,this.innerEndBackground=this.nodeBorder,this.specialStateColor=this.lineColor,this.errorBkgColor=this.errorBkgColor||this.tertiaryColor,this.errorTextColor=this.errorTextColor||this.tertiaryTextColor,this.transitionColor=this.transitionColor||this.lineColor,this.classText=this.primaryTextColor,this.fillType0=this.primaryColor,this.fillType1=this.secondaryColor,this.fillType2=_t(this.primaryColor,{h:64}),this.fillType3=_t(this.secondaryColor,{h:64}),this.fillType4=_t(this.primaryColor,{h:-64}),this.fillType5=_t(this.secondaryColor,{h:-64}),this.fillType6=_t(this.primaryColor,{h:128}),this.fillType7=_t(this.secondaryColor,{h:1
28}),this.pie1=this.pie1||this.primaryColor,this.pie2=this.pie2||this.secondaryColor,this.pie3=this.pie3||_t(this.tertiaryColor,{l:-40}),this.pie4=this.pie4||_t(this.primaryColor,{l:-10}),this.pie5=this.pie5||_t(this.secondaryColor,{l:-30}),this.pie6=this.pie6||_t(this.tertiaryColor,{l:-20}),this.pie7=this.pie7||_t(this.primaryColor,{h:60,l:-20}),this.pie8=this.pie8||_t(this.primaryColor,{h:-60,l:-40}),this.pie9=this.pie9||_t(this.primaryColor,{h:120,l:-40}),this.pie10=this.pie10||_t(this.primaryColor,{h:60,l:-40}),this.pie11=this.pie11||_t(this.primaryColor,{h:-90,l:-40}),this.pie12=this.pie12||_t(this.primaryColor,{h:120,l:-30}),this.pieTitleTextSize=this.pieTitleTextSize||"25px",this.pieTitleTextColor=this.pieTitleTextColor||this.taskTextDarkColor,this.pieSectionTextSize=this.pieSectionTextSize||"17px",this.pieSectionTextColor=this.pieSectionTextColor||this.textColor,this.pieLegendTextSize=this.pieLegendTextSize||"17px",this.pieLegendTextColor=this.pieLegendTextColor||this.taskTextDarkColor,this.pieStrokeColor=this.pieStrokeColor||"black",this.pieStrokeWidth=this.pieStrokeWidth||"2px",this.pieOpacity=this.pieOpacity||"0.7",this.requirementBackground=this.requirementBackground||this.primaryColor,this.requirementBorderColor=this.requirementBorderColor||this.primaryBorderColor,this.requirementBorderSize=this.requirementBorderSize||this.primaryBorderColor,this.requirementTextColor=this.requirementTextColor||this.primaryTextColor,this.relationColor=this.relationColor||this.lineColor,this.relationLabelBackground=this.relationLabelBackground||this.labelBackground,this.relationLabelColor=this.relationLabelColor||this.actorTextColor,this.git0=this.git0||this.primaryColor,this.git1=this.git1||this.secondaryColor,this.git2=this.git2||this.tertiaryColor,this.git3=this.git3||_t(this.primaryColor,{h:-30}),this.git4=this.git4||_t(this.primaryColor,{h:-60}),this.git5=this.git5||_t(this.primaryColor,{h:-90}),this.git6=this.git6||_t(this.primaryColor,{h:60}),this.git7=this.git7||_
t(this.primaryColor,{h:120}),this.darkMode?(this.git0=ae(this.git0,25),this.git1=ae(this.git1,25),this.git2=ae(this.git2,25),this.git3=ae(this.git3,25),this.git4=ae(this.git4,25),this.git5=ae(this.git5,25),this.git6=ae(this.git6,25),this.git7=ae(this.git7,25)):(this.git0=ge(this.git0,25),this.git1=ge(this.git1,25),this.git2=ge(this.git2,25),this.git3=ge(this.git3,25),this.git4=ge(this.git4,25),this.git5=ge(this.git5,25),this.git6=ge(this.git6,25),this.git7=ge(this.git7,25)),this.gitInv0=this.gitInv0||ge(Yt(this.git0),25),this.gitInv1=this.gitInv1||Yt(this.git1),this.gitInv2=this.gitInv2||Yt(this.git2),this.gitInv3=this.gitInv3||Yt(this.git3),this.gitInv4=this.gitInv4||Yt(this.git4),this.gitInv5=this.gitInv5||Yt(this.git5),this.gitInv6=this.gitInv6||Yt(this.git6),this.gitInv7=this.gitInv7||Yt(this.git7),this.gitBranchLabel0=this.gitBranchLabel0||Yt(this.labelTextColor),this.gitBranchLabel1=this.gitBranchLabel1||this.labelTextColor,this.gitBranchLabel2=this.gitBranchLabel2||this.labelTextColor,this.gitBranchLabel3=this.gitBranchLabel3||Yt(this.labelTextColor),this.gitBranchLabel4=this.gitBranchLabel4||this.labelTextColor,this.gitBranchLabel5=this.gitBranchLabel5||this.labelTextColor,this.gitBranchLabel6=this.gitBranchLabel6||this.labelTextColor,this.gitBranchLabel7=this.gitBranchLabel7||this.labelTextColor,this.tagLabelColor=this.tagLabelColor||this.primaryTextColor,this.tagLabelBackground=this.tagLabelBackground||this.primaryColor,this.tagLabelBorder=this.tagBorder||this.primaryBorderColor,this.tagLabelFontSize=this.tagLabelFontSize||"10px",this.commitLabelColor=this.commitLabelColor||this.secondaryTextColor,this.commitLabelBackground=this.commitLabelBackground||this.secondaryColor,this.commitLabelFontSize=this.commitLabelFontSize||"10px",this.attributeBackgroundColorOdd=this.attributeBackgroundColorOdd||qf,this.attributeBackgroundColorEven=this.attributeBackgroundColorEven||Vf}calculate(e){if(typeof e!="object"){this.updateColors();return}const 
r=Object.keys(e);r.forEach(n=>{this[n]=e[n]}),this.updateColors(),r.forEach(n=>{this[n]=e[n]})}}const yG=t=>{const e=new gG;return e.calculate(t),e};class mG{constructor(){this.background="#f4f4f4",this.primaryColor="#cde498",this.secondaryColor="#cdffb2",this.background="white",this.mainBkg="#cde498",this.secondBkg="#cdffb2",this.lineColor="green",this.border1="#13540c",this.border2="#6eaa49",this.arrowheadColor="green",this.fontFamily='"trebuchet ms", verdana, arial, sans-serif',this.fontSize="16px",this.tertiaryColor=ae("#cde498",10),this.primaryBorderColor=ln(this.primaryColor,this.darkMode),this.secondaryBorderColor=ln(this.secondaryColor,this.darkMode),this.tertiaryBorderColor=ln(this.tertiaryColor,this.darkMode),this.primaryTextColor=Yt(this.primaryColor),this.secondaryTextColor=Yt(this.secondaryColor),this.tertiaryTextColor=Yt(this.primaryColor),this.lineColor=Yt(this.background),this.textColor=Yt(this.background),this.THEME_COLOR_LIMIT=12,this.nodeBkg="calculated",this.nodeBorder="calculated",this.clusterBkg="calculated",this.clusterBorder="calculated",this.defaultLinkColor="calculated",this.titleColor="#333",this.edgeLabelBackground="#e8e8e8",this.actorBorder="calculated",this.actorBkg="calculated",this.actorTextColor="black",this.actorLineColor="grey",this.signalColor="#333",this.signalTextColor="#333",this.labelBoxBkgColor="calculated",this.labelBoxBorderColor="#326932",this.labelTextColor="calculated",this.loopTextColor="calculated",this.noteBorderColor="calculated",this.noteBkgColor="#fff5ad",this.noteTextColor="calculated",this.activationBorderColor="#666",this.activationBkgColor="#f4f4f4",this.sequenceNumberColor="white",this.sectionBkgColor="#6eaa49",this.altSectionBkgColor="white",this.sectionBkgColor2="#6eaa49",this.excludeBkgColor="#eeeeee",this.taskBorderColor="calculated",this.taskBkgColor="#487e3a",this.taskTextLightColor="white",this.taskTextColor="calculated",this.taskTextDarkColor="black",this.taskTextOutsideColor="calculated",this.taskText
ClickableColor="#003163",this.activeTaskBorderColor="calculated",this.activeTaskBkgColor="calculated",this.gridColor="lightgrey",this.doneTaskBkgColor="lightgrey",this.doneTaskBorderColor="grey",this.critBorderColor="#ff8888",this.critBkgColor="red",this.todayLineColor="red",this.personBorder="calculated",this.personBkg="calculated",this.labelColor="black",this.errorBkgColor="#552222",this.errorTextColor="#552222"}updateColors(){this.cScale0=this.cScale0||this.primaryColor,this.cScale1=this.cScale1||this.secondaryColor,this.cScale2=this.cScale2||this.tertiaryColor,this.cScale3=this.cScale3||_t(this.primaryColor,{h:30}),this.cScale4=this.cScale4||_t(this.primaryColor,{h:60}),this.cScale5=this.cScale5||_t(this.primaryColor,{h:90}),this.cScale6=this.cScale6||_t(this.primaryColor,{h:120}),this.cScale7=this.cScale7||_t(this.primaryColor,{h:150}),this.cScale8=this.cScale8||_t(this.primaryColor,{h:210}),this.cScale9=this.cScale9||_t(this.primaryColor,{h:270}),this.cScale10=this.cScale10||_t(this.primaryColor,{h:300}),this.cScale11=this.cScale11||_t(this.primaryColor,{h:330}),this["cScalePeer"+1]=this["cScalePeer"+1]||ge(this.secondaryColor,45),this["cScalePeer"+2]=this["cScalePeer"+2]||ge(this.tertiaryColor,40);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScale"+e]=ge(this["cScale"+e],10),this["cScalePeer"+e]=this["cScalePeer"+e]||ge(this["cScale"+e],25);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleInv"+e]=this["cScaleInv"+e]||_t(this["cScale"+e],{h:180});this.scaleLabelColor=this.scaleLabelColor!=="calculated"&&this.scaleLabelColor?this.scaleLabelColor:this.labelTextColor;for(let 
e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleLabel"+e]=this["cScaleLabel"+e]||this.scaleLabelColor;this.nodeBkg=this.mainBkg,this.nodeBorder=this.border1,this.clusterBkg=this.secondBkg,this.clusterBorder=this.border2,this.defaultLinkColor=this.lineColor,this.actorBorder=ge(this.mainBkg,20),this.actorBkg=this.mainBkg,this.labelBoxBkgColor=this.actorBkg,this.labelTextColor=this.actorTextColor,this.loopTextColor=this.actorTextColor,this.noteBorderColor=this.border2,this.noteTextColor=this.actorTextColor,this.taskBorderColor=this.border1,this.taskTextColor=this.taskTextLightColor,this.taskTextOutsideColor=this.taskTextDarkColor,this.activeTaskBorderColor=this.taskBorderColor,this.activeTaskBkgColor=this.mainBkg,this.transitionColor=this.transitionColor||this.lineColor,this.transitionLabelColor=this.transitionLabelColor||this.textColor,this.stateLabelColor=this.stateLabelColor||this.stateBkg||this.primaryTextColor,this.stateBkg=this.stateBkg||this.mainBkg,this.labelBackgroundColor=this.labelBackgroundColor||this.stateBkg,this.compositeBackground=this.compositeBackground||this.background||this.tertiaryColor,this.altBackground=this.altBackground||"#f0f0f0",this.compositeTitleBackground=this.compositeTitleBackground||this.mainBkg,this.compositeBorder=this.compositeBorder||this.nodeBorder,this.innerEndBackground=this.primaryBorderColor,this.specialStateColor=this.lineColor,this.errorBkgColor=this.errorBkgColor||this.tertiaryColor,this.errorTextColor=this.errorTextColor||this.tertiaryTextColor,this.transitionColor=this.transitionColor||this.lineColor,this.classText=this.primaryTextColor,this.fillType0=this.primaryColor,this.fillType1=this.secondaryColor,this.fillType2=_t(this.primaryColor,{h:64}),this.fillType3=_t(this.secondaryColor,{h:64}),this.fillType4=_t(this.primaryColor,{h:-64}),this.fillType5=_t(this.secondaryColor,{h:-64}),this.fillType6=_t(this.primaryColor,{h:128}),this.fillType7=_t(this.secondaryColor,{h:128}),this.pie1=this.pie1||this.primaryColor,this.pie2=this.
pie2||this.secondaryColor,this.pie3=this.pie3||this.tertiaryColor,this.pie4=this.pie4||_t(this.primaryColor,{l:-30}),this.pie5=this.pie5||_t(this.secondaryColor,{l:-30}),this.pie6=this.pie6||_t(this.tertiaryColor,{h:40,l:-40}),this.pie7=this.pie7||_t(this.primaryColor,{h:60,l:-10}),this.pie8=this.pie8||_t(this.primaryColor,{h:-60,l:-10}),this.pie9=this.pie9||_t(this.primaryColor,{h:120,l:0}),this.pie10=this.pie10||_t(this.primaryColor,{h:60,l:-50}),this.pie11=this.pie11||_t(this.primaryColor,{h:-60,l:-50}),this.pie12=this.pie12||_t(this.primaryColor,{h:120,l:-50}),this.pieTitleTextSize=this.pieTitleTextSize||"25px",this.pieTitleTextColor=this.pieTitleTextColor||this.taskTextDarkColor,this.pieSectionTextSize=this.pieSectionTextSize||"17px",this.pieSectionTextColor=this.pieSectionTextColor||this.textColor,this.pieLegendTextSize=this.pieLegendTextSize||"17px",this.pieLegendTextColor=this.pieLegendTextColor||this.taskTextDarkColor,this.pieStrokeColor=this.pieStrokeColor||"black",this.pieStrokeWidth=this.pieStrokeWidth||"2px",this.pieOpacity=this.pieOpacity||"0.7",this.requirementBackground=this.requirementBackground||this.primaryColor,this.requirementBorderColor=this.requirementBorderColor||this.primaryBorderColor,this.requirementBorderSize=this.requirementBorderSize||this.primaryBorderColor,this.requirementTextColor=this.requirementTextColor||this.primaryTextColor,this.relationColor=this.relationColor||this.lineColor,this.relationLabelBackground=this.relationLabelBackground||this.edgeLabelBackground,this.relationLabelColor=this.relationLabelColor||this.actorTextColor,this.git0=this.git0||this.primaryColor,this.git1=this.git1||this.secondaryColor,this.git2=this.git2||this.tertiaryColor,this.git3=this.git3||_t(this.primaryColor,{h:-30}),this.git4=this.git4||_t(this.primaryColor,{h:-60}),this.git5=this.git5||_t(this.primaryColor,{h:-90}),this.git6=this.git6||_t(this.primaryColor,{h:60}),this.git7=this.git7||_t(this.primaryColor,{h:120}),this.darkMode?(this.git0=ae(this.gi
t0,25),this.git1=ae(this.git1,25),this.git2=ae(this.git2,25),this.git3=ae(this.git3,25),this.git4=ae(this.git4,25),this.git5=ae(this.git5,25),this.git6=ae(this.git6,25),this.git7=ae(this.git7,25)):(this.git0=ge(this.git0,25),this.git1=ge(this.git1,25),this.git2=ge(this.git2,25),this.git3=ge(this.git3,25),this.git4=ge(this.git4,25),this.git5=ge(this.git5,25),this.git6=ge(this.git6,25),this.git7=ge(this.git7,25)),this.gitInv0=this.gitInv0||Yt(this.git0),this.gitInv1=this.gitInv1||Yt(this.git1),this.gitInv2=this.gitInv2||Yt(this.git2),this.gitInv3=this.gitInv3||Yt(this.git3),this.gitInv4=this.gitInv4||Yt(this.git4),this.gitInv5=this.gitInv5||Yt(this.git5),this.gitInv6=this.gitInv6||Yt(this.git6),this.gitInv7=this.gitInv7||Yt(this.git7),this.tagLabelColor=this.tagLabelColor||this.primaryTextColor,this.tagLabelBackground=this.tagLabelBackground||this.primaryColor,this.tagLabelBorder=this.tagBorder||this.primaryBorderColor,this.tagLabelFontSize=this.tagLabelFontSize||"10px",this.commitLabelColor=this.commitLabelColor||this.secondaryTextColor,this.commitLabelBackground=this.commitLabelBackground||this.secondaryColor,this.commitLabelFontSize=this.commitLabelFontSize||"10px",this.attributeBackgroundColorOdd=this.attributeBackgroundColorOdd||qf,this.attributeBackgroundColorEven=this.attributeBackgroundColorEven||Vf}calculate(e){if(typeof e!="object"){this.updateColors();return}const r=Object.keys(e);r.forEach(n=>{this[n]=e[n]}),this.updateColors(),r.forEach(n=>{this[n]=e[n]})}}const bG=t=>{const e=new mG;return e.calculate(t),e};class 
_G{constructor(){this.primaryColor="#eee",this.contrast="#707070",this.secondaryColor=ae(this.contrast,55),this.background="#ffffff",this.tertiaryColor=_t(this.primaryColor,{h:-160}),this.primaryBorderColor=ln(this.primaryColor,this.darkMode),this.secondaryBorderColor=ln(this.secondaryColor,this.darkMode),this.tertiaryBorderColor=ln(this.tertiaryColor,this.darkMode),this.primaryTextColor=Yt(this.primaryColor),this.secondaryTextColor=Yt(this.secondaryColor),this.tertiaryTextColor=Yt(this.tertiaryColor),this.lineColor=Yt(this.background),this.textColor=Yt(this.background),this.mainBkg="#eee",this.secondBkg="calculated",this.lineColor="#666",this.border1="#999",this.border2="calculated",this.note="#ffa",this.text="#333",this.critical="#d42",this.done="#bbb",this.arrowheadColor="#333333",this.fontFamily='"trebuchet ms", verdana, arial, sans-serif',this.fontSize="16px",this.THEME_COLOR_LIMIT=12,this.nodeBkg="calculated",this.nodeBorder="calculated",this.clusterBkg="calculated",this.clusterBorder="calculated",this.defaultLinkColor="calculated",this.titleColor="calculated",this.edgeLabelBackground="white",this.actorBorder="calculated",this.actorBkg="calculated",this.actorTextColor="calculated",this.actorLineColor="calculated",this.signalColor="calculated",this.signalTextColor="calculated",this.labelBoxBkgColor="calculated",this.labelBoxBorderColor="calculated",this.labelTextColor="calculated",this.loopTextColor="calculated",this.noteBorderColor="calculated",this.noteBkgColor="calculated",this.noteTextColor="calculated",this.activationBorderColor="#666",this.activationBkgColor="#f4f4f4",this.sequenceNumberColor="white",this.sectionBkgColor="calculated",this.altSectionBkgColor="white",this.sectionBkgColor2="calculated",this.excludeBkgColor="#eeeeee",this.taskBorderColor="calculated",this.taskBkgColor="calculated",this.taskTextLightColor="white",this.taskTextColor="calculated",this.taskTextDarkColor="calculated",this.taskTextOutsideColor="calculated",this.taskTextClickableCol
or="#003163",this.activeTaskBorderColor="calculated",this.activeTaskBkgColor="calculated",this.gridColor="calculated",this.doneTaskBkgColor="calculated",this.doneTaskBorderColor="calculated",this.critBkgColor="calculated",this.critBorderColor="calculated",this.todayLineColor="calculated",this.personBorder="calculated",this.personBkg="calculated",this.labelColor="black",this.errorBkgColor="#552222",this.errorTextColor="#552222"}updateColors(){this.secondBkg=ae(this.contrast,55),this.border2=this.contrast,this.cScale0=this.cScale0||"#555",this.cScale1=this.cScale1||"#F4F4F4",this.cScale2=this.cScale2||"#555",this.cScale3=this.cScale3||"#BBB",this.cScale4=this.cScale4||"#777",this.cScale5=this.cScale5||"#999",this.cScale6=this.cScale6||"#DDD",this.cScale7=this.cScale7||"#FFF",this.cScale8=this.cScale8||"#DDD",this.cScale9=this.cScale9||"#BBB",this.cScale10=this.cScale10||"#999",this.cScale11=this.cScale11||"#777";for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleInv"+e]=this["cScaleInv"+e]||Yt(this["cScale"+e]);for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this.darkMode?this["cScalePeer"+e]=this["cScalePeer"+e]||ae(this["cScale"+e],10):this["cScalePeer"+e]=this["cScalePeer"+e]||ge(this["cScale"+e],10);this.scaleLabelColor=this.scaleLabelColor||(this.darkMode?"black":this.labelTextColor),this.cScaleLabel0=this.cScaleLabel0||this.cScale1,this.cScaleLabel2=this.cScaleLabel2||this.cScale1;for(let 
e=0;e<this.THEME_COLOR_LIMIT;e++)this["cScaleLabel"+e]=this["cScaleLabel"+e]||this.scaleLabelColor;this.nodeBkg=this.mainBkg,this.nodeBorder=this.border1,this.clusterBkg=this.secondBkg,this.clusterBorder=this.border2,this.defaultLinkColor=this.lineColor,this.titleColor=this.text,this.actorBorder=ae(this.border1,23),this.actorBkg=this.mainBkg,this.actorTextColor=this.text,this.actorLineColor=this.lineColor,this.signalColor=this.text,this.signalTextColor=this.text,this.labelBoxBkgColor=this.actorBkg,this.labelBoxBorderColor=this.actorBorder,this.labelTextColor=this.text,this.loopTextColor=this.text,this.noteBorderColor="#999",this.noteBkgColor="#666",this.noteTextColor="#fff",this.sectionBkgColor=ae(this.contrast,30),this.sectionBkgColor2=ae(this.contrast,30),this.taskBorderColor=ge(this.contrast,10),this.taskBkgColor=this.contrast,this.taskTextColor=this.taskTextLightColor,this.taskTextDarkColor=this.text,this.taskTextOutsideColor=this.taskTextDarkColor,this.activeTaskBorderColor=this.taskBorderColor,this.activeTaskBkgColor=this.mainBkg,this.gridColor=ae(this.border1,30),this.doneTaskBkgColor=this.done,this.doneTaskBorderColor=this.lineColor,this.critBkgColor=this.critical,this.critBorderColor=ge(this.critBkgColor,10),this.todayLineColor=this.critBkgColor,this.transitionColor=this.transitionColor||"#000",this.transitionLabelColor=this.transitionLabelColor||this.textColor,this.stateLabelColor=this.stateLabelColor||this.stateBkg||this.primaryTextColor,this.stateBkg=this.stateBkg||this.mainBkg,this.labelBackgroundColor=this.labelBackgroundColor||this.stateBkg,this.compositeBackground=this.compositeBackground||this.background||this.tertiaryColor,this.altBackground=this.altBackground||"#f4f4f4",this.compositeTitleBackground=this.compositeTitleBackground||this.mainBkg,this.stateBorder=this.stateBorder||"#000",this.innerEndBackground=this.primaryBorderColor,this.specialStateColor="#222",this.errorBkgColor=this.errorBkgColor||this.tertiaryColor,this.errorTextColor=this.error
TextColor||this.tertiaryTextColor,this.classText=this.primaryTextColor,this.fillType0=this.primaryColor,this.fillType1=this.secondaryColor,this.fillType2=_t(this.primaryColor,{h:64}),this.fillType3=_t(this.secondaryColor,{h:64}),this.fillType4=_t(this.primaryColor,{h:-64}),this.fillType5=_t(this.secondaryColor,{h:-64}),this.fillType6=_t(this.primaryColor,{h:128}),this.fillType7=_t(this.secondaryColor,{h:128});for(let e=0;e<this.THEME_COLOR_LIMIT;e++)this["pie"+e]=this["cScale"+e];this.pie12=this.pie0,this.pieTitleTextSize=this.pieTitleTextSize||"25px",this.pieTitleTextColor=this.pieTitleTextColor||this.taskTextDarkColor,this.pieSectionTextSize=this.pieSectionTextSize||"17px",this.pieSectionTextColor=this.pieSectionTextColor||this.textColor,this.pieLegendTextSize=this.pieLegendTextSize||"17px",this.pieLegendTextColor=this.pieLegendTextColor||this.taskTextDarkColor,this.pieStrokeColor=this.pieStrokeColor||"black",this.pieStrokeWidth=this.pieStrokeWidth||"2px",this.pieOpacity=this.pieOpacity||"0.7",this.requirementBackground=this.requirementBackground||this.primaryColor,this.requirementBorderColor=this.requirementBorderColor||this.primaryBorderColor,this.requirementBorderSize=this.requirementBorderSize||this.primaryBorderColor,this.requirementTextColor=this.requirementTextColor||this.primaryTextColor,this.relationColor=this.relationColor||this.lineColor,this.relationLabelBackground=this.relationLabelBackground||this.edgeLabelBackground,this.relationLabelColor=this.relationLabelColor||this.actorTextColor,this.git0=ge(this.pie1,25)||this.primaryColor,this.git1=this.pie2||this.secondaryColor,this.git2=this.pie3||this.tertiaryColor,this.git3=this.pie4||_t(this.primaryColor,{h:-30}),this.git4=this.pie5||_t(this.primaryColor,{h:-60}),this.git5=this.pie6||_t(this.primaryColor,{h:-90}),this.git6=this.pie7||_t(this.primaryColor,{h:60}),this.git7=this.pie8||_t(this.primaryColor,{h:120}),this.gitInv0=this.gitInv0||Yt(this.git0),this.gitInv1=this.gitInv1||Yt(this.git1),this.gitInv
2=this.gitInv2||Yt(this.git2),this.gitInv3=this.gitInv3||Yt(this.git3),this.gitInv4=this.gitInv4||Yt(this.git4),this.gitInv5=this.gitInv5||Yt(this.git5),this.gitInv6=this.gitInv6||Yt(this.git6),this.gitInv7=this.gitInv7||Yt(this.git7),this.branchLabelColor=this.branchLabelColor||this.labelTextColor,this.gitBranchLabel0=this.branchLabelColor,this.gitBranchLabel1="white",this.gitBranchLabel2=this.branchLabelColor,this.gitBranchLabel3="white",this.gitBranchLabel4=this.branchLabelColor,this.gitBranchLabel5=this.branchLabelColor,this.gitBranchLabel6=this.branchLabelColor,this.gitBranchLabel7=this.branchLabelColor,this.tagLabelColor=this.tagLabelColor||this.primaryTextColor,this.tagLabelBackground=this.tagLabelBackground||this.primaryColor,this.tagLabelBorder=this.tagBorder||this.primaryBorderColor,this.tagLabelFontSize=this.tagLabelFontSize||"10px",this.commitLabelColor=this.commitLabelColor||this.secondaryTextColor,this.commitLabelBackground=this.commitLabelBackground||this.secondaryColor,this.commitLabelFontSize=this.commitLabelFontSize||"10px",this.attributeBackgroundColorOdd=this.attributeBackgroundColorOdd||qf,this.attributeBackgroundColorEven=this.attributeBackgroundColorEven||Vf}calculate(e){if(typeof e!="object"){this.updateColors();return}const r=Object.keys(e);r.forEach(n=>{this[n]=e[n]}),this.updateColors(),r.forEach(n=>{this[n]=e[n]})}}const aa={base:{getThemeVariables:fG},dark:{getThemeVariables:pG},default:{getThemeVariables:yG},forest:{getThemeVariables:bG},neutral:{getThemeVariables:t=>{const e=new _G;return e.calculate(t),e}}},Xa={theme:"default",themeVariables:aa.default.getThemeVariables(),themeCSS:void 0,maxTextSize:5e4,darkMode:!1,fontFamily:'"trebuchet ms", verdana, arial, sans-serif;',logLevel:5,securityLevel:"strict",startOnLoad:!0,arrowMarkerAbsolute:!1,secure:["secure","securityLevel","startOnLoad","maxTextSize"],deterministicIds:!1,deterministicIDSeed:void 
0,flowchart:{diagramPadding:8,htmlLabels:!0,nodeSpacing:50,rankSpacing:50,curve:"basis",padding:15,useMaxWidth:!0,defaultRenderer:"dagre-wrapper"},sequence:{hideUnusedParticipants:!1,activationWidth:10,diagramMarginX:50,diagramMarginY:10,actorMargin:50,width:150,height:65,boxMargin:10,boxTextMargin:5,noteMargin:10,messageMargin:35,messageAlign:"center",mirrorActors:!0,forceMenus:!1,bottomMarginAdj:1,useMaxWidth:!0,rightAngles:!1,showSequenceNumbers:!1,actorFontSize:14,actorFontFamily:'"Open Sans", sans-serif',actorFontWeight:400,noteFontSize:14,noteFontFamily:'"trebuchet ms", verdana, arial, sans-serif',noteFontWeight:400,noteAlign:"center",messageFontSize:16,messageFontFamily:'"trebuchet ms", verdana, arial, sans-serif',messageFontWeight:400,wrap:!1,wrapPadding:10,labelBoxWidth:50,labelBoxHeight:20,messageFont:function(){return{fontFamily:this.messageFontFamily,fontSize:this.messageFontSize,fontWeight:this.messageFontWeight}},noteFont:function(){return{fontFamily:this.noteFontFamily,fontSize:this.noteFontSize,fontWeight:this.noteFontWeight}},actorFont:function(){return{fontFamily:this.actorFontFamily,fontSize:this.actorFontSize,fontWeight:this.actorFontWeight}}},gantt:{titleTopMargin:25,barHeight:20,barGap:4,topPadding:50,rightPadding:75,leftPadding:75,gridLineStartPadding:35,fontSize:11,sectionFontSize:11,numberSectionStyles:4,axisFormat:"%Y-%m-%d",useMaxWidth:!0,topAxis:!1,useWidth:void 0},journey:{diagramMarginX:50,diagramMarginY:10,leftMargin:150,width:150,height:50,boxMargin:10,boxTextMargin:5,noteMargin:10,messageMargin:35,messageAlign:"center",bottomMarginAdj:1,useMaxWidth:!0,rightAngles:!1,taskFontSize:14,taskFontFamily:'"Open Sans", 
sans-serif',taskMargin:50,activationWidth:10,textPlacement:"fo",actorColours:["#8FBC8F","#7CFC00","#00FFFF","#20B2AA","#B0E0E6","#FFFFE0"],sectionFills:["#191970","#8B008B","#4B0082","#2F4F4F","#800000","#8B4513","#00008B"],sectionColours:["#fff"]},class:{arrowMarkerAbsolute:!1,dividerMargin:10,padding:5,textHeight:10,useMaxWidth:!0,defaultRenderer:"dagre-wrapper"},state:{dividerMargin:10,sizeUnit:5,padding:8,textHeight:10,titleShift:-15,noteMargin:10,forkWidth:70,forkHeight:7,miniPadding:2,fontSizeFactor:5.02,fontSize:24,labelHeight:16,edgeLengthFactor:"20",compositTitleSize:35,radius:5,useMaxWidth:!0,defaultRenderer:"dagre-wrapper"},er:{diagramPadding:20,layoutDirection:"TB",minEntityWidth:100,minEntityHeight:75,entityPadding:15,stroke:"gray",fill:"honeydew",fontSize:12,useMaxWidth:!0},pie:{useWidth:void 0,useMaxWidth:!0},requirement:{useWidth:void 0,useMaxWidth:!0,rect_fill:"#f9f9f9",text_color:"#333",rect_border_size:"0.5px",rect_border_color:"#bbb",rect_min_width:200,rect_min_height:200,fontSize:14,rect_padding:10,line_height:20},gitGraph:{diagramPadding:8,nodeLabel:{width:75,height:100,x:-25,y:0},mainBranchName:"main",mainBranchOrder:0,showCommitLabel:!0,showBranches:!0,rotateCommitLabel:!0},c4:{useWidth:void 0,diagramMarginX:50,diagramMarginY:10,c4ShapeMargin:50,c4ShapePadding:20,width:216,height:60,boxMargin:10,useMaxWidth:!0,c4ShapeInRow:4,nextLinePaddingX:0,c4BoundaryInRow:2,personFontSize:14,personFontFamily:'"Open Sans", sans-serif',personFontWeight:"normal",external_personFontSize:14,external_personFontFamily:'"Open Sans", sans-serif',external_personFontWeight:"normal",systemFontSize:14,systemFontFamily:'"Open Sans", sans-serif',systemFontWeight:"normal",external_systemFontSize:14,external_systemFontFamily:'"Open Sans", sans-serif',external_systemFontWeight:"normal",system_dbFontSize:14,system_dbFontFamily:'"Open Sans", sans-serif',system_dbFontWeight:"normal",external_system_dbFontSize:14,external_system_dbFontFamily:'"Open Sans", 
sans-serif',external_system_dbFontWeight:"normal",system_queueFontSize:14,system_queueFontFamily:'"Open Sans", sans-serif',system_queueFontWeight:"normal",external_system_queueFontSize:14,external_system_queueFontFamily:'"Open Sans", sans-serif',external_system_queueFontWeight:"normal",boundaryFontSize:14,boundaryFontFamily:'"Open Sans", sans-serif',boundaryFontWeight:"normal",messageFontSize:12,messageFontFamily:'"Open Sans", sans-serif',messageFontWeight:"normal",containerFontSize:14,containerFontFamily:'"Open Sans", sans-serif',containerFontWeight:"normal",external_containerFontSize:14,external_containerFontFamily:'"Open Sans", sans-serif',external_containerFontWeight:"normal",container_dbFontSize:14,container_dbFontFamily:'"Open Sans", sans-serif',container_dbFontWeight:"normal",external_container_dbFontSize:14,external_container_dbFontFamily:'"Open Sans", sans-serif',external_container_dbFontWeight:"normal",container_queueFontSize:14,container_queueFontFamily:'"Open Sans", sans-serif',container_queueFontWeight:"normal",external_container_queueFontSize:14,external_container_queueFontFamily:'"Open Sans", sans-serif',external_container_queueFontWeight:"normal",componentFontSize:14,componentFontFamily:'"Open Sans", sans-serif',componentFontWeight:"normal",external_componentFontSize:14,external_componentFontFamily:'"Open Sans", sans-serif',external_componentFontWeight:"normal",component_dbFontSize:14,component_dbFontFamily:'"Open Sans", sans-serif',component_dbFontWeight:"normal",external_component_dbFontSize:14,external_component_dbFontFamily:'"Open Sans", sans-serif',external_component_dbFontWeight:"normal",component_queueFontSize:14,component_queueFontFamily:'"Open Sans", sans-serif',component_queueFontWeight:"normal",external_component_queueFontSize:14,external_component_queueFontFamily:'"Open Sans", 
sans-serif',external_component_queueFontWeight:"normal",wrap:!0,wrapPadding:10,personFont:function(){return{fontFamily:this.personFontFamily,fontSize:this.personFontSize,fontWeight:this.personFontWeight}},external_personFont:function(){return{fontFamily:this.external_personFontFamily,fontSize:this.external_personFontSize,fontWeight:this.external_personFontWeight}},systemFont:function(){return{fontFamily:this.systemFontFamily,fontSize:this.systemFontSize,fontWeight:this.systemFontWeight}},external_systemFont:function(){return{fontFamily:this.external_systemFontFamily,fontSize:this.external_systemFontSize,fontWeight:this.external_systemFontWeight}},system_dbFont:function(){return{fontFamily:this.system_dbFontFamily,fontSize:this.system_dbFontSize,fontWeight:this.system_dbFontWeight}},external_system_dbFont:function(){return{fontFamily:this.external_system_dbFontFamily,fontSize:this.external_system_dbFontSize,fontWeight:this.external_system_dbFontWeight}},system_queueFont:function(){return{fontFamily:this.system_queueFontFamily,fontSize:this.system_queueFontSize,fontWeight:this.system_queueFontWeight}},external_system_queueFont:function(){return{fontFamily:this.external_system_queueFontFamily,fontSize:this.external_system_queueFontSize,fontWeight:this.external_system_queueFontWeight}},containerFont:function(){return{fontFamily:this.containerFontFamily,fontSize:this.containerFontSize,fontWeight:this.containerFontWeight}},external_containerFont:function(){return{fontFamily:this.external_containerFontFamily,fontSize:this.external_containerFontSize,fontWeight:this.external_containerFontWeight}},container_dbFont:function(){return{fontFamily:this.container_dbFontFamily,fontSize:this.container_dbFontSize,fontWeight:this.container_dbFontWeight}},external_container_dbFont:function(){return{fontFamily:this.external_container_dbFontFamily,fontSize:this.external_container_dbFontSize,fontWeight:this.external_container_dbFontWeight}},container_queueFont:function(){return{fontFamily:
this.container_queueFontFamily,fontSize:this.container_queueFontSize,fontWeight:this.container_queueFontWeight}},external_container_queueFont:function(){return{fontFamily:this.external_container_queueFontFamily,fontSize:this.external_container_queueFontSize,fontWeight:this.external_container_queueFontWeight}},componentFont:function(){return{fontFamily:this.componentFontFamily,fontSize:this.componentFontSize,fontWeight:this.componentFontWeight}},external_componentFont:function(){return{fontFamily:this.external_componentFontFamily,fontSize:this.external_componentFontSize,fontWeight:this.external_componentFontWeight}},component_dbFont:function(){return{fontFamily:this.component_dbFontFamily,fontSize:this.component_dbFontSize,fontWeight:this.component_dbFontWeight}},external_component_dbFont:function(){return{fontFamily:this.external_component_dbFontFamily,fontSize:this.external_component_dbFontSize,fontWeight:this.external_component_dbFontWeight}},component_queueFont:function(){return{fontFamily:this.component_queueFontFamily,fontSize:this.component_queueFontSize,fontWeight:this.component_queueFontWeight}},external_component_queueFont:function(){return{fontFamily:this.external_component_queueFontFamily,fontSize:this.external_component_queueFontSize,fontWeight:this.external_component_queueFontWeight}},boundaryFont:function(){return{fontFamily:this.boundaryFontFamily,fontSize:this.boundaryFontSize,fontWeight:this.boundaryFontWeight}},messageFont:function(){return{fontFamily:this.messageFontFamily,fontSize:this.messageFontSize,fontWeight:this.messageFontWeight}},person_bg_color:"#08427B",person_border_color:"#073B6F",external_person_bg_color:"#686868",external_person_border_color:"#8A8A8A",system_bg_color:"#1168BD",system_border_color:"#3C7FC0",system_db_bg_color:"#1168BD",system_db_border_color:"#3C7FC0",system_queue_bg_color:"#1168BD",system_queue_border_color:"#3C7FC0",external_system_bg_color:"#999999",external_system_border_color:"#8A8A8A",external_system_db_bg_color
:"#999999",external_system_db_border_color:"#8A8A8A",external_system_queue_bg_color:"#999999",external_system_queue_border_color:"#8A8A8A",container_bg_color:"#438DD5",container_border_color:"#3C7FC0",container_db_bg_color:"#438DD5",container_db_border_color:"#3C7FC0",container_queue_bg_color:"#438DD5",container_queue_border_color:"#3C7FC0",external_container_bg_color:"#B3B3B3",external_container_border_color:"#A6A6A6",external_container_db_bg_color:"#B3B3B3",external_container_db_border_color:"#A6A6A6",external_container_queue_bg_color:"#B3B3B3",external_container_queue_border_color:"#A6A6A6",component_bg_color:"#85BBF0",component_border_color:"#78A8D8",component_db_bg_color:"#85BBF0",component_db_border_color:"#78A8D8",component_queue_bg_color:"#85BBF0",component_queue_border_color:"#78A8D8",external_component_bg_color:"#CCCCCC",external_component_border_color:"#BFBFBF",external_component_db_bg_color:"#CCCCCC",external_component_db_border_color:"#BFBFBF",external_component_queue_bg_color:"#CCCCCC",external_component_queue_border_color:"#BFBFBF"},mindmap:{useMaxWidth:!0,padding:10,maxNodeWidth:200},fontSize:16};Xa.class&&(Xa.class.arrowMarkerAbsolute=Xa.arrowMarkerAbsolute),Xa.gitGraph&&(Xa.gitGraph.arrowMarkerAbsolute=Xa.arrowMarkerAbsolute);const jk=(t,e="")=>Object.keys(t).reduce((r,n)=>Array.isArray(t[n])?r:typeof t[n]=="object"&&t[n]!==null?[...r,e+n,...jk(t[n],"")]:[...r,e+n],[]),vG=jk(Xa,""),xG=/[%]{2}[{]\s*(?:(?:(\w+)\s*:|(\w+))\s*(?:(?:(\w+))|((?:(?![}][%]{2}).|\r?\n)*))?\s*)(?:[}][%]{2})?/gi,kG=/\s*%%.*\n/gm,zf={},Xp=function(t,e){t=t.replace(xG,"").replace(kG,` -`);for(const[r,{detector:n}]of Object.entries(zf))if(n(t,e))return r;throw new Error(`No diagram type detected for text: ${t}`)},$k=(t,e,r)=>{if(zf[t])throw new Error(`Detector with key ${t} already exists`);zf[t]={detector:e,loader:r},H.debug(`Detector with key ${t} added${r?" 
with loader":""}`)},wG=t=>zf[t].loader,fr=function(t,e,r){const{depth:n,clobber:i}=Object.assign({depth:2,clobber:!1},r);return Array.isArray(e)&&!Array.isArray(t)?(e.forEach(a=>fr(t,a,r)),t):Array.isArray(e)&&Array.isArray(t)?(e.forEach(a=>{t.indexOf(a)===-1&&t.push(a)}),t):typeof t>"u"||n<=0?t!=null&&typeof t=="object"&&typeof e=="object"?Object.assign(t,e):e:(typeof e<"u"&&typeof t=="object"&&typeof e=="object"&&Object.keys(e).forEach(a=>{typeof e[a]=="object"&&(t[a]===void 0||typeof t[a]=="object")?(t[a]===void 0&&(t[a]=Array.isArray(e[a])?[]:{}),t[a]=fr(t[a],e[a],{depth:n-1,clobber:i})):(i||typeof t[a]!="object"&&typeof e[a]!="object")&&(t[a]=e[a])}),t)};var TG=typeof jr=="object"&&jr&&jr.Object===Object&&jr,Xk=TG,EG=Xk,CG=typeof self=="object"&&self&&self.Object===Object&&self,SG=EG||CG||Function("return this")(),si=SG,AG=si,MG=AG.Symbol,zo=MG,Kk=zo,Zk=Object.prototype,LG=Zk.hasOwnProperty,RG=Zk.toString,Ac=Kk?Kk.toStringTag:void 0;function IG(t){var e=LG.call(t,Ac),r=t[Ac];try{t[Ac]=void 0;var n=!0}catch{}var i=RG.call(t);return n&&(e?t[Ac]=r:delete t[Ac]),i}var NG=IG,BG=Object.prototype,DG=BG.toString;function OG(t){return DG.call(t)}var FG=OG,Qk=zo,PG=NG,qG=FG,VG="[object Null]",zG="[object Undefined]",Jk=Qk?Qk.toStringTag:void 0;function YG(t){return t==null?t===void 0?zG:VG:Jk&&Jk in Object(t)?PG(t):qG(t)}var Ps=YG;function UG(t){var e=typeof t;return t!=null&&(e=="object"||e=="function")}var Vn=UG,WG=Ps,HG=Vn,GG="[object AsyncFunction]",jG="[object Function]",$G="[object GeneratorFunction]",XG="[object Proxy]";function KG(t){if(!HG(t))return!1;var e=WG(t);return e==jG||e==$G||e==GG||e==XG}var Yo=KG,ZG=si,QG=ZG["__core-js_shared__"],JG=QG,Kp=JG,tw=function(){var t=/[^.]+$/.exec(Kp&&Kp.keys&&Kp.keys.IE_PROTO||"");return t?"Symbol(src)_1."+t:""}();function tj(t){return!!tw&&tw in t}var ej=tj,rj=Function.prototype,nj=rj.toString;function ij(t){if(t!=null){try{return nj.call(t)}catch{}try{return t+""}catch{}}return""}var 
ew=ij,aj=Yo,sj=ej,oj=Vn,lj=ew,cj=/[\\^$.*+?()[\]{}|]/g,uj=/^\[object .+?Constructor\]$/,hj=Function.prototype,fj=Object.prototype,dj=hj.toString,pj=fj.hasOwnProperty,gj=RegExp("^"+dj.call(pj).replace(cj,"\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g,"$1.*?")+"$");function yj(t){if(!oj(t)||sj(t))return!1;var e=aj(t)?gj:uj;return e.test(lj(t))}var mj=yj;function bj(t,e){return t==null?void 0:t[e]}var _j=bj,vj=mj,xj=_j;function kj(t,e){var r=xj(t,e);return vj(r)?r:void 0}var qs=kj,wj=qs,Tj=wj(Object,"create"),Yf=Tj,rw=Yf;function Ej(){this.__data__=rw?rw(null):{},this.size=0}var Cj=Ej;function Sj(t){var e=this.has(t)&&delete this.__data__[t];return this.size-=e?1:0,e}var Aj=Sj,Mj=Yf,Lj="__lodash_hash_undefined__",Rj=Object.prototype,Ij=Rj.hasOwnProperty;function Nj(t){var e=this.__data__;if(Mj){var r=e[t];return r===Lj?void 0:r}return Ij.call(e,t)?e[t]:void 0}var Bj=Nj,Dj=Yf,Oj=Object.prototype,Fj=Oj.hasOwnProperty;function Pj(t){var e=this.__data__;return Dj?e[t]!==void 0:Fj.call(e,t)}var qj=Pj,Vj=Yf,zj="__lodash_hash_undefined__";function Yj(t,e){var r=this.__data__;return this.size+=this.has(t)?0:1,r[t]=Vj&&e===void 0?zj:e,this}var Uj=Yj,Wj=Cj,Hj=Aj,Gj=Bj,jj=qj,$j=Uj;function Uo(t){var e=-1,r=t==null?0:t.length;for(this.clear();++e<r;){var n=t[e];this.set(n[0],n[1])}}Uo.prototype.clear=Wj,Uo.prototype.delete=Hj,Uo.prototype.get=Gj,Uo.prototype.has=jj,Uo.prototype.set=$j;var Xj=Uo;function Kj(){this.__data__=[],this.size=0}var Zj=Kj;function Qj(t,e){return t===e||t!==t&&e!==e}var Wo=Qj,Jj=Wo;function t$(t,e){for(var r=t.length;r--;)if(Jj(t[r][0],e))return r;return-1}var Uf=t$,e$=Uf,r$=Array.prototype,n$=r$.splice;function i$(t){var e=this.__data__,r=e$(e,t);if(r<0)return!1;var n=e.length-1;return r==n?e.pop():n$.call(e,r,1),--this.size,!0}var a$=i$,s$=Uf;function o$(t){var e=this.__data__,r=s$(e,t);return r<0?void 0:e[r][1]}var l$=o$,c$=Uf;function u$(t){return c$(this.__data__,t)>-1}var h$=u$,f$=Uf;function d$(t,e){var 
r=this.__data__,n=f$(r,t);return n<0?(++this.size,r.push([t,e])):r[n][1]=e,this}var p$=d$,g$=Zj,y$=a$,m$=l$,b$=h$,_$=p$;function Ho(t){var e=-1,r=t==null?0:t.length;for(this.clear();++e<r;){var n=t[e];this.set(n[0],n[1])}}Ho.prototype.clear=g$,Ho.prototype.delete=y$,Ho.prototype.get=m$,Ho.prototype.has=b$,Ho.prototype.set=_$;var Wf=Ho,v$=qs,x$=si,k$=v$(x$,"Map"),Zp=k$,nw=Xj,w$=Wf,T$=Zp;function E$(){this.size=0,this.__data__={hash:new nw,map:new(T$||w$),string:new nw}}var C$=E$;function S$(t){var e=typeof t;return e=="string"||e=="number"||e=="symbol"||e=="boolean"?t!=="__proto__":t===null}var A$=S$,M$=A$;function L$(t,e){var r=t.__data__;return M$(e)?r[typeof e=="string"?"string":"hash"]:r.map}var Hf=L$,R$=Hf;function I$(t){var e=R$(this,t).delete(t);return this.size-=e?1:0,e}var N$=I$,B$=Hf;function D$(t){return B$(this,t).get(t)}var O$=D$,F$=Hf;function P$(t){return F$(this,t).has(t)}var q$=P$,V$=Hf;function z$(t,e){var r=V$(this,t),n=r.size;return r.set(t,e),this.size+=r.size==n?0:1,this}var Y$=z$,U$=C$,W$=N$,H$=O$,G$=q$,j$=Y$;function Go(t){var e=-1,r=t==null?0:t.length;for(this.clear();++e<r;){var n=t[e];this.set(n[0],n[1])}}Go.prototype.clear=U$,Go.prototype.delete=W$,Go.prototype.get=H$,Go.prototype.has=G$,Go.prototype.set=j$;var Qp=Go,iw=Qp,$$="Expected a function";function Jp(t,e){if(typeof t!="function"||e!=null&&typeof e!="function")throw new TypeError($$);var r=function(){var n=arguments,i=e?e.apply(this,n):n[0],a=r.cache;if(a.has(i))return a.get(i);var s=t.apply(this,n);return r.cache=a.set(i,s)||a,s};return r.cache=new(Jp.Cache||iw),r}Jp.Cache=iw;var Gf=Jp;const 
X$={curveBasis:Os,curveBasisClosed:ik,curveBasisOpen:sk,curveLinear:yn,curveLinearClosed:fk,curveMonotoneX:bk,curveMonotoneY:_k,curveNatural:kk,curveStep:wk,curveStepAfter:Ek,curveStepBefore:Tk},tg=/[%]{2}[{]\s*(?:(?:(\w+)\s*:|(\w+))\s*(?:(?:(\w+))|((?:(?![}][%]{2}).|\r?\n)*))?\s*)(?:[}][%]{2})?/gi,K$=/\s*(?:(?:(\w+)(?=:):|(\w+))\s*(?:(?:(\w+))|((?:(?![}][%]{2}).|\r?\n)*))?\s*)(?:[}][%]{2})?/gi,Z$=function(t,e){const r=aw(t,/(?:init\b)|(?:initialize\b)/);let n={};if(Array.isArray(r)){const i=r.map(a=>a.args);Vs(i),n=fr(n,[...i])}else n=r.args;if(n){let i=Xp(t,e);["config"].forEach(a=>{typeof n[a]<"u"&&(i==="flowchart-v2"&&(i="flowchart"),n[i]=n[a],delete n[a])})}return n},aw=function(t,e=null){try{const r=new RegExp(`[%]{2}(?![{]${K$.source})(?=[}][%]{2}).* -`,"ig");t=t.trim().replace(r,"").replace(/'/gm,'"'),H.debug(`Detecting diagram directive${e!==null?" type:"+e:""} based on the text:${t}`);let n;const i=[];for(;(n=tg.exec(t))!==null;)if(n.index===tg.lastIndex&&tg.lastIndex++,n&&!e||e&&n[1]&&n[1].match(e)||e&&n[2]&&n[2].match(e)){const a=n[1]?n[1]:n[2],s=n[3]?n[3].trim():n[4]?JSON.parse(n[4].trim()):null;i.push({type:a,args:s})}return i.length===0&&i.push({type:t,args:null}),i.length===1?i[0]:i}catch(r){return H.error(`ERROR: ${r.message} - Unable to parse directive - ${e!==null?" 
type:"+e:""} based on the text:${t}`),{type:null,args:null}}},Q$=function(t,e){for(let r=0;r<e.length;r++)if(e[r].match(t))return r;return-1},Ni=(t,e)=>{if(!t)return e;const r=`curve${t.charAt(0).toUpperCase()+t.slice(1)}`;return X$[r]||e},J$=(t,e)=>{const r=t.trim();if(r)return e.securityLevel!=="loose"?ki(r):r},tX=(t,...e)=>{const r=t.split("."),n=r.length-1,i=r[n];let a=window;for(let s=0;s<n;s++)if(a=a[r[s]],!a)return;a[i](...e)},Mc=(t,e)=>t&&e?Math.sqrt(Math.pow(e.x-t.x,2)+Math.pow(e.y-t.y,2)):0,eX=t=>{let e,r=0;t.forEach(a=>{r+=Mc(a,e),e=a});let n=r/2,i;return e=void 0,t.forEach(a=>{if(e&&!i){const s=Mc(a,e);if(s<n)n-=s;else{const o=n/s;o<=0&&(i=e),o>=1&&(i={x:a.x,y:a.y}),o>0&&o<1&&(i={x:(1-o)*e.x+o*a.x,y:(1-o)*e.y+o*a.y})}}e=a}),i},rX=t=>t.length===1?t[0]:eX(t),nX=(t,e,r)=>{let n;H.info("our points",e),e[0]!==r&&(e=e.reverse()),e.forEach(h=>{totalDistance+=Mc(h,n),n=h});let a=25,s;n=void 0,e.forEach(h=>{if(n&&!s){const d=Mc(h,n);if(d<a)a-=d;else{const f=a/d;f<=0&&(s=n),f>=1&&(s={x:h.x,y:h.y}),f>0&&f<1&&(s={x:(1-f)*n.x+f*h.x,y:(1-f)*n.y+f*h.y})}}n=h});const o=t?10:5,l=Math.atan2(e[0].y-s.y,e[0].x-s.x),u={x:0,y:0};return u.x=Math.sin(l)*o+(e[0].x+s.x)/2,u.y=-Math.cos(l)*o+(e[0].y+s.y)/2,u},iX=(t,e,r)=>{let n=JSON.parse(JSON.stringify(r)),i;H.info("our points",n),e!=="start_left"&&e!=="start_right"&&(n=n.reverse()),n.forEach(d=>{i=d});let s=25+t,o;i=void 0,n.forEach(d=>{if(i&&!o){const f=Mc(d,i);if(f<s)s-=f;else{const p=s/f;p<=0&&(o=i),p>=1&&(o={x:d.x,y:d.y}),p>0&&p<1&&(o={x:(1-p)*i.x+p*d.x,y:(1-p)*i.y+p*d.y})}}i=d});const l=10+t*.5,u=Math.atan2(n[0].y-o.y,n[0].x-o.x),h={x:0,y:0};return h.x=Math.sin(u)*l+(n[0].x+o.x)/2,h.y=-Math.cos(u)*l+(n[0].y+o.y)/2,e==="start_left"&&(h.x=Math.sin(u+Math.PI)*l+(n[0].x+o.x)/2,h.y=-Math.cos(u+Math.PI)*l+(n[0].y+o.y)/2),e==="end_right"&&(h.x=Math.sin(u-Math.PI)*l+(n[0].x+o.x)/2-5,h.y=-Math.cos(u-Math.PI)*l+(n[0].y+o.y)/2-5),e==="end_left"&&(h.x=Math.sin(u)*l+(n[0].x+o.x)/2-5,h.y=-Math.cos(u)*l+(n[0].y+o.y)/2-5),h},Ka=t=>{let 
e="",r="";for(let n=0;n<t.length;n++)typeof t[n]<"u"&&(t[n].startsWith("color:")||t[n].startsWith("text-align:")?r=r+t[n]+";":e=e+t[n]+";");return{style:e,labelStyle:r}};let sw=0;const ow=()=>(sw++,"id-"+Math.random().toString(36).substr(2,12)+"-"+sw);function aX(t){let e="";const r="0123456789abcdef",n=r.length;for(let i=0;i<t;i++)e+=r.charAt(Math.floor(Math.random()*n));return e}const lw=t=>aX(t.length),sX=function(){return{x:0,y:0,fill:void 0,anchor:"start",style:"#666",width:100,height:100,textMargin:0,rx:0,ry:0,valign:void 0}},oX=function(t,e){const r=e.text.replace(pe.lineBreakRegex," "),n=t.append("text");n.attr("x",e.x),n.attr("y",e.y),n.style("text-anchor",e.anchor),n.style("font-family",e.fontFamily),n.style("font-size",e.fontSize),n.style("font-weight",e.fontWeight),n.attr("fill",e.fill),typeof e.class<"u"&&n.attr("class",e.class);const i=n.append("tspan");return i.attr("x",e.x+e.textMargin*2),i.attr("fill",e.fill),i.text(r),n},cw=Gf((t,e,r)=>{if(!t||(r=Object.assign({fontSize:12,fontWeight:400,fontFamily:"Arial",joinWith:"<br/>"},r),pe.lineBreakRegex.test(t)))return t;const n=t.split(" "),i=[];let a="";return n.forEach((s,o)=>{const l=Bi(`${s} `,r),u=Bi(a,r);if(l>e){const{hyphenatedStrings:f,remainingWord:p}=lX(s,e,"-",r);i.push(a,...f),a=p}else u+l>=e?(i.push(a),a=s):a=[a,s].filter(Boolean).join(" ");o+1===n.length&&i.push(a)}),i.filter(s=>s!=="").join(r.joinWith)},(t,e,r)=>`${t}${e}${r.fontSize}${r.fontWeight}${r.fontFamily}${r.joinWith}`),lX=Gf((t,e,r="-",n)=>{n=Object.assign({fontSize:12,fontWeight:400,fontFamily:"Arial",margin:0},n);const i=t.split(""),a=[];let s="";return i.forEach((o,l)=>{const u=`${s}${o}`;if(Bi(u,n)>=e){const d=l+1,f=i.length===d,p=`${u}${r}`;a.push(f?u:p),s=""}else s=u}),{hyphenatedStrings:a,remainingWord:s}},(t,e,r="-",n)=>`${t}${e}${r}${n.fontSize}${n.fontWeight}${n.fontFamily}`),eg=function(t,e){return e=Object.assign({fontSize:12,fontWeight:400,fontFamily:"Arial",margin:15},e),rg(t,e).height},Bi=function(t,e){return 
e=Object.assign({fontSize:12,fontWeight:400,fontFamily:"Arial"},e),rg(t,e).width},rg=Gf(function(t,e){e=Object.assign({fontSize:12,fontWeight:400,fontFamily:"Arial"},e);const{fontSize:r,fontFamily:n,fontWeight:i}=e;if(!t)return{width:0,height:0};const a=["sans-serif",n],s=t.split(pe.lineBreakRegex),o=[],l=St("body");if(!l.remove)return{width:0,height:0,lineHeight:0};const u=l.append("svg");for(const d of a){let f=0;const p={width:0,height:0,lineHeight:0};for(const m of s){const _=sX();_.text=m;const y=oX(u,_).style("font-size",r).style("font-weight",i).style("font-family",d),b=(y._groups||y)[0][0].getBBox();p.width=Math.round(Math.max(p.width,b.width)),f=Math.round(b.height),p.height+=f,p.lineHeight=Math.round(Math.max(p.lineHeight,f))}o.push(p)}u.remove();const h=isNaN(o[1].height)||isNaN(o[1].width)||isNaN(o[1].lineHeight)||o[0].height>o[1].height&&o[0].width>o[1].width&&o[0].lineHeight>o[1].lineHeight?0:1;return o[h]},(t,e)=>`${t}${e.fontSize}${e.fontWeight}${e.fontFamily}`),cX=class{constructor(e,r){this.deterministic=e,this.seed=r,this.count=r?r.length:0}next(){return this.deterministic?this.count++:Date.now()}};let jf;const uX=function(t){return jf=jf||document.createElement("div"),t=escape(t).replace(/%26/g,"&").replace(/%23/g,"#").replace(/%3B/g,";"),jf.innerHTML=t,unescape(jf.textContent)},Vs=t=>{if(H.debug("directiveSanitizer called with",t),typeof t=="object"&&(t.length?t.forEach(e=>Vs(e)):Object.keys(t).forEach(e=>{H.debug("Checking key",e),e.indexOf("__")===0&&(H.debug("sanitize deleting __ option",e),delete t[e]),e.indexOf("proto")>=0&&(H.debug("sanitize deleting proto option",e),delete t[e]),e.indexOf("constr")>=0&&(H.debug("sanitize deleting constr option",e),delete t[e]),e.indexOf("themeCSS")>=0&&(H.debug("sanitizing themeCss option"),t[e]=$f(t[e])),e.indexOf("fontFamily")>=0&&(H.debug("sanitizing fontFamily option"),t[e]=$f(t[e])),e.indexOf("altFontFamily")>=0&&(H.debug("sanitizing altFontFamily 
option"),t[e]=$f(t[e])),vG.indexOf(e)<0?(H.debug("sanitize deleting option",e),delete t[e]):typeof t[e]=="object"&&(H.debug("sanitize deleting object",e),Vs(t[e]))})),t.themeVariables){const e=Object.keys(t.themeVariables);for(let r=0;r<e.length;r++){const n=e[r],i=t.themeVariables[n];i&&i.match&&!i.match(/^[a-zA-Z0-9#,";()%. ]+$/)&&(t.themeVariables[n]="")}}H.debug("After sanitization",t)},$f=t=>{let e=0,r=0;for(let n=0;n<t.length;n++){if(e<r)return"{ /* ERROR: Unbalanced CSS */ }";t[n]==="{"?e++:t[n]==="}"&&r++}return e!==r?"{ /* ERROR: Unbalanced CSS */ }":t};function ng(t){return"str"in t}function hX(t){return t instanceof Error?t.message:String(t)}const Se={assignWithDepth:fr,wrapLabel:cw,calculateTextHeight:eg,calculateTextWidth:Bi,calculateTextDimensions:rg,detectInit:Z$,detectDirective:aw,isSubstringInArray:Q$,interpolateToCurve:Ni,calcLabelPosition:rX,calcCardinalityPosition:nX,calcTerminalLabelPosition:iX,formatUrl:J$,getStylesFromArray:Ka,generateId:ow,random:lw,runFunc:tX,entityDecode:uX,initIdGenerator:cX,directiveSanitizer:Vs,sanitizeCss:$f};var uw="comm",hw="rule",fw="decl",fX="@import",dX="@keyframes",pX=Math.abs,ig=String.fromCharCode;function dw(t){return t.trim()}function ag(t,e,r){return t.replace(e,r)}function gX(t,e){return t.indexOf(e)}function Xf(t,e){return t.charCodeAt(e)|0}function Lc(t,e,r){return t.slice(e,r)}function Za(t){return t.length}function pw(t){return t.length}function Kf(t,e){return e.push(t),t}var Zf=1,jo=1,gw=0,zn=0,dr=0,$o="";function sg(t,e,r,n,i,a,s){return{value:t,root:e,parent:r,type:n,props:i,children:a,line:Zf,column:jo,length:s,return:""}}function yX(){return dr}function mX(){return dr=zn>0?Xf($o,--zn):0,jo--,dr===10&&(jo=1,Zf--),dr}function oi(){return dr=zn<gw?Xf($o,zn++):0,jo++,dr===10&&(jo=1,Zf++),dr}function zs(){return Xf($o,zn)}function Qf(){return zn}function Jf(t,e){return Lc($o,t,e)}function og(t){switch(t){case 0:case 9:case 10:case 13:case 32:return 5;case 33:case 43:case 44:case 47:case 62:case 64:case 
126:case 59:case 123:case 125:return 4;case 58:return 3;case 34:case 39:case 40:case 91:return 2;case 41:case 93:return 1}return 0}function bX(t){return Zf=jo=1,gw=Za($o=t),zn=0,[]}function _X(t){return $o="",t}function lg(t){return dw(Jf(zn-1,cg(t===91?t+2:t===40?t+1:t)))}function vX(t){for(;(dr=zs())&&dr<33;)oi();return og(t)>2||og(dr)>3?"":" "}function xX(t,e){for(;--e&&oi()&&!(dr<48||dr>102||dr>57&&dr<65||dr>70&&dr<97););return Jf(t,Qf()+(e<6&&zs()==32&&oi()==32))}function cg(t){for(;oi();)switch(dr){case t:return zn;case 34:case 39:t!==34&&t!==39&&cg(dr);break;case 40:t===41&&cg(t);break;case 92:oi();break}return zn}function kX(t,e){for(;oi()&&t+dr!==47+10;)if(t+dr===42+42&&zs()===47)break;return"/*"+Jf(e,zn-1)+"*"+ig(t===47?t:oi())}function wX(t){for(;!og(zs());)oi();return Jf(t,zn)}function yw(t){return _X(t1("",null,null,null,[""],t=bX(t),0,[0],t))}function t1(t,e,r,n,i,a,s,o,l){for(var u=0,h=0,d=s,f=0,p=0,m=0,_=1,y=1,b=1,x=0,k="",T=i,C=a,M=n,S=k;y;)switch(m=x,x=oi()){case 40:if(m!=108&&Xf(S,d-1)==58){gX(S+=ag(lg(x),"&","&\f"),"&\f")!=-1&&(b=-1);break}case 34:case 39:case 91:S+=lg(x);break;case 9:case 10:case 13:case 32:S+=vX(m);break;case 92:S+=xX(Qf()-1,7);continue;case 47:switch(zs()){case 42:case 47:Kf(TX(kX(oi(),Qf()),e,r),l);break;default:S+="/"}break;case 123*_:o[u++]=Za(S)*b;case 125*_:case 59:case 0:switch(x){case 0:case 125:y=0;case 59+h:p>0&&Za(S)-d&&Kf(p>32?bw(S+";",n,r,d-1):bw(ag(S," ","")+";",n,r,d-2),l);break;case 59:S+=";";default:if(Kf(M=mw(S,e,r,u,h,i,o,k,T=[],C=[],d),a),x===123)if(h===0)t1(S,e,M,M,T,a,d,o,C);else switch(f){case 100:case 109:case 115:t1(t,M,M,n&&Kf(mw(t,M,M,0,0,i,o,k,i,T=[],d),C),i,C,d,o,n?T:C);break;default:t1(S,M,M,M,[""],C,0,o,C)}}u=h=p=0,_=b=1,k=S="",d=s;break;case 58:d=1+Za(S),p=m;default:if(_<1){if(x==123)--_;else if(x==125&&_++==0&&mX()==125)continue}switch(S+=ig(x),x*_){case 38:b=h>0?1:(S+="\f",-1);break;case 44:o[u++]=(Za(S)-1)*b,b=1;break;case 
64:zs()===45&&(S+=lg(oi())),f=zs(),h=d=Za(k=S+=wX(Qf())),x++;break;case 45:m===45&&Za(S)==2&&(_=0)}}return a}function mw(t,e,r,n,i,a,s,o,l,u,h){for(var d=i-1,f=i===0?a:[""],p=pw(f),m=0,_=0,y=0;m<n;++m)for(var b=0,x=Lc(t,d+1,d=pX(_=s[m])),k=t;b<p;++b)(k=dw(_>0?f[b]+" "+x:ag(x,/&\f/g,f[b])))&&(l[y++]=k);return sg(t,e,r,i===0?hw:o,l,u,h)}function TX(t,e,r){return sg(t,e,r,uw,ig(yX()),Lc(t,2,-2),0)}function bw(t,e,r,n){return sg(t,e,r,fw,Lc(t,0,n),Lc(t,n+1,-1),n)}function e1(t,e){for(var r="",n=pw(t),i=0;i<n;i++)r+=e(t[i],i,t,e)||"";return r}function _w(t,e,r,n){switch(t.type){case fX:case fw:return t.return=t.return||t.value;case uw:return"";case dX:return t.return=t.value+"{"+e1(t.children,n)+"}";case hw:t.value=t.props.join(",")}return Za(r=e1(t.children,n))?t.return=t.value+"{"+r+"}":""}const r1={name:"mermaid",version:"9.2.2",description:"Markdownish syntax for generating flowcharts, sequence diagrams, class diagrams, gantt charts and git graphs.",main:"./dist/mermaid.min.js",module:"./dist/mermaid.core.mjs",types:"./dist/mermaid.d.ts",type:"commonjs",exports:{".":{require:"./dist/mermaid.min.js",import:"./dist/mermaid.core.mjs",types:"./dist/mermaid.d.ts"},"./*":"./*"},keywords:["diagram","markdown","flowchart","sequence diagram","gantt","class diagram","git graph"],scripts:{clean:"rimraf dist","build:code":"node .esbuild/esbuild.cjs","build:types":"tsc -p ./tsconfig.json --emitDeclarationOnly","build:watch":"yarn build:code --watch","build:esbuild":'concurrently "yarn build:code" "yarn build:types"',build:"yarn clean; yarn build:esbuild",dev:"node .esbuild/serve.cjs","docs:build":"ts-node-esm src/docs.mts","docs:verify":"yarn docs:build --verify","todo-postbuild":"documentation build src/mermaidAPI.ts src/config.ts src/defaultConfig.ts --shallow -f md --markdown-toc false > src/docs/Setup.md && prettier --write src/docs/Setup.md",release:"yarn build",lint:"eslint --cache --ignore-path .gitignore . 
&& yarn lint:jison && prettier --check .","lint:fix":"eslint --fix --ignore-path .gitignore . && prettier --write .","lint:jison":"ts-node-esm src/jison/lint.mts",cypress:"cypress run","cypress:open":"cypress open",e2e:"start-server-and-test dev http://localhost:9000/ cypress","todo-prepare":'concurrently "husky install" "yarn build"',"pre-commit":"lint-staged"},repository:{type:"git",url:"https://github.com/mermaid-js/mermaid"},author:"Knut Sveidqvist",license:"MIT",standard:{ignore:["**/parser/*.js","dist/**/*.js","cypress/**/*.js"],globals:["page"]},dependencies:{"@braintree/sanitize-url":"^6.0.0",d3:"^7.0.0",dagre:"^0.8.5","dagre-d3":"^0.6.4",dompurify:"2.4.0","fast-clone":"^1.5.13",graphlib:"^2.1.8",khroma:"^2.0.0",lodash:"^4.17.21","moment-mini":"^2.24.0","non-layered-tidy-tree-layout":"^2.0.2",stylis:"^4.1.2",uuid:"^9.0.0"},devDependencies:{"@applitools/eyes-cypress":"^3.25.7","@commitlint/cli":"^17.1.2","@commitlint/config-conventional":"^17.0.0","@types/d3":"^7.4.0","@types/dompurify":"^2.3.4","@types/eslint":"^8.4.6","@types/express":"^4.17.13","@types/jsdom":"^20.0.0","@types/lodash":"^4.14.185","@types/prettier":"^2.7.0","@types/stylis":"^4.0.2","@types/uuid":"^8.3.4","@typescript-eslint/eslint-plugin":"^5.37.0","@typescript-eslint/parser":"^5.37.0",concurrently:"^7.4.0",coveralls:"^3.1.1",cypress:"^10.0.0","cypress-image-snapshot":"^4.0.1",documentation:"13.2.0",esbuild:"^0.15.8",eslint:"^8.23.1","eslint-config-prettier":"^8.5.0","eslint-plugin-cypress":"^2.12.1","eslint-plugin-html":"^7.1.0","eslint-plugin-jest":"^27.0.4","eslint-plugin-jsdoc":"^39.3.6","eslint-plugin-json":"^3.1.0","eslint-plugin-markdown":"^3.0.0",express:"^4.18.1",globby:"^13.1.2",husky:"^8.0.0","identity-obj-proxy":"^3.0.0",jison:"^0.4.18","js-base64":"3.7.2",jsdom:"^20.0.0","lint-staged":"^13.0.0",moment:"^2.23.0","path-browserify":"^1.0.1",prettier:"^2.7.1","prettier-plugin-jsdoc":"^0.4.2",remark:"^14.0.2",rimraf:"^3.0.2","start-server-and-test":"^1.12.6","ts-node":"^10.9.1",type
script:"^4.8.3","unist-util-flatmap":"^1.0.0"},resolutions:{d3:"^7.0.0"},files:["dist","README.md"],sideEffects:["**/*.css","**/*.scss"]},Xo=Object.freeze(Xa);let mn=fr({},Xo),vw,Ko=[],Rc=fr({},Xo);const n1=(t,e)=>{let r=fr({},t),n={};for(let i=0;i<e.length;i++){const a=e[i];ww(a),n=fr(n,a)}if(r=fr(r,n),n.theme&&n.theme in aa){const i=fr({},vw),a=fr(i.themeVariables||{},n.themeVariables);r.theme&&r.theme in aa&&(r.themeVariables=aa[r.theme].getThemeVariables(a))}return Rc=r,Cw(Rc),Rc},EX=t=>(mn=fr({},Xo),mn=fr(mn,t),t.theme&&aa[t.theme]&&(mn.themeVariables=aa[t.theme].getThemeVariables(t.themeVariables)),n1(mn,Ko),mn),CX=t=>{vw=fr({},t)},SX=t=>(mn=fr(mn,t),n1(mn,Ko),mn),xw=()=>fr({},mn),kw=t=>(Cw(t),fr(Rc,t),nt()),nt=()=>fr({},Rc),ww=t=>{var e;["secure",...(e=mn.secure)!=null?e:[]].forEach(r=>{typeof t[r]<"u"&&(H.debug(`Denied attempt to modify a secure key ${r}`,t[r]),delete t[r])}),Object.keys(t).forEach(r=>{r.indexOf("__")===0&&delete t[r]}),Object.keys(t).forEach(r=>{typeof t[r]=="string"&&(t[r].indexOf("<")>-1||t[r].indexOf(">")>-1||t[r].indexOf("url(data:")>-1)&&delete t[r],typeof t[r]=="object"&&ww(t[r])})},ug=t=>{t.fontFamily&&(t.themeVariables?t.themeVariables.fontFamily||(t.themeVariables={fontFamily:t.fontFamily}):t.themeVariables={fontFamily:t.fontFamily}),Ko.push(t),n1(mn,Ko)},Ic=(t=mn)=>{Ko=[],n1(t,Ko)};var Tw=(t=>(t.LAZY_LOAD_DEPRECATED="The configuration options lazyLoadedDiagrams and loadExternalDiagramsAtStartup are deprecated. 
Please use registerExternalDiagrams instead.",t))(Tw||{});const Ew={},AX=t=>{Ew[t]||(H.warn(Tw[t]),Ew[t]=!0)},Cw=t=>{!t||(t.lazyLoadedDiagrams||t.loadExternalDiagramsAtStartup)&&AX("LAZY_LOAD_DEPRECATED")},MX=function(t,e){for(let r of e)t.attr(r[0],r[1])},LX=function(t,e,r){let n=new Map;return r?(n.set("width","100%"),n.set("style",`max-width: ${e}px;`)):n.set("width",e),n},li=function(t,e,r,n){const i=LX(e,r,n);MX(t,i)},i1=function(t,e,r,n){const i=e.node().getBBox(),a=i.width,s=i.height;H.info(`SVG bounds: ${a}x${s}`,i);let o=0,l=0;H.info(`Graph bounds: ${o}x${l}`,t),o=a+r*2,l=s+r*2,H.info(`Calculated bounds: ${o}x${l}`),li(e,l,o,n);const u=`${i.x-r} ${i.y-r} ${i.width+2*r} ${i.height+2*r}`;e.attr("viewBox",u)},Nc=t=>`g.classGroup text { - fill: ${t.nodeBorder}; - fill: ${t.classText}; - stroke: none; - font-family: ${t.fontFamily}; - font-size: 10px; - - .title { - font-weight: bolder; - } - -} - -.nodeLabel, .edgeLabel { - color: ${t.classText}; -} -.edgeLabel .label rect { - fill: ${t.mainBkg}; -} -.label text { - fill: ${t.classText}; -} -.edgeLabel .label span { - background: ${t.mainBkg}; -} - -.classTitle { - font-weight: bolder; -} -.node rect, - .node circle, - .node ellipse, - .node polygon, - .node path { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; - stroke-width: 1px; - } - - -.divider { - stroke: ${t.nodeBorder}; - stroke: 1; -} - -g.clickable { - cursor: pointer; -} - -g.classGroup rect { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; -} - -g.classGroup line { - stroke: ${t.nodeBorder}; - stroke-width: 1; -} - -.classLabel .box { - stroke: none; - stroke-width: 0; - fill: ${t.mainBkg}; - opacity: 0.5; -} - -.classLabel .label { - fill: ${t.nodeBorder}; - font-size: 10px; -} - -.relation { - stroke: ${t.lineColor}; - stroke-width: 1; - fill: none; -} - -.dashed-line{ - stroke-dasharray: 3; -} - -#compositionStart, .composition { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#compositionEnd, 
.composition { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#dependencyStart, .dependency { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#dependencyStart, .dependency { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#extensionStart, .extension { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#extensionEnd, .extension { - fill: ${t.lineColor} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#aggregationStart, .aggregation { - fill: ${t.mainBkg} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#aggregationEnd, .aggregation { - fill: ${t.mainBkg} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#lollipopStart, .lollipop { - fill: ${t.mainBkg} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -#lollipopEnd, .lollipop { - fill: ${t.mainBkg} !important; - stroke: ${t.lineColor} !important; - stroke-width: 1; -} - -.edgeTerminals { - font-size: 11px; -} - -`,Sw=t=>` - .entityBox { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; - } - - .attributeBoxOdd { - fill: ${t.attributeBackgroundColorOdd}; - stroke: ${t.nodeBorder}; - } - - .attributeBoxEven { - fill: ${t.attributeBackgroundColorEven}; - stroke: ${t.nodeBorder}; - } - - .relationshipLabelBox { - fill: ${t.tertiaryColor}; - opacity: 0.7; - background-color: ${t.tertiaryColor}; - rect { - opacity: 0.5; - } - } - - .relationshipLine { - stroke: ${t.lineColor}; - } -`,Aw=()=>"",a1=t=>`.label { - font-family: ${t.fontFamily}; - color: ${t.nodeTextColor||t.textColor}; - } - .cluster-label text { - fill: ${t.titleColor}; - } - .cluster-label span { - color: ${t.titleColor}; - } - - .label text,span { - fill: ${t.nodeTextColor||t.textColor}; - color: ${t.nodeTextColor||t.textColor}; - } - - .node rect, - .node circle, - .node 
ellipse, - .node polygon, - .node path { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; - stroke-width: 1px; - } - - .node .label { - text-align: center; - } - .node.clickable { - cursor: pointer; - } - - .arrowheadPath { - fill: ${t.arrowheadColor}; - } - - .edgePath .path { - stroke: ${t.lineColor}; - stroke-width: 2.0px; - } - - .flowchart-link { - stroke: ${t.lineColor}; - fill: none; - } - - .edgeLabel { - background-color: ${t.edgeLabelBackground}; - rect { - opacity: 0.5; - background-color: ${t.edgeLabelBackground}; - fill: ${t.edgeLabelBackground}; - } - text-align: center; - } - - .cluster rect { - fill: ${t.clusterBkg}; - stroke: ${t.clusterBorder}; - stroke-width: 1px; - } - - .cluster text { - fill: ${t.titleColor}; - } - - .cluster span { - color: ${t.titleColor}; - } - /* .cluster div { - color: ${t.titleColor}; - } */ - - div.mermaidTooltip { - position: absolute; - text-align: center; - max-width: 200px; - padding: 2px; - font-family: ${t.fontFamily}; - font-size: 12px; - background: ${t.tertiaryColor}; - border: 1px solid ${t.border2}; - border-radius: 2px; - pointer-events: none; - z-index: 100; - } -`,Mw=t=>` - .mermaid-main-font { - font-family: "trebuchet ms", verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - } - .exclude-range { - fill: ${t.excludeBkgColor}; - } - - .section { - stroke: none; - opacity: 0.2; - } - - .section0 { - fill: ${t.sectionBkgColor}; - } - - .section2 { - fill: ${t.sectionBkgColor2}; - } - - .section1, - .section3 { - fill: ${t.altSectionBkgColor}; - opacity: 0.2; - } - - .sectionTitle0 { - fill: ${t.titleColor}; - } - - .sectionTitle1 { - fill: ${t.titleColor}; - } - - .sectionTitle2 { - fill: ${t.titleColor}; - } - - .sectionTitle3 { - fill: ${t.titleColor}; - } - - .sectionTitle { - text-anchor: start; - // font-size: ${t.ganttFontSize}; - // text-height: 14px; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - - } - - - /* Grid and axis 
*/ - - .grid .tick { - stroke: ${t.gridColor}; - opacity: 0.8; - shape-rendering: crispEdges; - text { - font-family: ${t.fontFamily}; - fill: ${t.textColor}; - } - } - - .grid path { - stroke-width: 0; - } - - - /* Today line */ - - .today { - fill: none; - stroke: ${t.todayLineColor}; - stroke-width: 2px; - } - - - /* Task styling */ - - /* Default task */ - - .task { - stroke-width: 2; - } - - .taskText { - text-anchor: middle; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - } - - // .taskText:not([font-size]) { - // font-size: ${t.ganttFontSize}; - // } - - .taskTextOutsideRight { - fill: ${t.taskTextDarkColor}; - text-anchor: start; - // font-size: ${t.ganttFontSize}; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - - } - - .taskTextOutsideLeft { - fill: ${t.taskTextDarkColor}; - text-anchor: end; - // font-size: ${t.ganttFontSize}; - } - - /* Special case clickable */ - .task.clickable { - cursor: pointer; - } - .taskText.clickable { - cursor: pointer; - fill: ${t.taskTextClickableColor} !important; - font-weight: bold; - } - - .taskTextOutsideLeft.clickable { - cursor: pointer; - fill: ${t.taskTextClickableColor} !important; - font-weight: bold; - } - - .taskTextOutsideRight.clickable { - cursor: pointer; - fill: ${t.taskTextClickableColor} !important; - font-weight: bold; - } - - /* Specific task settings for the sections*/ - - .taskText0, - .taskText1, - .taskText2, - .taskText3 { - fill: ${t.taskTextColor}; - } - - .task0, - .task1, - .task2, - .task3 { - fill: ${t.taskBkgColor}; - stroke: ${t.taskBorderColor}; - } - - .taskTextOutside0, - .taskTextOutside2 - { - fill: ${t.taskTextOutsideColor}; - } - - .taskTextOutside1, - .taskTextOutside3 { - fill: ${t.taskTextOutsideColor}; - } - - - /* Active task */ - - .active0, - .active1, - .active2, - .active3 { - fill: ${t.activeTaskBkgColor}; - stroke: ${t.activeTaskBorderColor}; - } - - .activeText0, 
- .activeText1, - .activeText2, - .activeText3 { - fill: ${t.taskTextDarkColor} !important; - } - - - /* Completed task */ - - .done0, - .done1, - .done2, - .done3 { - stroke: ${t.doneTaskBorderColor}; - fill: ${t.doneTaskBkgColor}; - stroke-width: 2; - } - - .doneText0, - .doneText1, - .doneText2, - .doneText3 { - fill: ${t.taskTextDarkColor} !important; - } - - - /* Tasks on the critical line */ - - .crit0, - .crit1, - .crit2, - .crit3 { - stroke: ${t.critBorderColor}; - fill: ${t.critBkgColor}; - stroke-width: 2; - } - - .activeCrit0, - .activeCrit1, - .activeCrit2, - .activeCrit3 { - stroke: ${t.critBorderColor}; - fill: ${t.activeTaskBkgColor}; - stroke-width: 2; - } - - .doneCrit0, - .doneCrit1, - .doneCrit2, - .doneCrit3 { - stroke: ${t.critBorderColor}; - fill: ${t.doneTaskBkgColor}; - stroke-width: 2; - cursor: pointer; - shape-rendering: crispEdges; - } - - .milestone { - transform: rotate(45deg) scale(0.8,0.8); - } - - .milestoneText { - font-style: italic; - } - .doneCritText0, - .doneCritText1, - .doneCritText2, - .doneCritText3 { - fill: ${t.taskTextDarkColor} !important; - } - - .activeCritText0, - .activeCritText1, - .activeCritText2, - .activeCritText3 { - fill: ${t.taskTextDarkColor} !important; - } - - .titleText { - text-anchor: middle; - font-size: 18px; - fill: ${t.textColor} ; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - } -`,Lw=()=>"",Rw=t=>` - .pieCircle{ - stroke: ${t.pieStrokeColor}; - stroke-width : ${t.pieStrokeWidth}; - opacity : ${t.pieOpacity}; - } - .pieTitleText { - text-anchor: middle; - font-size: ${t.pieTitleTextSize}; - fill: ${t.pieTitleTextColor}; - font-family: ${t.fontFamily}; - } - .slice { - font-family: ${t.fontFamily}; - fill: ${t.pieSectionTextColor}; - font-size:${t.pieSectionTextSize}; - // fill: white; - } - .legend text { - fill: ${t.pieLegendTextColor}; - font-family: ${t.fontFamily}; - font-size: ${t.pieLegendTextSize}; - } -`,Iw=t=>` - - marker { - fill: 
${t.relationColor}; - stroke: ${t.relationColor}; - } - - marker.cross { - stroke: ${t.lineColor}; - } - - svg { - font-family: ${t.fontFamily}; - font-size: ${t.fontSize}; - } - - .reqBox { - fill: ${t.requirementBackground}; - fill-opacity: 100%; - stroke: ${t.requirementBorderColor}; - stroke-width: ${t.requirementBorderSize}; - } - - .reqTitle, .reqLabel{ - fill: ${t.requirementTextColor}; - } - .reqLabelBox { - fill: ${t.relationLabelBackground}; - fill-opacity: 100%; - } - - .req-title-line { - stroke: ${t.requirementBorderColor}; - stroke-width: ${t.requirementBorderSize}; - } - .relationshipLine { - stroke: ${t.relationColor}; - stroke-width: 1; - } - .relationshipLabel { - fill: ${t.relationLabelColor}; - } - -`,Nw=t=>`.actor { - stroke: ${t.actorBorder}; - fill: ${t.actorBkg}; - } - - text.actor > tspan { - fill: ${t.actorTextColor}; - stroke: none; - } - - .actor-line { - stroke: ${t.actorLineColor}; - } - - .messageLine0 { - stroke-width: 1.5; - stroke-dasharray: none; - stroke: ${t.signalColor}; - } - - .messageLine1 { - stroke-width: 1.5; - stroke-dasharray: 2, 2; - stroke: ${t.signalColor}; - } - - #arrowhead path { - fill: ${t.signalColor}; - stroke: ${t.signalColor}; - } - - .sequenceNumber { - fill: ${t.sequenceNumberColor}; - } - - #sequencenumber { - fill: ${t.signalColor}; - } - - #crosshead path { - fill: ${t.signalColor}; - stroke: ${t.signalColor}; - } - - .messageText { - fill: ${t.signalTextColor}; - stroke: none; - } - - .labelBox { - stroke: ${t.labelBoxBorderColor}; - fill: ${t.labelBoxBkgColor}; - } - - .labelText, .labelText > tspan { - fill: ${t.labelTextColor}; - stroke: none; - } - - .loopText, .loopText > tspan { - fill: ${t.loopTextColor}; - stroke: none; - } - - .loopLine { - stroke-width: 2px; - stroke-dasharray: 2, 2; - stroke: ${t.labelBoxBorderColor}; - fill: ${t.labelBoxBorderColor}; - } - - .note { - //stroke: #decc93; - stroke: ${t.noteBorderColor}; - fill: ${t.noteBkgColor}; - } - - .noteText, .noteText > tspan { - fill: 
${t.noteTextColor}; - stroke: none; - } - - .activation0 { - fill: ${t.activationBkgColor}; - stroke: ${t.activationBorderColor}; - } - - .activation1 { - fill: ${t.activationBkgColor}; - stroke: ${t.activationBorderColor}; - } - - .activation2 { - fill: ${t.activationBkgColor}; - stroke: ${t.activationBorderColor}; - } - - .actorPopupMenu { - position: absolute; - } - - .actorPopupMenuPanel { - position: absolute; - fill: ${t.actorBkg}; - box-shadow: 0px 8px 16px 0px rgba(0,0,0,0.2); - filter: drop-shadow(3px 5px 2px rgb(0 0 0 / 0.4)); -} - .actor-man line { - stroke: ${t.actorBorder}; - fill: ${t.actorBkg}; - } - .actor-man circle, line { - stroke: ${t.actorBorder}; - fill: ${t.actorBkg}; - stroke-width: 2px; - } -`,s1=t=>` -defs #statediagram-barbEnd { - fill: ${t.transitionColor}; - stroke: ${t.transitionColor}; - } -g.stateGroup text { - fill: ${t.nodeBorder}; - stroke: none; - font-size: 10px; -} -g.stateGroup text { - fill: ${t.textColor}; - stroke: none; - font-size: 10px; - -} -g.stateGroup .state-title { - font-weight: bolder; - fill: ${t.stateLabelColor}; -} - -g.stateGroup rect { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; -} - -g.stateGroup line { - stroke: ${t.lineColor}; - stroke-width: 1; -} - -.transition { - stroke: ${t.transitionColor}; - stroke-width: 1; - fill: none; -} - -.stateGroup .composit { - fill: ${t.background}; - border-bottom: 1px -} - -.stateGroup .alt-composit { - fill: #e0e0e0; - border-bottom: 1px -} - -.state-note { - stroke: ${t.noteBorderColor}; - fill: ${t.noteBkgColor}; - - text { - fill: ${t.noteTextColor}; - stroke: none; - font-size: 10px; - } -} - -.stateLabel .box { - stroke: none; - stroke-width: 0; - fill: ${t.mainBkg}; - opacity: 0.5; -} - -.edgeLabel .label rect { - fill: ${t.labelBackgroundColor}; - opacity: 0.5; -} -.edgeLabel .label text { - fill: ${t.transitionLabelColor||t.tertiaryTextColor}; -} -.label div .edgeLabel { - color: ${t.transitionLabelColor||t.tertiaryTextColor}; -} - -.stateLabel text { - 
fill: ${t.stateLabelColor}; - font-size: 10px; - font-weight: bold; -} - -.node circle.state-start { - fill: ${t.specialStateColor}; - stroke: ${t.specialStateColor}; -} - -.node .fork-join { - fill: ${t.specialStateColor}; - stroke: ${t.specialStateColor}; -} - -.node circle.state-end { - fill: ${t.innerEndBackground}; - stroke: ${t.background}; - stroke-width: 1.5 -} -.end-state-inner { - fill: ${t.compositeBackground||t.background}; - // stroke: ${t.background}; - stroke-width: 1.5 -} - -.node rect { - fill: ${t.stateBkg||t.mainBkg}; - stroke: ${t.stateBorder||t.nodeBorder}; - stroke-width: 1px; -} -.node polygon { - fill: ${t.mainBkg}; - stroke: ${t.stateBorder||t.nodeBorder};; - stroke-width: 1px; -} -#statediagram-barbEnd { - fill: ${t.lineColor}; -} - -.statediagram-cluster rect { - fill: ${t.compositeTitleBackground}; - stroke: ${t.stateBorder||t.nodeBorder}; - stroke-width: 1px; -} - -.cluster-label, .nodeLabel { - color: ${t.stateLabelColor}; -} - -.statediagram-cluster rect.outer { - rx: 5px; - ry: 5px; -} -.statediagram-state .divider { - stroke: ${t.stateBorder||t.nodeBorder}; -} - -.statediagram-state .title-state { - rx: 5px; - ry: 5px; -} -.statediagram-cluster.statediagram-cluster .inner { - fill: ${t.compositeBackground||t.background}; -} -.statediagram-cluster.statediagram-cluster-alt .inner { - fill: ${t.altBackground?t.altBackground:"#efefef"}; -} - -.statediagram-cluster .inner { - rx:0; - ry:0; -} - -.statediagram-state rect.basic { - rx: 5px; - ry: 5px; -} -.statediagram-state rect.divider { - stroke-dasharray: 10,10; - fill: ${t.altBackground?t.altBackground:"#efefef"}; -} - -.note-edge { - stroke-dasharray: 5; -} - -.statediagram-note rect { - fill: ${t.noteBkgColor}; - stroke: ${t.noteBorderColor}; - stroke-width: 1px; - rx: 0; - ry: 0; -} -.statediagram-note rect { - fill: ${t.noteBkgColor}; - stroke: ${t.noteBorderColor}; - stroke-width: 1px; - rx: 0; - ry: 0; -} - -.statediagram-note text { - fill: ${t.noteTextColor}; -} - 
-.statediagram-note .nodeLabel { - color: ${t.noteTextColor}; -} -.statediagram .edgeLabel { - color: red; // ${t.noteTextColor}; -} - -#dependencyStart, #dependencyEnd { - fill: ${t.lineColor}; - stroke: ${t.lineColor}; - stroke-width: 1; -} -`,Bw=t=>`.label { - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - color: ${t.textColor}; - } - .mouth { - stroke: #666; - } - - line { - stroke: ${t.textColor} - } - - .legend { - fill: ${t.textColor}; - } - - .label text { - fill: #333; - } - .label { - color: ${t.textColor} - } - - .face { - ${t.faceColor?`fill: ${t.faceColor}`:"fill: #FFF8DC"}; - stroke: #999; - } - - .node rect, - .node circle, - .node ellipse, - .node polygon, - .node path { - fill: ${t.mainBkg}; - stroke: ${t.nodeBorder}; - stroke-width: 1px; - } - - .node .label { - text-align: center; - } - .node.clickable { - cursor: pointer; - } - - .arrowheadPath { - fill: ${t.arrowheadColor}; - } - - .edgePath .path { - stroke: ${t.lineColor}; - stroke-width: 1.5px; - } - - .flowchart-link { - stroke: ${t.lineColor}; - fill: none; - } - - .edgeLabel { - background-color: ${t.edgeLabelBackground}; - rect { - opacity: 0.5; - } - text-align: center; - } - - .cluster rect { - } - - .cluster text { - fill: ${t.titleColor}; - } - - div.mermaidTooltip { - position: absolute; - text-align: center; - max-width: 200px; - padding: 2px; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - font-size: 12px; - background: ${t.tertiaryColor}; - border: 1px solid ${t.border2}; - border-radius: 2px; - pointer-events: none; - z-index: 100; - } - - .task-type-0, .section-type-0 { - ${t.fillType0?`fill: ${t.fillType0}`:""}; - } - .task-type-1, .section-type-1 { - ${t.fillType0?`fill: ${t.fillType1}`:""}; - } - .task-type-2, .section-type-2 { - ${t.fillType0?`fill: ${t.fillType2}`:""}; - } - .task-type-3, .section-type-3 { - ${t.fillType0?`fill: ${t.fillType3}`:""}; - } - 
.task-type-4, .section-type-4 { - ${t.fillType0?`fill: ${t.fillType4}`:""}; - } - .task-type-5, .section-type-5 { - ${t.fillType0?`fill: ${t.fillType5}`:""}; - } - .task-type-6, .section-type-6 { - ${t.fillType0?`fill: ${t.fillType6}`:""}; - } - .task-type-7, .section-type-7 { - ${t.fillType0?`fill: ${t.fillType7}`:""}; - } - - .actor-0 { - ${t.actor0?`fill: ${t.actor0}`:""}; - } - .actor-1 { - ${t.actor1?`fill: ${t.actor1}`:""}; - } - .actor-2 { - ${t.actor2?`fill: ${t.actor2}`:""}; - } - .actor-3 { - ${t.actor3?`fill: ${t.actor3}`:""}; - } - .actor-4 { - ${t.actor4?`fill: ${t.actor4}`:""}; - } - .actor-5 { - ${t.actor5?`fill: ${t.actor5}`:""}; - } -`,Dw=t=>`.person { - stroke: ${t.personBorder}; - fill: ${t.personBkg}; - } -`,o1={flowchart:a1,"flowchart-v2":a1,sequence:Nw,gantt:Mw,classDiagram:Nc,"classDiagram-v2":Nc,class:Nc,stateDiagram:s1,state:s1,info:Lw,pie:Rw,er:Sw,error:Aw,journey:Bw,requirement:Iw,c4:Dw},Ow=(t,e,r)=>{let n="";return t in o1&&o1[t]?n=o1[t](r):H.warn(`No theme found for ${t}`),` { - font-family: ${r.fontFamily}; - font-size: ${r.fontSize}; - fill: ${r.textColor} - } - - /* Classes common for multiple diagrams */ - - .error-icon { - fill: ${r.errorBkgColor}; - } - .error-text { - fill: ${r.errorTextColor}; - stroke: ${r.errorTextColor}; - } - - .edge-thickness-normal { - stroke-width: 2px; - } - .edge-thickness-thick { - stroke-width: 3.5px - } - .edge-pattern-solid { - stroke-dasharray: 0; - } - - .edge-pattern-dashed{ - stroke-dasharray: 3; - } - .edge-pattern-dotted { - stroke-dasharray: 2; - } - - .marker { - fill: ${r.lineColor}; - stroke: ${r.lineColor}; - } - .marker.cross { - stroke: ${r.lineColor}; - } - - svg { - font-family: ${r.fontFamily}; - font-size: ${r.fontSize}; - } - - ${n} - - ${e} -`},RX=(t,e)=>{o1[t]=e},Bc=H,IX=D0,Zo=nt,NX=t=>ai(t,Zo()),Fw=i1,Qo={},Lr=(t,e,r)=>{Bc.debug(`Registering diagram ${t}`),Qo[t]&&Bc.warn(`Diagram ${t} already 
registered.`),Qo[t]=e,r&&$k(t,r),RX(t,e.styles),e.injectUtils&&e.injectUtils(Bc,IX,Zo,NX,Fw),Bc.debug(`Registered diagram ${t}. ${Object.keys(Qo).join(", ")} diagrams registered.`)},Pw=t=>{if(Bc.debug(`Getting diagram ${t}. ${Object.keys(Qo).join(", ")} diagrams registered.`),t in Qo)return Qo[t];throw new qw(t)};class qw extends Error{constructor(e){super(`Diagram ${e} not found.`)}}var hg=function(){var t=function(C,M,S,R){for(S=S||{},R=C.length;R--;S[C[R]]=M);return S},e=[1,4],r=[1,7],n=[1,5],i=[1,9],a=[1,6],s=[2,6],o=[1,16],l=[6,8,14,20,22,24,25,27,29,32,37,40,50,54],u=[8,14,20,22,24,25,27,29,32,37,40],h=[8,13,14,20,22,24,25,27,29,32,37,40],d=[1,26],f=[6,8,14,50,54],p=[8,14,54],m=[1,65],_=[1,66],y=[1,67],b=[8,14,33,35,42,54],x={trace:function(){},yy:{},symbols_:{error:2,start:3,eol:4,directive:5,GG:6,document:7,EOF:8,":":9,DIR:10,options:11,body:12,OPT:13,NL:14,line:15,statement:16,commitStatement:17,mergeStatement:18,cherryPickStatement:19,acc_title:20,acc_title_value:21,acc_descr:22,acc_descr_value:23,acc_descr_multiline_value:24,section:25,branchStatement:26,CHECKOUT:27,ID:28,BRANCH:29,ORDER:30,NUM:31,CHERRY_PICK:32,COMMIT_ID:33,STR:34,COMMIT_TAG:35,EMPTYSTR:36,MERGE:37,COMMIT_TYPE:38,commitType:39,COMMIT:40,commit_arg:41,COMMIT_MSG:42,NORMAL:43,REVERSE:44,HIGHLIGHT:45,openDirective:46,typeDirective:47,closeDirective:48,argDirective:49,open_directive:50,type_directive:51,arg_directive:52,close_directive:53,";":54,$accept:0,$end:1},terminals_:{2:"error",6:"GG",8:"EOF",9:":",10:"DIR",13:"OPT",14:"NL",20:"acc_title",21:"acc_title_value",22:"acc_descr",23:"acc_descr_value",24:"acc_descr_multiline_value",25:"section",27:"CHECKOUT",28:"ID",29:"BRANCH",30:"ORDER",31:"NUM",32:"CHERRY_PICK",33:"COMMIT_ID",34:"STR",35:"COMMIT_TAG",36:"EMPTYSTR",37:"MERGE",38:"COMMIT_TYPE",40:"COMMIT",42:"COMMIT_MSG",43:"NORMAL",44:"REVERSE",45:"HIGHLIGHT",50:"open_directive",51:"type_directive",52:"arg_directive",53:"close_directive",54:";"},productions_:[0,[3,2],[3,2],[3,3],[3,4],[3,5
],[7,0],[7,2],[11,2],[11,1],[12,0],[12,2],[15,2],[15,1],[16,1],[16,1],[16,1],[16,2],[16,2],[16,1],[16,1],[16,1],[16,2],[26,2],[26,4],[19,3],[19,5],[19,5],[19,5],[19,5],[18,2],[18,4],[18,4],[18,4],[18,6],[18,6],[18,6],[18,6],[18,6],[18,6],[18,8],[18,8],[18,8],[18,8],[18,8],[18,8],[17,2],[17,3],[17,3],[17,5],[17,5],[17,3],[17,5],[17,5],[17,5],[17,5],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,3],[17,5],[17,5],[17,5],[17,5],[17,5],[17,5],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,7],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[17,9],[41,0],[41,1],[39,1],[39,1],[39,1],[5,3],[5,5],[46,1],[47,1],[49,1],[48,1],[4,1],[4,1],[4,1]],performAction:function(M,S,R,A,L,v,B){var w=v.length-1;switch(L){case 3:return v[w];case 4:return v[w-1];case 5:return A.setDirection(v[w-3]),v[w-1];case 7:A.setOptions(v[w-1]),this.$=v[w];break;case 8:v[w-1]+=v[w],this.$=v[w-1];break;case 10:this.$=[];break;case 11:v[w-1].push(v[w]),this.$=v[w-1];break;case 12:this.$=v[w-1];break;case 17:this.$=v[w].trim(),A.setAccTitle(this.$);break;case 18:case 19:this.$=v[w].trim(),A.setAccDescription(this.$);break;case 20:A.addSection(v[w].substr(8)),this.$=v[w].substr(8);break;case 22:A.checkout(v[w]);break;case 23:A.branch(v[w]);break;case 24:A.branch(v[w-2],v[w]);break;case 25:A.cherryPick(v[w],"",void 0);break;case 26:A.cherryPick(v[w-2],"",v[w]);break;case 27:case 29:A.cherryPick(v[w-2],"","");break;case 28:A.cherryPick(v[w],"",v[w-2]);break;case 30:A.merge(v[w],"","","");break;case 31:A.merge(v[w-2],v[w],"","");break;case 32:A.merge(v[w-2],"",v[w],"");break;case 33:A.merge(v[w-2],"","",v[w]);break;case 34:A.merge(v[w-4],v[w],"",v[w-2]);break;case 35:A.merge(v[w-4],"",v[w],v[w-2]);break;case 36:A.merge(v[w-4],"",v[w-2],v[w]);break;case 37:A.merge(v[w-4],v[w-2],v[w],"");break;case 
38:A.merge(v[w-4],v[w-2],"",v[w]);break;case 39:A.merge(v[w-4],v[w],v[w-2],"");break;case 40:A.merge(v[w-6],v[w-4],v[w-2],v[w]);break;case 41:A.merge(v[w-6],v[w],v[w-4],v[w-2]);break;case 42:A.merge(v[w-6],v[w-4],v[w],v[w-2]);break;case 43:A.merge(v[w-6],v[w-2],v[w-4],v[w]);break;case 44:A.merge(v[w-6],v[w],v[w-2],v[w-4]);break;case 45:A.merge(v[w-6],v[w-2],v[w],v[w-4]);break;case 46:A.commit(v[w]);break;case 47:A.commit("","",A.commitType.NORMAL,v[w]);break;case 48:A.commit("","",v[w],"");break;case 49:A.commit("","",v[w],v[w-2]);break;case 50:A.commit("","",v[w-2],v[w]);break;case 51:A.commit("",v[w],A.commitType.NORMAL,"");break;case 52:A.commit("",v[w-2],A.commitType.NORMAL,v[w]);break;case 53:A.commit("",v[w],A.commitType.NORMAL,v[w-2]);break;case 54:A.commit("",v[w-2],v[w],"");break;case 55:A.commit("",v[w],v[w-2],"");break;case 56:A.commit("",v[w-4],v[w-2],v[w]);break;case 57:A.commit("",v[w-4],v[w],v[w-2]);break;case 58:A.commit("",v[w-2],v[w-4],v[w]);break;case 59:A.commit("",v[w],v[w-4],v[w-2]);break;case 60:A.commit("",v[w],v[w-2],v[w-4]);break;case 61:A.commit("",v[w-2],v[w],v[w-4]);break;case 62:A.commit(v[w],"",A.commitType.NORMAL,"");break;case 63:A.commit(v[w],"",A.commitType.NORMAL,v[w-2]);break;case 64:A.commit(v[w-2],"",A.commitType.NORMAL,v[w]);break;case 65:A.commit(v[w-2],"",v[w],"");break;case 66:A.commit(v[w],"",v[w-2],"");break;case 67:A.commit(v[w],v[w-2],A.commitType.NORMAL,"");break;case 68:A.commit(v[w-2],v[w],A.commitType.NORMAL,"");break;case 69:A.commit(v[w-4],"",v[w-2],v[w]);break;case 70:A.commit(v[w-4],"",v[w],v[w-2]);break;case 71:A.commit(v[w-2],"",v[w-4],v[w]);break;case 72:A.commit(v[w],"",v[w-4],v[w-2]);break;case 73:A.commit(v[w],"",v[w-2],v[w-4]);break;case 74:A.commit(v[w-2],"",v[w],v[w-4]);break;case 75:A.commit(v[w-4],v[w],v[w-2],"");break;case 76:A.commit(v[w-4],v[w-2],v[w],"");break;case 77:A.commit(v[w-2],v[w],v[w-4],"");break;case 78:A.commit(v[w],v[w-2],v[w-4],"");break;case 
79:A.commit(v[w],v[w-4],v[w-2],"");break;case 80:A.commit(v[w-2],v[w-4],v[w],"");break;case 81:A.commit(v[w-4],v[w],A.commitType.NORMAL,v[w-2]);break;case 82:A.commit(v[w-4],v[w-2],A.commitType.NORMAL,v[w]);break;case 83:A.commit(v[w-2],v[w],A.commitType.NORMAL,v[w-4]);break;case 84:A.commit(v[w],v[w-2],A.commitType.NORMAL,v[w-4]);break;case 85:A.commit(v[w],v[w-4],A.commitType.NORMAL,v[w-2]);break;case 86:A.commit(v[w-2],v[w-4],A.commitType.NORMAL,v[w]);break;case 87:A.commit(v[w-6],v[w-4],v[w-2],v[w]);break;case 88:A.commit(v[w-6],v[w-4],v[w],v[w-2]);break;case 89:A.commit(v[w-6],v[w-2],v[w-4],v[w]);break;case 90:A.commit(v[w-6],v[w],v[w-4],v[w-2]);break;case 91:A.commit(v[w-6],v[w-2],v[w],v[w-4]);break;case 92:A.commit(v[w-6],v[w],v[w-2],v[w-4]);break;case 93:A.commit(v[w-4],v[w-6],v[w-2],v[w]);break;case 94:A.commit(v[w-4],v[w-6],v[w],v[w-2]);break;case 95:A.commit(v[w-2],v[w-6],v[w-4],v[w]);break;case 96:A.commit(v[w],v[w-6],v[w-4],v[w-2]);break;case 97:A.commit(v[w-2],v[w-6],v[w],v[w-4]);break;case 98:A.commit(v[w],v[w-6],v[w-2],v[w-4]);break;case 99:A.commit(v[w],v[w-4],v[w-2],v[w-6]);break;case 100:A.commit(v[w-2],v[w-4],v[w],v[w-6]);break;case 101:A.commit(v[w],v[w-2],v[w-4],v[w-6]);break;case 102:A.commit(v[w-2],v[w],v[w-4],v[w-6]);break;case 103:A.commit(v[w-4],v[w-2],v[w],v[w-6]);break;case 104:A.commit(v[w-4],v[w],v[w-2],v[w-6]);break;case 105:A.commit(v[w-2],v[w-4],v[w-6],v[w]);break;case 106:A.commit(v[w],v[w-4],v[w-6],v[w-2]);break;case 107:A.commit(v[w-2],v[w],v[w-6],v[w-4]);break;case 108:A.commit(v[w],v[w-2],v[w-6],v[w-4]);break;case 109:A.commit(v[w-4],v[w-2],v[w-6],v[w]);break;case 110:A.commit(v[w-4],v[w],v[w-6],v[w-2]);break;case 111:this.$="";break;case 112:this.$=v[w];break;case 113:this.$=A.commitType.NORMAL;break;case 114:this.$=A.commitType.REVERSE;break;case 115:this.$=A.commitType.HIGHLIGHT;break;case 118:A.parseDirective("%%{","open_directive");break;case 119:A.parseDirective(v[w],"type_directive");break;case 
120:v[w]=v[w].trim().replace(/'/g,'"'),A.parseDirective(v[w],"arg_directive");break;case 121:A.parseDirective("}%%","close_directive","gitGraph");break}},table:[{3:1,4:2,5:3,6:e,8:r,14:n,46:8,50:i,54:a},{1:[3]},{3:10,4:2,5:3,6:e,8:r,14:n,46:8,50:i,54:a},{3:11,4:2,5:3,6:e,8:r,14:n,46:8,50:i,54:a},{7:12,8:s,9:[1,13],10:[1,14],11:15,14:o},t(l,[2,122]),t(l,[2,123]),t(l,[2,124]),{47:17,51:[1,18]},{51:[2,118]},{1:[2,1]},{1:[2,2]},{8:[1,19]},{7:20,8:s,11:15,14:o},{9:[1,21]},t(u,[2,10],{12:22,13:[1,23]}),t(h,[2,9]),{9:[1,25],48:24,53:d},t([9,53],[2,119]),{1:[2,3]},{8:[1,27]},{7:28,8:s,11:15,14:o},{8:[2,7],14:[1,31],15:29,16:30,17:32,18:33,19:34,20:[1,35],22:[1,36],24:[1,37],25:[1,38],26:39,27:[1,40],29:[1,44],32:[1,43],37:[1,42],40:[1,41]},t(h,[2,8]),t(f,[2,116]),{49:45,52:[1,46]},t(f,[2,121]),{1:[2,4]},{8:[1,47]},t(u,[2,11]),{4:48,8:r,14:n,54:a},t(u,[2,13]),t(p,[2,14]),t(p,[2,15]),t(p,[2,16]),{21:[1,49]},{23:[1,50]},t(p,[2,19]),t(p,[2,20]),t(p,[2,21]),{28:[1,51]},t(p,[2,111],{41:52,33:[1,55],34:[1,57],35:[1,53],38:[1,54],42:[1,56]}),{28:[1,58]},{33:[1,59],35:[1,60]},{28:[1,61]},{48:62,53:d},{53:[2,120]},{1:[2,5]},t(u,[2,12]),t(p,[2,17]),t(p,[2,18]),t(p,[2,22]),t(p,[2,46]),{34:[1,63]},{39:64,43:m,44:_,45:y},{34:[1,68]},{34:[1,69]},t(p,[2,112]),t(p,[2,30],{33:[1,70],35:[1,72],38:[1,71]}),{34:[1,73]},{34:[1,74],36:[1,75]},t(p,[2,23],{30:[1,76]}),t(f,[2,117]),t(p,[2,47],{33:[1,78],38:[1,77],42:[1,79]}),t(p,[2,48],{33:[1,81],35:[1,80],42:[1,82]}),t(b,[2,113]),t(b,[2,114]),t(b,[2,115]),t(p,[2,51],{35:[1,83],38:[1,84],42:[1,85]}),t(p,[2,62],{33:[1,88],35:[1,86],38:[1,87]}),{34:[1,89]},{39:90,43:m,44:_,45:y},{34:[1,91]},t(p,[2,25],{35:[1,92]}),{33:[1,93]},{33:[1,94]},{31:[1,95]},{39:96,43:m,44:_,45:y},{34:[1,97]},{34:[1,98]},{34:[1,99]},{34:[1,100]},{34:[1,101]},{34:[1,102]},{39:103,43:m,44:_,45:y},{34:[1,104]},{34:[1,105]},{39:106,43:m,44:_,45:y},{34:[1,107]},t(p,[2,31],{35:[1,109],38:[1,108]}),t(p,[2,32],{33:[1,111],35:[1,110]}),t(p,[2,33],{33:[1,112],38:[1,113]}),{34:[1,114],36
:[1,115]},{34:[1,116]},{34:[1,117]},t(p,[2,24]),t(p,[2,49],{33:[1,118],42:[1,119]}),t(p,[2,53],{38:[1,120],42:[1,121]}),t(p,[2,63],{33:[1,123],38:[1,122]}),t(p,[2,50],{33:[1,124],42:[1,125]}),t(p,[2,55],{35:[1,126],42:[1,127]}),t(p,[2,66],{33:[1,129],35:[1,128]}),t(p,[2,52],{38:[1,130],42:[1,131]}),t(p,[2,54],{35:[1,132],42:[1,133]}),t(p,[2,67],{35:[1,135],38:[1,134]}),t(p,[2,64],{33:[1,137],38:[1,136]}),t(p,[2,65],{33:[1,139],35:[1,138]}),t(p,[2,68],{35:[1,141],38:[1,140]}),{39:142,43:m,44:_,45:y},{34:[1,143]},{34:[1,144]},{34:[1,145]},{34:[1,146]},{39:147,43:m,44:_,45:y},t(p,[2,26]),t(p,[2,27]),t(p,[2,28]),t(p,[2,29]),{34:[1,148]},{34:[1,149]},{39:150,43:m,44:_,45:y},{34:[1,151]},{39:152,43:m,44:_,45:y},{34:[1,153]},{34:[1,154]},{34:[1,155]},{34:[1,156]},{34:[1,157]},{34:[1,158]},{34:[1,159]},{39:160,43:m,44:_,45:y},{34:[1,161]},{34:[1,162]},{34:[1,163]},{39:164,43:m,44:_,45:y},{34:[1,165]},{39:166,43:m,44:_,45:y},{34:[1,167]},{34:[1,168]},{34:[1,169]},{39:170,43:m,44:_,45:y},{34:[1,171]},t(p,[2,37],{35:[1,172]}),t(p,[2,38],{38:[1,173]}),t(p,[2,36],{33:[1,174]}),t(p,[2,39],{35:[1,175]}),t(p,[2,34],{38:[1,176]}),t(p,[2,35],{33:[1,177]}),t(p,[2,60],{42:[1,178]}),t(p,[2,73],{33:[1,179]}),t(p,[2,61],{42:[1,180]}),t(p,[2,84],{38:[1,181]}),t(p,[2,74],{33:[1,182]}),t(p,[2,83],{38:[1,183]}),t(p,[2,59],{42:[1,184]}),t(p,[2,72],{33:[1,185]}),t(p,[2,58],{42:[1,186]}),t(p,[2,78],{35:[1,187]}),t(p,[2,71],{33:[1,188]}),t(p,[2,77],{35:[1,189]}),t(p,[2,57],{42:[1,190]}),t(p,[2,85],{38:[1,191]}),t(p,[2,56],{42:[1,192]}),t(p,[2,79],{35:[1,193]}),t(p,[2,80],{35:[1,194]}),t(p,[2,86],{38:[1,195]}),t(p,[2,70],{33:[1,196]}),t(p,[2,81],{38:[1,197]}),t(p,[2,69],{33:[1,198]}),t(p,[2,75],{35:[1,199]}),t(p,[2,76],{35:[1,200]}),t(p,[2,82],{38:[1,201]}),{34:[1,202]},{39:203,43:m,44:_,45:y},{34:[1,204]},{34:[1,205]},{39:206,43:m,44:_,45:y},{34:[1,207]},{34:[1,208]},{34:[1,209]},{34:[1,210]},{39:211,43:m,44:_,45:y},{34:[1,212]},{39:213,43:m,44:_,45:y},{34:[1,214]},{34:[1,215]},{34:[1,216]},{34:[
1,217]},{34:[1,218]},{34:[1,219]},{34:[1,220]},{39:221,43:m,44:_,45:y},{34:[1,222]},{34:[1,223]},{34:[1,224]},{39:225,43:m,44:_,45:y},{34:[1,226]},{39:227,43:m,44:_,45:y},{34:[1,228]},{34:[1,229]},{34:[1,230]},{39:231,43:m,44:_,45:y},t(p,[2,40]),t(p,[2,42]),t(p,[2,41]),t(p,[2,43]),t(p,[2,45]),t(p,[2,44]),t(p,[2,101]),t(p,[2,102]),t(p,[2,99]),t(p,[2,100]),t(p,[2,104]),t(p,[2,103]),t(p,[2,108]),t(p,[2,107]),t(p,[2,106]),t(p,[2,105]),t(p,[2,110]),t(p,[2,109]),t(p,[2,98]),t(p,[2,97]),t(p,[2,96]),t(p,[2,95]),t(p,[2,93]),t(p,[2,94]),t(p,[2,92]),t(p,[2,91]),t(p,[2,90]),t(p,[2,89]),t(p,[2,87]),t(p,[2,88])],defaultActions:{9:[2,118],10:[2,1],11:[2,2],19:[2,3],27:[2,4],46:[2,120],47:[2,5]},parseError:function(M,S){if(S.recoverable)this.trace(M);else{var R=new Error(M);throw R.hash=S,R}},parse:function(M){var S=this,R=[0],A=[],L=[null],v=[],B=this.table,w="",D=0,N=0,z=2,X=1,ct=v.slice.call(arguments,1),J=Object.create(this.lexer),Y={yy:{}};for(var $ in this.yy)Object.prototype.hasOwnProperty.call(this.yy,$)&&(Y.yy[$]=this.yy[$]);J.setInput(M,Y.yy),Y.yy.lexer=J,Y.yy.parser=this,typeof J.yylloc>"u"&&(J.yylloc={});var lt=J.yylloc;v.push(lt);var ut=J.options&&J.options.ranges;typeof Y.yy.parseError=="function"?this.parseError=Y.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function W(){var P;return P=A.pop()||J.lex()||X,typeof P!="number"&&(P instanceof Array&&(A=P,P=A.pop()),P=S.symbols_[P]||P),P}for(var tt,K,it,Z,V={},Q,q,U,F;;){if(K=R[R.length-1],this.defaultActions[K]?it=this.defaultActions[K]:((tt===null||typeof tt>"u")&&(tt=W()),it=B[K]&&B[K][tt]),typeof it>"u"||!it.length||!it[0]){var j="";F=[];for(Q in B[K])this.terminals_[Q]&&Q>z&&F.push("'"+this.terminals_[Q]+"'");J.showPosition?j="Parse error on line "+(D+1)+`: -`+J.showPosition()+` -Expecting `+F.join(", ")+", got '"+(this.terminals_[tt]||tt)+"'":j="Parse error on line "+(D+1)+": Unexpected "+(tt==X?"end of 
input":"'"+(this.terminals_[tt]||tt)+"'"),this.parseError(j,{text:J.match,token:this.terminals_[tt]||tt,line:J.yylineno,loc:lt,expected:F})}if(it[0]instanceof Array&&it.length>1)throw new Error("Parse Error: multiple actions possible at state: "+K+", token: "+tt);switch(it[0]){case 1:R.push(tt),L.push(J.yytext),v.push(J.yylloc),R.push(it[1]),tt=null,N=J.yyleng,w=J.yytext,D=J.yylineno,lt=J.yylloc;break;case 2:if(q=this.productions_[it[1]][1],V.$=L[L.length-q],V._$={first_line:v[v.length-(q||1)].first_line,last_line:v[v.length-1].last_line,first_column:v[v.length-(q||1)].first_column,last_column:v[v.length-1].last_column},ut&&(V._$.range=[v[v.length-(q||1)].range[0],v[v.length-1].range[1]]),Z=this.performAction.apply(V,[w,N,D,Y.yy,it[1],L,v].concat(ct)),typeof Z<"u")return Z;q&&(R=R.slice(0,-1*q*2),L=L.slice(0,-1*q),v=v.slice(0,-1*q)),R.push(this.productions_[it[1]][0]),L.push(V.$),v.push(V._$),U=B[R[R.length-2]][R[R.length-1]],R.push(U);break;case 3:return!0}}return!0}},k=function(){var C={EOF:1,parseError:function(S,R){if(this.yy.parser)this.yy.parser.parseError(S,R);else throw new Error(S)},setInput:function(M,S){return this.yy=S||this.yy||{},this._input=M,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var M=this._input[0];this.yytext+=M,this.yyleng++,this.offset++,this.match+=M,this.matched+=M;var S=M.match(/(?:\r\n?|\n).*/g);return S?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),M},unput:function(M){var S=M.length,R=M.split(/(?:\r\n?|\n)/g);this._input=M+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-S),this.offset-=S;var 
A=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),R.length-1&&(this.yylineno-=R.length-1);var L=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:R?(R.length===A.length?this.yylloc.first_column:0)+A[A.length-R.length].length-R[0].length:this.yylloc.first_column-S},this.options.ranges&&(this.yylloc.range=[L[0],L[0]+this.yyleng-S]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). -`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(M){this.unput(this.match.slice(M))},pastInput:function(){var M=this.matched.substr(0,this.matched.length-this.match.length);return(M.length>20?"...":"")+M.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var M=this.match;return M.length<20&&(M+=this._input.substr(0,20-M.length)),(M.substr(0,20)+(M.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var M=this.pastInput(),S=new Array(M.length+1).join("-");return M+this.upcomingInput()+` -`+S+"^"},test_match:function(M,S){var 
R,A,L;if(this.options.backtrack_lexer&&(L={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(L.yylloc.range=this.yylloc.range.slice(0))),A=M[0].match(/(?:\r\n?|\n).*/g),A&&(this.yylineno+=A.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:A?A[A.length-1].length-A[A.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+M[0].length},this.yytext+=M[0],this.match+=M[0],this.matches=M,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(M[0].length),this.matched+=M[0],R=this.performAction.call(this,this.yy,this,S,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),R)return R;if(this._backtrack){for(var v in L)this[v]=L[v];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var M,S,R,A;this._more||(this.yytext="",this.match="");for(var L=this._currentRules(),v=0;v<L.length;v++)if(R=this._input.match(this.rules[L[v]]),R&&(!S||R[0].length>S[0].length)){if(S=R,A=v,this.options.backtrack_lexer){if(M=this.test_match(R,L[v]),M!==!1)return M;if(this._backtrack){S=!1;continue}else return!1}else if(!this.options.flex)break}return S?(M=this.test_match(S,L[A]),M!==!1?M:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var S=this.next();return S||this.lex()},begin:function(S){this.conditionStack.push(S)},popState:function(){var S=this.conditionStack.length-1;return S>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(S){return S=this.conditionStack.length-1-Math.abs(S||0),S>=0?this.conditionStack[S]:"INITIAL"},pushState:function(S){this.begin(S)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(S,R,A,L){switch(A){case 0:return this.begin("open_directive"),50;case 1:return this.begin("type_directive"),51;case 2:return this.popState(),this.begin("arg_directive"),9;case 3:return this.popState(),this.popState(),53;case 4:return 52;case 5:return this.begin("acc_title"),20;case 6:return this.popState(),"acc_title_value";case 7:return this.begin("acc_descr"),22;case 8:return this.popState(),"acc_descr_value";case 9:this.begin("acc_descr_multiline");break;case 10:this.popState();break;case 11:return"acc_descr_multiline_value";case 12:return 14;case 13:break;case 14:break;case 15:return 6;case 16:return 40;case 17:return 33;case 18:return 38;case 19:return 42;case 20:return 43;case 21:return 44;case 22:return 45;case 23:return 35;case 24:return 29;case 25:return 30;case 26:return 37;case 27:return 32;case 28:return 27;case 29:return 10;case 30:return 10;case 31:return 9;case 32:return"CARET";case 33:this.begin("options");break;case 34:this.popState();break;case 35:return 13;case 36:return 36;case 37:this.begin("string");break;case 38:this.popState();break;case 39:return 34;case 40:return 31;case 41:return 28;case 42:return 
8}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:(\r?\n)+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:gitGraph\b)/i,/^(?:commit(?=\s|$))/i,/^(?:id:)/i,/^(?:type:)/i,/^(?:msg:)/i,/^(?:NORMAL\b)/i,/^(?:REVERSE\b)/i,/^(?:HIGHLIGHT\b)/i,/^(?:tag:)/i,/^(?:branch(?=\s|$))/i,/^(?:order:)/i,/^(?:merge(?=\s|$))/i,/^(?:cherry-pick(?=\s|$))/i,/^(?:checkout(?=\s|$))/i,/^(?:LR\b)/i,/^(?:BT\b)/i,/^(?::)/i,/^(?:\^)/i,/^(?:options\r?\n)/i,/^(?:[ \r\n\t]+end\b)/i,/^(?:[\s\S]+(?=[ \r\n\t]+end))/i,/^(?:["]["])/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[0-9]+(?=\s|$))/i,/^(?:\w([-\./\w]*[-\w])?)/i,/^(?:$)/i,/^(?:\s+)/i],conditions:{acc_descr_multiline:{rules:[10,11],inclusive:!1},acc_descr:{rules:[8],inclusive:!1},acc_title:{rules:[6],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},options:{rules:[34,35],inclusive:!1},string:{rules:[38,39],inclusive:!1},INITIAL:{rules:[0,5,7,9,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,36,37,40,41,42,43],inclusive:!0}}};return C}();x.lexer=k;function T(){this.yy={}}return T.prototype=x,x.Parser=T,new T}();hg.parser=hg;const BX=t=>t.match(/^\s*gitGraph/)!==null;let fg="",l1="",dg="";const pg=t=>ai(t,nt()),ci=function(){fg="",dg="",l1=""},Yn=function(t){fg=pg(t).replace(/^\s+/g,"")},ui=function(){return fg||l1},hi=function(t){dg=pg(t).replace(/\n\s+/g,` -`)},fi=function(){return dg},c1=function(t){l1=pg(t)},u1=function(){return l1};let h1=nt().gitGraph.mainBranchName,DX=nt().gitGraph.mainBranchOrder,kr={},cn=null,Dc={};Dc[h1]={name:h1,order:DX};let pr={};pr[h1]=cn;let Rr=h1,Vw="LR",Ys=0;function gg(){return lw({length:7})}const OX=function(t,e,r){Xe.parseDirective(this,t,e,r)};function FX(t,e){const 
r=Object.create(null);return t.reduce((n,i)=>{const a=e(i);return r[a]||(r[a]=!0,n.push(i)),n},[])}const PX=function(t){Vw=t};let zw={};const qX=function(t){H.debug("options str",t),t=t&&t.trim(),t=t||"{}";try{zw=JSON.parse(t)}catch(e){H.error("error while parsing gitGraph options",e.message)}},VX=function(){return zw},zX=function(t,e,r,n){H.debug("Entering commit:",t,e,r,n),e=pe.sanitizeText(e,nt()),t=pe.sanitizeText(t,nt()),n=pe.sanitizeText(n,nt());const i={id:e||Ys+"-"+gg(),message:t,seq:Ys++,type:r||Oc.NORMAL,tag:n||"",parents:cn==null?[]:[cn.id],branch:Rr};cn=i,kr[i.id]=i,pr[Rr]=i.id,H.debug("in pushCommit "+i.id)},YX=function(t,e){if(t=pe.sanitizeText(t,nt()),typeof pr[t]>"u")pr[t]=cn!=null?cn.id:null,Dc[t]={name:t,order:e?parseInt(e,10):null},Yw(t),H.debug("in createBranch");else{let r=new Error('Trying to create an existing branch. (Help: Either use a new name if you want create a new branch or try using "checkout '+t+'")');throw r.hash={text:"branch "+t,token:"branch "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:['"checkout '+t+'"']},r}},UX=function(t,e,r,n){t=pe.sanitizeText(t,nt()),e=pe.sanitizeText(e,nt());const i=kr[pr[Rr]],a=kr[pr[t]];if(Rr===t){let o=new Error('Incorrect usage of "merge". Cannot merge a branch to itself');throw o.hash={text:"merge "+t,token:"merge "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["branch abc"]},o}else if(typeof i>"u"||!i){let o=new Error('Incorrect usage of "merge". Current branch ('+Rr+")has no commits");throw o.hash={text:"merge "+t,token:"merge "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["commit"]},o}else if(typeof pr[t]>"u"){let o=new Error('Incorrect usage of "merge". 
Branch to be merged ('+t+") does not exist");throw o.hash={text:"merge "+t,token:"merge "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["branch "+t]},o}else if(typeof a>"u"||!a){let o=new Error('Incorrect usage of "merge". Branch to be merged ('+t+") has no commits");throw o.hash={text:"merge "+t,token:"merge "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:['"commit"']},o}else if(i===a){let o=new Error('Incorrect usage of "merge". Both branches have same head');throw o.hash={text:"merge "+t,token:"merge "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["branch abc"]},o}else if(e&&typeof kr[e]<"u"){let o=new Error('Incorrect usage of "merge". Commit with id:'+e+" already exists, use different custom Id");throw o.hash={text:"merge "+t+e+r+n,token:"merge "+t+e+r+n,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["merge "+t+" "+e+"_UNIQUE "+r+" "+n]},o}const s={id:e||Ys+"-"+gg(),message:"merged branch "+t+" into "+Rr,seq:Ys++,parents:[cn==null?null:cn.id,pr[t]],branch:Rr,type:Oc.MERGE,customType:r,customId:!!e,tag:n||""};cn=s,kr[s.id]=s,pr[Rr]=s.id,H.debug(pr),H.debug("in mergeBranch")},WX=function(t,e,r){if(H.debug("Entering cherryPick:",t,e,r),t=pe.sanitizeText(t,nt()),e=pe.sanitizeText(e,nt()),r=pe.sanitizeText(r,nt()),!t||typeof kr[t]>"u"){let a=new Error('Incorrect usage of "cherryPick". Source commit id should exist and provided');throw a.hash={text:"cherryPick "+t+" "+e,token:"cherryPick "+t+" "+e,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["cherry-pick abc"]},a}let n=kr[t],i=n.branch;if(n.type===Oc.MERGE){let a=new Error('Incorrect usage of "cherryPick". 
Source commit should not be a merge commit');throw a.hash={text:"cherryPick "+t+" "+e,token:"cherryPick "+t+" "+e,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["cherry-pick abc"]},a}if(!e||typeof kr[e]>"u"){if(i===Rr){let o=new Error('Incorrect usage of "cherryPick". Source commit is already on current branch');throw o.hash={text:"cherryPick "+t+" "+e,token:"cherryPick "+t+" "+e,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["cherry-pick abc"]},o}const a=kr[pr[Rr]];if(typeof a>"u"||!a){let o=new Error('Incorrect usage of "cherry-pick". Current branch ('+Rr+")has no commits");throw o.hash={text:"cherryPick "+t+" "+e,token:"cherryPick "+t+" "+e,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["cherry-pick abc"]},o}const s={id:Ys+"-"+gg(),message:"cherry-picked "+n+" into "+Rr,seq:Ys++,parents:[cn==null?null:cn.id,n.id],branch:Rr,type:Oc.CHERRY_PICK,tag:r!=null?r:"cherry-pick:"+n.id};cn=s,kr[s.id]=s,pr[Rr]=s.id,H.debug(pr),H.debug("in cherryPick")}},Yw=function(t){if(t=pe.sanitizeText(t,nt()),typeof pr[t]>"u"){let e=new Error('Trying to checkout branch which is not yet created. 
(Help try using "branch '+t+'")');throw e.hash={text:"checkout "+t,token:"checkout "+t,line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:['"branch '+t+'"']},e}else{Rr=t;const e=pr[Rr];cn=kr[e]}};function Uw(t,e,r){const n=t.indexOf(e);n===-1?t.push(r):t.splice(n,1,r)}function Ww(t){const e=t.reduce((i,a)=>i.seq>a.seq?i:a,t[0]);let r="";t.forEach(function(i){i===e?r+=" *":r+=" |"});const n=[r,e.id,e.seq];for(let i in pr)pr[i]===e.id&&n.push(i);if(H.debug(n.join(" ")),e.parents&&e.parents.length==2){const i=kr[e.parents[0]];Uw(t,e,i),t.push(kr[e.parents[1]])}else{if(e.parents.length==0)return;{const i=kr[e.parents];Uw(t,e,i)}}t=FX(t,i=>i.id),Ww(t)}const HX=function(){H.debug(kr);const t=Hw()[0];Ww([t])},GX=function(){kr={},cn=null;let t=nt().gitGraph.mainBranchName,e=nt().gitGraph.mainBranchOrder;pr={},pr[t]=null,Dc={},Dc[t]={name:t,order:e},Rr=t,Ys=0,ci()},jX=function(){return Object.values(Dc).map((e,r)=>e.order!==null?e:{...e,order:parseFloat(`0.${r}`,10)}).sort((e,r)=>e.order-r.order).map(({name:e})=>({name:e}))},$X=function(){return pr},XX=function(){return kr},Hw=function(){const t=Object.keys(kr).map(function(e){return kr[e]});return t.forEach(function(e){H.debug(e.id)}),t.sort((e,r)=>e.seq-r.seq),t},KX=function(){return Rr},ZX=function(){return Vw},QX=function(){return cn},Oc={NORMAL:0,REVERSE:1,HIGHLIGHT:2,MERGE:3,CHERRY_PICK:4},JX={parseDirective:OX,getConfig:()=>nt().gitGraph,setDirection:PX,setOptions:qX,getOptions:VX,commit:zX,branch:YX,merge:UX,cherryPick:WX,checkout:Yw,prettyPrint:HX,clear:GX,getBranchesAsObjArray:jX,getBranches:$X,getCommits:XX,getCommitsArray:Hw,getCurrentBranch:KX,getDirection:ZX,getHead:QX,setAccTitle:Yn,getAccTitle:ui,getAccDescription:fi,setAccDescription:hi,commitType:Oc};function bn(t,e,r){if(typeof e.insert>"u")return;let n=t.getAccTitle(),i=t.getAccDescription();e.attr("role","img").attr("aria-labelledby","chart-title-"+r+" 
chart-desc-"+r),e.insert("desc",":first-child").attr("id","chart-desc-"+r).text(i),e.insert("title",":first-child").attr("id","chart-title-"+r).text(n)}let Fc={};const Sn={NORMAL:0,REVERSE:1,HIGHLIGHT:2,MERGE:3,CHERRY_PICK:4},Us=8;let _n={},f1={},Pc=[],d1=0;const tK=()=>{_n={},f1={},Fc={},d1=0,Pc=[]},eK=t=>{const e=document.createElementNS("http://www.w3.org/2000/svg","text");let r=[];typeof t=="string"?r=t.split(/\\n|\n|<br\s*\/?>/gi):Array.isArray(t)?r=t:r=[];for(let n=0;n<r.length;n++){const i=document.createElementNS("http://www.w3.org/2000/svg","tspan");i.setAttributeNS("http://www.w3.org/XML/1998/namespace","xml:space","preserve"),i.setAttribute("dy","1em"),i.setAttribute("x","0"),i.setAttribute("class","row"),i.textContent=r[n].trim(),e.appendChild(i)}return e},Gw=(t,e,r)=>{const n=Zo().gitGraph,i=t.append("g").attr("class","commit-bullets"),a=t.append("g").attr("class","commit-labels");let s=0;Object.keys(e).sort((u,h)=>e[u].seq-e[h].seq).forEach(u=>{const h=e[u],d=_n[h.branch].pos,f=s+10;if(r){let p,m=typeof h.customType<"u"&&h.customType!==""?h.customType:h.type;switch(m){case Sn.NORMAL:p="commit-normal";break;case Sn.REVERSE:p="commit-reverse";break;case Sn.HIGHLIGHT:p="commit-highlight";break;case Sn.MERGE:p="commit-merge";break;case Sn.CHERRY_PICK:p="commit-cherry-pick";break;default:p="commit-normal"}if(m===Sn.HIGHLIGHT){const _=i.append("rect");_.attr("x",f-10),_.attr("y",d-10),_.attr("height",20),_.attr("width",20),_.attr("class",`commit ${h.id} commit-highlight${_n[h.branch].index%Us} ${p}-outer`),i.append("rect").attr("x",f-6).attr("y",d-6).attr("height",12).attr("width",12).attr("class",`commit ${h.id} commit${_n[h.branch].index%Us} ${p}-inner`)}else if(m===Sn.CHERRY_PICK)i.append("circle").attr("cx",f).attr("cy",d).attr("r",10).attr("class",`commit ${h.id} ${p}`),i.append("circle").attr("cx",f-3).attr("cy",d+2).attr("r",2.75).attr("fill","#fff").attr("class",`commit ${h.id} 
${p}`),i.append("circle").attr("cx",f+3).attr("cy",d+2).attr("r",2.75).attr("fill","#fff").attr("class",`commit ${h.id} ${p}`),i.append("line").attr("x1",f+3).attr("y1",d+1).attr("x2",f).attr("y2",d-5).attr("stroke","#fff").attr("class",`commit ${h.id} ${p}`),i.append("line").attr("x1",f-3).attr("y1",d+1).attr("x2",f).attr("y2",d-5).attr("stroke","#fff").attr("class",`commit ${h.id} ${p}`);else{const _=i.append("circle");if(_.attr("cx",f),_.attr("cy",d),_.attr("r",h.type===Sn.MERGE?9:10),_.attr("class",`commit ${h.id} commit${_n[h.branch].index%Us}`),m===Sn.MERGE){const y=i.append("circle");y.attr("cx",f),y.attr("cy",d),y.attr("r",6),y.attr("class",`commit ${p} ${h.id} commit${_n[h.branch].index%Us}`)}m===Sn.REVERSE&&i.append("path").attr("d",`M ${f-5},${d-5}L${f+5},${d+5}M${f-5},${d+5}L${f+5},${d-5}`).attr("class",`commit ${p} ${h.id} commit${_n[h.branch].index%Us}`)}}if(f1[h.id]={x:s+10,y:d},r){if(h.type!==Sn.CHERRY_PICK&&(h.customId&&h.type===Sn.MERGE||h.type!==Sn.MERGE)&&n.showCommitLabel){const _=a.append("g"),y=_.insert("rect").attr("class","commit-label-bkg"),b=_.append("text").attr("x",s).attr("y",d+25).attr("class","commit-label").text(h.id);let x=b.node().getBBox();if(y.attr("x",s+10-x.width/2-2).attr("y",d+13.5).attr("width",x.width+2*2).attr("height",x.height+2*2),b.attr("x",s+10-x.width/2),n.rotateCommitLabel){let k=-7.5-(x.width+10)/25*9.5,T=10+x.width/25*8.5;_.attr("transform","translate("+k+", "+T+") rotate("+-45+", "+s+", "+d+")")}}if(h.tag){const _=a.insert("polygon"),y=a.append("circle"),b=a.append("text").attr("y",d-16).attr("class","tag-label").text(h.tag);let x=b.node().getBBox();b.attr("x",s+10-x.width/2);const k=x.height/2,T=d-19.2;_.attr("class","tag-label-bkg").attr("points",` - ${s-x.width/2-4/2},${T+2} - ${s-x.width/2-4/2},${T-2} - ${s+10-x.width/2-4},${T-k-2} - ${s+10+x.width/2+4},${T-k-2} - ${s+10+x.width/2+4},${T+k+2} - 
${s+10-x.width/2-4},${T+k+2}`),y.attr("cx",s-x.width/2+4/2).attr("cy",T).attr("r",1.5).attr("class","tag-hole")}}s+=50,s>d1&&(d1=s)})},rK=(t,e,r)=>Object.keys(r).filter(a=>r[a].branch===e.branch&&r[a].seq>t.seq&&r[a].seq<e.seq).length>0,yg=(t,e,r)=>{const n=r||0,i=t+Math.abs(t-e)/2;if(n>5)return i;let a=!0;for(let o=0;o<Pc.length;o++)Math.abs(Pc[o]-i)<10&&(a=!1);if(a)return Pc.push(i),i;const s=Math.abs(t-e);return yg(t,e-s/5,n+1)},nK=(t,e,r,n)=>{const i=f1[e.id],a=f1[r.id],s=rK(e,r,n);let o="",l="",u=0,h=0,d=_n[r.branch].index,f;if(s){o="A 10 10, 0, 0, 0,",l="A 10 10, 0, 0, 1,",u=10,h=10,d=_n[r.branch].index;const p=i.y<a.y?yg(i.y,a.y):yg(a.y,i.y);i.y<a.y?f=`M ${i.x} ${i.y} L ${i.x} ${p-u} ${o} ${i.x+h} ${p} L ${a.x-u} ${p} ${l} ${a.x} ${p+h} L ${a.x} ${a.y}`:f=`M ${i.x} ${i.y} L ${i.x} ${p+u} ${l} ${i.x+h} ${p} L ${a.x-u} ${p} ${o} ${a.x} ${p-h} L ${a.x} ${a.y}`}else i.y<a.y&&(o="A 20 20, 0, 0, 0,",u=20,h=20,d=_n[r.branch].index,f=`M ${i.x} ${i.y} L ${i.x} ${a.y-u} ${o} ${i.x+h} ${a.y} L ${a.x} ${a.y}`),i.y>a.y&&(o="A 20 20, 0, 0, 0,",u=20,h=20,d=_n[e.branch].index,f=`M ${i.x} ${i.y} L ${a.x-u} ${i.y} ${o} ${a.x} ${i.y-h} L ${a.x} ${a.y}`),i.y===a.y&&(d=_n[e.branch].index,f=`M ${i.x} ${i.y} L ${i.x} ${a.y-u} ${o} ${i.x+h} ${a.y} L ${a.x} ${a.y}`);t.append("path").attr("d",f).attr("class","arrow arrow"+d%Us)},iK=(t,e)=>{const r=t.append("g").attr("class","commit-arrows");Object.keys(e).forEach(n=>{const i=e[n];i.parents&&i.parents.length>0&&i.parents.forEach(a=>{nK(r,e[a],i,e)})})},aK=(t,e)=>{const r=Zo().gitGraph,n=t.append("g");e.forEach((i,a)=>{const s=a%Us,o=_n[i.name].pos,l=n.append("line");l.attr("x1",0),l.attr("y1",o),l.attr("x2",d1),l.attr("y2",o),l.attr("class","branch branch"+s),Pc.push(o);let u=i.name;const h=eK(u),d=n.insert("rect"),p=n.insert("g").attr("class","branchLabel").insert("g").attr("class","label branch-label"+s);p.node().appendChild(h);let m=h.getBBox();d.attr("class","branchLabelBkg 
label"+s).attr("rx",4).attr("ry",4).attr("x",-m.width-4-(r.rotateCommitLabel===!0?30:0)).attr("y",-m.height/2+8).attr("width",m.width+18).attr("height",m.height+4),p.attr("transform","translate("+(-m.width-14-(r.rotateCommitLabel===!0?30:0))+", "+(o-m.height/2-1)+")"),d.attr("transform","translate("+-19+", "+(o-m.height/2)+")")})},sK={draw:function(t,e,r,n){tK();const i=Zo(),a=Zo().gitGraph;H.debug("in gitgraph renderer",t+` -`,"id:",e,r),Fc=n.db.getCommits();const s=n.db.getBranchesAsObjArray();let o=0;s.forEach((u,h)=>{_n[u.name]={pos:o,index:h},o+=50+(a.rotateCommitLabel?40:0)});const l=St(`[id="${e}"]`);bn(n.db,l,e),Gw(l,Fc,!1),a.showBranches&&aK(l,s),iK(l,Fc),Gw(l,Fc,!0),Fw(void 0,l,a.diagramPadding,i.useMaxWidth)}},oK=t=>` - .commit-id, - .commit-msg, - .branch-label { - fill: lightgrey; - color: lightgrey; - font-family: 'trebuchet ms', verdana, arial, sans-serif; - font-family: var(--mermaid-font-family); - } - ${[0,1,2,3,4,5,6,7].map(e=>` - .branch-label${e} { fill: ${t["gitBranchLabel"+e]}; } - .commit${e} { stroke: ${t["git"+e]}; fill: ${t["git"+e]}; } - .commit-highlight${e} { stroke: ${t["gitInv"+e]}; fill: ${t["gitInv"+e]}; } - .label${e} { fill: ${t["git"+e]}; } - .arrow${e} { stroke: ${t["git"+e]}; } - `).join(` -`)} - - .branch { - stroke-width: 1; - stroke: ${t.lineColor}; - stroke-dasharray: 2; - } - .commit-label { font-size: ${t.commitLabelFontSize}; fill: ${t.commitLabelColor};} - .commit-label-bkg { font-size: ${t.commitLabelFontSize}; fill: ${t.commitLabelBackground}; opacity: 0.5; } - .tag-label { font-size: ${t.tagLabelFontSize}; fill: ${t.tagLabelColor};} - .tag-label-bkg { fill: ${t.tagLabelBackground}; stroke: ${t.tagLabelBorder}; } - .tag-hole { fill: ${t.textColor}; } - - .commit-merge { - stroke: ${t.primaryColor}; - fill: ${t.primaryColor}; - } - .commit-reverse { - stroke: ${t.primaryColor}; - fill: ${t.primaryColor}; - stroke-width: 3; - } - .commit-highlight-outer { - } - .commit-highlight-inner { - stroke: ${t.primaryColor}; - 
fill: ${t.primaryColor}; - } - - .arrow { stroke-width: 8; stroke-linecap: round; fill: none} - } -`;var qc=function(){var t=function(zt,wt,bt,Et){for(bt=bt||{},Et=zt.length;Et--;bt[zt[Et]]=wt);return bt},e=[1,6],r=[1,7],n=[1,8],i=[1,9],a=[1,16],s=[1,11],o=[1,12],l=[1,13],u=[1,14],h=[1,15],d=[1,27],f=[1,33],p=[1,34],m=[1,35],_=[1,36],y=[1,37],b=[1,72],x=[1,73],k=[1,74],T=[1,75],C=[1,76],M=[1,77],S=[1,78],R=[1,38],A=[1,39],L=[1,40],v=[1,41],B=[1,42],w=[1,43],D=[1,44],N=[1,45],z=[1,46],X=[1,47],ct=[1,48],J=[1,49],Y=[1,50],$=[1,51],lt=[1,52],ut=[1,53],W=[1,54],tt=[1,55],K=[1,56],it=[1,57],Z=[1,59],V=[1,60],Q=[1,61],q=[1,62],U=[1,63],F=[1,64],j=[1,65],P=[1,66],et=[1,67],at=[1,68],It=[1,69],Lt=[24,52],Rt=[24,44,46,47,48,49,50,51,52,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84],Ct=[15,24,44,46,47,48,49,50,51,52,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84],pt=[1,94],mt=[1,95],vt=[1,96],Tt=[1,97],ft=[15,24,52],le=[7,8,9,10,18,22,25,26,27,28],Dt=[15,24,43,52],Gt=[15,24,43,52,86,87,89,90],$t=[15,43],Qt=[44,46,47,48,49,50,51,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84],we={trace:function(){},yy:{},symbols_:{error:2,start:3,mermaidDoc:4,direction:5,directive:6,direction_tb:7,direction_bt:8,direction_rl:9,direction_lr:10,graphConfig:11,openDirective:12,typeDirective:13,closeDirective:14,NEWLINE:15,":":16,argDirective:17,open_directive:18,type_directive:19,arg_directive:20,close_directive:21,C4_CONTEXT:22,statements:23,EOF:24,C4_CONTAINER:25,C4_COMPONENT:26,C4_DYNAMIC:27,C4_DEPLOYMENT:28,otherStatements:29,diagramStatements:30,otherStatement:31,title:32,accDescription:33,acc_title:34,acc_title_value:35,acc_descr:36,acc_descr_value:37,acc_descr_multiline_value:38,boundaryStatement:39,boundaryStartStatement:40,boundaryStopStatement:41,boundaryStart:42,LBRACE:43,ENTERPRISE_BOUNDARY:44,attributes:45,SYSTEM_BOUNDARY:46,BOUNDARY:47,CONTAINER_
BOUNDARY:48,NODE:49,NODE_L:50,NODE_R:51,RBRACE:52,diagramStatement:53,PERSON:54,PERSON_EXT:55,SYSTEM:56,SYSTEM_DB:57,SYSTEM_QUEUE:58,SYSTEM_EXT:59,SYSTEM_EXT_DB:60,SYSTEM_EXT_QUEUE:61,CONTAINER:62,CONTAINER_DB:63,CONTAINER_QUEUE:64,CONTAINER_EXT:65,CONTAINER_EXT_DB:66,CONTAINER_EXT_QUEUE:67,COMPONENT:68,COMPONENT_DB:69,COMPONENT_QUEUE:70,COMPONENT_EXT:71,COMPONENT_EXT_DB:72,COMPONENT_EXT_QUEUE:73,REL:74,BIREL:75,REL_U:76,REL_D:77,REL_L:78,REL_R:79,REL_B:80,REL_INDEX:81,UPDATE_EL_STYLE:82,UPDATE_REL_STYLE:83,UPDATE_LAYOUT_CONFIG:84,attribute:85,STR:86,STR_KEY:87,STR_VALUE:88,ATTRIBUTE:89,ATTRIBUTE_EMPTY:90,$accept:0,$end:1},terminals_:{2:"error",7:"direction_tb",8:"direction_bt",9:"direction_rl",10:"direction_lr",15:"NEWLINE",16:":",18:"open_directive",19:"type_directive",20:"arg_directive",21:"close_directive",22:"C4_CONTEXT",24:"EOF",25:"C4_CONTAINER",26:"C4_COMPONENT",27:"C4_DYNAMIC",28:"C4_DEPLOYMENT",32:"title",33:"accDescription",34:"acc_title",35:"acc_title_value",36:"acc_descr",37:"acc_descr_value",38:"acc_descr_multiline_value",43:"LBRACE",44:"ENTERPRISE_BOUNDARY",46:"SYSTEM_BOUNDARY",47:"BOUNDARY",48:"CONTAINER_BOUNDARY",49:"NODE",50:"NODE_L",51:"NODE_R",52:"RBRACE",54:"PERSON",55:"PERSON_EXT",56:"SYSTEM",57:"SYSTEM_DB",58:"SYSTEM_QUEUE",59:"SYSTEM_EXT",60:"SYSTEM_EXT_DB",61:"SYSTEM_EXT_QUEUE",62:"CONTAINER",63:"CONTAINER_DB",64:"CONTAINER_QUEUE",65:"CONTAINER_EXT",66:"CONTAINER_EXT_DB",67:"CONTAINER_EXT_QUEUE",68:"COMPONENT",69:"COMPONENT_DB",70:"COMPONENT_QUEUE",71:"COMPONENT_EXT",72:"COMPONENT_EXT_DB",73:"COMPONENT_EXT_QUEUE",74:"REL",75:"BIREL",76:"REL_U",77:"REL_D",78:"REL_L",79:"REL_R",80:"REL_B",81:"REL_INDEX",82:"UPDATE_EL_STYLE",83:"UPDATE_REL_STYLE",84:"UPDATE_LAYOUT_CONFIG",86:"STR",87:"STR_KEY",88:"STR_VALUE",89:"ATTRIBUTE",90:"ATTRIBUTE_EMPTY"},productions_:[0,[3,1],[3,1],[3,2],[5,1],[5,1],[5,1],[5,1],[4,1],[6,4],[6,6],[12,1],[13,1],[17,1],[14,1],[11,4],[11,4],[11,4],[11,4],[11,4],[23,1],[23,1],[23,2],[29,1],[29,2],[29,3],[31,1],[31,1],[31,2],[
31,2],[31,1],[39,3],[40,3],[40,3],[40,4],[42,2],[42,2],[42,2],[42,2],[42,2],[42,2],[42,2],[41,1],[30,1],[30,2],[30,3],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,1],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[53,2],[45,1],[45,2],[85,1],[85,2],[85,1],[85,1]],performAction:function(wt,bt,Et,kt,Ut,gt,he){var yt=gt.length-1;switch(Ut){case 4:kt.setDirection("TB");break;case 5:kt.setDirection("BT");break;case 6:kt.setDirection("RL");break;case 7:kt.setDirection("LR");break;case 11:kt.parseDirective("%%{","open_directive");break;case 12:break;case 13:gt[yt]=gt[yt].trim().replace(/'/g,'"'),kt.parseDirective(gt[yt],"arg_directive");break;case 14:kt.parseDirective("}%%","close_directive","c4Context");break;case 15:case 16:case 17:case 18:case 19:kt.setC4Type(gt[yt-3]);break;case 26:kt.setTitle(gt[yt].substring(6)),this.$=gt[yt].substring(6);break;case 27:kt.setAccDescription(gt[yt].substring(15)),this.$=gt[yt].substring(15);break;case 28:this.$=gt[yt].trim(),kt.setTitle(this.$);break;case 29:case 30:this.$=gt[yt].trim(),kt.setAccDescription(this.$);break;case 35:case 36:gt[yt].splice(2,0,"ENTERPRISE"),kt.addPersonOrSystemBoundary(...gt[yt]),this.$=gt[yt];break;case 37:kt.addPersonOrSystemBoundary(...gt[yt]),this.$=gt[yt];break;case 38:gt[yt].splice(2,0,"CONTAINER"),kt.addContainerBoundary(...gt[yt]),this.$=gt[yt];break;case 39:kt.addDeploymentNode("node",...gt[yt]),this.$=gt[yt];break;case 40:kt.addDeploymentNode("nodeL",...gt[yt]),this.$=gt[yt];break;case 41:kt.addDeploymentNode("nodeR",...gt[yt]),this.$=gt[yt];break;case 42:kt.popBoundaryParseStack();break;case 46:kt.addPersonOrSystem("person",...gt[yt]),this.$=gt[yt];break;case 47:kt.addPersonOrSystem("external_person",...gt[yt]),this.$=gt[yt];break;case 48:kt.addPersonOrSystem("system",...gt[yt]),this.$=gt[yt];break;case 49:kt.addPersonOrSystem("system_db",...gt[yt]),this.$=gt[yt];break;case 
50:kt.addPersonOrSystem("system_queue",...gt[yt]),this.$=gt[yt];break;case 51:kt.addPersonOrSystem("external_system",...gt[yt]),this.$=gt[yt];break;case 52:kt.addPersonOrSystem("external_system_db",...gt[yt]),this.$=gt[yt];break;case 53:kt.addPersonOrSystem("external_system_queue",...gt[yt]),this.$=gt[yt];break;case 54:kt.addContainer("container",...gt[yt]),this.$=gt[yt];break;case 55:kt.addContainer("container_db",...gt[yt]),this.$=gt[yt];break;case 56:kt.addContainer("container_queue",...gt[yt]),this.$=gt[yt];break;case 57:kt.addContainer("external_container",...gt[yt]),this.$=gt[yt];break;case 58:kt.addContainer("external_container_db",...gt[yt]),this.$=gt[yt];break;case 59:kt.addContainer("external_container_queue",...gt[yt]),this.$=gt[yt];break;case 60:kt.addComponent("component",...gt[yt]),this.$=gt[yt];break;case 61:kt.addComponent("component_db",...gt[yt]),this.$=gt[yt];break;case 62:kt.addComponent("component_queue",...gt[yt]),this.$=gt[yt];break;case 63:kt.addComponent("external_component",...gt[yt]),this.$=gt[yt];break;case 64:kt.addComponent("external_component_db",...gt[yt]),this.$=gt[yt];break;case 65:kt.addComponent("external_component_queue",...gt[yt]),this.$=gt[yt];break;case 67:kt.addRel("rel",...gt[yt]),this.$=gt[yt];break;case 68:kt.addRel("birel",...gt[yt]),this.$=gt[yt];break;case 69:kt.addRel("rel_u",...gt[yt]),this.$=gt[yt];break;case 70:kt.addRel("rel_d",...gt[yt]),this.$=gt[yt];break;case 71:kt.addRel("rel_l",...gt[yt]),this.$=gt[yt];break;case 72:kt.addRel("rel_r",...gt[yt]),this.$=gt[yt];break;case 73:kt.addRel("rel_b",...gt[yt]),this.$=gt[yt];break;case 74:gt[yt].splice(0,1),kt.addRel("rel",...gt[yt]),this.$=gt[yt];break;case 75:kt.updateElStyle("update_el_style",...gt[yt]),this.$=gt[yt];break;case 76:kt.updateRelStyle("update_rel_style",...gt[yt]),this.$=gt[yt];break;case 77:kt.updateLayoutConfig("update_layout_config",...gt[yt]),this.$=gt[yt];break;case 78:this.$=[gt[yt]];break;case 79:gt[yt].unshift(gt[yt-1]),this.$=gt[yt];break;case 
80:case 82:this.$=gt[yt].trim();break;case 81:let ne={};ne[gt[yt-1].trim()]=gt[yt].trim(),this.$=ne;break;case 83:this.$="";break}},table:[{3:1,4:2,5:3,6:4,7:e,8:r,9:n,10:i,11:5,12:10,18:a,22:s,25:o,26:l,27:u,28:h},{1:[3]},{1:[2,1]},{1:[2,2]},{3:17,4:2,5:3,6:4,7:e,8:r,9:n,10:i,11:5,12:10,18:a,22:s,25:o,26:l,27:u,28:h},{1:[2,8]},{1:[2,4]},{1:[2,5]},{1:[2,6]},{1:[2,7]},{13:18,19:[1,19]},{15:[1,20]},{15:[1,21]},{15:[1,22]},{15:[1,23]},{15:[1,24]},{19:[2,11]},{1:[2,3]},{14:25,16:[1,26],21:d},t([16,21],[2,12]),{23:28,29:29,30:30,31:31,32:f,33:p,34:m,36:_,38:y,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{23:79,29:29,30:30,31:31,32:f,33:p,34:m,36:_,38:y,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{23:80,29:29,30:30,31:31,32:f,33:p,34:m,36:_,38:y,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{23:81,29:29,30:30,31:31,32:f,33:p,34:m,36:_,38:y,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{23:82,29:29,30:30,31:31,32:f,33:p,34:m,36:_,38:y,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{15:[1,83]},{17:84,20:[1,85]},{15:[2,14]},{24:[1,86]},t(Lt,[2,20],{53:32,39:58,40:70,42:71,30:87,44:b,46:x,47:k,48:T,49:C,50
:M,51:S,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It}),t(Lt,[2,21]),t(Rt,[2,23],{15:[1,88]}),t(Lt,[2,43],{15:[1,89]}),t(Ct,[2,26]),t(Ct,[2,27]),{35:[1,90]},{37:[1,91]},t(Ct,[2,30]),{45:92,85:93,86:pt,87:mt,89:vt,90:Tt},{45:98,85:93,86:pt,87:mt,89:vt,90:Tt},{45:99,85:93,86:pt,87:mt,89:vt,90:Tt},{45:100,85:93,86:pt,87:mt,89:vt,90:Tt},{45:101,85:93,86:pt,87:mt,89:vt,90:Tt},{45:102,85:93,86:pt,87:mt,89:vt,90:Tt},{45:103,85:93,86:pt,87:mt,89:vt,90:Tt},{45:104,85:93,86:pt,87:mt,89:vt,90:Tt},{45:105,85:93,86:pt,87:mt,89:vt,90:Tt},{45:106,85:93,86:pt,87:mt,89:vt,90:Tt},{45:107,85:93,86:pt,87:mt,89:vt,90:Tt},{45:108,85:93,86:pt,87:mt,89:vt,90:Tt},{45:109,85:93,86:pt,87:mt,89:vt,90:Tt},{45:110,85:93,86:pt,87:mt,89:vt,90:Tt},{45:111,85:93,86:pt,87:mt,89:vt,90:Tt},{45:112,85:93,86:pt,87:mt,89:vt,90:Tt},{45:113,85:93,86:pt,87:mt,89:vt,90:Tt},{45:114,85:93,86:pt,87:mt,89:vt,90:Tt},{45:115,85:93,86:pt,87:mt,89:vt,90:Tt},{45:116,85:93,86:pt,87:mt,89:vt,90:Tt},t(ft,[2,66]),{45:117,85:93,86:pt,87:mt,89:vt,90:Tt},{45:118,85:93,86:pt,87:mt,89:vt,90:Tt},{45:119,85:93,86:pt,87:mt,89:vt,90:Tt},{45:120,85:93,86:pt,87:mt,89:vt,90:Tt},{45:121,85:93,86:pt,87:mt,89:vt,90:Tt},{45:122,85:93,86:pt,87:mt,89:vt,90:Tt},{45:123,85:93,86:pt,87:mt,89:vt,90:Tt},{45:124,85:93,86:pt,87:mt,89:vt,90:Tt},{45:125,85:93,86:pt,87:mt,89:vt,90:Tt},{45:126,85:93,86:pt,87:mt,89:vt,90:Tt},{45:127,85:93,86:pt,87:mt,89:vt,90:Tt},{30:128,39:58,40:70,42:71,44:b,46:x,47:k,48:T,49:C,50:M,51:S,53:32,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It},{15:[1,130],43:[1,129]},{45:131,85:93,86:pt,87:mt,89:vt,90:Tt},{45:132,85:93,86:pt,87:mt,89:vt,90:Tt},{45:133,85:93,86:pt,87:mt,89:vt,90:Tt},{45:134,85:93,86:pt,87:mt,89:vt,90:Tt},{45:135,85:93,86:pt,87:mt,89:vt,90:Tt},{45:136,85:93,86:pt,87:mt,89:vt,
90:Tt},{45:137,85:93,86:pt,87:mt,89:vt,90:Tt},{24:[1,138]},{24:[1,139]},{24:[1,140]},{24:[1,141]},t(le,[2,9]),{14:142,21:d},{21:[2,13]},{1:[2,15]},t(Lt,[2,22]),t(Rt,[2,24],{31:31,29:143,32:f,33:p,34:m,36:_,38:y}),t(Lt,[2,44],{29:29,30:30,31:31,53:32,39:58,40:70,42:71,23:144,32:f,33:p,34:m,36:_,38:y,44:b,46:x,47:k,48:T,49:C,50:M,51:S,54:R,55:A,56:L,57:v,58:B,59:w,60:D,61:N,62:z,63:X,64:ct,65:J,66:Y,67:$,68:lt,69:ut,70:W,71:tt,72:K,73:it,74:Z,75:V,76:Q,77:q,78:U,79:F,80:j,81:P,82:et,83:at,84:It}),t(Ct,[2,28]),t(Ct,[2,29]),t(ft,[2,46]),t(Dt,[2,78],{85:93,45:145,86:pt,87:mt,89:vt,90:Tt}),t(Gt,[2,80]),{88:[1,146]},t(Gt,[2,82]),t(Gt,[2,83]),t(ft,[2,47]),t(ft,[2,48]),t(ft,[2,49]),t(ft,[2,50]),t(ft,[2,51]),t(ft,[2,52]),t(ft,[2,53]),t(ft,[2,54]),t(ft,[2,55]),t(ft,[2,56]),t(ft,[2,57]),t(ft,[2,58]),t(ft,[2,59]),t(ft,[2,60]),t(ft,[2,61]),t(ft,[2,62]),t(ft,[2,63]),t(ft,[2,64]),t(ft,[2,65]),t(ft,[2,67]),t(ft,[2,68]),t(ft,[2,69]),t(ft,[2,70]),t(ft,[2,71]),t(ft,[2,72]),t(ft,[2,73]),t(ft,[2,74]),t(ft,[2,75]),t(ft,[2,76]),t(ft,[2,77]),{41:147,52:[1,148]},{15:[1,149]},{43:[1,150]},t($t,[2,35]),t($t,[2,36]),t($t,[2,37]),t($t,[2,38]),t($t,[2,39]),t($t,[2,40]),t($t,[2,41]),{1:[2,16]},{1:[2,17]},{1:[2,18]},{1:[2,19]},{15:[1,151]},t(Rt,[2,25]),t(Lt,[2,45]),t(Dt,[2,79]),t(Gt,[2,81]),t(ft,[2,31]),t(ft,[2,42]),t(Qt,[2,32]),t(Qt,[2,33],{15:[1,152]}),t(le,[2,10]),t(Qt,[2,34])],defaultActions:{2:[2,1],3:[2,2],5:[2,8],6:[2,4],7:[2,5],8:[2,6],9:[2,7],16:[2,11],17:[2,3],27:[2,14],85:[2,13],86:[2,15],138:[2,16],139:[2,17],140:[2,18],141:[2,19]},parseError:function(wt,bt){if(bt.recoverable)this.trace(wt);else{var Et=new Error(wt);throw Et.hash=bt,Et}},parse:function(wt){var bt=this,Et=[0],kt=[],Ut=[null],gt=[],he=this.table,yt="",ne=0,ve=0,ye=2,be=1,Te=gt.slice.call(arguments,1),Wt=Object.create(this.lexer),se={yy:{}};for(var me in this.yy)Object.prototype.hasOwnProperty.call(this.yy,me)&&(se.yy[me]=this.yy[me]);Wt.setInput(wt,se.yy),se.yy.lexer=Wt,se.yy.parser=this,typeof 
Wt.yylloc>"u"&&(Wt.yylloc={});var ue=Wt.yylloc;gt.push(ue);var _a=Wt.options&&Wt.options.ranges;typeof se.yy.parseError=="function"?this.parseError=se.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function Hr(){var Jt;return Jt=kt.pop()||Wt.lex()||be,typeof Jt!="number"&&(Jt instanceof Array&&(kt=Jt,Jt=kt.pop()),Jt=bt.symbols_[Jt]||Jt),Jt}for(var Ie,oe,Ke,wr,Ge={},Ze,qt,st,At;;){if(oe=Et[Et.length-1],this.defaultActions[oe]?Ke=this.defaultActions[oe]:((Ie===null||typeof Ie>"u")&&(Ie=Hr()),Ke=he[oe]&&he[oe][Ie]),typeof Ke>"u"||!Ke.length||!Ke[0]){var Nt="";At=[];for(Ze in he[oe])this.terminals_[Ze]&&Ze>ye&&At.push("'"+this.terminals_[Ze]+"'");Wt.showPosition?Nt="Parse error on line "+(ne+1)+`: -`+Wt.showPosition()+` -Expecting `+At.join(", ")+", got '"+(this.terminals_[Ie]||Ie)+"'":Nt="Parse error on line "+(ne+1)+": Unexpected "+(Ie==be?"end of input":"'"+(this.terminals_[Ie]||Ie)+"'"),this.parseError(Nt,{text:Wt.match,token:this.terminals_[Ie]||Ie,line:Wt.yylineno,loc:ue,expected:At})}if(Ke[0]instanceof Array&&Ke.length>1)throw new Error("Parse Error: multiple actions possible at state: "+oe+", token: "+Ie);switch(Ke[0]){case 1:Et.push(Ie),Ut.push(Wt.yytext),gt.push(Wt.yylloc),Et.push(Ke[1]),Ie=null,ve=Wt.yyleng,yt=Wt.yytext,ne=Wt.yylineno,ue=Wt.yylloc;break;case 2:if(qt=this.productions_[Ke[1]][1],Ge.$=Ut[Ut.length-qt],Ge._$={first_line:gt[gt.length-(qt||1)].first_line,last_line:gt[gt.length-1].last_line,first_column:gt[gt.length-(qt||1)].first_column,last_column:gt[gt.length-1].last_column},_a&&(Ge._$.range=[gt[gt.length-(qt||1)].range[0],gt[gt.length-1].range[1]]),wr=this.performAction.apply(Ge,[yt,ve,ne,se.yy,Ke[1],Ut,gt].concat(Te)),typeof wr<"u")return wr;qt&&(Et=Et.slice(0,-1*qt*2),Ut=Ut.slice(0,-1*qt),gt=gt.slice(0,-1*qt)),Et.push(this.productions_[Ke[1]][0]),Ut.push(Ge.$),gt.push(Ge._$),st=he[Et[Et.length-2]][Et[Et.length-1]],Et.push(st);break;case 3:return!0}}return!0}},jt=function(){var 
zt={EOF:1,parseError:function(bt,Et){if(this.yy.parser)this.yy.parser.parseError(bt,Et);else throw new Error(bt)},setInput:function(wt,bt){return this.yy=bt||this.yy||{},this._input=wt,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var wt=this._input[0];this.yytext+=wt,this.yyleng++,this.offset++,this.match+=wt,this.matched+=wt;var bt=wt.match(/(?:\r\n?|\n).*/g);return bt?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),wt},unput:function(wt){var bt=wt.length,Et=wt.split(/(?:\r\n?|\n)/g);this._input=wt+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-bt),this.offset-=bt;var kt=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),Et.length-1&&(this.yylineno-=Et.length-1);var Ut=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:Et?(Et.length===kt.length?this.yylloc.first_column:0)+kt[kt.length-Et.length].length-Et[0].length:this.yylloc.first_column-bt},this.options.ranges&&(this.yylloc.range=[Ut[0],Ut[0]+this.yyleng-bt]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(wt){this.unput(this.match.slice(wt))},pastInput:function(){var wt=this.matched.substr(0,this.matched.length-this.match.length);return(wt.length>20?"...":"")+wt.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var wt=this.match;return wt.length<20&&(wt+=this._input.substr(0,20-wt.length)),(wt.substr(0,20)+(wt.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var wt=this.pastInput(),bt=new Array(wt.length+1).join("-");return wt+this.upcomingInput()+` -`+bt+"^"},test_match:function(wt,bt){var Et,kt,Ut;if(this.options.backtrack_lexer&&(Ut={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(Ut.yylloc.range=this.yylloc.range.slice(0))),kt=wt[0].match(/(?:\r\n?|\n).*/g),kt&&(this.yylineno+=kt.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:kt?kt[kt.length-1].length-kt[kt.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+wt[0].length},this.yytext+=wt[0],this.match+=wt[0],this.matches=wt,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(wt[0].length),this.matched+=wt[0],Et=this.performAction.call(this,this.yy,this,bt,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),Et)return Et;if(this._backtrack){for(var gt in Ut)this[gt]=Ut[gt];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var 
wt,bt,Et,kt;this._more||(this.yytext="",this.match="");for(var Ut=this._currentRules(),gt=0;gt<Ut.length;gt++)if(Et=this._input.match(this.rules[Ut[gt]]),Et&&(!bt||Et[0].length>bt[0].length)){if(bt=Et,kt=gt,this.options.backtrack_lexer){if(wt=this.test_match(Et,Ut[gt]),wt!==!1)return wt;if(this._backtrack){bt=!1;continue}else return!1}else if(!this.options.flex)break}return bt?(wt=this.test_match(bt,Ut[kt]),wt!==!1?wt:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var bt=this.next();return bt||this.lex()},begin:function(bt){this.conditionStack.push(bt)},popState:function(){var bt=this.conditionStack.length-1;return bt>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(bt){return bt=this.conditionStack.length-1-Math.abs(bt||0),bt>=0?this.conditionStack[bt]:"INITIAL"},pushState:function(bt){this.begin(bt)},stateStackSize:function(){return this.conditionStack.length},options:{},performAction:function(bt,Et,kt,Ut){switch(kt){case 0:return this.begin("open_directive"),18;case 1:return 7;case 2:return 8;case 3:return 9;case 4:return 10;case 5:return this.begin("type_directive"),19;case 6:return this.popState(),this.begin("arg_directive"),16;case 7:return this.popState(),this.popState(),21;case 8:return 20;case 9:return 32;case 10:return 33;case 11:return this.begin("acc_title"),34;case 12:return this.popState(),"acc_title_value";case 13:return this.begin("acc_descr"),36;case 14:return this.popState(),"acc_descr_value";case 15:this.begin("acc_descr_multiline");break;case 16:this.popState();break;case 17:return"acc_descr_multiline_value";case 18:break;case 19:c;break;case 20:return 15;case 21:break;case 
22:return 22;case 23:return 25;case 24:return 26;case 25:return 27;case 26:return 28;case 27:return this.begin("person_ext"),55;case 28:return this.begin("person"),54;case 29:return this.begin("system_ext_queue"),61;case 30:return this.begin("system_ext_db"),60;case 31:return this.begin("system_ext"),59;case 32:return this.begin("system_queue"),58;case 33:return this.begin("system_db"),57;case 34:return this.begin("system"),56;case 35:return this.begin("boundary"),47;case 36:return this.begin("enterprise_boundary"),44;case 37:return this.begin("system_boundary"),46;case 38:return this.begin("container_ext_queue"),67;case 39:return this.begin("container_ext_db"),66;case 40:return this.begin("container_ext"),65;case 41:return this.begin("container_queue"),64;case 42:return this.begin("container_db"),63;case 43:return this.begin("container"),62;case 44:return this.begin("container_boundary"),48;case 45:return this.begin("component_ext_queue"),73;case 46:return this.begin("component_ext_db"),72;case 47:return this.begin("component_ext"),71;case 48:return this.begin("component_queue"),70;case 49:return this.begin("component_db"),69;case 50:return this.begin("component"),68;case 51:return this.begin("node"),49;case 52:return this.begin("node"),49;case 53:return this.begin("node_l"),50;case 54:return this.begin("node_r"),51;case 55:return this.begin("rel"),74;case 56:return this.begin("birel"),75;case 57:return this.begin("rel_u"),76;case 58:return this.begin("rel_u"),76;case 59:return this.begin("rel_d"),77;case 60:return this.begin("rel_d"),77;case 61:return this.begin("rel_l"),78;case 62:return this.begin("rel_l"),78;case 63:return this.begin("rel_r"),79;case 64:return this.begin("rel_r"),79;case 65:return this.begin("rel_b"),80;case 66:return this.begin("rel_index"),81;case 67:return this.begin("update_el_style"),82;case 68:return this.begin("update_rel_style"),83;case 69:return this.begin("update_layout_config"),84;case 70:return"EOF_IN_STRUCT";case 71:return 
this.begin("attribute"),"ATTRIBUTE_EMPTY";case 72:this.begin("attribute");break;case 73:this.popState(),this.popState();break;case 74:return 90;case 75:break;case 76:return 90;case 77:this.begin("string");break;case 78:this.popState();break;case 79:return"STR";case 80:this.begin("string_kv");break;case 81:return this.begin("string_kv_key"),"STR_KEY";case 82:this.popState(),this.begin("string_kv_value");break;case 83:return"STR_VALUE";case 84:this.popState(),this.popState();break;case 85:return"STR";case 86:return"LBRACE";case 87:return"RBRACE";case 88:return"SPACE";case 89:return"EOL";case 90:return 24}},rules:[/^(?:%%\{)/,/^(?:.*direction\s+TB[^\n]*)/,/^(?:.*direction\s+BT[^\n]*)/,/^(?:.*direction\s+RL[^\n]*)/,/^(?:.*direction\s+LR[^\n]*)/,/^(?:((?:(?!\}%%)[^:.])*))/,/^(?::)/,/^(?:\}%%)/,/^(?:((?:(?!\}%%).|\n)*))/,/^(?:title\s[^#\n;]+)/,/^(?:accDescription\s[^#\n;]+)/,/^(?:accTitle\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*\{\s*)/,/^(?:[\}])/,/^(?:[^\}]*)/,/^(?:%%(?!\{)*[^\n]*(\r?\n?)+)/,/^(?:%%[^\n]*(\r?\n)*)/,/^(?:\s*(\r?\n)+)/,/^(?:\s+)/,/^(?:C4Context\b)/,/^(?:C4Container\b)/,/^(?:C4Component\b)/,/^(?:C4Dynamic\b)/,/^(?:C4Deployment\b)/,/^(?:Person_Ext\b)/,/^(?:Person\b)/,/^(?:SystemQueue_Ext\b)/,/^(?:SystemDb_Ext\b)/,/^(?:System_Ext\b)/,/^(?:SystemQueue\b)/,/^(?:SystemDb\b)/,/^(?:System\b)/,/^(?:Boundary\b)/,/^(?:Enterprise_Boundary\b)/,/^(?:System_Boundary\b)/,/^(?:ContainerQueue_Ext\b)/,/^(?:ContainerDb_Ext\b)/,/^(?:Container_Ext\b)/,/^(?:ContainerQueue\b)/,/^(?:ContainerDb\b)/,/^(?:Container\b)/,/^(?:Container_Boundary\b)/,/^(?:ComponentQueue_Ext\b)/,/^(?:ComponentDb_Ext\b)/,/^(?:Component_Ext\b)/,/^(?:ComponentQueue\b)/,/^(?:ComponentDb\b)/,/^(?:Component\b)/,/^(?:Deployment_Node\b)/,/^(?:Node\b)/,/^(?:Node_L\b)/,/^(?:Node_R\b)/,/^(?:Rel\b)/,/^(?:BiRel\b)/,/^(?:Rel_Up\b)/,/^(?:Rel_U\b)/,/^(?:Rel_Down\b)/,/^(?:Rel_D\b)/,/^(?:Rel_Left\b)/,/^(?:Rel_L\b)/,/^(?:Rel_Right\b)/,/^(?:Rel_R\b)/,/^(?:Rel_Back\b)/,/^
(?:RelIndex\b)/,/^(?:UpdateElementStyle\b)/,/^(?:UpdateRelStyle\b)/,/^(?:UpdateLayoutConfig\b)/,/^(?:$)/,/^(?:[(][ ]*[,])/,/^(?:[(])/,/^(?:[)])/,/^(?:,,)/,/^(?:,)/,/^(?:[ ]*["]["])/,/^(?:[ ]*["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:[ ]*[\$])/,/^(?:[^=]*)/,/^(?:[=][ ]*["])/,/^(?:[^"]+)/,/^(?:["])/,/^(?:[^,]+)/,/^(?:\{)/,/^(?:\})/,/^(?:[\s]+)/,/^(?:[\n\r]+)/,/^(?:$)/],conditions:{acc_descr_multiline:{rules:[16,17],inclusive:!1},acc_descr:{rules:[14],inclusive:!1},acc_title:{rules:[12],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[7,8],inclusive:!1},type_directive:{rules:[6,7],inclusive:!1},open_directive:{rules:[5],inclusive:!1},string_kv_value:{rules:[83,84],inclusive:!1},string_kv_key:{rules:[82],inclusive:!1},string_kv:{rules:[81],inclusive:!1},string:{rules:[78,79],inclusive:!1},attribute:{rules:[73,74,75,76,77,80,85],inclusive:!1},update_layout_config:{rules:[70,71,72,73],inclusive:!1},update_rel_style:{rules:[70,71,72,73],inclusive:!1},update_el_style:{rules:[70,71,72,73],inclusive:!1},rel_b:{rules:[70,71,72,73],inclusive:!1},rel_r:{rules:[70,71,72,73],inclusive:!1},rel_l:{rules:[70,71,72,73],inclusive:!1},rel_d:{rules:[70,71,72,73],inclusive:!1},rel_u:{rules:[70,71,72,73],inclusive:!1},rel_bi:{rules:[],inclusive:!1},rel:{rules:[70,71,72,73],inclusive:!1},node_r:{rules:[70,71,72,73],inclusive:!1},node_l:{rules:[70,71,72,73],inclusive:!1},node:{rules:[70,71,72,73],inclusive:!1},index:{rules:[],inclusive:!1},rel_index:{rules:[70,71,72,73],inclusive:!1},component_ext_queue:{rules:[],inclusive:!1},component_ext_db:{rules:[70,71,72,73],inclusive:!1},component_ext:{rules:[70,71,72,73],inclusive:!1},component_queue:{rules:[70,71,72,73],inclusive:!1},component_db:{rules:[70,71,72,73],inclusive:!1},component:{rules:[70,71,72,73],inclusive:!1},container_boundary:{rules:[70,71,72,73],inclusive:!1},container_ext_queue:{rules:[],inclusive:!1},container_ext_db:{rules:[70,71,72,73],inclusive:!1},container_ext:{rules:[70,71,72,73],inclusive:!1},container
_queue:{rules:[70,71,72,73],inclusive:!1},container_db:{rules:[70,71,72,73],inclusive:!1},container:{rules:[70,71,72,73],inclusive:!1},birel:{rules:[70,71,72,73],inclusive:!1},system_boundary:{rules:[70,71,72,73],inclusive:!1},enterprise_boundary:{rules:[70,71,72,73],inclusive:!1},boundary:{rules:[70,71,72,73],inclusive:!1},system_ext_queue:{rules:[70,71,72,73],inclusive:!1},system_ext_db:{rules:[70,71,72,73],inclusive:!1},system_ext:{rules:[70,71,72,73],inclusive:!1},system_queue:{rules:[70,71,72,73],inclusive:!1},system_db:{rules:[70,71,72,73],inclusive:!1},system:{rules:[70,71,72,73],inclusive:!1},person_ext:{rules:[70,71,72,73],inclusive:!1},person:{rules:[70,71,72,73],inclusive:!1},INITIAL:{rules:[0,1,2,3,4,9,10,11,13,15,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,86,87,88,89,90],inclusive:!0}}};return zt}();we.lexer=jt;function Ft(){this.yy={}}return Ft.prototype=we,we.Parser=Ft,new Ft}();qc.parser=qc;const lK=t=>t.match(/^\s*C4Context|C4Container|C4Component|C4Dynamic|C4Deployment/)!==null;let di=[],Qa=[""],un="global",pi="",Di=[{alias:"global",label:{text:"global"},type:{text:"global"},tags:null,link:null,parentBoundary:""}],Vc=[],mg="",bg=!1,p1=4,g1=2;var jw;const cK=function(){return jw},uK=function(t){jw=ai(t,nt())},hK=function(t,e,r){Xe.parseDirective(this,t,e,r)},fK=function(t,e,r,n,i,a,s,o,l){if(t==null||e===void 0||e===null||r===void 0||r===null||n===void 0||n===null)return;let u={};const h=Vc.find(d=>d.from===e&&d.to===r);if(h?u=h:Vc.push(u),u.type=t,u.from=e,u.to=r,u.label={text:n},i==null)u.techn={text:""};else if(typeof i=="object"){let[d,f]=Object.entries(i)[0];u[d]={text:f}}else u.techn={text:i};if(a==null)u.descr={text:""};else if(typeof a=="object"){let[d,f]=Object.entries(a)[0];u[d]={text:f}}else u.descr={text:a};if(typeof s=="object"){let[d,f]=Object.entries(s)[0];u[d]=f}else u.sprite=s;if(typeof 
o=="object"){let[d,f]=Object.entries(o)[0];u[d]=f}else u.tags=o;if(typeof l=="object"){let[d,f]=Object.entries(l)[0];u[d]=f}else u.link=l;u.wrap=Ja()},dK=function(t,e,r,n,i,a,s){if(e===null||r===null)return;let o={};const l=di.find(u=>u.alias===e);if(l&&e===l.alias?o=l:(o.alias=e,di.push(o)),r==null?o.label={text:""}:o.label={text:r},n==null)o.descr={text:""};else if(typeof n=="object"){let[u,h]=Object.entries(n)[0];o[u]={text:h}}else o.descr={text:n};if(typeof i=="object"){let[u,h]=Object.entries(i)[0];o[u]=h}else o.sprite=i;if(typeof a=="object"){let[u,h]=Object.entries(a)[0];o[u]=h}else o.tags=a;if(typeof s=="object"){let[u,h]=Object.entries(s)[0];o[u]=h}else o.link=s;o.typeC4Shape={text:t},o.parentBoundary=un,o.wrap=Ja()},pK=function(t,e,r,n,i,a,s,o){if(e===null||r===null)return;let l={};const u=di.find(h=>h.alias===e);if(u&&e===u.alias?l=u:(l.alias=e,di.push(l)),r==null?l.label={text:""}:l.label={text:r},n==null)l.techn={text:""};else if(typeof n=="object"){let[h,d]=Object.entries(n)[0];l[h]={text:d}}else l.techn={text:n};if(i==null)l.descr={text:""};else if(typeof i=="object"){let[h,d]=Object.entries(i)[0];l[h]={text:d}}else l.descr={text:i};if(typeof a=="object"){let[h,d]=Object.entries(a)[0];l[h]=d}else l.sprite=a;if(typeof s=="object"){let[h,d]=Object.entries(s)[0];l[h]=d}else l.tags=s;if(typeof o=="object"){let[h,d]=Object.entries(o)[0];l[h]=d}else l.link=o;l.wrap=Ja(),l.typeC4Shape={text:t},l.parentBoundary=un},gK=function(t,e,r,n,i,a,s,o){if(e===null||r===null)return;let l={};const u=di.find(h=>h.alias===e);if(u&&e===u.alias?l=u:(l.alias=e,di.push(l)),r==null?l.label={text:""}:l.label={text:r},n==null)l.techn={text:""};else if(typeof n=="object"){let[h,d]=Object.entries(n)[0];l[h]={text:d}}else l.techn={text:n};if(i==null)l.descr={text:""};else if(typeof i=="object"){let[h,d]=Object.entries(i)[0];l[h]={text:d}}else l.descr={text:i};if(typeof a=="object"){let[h,d]=Object.entries(a)[0];l[h]=d}else l.sprite=a;if(typeof 
s=="object"){let[h,d]=Object.entries(s)[0];l[h]=d}else l.tags=s;if(typeof o=="object"){let[h,d]=Object.entries(o)[0];l[h]=d}else l.link=o;l.wrap=Ja(),l.typeC4Shape={text:t},l.parentBoundary=un},yK=function(t,e,r,n,i){if(t===null||e===null)return;let a={};const s=Di.find(o=>o.alias===t);if(s&&t===s.alias?a=s:(a.alias=t,Di.push(a)),e==null?a.label={text:""}:a.label={text:e},r==null)a.type={text:"system"};else if(typeof r=="object"){let[o,l]=Object.entries(r)[0];a[o]={text:l}}else a.type={text:r};if(typeof n=="object"){let[o,l]=Object.entries(n)[0];a[o]=l}else a.tags=n;if(typeof i=="object"){let[o,l]=Object.entries(i)[0];a[o]=l}else a.link=i;a.parentBoundary=un,a.wrap=Ja(),pi=un,un=t,Qa.push(pi)},mK=function(t,e,r,n,i){if(t===null||e===null)return;let a={};const s=Di.find(o=>o.alias===t);if(s&&t===s.alias?a=s:(a.alias=t,Di.push(a)),e==null?a.label={text:""}:a.label={text:e},r==null)a.type={text:"container"};else if(typeof r=="object"){let[o,l]=Object.entries(r)[0];a[o]={text:l}}else a.type={text:r};if(typeof n=="object"){let[o,l]=Object.entries(n)[0];a[o]=l}else a.tags=n;if(typeof i=="object"){let[o,l]=Object.entries(i)[0];a[o]=l}else a.link=i;a.parentBoundary=un,a.wrap=Ja(),pi=un,un=t,Qa.push(pi)},bK=function(t,e,r,n,i,a,s,o){if(e===null||r===null)return;let l={};const u=Di.find(h=>h.alias===e);if(u&&e===u.alias?l=u:(l.alias=e,Di.push(l)),r==null?l.label={text:""}:l.label={text:r},n==null)l.type={text:"node"};else if(typeof n=="object"){let[h,d]=Object.entries(n)[0];l[h]={text:d}}else l.type={text:n};if(i==null)l.descr={text:""};else if(typeof i=="object"){let[h,d]=Object.entries(i)[0];l[h]={text:d}}else l.descr={text:i};if(typeof s=="object"){let[h,d]=Object.entries(s)[0];l[h]=d}else l.tags=s;if(typeof o=="object"){let[h,d]=Object.entries(o)[0];l[h]=d}else l.link=o;l.nodeType=t,l.parentBoundary=un,l.wrap=Ja(),pi=un,un=e,Qa.push(pi)},_K=function(){un=pi,Qa.pop(),pi=Qa.pop(),Qa.push(pi)},vK=function(t,e,r,n,i,a,s,o,l,u,h){let d=di.find(f=>f.alias===e);if(!(d===void 
0&&(d=Di.find(f=>f.alias===e),d===void 0))){if(r!=null)if(typeof r=="object"){let[f,p]=Object.entries(r)[0];d[f]=p}else d.bgColor=r;if(n!=null)if(typeof n=="object"){let[f,p]=Object.entries(n)[0];d[f]=p}else d.fontColor=n;if(i!=null)if(typeof i=="object"){let[f,p]=Object.entries(i)[0];d[f]=p}else d.borderColor=i;if(a!=null)if(typeof a=="object"){let[f,p]=Object.entries(a)[0];d[f]=p}else d.shadowing=a;if(s!=null)if(typeof s=="object"){let[f,p]=Object.entries(s)[0];d[f]=p}else d.shape=s;if(o!=null)if(typeof o=="object"){let[f,p]=Object.entries(o)[0];d[f]=p}else d.sprite=o;if(l!=null)if(typeof l=="object"){let[f,p]=Object.entries(l)[0];d[f]=p}else d.techn=l;if(u!=null)if(typeof u=="object"){let[f,p]=Object.entries(u)[0];d[f]=p}else d.legendText=u;if(h!=null)if(typeof h=="object"){let[f,p]=Object.entries(h)[0];d[f]=p}else d.legendSprite=h}},xK=function(t,e,r,n,i,a,s){const o=Vc.find(l=>l.from===e&&l.to===r);if(o!==void 0){if(n!=null)if(typeof n=="object"){let[l,u]=Object.entries(n)[0];o[l]=u}else o.textColor=n;if(i!=null)if(typeof i=="object"){let[l,u]=Object.entries(i)[0];o[l]=u}else o.lineColor=i;if(a!=null)if(typeof a=="object"){let[l,u]=Object.entries(a)[0];o[l]=parseInt(u)}else o.offsetX=parseInt(a);if(s!=null)if(typeof s=="object"){let[l,u]=Object.entries(s)[0];o[l]=parseInt(u)}else o.offsetY=parseInt(s)}},kK=function(t,e,r){let n=p1,i=g1;if(typeof e=="object"){const a=Object.values(e)[0];n=parseInt(a)}else n=parseInt(e);if(typeof r=="object"){const a=Object.values(r)[0];i=parseInt(a)}else i=parseInt(r);n>=1&&(p1=n),i>=1&&(g1=i)},wK=function(){return p1},TK=function(){return g1},EK=function(){return un},CK=function(){return pi},$w=function(t){return t==null?di:di.filter(e=>e.parentBoundary===t)},SK=function(t){return di.find(e=>e.alias===t)},AK=function(t){return Object.keys($w(t))},MK=function(t){return t==null?Di:Di.filter(e=>e.parentBoundary===t)},LK=function(){return Vc},RK=function(){return mg},IK=function(t){bg=t},Ja=function(){return 
bg},Xw={addPersonOrSystem:dK,addPersonOrSystemBoundary:yK,addContainer:pK,addContainerBoundary:mK,addComponent:gK,addDeploymentNode:bK,popBoundaryParseStack:_K,addRel:fK,updateElStyle:vK,updateRelStyle:xK,updateLayoutConfig:kK,autoWrap:Ja,setWrap:IK,getC4ShapeArray:$w,getC4Shape:SK,getC4ShapeKeys:AK,getBoundarys:MK,getCurrentBoundaryParse:EK,getParentBoundaryParse:CK,getRels:LK,getTitle:RK,getC4Type:cK,getC4ShapeInRow:wK,getC4BoundaryInRow:TK,setAccTitle:Yn,getAccTitle:ui,getAccDescription:fi,setAccDescription:hi,parseDirective:hK,getConfig:()=>nt().c4,clear:function(){di=[],Di=[{alias:"global",label:{text:"global"},type:{text:"global"},tags:null,link:null,parentBoundary:""}],pi="",un="global",Qa=[""],Vc=[],Qa=[""],mg="",bg=!1,p1=4,g1=2},LINETYPE:{SOLID:0,DOTTED:1,NOTE:2,SOLID_CROSS:3,DOTTED_CROSS:4,SOLID_OPEN:5,DOTTED_OPEN:6,LOOP_START:10,LOOP_END:11,ALT_START:12,ALT_ELSE:13,ALT_END:14,OPT_START:15,OPT_END:16,ACTIVE_START:17,ACTIVE_END:18,PAR_START:19,PAR_AND:20,PAR_END:21,RECT_START:22,RECT_END:23,SOLID_POINT:24,DOTTED_POINT:25},ARROWTYPE:{FILLED:0,OPEN:1},PLACEMENT:{LEFTOF:0,RIGHTOF:1,OVER:2},setTitle:function(t){mg=ai(t,nt())},setC4Type:uK},_g=function(t,e){const r=t.append("rect");if(r.attr("x",e.x),r.attr("y",e.y),r.attr("fill",e.fill),r.attr("stroke",e.stroke),r.attr("width",e.width),r.attr("height",e.height),r.attr("rx",e.rx),r.attr("ry",e.ry),e.attrs!=="undefined"&&e.attrs!==null)for(let n in e.attrs)r.attr(n,e.attrs[n]);return e.class!=="undefined"&&r.attr("class",e.class),r},Kw=function(t,e,r,n,i,a){const s=t.append("image");s.attr("width",e),s.attr("height",r),s.attr("x",n),s.attr("y",i);let o=a.startsWith("data:image/png;base64")?a:ki(a);s.attr("xlink:href",o)},NK=function(t,e,r,n){const i=t.append("use");i.attr("x",e),i.attr("y",r);var a=ki(n);i.attr("xlink:href","#"+a)},Zw=function(t,e){let r=0,n=0;const i=e.text.split(pe.lineBreakRegex);let a=[],s=0,o=()=>e.y;if(typeof e.valign<"u"&&typeof 
e.textMargin<"u"&&e.textMargin>0)switch(e.valign){case"top":case"start":o=()=>Math.round(e.y+e.textMargin);break;case"middle":case"center":o=()=>Math.round(e.y+(r+n+e.textMargin)/2);break;case"bottom":case"end":o=()=>Math.round(e.y+(r+n+2*e.textMargin)-e.textMargin);break}if(typeof e.anchor<"u"&&typeof e.textMargin<"u"&&typeof e.width<"u")switch(e.anchor){case"left":case"start":e.x=Math.round(e.x+e.textMargin),e.anchor="start",e.dominantBaseline="text-after-edge",e.alignmentBaseline="middle";break;case"middle":case"center":e.x=Math.round(e.x+e.width/2),e.anchor="middle",e.dominantBaseline="middle",e.alignmentBaseline="middle";break;case"right":case"end":e.x=Math.round(e.x+e.width-e.textMargin),e.anchor="end",e.dominantBaseline="text-before-edge",e.alignmentBaseline="middle";break}for(let l=0;l<i.length;l++){let u=i[l];typeof e.textMargin<"u"&&e.textMargin===0&&typeof e.fontSize<"u"&&(s=l*e.fontSize);const h=t.append("text");if(h.attr("x",e.x),h.attr("y",o()),typeof e.anchor<"u"&&h.attr("text-anchor",e.anchor).attr("dominant-baseline",e.dominantBaseline).attr("alignment-baseline",e.alignmentBaseline),typeof e.fontFamily<"u"&&h.style("font-family",e.fontFamily),typeof e.fontSize<"u"&&h.style("font-size",e.fontSize),typeof e.fontWeight<"u"&&h.style("font-weight",e.fontWeight),typeof e.fill<"u"&&h.attr("fill",e.fill),typeof e.class<"u"&&h.attr("class",e.class),typeof e.dy<"u"?h.attr("dy",e.dy):s!==0&&h.attr("dy",s),e.tspan){const d=h.append("tspan");d.attr("x",e.x),typeof e.fill<"u"&&d.attr("fill",e.fill),d.text(u)}else h.text(u);typeof e.valign<"u"&&typeof e.textMargin<"u"&&e.textMargin>0&&(n+=(h._groups||h)[0][0].getBBox().height,r=n),a.push(h)}return a},BK=function(t,e){function r(i,a,s,o,l){return i+","+a+" "+(i+s)+","+a+" "+(i+s)+","+(a+o-l)+" "+(i+s-l*1.2)+","+(a+o)+" "+i+","+(a+o)}const n=t.append("polygon");return n.attr("points",r(e.x,e.y,e.width,e.height,7)),n.attr("class","labelBox"),e.y=e.y+e.height/2,Zw(t,e),n},DK=(t,e,r)=>{const n=t.append("g");let 
i=0;for(let a of e){let s=a.textColor?a.textColor:"#444444",o=a.lineColor?a.lineColor:"#444444",l=a.offsetX?parseInt(a.offsetX):0,u=a.offsetY?parseInt(a.offsetY):0,h="";if(i===0){let f=n.append("line");f.attr("x1",a.startPoint.x),f.attr("y1",a.startPoint.y),f.attr("x2",a.endPoint.x),f.attr("y2",a.endPoint.y),f.attr("stroke-width","1"),f.attr("stroke",o),f.style("fill","none"),a.type!=="rel_b"&&f.attr("marker-end","url("+h+"#arrowhead)"),(a.type==="birel"||a.type==="rel_b")&&f.attr("marker-start","url("+h+"#arrowend)"),i=-1}else{let f=n.append("path");f.attr("fill","none").attr("stroke-width","1").attr("stroke",o).attr("d","Mstartx,starty Qcontrolx,controly stopx,stopy ".replaceAll("startx",a.startPoint.x).replaceAll("starty",a.startPoint.y).replaceAll("controlx",a.startPoint.x+(a.endPoint.x-a.startPoint.x)/2-(a.endPoint.x-a.startPoint.x)/4).replaceAll("controly",a.startPoint.y+(a.endPoint.y-a.startPoint.y)/2).replaceAll("stopx",a.endPoint.x).replaceAll("stopy",a.endPoint.y)),a.type!=="rel_b"&&f.attr("marker-end","url("+h+"#arrowhead)"),(a.type==="birel"||a.type==="rel_b")&&f.attr("marker-start","url("+h+"#arrowend)")}let d=r.messageFont();sa(r)(a.label.text,n,Math.min(a.startPoint.x,a.endPoint.x)+Math.abs(a.endPoint.x-a.startPoint.x)/2+l,Math.min(a.startPoint.y,a.endPoint.y)+Math.abs(a.endPoint.y-a.startPoint.y)/2+u,a.label.width,a.label.height,{fill:s},d),a.techn&&a.techn.text!==""&&(d=r.messageFont(),sa(r)("["+a.techn.text+"]",n,Math.min(a.startPoint.x,a.endPoint.x)+Math.abs(a.endPoint.x-a.startPoint.x)/2+l,Math.min(a.startPoint.y,a.endPoint.y)+Math.abs(a.endPoint.y-a.startPoint.y)/2+r.messageFontSize+5+u,Math.max(a.label.width,a.techn.width),a.techn.height,{fill:s,"font-style":"italic"},d))}},OK=function(t,e,r){const n=t.append("g");let i=e.bgColor?e.bgColor:"none",a=e.borderColor?e.borderColor:"#444444",s=e.fontColor?e.fontColor:"black",o={"stroke-width":1,"stroke-dasharray":"7.0,7.0"};e.nodeType&&(o={"stroke-width":1});let 
l={x:e.x,y:e.y,fill:i,stroke:a,width:e.width,height:e.height,rx:2.5,ry:2.5,attrs:o};_g(n,l);let u=r.boundaryFont();u.fontWeight="bold",u.fontSize=u.fontSize+2,u.fontColor=s,sa(r)(e.label.text,n,e.x,e.y+e.label.Y,e.width,e.height,{fill:"#444444"},u),e.type&&e.type.text!==""&&(u=r.boundaryFont(),u.fontColor=s,sa(r)(e.type.text,n,e.x,e.y+e.type.Y,e.width,e.height,{fill:"#444444"},u)),e.descr&&e.descr.text!==""&&(u=r.boundaryFont(),u.fontSize=u.fontSize-2,u.fontColor=s,sa(r)(e.descr.text,n,e.x,e.y+e.descr.Y,e.width,e.height,{fill:"#444444"},u))},FK=function(t,e,r){let n=e.bgColor?e.bgColor:r[e.typeC4Shape.text+"_bg_color"],i=e.borderColor?e.borderColor:r[e.typeC4Shape.text+"_border_color"],a=e.fontColor?e.fontColor:"#FFFFFF",s="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAIAAADYYG7QAAACD0lEQVR4Xu2YoU4EMRCGT+4j8Ai8AhaH4QHgAUjQuFMECUgMIUgwJAgMhgQsAYUiJCiQIBBY+EITsjfTdme6V24v4c8vyGbb+ZjOtN0bNcvjQXmkH83WvYBWto6PLm6v7p7uH1/w2fXD+PBycX1Pv2l3IdDm/vn7x+dXQiAubRzoURa7gRZWd0iGRIiJbOnhnfYBQZNJjNbuyY2eJG8fkDE3bbG4ep6MHUAsgYxmE3nVs6VsBWJSGccsOlFPmLIViMzLOB7pCVO2AtHJMohH7Fh6zqitQK7m0rJvAVYgGcEpe//PLdDz65sM4pF9N7ICcXDKIB5Nv6j7tD0NoSdM2QrU9Gg0ewE1LqBhHR3BBdvj2vapnidjHxD/q6vd7Pvhr31AwcY8eXMTXAKECZZJFXuEq27aLgQK5uLMohCenGGuGewOxSjBvYBqeG6B+Nqiblggdjnc+ZXDy+FNFpFzw76O3UBAROuXh6FoiAcf5g9eTvUgzy0nWg6I8cXHRUpg5bOVBCo+KDpFajOf23GgPme7RSQ+lacIENUgJ6gg1k6HjgOlqnLqip4tEuhv0hNEMXUD0clyXE3p6pZA0S2nnvTlXwLJEZWlb7cTQH1+USgTN4VhAenm/wea1OCAOmqo6fE1WCb9WSKBah+rbUWPWAmE2Rvk0ApiB45eOyNAzU8xcTvj8KvkKEoOaIYeHNA3ZuygAvFMUO0AAAAASUVORK5CYII=";switch(e.typeC4Shape.text){case"person":s="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAIAAADYYG7QAAACD0lEQVR4Xu2YoU4EMRCGT+4j8Ai8AhaH4QHgAUjQuFMECUgMIUgwJAgMhgQsAYUiJCiQIBBY+EITsjfTdme6V24v4c8vyGbb+ZjOtN0bNcvjQXmkH83WvYBWto6PLm6v7p7uH1/w2fXD+PBycX1Pv2l3IdDm/vn7x+dXQiAubRzoURa7gRZWd0iGRIiJbOnhnfYBQZNJjNbuyY2eJG8fkDE3bbG4ep6MHUAsgYxmE3nVs6VsBWJSGccsOlFPmLIViMzLOB7pCVO2AtHJMohH7Fh6zqitQK7m0rJvAVYgGcEpe//PLdDz65sM4pF9N7ICcXDKIB5Nv6j7tD0NoSdM2QrU9Gg0e
wE1LqBhHR3BBdvj2vapnidjHxD/q6vd7Pvhr31AwcY8eXMTXAKECZZJFXuEq27aLgQK5uLMohCenGGuGewOxSjBvYBqeG6B+Nqiblggdjnc+ZXDy+FNFpFzw76O3UBAROuXh6FoiAcf5g9eTvUgzy0nWg6I8cXHRUpg5bOVBCo+KDpFajOf23GgPme7RSQ+lacIENUgJ6gg1k6HjgOlqnLqip4tEuhv0hNEMXUD0clyXE3p6pZA0S2nnvTlXwLJEZWlb7cTQH1+USgTN4VhAenm/wea1OCAOmqo6fE1WCb9WSKBah+rbUWPWAmE2Rvk0ApiB45eOyNAzU8xcTvj8KvkKEoOaIYeHNA3ZuygAvFMUO0AAAAASUVORK5CYII=";break;case"external_person":s="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAIAAADYYG7QAAAB6ElEQVR4Xu2YLY+EMBCG9+dWr0aj0Wg0Go1Go0+j8Xdv2uTCvv1gpt0ebHKPuhDaeW4605Z9mJvx4AdXUyTUdd08z+u6flmWZRnHsWkafk9DptAwDPu+f0eAYtu2PEaGWuj5fCIZrBAC2eLBAnRCsEkkxmeaJp7iDJ2QMDdHsLg8SxKFEJaAo8lAXnmuOFIhTMpxxKATebo4UiFknuNo4OniSIXQyRxEA3YsnjGCVEjVXD7yLUAqxBGUyPv/Y4W2beMgGuS7kVQIBycH0fD+oi5pezQETxdHKmQKGk1eQEYldK+jw5GxPfZ9z7Mk0Qnhf1W1m3w//EUn5BDmSZsbR44QQLBEqrBHqOrmSKaQAxdnLArCrxZcM7A7ZKs4ioRq8LFC+NpC3WCBJsvpVw5edm9iEXFuyNfxXAgSwfrFQ1c0iNda8AdejvUgnktOtJQQxmcfFzGglc5WVCj7oDgFqU18boeFSs52CUh8LE8BIVQDT1ABrB0HtgSEYlX5doJnCwv9TXocKCaKbnwhdDKPq4lf3SwU3HLq4V/+WYhHVMa/3b4IlfyikAduCkcBc7mQ3/z/Qq/cTuikhkzB12Ae/mcJC9U+Vo8Ej1gWAtgbeGgFsAMHr50BIWOLCbezvhpBFUdY6EJuJ/QDW0XoMX60zZ0AAAAASUVORK5CYII=";break}const o=t.append("g");o.attr("class","person-man");const l=Qw();switch(e.typeC4Shape.text){case"person":case"external_person":case"system":case"external_system":case"container":case"external_container":case"component":case"external_component":l.x=e.x,l.y=e.y,l.fill=n,l.width=e.width,l.height=e.height,l.style="stroke:"+i+";stroke-width:0.5;",l.rx=2.5,l.ry=2.5,_g(o,l);break;case"system_db":case"external_system_db":case"container_db":case"external_container_db":case"component_db":case"external_component_db":o.append("path").attr("fill",n).attr("stroke-width","0.5").attr("stroke",i).attr("d","Mstartx,startyc0,-10 half,-10 half,-10c0,0 half,0 half,10l0,heightc0,10 -half,10 -half,10c0,0 -half,0 
-half,-10l0,-height".replaceAll("startx",e.x).replaceAll("starty",e.y).replaceAll("half",e.width/2).replaceAll("height",e.height)),o.append("path").attr("fill","none").attr("stroke-width","0.5").attr("stroke",i).attr("d","Mstartx,startyc0,10 half,10 half,10c0,0 half,0 half,-10".replaceAll("startx",e.x).replaceAll("starty",e.y).replaceAll("half",e.width/2));break;case"system_queue":case"external_system_queue":case"container_queue":case"external_container_queue":case"component_queue":case"external_component_queue":o.append("path").attr("fill",n).attr("stroke-width","0.5").attr("stroke",i).attr("d","Mstartx,startylwidth,0c5,0 5,half 5,halfc0,0 0,half -5,halfl-width,0c-5,0 -5,-half -5,-halfc0,0 0,-half 5,-half".replaceAll("startx",e.x).replaceAll("starty",e.y).replaceAll("width",e.width).replaceAll("half",e.height/2)),o.append("path").attr("fill","none").attr("stroke-width","0.5").attr("stroke",i).attr("d","Mstartx,startyc-5,0 -5,half -5,halfc0,half 5,half 5,half".replaceAll("startx",e.x+e.width).replaceAll("starty",e.y).replaceAll("half",e.height/2));break}let u=jK(r,e.typeC4Shape.text);switch(o.append("text").attr("fill",a).attr("font-family",u.fontFamily).attr("font-size",u.fontSize-2).attr("font-style","italic").attr("lengthAdjust","spacing").attr("textLength",e.typeC4Shape.width).attr("x",e.x+e.width/2-e.typeC4Shape.width/2).attr("y",e.y+e.typeC4Shape.Y).text("<<"+e.typeC4Shape.text+">>"),e.typeC4Shape.text){case"person":case"external_person":Kw(o,48,48,e.x+e.width/2-24,e.y+e.image.Y,s);break}let h=r[e.typeC4Shape.text+"Font"]();return 
h.fontWeight="bold",h.fontSize=h.fontSize+2,h.fontColor=a,sa(r)(e.label.text,o,e.x,e.y+e.label.Y,e.width,e.height,{fill:a},h),h=r[e.typeC4Shape.text+"Font"](),h.fontColor=a,e.thchn&&e.thchn.text!==""?sa(r)(e.thchn.text,o,e.x,e.y+e.thchn.Y,e.width,e.height,{fill:a,"font-style":"italic"},h):e.type&&e.type.text!==""&&sa(r)(e.type.text,o,e.x,e.y+e.type.Y,e.width,e.height,{fill:a,"font-style":"italic"},h),e.descr&&e.descr.text!==""&&(h=r.personFont(),h.fontColor=a,sa(r)(e.descr.text,o,e.x,e.y+e.descr.Y,e.width,e.height,{fill:a},h)),e.height},PK=function(t){t.append("defs").append("symbol").attr("id","database").attr("fill-rule","evenodd").attr("clip-rule","evenodd").append("path").attr("transform","scale(.5)").attr("d","M12.258.001l.256.004.255.005.253.008.251.01.249.012.247.015.246.016.242.019.241.02.239.023.236.024.233.027.231.028.229.031.225.032.223.034.22.036.217.038.214.04.211.041.208.043.205.045.201.046.198.048.194.05.191.051.187.053.183.054.18.056.175.057.172.059.168.06.163.061.16.063.155.064.15.066.074.033.073.033.071.034.07.034.069.035.068.035.067.035.066.035.064.036.064.036.062.036.06.036.06.037.058.037.058.037.055.038.055.038.053.038.052.038.051.039.05.039.048.039.047.039.045.04.044.04.043.04.041.04.04.041.039.041.037.041.036.041.034.041.033.042.032.042.03.042.029.042.027.042.026.043.024.043.023.043.021.043.02.043.018.044.017.043.015.044.013.044.012.044.011.045.009.044.007.045.006.045.004.045.002.045.001.045v17l-.001.045-.002.045-.004.045-.006.045-.007.045-.009.044-.011.045-.012.044-.013.044-.015.044-.017.043-.018.044-.02.043-.021.043-.023.043-.024.043-.026.043-.027.042-.029.042-.03.042-.032.042-.033.042-.034.041-.036.041-.037.041-.039.041-.04.041-.041.04-.043.04-.044.04-.045.04-.047.039-.048.039-.05.039-.051.039-.052.038-.053.038-.055.038-.055.038-.058.037-.058.037-.06.037-.06.036-.062.036-.064.036-.064.036-.066.035-.067.035-.068.035-.069.035-.07.034-.071.034-.073.033-.074.033-.15.066-.155.064-.16.063-.163.061-.168.06-.172.059-.175.057-.18.056-.183.054-.187.0
53-.191.051-.194.05-.198.048-.201.046-.205.045-.208.043-.211.041-.214.04-.217.038-.22.036-.223.034-.225.032-.229.031-.231.028-.233.027-.236.024-.239.023-.241.02-.242.019-.246.016-.247.015-.249.012-.251.01-.253.008-.255.005-.256.004-.258.001-.258-.001-.256-.004-.255-.005-.253-.008-.251-.01-.249-.012-.247-.015-.245-.016-.243-.019-.241-.02-.238-.023-.236-.024-.234-.027-.231-.028-.228-.031-.226-.032-.223-.034-.22-.036-.217-.038-.214-.04-.211-.041-.208-.043-.204-.045-.201-.046-.198-.048-.195-.05-.19-.051-.187-.053-.184-.054-.179-.056-.176-.057-.172-.059-.167-.06-.164-.061-.159-.063-.155-.064-.151-.066-.074-.033-.072-.033-.072-.034-.07-.034-.069-.035-.068-.035-.067-.035-.066-.035-.064-.036-.063-.036-.062-.036-.061-.036-.06-.037-.058-.037-.057-.037-.056-.038-.055-.038-.053-.038-.052-.038-.051-.039-.049-.039-.049-.039-.046-.039-.046-.04-.044-.04-.043-.04-.041-.04-.04-.041-.039-.041-.037-.041-.036-.041-.034-.041-.033-.042-.032-.042-.03-.042-.029-.042-.027-.042-.026-.043-.024-.043-.023-.043-.021-.043-.02-.043-.018-.044-.017-.043-.015-.044-.013-.044-.012-.044-.011-.045-.009-.044-.007-.045-.006-.045-.004-.045-.002-.045-.001-.045v-17l.001-.045.002-.045.004-.045.006-.045.007-.045.009-.044.011-.045.012-.044.013-.044.015-.044.017-.043.018-.044.02-.043.021-.043.023-.043.024-.043.026-.043.027-.042.029-.042.03-.042.032-.042.033-.042.034-.041.036-.041.037-.041.039-.041.04-.041.041-.04.043-.04.044-.04.046-.04.046-.039.049-.039.049-.039.051-.039.052-.038.053-.038.055-.038.056-.038.057-.037.058-.037.06-.037.061-.036.062-.036.063-.036.064-.036.066-.035.067-.035.068-.035.069-.035.07-.034.072-.034.072-.033.074-.033.151-.066.155-.064.159-.063.164-.061.167-.06.172-.059.176-.057.179-.056.184-.054.187-.053.19-.051.195-.05.198-.048.201-.046.204-.045.208-.043.211-.041.214-.04.217-.038.22-.036.223-.034.226-.032.228-.031.231-.028.234-.027.236-.024.238-.023.241-.02.243-.019.245-.016.247-.015.249-.012.251-.01.253-.008.255-.005.256-.004.258-.001.258.001zm-9.258 
20.499v.01l.001.021.003.021.004.022.005.021.006.022.007.022.009.023.01.022.011.023.012.023.013.023.015.023.016.024.017.023.018.024.019.024.021.024.022.025.023.024.024.025.052.049.056.05.061.051.066.051.07.051.075.051.079.052.084.052.088.052.092.052.097.052.102.051.105.052.11.052.114.051.119.051.123.051.127.05.131.05.135.05.139.048.144.049.147.047.152.047.155.047.16.045.163.045.167.043.171.043.176.041.178.041.183.039.187.039.19.037.194.035.197.035.202.033.204.031.209.03.212.029.216.027.219.025.222.024.226.021.23.02.233.018.236.016.24.015.243.012.246.01.249.008.253.005.256.004.259.001.26-.001.257-.004.254-.005.25-.008.247-.011.244-.012.241-.014.237-.016.233-.018.231-.021.226-.021.224-.024.22-.026.216-.027.212-.028.21-.031.205-.031.202-.034.198-.034.194-.036.191-.037.187-.039.183-.04.179-.04.175-.042.172-.043.168-.044.163-.045.16-.046.155-.046.152-.047.148-.048.143-.049.139-.049.136-.05.131-.05.126-.05.123-.051.118-.052.114-.051.11-.052.106-.052.101-.052.096-.052.092-.052.088-.053.083-.051.079-.052.074-.052.07-.051.065-.051.06-.051.056-.05.051-.05.023-.024.023-.025.021-.024.02-.024.019-.024.018-.024.017-.024.015-.023.014-.024.013-.023.012-.023.01-.023.01-.022.008-.022.006-.022.006-.022.004-.022.004-.021.001-.021.001-.021v-4.127l-.077.055-.08.053-.083.054-.085.053-.087.052-.09.052-.093.051-.095.05-.097.05-.1.049-.102.049-.105.048-.106.047-.109.047-.111.046-.114.045-.115.045-.118.044-.12.043-.122.042-.124.042-.126.041-.128.04-.13.04-.132.038-.134.038-.135.037-.138.037-.139.035-.142.035-.143.034-.144.033-.147.032-.148.031-.15.03-.151.03-.153.029-.154.027-.156.027-.158.026-.159.025-.161.024-.162.023-.163.022-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.011-.178.01-.179.008-.179.008-.181.006-.182.005-.182.004-.184.003-.184.002h-.37l-.184-.002-.184-.003-.182-.004-.182-.005-.181-.006-.179-.008-.179-.008-.178-.01-.176-.011-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.022
-.162-.023-.161-.024-.159-.025-.157-.026-.156-.027-.155-.027-.153-.029-.151-.03-.15-.03-.148-.031-.146-.032-.145-.033-.143-.034-.141-.035-.14-.035-.137-.037-.136-.037-.134-.038-.132-.038-.13-.04-.128-.04-.126-.041-.124-.042-.122-.042-.12-.044-.117-.043-.116-.045-.113-.045-.112-.046-.109-.047-.106-.047-.105-.048-.102-.049-.1-.049-.097-.05-.095-.05-.093-.052-.09-.051-.087-.052-.085-.053-.083-.054-.08-.054-.077-.054v4.127zm0-5.654v.011l.001.021.003.021.004.021.005.022.006.022.007.022.009.022.01.022.011.023.012.023.013.023.015.024.016.023.017.024.018.024.019.024.021.024.022.024.023.025.024.024.052.05.056.05.061.05.066.051.07.051.075.052.079.051.084.052.088.052.092.052.097.052.102.052.105.052.11.051.114.051.119.052.123.05.127.051.131.05.135.049.139.049.144.048.147.048.152.047.155.046.16.045.163.045.167.044.171.042.176.042.178.04.183.04.187.038.19.037.194.036.197.034.202.033.204.032.209.03.212.028.216.027.219.025.222.024.226.022.23.02.233.018.236.016.24.014.243.012.246.01.249.008.253.006.256.003.259.001.26-.001.257-.003.254-.006.25-.008.247-.01.244-.012.241-.015.237-.016.233-.018.231-.02.226-.022.224-.024.22-.025.216-.027.212-.029.21-.03.205-.032.202-.033.198-.035.194-.036.191-.037.187-.039.183-.039.179-.041.175-.042.172-.043.168-.044.163-.045.16-.045.155-.047.152-.047.148-.048.143-.048.139-.05.136-.049.131-.05.126-.051.123-.051.118-.051.114-.052.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.051.07-.052.065-.051.06-.05.056-.051.051-.049.023-.025.023-.024.021-.025.02-.024.019-.024.018-.024.017-.024.015-.023.014-.023.013-.024.012-.022.01-.023.01-.023.008-.022.006-.022.006-.022.004-.021.004-.022.001-.021.001-.021v-4.139l-.077.054-.08.054-.083.054-.085.052-.087.053-.09.051-.093.051-.095.051-.097.05-.1.049-.102.049-.105.048-.106.047-.109.047-.111.046-.114.045-.115.044-.118.044-.12.044-.122.042-.124.042-.126.041-.128.04-.13.039-.132.039-.134.038-.135.037-.138.036-.139.036-.142.035-.143.033-.144.033-.147.033-.148.031-.15.03-.151.03-.153.028-.154.028
-.156.027-.158.026-.159.025-.161.024-.162.023-.163.022-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.011-.178.009-.179.009-.179.007-.181.007-.182.005-.182.004-.184.003-.184.002h-.37l-.184-.002-.184-.003-.182-.004-.182-.005-.181-.007-.179-.007-.179-.009-.178-.009-.176-.011-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.022-.162-.023-.161-.024-.159-.025-.157-.026-.156-.027-.155-.028-.153-.028-.151-.03-.15-.03-.148-.031-.146-.033-.145-.033-.143-.033-.141-.035-.14-.036-.137-.036-.136-.037-.134-.038-.132-.039-.13-.039-.128-.04-.126-.041-.124-.042-.122-.043-.12-.043-.117-.044-.116-.044-.113-.046-.112-.046-.109-.046-.106-.047-.105-.048-.102-.049-.1-.049-.097-.05-.095-.051-.093-.051-.09-.051-.087-.053-.085-.052-.083-.054-.08-.054-.077-.054v4.139zm0-5.666v.011l.001.02.003.022.004.021.005.022.006.021.007.022.009.023.01.022.011.023.012.023.013.023.015.023.016.024.017.024.018.023.019.024.021.025.022.024.023.024.024.025.052.05.056.05.061.05.066.051.07.051.075.052.079.051.084.052.088.052.092.052.097.052.102.052.105.051.11.052.114.051.119.051.123.051.127.05.131.05.135.05.139.049.144.048.147.048.152.047.155.046.16.045.163.045.167.043.171.043.176.042.178.04.183.04.187.038.19.037.194.036.197.034.202.033.204.032.209.03.212.028.216.027.219.025.222.024.226.021.23.02.233.018.236.017.24.014.243.012.246.01.249.008.253.006.256.003.259.001.26-.001.257-.003.254-.006.25-.008.247-.01.244-.013.241-.014.237-.016.233-.018.231-.02.226-.022.224-.024.22-.025.216-.027.212-.029.21-.03.205-.032.202-.033.198-.035.194-.036.191-.037.187-.039.183-.039.179-.041.175-.042.172-.043.168-.044.163-.045.16-.045.155-.047.152-.047.148-.048.143-.049.139-.049.136-.049.131-.051.126-.05.123-.051.118-.052.114-.051.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.052.07-.051.065-.051.06-.051.056-.05.051-.049.023-.025.023-.025.021-.024.02-.024.019-.024.018-.024.017-.024.015-.023.014-.024.013-.0
23.012-.023.01-.022.01-.023.008-.022.006-.022.006-.022.004-.022.004-.021.001-.021.001-.021v-4.153l-.077.054-.08.054-.083.053-.085.053-.087.053-.09.051-.093.051-.095.051-.097.05-.1.049-.102.048-.105.048-.106.048-.109.046-.111.046-.114.046-.115.044-.118.044-.12.043-.122.043-.124.042-.126.041-.128.04-.13.039-.132.039-.134.038-.135.037-.138.036-.139.036-.142.034-.143.034-.144.033-.147.032-.148.032-.15.03-.151.03-.153.028-.154.028-.156.027-.158.026-.159.024-.161.024-.162.023-.163.023-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.01-.178.01-.179.009-.179.007-.181.006-.182.006-.182.004-.184.003-.184.001-.185.001-.185-.001-.184-.001-.184-.003-.182-.004-.182-.006-.181-.006-.179-.007-.179-.009-.178-.01-.176-.01-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.023-.162-.023-.161-.024-.159-.024-.157-.026-.156-.027-.155-.028-.153-.028-.151-.03-.15-.03-.148-.032-.146-.032-.145-.033-.143-.034-.141-.034-.14-.036-.137-.036-.136-.037-.134-.038-.132-.039-.13-.039-.128-.041-.126-.041-.124-.041-.122-.043-.12-.043-.117-.044-.116-.044-.113-.046-.112-.046-.109-.046-.106-.048-.105-.048-.102-.048-.1-.05-.097-.049-.095-.051-.093-.051-.09-.052-.087-.052-.085-.053-.083-.053-.08-.054-.077-.054v4.153zm8.74-8.179l-.257.004-.254.005-.25.008-.247.011-.244.012-.241.014-.237.016-.233.018-.231.021-.226.022-.224.023-.22.026-.216.027-.212.028-.21.031-.205.032-.202.033-.198.034-.194.036-.191.038-.187.038-.183.04-.179.041-.175.042-.172.043-.168.043-.163.045-.16.046-.155.046-.152.048-.148.048-.143.048-.139.049-.136.05-.131.05-.126.051-.123.051-.118.051-.114.052-.11.052-.106.052-.101.052-.096.052-.092.052-.088.052-.083.052-.079.052-.074.051-.07.052-.065.051-.06.05-.056.05-.051.05-.023.025-.023.024-.021.024-.02.025-.019.024-.018.024-.017.023-.015.024-.014.023-.013.023-.012.023-.01.023-.01.022-.008.022-.006.023-.006.021-.004.022-.004.021-.001.021-.001.021.001.021.001.021.004.021.004.022.006.021.006.023.008.02
2.01.022.01.023.012.023.013.023.014.023.015.024.017.023.018.024.019.024.02.025.021.024.023.024.023.025.051.05.056.05.06.05.065.051.07.052.074.051.079.052.083.052.088.052.092.052.096.052.101.052.106.052.11.052.114.052.118.051.123.051.126.051.131.05.136.05.139.049.143.048.148.048.152.048.155.046.16.046.163.045.168.043.172.043.175.042.179.041.183.04.187.038.191.038.194.036.198.034.202.033.205.032.21.031.212.028.216.027.22.026.224.023.226.022.231.021.233.018.237.016.241.014.244.012.247.011.25.008.254.005.257.004.26.001.26-.001.257-.004.254-.005.25-.008.247-.011.244-.012.241-.014.237-.016.233-.018.231-.021.226-.022.224-.023.22-.026.216-.027.212-.028.21-.031.205-.032.202-.033.198-.034.194-.036.191-.038.187-.038.183-.04.179-.041.175-.042.172-.043.168-.043.163-.045.16-.046.155-.046.152-.048.148-.048.143-.048.139-.049.136-.05.131-.05.126-.051.123-.051.118-.051.114-.052.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.051.07-.052.065-.051.06-.05.056-.05.051-.05.023-.025.023-.024.021-.024.02-.025.019-.024.018-.024.017-.023.015-.024.014-.023.013-.023.012-.023.01-.023.01-.022.008-.022.006-.023.006-.021.004-.022.004-.021.001-.021.001-.021-.001-.021-.001-.021-.004-.021-.004-.022-.006-.021-.006-.023-.008-.022-.01-.022-.01-.023-.012-.023-.013-.023-.014-.023-.015-.024-.017-.023-.018-.024-.019-.024-.02-.025-.021-.024-.023-.024-.023-.025-.051-.05-.056-.05-.06-.05-.065-.051-.07-.052-.074-.051-.079-.052-.083-.052-.088-.052-.092-.052-.096-.052-.101-.052-.106-.052-.11-.052-.114-.052-.118-.051-.123-.051-.126-.051-.131-.05-.136-.05-.139-.049-.143-.048-.148-.048-.152-.048-.155-.046-.16-.046-.163-.045-.168-.043-.172-.043-.175-.042-.179-.041-.183-.04-.187-.038-.191-.038-.194-.036-.198-.034-.202-.033-.205-.032-.21-.031-.212-.028-.216-.027-.22-.026-.224-.023-.226-.022-.231-.021-.233-.018-.237-.016-.241-.014-.244-.012-.247-.011-.25-.008-.254-.005-.257-.004-.26-.001-.26.001z")},qK=function(t){t.append("defs").append("symbol").attr("id","computer").attr("width","24").attr(
"height","24").append("path").attr("transform","scale(.5)").attr("d","M2 2v13h20v-13h-20zm18 11h-16v-9h16v9zm-10.228 6l.466-1h3.524l.467 1h-4.457zm14.228 3h-24l2-6h2.104l-1.33 4h18.45l-1.297-4h2.073l2 6zm-5-10h-14v-7h14v7z")},VK=function(t){t.append("defs").append("symbol").attr("id","clock").attr("width","24").attr("height","24").append("path").attr("transform","scale(.5)").attr("d","M12 2c5.514 0 10 4.486 10 10s-4.486 10-10 10-10-4.486-10-10 4.486-10 10-10zm0-2c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm5.848 12.459c.202.038.202.333.001.372-1.907.361-6.045 1.111-6.547 1.111-.719 0-1.301-.582-1.301-1.301 0-.512.77-5.447 1.125-7.445.034-.192.312-.181.343.014l.985 6.238 5.394 1.011z")},zK=function(t){t.append("defs").append("marker").attr("id","arrowhead").attr("refX",9).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",12).attr("markerHeight",12).attr("orient","auto").append("path").attr("d","M 0 0 L 10 5 L 0 10 z")},YK=function(t){t.append("defs").append("marker").attr("id","arrowend").attr("refX",1).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",12).attr("markerHeight",12).attr("orient","auto").append("path").attr("d","M 10 0 L 0 5 L 10 10 z")},UK=function(t){t.append("defs").append("marker").attr("id","filled-head").attr("refX",18).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L14,7 L9,1 Z")},WK=function(t){t.append("defs").append("marker").attr("id","sequencenumber").attr("refX",15).attr("refY",15).attr("markerWidth",60).attr("markerHeight",40).attr("orient","auto").append("circle").attr("cx",15).attr("cy",15).attr("r",6)},HK=function(t){const r=t.append("defs").append("marker").attr("id","crosshead").attr("markerWidth",15).attr("markerHeight",8).attr("orient","auto").attr("refX",16).attr("refY",4);r.append("path").attr("fill","black").attr("stroke","#000000").style("stroke-dasharray","0, 
0").attr("stroke-width","1px").attr("d","M 9,2 V 6 L16,4 Z"),r.append("path").attr("fill","none").attr("stroke","#000000").style("stroke-dasharray","0, 0").attr("stroke-width","1px").attr("d","M 0,1 L 6,7 M 6,1 L 0,7")},GK=function(){return{x:0,y:0,fill:void 0,anchor:void 0,style:"#666",width:void 0,height:void 0,textMargin:0,rx:0,ry:0,tspan:!0,valign:void 0}},Qw=function(){return{x:0,y:0,fill:"#EDF2AE",stroke:"#666",width:100,anchor:"start",height:100,rx:0,ry:0}},jK=(t,e)=>({fontFamily:t[e+"FontFamily"],fontSize:t[e+"FontSize"],fontWeight:t[e+"FontWeight"]}),sa=function(){function t(i,a,s,o,l,u,h){const d=a.append("text").attr("x",s+l/2).attr("y",o+u/2+5).style("text-anchor","middle").text(i);n(d,h)}function e(i,a,s,o,l,u,h,d){const{fontSize:f,fontFamily:p,fontWeight:m}=d,_=i.split(pe.lineBreakRegex);for(let y=0;y<_.length;y++){const b=y*f-f*(_.length-1)/2,x=a.append("text").attr("x",s+l/2).attr("y",o).style("text-anchor","middle").attr("dominant-baseline","middle").style("font-size",f).style("font-weight",m).style("font-family",p);x.append("tspan").attr("dy",b).text(_[y]).attr("alignment-baseline","mathematical"),n(x,h)}}function r(i,a,s,o,l,u,h,d){const f=a.append("switch"),m=f.append("foreignObject").attr("x",s).attr("y",o).attr("width",l).attr("height",u).append("xhtml:div").style("display","table").style("height","100%").style("width","100%");m.append("div").style("display","table-cell").style("text-align","center").style("vertical-align","middle").text(i),e(i,f,s,o,l,u,h,d),n(m,h)}function n(i,a){for(const s in a)a.hasOwnProperty(s)&&i.attr(s,a[s])}return function(i){return 
i.textPlacement==="fo"?r:i.textPlacement==="old"?t:e}}(),Oi={drawRect:_g,drawText:Zw,drawLabel:BK,drawBoundary:OK,drawC4Shape:FK,drawRels:DK,drawImage:Kw,drawEmbeddedImage:NK,insertArrowHead:zK,insertArrowEnd:YK,insertArrowFilledHead:UK,insertDynamicNumber:WK,insertArrowCrossHead:HK,insertDatabaseIcon:PK,insertComputerIcon:qK,insertClockIcon:VK,getTextObj:GK,getNoteRect:Qw,sanitizeUrl:ki};let y1=0,m1=0,Jw=4,vg=2;qc.yy=Xw;let Zt={};class t9{constructor(e){this.name="",this.data={},this.data.startx=void 0,this.data.stopx=void 0,this.data.starty=void 0,this.data.stopy=void 0,this.data.widthLimit=void 0,this.nextData={},this.nextData.startx=void 0,this.nextData.stopx=void 0,this.nextData.starty=void 0,this.nextData.stopy=void 0,this.nextData.cnt=0,xg(e.db.getConfig())}setData(e,r,n,i){this.nextData.startx=this.data.startx=e,this.nextData.stopx=this.data.stopx=r,this.nextData.starty=this.data.starty=n,this.nextData.stopy=this.data.stopy=i}updateVal(e,r,n,i){typeof e[r]>"u"?e[r]=n:e[r]=i(n,e[r])}insert(e){this.nextData.cnt=this.nextData.cnt+1;let r=this.nextData.startx===this.nextData.stopx?this.nextData.stopx+e.margin:this.nextData.stopx+e.margin*2,n=r+e.width,i=this.nextData.starty+e.margin*2,a=i+e.height;(r>=this.data.widthLimit||n>=this.data.widthLimit||this.nextData.cnt>Jw)&&(r=this.nextData.startx+e.margin+Zt.nextLinePaddingX,i=this.nextData.stopy+e.margin*2,this.nextData.stopx=n=r+e.width,this.nextData.starty=this.nextData.stopy,this.nextData.stopy=a=i+e.height,this.nextData.cnt=1),e.x=r,e.y=i,this.updateVal(this.data,"startx",r,Math.min),this.updateVal(this.data,"starty",i,Math.min),this.updateVal(this.data,"stopx",n,Math.max),this.updateVal(this.data,"stopy",a,Math.max),this.updateVal(this.nextData,"startx",r,Math.min),this.updateVal(this.nextData,"starty",i,Math.min),this.updateVal(this.nextData,"stopx",n,Math.max),this.updateVal(this.nextData,"stopy",a,Math.max)}init(e){this.name="",this.data={startx:void 0,stopx:void 0,starty:void 0,stopy:void 
0,widthLimit:void 0},this.nextData={startx:void 0,stopx:void 0,starty:void 0,stopy:void 0,cnt:0},xg(e.db.getConfig())}bumpLastMargin(e){this.data.stopx+=e,this.data.stopy+=e}}const xg=function(t){fr(Zt,t),t.fontFamily&&(Zt.personFontFamily=Zt.systemFontFamily=Zt.messageFontFamily=t.fontFamily),t.fontSize&&(Zt.personFontSize=Zt.systemFontSize=Zt.messageFontSize=t.fontSize),t.fontWeight&&(Zt.personFontWeight=Zt.systemFontWeight=Zt.messageFontWeight=t.fontWeight)},zc=(t,e)=>({fontFamily:t[e+"FontFamily"],fontSize:t[e+"FontSize"],fontWeight:t[e+"FontWeight"]}),b1=t=>({fontFamily:t.boundaryFontFamily,fontSize:t.boundaryFontSize,fontWeight:t.boundaryFontWeight}),$K=t=>({fontFamily:t.messageFontFamily,fontSize:t.messageFontSize,fontWeight:t.messageFontWeight});function gi(t,e,r,n,i){if(!e[t].width)if(r)e[t].text=cw(e[t].text,i,n),e[t].textLines=e[t].text.split(pe.lineBreakRegex).length,e[t].width=i,e[t].height=eg(e[t].text,n);else{let a=e[t].text.split(pe.lineBreakRegex);e[t].textLines=a.length;let s=0;e[t].height=0,e[t].width=0;for(let o=0;o<a.length;o++)e[t].width=Math.max(Bi(a[o],n),e[t].width),s=eg(a[o],n),e[t].height=e[t].height+s}}const e9=function(t,e,r){e.x=r.data.startx,e.y=r.data.starty,e.width=r.data.stopx-r.data.startx,e.height=r.data.stopy-r.data.starty,e.label.y=Zt.c4ShapeMargin-35;let n=e.wrap&&Zt.wrap,i=b1(Zt);i.fontSize=i.fontSize+2,i.fontWeight="bold";let a=Bi(e.label.text,i);gi("label",e,n,i,a),Oi.drawBoundary(t,e,Zt)},r9=function(t,e,r,n){let i=0;for(let a=0;a<n.length;a++){i=0;const s=r[n[a]];let 
o=zc(Zt,s.typeC4Shape.text);switch(o.fontSize=o.fontSize-2,s.typeC4Shape.width=Bi("<<"+s.typeC4Shape.text+">>",o),s.typeC4Shape.height=o.fontSize+2,s.typeC4Shape.Y=Zt.c4ShapePadding,i=s.typeC4Shape.Y+s.typeC4Shape.height-4,s.image={width:0,height:0,Y:0},s.typeC4Shape.text){case"person":case"external_person":s.image.width=48,s.image.height=48,s.image.Y=i,i=s.image.Y+s.image.height;break}s.sprite&&(s.image.width=48,s.image.height=48,s.image.Y=i,i=s.image.Y+s.image.height);let l=s.wrap&&Zt.wrap,u=Zt.width-Zt.c4ShapePadding*2,h=zc(Zt,s.typeC4Shape.text);if(h.fontSize=h.fontSize+2,h.fontWeight="bold",gi("label",s,l,h,u),s.label.Y=i+8,i=s.label.Y+s.label.height,s.type&&s.type.text!==""){s.type.text="["+s.type.text+"]";let p=zc(Zt,s.typeC4Shape.text);gi("type",s,l,p,u),s.type.Y=i+5,i=s.type.Y+s.type.height}else if(s.techn&&s.techn.text!==""){s.techn.text="["+s.techn.text+"]";let p=zc(Zt,s.techn.text);gi("techn",s,l,p,u),s.techn.Y=i+5,i=s.techn.Y+s.techn.height}let d=i,f=s.label.width;if(s.descr&&s.descr.text!==""){let p=zc(Zt,s.typeC4Shape.text);gi("descr",s,l,p,u),s.descr.Y=i+20,i=s.descr.Y+s.descr.height,f=Math.max(s.label.width,s.descr.width),d=i-s.descr.textLines*5}f=f+Zt.c4ShapePadding,s.width=Math.max(s.width||Zt.width,f,Zt.width),s.height=Math.max(s.height||Zt.height,d,Zt.height),s.margin=s.margin||Zt.c4ShapeMargin,t.insert(s),Oi.drawC4Shape(e,s,Zt)}t.bumpLastMargin(Zt.c4ShapeMargin)};class Un{constructor(e,r){this.x=e,this.y=r}}let n9=function(t,e){let r=t.x,n=t.y,i=e.x,a=e.y,s=r+t.width/2,o=n+t.height/2,l=Math.abs(r-i),u=Math.abs(n-a),h=u/l,d=t.height/t.width,f=null;return n==a&&r<i?f=new Un(r+t.width,o):n==a&&r>i?f=new Un(r,o):r==i&&n<a?f=new Un(s,n+t.height):r==i&&n>a&&(f=new Un(s,n)),r>i&&n<a?d>=h?f=new Un(r,o+h*t.width/2):f=new Un(s-l/u*t.height/2,n+t.height):r<i&&n<a?d>=h?f=new Un(r+t.width,o+h*t.width/2):f=new Un(s+l/u*t.height/2,n+t.height):r<i&&n>a?d>=h?f=new Un(r+t.width,o-h*t.width/2):f=new Un(s+t.height/2*l/u,n):r>i&&n>a&&(d>=h?f=new 
Un(r,o-t.width/2*h):f=new Un(s-t.height/2*l/u,n)),f},XK=function(t,e){let r={x:0,y:0};r.x=e.x+e.width/2,r.y=e.y+e.height/2;let n=n9(t,r);r.x=t.x+t.width/2,r.y=t.y+t.height/2;let i=n9(e,r);return{startPoint:n,endPoint:i}};const KK=function(t,e,r,n){let i=0;for(let a of e){i=i+1;let s=a.wrap&&Zt.wrap,o=$K(Zt);n.db.getC4Type()==="C4Dynamic"&&(a.label.text=i+": "+a.label.text);let u=Bi(a.label.text,o);gi("label",a,s,o,u),a.techn&&a.techn.text!==""&&(u=Bi(a.techn.text,o),gi("techn",a,s,o,u)),a.descr&&a.descr.text!==""&&(u=Bi(a.descr.text,o),gi("descr",a,s,o,u));let h=r(a.from),d=r(a.to),f=XK(h,d);a.startPoint=f.startPoint,a.endPoint=f.endPoint}Oi.drawRels(t,e,Zt)};function i9(t,e,r,n,i){let a=new t9(i);a.data.widthLimit=r.data.widthLimit/Math.min(vg,n.length);for(let s=0;s<n.length;s++){let o=n[s],l=0;o.image={width:0,height:0,Y:0},o.sprite&&(o.image.width=48,o.image.height=48,o.image.Y=l,l=o.image.Y+o.image.height);let u=o.wrap&&Zt.wrap,h=b1(Zt);if(h.fontSize=h.fontSize+2,h.fontWeight="bold",gi("label",o,u,h,a.data.widthLimit),o.label.Y=l+8,l=o.label.Y+o.label.height,o.type&&o.type.text!==""){o.type.text="["+o.type.text+"]";let m=b1(Zt);gi("type",o,u,m,a.data.widthLimit),o.type.Y=l+5,l=o.type.Y+o.type.height}if(o.descr&&o.descr.text!==""){let m=b1(Zt);m.fontSize=m.fontSize-2,gi("descr",o,u,m,a.data.widthLimit),o.descr.Y=l+20,l=o.descr.Y+o.descr.height}if(s==0||s%vg===0){let m=r.data.startx+Zt.diagramMarginX,_=r.data.stopy+Zt.diagramMarginY+l;a.setData(m,m,_,_)}else{let m=a.data.stopx!==a.data.startx?a.data.stopx+Zt.diagramMarginX:a.data.startx,_=a.data.starty;a.setData(m,m,_,_)}a.name=o.alias;let d=i.db.getC4ShapeArray(o.alias),f=i.db.getC4ShapeKeys(o.alias);f.length>0&&r9(a,t,d,f),e=o.alias;let p=i.db.getBoundarys(e);p.length>0&&i9(t,e,a,p,i),o.alias!=="global"&&e9(t,o,a),r.data.stopy=Math.max(a.data.stopy+Zt.c4ShapeMargin,r.data.stopy),r.data.stopx=Math.max(a.data.stopx+Zt.c4ShapeMargin,r.data.stopx),y1=Math.max(y1,r.data.stopx),m1=Math.max(m1,r.data.stopy)}}const 
a9={drawPersonOrSystemArray:r9,drawBoundary:e9,setConf:xg,draw:function(t,e,r,n){Zt=nt().c4;const i=nt().securityLevel;let a;i==="sandbox"&&(a=St("#i"+e));const s=St(i==="sandbox"?a.nodes()[0].contentDocument.body:"body");let o=n.db;n.db.setWrap(Zt.wrap),Jw=o.getC4ShapeInRow(),vg=o.getC4BoundaryInRow(),H.debug(`C:${JSON.stringify(Zt,null,2)}`);const l=i==="sandbox"?s.select(`[id="${e}"]`):St(`[id="${e}"]`);Oi.insertComputerIcon(l),Oi.insertDatabaseIcon(l),Oi.insertClockIcon(l);let u=new t9(n);u.setData(Zt.diagramMarginX,Zt.diagramMarginX,Zt.diagramMarginY,Zt.diagramMarginY),u.data.widthLimit=screen.availWidth,y1=Zt.diagramMarginX,m1=Zt.diagramMarginY;const h=n.db.getTitle();let d=n.db.getBoundarys("");i9(l,"",u,d,n),Oi.insertArrowHead(l),Oi.insertArrowEnd(l),Oi.insertArrowCrossHead(l),Oi.insertArrowFilledHead(l),KK(l,n.db.getRels(),n.db.getC4Shape,n),u.data.stopx=y1,u.data.stopy=m1;const f=u.data;let m=f.stopy-f.starty+2*Zt.diagramMarginY;const y=f.stopx-f.startx+2*Zt.diagramMarginX;h&&l.append("text").text(h).attr("x",(f.stopx-f.startx)/2-4*Zt.diagramMarginX).attr("y",f.starty+Zt.diagramMarginY),li(l,m,y,Zt.useMaxWidth);const b=h?60:0;l.attr("viewBox",f.startx-Zt.diagramMarginX+" -"+(Zt.diagramMarginY+b)+" "+y+" "+(m+b)),bn(qc.yy,l,e),H.debug("models:",f)}};var _1=function(){var t=function(Z,V,Q,q){for(Q=Q||{},q=Z.length;q--;Q[Z[q]]=V);return 
Q},e=[1,3],r=[1,7],n=[1,8],i=[1,9],a=[1,10],s=[1,13],o=[1,12],l=[1,16,25],u=[1,20],h=[1,31],d=[1,32],f=[1,33],p=[1,35],m=[1,38],_=[1,36],y=[1,37],b=[1,39],x=[1,40],k=[1,41],T=[1,42],C=[1,45],M=[1,46],S=[1,47],R=[1,48],A=[16,25],L=[1,62],v=[1,63],B=[1,64],w=[1,65],D=[1,66],N=[1,67],z=[1,68],X=[16,25,32,44,45,53,56,57,58,59,60,61,62,67,69],ct=[16,25,30,32,44,45,49,53,56,57,58,59,60,61,62,67,69,84,85,86,87],J=[5,8,9,10,11,16,19,23,25],Y=[53,84,85,86,87],$=[53,61,62,84,85,86,87],lt=[53,56,57,58,59,60,84,85,86,87],ut=[16,25,32],W=[1,100],tt={trace:function(){},yy:{},symbols_:{error:2,start:3,mermaidDoc:4,statments:5,direction:6,directive:7,direction_tb:8,direction_bt:9,direction_rl:10,direction_lr:11,graphConfig:12,openDirective:13,typeDirective:14,closeDirective:15,NEWLINE:16,":":17,argDirective:18,open_directive:19,type_directive:20,arg_directive:21,close_directive:22,CLASS_DIAGRAM:23,statements:24,EOF:25,statement:26,className:27,alphaNumToken:28,classLiteralName:29,GENERICTYPE:30,relationStatement:31,LABEL:32,classStatement:33,methodStatement:34,annotationStatement:35,clickStatement:36,cssClassStatement:37,acc_title:38,acc_title_value:39,acc_descr:40,acc_descr_value:41,acc_descr_multiline_value:42,CLASS:43,STYLE_SEPARATOR:44,STRUCT_START:45,members:46,STRUCT_STOP:47,ANNOTATION_START:48,ANNOTATION_END:49,MEMBER:50,SEPARATOR:51,relation:52,STR:53,relationType:54,lineType:55,AGGREGATION:56,EXTENSION:57,COMPOSITION:58,DEPENDENCY:59,LOLLIPOP:60,LINE:61,DOTTED_LINE:62,CALLBACK:63,LINK:64,LINK_TARGET:65,CLICK:66,CALLBACK_NAME:67,CALLBACK_ARGS:68,HREF:69,CSSCLASS:70,commentToken:71,textToken:72,graphCodeTokens:73,textNoTagsToken:74,TAGSTART:75,TAGEND:76,"==":77,"--":78,PCT:79,DEFAULT:80,SPACE:81,MINUS:82,keywords:83,UNICODE_TEXT:84,NUM:85,ALPHA:86,BQUOTE_STR:87,$accept:0,$end:1},terminals_:{2:"error",5:"statments",8:"direction_tb",9:"direction_bt",10:"direction_rl",11:"direction_lr",16:"NEWLINE",17:":",19:"open_directive",20:"type_directive",21:"arg_directive",22:"close_dire
ctive",23:"CLASS_DIAGRAM",25:"EOF",30:"GENERICTYPE",32:"LABEL",38:"acc_title",39:"acc_title_value",40:"acc_descr",41:"acc_descr_value",42:"acc_descr_multiline_value",43:"CLASS",44:"STYLE_SEPARATOR",45:"STRUCT_START",47:"STRUCT_STOP",48:"ANNOTATION_START",49:"ANNOTATION_END",50:"MEMBER",51:"SEPARATOR",53:"STR",56:"AGGREGATION",57:"EXTENSION",58:"COMPOSITION",59:"DEPENDENCY",60:"LOLLIPOP",61:"LINE",62:"DOTTED_LINE",63:"CALLBACK",64:"LINK",65:"LINK_TARGET",66:"CLICK",67:"CALLBACK_NAME",68:"CALLBACK_ARGS",69:"HREF",70:"CSSCLASS",73:"graphCodeTokens",75:"TAGSTART",76:"TAGEND",77:"==",78:"--",79:"PCT",80:"DEFAULT",81:"SPACE",82:"MINUS",83:"keywords",84:"UNICODE_TEXT",85:"NUM",86:"ALPHA",87:"BQUOTE_STR"},productions_:[0,[3,1],[3,1],[3,1],[3,2],[6,1],[6,1],[6,1],[6,1],[4,1],[7,4],[7,6],[13,1],[14,1],[18,1],[15,1],[12,4],[24,1],[24,2],[24,3],[27,1],[27,1],[27,2],[27,2],[27,2],[26,1],[26,2],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,2],[26,2],[26,1],[33,2],[33,4],[33,5],[33,7],[35,4],[46,1],[46,2],[34,1],[34,2],[34,1],[34,1],[31,3],[31,4],[31,4],[31,5],[52,3],[52,2],[52,2],[52,1],[54,1],[54,1],[54,1],[54,1],[54,1],[55,1],[55,1],[36,3],[36,4],[36,3],[36,4],[36,4],[36,5],[36,3],[36,4],[36,4],[36,5],[36,3],[36,4],[36,4],[36,5],[37,3],[71,1],[71,1],[72,1],[72,1],[72,1],[72,1],[72,1],[72,1],[72,1],[74,1],[74,1],[74,1],[74,1],[28,1],[28,1],[28,1],[29,1]],performAction:function(V,Q,q,U,F,j,P){var et=j.length-1;switch(F){case 5:U.setDirection("TB");break;case 6:U.setDirection("BT");break;case 7:U.setDirection("RL");break;case 8:U.setDirection("LR");break;case 12:U.parseDirective("%%{","open_directive");break;case 13:U.parseDirective(j[et],"type_directive");break;case 14:j[et]=j[et].trim().replace(/'/g,'"'),U.parseDirective(j[et],"arg_directive");break;case 15:U.parseDirective("}%%","close_directive","class");break;case 20:case 21:this.$=j[et];break;case 22:this.$=j[et-1]+j[et];break;case 23:case 24:this.$=j[et-1]+"~"+j[et];break;case 25:U.addRelation(j[et]);break;case 
26:j[et-1].title=U.cleanupLabel(j[et]),U.addRelation(j[et-1]);break;case 34:this.$=j[et].trim(),U.setAccTitle(this.$);break;case 35:case 36:this.$=j[et].trim(),U.setAccDescription(this.$);break;case 37:U.addClass(j[et]);break;case 38:U.addClass(j[et-2]),U.setCssClass(j[et-2],j[et]);break;case 39:U.addClass(j[et-3]),U.addMembers(j[et-3],j[et-1]);break;case 40:U.addClass(j[et-5]),U.setCssClass(j[et-5],j[et-3]),U.addMembers(j[et-5],j[et-1]);break;case 41:U.addAnnotation(j[et],j[et-2]);break;case 42:this.$=[j[et]];break;case 43:j[et].push(j[et-1]),this.$=j[et];break;case 44:break;case 45:U.addMember(j[et-1],U.cleanupLabel(j[et]));break;case 46:break;case 47:break;case 48:this.$={id1:j[et-2],id2:j[et],relation:j[et-1],relationTitle1:"none",relationTitle2:"none"};break;case 49:this.$={id1:j[et-3],id2:j[et],relation:j[et-1],relationTitle1:j[et-2],relationTitle2:"none"};break;case 50:this.$={id1:j[et-3],id2:j[et],relation:j[et-2],relationTitle1:"none",relationTitle2:j[et-1]};break;case 51:this.$={id1:j[et-4],id2:j[et],relation:j[et-2],relationTitle1:j[et-3],relationTitle2:j[et-1]};break;case 52:this.$={type1:j[et-2],type2:j[et],lineType:j[et-1]};break;case 53:this.$={type1:"none",type2:j[et],lineType:j[et-1]};break;case 54:this.$={type1:j[et-1],type2:"none",lineType:j[et]};break;case 55:this.$={type1:"none",type2:"none",lineType:j[et]};break;case 56:this.$=U.relationType.AGGREGATION;break;case 57:this.$=U.relationType.EXTENSION;break;case 58:this.$=U.relationType.COMPOSITION;break;case 59:this.$=U.relationType.DEPENDENCY;break;case 60:this.$=U.relationType.LOLLIPOP;break;case 61:this.$=U.lineType.LINE;break;case 62:this.$=U.lineType.DOTTED_LINE;break;case 63:case 69:this.$=j[et-2],U.setClickEvent(j[et-1],j[et]);break;case 64:case 70:this.$=j[et-3],U.setClickEvent(j[et-2],j[et-1]),U.setTooltip(j[et-2],j[et]);break;case 65:case 73:this.$=j[et-2],U.setLink(j[et-1],j[et]);break;case 66:this.$=j[et-3],U.setLink(j[et-2],j[et-1],j[et]);break;case 67:case 
75:this.$=j[et-3],U.setLink(j[et-2],j[et-1]),U.setTooltip(j[et-2],j[et]);break;case 68:case 76:this.$=j[et-4],U.setLink(j[et-3],j[et-2],j[et]),U.setTooltip(j[et-3],j[et-1]);break;case 71:this.$=j[et-3],U.setClickEvent(j[et-2],j[et-1],j[et]);break;case 72:this.$=j[et-4],U.setClickEvent(j[et-3],j[et-2],j[et-1]),U.setTooltip(j[et-3],j[et]);break;case 74:this.$=j[et-3],U.setLink(j[et-2],j[et-1],j[et]);break;case 77:U.setCssClass(j[et-1],j[et]);break}},table:[{3:1,4:2,5:e,6:4,7:5,8:r,9:n,10:i,11:a,12:6,13:11,19:s,23:o},{1:[3]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{3:14,4:2,5:e,6:4,7:5,8:r,9:n,10:i,11:a,12:6,13:11,19:s,23:o},{1:[2,9]},t(l,[2,5]),t(l,[2,6]),t(l,[2,7]),t(l,[2,8]),{14:15,20:[1,16]},{16:[1,17]},{20:[2,12]},{1:[2,4]},{15:18,17:[1,19],22:u},t([17,22],[2,13]),{6:30,7:29,8:r,9:n,10:i,11:a,13:11,19:s,24:21,26:22,27:34,28:43,29:44,31:23,33:24,34:25,35:26,36:27,37:28,38:h,40:d,42:f,43:p,48:m,50:_,51:y,63:b,64:x,66:k,70:T,84:C,85:M,86:S,87:R},{16:[1,49]},{18:50,21:[1,51]},{16:[2,15]},{25:[1,52]},{16:[1,53],25:[2,17]},t(A,[2,25],{32:[1,54]}),t(A,[2,27]),t(A,[2,28]),t(A,[2,29]),t(A,[2,30]),t(A,[2,31]),t(A,[2,32]),t(A,[2,33]),{39:[1,55]},{41:[1,56]},t(A,[2,36]),t(A,[2,44],{52:57,54:60,55:61,32:[1,59],53:[1,58],56:L,57:v,58:B,59:w,60:D,61:N,62:z}),{27:69,28:43,29:44,84:C,85:M,86:S,87:R},t(A,[2,46]),t(A,[2,47]),{28:70,84:C,85:M,86:S},{27:71,28:43,29:44,84:C,85:M,86:S,87:R},{27:72,28:43,29:44,84:C,85:M,86:S,87:R},{27:73,28:43,29:44,84:C,85:M,86:S,87:R},{53:[1,74]},t(X,[2,20],{28:43,29:44,27:75,30:[1,76],84:C,85:M,86:S,87:R}),t(X,[2,21],{30:[1,77]}),t(ct,[2,91]),t(ct,[2,92]),t(ct,[2,93]),t([16,25,30,32,44,45,53,56,57,58,59,60,61,62,67,69],[2,94]),t(J,[2,10]),{15:78,22:u},{22:[2,14]},{1:[2,16]},{6:30,7:29,8:r,9:n,10:i,11:a,13:11,19:s,24:79,25:[2,18],26:22,27:34,28:43,29:44,31:23,33:24,34:25,35:26,36:27,37:28,38:h,40:d,42:f,43:p,48:m,50:_,51:y,63:b,64:x,66:k,70:T,84:C,85:M,86:S,87:R},t(A,[2,26]),t(A,[2,34]),t(A,[2,35]),{27:80,28:43,29:44,53:[1,81],84:C,85:M,86:S,87:R},{52:82,54:60,5
5:61,56:L,57:v,58:B,59:w,60:D,61:N,62:z},t(A,[2,45]),{55:83,61:N,62:z},t(Y,[2,55],{54:84,56:L,57:v,58:B,59:w,60:D}),t($,[2,56]),t($,[2,57]),t($,[2,58]),t($,[2,59]),t($,[2,60]),t(lt,[2,61]),t(lt,[2,62]),t(A,[2,37],{44:[1,85],45:[1,86]}),{49:[1,87]},{53:[1,88]},{53:[1,89]},{67:[1,90],69:[1,91]},{28:92,84:C,85:M,86:S},t(X,[2,22]),t(X,[2,23]),t(X,[2,24]),{16:[1,93]},{25:[2,19]},t(ut,[2,48]),{27:94,28:43,29:44,84:C,85:M,86:S,87:R},{27:95,28:43,29:44,53:[1,96],84:C,85:M,86:S,87:R},t(Y,[2,54],{54:97,56:L,57:v,58:B,59:w,60:D}),t(Y,[2,53]),{28:98,84:C,85:M,86:S},{46:99,50:W},{27:101,28:43,29:44,84:C,85:M,86:S,87:R},t(A,[2,63],{53:[1,102]}),t(A,[2,65],{53:[1,104],65:[1,103]}),t(A,[2,69],{53:[1,105],68:[1,106]}),t(A,[2,73],{53:[1,108],65:[1,107]}),t(A,[2,77]),t(J,[2,11]),t(ut,[2,50]),t(ut,[2,49]),{27:109,28:43,29:44,84:C,85:M,86:S,87:R},t(Y,[2,52]),t(A,[2,38],{45:[1,110]}),{47:[1,111]},{46:112,47:[2,42],50:W},t(A,[2,41]),t(A,[2,64]),t(A,[2,66]),t(A,[2,67],{65:[1,113]}),t(A,[2,70]),t(A,[2,71],{53:[1,114]}),t(A,[2,74]),t(A,[2,75],{65:[1,115]}),t(ut,[2,51]),{46:116,50:W},t(A,[2,39]),{47:[2,43]},t(A,[2,68]),t(A,[2,72]),t(A,[2,76]),{47:[1,117]},t(A,[2,40])],defaultActions:{2:[2,1],3:[2,2],4:[2,3],6:[2,9],13:[2,12],14:[2,4],20:[2,15],51:[2,14],52:[2,16],79:[2,19],112:[2,43]},parseError:function(V,Q){if(Q.recoverable)this.trace(V);else{var q=new Error(V);throw q.hash=Q,q}},parse:function(V){var Q=this,q=[0],U=[],F=[null],j=[],P=this.table,et="",at=0,It=0,Lt=2,Rt=1,Ct=j.slice.call(arguments,1),pt=Object.create(this.lexer),mt={yy:{}};for(var vt in this.yy)Object.prototype.hasOwnProperty.call(this.yy,vt)&&(mt.yy[vt]=this.yy[vt]);pt.setInput(V,mt.yy),mt.yy.lexer=pt,mt.yy.parser=this,typeof pt.yylloc>"u"&&(pt.yylloc={});var Tt=pt.yylloc;j.push(Tt);var ft=pt.options&&pt.options.ranges;typeof mt.yy.parseError=="function"?this.parseError=mt.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function le(){var Et;return Et=U.pop()||pt.lex()||Rt,typeof Et!="number"&&(Et 
instanceof Array&&(U=Et,Et=U.pop()),Et=Q.symbols_[Et]||Et),Et}for(var Dt,Gt,$t,Qt,we={},jt,Ft,zt,wt;;){if(Gt=q[q.length-1],this.defaultActions[Gt]?$t=this.defaultActions[Gt]:((Dt===null||typeof Dt>"u")&&(Dt=le()),$t=P[Gt]&&P[Gt][Dt]),typeof $t>"u"||!$t.length||!$t[0]){var bt="";wt=[];for(jt in P[Gt])this.terminals_[jt]&&jt>Lt&&wt.push("'"+this.terminals_[jt]+"'");pt.showPosition?bt="Parse error on line "+(at+1)+`: -`+pt.showPosition()+` -Expecting `+wt.join(", ")+", got '"+(this.terminals_[Dt]||Dt)+"'":bt="Parse error on line "+(at+1)+": Unexpected "+(Dt==Rt?"end of input":"'"+(this.terminals_[Dt]||Dt)+"'"),this.parseError(bt,{text:pt.match,token:this.terminals_[Dt]||Dt,line:pt.yylineno,loc:Tt,expected:wt})}if($t[0]instanceof Array&&$t.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Gt+", token: "+Dt);switch($t[0]){case 1:q.push(Dt),F.push(pt.yytext),j.push(pt.yylloc),q.push($t[1]),Dt=null,It=pt.yyleng,et=pt.yytext,at=pt.yylineno,Tt=pt.yylloc;break;case 2:if(Ft=this.productions_[$t[1]][1],we.$=F[F.length-Ft],we._$={first_line:j[j.length-(Ft||1)].first_line,last_line:j[j.length-1].last_line,first_column:j[j.length-(Ft||1)].first_column,last_column:j[j.length-1].last_column},ft&&(we._$.range=[j[j.length-(Ft||1)].range[0],j[j.length-1].range[1]]),Qt=this.performAction.apply(we,[et,It,at,mt.yy,$t[1],F,j].concat(Ct)),typeof Qt<"u")return Qt;Ft&&(q=q.slice(0,-1*Ft*2),F=F.slice(0,-1*Ft),j=j.slice(0,-1*Ft)),q.push(this.productions_[$t[1]][0]),F.push(we.$),j.push(we._$),zt=P[q[q.length-2]][q[q.length-1]],q.push(zt);break;case 3:return!0}}return!0}},K=function(){var Z={EOF:1,parseError:function(Q,q){if(this.yy.parser)this.yy.parser.parseError(Q,q);else throw new Error(Q)},setInput:function(V,Q){return 
this.yy=Q||this.yy||{},this._input=V,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var V=this._input[0];this.yytext+=V,this.yyleng++,this.offset++,this.match+=V,this.matched+=V;var Q=V.match(/(?:\r\n?|\n).*/g);return Q?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),V},unput:function(V){var Q=V.length,q=V.split(/(?:\r\n?|\n)/g);this._input=V+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-Q),this.offset-=Q;var U=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),q.length-1&&(this.yylineno-=q.length-1);var F=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:q?(q.length===U.length?this.yylloc.first_column:0)+U[U.length-q.length].length-q[0].length:this.yylloc.first_column-Q},this.options.ranges&&(this.yylloc.range=[F[0],F[0]+this.yyleng-Q]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(V){this.unput(this.match.slice(V))},pastInput:function(){var V=this.matched.substr(0,this.matched.length-this.match.length);return(V.length>20?"...":"")+V.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var V=this.match;return V.length<20&&(V+=this._input.substr(0,20-V.length)),(V.substr(0,20)+(V.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var V=this.pastInput(),Q=new Array(V.length+1).join("-");return V+this.upcomingInput()+` -`+Q+"^"},test_match:function(V,Q){var q,U,F;if(this.options.backtrack_lexer&&(F={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(F.yylloc.range=this.yylloc.range.slice(0))),U=V[0].match(/(?:\r\n?|\n).*/g),U&&(this.yylineno+=U.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:U?U[U.length-1].length-U[U.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+V[0].length},this.yytext+=V[0],this.match+=V[0],this.matches=V,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(V[0].length),this.matched+=V[0],q=this.performAction.call(this,this.yy,this,Q,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),q)return q;if(this._backtrack){for(var j in F)this[j]=F[j];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var V,Q,q,U;this._more||(this.yytext="",this.match="");for(var 
F=this._currentRules(),j=0;j<F.length;j++)if(q=this._input.match(this.rules[F[j]]),q&&(!Q||q[0].length>Q[0].length)){if(Q=q,U=j,this.options.backtrack_lexer){if(V=this.test_match(q,F[j]),V!==!1)return V;if(this._backtrack){Q=!1;continue}else return!1}else if(!this.options.flex)break}return Q?(V=this.test_match(Q,F[U]),V!==!1?V:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var Q=this.next();return Q||this.lex()},begin:function(Q){this.conditionStack.push(Q)},popState:function(){var Q=this.conditionStack.length-1;return Q>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(Q){return Q=this.conditionStack.length-1-Math.abs(Q||0),Q>=0?this.conditionStack[Q]:"INITIAL"},pushState:function(Q){this.begin(Q)},stateStackSize:function(){return this.conditionStack.length},options:{},performAction:function(Q,q,U,F){switch(U){case 0:return this.begin("open_directive"),19;case 1:return 8;case 2:return 9;case 3:return 10;case 4:return 11;case 5:return this.begin("type_directive"),20;case 6:return this.popState(),this.begin("arg_directive"),17;case 7:return this.popState(),this.popState(),22;case 8:return 21;case 9:break;case 10:break;case 11:return this.begin("acc_title"),38;case 12:return this.popState(),"acc_title_value";case 13:return this.begin("acc_descr"),40;case 14:return this.popState(),"acc_descr_value";case 15:this.begin("acc_descr_multiline");break;case 16:this.popState();break;case 17:return"acc_descr_multiline_value";case 18:return 16;case 19:break;case 20:return 23;case 21:return 23;case 22:return this.begin("struct"),45;case 23:return"EDGE_STATE";case 24:return"EOF_IN_STRUCT";case 
25:return"OPEN_IN_STRUCT";case 26:return this.popState(),47;case 27:break;case 28:return"MEMBER";case 29:return 43;case 30:return 70;case 31:return 63;case 32:return 64;case 33:return 66;case 34:return 48;case 35:return 49;case 36:this.begin("generic");break;case 37:this.popState();break;case 38:return"GENERICTYPE";case 39:this.begin("string");break;case 40:this.popState();break;case 41:return"STR";case 42:this.begin("bqstring");break;case 43:this.popState();break;case 44:return"BQUOTE_STR";case 45:this.begin("href");break;case 46:this.popState();break;case 47:return 69;case 48:this.begin("callback_name");break;case 49:this.popState();break;case 50:this.popState(),this.begin("callback_args");break;case 51:return 67;case 52:this.popState();break;case 53:return 68;case 54:return 65;case 55:return 65;case 56:return 65;case 57:return 65;case 58:return 57;case 59:return 57;case 60:return 59;case 61:return 59;case 62:return 58;case 63:return 56;case 64:return 60;case 65:return 61;case 66:return 62;case 67:return 32;case 68:return 44;case 69:return 82;case 70:return"DOT";case 71:return"PLUS";case 72:return 79;case 73:return"EQUALS";case 74:return"EQUALS";case 75:return 86;case 76:return"PUNCTUATION";case 77:return 85;case 78:return 84;case 79:return 81;case 80:return 
25}},rules:[/^(?:%%\{)/,/^(?:.*direction\s+TB[^\n]*)/,/^(?:.*direction\s+BT[^\n]*)/,/^(?:.*direction\s+RL[^\n]*)/,/^(?:.*direction\s+LR[^\n]*)/,/^(?:((?:(?!\}%%)[^:.])*))/,/^(?::)/,/^(?:\}%%)/,/^(?:((?:(?!\}%%).|\n)*))/,/^(?:%%(?!\{)*[^\n]*(\r?\n?)+)/,/^(?:%%[^\n]*(\r?\n)*)/,/^(?:accTitle\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*\{\s*)/,/^(?:[\}])/,/^(?:[^\}]*)/,/^(?:\s*(\r?\n)+)/,/^(?:\s+)/,/^(?:classDiagram-v2\b)/,/^(?:classDiagram\b)/,/^(?:[{])/,/^(?:\[\*\])/,/^(?:$)/,/^(?:[{])/,/^(?:[}])/,/^(?:[\n])/,/^(?:[^{}\n]*)/,/^(?:class\b)/,/^(?:cssClass\b)/,/^(?:callback\b)/,/^(?:link\b)/,/^(?:click\b)/,/^(?:<<)/,/^(?:>>)/,/^(?:[~])/,/^(?:[~])/,/^(?:[^~]*)/,/^(?:["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:[`])/,/^(?:[`])/,/^(?:[^`]+)/,/^(?:href[\s]+["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:call[\s]+)/,/^(?:\([\s]*\))/,/^(?:\()/,/^(?:[^(]*)/,/^(?:\))/,/^(?:[^)]*)/,/^(?:_self\b)/,/^(?:_blank\b)/,/^(?:_parent\b)/,/^(?:_top\b)/,/^(?:\s*<\|)/,/^(?:\s*\|>)/,/^(?:\s*>)/,/^(?:\s*<)/,/^(?:\s*\*)/,/^(?:\s*o\b)/,/^(?:\s*\(\))/,/^(?:--)/,/^(?:\.\.)/,/^(?::{1}[^:\n;]+)/,/^(?::{3})/,/^(?:-)/,/^(?:\.)/,/^(?:\+)/,/^(?:%)/,/^(?:=)/,/^(?:=)/,/^(?:\w+)/,/^(?:[!"#$%&'*+,-.`?\\/])/,/^(?:[0-9]+)/,/^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u0712-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A3
5\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u
1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/,/^(?:\s)/,/^(?:$)/],conditions:{acc_descr_multiline:{rules:[16,17],inclusive:!1},acc_descr:{rules:[14],inclusive:!1},acc_title:{rules:[12],inclusive:!1},arg_directive:{rules:[7,8],inclusive:!1},type_directive:{rules:[6,7],inclusive:!1},open_directive:{rules:[5],inclusive:!1},callback_args:{rules:[52,53],inclusive:!1},callback_name:{rules:[49,50,51],inclusive:!1},href:{rules:[46,47],inclusive:!1},st
ruct:{rules:[23,24,25,26,27,28],inclusive:!1},generic:{rules:[37,38],inclusive:!1},bqstring:{rules:[43,44],inclusive:!1},string:{rules:[40,41],inclusive:!1},INITIAL:{rules:[0,1,2,3,4,9,10,11,13,15,18,19,20,21,22,23,29,30,31,32,33,34,35,36,39,42,45,48,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80],inclusive:!0}}};return Z}();tt.lexer=K;function it(){this.yy={}}return it.prototype=tt,tt.Parser=it,new it}();_1.parser=_1;const ZK=(t,e)=>{var r;return((r=e==null?void 0:e.class)==null?void 0:r.defaultRenderer)==="dagre-wrapper"?!1:t.match(/^\s*classDiagram/)!==null},QK=(t,e)=>{var r;return t.match(/^\s*classDiagram/)!==null&&((r=e==null?void 0:e.class)==null?void 0:r.defaultRenderer)==="dagre-wrapper"?!0:t.match(/^\s*classDiagram-v2/)!==null},kg="classid-";let wg=[],lr={},s9=0,Yc=[];const Uc=t=>pe.sanitizeText(t,nt()),JK=function(t,e,r){Xe.parseDirective(this,t,e,r)},Wc=function(t){let e="",r=t;if(t.indexOf("~")>0){let n=t.split("~");r=n[0],e=pe.sanitizeText(n[1],nt())}return{className:r,type:e}},Tg=function(t){let e=Wc(t);typeof lr[e.className]<"u"||(lr[e.className]={id:e.className,type:e.type,cssClasses:[],methods:[],members:[],annotations:[],domId:kg+e.className+"-"+s9},s9++)},o9=function(t){const e=Object.keys(lr);for(let r=0;r<e.length;r++)if(lr[e[r]].id===t)return lr[e[r]].domId},tZ=function(){wg=[],lr={},Yc=[],Yc.push(c9),ci()},eZ=function(t){return lr[t]},rZ=function(){return lr},nZ=function(){return wg},iZ=function(t){H.debug("Adding relation: "+JSON.stringify(t)),Tg(t.id1),Tg(t.id2),t.id1=Wc(t.id1).className,t.id2=Wc(t.id2).className,t.relationTitle1=pe.sanitizeText(t.relationTitle1.trim(),nt()),t.relationTitle2=pe.sanitizeText(t.relationTitle2.trim(),nt()),wg.push(t)},aZ=function(t,e){const r=Wc(t).className;lr[r].annotations.push(e)},l9=function(t,e){const r=Wc(t).className,n=lr[r];if(typeof e=="string"){const 
i=e.trim();i.startsWith("<<")&&i.endsWith(">>")?n.annotations.push(Uc(i.substring(2,i.length-2))):i.indexOf(")")>0?n.methods.push(Uc(i)):i&&n.members.push(Uc(i))}},sZ=function(t,e){Array.isArray(e)&&(e.reverse(),e.forEach(r=>l9(t,r)))},oZ=function(t){return t.substring(0,1)===":"?pe.sanitizeText(t.substr(1).trim(),nt()):Uc(t.trim())},Eg=function(t,e){t.split(",").forEach(function(r){let n=r;r[0].match(/\d/)&&(n=kg+n),typeof lr[n]<"u"&&lr[n].cssClasses.push(e)})},lZ=function(t,e){const r=nt();t.split(",").forEach(function(n){typeof e<"u"&&(lr[n].tooltip=pe.sanitizeText(e,r))})},cZ=function(t){return lr[t].tooltip},uZ=function(t,e,r){const n=nt();t.split(",").forEach(function(i){let a=i;i[0].match(/\d/)&&(a=kg+a),typeof lr[a]<"u"&&(lr[a].link=Se.formatUrl(e,n),n.securityLevel==="sandbox"?lr[a].linkTarget="_top":typeof r=="string"?lr[a].linkTarget=Uc(r):lr[a].linkTarget="_blank")}),Eg(t,"clickable")},hZ=function(t,e,r){t.split(",").forEach(function(n){fZ(n,e,r),lr[n].haveCallback=!0}),Eg(t,"clickable")},fZ=function(t,e,r){const n=nt();let i=t,a=o9(i);if(n.securityLevel==="loose"&&!(typeof e>"u")&&typeof lr[i]<"u"){let s=[];if(typeof r=="string"){s=r.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/);for(let o=0;o<s.length;o++){let l=s[o].trim();l.charAt(0)==='"'&&l.charAt(l.length-1)==='"'&&(l=l.substr(1,l.length-2)),s[o]=l}}s.length===0&&s.push(a),Yc.push(function(){const o=document.querySelector(`[id="${a}"]`);o!==null&&o.addEventListener("click",function(){Se.runFunc(e,...s)},!1)})}},dZ=function(t){Yc.forEach(function(e){e(t)})},pZ={LINE:0,DOTTED_LINE:1},gZ={AGGREGATION:0,EXTENSION:1,COMPOSITION:2,DEPENDENCY:3,LOLLIPOP:4},c9=function(t){let e=St(".mermaidTooltip");(e._groups||e)[0][0]===null&&(e=St("body").append("div").attr("class","mermaidTooltip").style("opacity",0)),St(t).select("svg").selectAll("g.node").on("mouseover",function(){const i=St(this);if(i.attr("title")===null)return;const 
s=this.getBoundingClientRect();e.transition().duration(200).style("opacity",".9"),e.text(i.attr("title")).style("left",window.scrollX+s.left+(s.right-s.left)/2+"px").style("top",window.scrollY+s.top-14+document.body.scrollTop+"px"),e.html(e.html().replace(/<br\/>/g,"<br/>")),i.classed("hover",!0)}).on("mouseout",function(){e.transition().duration(500).style("opacity",0),St(this).classed("hover",!1)})};Yc.push(c9);let u9="TB";const Jo={parseDirective:JK,setAccTitle:Yn,getAccTitle:ui,getAccDescription:fi,setAccDescription:hi,getConfig:()=>nt().class,addClass:Tg,bindFunctions:dZ,clear:tZ,getClass:eZ,getClasses:rZ,addAnnotation:aZ,getRelations:nZ,addRelation:iZ,getDirection:()=>u9,setDirection:t=>{u9=t},addMember:l9,addMembers:sZ,cleanupLabel:oZ,lineType:pZ,relationType:gZ,setClickEvent:hZ,setCssClass:Eg,setLink:uZ,getTooltip:cZ,setTooltip:lZ,lookUpDomId:o9};var Cg,h9;function yZ(){if(h9)return Cg;h9=1;var t=Wf;function e(){this.__data__=new t,this.size=0}return Cg=e,Cg}var Sg,f9;function mZ(){if(f9)return Sg;f9=1;function t(e){var r=this.__data__,n=r.delete(e);return this.size=r.size,n}return Sg=t,Sg}var Ag,d9;function bZ(){if(d9)return Ag;d9=1;function t(e){return this.__data__.get(e)}return Ag=t,Ag}var Mg,p9;function _Z(){if(p9)return Mg;p9=1;function t(e){return this.__data__.has(e)}return Mg=t,Mg}var Lg,g9;function vZ(){if(g9)return Lg;g9=1;var t=Wf,e=Zp,r=Qp,n=200;function i(a,s){var o=this.__data__;if(o instanceof t){var l=o.__data__;if(!e||l.length<n-1)return l.push([a,s]),this.size=++o.size,this;o=this.__data__=new r(l)}return o.set(a,s),this.size=o.size,this}return Lg=i,Lg}var Rg,y9;function v1(){if(y9)return Rg;y9=1;var t=Wf,e=yZ(),r=mZ(),n=bZ(),i=_Z(),a=vZ();function s(o){var l=this.__data__=new t(o);this.size=l.size}return s.prototype.clear=e,s.prototype.delete=r,s.prototype.get=n,s.prototype.has=i,s.prototype.set=a,Rg=s,Rg}var Ig,m9;function Ng(){if(m9)return Ig;m9=1;function t(e,r){for(var n=-1,i=e==null?0:e.length;++n<i&&r(e[n],n,e)!==!1;);return 
e}return Ig=t,Ig}var Bg,b9;function _9(){if(b9)return Bg;b9=1;var t=qs,e=function(){try{var r=t(Object,"defineProperty");return r({},"",{}),r}catch{}}();return Bg=e,Bg}var Dg,v9;function x1(){if(v9)return Dg;v9=1;var t=_9();function e(r,n,i){n=="__proto__"&&t?t(r,n,{configurable:!0,enumerable:!0,value:i,writable:!0}):r[n]=i}return Dg=e,Dg}var Og,x9;function k1(){if(x9)return Og;x9=1;var t=x1(),e=Wo,r=Object.prototype,n=r.hasOwnProperty;function i(a,s,o){var l=a[s];(!(n.call(a,s)&&e(l,o))||o===void 0&&!(s in a))&&t(a,s,o)}return Og=i,Og}var Fg,k9;function Hc(){if(k9)return Fg;k9=1;var t=k1(),e=x1();function r(n,i,a,s){var o=!a;a||(a={});for(var l=-1,u=i.length;++l<u;){var h=i[l],d=s?s(a[h],n[h],h,a,n):void 0;d===void 0&&(d=n[h]),o?e(a,h,d):t(a,h,d)}return a}return Fg=r,Fg}var Pg,w9;function xZ(){if(w9)return Pg;w9=1;function t(e,r){for(var n=-1,i=Array(e);++n<e;)i[n]=r(n);return i}return Pg=t,Pg}var qg,T9;function Fi(){if(T9)return qg;T9=1;function t(e){return e!=null&&typeof e=="object"}return qg=t,qg}var Vg,E9;function kZ(){if(E9)return Vg;E9=1;var t=Ps,e=Fi(),r="[object Arguments]";function n(i){return e(i)&&t(i)==r}return Vg=n,Vg}var zg,C9;function Gc(){if(C9)return zg;C9=1;var t=kZ(),e=Fi(),r=Object.prototype,n=r.hasOwnProperty,i=r.propertyIsEnumerable,a=t(function(){return arguments}())?t:function(s){return e(s)&&n.call(s,"callee")&&!i.call(s,"callee")};return zg=a,zg}var Yg,S9;function gr(){if(S9)return Yg;S9=1;var t=Array.isArray;return Yg=t,Yg}var w1={exports:{}},Ug,A9;function wZ(){if(A9)return Ug;A9=1;function t(){return!1}return Ug=t,Ug}var M9;function tl(){return M9||(M9=1,function(t,e){var r=si,n=wZ(),i=e&&!e.nodeType&&e,a=i&&!0&&t&&!t.nodeType&&t,s=a&&a.exports===i,o=s?r.Buffer:void 0,l=o?o.isBuffer:void 0,u=l||n;t.exports=u}(w1,w1.exports)),w1.exports}var Wg,L9;function T1(){if(L9)return Wg;L9=1;var t=9007199254740991,e=/^(?:0|[1-9]\d*)$/;function r(n,i){var a=typeof n;return 
i=i==null?t:i,!!i&&(a=="number"||a!="symbol"&&e.test(n))&&n>-1&&n%1==0&&n<i}return Wg=r,Wg}var Hg,R9;function Gg(){if(R9)return Hg;R9=1;var t=9007199254740991;function e(r){return typeof r=="number"&&r>-1&&r%1==0&&r<=t}return Hg=e,Hg}var jg,I9;function TZ(){if(I9)return jg;I9=1;var t=Ps,e=Gg(),r=Fi(),n="[object Arguments]",i="[object Array]",a="[object Boolean]",s="[object Date]",o="[object Error]",l="[object Function]",u="[object Map]",h="[object Number]",d="[object Object]",f="[object RegExp]",p="[object Set]",m="[object String]",_="[object WeakMap]",y="[object ArrayBuffer]",b="[object DataView]",x="[object Float32Array]",k="[object Float64Array]",T="[object Int8Array]",C="[object Int16Array]",M="[object Int32Array]",S="[object Uint8Array]",R="[object Uint8ClampedArray]",A="[object Uint16Array]",L="[object Uint32Array]",v={};v[x]=v[k]=v[T]=v[C]=v[M]=v[S]=v[R]=v[A]=v[L]=!0,v[n]=v[i]=v[y]=v[a]=v[b]=v[s]=v[o]=v[l]=v[u]=v[h]=v[d]=v[f]=v[p]=v[m]=v[_]=!1;function B(w){return r(w)&&e(w.length)&&!!v[t(w)]}return jg=B,jg}var $g,N9;function E1(){if(N9)return $g;N9=1;function t(e){return function(r){return e(r)}}return $g=t,$g}var C1={exports:{}},B9;function Xg(){return B9||(B9=1,function(t,e){var r=Xk,n=e&&!e.nodeType&&e,i=n&&!0&&t&&!t.nodeType&&t,a=i&&i.exports===n,s=a&&r.process,o=function(){try{var l=i&&i.require&&i.require("util").types;return l||s&&s.binding&&s.binding("util")}catch{}}();t.exports=o}(C1,C1.exports)),C1.exports}var Kg,D9;function jc(){if(D9)return Kg;D9=1;var t=TZ(),e=E1(),r=Xg(),n=r&&r.isTypedArray,i=n?e(n):t;return Kg=i,Kg}var Zg,O9;function F9(){if(O9)return Zg;O9=1;var t=xZ(),e=Gc(),r=gr(),n=tl(),i=T1(),a=jc(),s=Object.prototype,o=s.hasOwnProperty;function l(u,h){var d=r(u),f=!d&&e(u),p=!d&&!f&&n(u),m=!d&&!f&&!p&&a(u),_=d||f||p||m,y=_?t(u.length,String):[],b=y.length;for(var x in u)(h||o.call(u,x))&&!(_&&(x=="length"||p&&(x=="offset"||x=="parent")||m&&(x=="buffer"||x=="byteLength"||x=="byteOffset")||i(x,b)))&&y.push(x);return y}return Zg=l,Zg}var 
Qg,P9;function S1(){if(P9)return Qg;P9=1;var t=Object.prototype;function e(r){var n=r&&r.constructor,i=typeof n=="function"&&n.prototype||t;return r===i}return Qg=e,Qg}var Jg,q9;function V9(){if(q9)return Jg;q9=1;function t(e,r){return function(n){return e(r(n))}}return Jg=t,Jg}var ty,z9;function EZ(){if(z9)return ty;z9=1;var t=V9(),e=t(Object.keys,Object);return ty=e,ty}var ey,Y9;function ry(){if(Y9)return ey;Y9=1;var t=S1(),e=EZ(),r=Object.prototype,n=r.hasOwnProperty;function i(a){if(!t(a))return e(a);var s=[];for(var o in Object(a))n.call(a,o)&&o!="constructor"&&s.push(o);return s}return ey=i,ey}var ny,U9;function oa(){if(U9)return ny;U9=1;var t=Yo,e=Gg();function r(n){return n!=null&&e(n.length)&&!t(n)}return ny=r,ny}var iy,W9;function ts(){if(W9)return iy;W9=1;var t=F9(),e=ry(),r=oa();function n(i){return r(i)?t(i):e(i)}return iy=n,iy}var ay,H9;function CZ(){if(H9)return ay;H9=1;var t=Hc(),e=ts();function r(n,i){return n&&t(i,e(i),n)}return ay=r,ay}var sy,G9;function SZ(){if(G9)return sy;G9=1;function t(e){var r=[];if(e!=null)for(var n in Object(e))r.push(n);return r}return sy=t,sy}var oy,j9;function AZ(){if(j9)return oy;j9=1;var t=Vn,e=S1(),r=SZ(),n=Object.prototype,i=n.hasOwnProperty;function a(s){if(!t(s))return r(s);var o=e(s),l=[];for(var u in s)u=="constructor"&&(o||!i.call(s,u))||l.push(u);return l}return oy=a,oy}var ly,$9;function Ws(){if($9)return ly;$9=1;var t=F9(),e=AZ(),r=oa();function n(i){return r(i)?t(i,!0):e(i)}return ly=n,ly}var cy,X9;function MZ(){if(X9)return cy;X9=1;var t=Hc(),e=Ws();function r(n,i){return n&&t(i,e(i),n)}return cy=r,cy}var A1={exports:{}},K9;function Z9(){return K9||(K9=1,function(t,e){var r=si,n=e&&!e.nodeType&&e,i=n&&!0&&t&&!t.nodeType&&t,a=i&&i.exports===n,s=a?r.Buffer:void 0,o=s?s.allocUnsafe:void 0;function l(u,h){if(h)return u.slice();var d=u.length,f=o?o(d):new u.constructor(d);return u.copy(f),f}t.exports=l}(A1,A1.exports)),A1.exports}var uy,Q9;function J9(){if(Q9)return uy;Q9=1;function t(e,r){var 
n=-1,i=e.length;for(r||(r=Array(i));++n<i;)r[n]=e[n];return r}return uy=t,uy}var hy,tT;function eT(){if(tT)return hy;tT=1;function t(e,r){for(var n=-1,i=e==null?0:e.length,a=0,s=[];++n<i;){var o=e[n];r(o,n,e)&&(s[a++]=o)}return s}return hy=t,hy}var fy,rT;function nT(){if(rT)return fy;rT=1;function t(){return[]}return fy=t,fy}var dy,iT;function py(){if(iT)return dy;iT=1;var t=eT(),e=nT(),r=Object.prototype,n=r.propertyIsEnumerable,i=Object.getOwnPropertySymbols,a=i?function(s){return s==null?[]:(s=Object(s),t(i(s),function(o){return n.call(s,o)}))}:e;return dy=a,dy}var gy,aT;function LZ(){if(aT)return gy;aT=1;var t=Hc(),e=py();function r(n,i){return t(n,e(n),i)}return gy=r,gy}var yy,sT;function my(){if(sT)return yy;sT=1;function t(e,r){for(var n=-1,i=r.length,a=e.length;++n<i;)e[a+n]=r[n];return e}return yy=t,yy}var by,oT;function M1(){if(oT)return by;oT=1;var t=V9(),e=t(Object.getPrototypeOf,Object);return by=e,by}var _y,lT;function cT(){if(lT)return _y;lT=1;var t=my(),e=M1(),r=py(),n=nT(),i=Object.getOwnPropertySymbols,a=i?function(s){for(var o=[];s;)t(o,r(s)),s=e(s);return o}:n;return _y=a,_y}var vy,uT;function RZ(){if(uT)return vy;uT=1;var t=Hc(),e=cT();function r(n,i){return t(n,e(n),i)}return vy=r,vy}var xy,hT;function fT(){if(hT)return xy;hT=1;var t=my(),e=gr();function r(n,i,a){var s=i(n);return e(n)?s:t(s,a(n))}return xy=r,xy}var ky,dT;function pT(){if(dT)return ky;dT=1;var t=fT(),e=py(),r=ts();function n(i){return t(i,r,e)}return ky=n,ky}var wy,gT;function IZ(){if(gT)return wy;gT=1;var t=fT(),e=cT(),r=Ws();function n(i){return t(i,r,e)}return wy=n,wy}var Ty,yT;function NZ(){if(yT)return Ty;yT=1;var t=qs,e=si,r=t(e,"DataView");return Ty=r,Ty}var Ey,mT;function BZ(){if(mT)return Ey;mT=1;var t=qs,e=si,r=t(e,"Promise");return Ey=r,Ey}var Cy,bT;function _T(){if(bT)return Cy;bT=1;var t=qs,e=si,r=t(e,"Set");return Cy=r,Cy}var Sy,vT;function DZ(){if(vT)return Sy;vT=1;var t=qs,e=si,r=t(e,"WeakMap");return Sy=r,Sy}var Ay,xT;function el(){if(xT)return Ay;xT=1;var 
t=NZ(),e=Zp,r=BZ(),n=_T(),i=DZ(),a=Ps,s=ew,o="[object Map]",l="[object Object]",u="[object Promise]",h="[object Set]",d="[object WeakMap]",f="[object DataView]",p=s(t),m=s(e),_=s(r),y=s(n),b=s(i),x=a;return(t&&x(new t(new ArrayBuffer(1)))!=f||e&&x(new e)!=o||r&&x(r.resolve())!=u||n&&x(new n)!=h||i&&x(new i)!=d)&&(x=function(k){var T=a(k),C=T==l?k.constructor:void 0,M=C?s(C):"";if(M)switch(M){case p:return f;case m:return o;case _:return u;case y:return h;case b:return d}return T}),Ay=x,Ay}var My,kT;function OZ(){if(kT)return My;kT=1;var t=Object.prototype,e=t.hasOwnProperty;function r(n){var i=n.length,a=new n.constructor(i);return i&&typeof n[0]=="string"&&e.call(n,"index")&&(a.index=n.index,a.input=n.input),a}return My=r,My}var Ly,wT;function TT(){if(wT)return Ly;wT=1;var t=si,e=t.Uint8Array;return Ly=e,Ly}var Ry,ET;function Iy(){if(ET)return Ry;ET=1;var t=TT();function e(r){var n=new r.constructor(r.byteLength);return new t(n).set(new t(r)),n}return Ry=e,Ry}var Ny,CT;function FZ(){if(CT)return Ny;CT=1;var t=Iy();function e(r,n){var i=n?t(r.buffer):r.buffer;return new r.constructor(i,r.byteOffset,r.byteLength)}return Ny=e,Ny}var By,ST;function PZ(){if(ST)return By;ST=1;var t=/\w*$/;function e(r){var n=new r.constructor(r.source,t.exec(r));return n.lastIndex=r.lastIndex,n}return By=e,By}var Dy,AT;function qZ(){if(AT)return Dy;AT=1;var t=zo,e=t?t.prototype:void 0,r=e?e.valueOf:void 0;function n(i){return r?Object(r.call(i)):{}}return Dy=n,Dy}var Oy,MT;function LT(){if(MT)return Oy;MT=1;var t=Iy();function e(r,n){var i=n?t(r.buffer):r.buffer;return new r.constructor(i,r.byteOffset,r.length)}return Oy=e,Oy}var Fy,RT;function VZ(){if(RT)return Fy;RT=1;var t=Iy(),e=FZ(),r=PZ(),n=qZ(),i=LT(),a="[object Boolean]",s="[object Date]",o="[object Map]",l="[object Number]",u="[object RegExp]",h="[object Set]",d="[object String]",f="[object Symbol]",p="[object ArrayBuffer]",m="[object DataView]",_="[object Float32Array]",y="[object Float64Array]",b="[object 
Int8Array]",x="[object Int16Array]",k="[object Int32Array]",T="[object Uint8Array]",C="[object Uint8ClampedArray]",M="[object Uint16Array]",S="[object Uint32Array]";function R(A,L,v){var B=A.constructor;switch(L){case p:return t(A);case a:case s:return new B(+A);case m:return e(A,v);case _:case y:case b:case x:case k:case T:case C:case M:case S:return i(A,v);case o:return new B;case l:case d:return new B(A);case u:return r(A);case h:return new B;case f:return n(A)}}return Fy=R,Fy}var Py,IT;function NT(){if(IT)return Py;IT=1;var t=Vn,e=Object.create,r=function(){function n(){}return function(i){if(!t(i))return{};if(e)return e(i);n.prototype=i;var a=new n;return n.prototype=void 0,a}}();return Py=r,Py}var qy,BT;function DT(){if(BT)return qy;BT=1;var t=NT(),e=M1(),r=S1();function n(i){return typeof i.constructor=="function"&&!r(i)?t(e(i)):{}}return qy=n,qy}var Vy,OT;function zZ(){if(OT)return Vy;OT=1;var t=el(),e=Fi(),r="[object Map]";function n(i){return e(i)&&t(i)==r}return Vy=n,Vy}var zy,FT;function YZ(){if(FT)return zy;FT=1;var t=zZ(),e=E1(),r=Xg(),n=r&&r.isMap,i=n?e(n):t;return zy=i,zy}var Yy,PT;function UZ(){if(PT)return Yy;PT=1;var t=el(),e=Fi(),r="[object Set]";function n(i){return e(i)&&t(i)==r}return Yy=n,Yy}var Uy,qT;function WZ(){if(qT)return Uy;qT=1;var t=UZ(),e=E1(),r=Xg(),n=r&&r.isSet,i=n?e(n):t;return Uy=i,Uy}var Wy,VT;function zT(){if(VT)return Wy;VT=1;var t=v1(),e=Ng(),r=k1(),n=CZ(),i=MZ(),a=Z9(),s=J9(),o=LZ(),l=RZ(),u=pT(),h=IZ(),d=el(),f=OZ(),p=VZ(),m=DT(),_=gr(),y=tl(),b=YZ(),x=Vn,k=WZ(),T=ts(),C=Ws(),M=1,S=2,R=4,A="[object Arguments]",L="[object Array]",v="[object Boolean]",B="[object Date]",w="[object Error]",D="[object Function]",N="[object GeneratorFunction]",z="[object Map]",X="[object Number]",ct="[object Object]",J="[object RegExp]",Y="[object Set]",$="[object String]",lt="[object Symbol]",ut="[object WeakMap]",W="[object ArrayBuffer]",tt="[object DataView]",K="[object Float32Array]",it="[object Float64Array]",Z="[object 
Int8Array]",V="[object Int16Array]",Q="[object Int32Array]",q="[object Uint8Array]",U="[object Uint8ClampedArray]",F="[object Uint16Array]",j="[object Uint32Array]",P={};P[A]=P[L]=P[W]=P[tt]=P[v]=P[B]=P[K]=P[it]=P[Z]=P[V]=P[Q]=P[z]=P[X]=P[ct]=P[J]=P[Y]=P[$]=P[lt]=P[q]=P[U]=P[F]=P[j]=!0,P[w]=P[D]=P[ut]=!1;function et(at,It,Lt,Rt,Ct,pt){var mt,vt=It&M,Tt=It&S,ft=It&R;if(Lt&&(mt=Ct?Lt(at,Rt,Ct,pt):Lt(at)),mt!==void 0)return mt;if(!x(at))return at;var le=_(at);if(le){if(mt=f(at),!vt)return s(at,mt)}else{var Dt=d(at),Gt=Dt==D||Dt==N;if(y(at))return a(at,vt);if(Dt==ct||Dt==A||Gt&&!Ct){if(mt=Tt||Gt?{}:m(at),!vt)return Tt?l(at,i(mt,at)):o(at,n(mt,at))}else{if(!P[Dt])return Ct?at:{};mt=p(at,Dt,vt)}}pt||(pt=new t);var $t=pt.get(at);if($t)return $t;pt.set(at,mt),k(at)?at.forEach(function(jt){mt.add(et(jt,It,Lt,jt,at,pt))}):b(at)&&at.forEach(function(jt,Ft){mt.set(Ft,et(jt,It,Lt,Ft,at,pt))});var Qt=ft?Tt?h:u:Tt?C:T,we=le?void 0:Qt(at);return e(we||at,function(jt,Ft){we&&(Ft=jt,jt=at[Ft]),r(mt,Ft,et(jt,It,Lt,Ft,at,pt))}),mt}return Wy=et,Wy}var Hy,YT;function HZ(){if(YT)return Hy;YT=1;var t=zT(),e=4;function r(n){return t(n,e)}return Hy=r,Hy}var Gy,UT;function jy(){if(UT)return Gy;UT=1;function t(e){return function(){return e}}return Gy=t,Gy}var $y={exports:{}},Xy,WT;function GZ(){if(WT)return Xy;WT=1;function t(e){return function(r,n,i){for(var a=-1,s=Object(r),o=i(r),l=o.length;l--;){var u=o[e?l:++a];if(n(s[u],u,s)===!1)break}return r}}return Xy=t,Xy}var Ky,HT;function Zy(){if(HT)return Ky;HT=1;var t=GZ(),e=t();return Ky=e,Ky}var Qy,GT;function Jy(){if(GT)return Qy;GT=1;var t=Zy(),e=ts();function r(n,i){return n&&t(n,i,e)}return Qy=r,Qy}var tm,jT;function jZ(){if(jT)return tm;jT=1;var t=oa();function e(r,n){return function(i,a){if(i==null)return i;if(!t(i))return r(i,a);for(var s=i.length,o=n?s:-1,l=Object(i);(n?o--:++o<s)&&a(l[o],o,l)!==!1;);return i}}return tm=e,tm}var em,$T;function L1(){if($T)return em;$T=1;var t=Jy(),e=jZ(),r=e(t);return em=r,em}var rm,XT;function 
Hs(){if(XT)return rm;XT=1;function t(e){return e}return rm=t,rm}var nm,KT;function ZT(){if(KT)return nm;KT=1;var t=Hs();function e(r){return typeof r=="function"?r:t}return nm=e,nm}var im,QT;function JT(){if(QT)return im;QT=1;var t=Ng(),e=L1(),r=ZT(),n=gr();function i(a,s){var o=n(a)?t:e;return o(a,r(s))}return im=i,im}var tE;function am(){return tE||(tE=1,function(t){t.exports=JT()}($y)),$y.exports}var sm,eE;function $Z(){if(eE)return sm;eE=1;var t=L1();function e(r,n){var i=[];return t(r,function(a,s,o){n(a,s,o)&&i.push(a)}),i}return sm=e,sm}var om,rE;function XZ(){if(rE)return om;rE=1;var t="__lodash_hash_undefined__";function e(r){return this.__data__.set(r,t),this}return om=e,om}var lm,nE;function KZ(){if(nE)return lm;nE=1;function t(e){return this.__data__.has(e)}return lm=t,lm}var cm,iE;function aE(){if(iE)return cm;iE=1;var t=Qp,e=XZ(),r=KZ();function n(i){var a=-1,s=i==null?0:i.length;for(this.__data__=new t;++a<s;)this.add(i[a])}return n.prototype.add=n.prototype.push=e,n.prototype.has=r,cm=n,cm}var um,sE;function ZZ(){if(sE)return um;sE=1;function t(e,r){for(var n=-1,i=e==null?0:e.length;++n<i;)if(r(e[n],n,e))return!0;return!1}return um=t,um}var hm,oE;function lE(){if(oE)return hm;oE=1;function t(e,r){return e.has(r)}return hm=t,hm}var fm,cE;function uE(){if(cE)return fm;cE=1;var t=aE(),e=ZZ(),r=lE(),n=1,i=2;function a(s,o,l,u,h,d){var f=l&n,p=s.length,m=o.length;if(p!=m&&!(f&&m>p))return!1;var _=d.get(s),y=d.get(o);if(_&&y)return _==o&&y==s;var b=-1,x=!0,k=l&i?new t:void 0;for(d.set(s,o),d.set(o,s);++b<p;){var T=s[b],C=o[b];if(u)var M=f?u(C,T,b,o,s,d):u(T,C,b,s,o,d);if(M!==void 0){if(M)continue;x=!1;break}if(k){if(!e(o,function(S,R){if(!r(k,R)&&(T===S||h(T,S,l,u,d)))return k.push(R)})){x=!1;break}}else if(!(T===C||h(T,C,l,u,d))){x=!1;break}}return d.delete(s),d.delete(o),x}return fm=a,fm}var dm,hE;function QZ(){if(hE)return dm;hE=1;function t(e){var r=-1,n=Array(e.size);return e.forEach(function(i,a){n[++r]=[a,i]}),n}return dm=t,dm}var pm,fE;function 
gm(){if(fE)return pm;fE=1;function t(e){var r=-1,n=Array(e.size);return e.forEach(function(i){n[++r]=i}),n}return pm=t,pm}var ym,dE;function JZ(){if(dE)return ym;dE=1;var t=zo,e=TT(),r=Wo,n=uE(),i=QZ(),a=gm(),s=1,o=2,l="[object Boolean]",u="[object Date]",h="[object Error]",d="[object Map]",f="[object Number]",p="[object RegExp]",m="[object Set]",_="[object String]",y="[object Symbol]",b="[object ArrayBuffer]",x="[object DataView]",k=t?t.prototype:void 0,T=k?k.valueOf:void 0;function C(M,S,R,A,L,v,B){switch(R){case x:if(M.byteLength!=S.byteLength||M.byteOffset!=S.byteOffset)return!1;M=M.buffer,S=S.buffer;case b:return!(M.byteLength!=S.byteLength||!v(new e(M),new e(S)));case l:case u:case f:return r(+M,+S);case h:return M.name==S.name&&M.message==S.message;case p:case _:return M==S+"";case d:var w=i;case m:var D=A&s;if(w||(w=a),M.size!=S.size&&!D)return!1;var N=B.get(M);if(N)return N==S;A|=o,B.set(M,S);var z=n(w(M),w(S),A,L,v,B);return B.delete(M),z;case y:if(T)return T.call(M)==T.call(S)}return!1}return ym=C,ym}var mm,pE;function tQ(){if(pE)return mm;pE=1;var t=pT(),e=1,r=Object.prototype,n=r.hasOwnProperty;function i(a,s,o,l,u,h){var d=o&e,f=t(a),p=f.length,m=t(s),_=m.length;if(p!=_&&!d)return!1;for(var y=p;y--;){var b=f[y];if(!(d?b in s:n.call(s,b)))return!1}var x=h.get(a),k=h.get(s);if(x&&k)return x==s&&k==a;var T=!0;h.set(a,s),h.set(s,a);for(var C=d;++y<p;){b=f[y];var M=a[b],S=s[b];if(l)var R=d?l(S,M,b,s,a,h):l(M,S,b,a,s,h);if(!(R===void 0?M===S||u(M,S,o,l,h):R)){T=!1;break}C||(C=b=="constructor")}if(T&&!C){var A=a.constructor,L=s.constructor;A!=L&&"constructor"in a&&"constructor"in s&&!(typeof A=="function"&&A instanceof A&&typeof L=="function"&&L instanceof L)&&(T=!1)}return h.delete(a),h.delete(s),T}return mm=i,mm}var bm,gE;function eQ(){if(gE)return bm;gE=1;var t=v1(),e=uE(),r=JZ(),n=tQ(),i=el(),a=gr(),s=tl(),o=jc(),l=1,u="[object Arguments]",h="[object Array]",d="[object Object]",f=Object.prototype,p=f.hasOwnProperty;function m(_,y,b,x,k,T){var 
C=a(_),M=a(y),S=C?h:i(_),R=M?h:i(y);S=S==u?d:S,R=R==u?d:R;var A=S==d,L=R==d,v=S==R;if(v&&s(_)){if(!s(y))return!1;C=!0,A=!1}if(v&&!A)return T||(T=new t),C||o(_)?e(_,y,b,x,k,T):r(_,y,S,b,x,k,T);if(!(b&l)){var B=A&&p.call(_,"__wrapped__"),w=L&&p.call(y,"__wrapped__");if(B||w){var D=B?_.value():_,N=w?y.value():y;return T||(T=new t),k(D,N,b,x,T)}}return v?(T||(T=new t),n(_,y,b,x,k,T)):!1}return bm=m,bm}var _m,yE;function mE(){if(yE)return _m;yE=1;var t=eQ(),e=Fi();function r(n,i,a,s,o){return n===i?!0:n==null||i==null||!e(n)&&!e(i)?n!==n&&i!==i:t(n,i,a,s,r,o)}return _m=r,_m}var vm,bE;function rQ(){if(bE)return vm;bE=1;var t=v1(),e=mE(),r=1,n=2;function i(a,s,o,l){var u=o.length,h=u,d=!l;if(a==null)return!h;for(a=Object(a);u--;){var f=o[u];if(d&&f[2]?f[1]!==a[f[0]]:!(f[0]in a))return!1}for(;++u<h;){f=o[u];var p=f[0],m=a[p],_=f[1];if(d&&f[2]){if(m===void 0&&!(p in a))return!1}else{var y=new t;if(l)var b=l(m,_,p,a,s,y);if(!(b===void 0?e(_,m,r|n,l,y):b))return!1}}return!0}return vm=i,vm}var xm,_E;function vE(){if(_E)return xm;_E=1;var t=Vn;function e(r){return r===r&&!t(r)}return xm=e,xm}var km,xE;function nQ(){if(xE)return km;xE=1;var t=vE(),e=ts();function r(n){for(var i=e(n),a=i.length;a--;){var s=i[a],o=n[s];i[a]=[s,o,t(o)]}return i}return km=r,km}var wm,kE;function wE(){if(kE)return wm;kE=1;function t(e,r){return function(n){return n==null?!1:n[e]===r&&(r!==void 0||e in Object(n))}}return wm=t,wm}var Tm,TE;function iQ(){if(TE)return Tm;TE=1;var t=rQ(),e=nQ(),r=wE();function n(i){var a=e(i);return a.length==1&&a[0][2]?r(a[0][0],a[0][1]):function(s){return s===i||t(s,i,a)}}return Tm=n,Tm}var Em,EE;function rl(){if(EE)return Em;EE=1;var t=Ps,e=Fi(),r="[object Symbol]";function n(i){return typeof i=="symbol"||e(i)&&t(i)==r}return Em=n,Em}var Cm,CE;function Sm(){if(CE)return Cm;CE=1;var t=gr(),e=rl(),r=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,n=/^\w*$/;function i(a,s){if(t(a))return!1;var o=typeof a;return 
o=="number"||o=="symbol"||o=="boolean"||a==null||e(a)?!0:n.test(a)||!r.test(a)||s!=null&&a in Object(s)}return Cm=i,Cm}var Am,SE;function aQ(){if(SE)return Am;SE=1;var t=Gf,e=500;function r(n){var i=t(n,function(s){return a.size===e&&a.clear(),s}),a=i.cache;return i}return Am=r,Am}var Mm,AE;function sQ(){if(AE)return Mm;AE=1;var t=aQ(),e=/[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g,r=/\\(\\)?/g,n=t(function(i){var a=[];return i.charCodeAt(0)===46&&a.push(""),i.replace(e,function(s,o,l,u){a.push(l?u.replace(r,"$1"):o||s)}),a});return Mm=n,Mm}var Lm,ME;function R1(){if(ME)return Lm;ME=1;function t(e,r){for(var n=-1,i=e==null?0:e.length,a=Array(i);++n<i;)a[n]=r(e[n],n,e);return a}return Lm=t,Lm}var Rm,LE;function oQ(){if(LE)return Rm;LE=1;var t=zo,e=R1(),r=gr(),n=rl(),i=1/0,a=t?t.prototype:void 0,s=a?a.toString:void 0;function o(l){if(typeof l=="string")return l;if(r(l))return e(l,o)+"";if(n(l))return s?s.call(l):"";var u=l+"";return u=="0"&&1/l==-i?"-0":u}return Rm=o,Rm}var Im,RE;function IE(){if(RE)return Im;RE=1;var t=oQ();function e(r){return r==null?"":t(r)}return Im=e,Im}var Nm,NE;function I1(){if(NE)return Nm;NE=1;var t=gr(),e=Sm(),r=sQ(),n=IE();function i(a,s){return t(a)?a:e(a,s)?[a]:r(n(a))}return Nm=i,Nm}var Bm,BE;function $c(){if(BE)return Bm;BE=1;var t=rl(),e=1/0;function r(n){if(typeof n=="string"||t(n))return n;var i=n+"";return i=="0"&&1/n==-e?"-0":i}return Bm=r,Bm}var Dm,DE;function N1(){if(DE)return Dm;DE=1;var t=I1(),e=$c();function r(n,i){i=t(i,n);for(var a=0,s=i.length;n!=null&&a<s;)n=n[e(i[a++])];return a&&a==s?n:void 0}return Dm=r,Dm}var Om,OE;function lQ(){if(OE)return Om;OE=1;var t=N1();function e(r,n,i){var a=r==null?void 0:t(r,n);return a===void 0?i:a}return Om=e,Om}var Fm,FE;function cQ(){if(FE)return Fm;FE=1;function t(e,r){return e!=null&&r in Object(e)}return Fm=t,Fm}var Pm,PE;function qE(){if(PE)return Pm;PE=1;var t=I1(),e=Gc(),r=gr(),n=T1(),i=Gg(),a=$c();function 
s(o,l,u){l=t(l,o);for(var h=-1,d=l.length,f=!1;++h<d;){var p=a(l[h]);if(!(f=o!=null&&u(o,p)))break;o=o[p]}return f||++h!=d?f:(d=o==null?0:o.length,!!d&&i(d)&&n(p,d)&&(r(o)||e(o)))}return Pm=s,Pm}var qm,VE;function zE(){if(VE)return qm;VE=1;var t=cQ(),e=qE();function r(n,i){return n!=null&&e(n,i,t)}return qm=r,qm}var Vm,YE;function uQ(){if(YE)return Vm;YE=1;var t=mE(),e=lQ(),r=zE(),n=Sm(),i=vE(),a=wE(),s=$c(),o=1,l=2;function u(h,d){return n(h)&&i(d)?a(s(h),d):function(f){var p=e(f,h);return p===void 0&&p===d?r(f,h):t(d,p,o|l)}}return Vm=u,Vm}var zm,UE;function WE(){if(UE)return zm;UE=1;function t(e){return function(r){return r==null?void 0:r[e]}}return zm=t,zm}var Ym,HE;function hQ(){if(HE)return Ym;HE=1;var t=N1();function e(r){return function(n){return t(n,r)}}return Ym=e,Ym}var Um,GE;function fQ(){if(GE)return Um;GE=1;var t=WE(),e=hQ(),r=Sm(),n=$c();function i(a){return r(a)?t(n(a)):e(a)}return Um=i,Um}var Wm,jE;function la(){if(jE)return Wm;jE=1;var t=iQ(),e=uQ(),r=Hs(),n=gr(),i=fQ();function a(s){return typeof s=="function"?s:s==null?r:typeof s=="object"?n(s)?e(s[0],s[1]):t(s):i(s)}return Wm=a,Wm}var Hm,$E;function XE(){if($E)return Hm;$E=1;var t=eT(),e=$Z(),r=la(),n=gr();function i(a,s){var o=n(a)?t:e;return o(a,r(s,3))}return Hm=i,Hm}var Gm,KE;function dQ(){if(KE)return Gm;KE=1;var t=Object.prototype,e=t.hasOwnProperty;function r(n,i){return n!=null&&e.call(n,i)}return Gm=r,Gm}var jm,ZE;function $m(){if(ZE)return jm;ZE=1;var t=dQ(),e=qE();function r(n,i){return n!=null&&e(n,i,t)}return jm=r,jm}var Xm,QE;function pQ(){if(QE)return Xm;QE=1;var t=ry(),e=el(),r=Gc(),n=gr(),i=oa(),a=tl(),s=S1(),o=jc(),l="[object Map]",u="[object Set]",h=Object.prototype,d=h.hasOwnProperty;function f(p){if(p==null)return!0;if(i(p)&&(n(p)||typeof p=="string"||typeof p.splice=="function"||a(p)||o(p)||r(p)))return!p.length;var m=e(p);if(m==l||m==u)return!p.size;if(s(p))return!t(p).length;for(var _ in p)if(d.call(p,_))return!1;return!0}return Xm=f,Xm}var Km,JE;function 
tC(){if(JE)return Km;JE=1;function t(e){return e===void 0}return Km=t,Km}var Zm,eC;function rC(){if(eC)return Zm;eC=1;var t=L1(),e=oa();function r(n,i){var a=-1,s=e(n)?Array(n.length):[];return t(n,function(o,l,u){s[++a]=i(o,l,u)}),s}return Zm=r,Zm}var Qm,nC;function iC(){if(nC)return Qm;nC=1;var t=R1(),e=la(),r=rC(),n=gr();function i(a,s){var o=n(a)?t:r;return o(a,e(s,3))}return Qm=i,Qm}var Jm,aC;function gQ(){if(aC)return Jm;aC=1;function t(e,r,n,i){var a=-1,s=e==null?0:e.length;for(i&&s&&(n=e[++a]);++a<s;)n=r(n,e[a],a,e);return n}return Jm=t,Jm}var tb,sC;function yQ(){if(sC)return tb;sC=1;function t(e,r,n,i,a){return a(e,function(s,o,l){n=i?(i=!1,s):r(n,s,o,l)}),n}return tb=t,tb}var eb,oC;function lC(){if(oC)return eb;oC=1;var t=gQ(),e=L1(),r=la(),n=yQ(),i=gr();function a(s,o,l){var u=i(s)?t:n,h=arguments.length<3;return u(s,r(o,4),l,h,e)}return eb=a,eb}var rb,cC;function mQ(){if(cC)return rb;cC=1;var t=Ps,e=gr(),r=Fi(),n="[object String]";function i(a){return typeof a=="string"||!e(a)&&r(a)&&t(a)==n}return rb=i,rb}var nb,uC;function bQ(){if(uC)return nb;uC=1;var t=WE(),e=t("length");return nb=e,nb}var ib,hC;function _Q(){if(hC)return ib;hC=1;var t="\\ud800-\\udfff",e="\\u0300-\\u036f",r="\\ufe20-\\ufe2f",n="\\u20d0-\\u20ff",i=e+r+n,a="\\ufe0e\\ufe0f",s="\\u200d",o=RegExp("["+s+t+i+a+"]");function l(u){return o.test(u)}return ib=l,ib}var ab,fC;function vQ(){if(fC)return ab;fC=1;var t="\\ud800-\\udfff",e="\\u0300-\\u036f",r="\\ufe20-\\ufe2f",n="\\u20d0-\\u20ff",i=e+r+n,a="\\ufe0e\\ufe0f",s="["+t+"]",o="["+i+"]",l="\\ud83c[\\udffb-\\udfff]",u="(?:"+o+"|"+l+")",h="[^"+t+"]",d="(?:\\ud83c[\\udde6-\\uddff]){2}",f="[\\ud800-\\udbff][\\udc00-\\udfff]",p="\\u200d",m=u+"?",_="["+a+"]?",y="(?:"+p+"(?:"+[h,d,f].join("|")+")"+_+m+")*",b=_+m+y,x="(?:"+[h+o+"?",o,d,f,s].join("|")+")",k=RegExp(l+"(?="+l+")|"+x+b,"g");function T(C){for(var M=k.lastIndex=0;k.test(C);)++M;return M}return ab=T,ab}var sb,dC;function xQ(){if(dC)return sb;dC=1;var t=bQ(),e=_Q(),r=vQ();function 
n(i){return e(i)?r(i):t(i)}return sb=n,sb}var ob,pC;function kQ(){if(pC)return ob;pC=1;var t=ry(),e=el(),r=oa(),n=mQ(),i=xQ(),a="[object Map]",s="[object Set]";function o(l){if(l==null)return 0;if(r(l))return n(l)?i(l):l.length;var u=e(l);return u==a||u==s?l.size:t(l).length}return ob=o,ob}var lb,gC;function wQ(){if(gC)return lb;gC=1;var t=Ng(),e=NT(),r=Jy(),n=la(),i=M1(),a=gr(),s=tl(),o=Yo,l=Vn,u=jc();function h(d,f,p){var m=a(d),_=m||s(d)||u(d);if(f=n(f,4),p==null){var y=d&&d.constructor;_?p=m?new y:[]:l(d)?p=o(y)?e(i(d)):{}:p={}}return(_?t:r)(d,function(b,x,k){return f(p,b,x,k)}),p}return lb=h,lb}var cb,yC;function TQ(){if(yC)return cb;yC=1;var t=zo,e=Gc(),r=gr(),n=t?t.isConcatSpreadable:void 0;function i(a){return r(a)||e(a)||!!(n&&a&&a[n])}return cb=i,cb}var ub,mC;function hb(){if(mC)return ub;mC=1;var t=my(),e=TQ();function r(n,i,a,s,o){var l=-1,u=n.length;for(a||(a=e),o||(o=[]);++l<u;){var h=n[l];i>0&&a(h)?i>1?r(h,i-1,a,s,o):t(o,h):s||(o[o.length]=h)}return o}return ub=r,ub}var fb,bC;function EQ(){if(bC)return fb;bC=1;function t(e,r,n){switch(n.length){case 0:return e.call(r);case 1:return e.call(r,n[0]);case 2:return e.call(r,n[0],n[1]);case 3:return e.call(r,n[0],n[1],n[2])}return e.apply(r,n)}return fb=t,fb}var db,_C;function vC(){if(_C)return db;_C=1;var t=EQ(),e=Math.max;function r(n,i,a){return i=e(i===void 0?n.length-1:i,0),function(){for(var s=arguments,o=-1,l=e(s.length-i,0),u=Array(l);++o<l;)u[o]=s[i+o];o=-1;for(var h=Array(i+1);++o<i;)h[o]=s[o];return h[i]=a(u),t(n,this,h)}}return db=r,db}var pb,xC;function CQ(){if(xC)return pb;xC=1;var t=jy(),e=_9(),r=Hs(),n=e?function(i,a){return e(i,"toString",{configurable:!0,enumerable:!1,value:t(a),writable:!0})}:r;return pb=n,pb}var gb,kC;function SQ(){if(kC)return gb;kC=1;var t=800,e=16,r=Date.now;function n(i){var a=0,s=0;return function(){var o=r(),l=e-(o-s);if(s=o,l>0){if(++a>=t)return arguments[0]}else a=0;return i.apply(void 0,arguments)}}return gb=n,gb}var yb,wC;function TC(){if(wC)return yb;wC=1;var 
t=CQ(),e=SQ(),r=e(t);return yb=r,yb}var mb,EC;function B1(){if(EC)return mb;EC=1;var t=Hs(),e=vC(),r=TC();function n(i,a){return r(e(i,a,t),i+"")}return mb=n,mb}var bb,CC;function SC(){if(CC)return bb;CC=1;function t(e,r,n,i){for(var a=e.length,s=n+(i?1:-1);i?s--:++s<a;)if(r(e[s],s,e))return s;return-1}return bb=t,bb}var _b,AC;function AQ(){if(AC)return _b;AC=1;function t(e){return e!==e}return _b=t,_b}var vb,MC;function MQ(){if(MC)return vb;MC=1;function t(e,r,n){for(var i=n-1,a=e.length;++i<a;)if(e[i]===r)return i;return-1}return vb=t,vb}var xb,LC;function LQ(){if(LC)return xb;LC=1;var t=SC(),e=AQ(),r=MQ();function n(i,a,s){return a===a?r(i,a,s):t(i,e,s)}return xb=n,xb}var kb,RC;function RQ(){if(RC)return kb;RC=1;var t=LQ();function e(r,n){var i=r==null?0:r.length;return!!i&&t(r,n,0)>-1}return kb=e,kb}var wb,IC;function IQ(){if(IC)return wb;IC=1;function t(e,r,n){for(var i=-1,a=e==null?0:e.length;++i<a;)if(n(r,e[i]))return!0;return!1}return wb=t,wb}var Tb,NC;function NQ(){if(NC)return Tb;NC=1;function t(){}return Tb=t,Tb}var Eb,BC;function BQ(){if(BC)return Eb;BC=1;var t=_T(),e=NQ(),r=gm(),n=1/0,i=t&&1/r(new t([,-0]))[1]==n?function(a){return new t(a)}:e;return Eb=i,Eb}var Cb,DC;function DQ(){if(DC)return Cb;DC=1;var t=aE(),e=RQ(),r=IQ(),n=lE(),i=BQ(),a=gm(),s=200;function o(l,u,h){var d=-1,f=e,p=l.length,m=!0,_=[],y=_;if(h)m=!1,f=r;else if(p>=s){var b=u?null:i(l);if(b)return a(b);m=!1,f=n,y=new t}else y=u?[]:_;t:for(;++d<p;){var x=l[d],k=u?u(x):x;if(x=h||x!==0?x:0,m&&k===k){for(var T=y.length;T--;)if(y[T]===k)continue t;u&&y.push(k),_.push(x)}else f(y,k,h)||(y!==_&&y.push(k),_.push(x))}return _}return Cb=o,Cb}var Sb,OC;function FC(){if(OC)return Sb;OC=1;var t=oa(),e=Fi();function r(n){return e(n)&&t(n)}return Sb=r,Sb}var Ab,PC;function OQ(){if(PC)return Ab;PC=1;var t=hb(),e=B1(),r=DQ(),n=FC(),i=e(function(a){return r(t(a,1,n,!0))});return Ab=i,Ab}var Mb,qC;function FQ(){if(qC)return Mb;qC=1;var t=R1();function e(r,n){return t(n,function(i){return r[i]})}return 
Mb=e,Mb}var Lb,VC;function zC(){if(VC)return Lb;VC=1;var t=FQ(),e=ts();function r(n){return n==null?[]:t(n,e(n))}return Lb=r,Lb}var D1;if(typeof fn=="function")try{D1={clone:HZ(),constant:jy(),each:am(),filter:XE(),has:$m(),isArray:gr(),isEmpty:pQ(),isFunction:Yo,isUndefined:tC(),keys:ts(),map:iC(),reduce:lC(),size:kQ(),transform:wQ(),union:OQ(),values:zC()}}catch{}D1||(D1=window._);var Wn=D1,_e=Wn,Rb=Le,PQ="\0",Gs="\0",YC="";function Le(t){this._isDirected=_e.has(t,"directed")?t.directed:!0,this._isMultigraph=_e.has(t,"multigraph")?t.multigraph:!1,this._isCompound=_e.has(t,"compound")?t.compound:!1,this._label=void 0,this._defaultNodeLabelFn=_e.constant(void 0),this._defaultEdgeLabelFn=_e.constant(void 0),this._nodes={},this._isCompound&&(this._parent={},this._children={},this._children[Gs]={}),this._in={},this._preds={},this._out={},this._sucs={},this._edgeObjs={},this._edgeLabels={}}Le.prototype._nodeCount=0,Le.prototype._edgeCount=0,Le.prototype.isDirected=function(){return this._isDirected},Le.prototype.isMultigraph=function(){return this._isMultigraph},Le.prototype.isCompound=function(){return this._isCompound},Le.prototype.setGraph=function(t){return this._label=t,this},Le.prototype.graph=function(){return this._label},Le.prototype.setDefaultNodeLabel=function(t){return _e.isFunction(t)||(t=_e.constant(t)),this._defaultNodeLabelFn=t,this},Le.prototype.nodeCount=function(){return this._nodeCount},Le.prototype.nodes=function(){return _e.keys(this._nodes)},Le.prototype.sources=function(){var t=this;return _e.filter(this.nodes(),function(e){return _e.isEmpty(t._in[e])})},Le.prototype.sinks=function(){var t=this;return _e.filter(this.nodes(),function(e){return _e.isEmpty(t._out[e])})},Le.prototype.setNodes=function(t,e){var r=arguments,n=this;return _e.each(t,function(i){r.length>1?n.setNode(i,e):n.setNode(i)}),this},Le.prototype.setNode=function(t,e){return 
_e.has(this._nodes,t)?(arguments.length>1&&(this._nodes[t]=e),this):(this._nodes[t]=arguments.length>1?e:this._defaultNodeLabelFn(t),this._isCompound&&(this._parent[t]=Gs,this._children[t]={},this._children[Gs][t]=!0),this._in[t]={},this._preds[t]={},this._out[t]={},this._sucs[t]={},++this._nodeCount,this)},Le.prototype.node=function(t){return this._nodes[t]},Le.prototype.hasNode=function(t){return _e.has(this._nodes,t)},Le.prototype.removeNode=function(t){var e=this;if(_e.has(this._nodes,t)){var r=function(n){e.removeEdge(e._edgeObjs[n])};delete this._nodes[t],this._isCompound&&(this._removeFromParentsChildList(t),delete this._parent[t],_e.each(this.children(t),function(n){e.setParent(n)}),delete this._children[t]),_e.each(_e.keys(this._in[t]),r),delete this._in[t],delete this._preds[t],_e.each(_e.keys(this._out[t]),r),delete this._out[t],delete this._sucs[t],--this._nodeCount}return this},Le.prototype.setParent=function(t,e){if(!this._isCompound)throw new Error("Cannot set parent in a non-compound graph");if(_e.isUndefined(e))e=Gs;else{e+="";for(var r=e;!_e.isUndefined(r);r=this.parent(r))if(r===t)throw new Error("Setting "+e+" as parent of "+t+" would create a cycle");this.setNode(e)}return this.setNode(t),this._removeFromParentsChildList(t),this._parent[t]=e,this._children[e][t]=!0,this},Le.prototype._removeFromParentsChildList=function(t){delete this._children[this._parent[t]][t]},Le.prototype.parent=function(t){if(this._isCompound){var e=this._parent[t];if(e!==Gs)return e}},Le.prototype.children=function(t){if(_e.isUndefined(t)&&(t=Gs),this._isCompound){var e=this._children[t];if(e)return _e.keys(e)}else{if(t===Gs)return this.nodes();if(this.hasNode(t))return[]}},Le.prototype.predecessors=function(t){var e=this._preds[t];if(e)return _e.keys(e)},Le.prototype.successors=function(t){var e=this._sucs[t];if(e)return _e.keys(e)},Le.prototype.neighbors=function(t){var e=this.predecessors(t);if(e)return 
_e.union(e,this.successors(t))},Le.prototype.isLeaf=function(t){var e;return this.isDirected()?e=this.successors(t):e=this.neighbors(t),e.length===0},Le.prototype.filterNodes=function(t){var e=new this.constructor({directed:this._isDirected,multigraph:this._isMultigraph,compound:this._isCompound});e.setGraph(this.graph());var r=this;_e.each(this._nodes,function(a,s){t(s)&&e.setNode(s,a)}),_e.each(this._edgeObjs,function(a){e.hasNode(a.v)&&e.hasNode(a.w)&&e.setEdge(a,r.edge(a))});var n={};function i(a){var s=r.parent(a);return s===void 0||e.hasNode(s)?(n[a]=s,s):s in n?n[s]:i(s)}return this._isCompound&&_e.each(e.nodes(),function(a){e.setParent(a,i(a))}),e},Le.prototype.setDefaultEdgeLabel=function(t){return _e.isFunction(t)||(t=_e.constant(t)),this._defaultEdgeLabelFn=t,this},Le.prototype.edgeCount=function(){return this._edgeCount},Le.prototype.edges=function(){return _e.values(this._edgeObjs)},Le.prototype.setPath=function(t,e){var r=this,n=arguments;return _e.reduce(t,function(i,a){return n.length>1?r.setEdge(i,a,e):r.setEdge(i,a),a}),this},Le.prototype.setEdge=function(){var t,e,r,n,i=!1,a=arguments[0];typeof a=="object"&&a!==null&&"v"in a?(t=a.v,e=a.w,r=a.name,arguments.length===2&&(n=arguments[1],i=!0)):(t=a,e=arguments[1],r=arguments[3],arguments.length>2&&(n=arguments[2],i=!0)),t=""+t,e=""+e,_e.isUndefined(r)||(r=""+r);var s=Xc(this._isDirected,t,e,r);if(_e.has(this._edgeLabels,s))return i&&(this._edgeLabels[s]=n),this;if(!_e.isUndefined(r)&&!this._isMultigraph)throw new Error("Cannot set a named edge when isMultigraph = false");this.setNode(t),this.setNode(e),this._edgeLabels[s]=i?n:this._defaultEdgeLabelFn(t,e,r);var o=qQ(this._isDirected,t,e,r);return t=o.v,e=o.w,Object.freeze(o),this._edgeObjs[s]=o,UC(this._preds[e],t),UC(this._sucs[t],e),this._in[e][s]=o,this._out[t][s]=o,this._edgeCount++,this},Le.prototype.edge=function(t,e,r){var n=arguments.length===1?Ib(this._isDirected,arguments[0]):Xc(this._isDirected,t,e,r);return 
this._edgeLabels[n]},Le.prototype.hasEdge=function(t,e,r){var n=arguments.length===1?Ib(this._isDirected,arguments[0]):Xc(this._isDirected,t,e,r);return _e.has(this._edgeLabels,n)},Le.prototype.removeEdge=function(t,e,r){var n=arguments.length===1?Ib(this._isDirected,arguments[0]):Xc(this._isDirected,t,e,r),i=this._edgeObjs[n];return i&&(t=i.v,e=i.w,delete this._edgeLabels[n],delete this._edgeObjs[n],WC(this._preds[e],t),WC(this._sucs[t],e),delete this._in[e][n],delete this._out[t][n],this._edgeCount--),this},Le.prototype.inEdges=function(t,e){var r=this._in[t];if(r){var n=_e.values(r);return e?_e.filter(n,function(i){return i.v===e}):n}},Le.prototype.outEdges=function(t,e){var r=this._out[t];if(r){var n=_e.values(r);return e?_e.filter(n,function(i){return i.w===e}):n}},Le.prototype.nodeEdges=function(t,e){var r=this.inEdges(t,e);if(r)return r.concat(this.outEdges(t,e))};function UC(t,e){t[e]?t[e]++:t[e]=1}function WC(t,e){--t[e]||delete t[e]}function Xc(t,e,r,n){var i=""+e,a=""+r;if(!t&&i>a){var s=i;i=a,a=s}return i+YC+a+YC+(_e.isUndefined(n)?PQ:n)}function qQ(t,e,r,n){var i=""+e,a=""+r;if(!t&&i>a){var s=i;i=a,a=s}var o={v:i,w:a};return n&&(o.name=n),o}function Ib(t,e){return Xc(t,e.v,e.w,e.name)}var VQ="2.1.8",zQ={Graph:Rb,version:VQ},Pi=Wn,YQ=Rb,UQ={write:WQ,read:jQ};function WQ(t){var e={options:{directed:t.isDirected(),multigraph:t.isMultigraph(),compound:t.isCompound()},nodes:HQ(t),edges:GQ(t)};return Pi.isUndefined(t.graph())||(e.value=Pi.clone(t.graph())),e}function HQ(t){return Pi.map(t.nodes(),function(e){var r=t.node(e),n=t.parent(e),i={v:e};return Pi.isUndefined(r)||(i.value=r),Pi.isUndefined(n)||(i.parent=n),i})}function GQ(t){return Pi.map(t.edges(),function(e){var r=t.edge(e),n={v:e.v,w:e.w};return Pi.isUndefined(e.name)||(n.name=e.name),Pi.isUndefined(r)||(n.value=r),n})}function jQ(t){var e=new YQ(t.options).setGraph(t.value);return 
Pi.each(t.nodes,function(r){e.setNode(r.v,r.value),r.parent&&e.setParent(r.v,r.parent)}),Pi.each(t.edges,function(r){e.setEdge({v:r.v,w:r.w,name:r.name},r.value)}),e}var O1=Wn,$Q=XQ;function XQ(t){var e={},r=[],n;function i(a){O1.has(e,a)||(e[a]=!0,n.push(a),O1.each(t.successors(a),i),O1.each(t.predecessors(a),i))}return O1.each(t.nodes(),function(a){n=[],i(a),n.length&&r.push(n)}),r}var HC=Wn,GC=Hn;function Hn(){this._arr=[],this._keyIndices={}}Hn.prototype.size=function(){return this._arr.length},Hn.prototype.keys=function(){return this._arr.map(function(t){return t.key})},Hn.prototype.has=function(t){return HC.has(this._keyIndices,t)},Hn.prototype.priority=function(t){var e=this._keyIndices[t];if(e!==void 0)return this._arr[e].priority},Hn.prototype.min=function(){if(this.size()===0)throw new Error("Queue underflow");return this._arr[0].key},Hn.prototype.add=function(t,e){var r=this._keyIndices;if(t=String(t),!HC.has(r,t)){var n=this._arr,i=n.length;return r[t]=i,n.push({key:t,priority:e}),this._decrease(i),!0}return!1},Hn.prototype.removeMin=function(){this._swap(0,this._arr.length-1);var t=this._arr.pop();return delete this._keyIndices[t.key],this._heapify(0),t.key},Hn.prototype.decrease=function(t,e){var r=this._keyIndices[t];if(e>this._arr[r].priority)throw new Error("New priority is greater than current priority. 
Key: "+t+" Old: "+this._arr[r].priority+" New: "+e);this._arr[r].priority=e,this._decrease(r)},Hn.prototype._heapify=function(t){var e=this._arr,r=2*t,n=r+1,i=t;r<e.length&&(i=e[r].priority<e[i].priority?r:i,n<e.length&&(i=e[n].priority<e[i].priority?n:i),i!==t&&(this._swap(t,i),this._heapify(i)))},Hn.prototype._decrease=function(t){for(var e=this._arr,r=e[t].priority,n;t!==0&&(n=t>>1,!(e[n].priority<r));)this._swap(t,n),t=n},Hn.prototype._swap=function(t,e){var r=this._arr,n=this._keyIndices,i=r[t],a=r[e];r[t]=a,r[e]=i,n[a.key]=t,n[i.key]=e};var KQ=Wn,ZQ=GC,jC=JQ,QQ=KQ.constant(1);function JQ(t,e,r,n){return tJ(t,String(e),r||QQ,n||function(i){return t.outEdges(i)})}function tJ(t,e,r,n){var i={},a=new ZQ,s,o,l=function(u){var h=u.v!==s?u.v:u.w,d=i[h],f=r(u),p=o.distance+f;if(f<0)throw new Error("dijkstra does not allow negative edge weights. Bad edge: "+u+" Weight: "+f);p<d.distance&&(d.distance=p,d.predecessor=s,a.decrease(h,p))};for(t.nodes().forEach(function(u){var h=u===e?0:Number.POSITIVE_INFINITY;i[u]={distance:h},a.add(u,h)});a.size()>0&&(s=a.removeMin(),o=i[s],o.distance!==Number.POSITIVE_INFINITY);)n(s).forEach(l);return i}var eJ=jC,rJ=Wn,nJ=iJ;function iJ(t,e,r){return rJ.transform(t.nodes(),function(n,i){n[i]=eJ(t,i,e,r)},{})}var $C=Wn,XC=aJ;function aJ(t){var e=0,r=[],n={},i=[];function a(s){var o=n[s]={onStack:!0,lowlink:e,index:e++};if(r.push(s),t.successors(s).forEach(function(h){$C.has(n,h)?n[h].onStack&&(o.lowlink=Math.min(o.lowlink,n[h].index)):(a(h),o.lowlink=Math.min(o.lowlink,n[h].lowlink))}),o.lowlink===o.index){var l=[],u;do u=r.pop(),n[u].onStack=!1,l.push(u);while(s!==u);i.push(l)}}return t.nodes().forEach(function(s){$C.has(n,s)||a(s)}),i}var sJ=Wn,oJ=XC,lJ=cJ;function cJ(t){return sJ.filter(oJ(t),function(e){return e.length>1||e.length===1&&t.hasEdge(e[0],e[0])})}var uJ=Wn,hJ=dJ,fJ=uJ.constant(1);function dJ(t,e,r){return pJ(t,e||fJ,r||function(n){return t.outEdges(n)})}function pJ(t,e,r){var n={},i=t.nodes();return 
i.forEach(function(a){n[a]={},n[a][a]={distance:0},i.forEach(function(s){a!==s&&(n[a][s]={distance:Number.POSITIVE_INFINITY})}),r(a).forEach(function(s){var o=s.v===a?s.w:s.v,l=e(s);n[a][o]={distance:l,predecessor:a}})}),i.forEach(function(a){var s=n[a];i.forEach(function(o){var l=n[o];i.forEach(function(u){var h=l[a],d=s[u],f=l[u],p=h.distance+d.distance;p<f.distance&&(f.distance=p,f.predecessor=d.predecessor)})})}),n}var Kc=Wn,KC=ZC;ZC.CycleException=F1;function ZC(t){var e={},r={},n=[];function i(a){if(Kc.has(r,a))throw new F1;Kc.has(e,a)||(r[a]=!0,e[a]=!0,Kc.each(t.predecessors(a),i),delete r[a],n.push(a))}if(Kc.each(t.sinks(),i),Kc.size(e)!==t.nodeCount())throw new F1;return n}function F1(){}F1.prototype=new Error;var QC=KC,gJ=yJ;function yJ(t){try{QC(t)}catch(e){if(e instanceof QC.CycleException)return!1;throw e}return!0}var P1=Wn,JC=mJ;function mJ(t,e,r){P1.isArray(e)||(e=[e]);var n=(t.isDirected()?t.successors:t.neighbors).bind(t),i=[],a={};return P1.each(e,function(s){if(!t.hasNode(s))throw new Error("Graph does not have node: "+s);tS(t,s,r==="post",a,n,i)}),i}function tS(t,e,r,n,i,a){P1.has(n,e)||(n[e]=!0,r||a.push(e),P1.each(i(e),function(s){tS(t,s,r,n,i,a)}),r&&a.push(e))}var bJ=JC,_J=vJ;function vJ(t,e){return bJ(t,e,"post")}var xJ=JC,kJ=wJ;function wJ(t,e){return xJ(t,e,"pre")}var eS=Wn,TJ=Rb,EJ=GC,CJ=SJ;function SJ(t,e){var r=new TJ,n={},i=new EJ,a;function s(l){var u=l.v===a?l.w:l.v,h=i.priority(u);if(h!==void 0){var d=e(l);d<h&&(n[u]=a,i.decrease(u,d))}}if(t.nodeCount()===0)return r;eS.each(t.nodes(),function(l){i.add(l,Number.POSITIVE_INFINITY),r.setNode(l)}),i.decrease(t.nodes()[0],0);for(var o=!1;i.size()>0;){if(a=i.removeMin(),eS.has(n,a))r.setEdge(a,n[a]);else{if(o)throw new Error("Input graph is not connected: "+t);o=!0}t.nodeEdges(a).forEach(s)}return r}var 
AJ={components:$Q,dijkstra:jC,dijkstraAll:nJ,findCycles:lJ,floydWarshall:hJ,isAcyclic:gJ,postorder:_J,preorder:kJ,prim:CJ,tarjan:XC,topsort:KC},rS=zQ,cr={Graph:rS.Graph,json:UQ,alg:AJ,version:rS.version},Nb,nS;function yi(){if(nS)return Nb;nS=1;var t;if(typeof fn=="function")try{t=cr}catch{}return t||(t=window.graphlib),Nb=t,Nb}var Bb,iS;function MJ(){if(iS)return Bb;iS=1;var t=zT(),e=1,r=4;function n(i){return t(i,e|r)}return Bb=n,Bb}var Db,aS;function q1(){if(aS)return Db;aS=1;var t=Wo,e=oa(),r=T1(),n=Vn;function i(a,s,o){if(!n(o))return!1;var l=typeof s;return(l=="number"?e(o)&&r(s,o.length):l=="string"&&s in o)?t(o[s],a):!1}return Db=i,Db}var Ob,sS;function oS(){if(sS)return Ob;sS=1;var t=B1(),e=Wo,r=q1(),n=Ws(),i=Object.prototype,a=i.hasOwnProperty,s=t(function(o,l){o=Object(o);var u=-1,h=l.length,d=h>2?l[2]:void 0;for(d&&r(l[0],l[1],d)&&(h=1);++u<h;)for(var f=l[u],p=n(f),m=-1,_=p.length;++m<_;){var y=p[m],b=o[y];(b===void 0||e(b,i[y])&&!a.call(o,y))&&(o[y]=f[y])}return o});return Ob=s,Ob}var Fb,lS;function LJ(){if(lS)return Fb;lS=1;var t=la(),e=oa(),r=ts();function n(i){return function(a,s,o){var l=Object(a);if(!e(a)){var u=t(s,3);a=r(a),s=function(d){return u(l[d],d,l)}}var h=i(a,s,o);return h>-1?l[u?a[h]:h]:void 0}}return Fb=n,Fb}var Pb,cS;function RJ(){if(cS)return Pb;cS=1;var t=/\s/;function e(r){for(var n=r.length;n--&&t.test(r.charAt(n)););return n}return Pb=e,Pb}var qb,uS;function IJ(){if(uS)return qb;uS=1;var t=RJ(),e=/^\s+/;function r(n){return n&&n.slice(0,t(n)+1).replace(e,"")}return qb=r,qb}var Vb,hS;function NJ(){if(hS)return Vb;hS=1;var t=IJ(),e=Vn,r=rl(),n=0/0,i=/^[-+]0x[0-9a-f]+$/i,a=/^0b[01]+$/i,s=/^0o[0-7]+$/i,o=parseInt;function l(u){if(typeof u=="number")return u;if(r(u))return n;if(e(u)){var h=typeof u.valueOf=="function"?u.valueOf():u;u=e(h)?h+"":h}if(typeof u!="string")return u===0?u:+u;u=t(u);var d=a.test(u);return d||s.test(u)?o(u.slice(2),d?2:8):i.test(u)?n:+u}return Vb=l,Vb}var zb,fS;function dS(){if(fS)return zb;fS=1;var 
t=NJ(),e=1/0,r=17976931348623157e292;function n(i){if(!i)return i===0?i:0;if(i=t(i),i===e||i===-e){var a=i<0?-1:1;return a*r}return i===i?i:0}return zb=n,zb}var Yb,pS;function BJ(){if(pS)return Yb;pS=1;var t=dS();function e(r){var n=t(r),i=n%1;return n===n?i?n-i:n:0}return Yb=e,Yb}var Ub,gS;function DJ(){if(gS)return Ub;gS=1;var t=SC(),e=la(),r=BJ(),n=Math.max;function i(a,s,o){var l=a==null?0:a.length;if(!l)return-1;var u=o==null?0:r(o);return u<0&&(u=n(l+u,0)),t(a,e(s,3),u)}return Ub=i,Ub}var Wb,yS;function OJ(){if(yS)return Wb;yS=1;var t=LJ(),e=DJ(),r=t(e);return Wb=r,Wb}var Hb,mS;function bS(){if(mS)return Hb;mS=1;var t=hb();function e(r){var n=r==null?0:r.length;return n?t(r,1):[]}return Hb=e,Hb}var Gb,_S;function FJ(){if(_S)return Gb;_S=1;var t=Zy(),e=ZT(),r=Ws();function n(i,a){return i==null?i:t(i,e(a),r)}return Gb=n,Gb}var jb,vS;function PJ(){if(vS)return jb;vS=1;function t(e){var r=e==null?0:e.length;return r?e[r-1]:void 0}return jb=t,jb}var $b,xS;function qJ(){if(xS)return $b;xS=1;var t=x1(),e=Jy(),r=la();function n(i,a){var s={};return a=r(a,3),e(i,function(o,l,u){t(s,l,a(o,l,u))}),s}return $b=n,$b}var Xb,kS;function Kb(){if(kS)return Xb;kS=1;var t=rl();function e(r,n,i){for(var a=-1,s=r.length;++a<s;){var o=r[a],l=n(o);if(l!=null&&(u===void 0?l===l&&!t(l):i(l,u)))var u=l,h=o}return h}return Xb=e,Xb}var Zb,wS;function VJ(){if(wS)return Zb;wS=1;function t(e,r){return e>r}return Zb=t,Zb}var Qb,TS;function zJ(){if(TS)return Qb;TS=1;var t=Kb(),e=VJ(),r=Hs();function n(i){return i&&i.length?t(i,r,e):void 0}return Qb=n,Qb}var Jb,ES;function CS(){if(ES)return Jb;ES=1;var t=x1(),e=Wo;function r(n,i,a){(a!==void 0&&!e(n[i],a)||a===void 0&&!(i in n))&&t(n,i,a)}return Jb=r,Jb}var t3,SS;function AS(){if(SS)return t3;SS=1;var t=Ps,e=M1(),r=Fi(),n="[object Object]",i=Function.prototype,a=Object.prototype,s=i.toString,o=a.hasOwnProperty,l=s.call(Object);function u(h){if(!r(h)||t(h)!=n)return!1;var d=e(h);if(d===null)return!0;var 
f=o.call(d,"constructor")&&d.constructor;return typeof f=="function"&&f instanceof f&&s.call(f)==l}return t3=u,t3}var e3,MS;function LS(){if(MS)return e3;MS=1;function t(e,r){if(!(r==="constructor"&&typeof e[r]=="function")&&r!="__proto__")return e[r]}return e3=t,e3}var r3,RS;function YJ(){if(RS)return r3;RS=1;var t=Hc(),e=Ws();function r(n){return t(n,e(n))}return r3=r,r3}var n3,IS;function UJ(){if(IS)return n3;IS=1;var t=CS(),e=Z9(),r=LT(),n=J9(),i=DT(),a=Gc(),s=gr(),o=FC(),l=tl(),u=Yo,h=Vn,d=AS(),f=jc(),p=LS(),m=YJ();function _(y,b,x,k,T,C,M){var S=p(y,x),R=p(b,x),A=M.get(R);if(A){t(y,x,A);return}var L=C?C(S,R,x+"",y,b,M):void 0,v=L===void 0;if(v){var B=s(R),w=!B&&l(R),D=!B&&!w&&f(R);L=R,B||w||D?s(S)?L=S:o(S)?L=n(S):w?(v=!1,L=e(R,!0)):D?(v=!1,L=r(R,!0)):L=[]:d(R)||a(R)?(L=S,a(S)?L=m(S):(!h(S)||u(S))&&(L=i(R))):v=!1}v&&(M.set(R,L),T(L,R,k,C,M),M.delete(R)),t(y,x,L)}return n3=_,n3}var i3,NS;function WJ(){if(NS)return i3;NS=1;var t=v1(),e=CS(),r=Zy(),n=UJ(),i=Vn,a=Ws(),s=LS();function o(l,u,h,d,f){l!==u&&r(u,function(p,m){if(f||(f=new t),i(p))n(l,u,m,h,o,d,f);else{var _=d?d(s(l,m),p,m+"",l,u,f):void 0;_===void 0&&(_=p),e(l,m,_)}},a)}return i3=o,i3}var a3,BS;function HJ(){if(BS)return a3;BS=1;var t=B1(),e=q1();function r(n){return t(function(i,a){var s=-1,o=a.length,l=o>1?a[o-1]:void 0,u=o>2?a[2]:void 0;for(l=n.length>3&&typeof l=="function"?(o--,l):void 0,u&&e(a[0],a[1],u)&&(l=o<3?void 0:l,o=1),i=Object(i);++s<o;){var h=a[s];h&&n(i,h,s,l)}return i})}return a3=r,a3}var s3,DS;function GJ(){if(DS)return s3;DS=1;var t=WJ(),e=HJ(),r=e(function(n,i,a){t(n,i,a)});return s3=r,s3}var o3,OS;function FS(){if(OS)return o3;OS=1;function t(e,r){return e<r}return o3=t,o3}var l3,PS;function jJ(){if(PS)return l3;PS=1;var t=Kb(),e=FS(),r=Hs();function n(i){return i&&i.length?t(i,r,e):void 0}return l3=n,l3}var c3,qS;function $J(){if(qS)return c3;qS=1;var t=Kb(),e=la(),r=FS();function n(i,a){return i&&i.length?t(i,e(a,2),r):void 0}return c3=n,c3}var u3,VS;function XJ(){if(VS)return 
u3;VS=1;var t=si,e=function(){return t.Date.now()};return u3=e,u3}var h3,zS;function KJ(){if(zS)return h3;zS=1;var t=k1(),e=I1(),r=T1(),n=Vn,i=$c();function a(s,o,l,u){if(!n(s))return s;o=e(o,s);for(var h=-1,d=o.length,f=d-1,p=s;p!=null&&++h<d;){var m=i(o[h]),_=l;if(m==="__proto__"||m==="constructor"||m==="prototype")return s;if(h!=f){var y=p[m];_=u?u(y,m,p):void 0,_===void 0&&(_=n(y)?y:r(o[h+1])?[]:{})}t(p,m,_),p=p[m]}return s}return h3=a,h3}var f3,YS;function ZJ(){if(YS)return f3;YS=1;var t=N1(),e=KJ(),r=I1();function n(i,a,s){for(var o=-1,l=a.length,u={};++o<l;){var h=a[o],d=t(i,h);s(d,h)&&e(u,r(h,i),d)}return u}return f3=n,f3}var d3,US;function QJ(){if(US)return d3;US=1;var t=ZJ(),e=zE();function r(n,i){return t(n,i,function(a,s){return e(n,s)})}return d3=r,d3}var p3,WS;function JJ(){if(WS)return p3;WS=1;var t=bS(),e=vC(),r=TC();function n(i){return r(e(i,void 0,t),i+"")}return p3=n,p3}var g3,HS;function GS(){if(HS)return g3;HS=1;var t=QJ(),e=JJ(),r=e(function(n,i){return n==null?{}:t(n,i)});return g3=r,g3}var y3,jS;function ttt(){if(jS)return y3;jS=1;var t=Math.ceil,e=Math.max;function r(n,i,a,s){for(var o=-1,l=e(t((i-n)/(a||1)),0),u=Array(l);l--;)u[s?l:++o]=n,n+=a;return u}return y3=r,y3}var m3,$S;function ett(){if($S)return m3;$S=1;var t=ttt(),e=q1(),r=dS();function n(i){return function(a,s,o){return o&&typeof o!="number"&&e(a,s,o)&&(s=o=void 0),a=r(a),s===void 0?(s=a,a=0):s=r(s),o=o===void 0?a<s?1:-1:r(o),t(a,s,o,i)}}return m3=n,m3}var b3,XS;function KS(){if(XS)return b3;XS=1;var t=ett(),e=t();return b3=e,b3}var _3,ZS;function rtt(){if(ZS)return _3;ZS=1;function t(e,r){var n=e.length;for(e.sort(r);n--;)e[n]=e[n].value;return e}return _3=t,_3}var v3,QS;function ntt(){if(QS)return v3;QS=1;var t=rl();function e(r,n){if(r!==n){var i=r!==void 0,a=r===null,s=r===r,o=t(r),l=n!==void 0,u=n===null,h=n===n,d=t(n);if(!u&&!d&&!o&&r>n||o&&l&&h&&!u&&!d||a&&l&&h||!i&&h||!s)return 1;if(!a&&!o&&!d&&r<n||d&&i&&s&&!a&&!o||u&&i&&s||!l&&s||!h)return-1}return 0}return 
v3=e,v3}var x3,JS;function itt(){if(JS)return x3;JS=1;var t=ntt();function e(r,n,i){for(var a=-1,s=r.criteria,o=n.criteria,l=s.length,u=i.length;++a<l;){var h=t(s[a],o[a]);if(h){if(a>=u)return h;var d=i[a];return h*(d=="desc"?-1:1)}}return r.index-n.index}return x3=e,x3}var k3,tA;function att(){if(tA)return k3;tA=1;var t=R1(),e=N1(),r=la(),n=rC(),i=rtt(),a=E1(),s=itt(),o=Hs(),l=gr();function u(h,d,f){d.length?d=t(d,function(_){return l(_)?function(y){return e(y,_.length===1?_[0]:_)}:_}):d=[o];var p=-1;d=t(d,a(r));var m=n(h,function(_,y,b){var x=t(d,function(k){return k(_)});return{criteria:x,index:++p,value:_}});return i(m,function(_,y){return s(_,y,f)})}return k3=u,k3}var w3,eA;function stt(){if(eA)return w3;eA=1;var t=hb(),e=att(),r=B1(),n=q1(),i=r(function(a,s){if(a==null)return[];var o=s.length;return o>1&&n(a,s[0],s[1])?s=[]:o>2&&n(s[0],s[1],s[2])&&(s=[s[0]]),e(a,t(s,1),[])});return w3=i,w3}var T3,rA;function nA(){if(rA)return T3;rA=1;var t=IE(),e=0;function r(n){var i=++e;return t(n)+i}return T3=r,T3}var E3,iA;function ott(){if(iA)return E3;iA=1;function t(e,r,n){for(var i=-1,a=e.length,s=r.length,o={};++i<a;){var l=i<s?r[i]:void 0;n(o,e[i],l)}return o}return E3=t,E3}var C3,aA;function ltt(){if(aA)return C3;aA=1;var t=k1(),e=ott();function r(n,i){return e(n||[],i||[],t)}return C3=r,C3}var S3,sA;function $e(){if(sA)return S3;sA=1;var t;if(typeof fn=="function")try{t={cloneDeep:MJ(),constant:jy(),defaults:oS(),each:am(),filter:XE(),find:OJ(),flatten:bS(),forEach:JT(),forIn:FJ(),has:$m(),isUndefined:tC(),last:PJ(),map:iC(),mapValues:qJ(),max:zJ(),merge:GJ(),min:jJ(),minBy:$J(),now:XJ(),pick:GS(),range:KS(),reduce:lC(),sortBy:stt(),uniqueId:nA(),values:zC(),zipObject:ltt()}}catch{}return t||(t=window._),S3=t,S3}var A3,oA;function ctt(){if(oA)return A3;oA=1,A3=t;function t(){var n={};n._next=n._prev=n,this._sentinel=n}t.prototype.dequeue=function(){var n=this._sentinel,i=n._prev;if(i!==n)return e(i),i},t.prototype.enqueue=function(n){var 
i=this._sentinel;n._prev&&n._next&&e(n),n._next=i._next,i._next._prev=n,i._next=n,n._prev=i},t.prototype.toString=function(){for(var n=[],i=this._sentinel,a=i._prev;a!==i;)n.push(JSON.stringify(a,r)),a=a._prev;return"["+n.join(", ")+"]"};function e(n){n._prev._next=n._next,n._next._prev=n._prev,delete n._next,delete n._prev}function r(n,i){if(n!=="_next"&&n!=="_prev")return i}return A3}var M3,lA;function utt(){if(lA)return M3;lA=1;var t=$e(),e=yi().Graph,r=ctt();M3=i;var n=t.constant(1);function i(u,h){if(u.nodeCount()<=1)return[];var d=o(u,h||n),f=a(d.graph,d.buckets,d.zeroIdx);return t.flatten(t.map(f,function(p){return u.outEdges(p.v,p.w)}),!0)}function a(u,h,d){for(var f=[],p=h[h.length-1],m=h[0],_;u.nodeCount();){for(;_=m.dequeue();)s(u,h,d,_);for(;_=p.dequeue();)s(u,h,d,_);if(u.nodeCount()){for(var y=h.length-2;y>0;--y)if(_=h[y].dequeue(),_){f=f.concat(s(u,h,d,_,!0));break}}}return f}function s(u,h,d,f,p){var m=p?[]:void 0;return t.forEach(u.inEdges(f.v),function(_){var y=u.edge(_),b=u.node(_.v);p&&m.push({v:_.v,w:_.w}),b.out-=y,l(h,d,b)}),t.forEach(u.outEdges(f.v),function(_){var y=u.edge(_),b=_.w,x=u.node(b);x.in-=y,l(h,d,x)}),u.removeNode(f.v),m}function o(u,h){var d=new e,f=0,p=0;t.forEach(u.nodes(),function(y){d.setNode(y,{v:y,in:0,out:0})}),t.forEach(u.edges(),function(y){var b=d.edge(y.v,y.w)||0,x=h(y),k=b+x;d.setEdge(y.v,y.w,k),p=Math.max(p,d.node(y.v).out+=x),f=Math.max(f,d.node(y.w).in+=x)});var m=t.range(p+f+3).map(function(){return new r}),_=f+1;return t.forEach(d.nodes(),function(y){l(m,_,d.node(y))}),{graph:d,buckets:m,zeroIdx:_}}function l(u,h,d){d.out?d.in?u[d.out-d.in+h].enqueue(d):u[u.length-1].enqueue(d):u[0].enqueue(d)}return M3}var L3,cA;function htt(){if(cA)return L3;cA=1;var t=$e(),e=utt();L3={run:r,undo:i};function r(a){var s=a.graph().acyclicer==="greedy"?e(a,o(a)):n(a);t.forEach(s,function(l){var u=a.edge(l);a.removeEdge(l),u.forwardName=l.name,u.reversed=!0,a.setEdge(l.w,l.v,u,t.uniqueId("rev"))});function o(l){return 
function(u){return l.edge(u).weight}}}function n(a){var s=[],o={},l={};function u(h){t.has(l,h)||(l[h]=!0,o[h]=!0,t.forEach(a.outEdges(h),function(d){t.has(o,d.w)?s.push(d):u(d.w)}),delete o[h])}return t.forEach(a.nodes(),u),s}function i(a){t.forEach(a.edges(),function(s){var o=a.edge(s);if(o.reversed){a.removeEdge(s);var l=o.forwardName;delete o.reversed,delete o.forwardName,a.setEdge(s.w,s.v,o,l)}})}return L3}var R3,uA;function vn(){if(uA)return R3;uA=1;var t=$e(),e=yi().Graph;R3={addDummyNode:r,simplify:n,asNonCompoundGraph:i,successorWeights:a,predecessorWeights:s,intersectRect:o,buildLayerMatrix:l,normalizeRanks:u,removeEmptyRanks:h,addBorderNode:d,maxRank:f,partition:p,time:m,notime:_};function r(y,b,x,k){var T;do T=t.uniqueId(k);while(y.hasNode(T));return x.dummy=b,y.setNode(T,x),T}function n(y){var b=new e().setGraph(y.graph());return t.forEach(y.nodes(),function(x){b.setNode(x,y.node(x))}),t.forEach(y.edges(),function(x){var k=b.edge(x.v,x.w)||{weight:0,minlen:1},T=y.edge(x);b.setEdge(x.v,x.w,{weight:k.weight+T.weight,minlen:Math.max(k.minlen,T.minlen)})}),b}function i(y){var b=new e({multigraph:y.isMultigraph()}).setGraph(y.graph());return t.forEach(y.nodes(),function(x){y.children(x).length||b.setNode(x,y.node(x))}),t.forEach(y.edges(),function(x){b.setEdge(x,y.edge(x))}),b}function a(y){var b=t.map(y.nodes(),function(x){var k={};return t.forEach(y.outEdges(x),function(T){k[T.w]=(k[T.w]||0)+y.edge(T).weight}),k});return t.zipObject(y.nodes(),b)}function s(y){var b=t.map(y.nodes(),function(x){var k={};return t.forEach(y.inEdges(x),function(T){k[T.v]=(k[T.v]||0)+y.edge(T).weight}),k});return t.zipObject(y.nodes(),b)}function o(y,b){var x=y.x,k=y.y,T=b.x-x,C=b.y-k,M=y.width/2,S=y.height/2;if(!T&&!C)throw new Error("Not possible to find intersection inside of the rectangle");var R,A;return Math.abs(C)*M>Math.abs(T)*S?(C<0&&(S=-S),R=S*T/C,A=S):(T<0&&(M=-M),R=M,A=M*C/T),{x:x+R,y:k+A}}function l(y){var b=t.map(t.range(f(y)+1),function(){return[]});return 
t.forEach(y.nodes(),function(x){var k=y.node(x),T=k.rank;t.isUndefined(T)||(b[T][k.order]=x)}),b}function u(y){var b=t.min(t.map(y.nodes(),function(x){return y.node(x).rank}));t.forEach(y.nodes(),function(x){var k=y.node(x);t.has(k,"rank")&&(k.rank-=b)})}function h(y){var b=t.min(t.map(y.nodes(),function(C){return y.node(C).rank})),x=[];t.forEach(y.nodes(),function(C){var M=y.node(C).rank-b;x[M]||(x[M]=[]),x[M].push(C)});var k=0,T=y.graph().nodeRankFactor;t.forEach(x,function(C,M){t.isUndefined(C)&&M%T!==0?--k:k&&t.forEach(C,function(S){y.node(S).rank+=k})})}function d(y,b,x,k){var T={width:0,height:0};return arguments.length>=4&&(T.rank=x,T.order=k),r(y,"border",T,b)}function f(y){return t.max(t.map(y.nodes(),function(b){var x=y.node(b).rank;if(!t.isUndefined(x))return x}))}function p(y,b){var x={lhs:[],rhs:[]};return t.forEach(y,function(k){b(k)?x.lhs.push(k):x.rhs.push(k)}),x}function m(y,b){var x=t.now();try{return b()}finally{console.log(y+" time: "+(t.now()-x)+"ms")}}function _(y,b){return b()}return R3}var I3,hA;function ftt(){if(hA)return I3;hA=1;var t=$e(),e=vn();I3={run:r,undo:i};function r(a){a.graph().dummyChains=[],t.forEach(a.edges(),function(s){n(a,s)})}function n(a,s){var o=s.v,l=a.node(o).rank,u=s.w,h=a.node(u).rank,d=s.name,f=a.edge(s),p=f.labelRank;if(h!==l+1){a.removeEdge(s);var m,_,y;for(y=0,++l;l<h;++y,++l)f.points=[],_={width:0,height:0,edgeLabel:f,edgeObj:s,rank:l},m=e.addDummyNode(a,"edge",_,"_d"),l===p&&(_.width=f.width,_.height=f.height,_.dummy="edge-label",_.labelpos=f.labelpos),a.setEdge(o,m,{weight:f.weight},d),y===0&&a.graph().dummyChains.push(m),o=m;a.setEdge(o,u,{weight:f.weight},d)}}function i(a){t.forEach(a.graph().dummyChains,function(s){var o=a.node(s),l=o.edgeLabel,u;for(a.setEdge(o.edgeObj,l);o.dummy;)u=a.successors(s)[0],a.removeNode(s),l.points.push({x:o.x,y:o.y}),o.dummy==="edge-label"&&(l.x=o.x,l.y=o.y,l.width=o.width,l.height=o.height),s=u,o=a.node(s)})}return I3}var N3,fA;function V1(){if(fA)return N3;fA=1;var 
t=$e();N3={longestPath:e,slack:r};function e(n){var i={};function a(s){var o=n.node(s);if(t.has(i,s))return o.rank;i[s]=!0;var l=t.min(t.map(n.outEdges(s),function(u){return a(u.w)-n.edge(u).minlen}));return(l===Number.POSITIVE_INFINITY||l===void 0||l===null)&&(l=0),o.rank=l}t.forEach(n.sources(),a)}function r(n,i){return n.node(i.w).rank-n.node(i.v).rank-n.edge(i).minlen}return N3}var B3,dA;function pA(){if(dA)return B3;dA=1;var t=$e(),e=yi().Graph,r=V1().slack;B3=n;function n(o){var l=new e({directed:!1}),u=o.nodes()[0],h=o.nodeCount();l.setNode(u,{});for(var d,f;i(l,o)<h;)d=a(l,o),f=l.hasNode(d.v)?r(o,d):-r(o,d),s(l,o,f);return l}function i(o,l){function u(h){t.forEach(l.nodeEdges(h),function(d){var f=d.v,p=h===f?d.w:f;!o.hasNode(p)&&!r(l,d)&&(o.setNode(p,{}),o.setEdge(h,p,{}),u(p))})}return t.forEach(o.nodes(),u),o.nodeCount()}function a(o,l){return t.minBy(l.edges(),function(u){if(o.hasNode(u.v)!==o.hasNode(u.w))return r(l,u)})}function s(o,l,u){t.forEach(o.nodes(),function(h){l.node(h).rank+=u})}return B3}var D3,gA;function dtt(){if(gA)return D3;gA=1;var t=$e(),e=pA(),r=V1().slack,n=V1().longestPath,i=yi().alg.preorder,a=yi().alg.postorder,s=vn().simplify;D3=o,o.initLowLimValues=d,o.initCutValues=l,o.calcCutValue=h,o.leaveEdge=p,o.enterEdge=m,o.exchangeEdges=_;function o(k){k=s(k),n(k);var T=e(k);d(T),l(T,k);for(var C,M;C=p(T);)M=m(T,k,C),_(T,k,C,M)}function l(k,T){var C=a(k,k.nodes());C=C.slice(0,C.length-1),t.forEach(C,function(M){u(k,T,M)})}function u(k,T,C){var M=k.node(C),S=M.parent;k.edge(C,S).cutvalue=h(k,T,C)}function h(k,T,C){var M=k.node(C),S=M.parent,R=!0,A=T.edge(C,S),L=0;return A||(R=!1,A=T.edge(S,C)),L=A.weight,t.forEach(T.nodeEdges(C),function(v){var B=v.v===C,w=B?v.w:v.v;if(w!==S){var D=B===R,N=T.edge(v).weight;if(L+=D?N:-N,b(k,C,w)){var z=k.edge(C,w).cutvalue;L+=D?-z:z}}}),L}function d(k,T){arguments.length<2&&(T=k.nodes()[0]),f(k,{},1,T)}function f(k,T,C,M,S){var R=C,A=k.node(M);return 
T[M]=!0,t.forEach(k.neighbors(M),function(L){t.has(T,L)||(C=f(k,T,C,L,M))}),A.low=R,A.lim=C++,S?A.parent=S:delete A.parent,C}function p(k){return t.find(k.edges(),function(T){return k.edge(T).cutvalue<0})}function m(k,T,C){var M=C.v,S=C.w;T.hasEdge(M,S)||(M=C.w,S=C.v);var R=k.node(M),A=k.node(S),L=R,v=!1;R.lim>A.lim&&(L=A,v=!0);var B=t.filter(T.edges(),function(w){return v===x(k,k.node(w.v),L)&&v!==x(k,k.node(w.w),L)});return t.minBy(B,function(w){return r(T,w)})}function _(k,T,C,M){var S=C.v,R=C.w;k.removeEdge(S,R),k.setEdge(M.v,M.w,{}),d(k),l(k,T),y(k,T)}function y(k,T){var C=t.find(k.nodes(),function(S){return!T.node(S).parent}),M=i(k,C);M=M.slice(1),t.forEach(M,function(S){var R=k.node(S).parent,A=T.edge(S,R),L=!1;A||(A=T.edge(R,S),L=!0),T.node(S).rank=T.node(R).rank+(L?A.minlen:-A.minlen)})}function b(k,T,C){return k.hasEdge(T,C)}function x(k,T,C){return C.low<=T.lim&&T.lim<=C.lim}return D3}var O3,yA;function ptt(){if(yA)return O3;yA=1;var t=V1(),e=t.longestPath,r=pA(),n=dtt();O3=i;function i(l){switch(l.graph().ranker){case"network-simplex":o(l);break;case"tight-tree":s(l);break;case"longest-path":a(l);break;default:o(l)}}var a=e;function s(l){e(l),r(l)}function o(l){n(l)}return O3}var F3,mA;function gtt(){if(mA)return F3;mA=1;var t=$e();F3=e;function e(i){var a=n(i);t.forEach(i.graph().dummyChains,function(s){for(var o=i.node(s),l=o.edgeObj,u=r(i,a,l.v,l.w),h=u.path,d=u.lca,f=0,p=h[f],m=!0;s!==l.w;){if(o=i.node(s),m){for(;(p=h[f])!==d&&i.node(p).maxRank<o.rank;)f++;p===d&&(m=!1)}if(!m){for(;f<h.length-1&&i.node(p=h[f+1]).minRank<=o.rank;)f++;p=h[f]}i.setParent(s,p),s=i.successors(s)[0]}})}function r(i,a,s,o){var l=[],u=[],h=Math.min(a[s].low,a[o].low),d=Math.max(a[s].lim,a[o].lim),f,p;f=s;do f=i.parent(f),l.push(f);while(f&&(a[f].low>h||d>a[f].lim));for(p=f,f=o;(f=i.parent(f))!==p;)u.push(f);return{path:l.concat(u.reverse()),lca:p}}function n(i){var a={},s=0;function o(l){var u=s;t.forEach(i.children(l),o),a[l]={low:u,lim:s++}}return 
t.forEach(i.children(),o),a}return F3}var P3,bA;function ytt(){if(bA)return P3;bA=1;var t=$e(),e=vn();P3={run:r,cleanup:s};function r(o){var l=e.addDummyNode(o,"root",{},"_root"),u=i(o),h=t.max(t.values(u))-1,d=2*h+1;o.graph().nestingRoot=l,t.forEach(o.edges(),function(p){o.edge(p).minlen*=d});var f=a(o)+1;t.forEach(o.children(),function(p){n(o,l,d,f,h,u,p)}),o.graph().nodeRankFactor=d}function n(o,l,u,h,d,f,p){var m=o.children(p);if(!m.length){p!==l&&o.setEdge(l,p,{weight:0,minlen:u});return}var _=e.addBorderNode(o,"_bt"),y=e.addBorderNode(o,"_bb"),b=o.node(p);o.setParent(_,p),b.borderTop=_,o.setParent(y,p),b.borderBottom=y,t.forEach(m,function(x){n(o,l,u,h,d,f,x);var k=o.node(x),T=k.borderTop?k.borderTop:x,C=k.borderBottom?k.borderBottom:x,M=k.borderTop?h:2*h,S=T!==C?1:d-f[p]+1;o.setEdge(_,T,{weight:M,minlen:S,nestingEdge:!0}),o.setEdge(C,y,{weight:M,minlen:S,nestingEdge:!0})}),o.parent(p)||o.setEdge(l,_,{weight:0,minlen:d+f[p]})}function i(o){var l={};function u(h,d){var f=o.children(h);f&&f.length&&t.forEach(f,function(p){u(p,d+1)}),l[h]=d}return t.forEach(o.children(),function(h){u(h,1)}),l}function a(o){return t.reduce(o.edges(),function(l,u){return l+o.edge(u).weight},0)}function s(o){var l=o.graph();o.removeNode(l.nestingRoot),delete l.nestingRoot,t.forEach(o.edges(),function(u){var h=o.edge(u);h.nestingEdge&&o.removeEdge(u)})}return P3}var q3,_A;function mtt(){if(_A)return q3;_A=1;var t=$e(),e=vn();q3=r;function r(i){function a(s){var o=i.children(s),l=i.node(s);if(o.length&&t.forEach(o,a),t.has(l,"minRank")){l.borderLeft=[],l.borderRight=[];for(var u=l.minRank,h=l.maxRank+1;u<h;++u)n(i,"borderLeft","_bl",s,l,u),n(i,"borderRight","_br",s,l,u)}}t.forEach(i.children(),a)}function n(i,a,s,o,l,u){var h={width:0,height:0,rank:u,borderType:a},d=l[a][u-1],f=e.addDummyNode(i,"border",h,s);l[a][u]=f,i.setParent(f,o),d&&i.setEdge(d,f,{weight:1})}return q3}var V3,vA;function btt(){if(vA)return V3;vA=1;var t=$e();V3={adjust:e,undo:r};function e(u){var 
h=u.graph().rankdir.toLowerCase();(h==="lr"||h==="rl")&&n(u)}function r(u){var h=u.graph().rankdir.toLowerCase();(h==="bt"||h==="rl")&&a(u),(h==="lr"||h==="rl")&&(o(u),n(u))}function n(u){t.forEach(u.nodes(),function(h){i(u.node(h))}),t.forEach(u.edges(),function(h){i(u.edge(h))})}function i(u){var h=u.width;u.width=u.height,u.height=h}function a(u){t.forEach(u.nodes(),function(h){s(u.node(h))}),t.forEach(u.edges(),function(h){var d=u.edge(h);t.forEach(d.points,s),t.has(d,"y")&&s(d)})}function s(u){u.y=-u.y}function o(u){t.forEach(u.nodes(),function(h){l(u.node(h))}),t.forEach(u.edges(),function(h){var d=u.edge(h);t.forEach(d.points,l),t.has(d,"x")&&l(d)})}function l(u){var h=u.x;u.x=u.y,u.y=h}return V3}var z3,xA;function _tt(){if(xA)return z3;xA=1;var t=$e();z3=e;function e(r){var n={},i=t.filter(r.nodes(),function(u){return!r.children(u).length}),a=t.max(t.map(i,function(u){return r.node(u).rank})),s=t.map(t.range(a+1),function(){return[]});function o(u){if(!t.has(n,u)){n[u]=!0;var h=r.node(u);s[h.rank].push(u),t.forEach(r.successors(u),o)}}var l=t.sortBy(i,function(u){return r.node(u).rank});return t.forEach(l,o),s}return z3}var Y3,kA;function vtt(){if(kA)return Y3;kA=1;var t=$e();Y3=e;function e(n,i){for(var a=0,s=1;s<i.length;++s)a+=r(n,i[s-1],i[s]);return a}function r(n,i,a){for(var s=t.zipObject(a,t.map(a,function(f,p){return p})),o=t.flatten(t.map(i,function(f){return t.sortBy(t.map(n.outEdges(f),function(p){return{pos:s[p.w],weight:n.edge(p).weight}}),"pos")}),!0),l=1;l<a.length;)l<<=1;var u=2*l-1;l-=1;var h=t.map(new Array(u),function(){return 0}),d=0;return t.forEach(o.forEach(function(f){var p=f.pos+l;h[p]+=f.weight;for(var m=0;p>0;)p%2&&(m+=h[p+1]),p=p-1>>1,h[p]+=f.weight;d+=f.weight*m})),d}return Y3}var U3,wA;function xtt(){if(wA)return U3;wA=1;var t=$e();U3=e;function e(r,n){return t.map(n,function(i){var a=r.inEdges(i);if(a.length){var s=t.reduce(a,function(o,l){var 
u=r.edge(l),h=r.node(l.v);return{sum:o.sum+u.weight*h.order,weight:o.weight+u.weight}},{sum:0,weight:0});return{v:i,barycenter:s.sum/s.weight,weight:s.weight}}else return{v:i}})}return U3}var W3,TA;function ktt(){if(TA)return W3;TA=1;var t=$e();W3=e;function e(i,a){var s={};t.forEach(i,function(l,u){var h=s[l.v]={indegree:0,in:[],out:[],vs:[l.v],i:u};t.isUndefined(l.barycenter)||(h.barycenter=l.barycenter,h.weight=l.weight)}),t.forEach(a.edges(),function(l){var u=s[l.v],h=s[l.w];!t.isUndefined(u)&&!t.isUndefined(h)&&(h.indegree++,u.out.push(s[l.w]))});var o=t.filter(s,function(l){return!l.indegree});return r(o)}function r(i){var a=[];function s(u){return function(h){h.merged||(t.isUndefined(h.barycenter)||t.isUndefined(u.barycenter)||h.barycenter>=u.barycenter)&&n(u,h)}}function o(u){return function(h){h.in.push(u),--h.indegree===0&&i.push(h)}}for(;i.length;){var l=i.pop();a.push(l),t.forEach(l.in.reverse(),s(l)),t.forEach(l.out,o(l))}return t.map(t.filter(a,function(u){return!u.merged}),function(u){return t.pick(u,["vs","i","barycenter","weight"])})}function n(i,a){var s=0,o=0;i.weight&&(s+=i.barycenter*i.weight,o+=i.weight),a.weight&&(s+=a.barycenter*a.weight,o+=a.weight),i.vs=a.vs.concat(i.vs),i.barycenter=s/o,i.weight=o,i.i=Math.min(a.i,i.i),a.merged=!0}return W3}var H3,EA;function wtt(){if(EA)return H3;EA=1;var t=$e(),e=vn();H3=r;function r(a,s){var o=e.partition(a,function(_){return t.has(_,"barycenter")}),l=o.lhs,u=t.sortBy(o.rhs,function(_){return-_.i}),h=[],d=0,f=0,p=0;l.sort(i(!!s)),p=n(h,u,p),t.forEach(l,function(_){p+=_.vs.length,h.push(_.vs),d+=_.barycenter*_.weight,f+=_.weight,p=n(h,u,p)});var m={vs:t.flatten(h,!0)};return f&&(m.barycenter=d/f,m.weight=f),m}function n(a,s,o){for(var l;s.length&&(l=t.last(s)).i<=o;)s.pop(),a.push(l.vs),o++;return o}function i(a){return function(s,o){return s.barycenter<o.barycenter?-1:s.barycenter>o.barycenter?1:a?o.i-s.i:s.i-o.i}}return H3}var G3,CA;function Ttt(){if(CA)return G3;CA=1;var 
t=$e(),e=xtt(),r=ktt(),n=wtt();G3=i;function i(o,l,u,h){var d=o.children(l),f=o.node(l),p=f?f.borderLeft:void 0,m=f?f.borderRight:void 0,_={};p&&(d=t.filter(d,function(C){return C!==p&&C!==m}));var y=e(o,d);t.forEach(y,function(C){if(o.children(C.v).length){var M=i(o,C.v,u,h);_[C.v]=M,t.has(M,"barycenter")&&s(C,M)}});var b=r(y,u);a(b,_);var x=n(b,h);if(p&&(x.vs=t.flatten([p,x.vs,m],!0),o.predecessors(p).length)){var k=o.node(o.predecessors(p)[0]),T=o.node(o.predecessors(m)[0]);t.has(x,"barycenter")||(x.barycenter=0,x.weight=0),x.barycenter=(x.barycenter*x.weight+k.order+T.order)/(x.weight+2),x.weight+=2}return x}function a(o,l){t.forEach(o,function(u){u.vs=t.flatten(u.vs.map(function(h){return l[h]?l[h].vs:h}),!0)})}function s(o,l){t.isUndefined(o.barycenter)?(o.barycenter=l.barycenter,o.weight=l.weight):(o.barycenter=(o.barycenter*o.weight+l.barycenter*l.weight)/(o.weight+l.weight),o.weight+=l.weight)}return G3}var j3,SA;function Ett(){if(SA)return j3;SA=1;var t=$e(),e=yi().Graph;j3=r;function r(i,a,s){var o=n(i),l=new e({compound:!0}).setGraph({root:o}).setDefaultNodeLabel(function(u){return i.node(u)});return t.forEach(i.nodes(),function(u){var h=i.node(u),d=i.parent(u);(h.rank===a||h.minRank<=a&&a<=h.maxRank)&&(l.setNode(u),l.setParent(u,d||o),t.forEach(i[s](u),function(f){var p=f.v===u?f.w:f.v,m=l.edge(p,u),_=t.isUndefined(m)?0:m.weight;l.setEdge(p,u,{weight:i.edge(f).weight+_})}),t.has(h,"minRank")&&l.setNode(u,{borderLeft:h.borderLeft[a],borderRight:h.borderRight[a]}))}),l}function n(i){for(var a;i.hasNode(a=t.uniqueId("_root")););return a}return j3}var $3,AA;function Ctt(){if(AA)return $3;AA=1;var t=$e();$3=e;function e(r,n,i){var a={},s;t.forEach(i,function(o){for(var l=r.parent(o),u,h;l;){if(u=r.parent(l),u?(h=a[u],a[u]=l):(h=s,s=l),h&&h!==l){n.setEdge(h,l);return}l=u}})}return $3}var X3,MA;function Stt(){if(MA)return X3;MA=1;var t=$e(),e=_tt(),r=vtt(),n=Ttt(),i=Ett(),a=Ctt(),s=yi().Graph,o=vn();X3=l;function l(f){var 
p=o.maxRank(f),m=u(f,t.range(1,p+1),"inEdges"),_=u(f,t.range(p-1,-1,-1),"outEdges"),y=e(f);d(f,y);for(var b=Number.POSITIVE_INFINITY,x,k=0,T=0;T<4;++k,++T){h(k%2?m:_,k%4>=2),y=o.buildLayerMatrix(f);var C=r(f,y);C<b&&(T=0,x=t.cloneDeep(y),b=C)}d(f,x)}function u(f,p,m){return t.map(p,function(_){return i(f,_,m)})}function h(f,p){var m=new s;t.forEach(f,function(_){var y=_.graph().root,b=n(_,y,m,p);t.forEach(b.vs,function(x,k){_.node(x).order=k}),a(_,m,b.vs)})}function d(f,p){t.forEach(p,function(m){t.forEach(m,function(_,y){f.node(_).order=y})})}return X3}var K3,LA;function Att(){if(LA)return K3;LA=1;var t=$e(),e=yi().Graph,r=vn();K3={positionX:m,findType1Conflicts:n,findType2Conflicts:i,addConflict:s,hasConflict:o,verticalAlignment:l,horizontalCompaction:u,alignCoordinates:f,findSmallestWidthAlignment:d,balance:p};function n(b,x){var k={};function T(C,M){var S=0,R=0,A=C.length,L=t.last(M);return t.forEach(M,function(v,B){var w=a(b,v),D=w?b.node(w).order:A;(w||v===L)&&(t.forEach(M.slice(R,B+1),function(N){t.forEach(b.predecessors(N),function(z){var X=b.node(z),ct=X.order;(ct<S||D<ct)&&!(X.dummy&&b.node(N).dummy)&&s(k,z,N)})}),R=B+1,S=D)}),M}return t.reduce(x,T),k}function i(b,x){var k={};function T(M,S,R,A,L){var v;t.forEach(t.range(S,R),function(B){v=M[B],b.node(v).dummy&&t.forEach(b.predecessors(v),function(w){var D=b.node(w);D.dummy&&(D.order<A||D.order>L)&&s(k,w,v)})})}function C(M,S){var R=-1,A,L=0;return t.forEach(S,function(v,B){if(b.node(v).dummy==="border"){var w=b.predecessors(v);w.length&&(A=b.node(w[0]).order,T(S,L,B,R,A),L=B,R=A)}T(S,L,S.length,A,M.length)}),S}return t.reduce(x,C),k}function a(b,x){if(b.node(x).dummy)return t.find(b.predecessors(x),function(k){return b.node(k).dummy})}function s(b,x,k){if(x>k){var T=x;x=k,k=T}var C=b[x];C||(b[x]=C={}),C[k]=!0}function o(b,x,k){if(x>k){var T=x;x=k,k=T}return t.has(b[x],k)}function l(b,x,k,T){var C={},M={},S={};return 
t.forEach(x,function(R){t.forEach(R,function(A,L){C[A]=A,M[A]=A,S[A]=L})}),t.forEach(x,function(R){var A=-1;t.forEach(R,function(L){var v=T(L);if(v.length){v=t.sortBy(v,function(z){return S[z]});for(var B=(v.length-1)/2,w=Math.floor(B),D=Math.ceil(B);w<=D;++w){var N=v[w];M[L]===L&&A<S[N]&&!o(k,L,N)&&(M[N]=L,M[L]=C[L]=C[N],A=S[N])}}})}),{root:C,align:M}}function u(b,x,k,T,C){var M={},S=h(b,x,k,C),R=C?"borderLeft":"borderRight";function A(B,w){for(var D=S.nodes(),N=D.pop(),z={};N;)z[N]?B(N):(z[N]=!0,D.push(N),D=D.concat(w(N))),N=D.pop()}function L(B){M[B]=S.inEdges(B).reduce(function(w,D){return Math.max(w,M[D.v]+S.edge(D))},0)}function v(B){var w=S.outEdges(B).reduce(function(N,z){return Math.min(N,M[z.w]-S.edge(z))},Number.POSITIVE_INFINITY),D=b.node(B);w!==Number.POSITIVE_INFINITY&&D.borderType!==R&&(M[B]=Math.max(M[B],w))}return A(L,S.predecessors.bind(S)),A(v,S.successors.bind(S)),t.forEach(T,function(B){M[B]=M[k[B]]}),M}function h(b,x,k,T){var C=new e,M=b.graph(),S=_(M.nodesep,M.edgesep,T);return t.forEach(x,function(R){var A;t.forEach(R,function(L){var v=k[L];if(C.setNode(v),A){var B=k[A],w=C.edge(B,v);C.setEdge(B,v,Math.max(S(b,L,A),w||0))}A=L})}),C}function d(b,x){return t.minBy(t.values(x),function(k){var T=Number.NEGATIVE_INFINITY,C=Number.POSITIVE_INFINITY;return t.forIn(k,function(M,S){var R=y(b,S)/2;T=Math.max(M+R,T),C=Math.min(M-R,C)}),T-C})}function f(b,x){var k=t.values(x),T=t.min(k),C=t.max(k);t.forEach(["u","d"],function(M){t.forEach(["l","r"],function(S){var R=M+S,A=b[R],L;if(A!==x){var v=t.values(A);L=S==="l"?T-t.min(v):C-t.max(v),L&&(b[R]=t.mapValues(A,function(B){return B+L}))}})})}function p(b,x){return t.mapValues(b.ul,function(k,T){if(x)return b[x.toLowerCase()][T];var C=t.sortBy(t.map(b,T));return(C[1]+C[2])/2})}function m(b){var x=r.buildLayerMatrix(b),k=t.merge(n(b,x),i(b,x)),T={},C;t.forEach(["u","d"],function(S){C=S==="u"?x:t.values(x).reverse(),t.forEach(["l","r"],function(R){R==="r"&&(C=t.map(C,function(B){return 
t.values(B).reverse()}));var A=(S==="u"?b.predecessors:b.successors).bind(b),L=l(b,C,k,A),v=u(b,C,L.root,L.align,R==="r");R==="r"&&(v=t.mapValues(v,function(B){return-B})),T[S+R]=v})});var M=d(b,T);return f(T,M),p(T,b.graph().align)}function _(b,x,k){return function(T,C,M){var S=T.node(C),R=T.node(M),A=0,L;if(A+=S.width/2,t.has(S,"labelpos"))switch(S.labelpos.toLowerCase()){case"l":L=-S.width/2;break;case"r":L=S.width/2;break}if(L&&(A+=k?L:-L),L=0,A+=(S.dummy?x:b)/2,A+=(R.dummy?x:b)/2,A+=R.width/2,t.has(R,"labelpos"))switch(R.labelpos.toLowerCase()){case"l":L=R.width/2;break;case"r":L=-R.width/2;break}return L&&(A+=k?L:-L),L=0,A}}function y(b,x){return b.node(x).width}return K3}var Z3,RA;function Mtt(){if(RA)return Z3;RA=1;var t=$e(),e=vn(),r=Att().positionX;Z3=n;function n(a){a=e.asNonCompoundGraph(a),i(a),t.forEach(r(a),function(s,o){a.node(o).x=s})}function i(a){var s=e.buildLayerMatrix(a),o=a.graph().ranksep,l=0;t.forEach(s,function(u){var h=t.max(t.map(u,function(d){return a.node(d).height}));t.forEach(u,function(d){a.node(d).y=l+h/2}),l+=h+o})}return Z3}var Q3,IA;function Ltt(){if(IA)return Q3;IA=1;var t=$e(),e=htt(),r=ftt(),n=ptt(),i=vn().normalizeRanks,a=gtt(),s=vn().removeEmptyRanks,o=ytt(),l=mtt(),u=btt(),h=Stt(),d=Mtt(),f=vn(),p=yi().Graph;Q3=m;function m(W,tt){var K=tt&&tt.debugTiming?f.time:f.notime;K("layout",function(){var it=K(" buildLayoutGraph",function(){return A(W)});K(" runLayout",function(){_(it,K)}),K(" updateInputGraph",function(){y(W,it)})})}function _(W,tt){tt(" makeSpaceForEdgeLabels",function(){L(W)}),tt(" removeSelfEdges",function(){J(W)}),tt(" acyclic",function(){e.run(W)}),tt(" nestingGraph.run",function(){o.run(W)}),tt(" rank",function(){n(f.asNonCompoundGraph(W))}),tt(" injectEdgeLabelProxies",function(){v(W)}),tt(" removeEmptyRanks",function(){s(W)}),tt(" nestingGraph.cleanup",function(){o.cleanup(W)}),tt(" normalizeRanks",function(){i(W)}),tt(" assignRankMinMax",function(){B(W)}),tt(" removeEdgeLabelProxies",function(){w(W)}),tt(" 
normalize.run",function(){r.run(W)}),tt(" parentDummyChains",function(){a(W)}),tt(" addBorderSegments",function(){l(W)}),tt(" order",function(){h(W)}),tt(" insertSelfEdges",function(){Y(W)}),tt(" adjustCoordinateSystem",function(){u.adjust(W)}),tt(" position",function(){d(W)}),tt(" positionSelfEdges",function(){$(W)}),tt(" removeBorderNodes",function(){ct(W)}),tt(" normalize.undo",function(){r.undo(W)}),tt(" fixupEdgeLabelCoords",function(){z(W)}),tt(" undoCoordinateSystem",function(){u.undo(W)}),tt(" translateGraph",function(){D(W)}),tt(" assignNodeIntersects",function(){N(W)}),tt(" reversePoints",function(){X(W)}),tt(" acyclic.undo",function(){e.undo(W)})}function y(W,tt){t.forEach(W.nodes(),function(K){var it=W.node(K),Z=tt.node(K);it&&(it.x=Z.x,it.y=Z.y,tt.children(K).length&&(it.width=Z.width,it.height=Z.height))}),t.forEach(W.edges(),function(K){var it=W.edge(K),Z=tt.edge(K);it.points=Z.points,t.has(Z,"x")&&(it.x=Z.x,it.y=Z.y)}),W.graph().width=tt.graph().width,W.graph().height=tt.graph().height}var b=["nodesep","edgesep","ranksep","marginx","marginy"],x={ranksep:50,edgesep:20,nodesep:50,rankdir:"tb"},k=["acyclicer","ranker","rankdir","align"],T=["width","height"],C={width:0,height:0},M=["minlen","weight","width","height","labeloffset"],S={minlen:1,weight:1,width:0,height:0,labeloffset:10,labelpos:"r"},R=["labelpos"];function A(W){var tt=new p({multigraph:!0,compound:!0}),K=ut(W.graph());return tt.setGraph(t.merge({},x,lt(K,b),t.pick(K,k))),t.forEach(W.nodes(),function(it){var Z=ut(W.node(it));tt.setNode(it,t.defaults(lt(Z,T),C)),tt.setParent(it,W.parent(it))}),t.forEach(W.edges(),function(it){var Z=ut(W.edge(it));tt.setEdge(it,t.merge({},S,lt(Z,M),t.pick(Z,R)))}),tt}function L(W){var tt=W.graph();tt.ranksep/=2,t.forEach(W.edges(),function(K){var it=W.edge(K);it.minlen*=2,it.labelpos.toLowerCase()!=="c"&&(tt.rankdir==="TB"||tt.rankdir==="BT"?it.width+=it.labeloffset:it.height+=it.labeloffset)})}function v(W){t.forEach(W.edges(),function(tt){var 
K=W.edge(tt);if(K.width&&K.height){var it=W.node(tt.v),Z=W.node(tt.w),V={rank:(Z.rank-it.rank)/2+it.rank,e:tt};f.addDummyNode(W,"edge-proxy",V,"_ep")}})}function B(W){var tt=0;t.forEach(W.nodes(),function(K){var it=W.node(K);it.borderTop&&(it.minRank=W.node(it.borderTop).rank,it.maxRank=W.node(it.borderBottom).rank,tt=t.max(tt,it.maxRank))}),W.graph().maxRank=tt}function w(W){t.forEach(W.nodes(),function(tt){var K=W.node(tt);K.dummy==="edge-proxy"&&(W.edge(K.e).labelRank=K.rank,W.removeNode(tt))})}function D(W){var tt=Number.POSITIVE_INFINITY,K=0,it=Number.POSITIVE_INFINITY,Z=0,V=W.graph(),Q=V.marginx||0,q=V.marginy||0;function U(F){var j=F.x,P=F.y,et=F.width,at=F.height;tt=Math.min(tt,j-et/2),K=Math.max(K,j+et/2),it=Math.min(it,P-at/2),Z=Math.max(Z,P+at/2)}t.forEach(W.nodes(),function(F){U(W.node(F))}),t.forEach(W.edges(),function(F){var j=W.edge(F);t.has(j,"x")&&U(j)}),tt-=Q,it-=q,t.forEach(W.nodes(),function(F){var j=W.node(F);j.x-=tt,j.y-=it}),t.forEach(W.edges(),function(F){var j=W.edge(F);t.forEach(j.points,function(P){P.x-=tt,P.y-=it}),t.has(j,"x")&&(j.x-=tt),t.has(j,"y")&&(j.y-=it)}),V.width=K-tt+Q,V.height=Z-it+q}function N(W){t.forEach(W.edges(),function(tt){var K=W.edge(tt),it=W.node(tt.v),Z=W.node(tt.w),V,Q;K.points?(V=K.points[0],Q=K.points[K.points.length-1]):(K.points=[],V=Z,Q=it),K.points.unshift(f.intersectRect(it,V)),K.points.push(f.intersectRect(Z,Q))})}function z(W){t.forEach(W.edges(),function(tt){var K=W.edge(tt);if(t.has(K,"x"))switch((K.labelpos==="l"||K.labelpos==="r")&&(K.width-=K.labeloffset),K.labelpos){case"l":K.x-=K.width/2+K.labeloffset;break;case"r":K.x+=K.width/2+K.labeloffset;break}})}function X(W){t.forEach(W.edges(),function(tt){var K=W.edge(tt);K.reversed&&K.points.reverse()})}function ct(W){t.forEach(W.nodes(),function(tt){if(W.children(tt).length){var 
K=W.node(tt),it=W.node(K.borderTop),Z=W.node(K.borderBottom),V=W.node(t.last(K.borderLeft)),Q=W.node(t.last(K.borderRight));K.width=Math.abs(Q.x-V.x),K.height=Math.abs(Z.y-it.y),K.x=V.x+K.width/2,K.y=it.y+K.height/2}}),t.forEach(W.nodes(),function(tt){W.node(tt).dummy==="border"&&W.removeNode(tt)})}function J(W){t.forEach(W.edges(),function(tt){if(tt.v===tt.w){var K=W.node(tt.v);K.selfEdges||(K.selfEdges=[]),K.selfEdges.push({e:tt,label:W.edge(tt)}),W.removeEdge(tt)}})}function Y(W){var tt=f.buildLayerMatrix(W);t.forEach(tt,function(K){var it=0;t.forEach(K,function(Z,V){var Q=W.node(Z);Q.order=V+it,t.forEach(Q.selfEdges,function(q){f.addDummyNode(W,"selfedge",{width:q.label.width,height:q.label.height,rank:Q.rank,order:V+ ++it,e:q.e,label:q.label},"_se")}),delete Q.selfEdges})})}function $(W){t.forEach(W.nodes(),function(tt){var K=W.node(tt);if(K.dummy==="selfedge"){var it=W.node(K.e.v),Z=it.x+it.width/2,V=it.y,Q=K.x-Z,q=it.height/2;W.setEdge(K.e,K.label),W.removeNode(tt),K.label.points=[{x:Z+2*Q/3,y:V-q},{x:Z+5*Q/6,y:V-q},{x:Z+Q,y:V},{x:Z+5*Q/6,y:V+q},{x:Z+2*Q/3,y:V+q}],K.label.x=K.x,K.label.y=K.y}})}function lt(W,tt){return t.mapValues(t.pick(W,tt),Number)}function ut(W){var tt={};return t.forEach(W,function(K,it){tt[it.toLowerCase()]=K}),tt}return Q3}var J3,NA;function Rtt(){if(NA)return J3;NA=1;var t=$e(),e=vn(),r=yi().Graph;J3={debugOrdering:n};function n(i){var a=e.buildLayerMatrix(i),s=new r({compound:!0,multigraph:!0}).setGraph({});return t.forEach(i.nodes(),function(o){s.setNode(o,{label:o}),s.setParent(o,"layer"+i.node(o).rank)}),t.forEach(i.edges(),function(o){s.setEdge(o.v,o.w,{},o.name)}),t.forEach(a,function(o,l){var u="layer"+l;s.setNode(u,{rank:"same"}),t.reduce(o,function(h,d){return s.setEdge(h,d,{style:"invis"}),d})}),s}return J3}var t4,BA;function Itt(){return BA||(BA=1,t4="0.8.5"),t4}var e4,DA;function OA(){return DA||(DA=1,e4={graphlib:yi(),layout:Ltt(),debug:Rtt(),util:{time:vn().time,notime:vn().notime},version:Itt()}),e4}var Zc=OA();let 
FA=0;const Ntt=function(t,e,r,n,i){const a=function(x){switch(x){case i.db.relationType.AGGREGATION:return"aggregation";case i.db.EXTENSION:return"extension";case i.db.COMPOSITION:return"composition";case i.db.DEPENDENCY:return"dependency";case i.db.LOLLIPOP:return"lollipop"}};e.points=e.points.filter(x=>!Number.isNaN(x.y));const s=e.points,o=Ua().x(function(x){return x.x}).y(function(x){return x.y}).curve(Os),l=t.append("path").attr("d",o(s)).attr("id","edge"+FA).attr("class","relation");let u="";n.arrowMarkerAbsolute&&(u=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,u=u.replace(/\(/g,"\\("),u=u.replace(/\)/g,"\\)")),r.relation.lineType==1&&l.attr("class","relation dashed-line"),r.relation.type1!=="none"&&l.attr("marker-start","url("+u+"#"+a(r.relation.type1)+"Start)"),r.relation.type2!=="none"&&l.attr("marker-end","url("+u+"#"+a(r.relation.type2)+"End)");let h,d;const f=e.points.length;let p=Se.calcLabelPosition(e.points);h=p.x,d=p.y;let m,_,y,b;if(f%2!==0&&f>1){let x=Se.calcCardinalityPosition(r.relation.type1!=="none",e.points,e.points[0]),k=Se.calcCardinalityPosition(r.relation.type2!=="none",e.points,e.points[f-1]);H.debug("cardinality_1_point "+JSON.stringify(x)),H.debug("cardinality_2_point "+JSON.stringify(k)),m=x.x,_=x.y,y=k.x,b=k.y}if(typeof r.title<"u"){const x=t.append("g").attr("class","classLabel"),k=x.append("text").attr("class","label").attr("x",h).attr("y",d).attr("fill","red").attr("text-anchor","middle").text(r.title);window.label=k;const T=k.node().getBBox();x.insert("rect",":first-child").attr("class","box").attr("x",T.x-n.padding/2).attr("y",T.y-n.padding/2).attr("width",T.width+n.padding).attr("height",T.height+n.padding)}H.info("Rendering relation "+JSON.stringify(r)),typeof 
r.relationTitle1<"u"&&r.relationTitle1!=="none"&&t.append("g").attr("class","cardinality").append("text").attr("class","type1").attr("x",m).attr("y",_).attr("fill","black").attr("font-size","6").text(r.relationTitle1),typeof r.relationTitle2<"u"&&r.relationTitle2!=="none"&&t.append("g").attr("class","cardinality").append("text").attr("class","type2").attr("x",y).attr("y",b).attr("fill","black").attr("font-size","6").text(r.relationTitle2),FA++},Btt=function(t,e,r,n){H.debug("Rendering class ",e,r);const i=e.id,a={id:i,label:e.id,width:0,height:0},s=t.append("g").attr("id",n.db.lookUpDomId(i)).attr("class","classGroup");let o;e.link?o=s.append("svg:a").attr("xlink:href",e.link).attr("target",e.linkTarget).append("text").attr("y",r.textHeight+r.padding).attr("x",0):o=s.append("text").attr("y",r.textHeight+r.padding).attr("x",0);let l=!0;e.annotations.forEach(function(C){const M=o.append("tspan").text("\xAB"+C+"\xBB");l||M.attr("dy",r.textHeight),l=!1});let u=e.id;e.type!==void 0&&e.type!==""&&(u+="<"+e.type+">");const h=o.append("tspan").text(u).attr("class","title");l||h.attr("dy",r.textHeight);const d=o.node().getBBox().height,f=s.append("line").attr("x1",0).attr("y1",r.padding+d+r.dividerMargin/2).attr("y2",r.padding+d+r.dividerMargin/2),p=s.append("text").attr("x",r.padding).attr("y",d+r.dividerMargin+r.textHeight).attr("fill","white").attr("class","classText");l=!0,e.members.forEach(function(C){PA(p,C,l,r),l=!1});const m=p.node().getBBox(),_=s.append("line").attr("x1",0).attr("y1",r.padding+d+r.dividerMargin+m.height).attr("y2",r.padding+d+r.dividerMargin+m.height),y=s.append("text").attr("x",r.padding).attr("y",d+2*r.dividerMargin+m.height+r.textHeight).attr("fill","white").attr("class","classText");l=!0,e.methods.forEach(function(C){PA(y,C,l,r),l=!1});const b=s.node().getBBox();var x=" ";e.cssClasses.length>0&&(x=x+e.cssClasses.join(" "));const 
T=s.insert("rect",":first-child").attr("x",0).attr("y",0).attr("width",b.width+2*r.padding).attr("height",b.height+r.padding+.5*r.dividerMargin).attr("class",x).node().getBBox().width;return o.node().childNodes.forEach(function(C){C.setAttribute("x",(T-C.getBBox().width)/2)}),e.tooltip&&o.insert("title").text(e.tooltip),f.attr("x2",T),_.attr("x2",T),a.width=T,a.height=b.height+r.padding+.5*r.dividerMargin,a},z1=function(t){const e=/^(\+|-|~|#)?(\w+)(~\w+~|\[\])?\s+(\w+) *(\*|\$)?$/,r=/^([+|\-|~|#])?(\w+) *\( *(.*)\) *(\*|\$)? *(\w*[~|[\]]*\s*\w*~?)$/;let n=t.match(e),i=t.match(r);return n&&!i?Dtt(n):i?Ott(i):Ftt(t)},Dtt=function(t){let e="",r="";try{let n=t[1]?t[1].trim():"",i=t[2]?t[2].trim():"",a=t[3]?ja(t[3].trim()):"",s=t[4]?t[4].trim():"",o=t[5]?t[5].trim():"";r=n+i+a+" "+s,e=r4(o)}catch{r=t}return{displayText:r,cssStyle:e}},Ott=function(t){let e="",r="";try{let n=t[1]?t[1].trim():"",i=t[2]?t[2].trim():"",a=t[3]?ja(t[3].trim()):"",s=t[4]?t[4].trim():"",o=t[5]?" : "+ja(t[5]).trim():"";r=n+i+"("+a+")"+o,e=r4(s)}catch{r=t}return{displayText:r,cssStyle:e}},Ftt=function(t){let e="",r="",n="",i=t.indexOf("("),a=t.indexOf(")");if(i>1&&a>i&&a<=t.length){let s="",o="",l=t.substring(0,1);l.match(/\w/)?o=t.substring(0,i).trim():(l.match(/\+|-|~|#/)&&(s=l),o=t.substring(1,i).trim());const u=t.substring(i+1,a);t.substring(a+1,1),r=r4(t.substring(a+1,a+2)),e=s+o+"("+ja(u.trim())+")",a<t.length&&(n=t.substring(a+2).trim(),n!==""&&(n=" : "+ja(n),e+=n))}else e=ja(t);return{displayText:e,cssStyle:r}},PA=function(t,e,r,n){let i=z1(e);const a=t.append("tspan").attr("x",n.padding).text(i.displayText);i.cssStyle!==""&&a.attr("style",i.cssStyle),r||a.attr("dy",n.textHeight)},r4=function(t){switch(t){case"*":return"font-style:italic;";case"$":return"text-decoration:underline;";default:return""}},qA={drawClass:Btt,drawEdge:Ntt,parseMember:z1};let n4={};const Y1=20,U1=function(t){const e=Object.entries(n4).find(r=>r[1].label===t);if(e)return 
e[0]},Ptt=function(t){t.append("defs").append("marker").attr("id","extensionStart").attr("class","extension").attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 1,7 L18,13 V 1 Z"),t.append("defs").append("marker").attr("id","extensionEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 1,1 V 13 L18,7 Z"),t.append("defs").append("marker").attr("id","compositionStart").attr("class","extension").attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id","compositionEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id","aggregationStart").attr("class","extension").attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id","aggregationEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id","dependencyStart").attr("class","extension").attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 5,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id","dependencyEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L14,7 L9,1 Z")},qtt={draw:function(t,e,r,n){const i=nt().class;n4={},H.info("Rendering diagram "+t);const a=nt().securityLevel;let s;a==="sandbox"&&(s=St("#i"+e));const 
o=St(a==="sandbox"?s.nodes()[0].contentDocument.body:"body"),l=o.select(`[id='${e}']`);Ptt(l);const u=new cr.Graph({multigraph:!0});u.setGraph({isMultiGraph:!0}),u.setDefaultEdgeLabel(function(){return{}});const h=n.db.getClasses(),d=Object.keys(h);for(let b=0;b<d.length;b++){const x=h[d[b]],k=qA.drawClass(l,x,i,n);n4[k.id]=k,u.setNode(k.id,k),H.info("Org height: "+k.height)}n.db.getRelations().forEach(function(b){H.info("tjoho"+U1(b.id1)+U1(b.id2)+JSON.stringify(b)),u.setEdge(U1(b.id1),U1(b.id2),{relation:b},b.title||"DEFAULT")}),Zc.layout(u),u.nodes().forEach(function(b){typeof b<"u"&&typeof u.node(b)<"u"&&(H.debug("Node "+b+": "+JSON.stringify(u.node(b))),o.select("#"+n.db.lookUpDomId(b)).attr("transform","translate("+(u.node(b).x-u.node(b).width/2)+","+(u.node(b).y-u.node(b).height/2)+" )"))}),u.edges().forEach(function(b){typeof b<"u"&&typeof u.edge(b)<"u"&&(H.debug("Edge "+b.v+" -> "+b.w+": "+JSON.stringify(u.edge(b))),qA.drawEdge(l,u.edge(b),u.edge(b).relation,i,n))});const p=l.node().getBBox(),m=p.width+Y1*2,_=p.height+Y1*2;li(l,_,m,i.useMaxWidth);const y=`${p.x-Y1} ${p.y-Y1} ${m} ${_}`;H.debug(`viewBox ${y}`),l.attr("viewBox",y),bn(n.db,l,e)}},Vtt=(t,e,r,n)=>{e.forEach(i=>{ztt[i](t,r,n)})},ztt={extension:(t,e,r)=>{H.trace("Making markers for ",r),t.append("defs").append("marker").attr("id",e+"-extensionStart").attr("class","marker extension "+e).attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 1,7 L18,13 V 1 Z"),t.append("defs").append("marker").attr("id",e+"-extensionEnd").attr("class","marker extension "+e).attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 1,1 V 13 L18,7 Z")},composition:(t,e)=>{t.append("defs").append("marker").attr("id",e+"-compositionStart").attr("class","marker composition 
"+e).attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id",e+"-compositionEnd").attr("class","marker composition "+e).attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z")},aggregation:(t,e)=>{t.append("defs").append("marker").attr("id",e+"-aggregationStart").attr("class","marker aggregation "+e).attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id",e+"-aggregationEnd").attr("class","marker aggregation "+e).attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L1,7 L9,1 Z")},dependency:(t,e)=>{t.append("defs").append("marker").attr("id",e+"-dependencyStart").attr("class","marker dependency "+e).attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("path").attr("d","M 5,7 L9,13 L1,7 L9,1 Z"),t.append("defs").append("marker").attr("id",e+"-dependencyEnd").attr("class","marker dependency "+e).attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L14,7 L9,1 Z")},lollipop:(t,e)=>{t.append("defs").append("marker").attr("id",e+"-lollipopStart").attr("class","marker lollipop "+e).attr("refX",0).attr("refY",7).attr("markerWidth",190).attr("markerHeight",240).attr("orient","auto").append("circle").attr("stroke","black").attr("fill","white").attr("cx",6).attr("cy",7).attr("r",6)},point:(t,e)=>{t.append("marker").attr("id",e+"-pointEnd").attr("class","marker "+e).attr("viewBox","0 0 10 
10").attr("refX",10).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",12).attr("markerHeight",12).attr("orient","auto").append("path").attr("d","M 0 0 L 10 5 L 0 10 z").attr("class","arrowMarkerPath").style("stroke-width",1).style("stroke-dasharray","1,0"),t.append("marker").attr("id",e+"-pointStart").attr("class","marker "+e).attr("viewBox","0 0 10 10").attr("refX",0).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",12).attr("markerHeight",12).attr("orient","auto").append("path").attr("d","M 0 5 L 10 10 L 10 0 z").attr("class","arrowMarkerPath").style("stroke-width",1).style("stroke-dasharray","1,0")},circle:(t,e)=>{t.append("marker").attr("id",e+"-circleEnd").attr("class","marker "+e).attr("viewBox","0 0 10 10").attr("refX",11).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",11).attr("markerHeight",11).attr("orient","auto").append("circle").attr("cx","5").attr("cy","5").attr("r","5").attr("class","arrowMarkerPath").style("stroke-width",1).style("stroke-dasharray","1,0"),t.append("marker").attr("id",e+"-circleStart").attr("class","marker "+e).attr("viewBox","0 0 10 10").attr("refX",-1).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",11).attr("markerHeight",11).attr("orient","auto").append("circle").attr("cx","5").attr("cy","5").attr("r","5").attr("class","arrowMarkerPath").style("stroke-width",1).style("stroke-dasharray","1,0")},cross:(t,e)=>{t.append("marker").attr("id",e+"-crossEnd").attr("class","marker cross "+e).attr("viewBox","0 0 11 11").attr("refX",12).attr("refY",5.2).attr("markerUnits","userSpaceOnUse").attr("markerWidth",11).attr("markerHeight",11).attr("orient","auto").append("path").attr("d","M 1,1 l 9,9 M 10,1 l -9,9").attr("class","arrowMarkerPath").style("stroke-width",2).style("stroke-dasharray","1,0"),t.append("marker").attr("id",e+"-crossStart").attr("class","marker cross "+e).attr("viewBox","0 0 11 
11").attr("refX",-1).attr("refY",5.2).attr("markerUnits","userSpaceOnUse").attr("markerWidth",11).attr("markerHeight",11).attr("orient","auto").append("path").attr("d","M 1,1 l 9,9 M 10,1 l -9,9").attr("class","arrowMarkerPath").style("stroke-width",2).style("stroke-dasharray","1,0")},barb:(t,e)=>{t.append("defs").append("marker").attr("id",e+"-barbEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",14).attr("markerUnits","strokeWidth").attr("orient","auto").append("path").attr("d","M 19,7 L9,13 L14,7 L9,1 Z")}};function Ytt(t,e){e&&t.attr("style",e)}function Utt(t){const e=St(document.createElementNS("http://www.w3.org/2000/svg","foreignObject")),r=e.append("xhtml:div"),n=t.label,i=t.isNode?"nodeLabel":"edgeLabel";return r.html('<span class="'+i+'" '+(t.labelStyle?'style="'+t.labelStyle+'"':"")+">"+n+"</span>"),Ytt(r,t.labelStyle),r.style("display","inline-block"),r.style("white-space","nowrap"),r.attr("xmlns","http://www.w3.org/1999/xhtml"),e.node()}const xn=(t,e,r,n)=>{let i=t||"";if(typeof i=="object"&&(i=i[0]),Mr(nt().flowchart.htmlLabels)){i=i.replace(/\\n|\n/g,"<br />"),H.info("vertexText"+i);const a={isNode:n,label:w0(i).replace(/fa[lrsb]?:fa-[\w-]+/g,o=>`<i class='${o.replace(":"," ")}'></i>`),labelStyle:e.replace("fill:","color:")};return Utt(a)}else{const a=document.createElementNS("http://www.w3.org/2000/svg","text");a.setAttribute("style",e.replace("color:","fill:"));let s=[];typeof i=="string"?s=i.split(/\\n|\n|<br\s*\/?>/gi):Array.isArray(i)?s=i:s=[];for(let o=0;o<s.length;o++){const l=document.createElementNS("http://www.w3.org/2000/svg","tspan");l.setAttributeNS("http://www.w3.org/XML/1998/namespace","xml:space","preserve"),l.setAttribute("dy","1em"),l.setAttribute("x","0"),r?l.setAttribute("class","title-row"):l.setAttribute("class","row"),l.textContent=s[o].trim(),a.appendChild(l)}return a}},Yr=(t,e,r,n)=>{let i;r?i=r:i="node default";const 
a=t.insert("g").attr("class",i).attr("id",e.domId||e.id),s=a.insert("g").attr("class","label").attr("style",e.labelStyle);let o;typeof e.labelText>"u"?o="":o=typeof e.labelText=="string"?e.labelText:e.labelText[0];const l=s.node().appendChild(xn(ai(w0(o),nt()),e.labelStyle,!1,n));let u=l.getBBox();if(Mr(nt().flowchart.htmlLabels)){const d=l.children[0],f=St(l);u=d.getBoundingClientRect(),f.attr("width",u.width),f.attr("height",u.height)}const h=e.padding/2;return s.attr("transform","translate("+-u.width/2+", "+-u.height/2+")"),{shapeSvg:a,bbox:u,halfPadding:h,label:s}},ur=(t,e)=>{const r=e.node().getBBox();t.width=r.width,t.height=r.height};function ca(t,e,r,n){return t.insert("polygon",":first-child").attr("points",n.map(function(i){return i.x+","+i.y}).join(" ")).attr("class","label-container").attr("transform","translate("+-e/2+","+r/2+")")}let Re={},mi={},VA={};const Wtt=()=>{mi={},VA={},Re={}},W1=(t,e)=>(H.trace("In isDecendant",e," ",t," = ",mi[e].indexOf(t)>=0),mi[e].indexOf(t)>=0),Htt=(t,e)=>(H.info("Decendants of ",e," is ",mi[e]),H.info("Edge is ",t),t.v===e||t.w===e?!1:mi[e]?!!(mi[e].indexOf(t.v)>=0||W1(t.v,e)||W1(t.w,e)||mi[e].indexOf(t.w)>=0):(H.debug("Tilt, ",e,",not in decendants"),!1)),zA=(t,e,r,n)=>{H.warn("Copying children of ",t,"root",n,"data",e.node(t),n);const i=e.children(t)||[];t!==n&&i.push(t),H.warn("Copying (nodes) clusterId",t,"nodes",i),i.forEach(a=>{if(e.children(a).length>0)zA(a,e,r,n);else{const s=e.node(a);H.info("cp ",a," to ",n," with parent ",t),r.setNode(a,s),n!==e.parent(a)&&(H.warn("Setting parent",a,e.parent(a)),r.setParent(a,e.parent(a))),t!==n&&a!==t?(H.debug("Setting parent",a,t),r.setParent(a,t)):(H.info("In copy ",t,"root",n,"data",e.node(t),n),H.debug("Not Setting parent for node=",a,"cluster!==rootId",t!==n,"node!==clusterId",a!==t));const o=e.edges(a);H.debug("Copying Edges",o),o.forEach(l=>{H.info("Edge",l);const u=e.edge(l.v,l.w,l.name);H.info("Edge data",u,n);try{Htt(l,n)?(H.info("Copying as 
",l.v,l.w,u,l.name),r.setEdge(l.v,l.w,u,l.name),H.info("newGraph edges ",r.edges(),r.edge(r.edges()[0]))):H.info("Skipping copy of edge ",l.v,"-->",l.w," rootId: ",n," clusterId:",t)}catch(h){H.error(h)}})}H.debug("Removing node",a),e.removeNode(a)})},YA=(t,e)=>{const r=e.children(t);let n=[].concat(r);for(let i=0;i<r.length;i++)VA[r[i]]=t,n=n.concat(YA(r[i],e));return n},Qc=(t,e)=>{H.trace("Searching",t);const r=e.children(t);if(H.trace("Searching children of id ",t,r),r.length<1)return H.trace("This is a valid node",t),t;for(let n=0;n<r.length;n++){const i=Qc(r[n],e);if(i)return H.trace("Found replacement for",t," => ",i),i}},H1=t=>!Re[t]||!Re[t].externalConnections?t:Re[t]?Re[t].id:t,Gtt=(t,e)=>{if(!t||e>10){H.debug("Opting out, no graph ");return}else H.debug("Opting in, graph ");t.nodes().forEach(function(r){t.children(r).length>0&&(H.warn("Cluster identified",r," Replacement id in edges: ",Qc(r,t)),mi[r]=YA(r,t),Re[r]={id:Qc(r,t),clusterData:t.node(r)})}),t.nodes().forEach(function(r){const n=t.children(r),i=t.edges();n.length>0?(H.debug("Cluster identified",r,mi),i.forEach(a=>{if(a.v!==r&&a.w!==r){const s=W1(a.v,r),o=W1(a.w,r);s^o&&(H.warn("Edge: ",a," leaves cluster ",r),H.warn("Decendants of XXX ",r,": ",mi[r]),Re[r].externalConnections=!0)}})):H.debug("Not a cluster ",r,mi)}),t.edges().forEach(function(r){const n=t.edge(r);H.warn("Edge "+r.v+" -> "+r.w+": "+JSON.stringify(r)),H.warn("Edge "+r.v+" -> "+r.w+": "+JSON.stringify(t.edge(r)));let i=r.v,a=r.w;if(H.warn("Fix XXX",Re,"ids:",r.v,r.w,"Translateing: ",Re[r.v]," --- ",Re[r.w]),Re[r.v]&&Re[r.w]&&Re[r.v]===Re[r.w]){H.warn("Fixing and trixing link to self - removing XXX",r.v,r.w,r.name),H.warn("Fixing and trixing - removing XXX",r.v,r.w,r.name),i=H1(r.v),a=H1(r.w),t.removeEdge(r.v,r.w,r.name);const s=r.w+"---"+r.v;t.setNode(s,{domId:s,id:s,labelStyle:"",labelText:n.label,padding:0,shape:"labelRect",style:""});const 
o=JSON.parse(JSON.stringify(n)),l=JSON.parse(JSON.stringify(n));o.label="",o.arrowTypeEnd="none",l.label="",o.fromCluster=r.v,l.toCluster=r.v,t.setEdge(i,s,o,r.name+"-cyclic-special"),t.setEdge(s,a,l,r.name+"-cyclic-special")}else(Re[r.v]||Re[r.w])&&(H.warn("Fixing and trixing - removing XXX",r.v,r.w,r.name),i=H1(r.v),a=H1(r.w),t.removeEdge(r.v,r.w,r.name),i!==r.v&&(n.fromCluster=r.v),a!==r.w&&(n.toCluster=r.w),H.warn("Fix Replacing with XXX",i,a,r.name),t.setEdge(i,a,n,r.name))}),H.warn("Adjusted Graph",cr.json.write(t)),UA(t,0),H.trace(Re)},UA=(t,e)=>{if(H.warn("extractor - ",e,cr.json.write(t),t.children("D")),e>10){H.error("Bailing out");return}let r=t.nodes(),n=!1;for(let i=0;i<r.length;i++){const a=r[i],s=t.children(a);n=n||s.length>0}if(!n){H.debug("Done, no node has children",t.nodes());return}H.debug("Nodes = ",r,e);for(let i=0;i<r.length;i++){const a=r[i];if(H.debug("Extracting node",a,Re,Re[a]&&!Re[a].externalConnections,!t.parent(a),t.node(a),t.children("D")," Depth ",e),!Re[a])H.debug("Not a cluster",a,e);else if(!Re[a].externalConnections&&t.children(a)&&t.children(a).length>0){H.warn("Cluster without external connections, without a parent and with children",a,e);let o=t.graph().rankdir==="TB"?"LR":"TB";Re[a]&&Re[a].clusterData&&Re[a].clusterData.dir&&(o=Re[a].clusterData.dir,H.warn("Fixing dir",Re[a].clusterData.dir,o));const l=new cr.Graph({multigraph:!0,compound:!0}).setGraph({rankdir:o,nodesep:50,ranksep:50,marginx:8,marginy:8}).setDefaultEdgeLabel(function(){return{}});H.warn("Old graph before copy",cr.json.write(t)),zA(a,t,l,a),t.setNode(a,{clusterNode:!0,id:a,clusterData:Re[a].clusterData,labelText:Re[a].labelText,graph:l}),H.warn("New graph after copy node: (",a,")",cr.json.write(l)),H.debug("Old graph after copy",cr.json.write(t))}else H.warn("Cluster ** ",a," **not meeting the criteria !externalConnections:",!Re[a].externalConnections," no parent: ",!t.parent(a)," children 
",t.children(a)&&t.children(a).length>0,t.children("D"),e),H.debug(Re)}r=t.nodes(),H.warn("New list of nodes",r);for(let i=0;i<r.length;i++){const a=r[i],s=t.node(a);H.warn(" Now next level",a,s),s.clusterNode&&UA(s.graph,e+1)}},WA=(t,e)=>{if(e.length===0)return[];let r=Object.assign(e);return e.forEach(n=>{const i=t.children(n),a=WA(t,i);r=r.concat(a)}),r},jtt=t=>WA(t,t.children());function $tt(t,e){return t.intersect(e)}function HA(t,e,r,n){var i=t.x,a=t.y,s=i-n.x,o=a-n.y,l=Math.sqrt(e*e*o*o+r*r*s*s),u=Math.abs(e*r*s/l);n.x<i&&(u=-u);var h=Math.abs(e*r*o/l);return n.y<a&&(h=-h),{x:i+u,y:a+h}}function Xtt(t,e,r){return HA(t,e,e,r)}function Ktt(t,e,r,n){var i,a,s,o,l,u,h,d,f,p,m,_,y,b,x;if(i=e.y-t.y,s=t.x-e.x,l=e.x*t.y-t.x*e.y,f=i*r.x+s*r.y+l,p=i*n.x+s*n.y+l,!(f!==0&&p!==0&&GA(f,p))&&(a=n.y-r.y,o=r.x-n.x,u=n.x*r.y-r.x*n.y,h=a*t.x+o*t.y+u,d=a*e.x+o*e.y+u,!(h!==0&&d!==0&&GA(h,d))&&(m=i*o-a*s,m!==0)))return _=Math.abs(m/2),y=s*u-o*l,b=y<0?(y-_)/m:(y+_)/m,y=a*l-i*u,x=y<0?(y-_)/m:(y+_)/m,{x:b,y:x}}function GA(t,e){return t*e>0}function Ztt(t,e,r){var n=t.x,i=t.y,a=[],s=Number.POSITIVE_INFINITY,o=Number.POSITIVE_INFINITY;typeof e.forEach=="function"?e.forEach(function(m){s=Math.min(s,m.x),o=Math.min(o,m.y)}):(s=Math.min(s,e.x),o=Math.min(o,e.y));for(var l=n-t.width/2-s,u=i-t.height/2-o,h=0;h<e.length;h++){var d=e[h],f=e[h<e.length-1?h+1:0],p=Ktt(t,r,{x:l+d.x,y:u+d.y},{x:l+f.x,y:u+f.y});p&&a.push(p)}return a.length?(a.length>1&&a.sort(function(m,_){var y=m.x-r.x,b=m.y-r.y,x=Math.sqrt(y*y+b*b),k=_.x-r.x,T=_.y-r.y,C=Math.sqrt(k*k+T*T);return x<C?-1:x===C?0:1}),a[0]):t}const Jc=(t,e)=>{var r=t.x,n=t.y,i=e.x-r,a=e.y-n,s=t.width/2,o=t.height/2,l,u;return Math.abs(a)*s>Math.abs(i)*o?(a<0&&(o=-o),l=a===0?0:o*i/a,u=o):(i<0&&(s=-s),l=s,u=i===0?0:s*a/i),{x:r+l,y:n+u}},ir={node:$tt,circle:Xtt,ellipse:HA,polygon:Ztt,rect:Jc},Qtt=(t,e)=>{const{shapeSvg:r,bbox:n,halfPadding:i}=Yr(t,e,"node "+e.classes,!0);H.info("Classes = ",e.classes);const a=r.insert("rect",":first-child");return 
a.attr("rx",e.rx).attr("ry",e.ry).attr("x",-n.width/2-i).attr("y",-n.height/2-i).attr("width",n.width+e.padding).attr("height",n.height+e.padding),ur(e,a),e.intersect=function(s){return ir.rect(e,s)},r},Jtt=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=i+a,o=[{x:s/2,y:0},{x:s,y:-s/2},{x:s/2,y:-s},{x:0,y:-s/2}];H.info("Question main (Circle)");const l=ca(r,s,s,o);return l.attr("style",e.style),ur(e,l),e.intersect=function(u){return H.warn("Intersect called"),ir.polygon(e,o,u)},r},tet=(t,e)=>{const r=t.insert("g").attr("class","node default").attr("id",e.domId||e.id),n=28,i=[{x:0,y:n/2},{x:n/2,y:0},{x:0,y:-n/2},{x:-n/2,y:0}];return r.insert("polygon",":first-child").attr("points",i.map(function(s){return s.x+","+s.y}).join(" ")).attr("class","state-start").attr("r",7).attr("width",28).attr("height",28),e.width=28,e.height=28,e.intersect=function(s){return ir.circle(e,14,s)},r},eet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=4,a=n.height+e.padding,s=a/i,o=n.width+2*s+e.padding,l=[{x:s,y:0},{x:o-s,y:0},{x:o,y:-a/2},{x:o-s,y:-a},{x:s,y:-a},{x:0,y:-a/2}],u=ca(r,o,a,l);return u.attr("style",e.style),ur(e,u),e.intersect=function(h){return ir.polygon(e,l,h)},r},ret=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:-a/2,y:0},{x:i,y:0},{x:i,y:-a},{x:-a/2,y:-a},{x:0,y:-a/2}];return ca(r,i,a,s).attr("style",e.style),e.width=i+a,e.height=a,e.intersect=function(l){return ir.polygon(e,s,l)},r},net=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:-2*a/6,y:0},{x:i-a/6,y:0},{x:i+2*a/6,y:-a},{x:a/6,y:-a}],o=ca(r,i,a,s);return o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},iet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:2*a/6,y:0},{x:i+a/6,y:0},{x:i-2*a/6,y:-a},{x:-a/6,y:-a}],o=ca(r,i,a,s);return 
o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},aet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:-2*a/6,y:0},{x:i+2*a/6,y:0},{x:i-a/6,y:-a},{x:a/6,y:-a}],o=ca(r,i,a,s);return o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},set=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:a/6,y:0},{x:i-a/6,y:0},{x:i+2*a/6,y:-a},{x:-2*a/6,y:-a}],o=ca(r,i,a,s);return o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},oet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:0,y:0},{x:i+a/2,y:0},{x:i,y:-a/2},{x:i+a/2,y:-a},{x:0,y:-a}],o=ca(r,i,a,s);return o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},cet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.width+e.padding,a=i/2,s=a/(2.5+i/50),o=n.height+s+e.padding,l="M 0,"+s+" a "+a+","+s+" 0,0,0 "+i+" 0 a "+a+","+s+" 0,0,0 "+-i+" 0 l 0,"+o+" a "+a+","+s+" 0,0,0 "+i+" 0 l 0,"+-o,u=r.attr("label-offset-y",s).insert("path",":first-child").attr("style",e.style).attr("d",l).attr("transform","translate("+-i/2+","+-(o/2+s)+")");return ur(e,u),e.intersect=function(h){const d=ir.rect(e,h),f=d.x-e.x;if(a!=0&&(Math.abs(f)<e.width/2||Math.abs(f)==e.width/2&&Math.abs(d.y-e.y)>e.height/2-s)){let p=s*s*(1-f*f/(a*a));p!=0&&(p=Math.sqrt(p)),p=s-p,h.y-e.y>0&&(p=-p),d.y+=p}return d},r},uet=(t,e)=>{const{shapeSvg:r,bbox:n,halfPadding:i}=Yr(t,e,"node "+e.classes,!0);H.trace("Classes = ",e.classes);const a=r.insert("rect",":first-child"),s=n.width+e.padding,o=n.height+e.padding;if(a.attr("class","basic label-container").attr("style",e.style).attr("rx",e.rx).attr("ry",e.ry).attr("x",-n.width/2-i).attr("y",-n.height/2-i).attr("width",s).attr("height",o),e.props){const l=new 
Set(Object.keys(e.props));e.props.borders&&(jA(a,e.props.borders,s,o),l.delete("borders")),l.forEach(u=>{H.warn(`Unknown node property ${u}`)})}return ur(e,a),e.intersect=function(l){return ir.rect(e,l)},r},het=(t,e)=>{const{shapeSvg:r}=Yr(t,e,"label",!0);H.trace("Classes = ",e.classes);const n=r.insert("rect",":first-child"),i=0,a=0;if(n.attr("width",i).attr("height",a),r.attr("class","label edgeLabel"),e.props){const s=new Set(Object.keys(e.props));e.props.borders&&(jA(n,e.props.borders,i,a),s.delete("borders")),s.forEach(o=>{H.warn(`Unknown node property ${o}`)})}return ur(e,n),e.intersect=function(s){return ir.rect(e,s)},r};function jA(t,e,r,n){const i=[],a=o=>{i.push(o),i.push(0)},s=o=>{i.push(0),i.push(o)};e.includes("t")?(H.debug("add top border"),a(r)):s(r),e.includes("r")?(H.debug("add right border"),a(n)):s(n),e.includes("b")?(H.debug("add bottom border"),a(r)):s(r),e.includes("l")?(H.debug("add left border"),a(n)):s(n),t.attr("stroke-dasharray",i.join(" "))}const fet=(t,e)=>{let r;e.classes?r="node "+e.classes:r="node default";const n=t.insert("g").attr("class",r).attr("id",e.domId||e.id),i=n.insert("rect",":first-child"),a=n.insert("line"),s=n.insert("g").attr("class","label"),o=e.labelText.flat?e.labelText.flat():e.labelText;let l="";typeof o=="object"?l=o[0]:l=o,H.info("Label text abc79",l,o,typeof o=="object");const u=s.node().appendChild(xn(l,e.labelStyle,!0,!0));let h={width:0,height:0};if(Mr(nt().flowchart.htmlLabels)){const _=u.children[0],y=St(u);h=_.getBoundingClientRect(),y.attr("width",h.width),y.attr("height",h.height)}H.info("Text 2",o);const d=o.slice(1,o.length);let f=u.getBBox();const p=s.node().appendChild(xn(d.join?d.join("<br/>"):d,e.labelStyle,!0,!0));if(Mr(nt().flowchart.htmlLabels)){const _=p.children[0],y=St(p);h=_.getBoundingClientRect(),y.attr("width",h.width),y.attr("height",h.height)}const m=e.padding/2;return St(p).attr("transform","translate( "+(h.width>f.width?0:(f.width-h.width)/2)+", 
"+(f.height+m+5)+")"),St(u).attr("transform","translate( "+(h.width<f.width?0:-(f.width-h.width)/2)+", "+0+")"),h=s.node().getBBox(),s.attr("transform","translate("+-h.width/2+", "+(-h.height/2-m+3)+")"),i.attr("class","outer title-state").attr("x",-h.width/2-m).attr("y",-h.height/2-m).attr("width",h.width+e.padding).attr("height",h.height+e.padding),a.attr("class","divider").attr("x1",-h.width/2-m).attr("x2",h.width/2+m).attr("y1",-h.height/2-m+f.height+m).attr("y2",-h.height/2-m+f.height+m),ur(e,i),e.intersect=function(_){return ir.rect(e,_)},n},det=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 0,!0),i=n.height+e.padding,a=n.width+i/4+e.padding,s=r.insert("rect",":first-child").attr("style",e.style).attr("rx",i/2).attr("ry",i/2).attr("x",-a/2).attr("y",-i/2).attr("width",a).attr("height",i);return ur(e,s),e.intersect=function(o){return ir.rect(e,o)},r},pet=(t,e)=>{const{shapeSvg:r,bbox:n,halfPadding:i}=Yr(t,e,void 0,!0),a=r.insert("circle",":first-child");return a.attr("style",e.style).attr("rx",e.rx).attr("ry",e.ry).attr("r",n.width/2+i).attr("width",n.width+e.padding).attr("height",n.height+e.padding),H.info("Circle main"),ur(e,a),e.intersect=function(s){return H.info("Circle intersect",e,n.width/2+i,s),ir.circle(e,n.width/2+i,s)},r},get=(t,e)=>{const{shapeSvg:r,bbox:n,halfPadding:i}=Yr(t,e,void 0,!0),a=5,s=r.insert("g",":first-child"),o=s.insert("circle"),l=s.insert("circle");return o.attr("style",e.style).attr("rx",e.rx).attr("ry",e.ry).attr("r",n.width/2+i+a).attr("width",n.width+e.padding+a*2).attr("height",n.height+e.padding+a*2),l.attr("style",e.style).attr("rx",e.rx).attr("ry",e.ry).attr("r",n.width/2+i).attr("width",n.width+e.padding).attr("height",n.height+e.padding),H.info("DoubleCircle main"),ur(e,o),e.intersect=function(u){return H.info("DoubleCircle intersect",e,n.width/2+i+a,u),ir.circle(e,n.width/2+i+a,u)},r},yet=(t,e)=>{const{shapeSvg:r,bbox:n}=Yr(t,e,void 
0,!0),i=n.width+e.padding,a=n.height+e.padding,s=[{x:0,y:0},{x:i,y:0},{x:i,y:-a},{x:0,y:-a},{x:0,y:0},{x:-8,y:0},{x:i+8,y:0},{x:i+8,y:-a},{x:-8,y:-a},{x:-8,y:0}],o=ca(r,i,a,s);return o.attr("style",e.style),ur(e,o),e.intersect=function(l){return ir.polygon(e,s,l)},r},met=(t,e)=>{const r=t.insert("g").attr("class","node default").attr("id",e.domId||e.id),n=r.insert("circle",":first-child");return n.attr("class","state-start").attr("r",7).attr("width",14).attr("height",14),ur(e,n),e.intersect=function(i){return ir.circle(e,7,i)},r},$A=(t,e,r)=>{const n=t.insert("g").attr("class","node default").attr("id",e.domId||e.id);let i=70,a=10;r==="LR"&&(i=10,a=70);const s=n.append("rect").attr("x",-1*i/2).attr("y",-1*a/2).attr("width",i).attr("height",a).attr("class","fork-join");return ur(e,s),e.height=e.height+e.padding/2,e.width=e.width+e.padding/2,e.intersect=function(o){return ir.rect(e,o)},n},XA={question:Jtt,rect:uet,labelRect:het,rectWithTitle:fet,choice:tet,circle:pet,doublecircle:get,stadium:det,hexagon:eet,rect_left_inv_arrow:ret,lean_right:net,lean_left:iet,trapezoid:aet,inv_trapezoid:set,rect_right_inv_arrow:oet,cylinder:cet,start:met,end:(t,e)=>{const r=t.insert("g").attr("class","node default").attr("id",e.domId||e.id),n=r.insert("circle",":first-child"),i=r.insert("circle",":first-child");return i.attr("class","state-start").attr("r",7).attr("width",14).attr("height",14),n.attr("class","state-end").attr("r",5).attr("width",10).attr("height",10),ur(e,i),e.intersect=function(a){return ir.circle(e,7,a)},r},note:Qtt,subroutine:yet,fork:$A,join:$A,class_box:(t,e)=>{const r=e.padding/2,n=4,i=8;let a;e.classes?a="node "+e.classes:a="node default";const s=t.insert("g").attr("class",a).attr("id",e.domId||e.id),o=s.insert("rect",":first-child"),l=s.insert("line"),u=s.insert("line");let h=0,d=n;const f=s.insert("g").attr("class","label");let p=0;const 
m=e.classData.annotations&&e.classData.annotations[0],_=e.classData.annotations[0]?"\xAB"+e.classData.annotations[0]+"\xBB":"",y=f.node().appendChild(xn(_,e.labelStyle,!0,!0));let b=y.getBBox();if(Mr(nt().flowchart.htmlLabels)){const R=y.children[0],A=St(y);b=R.getBoundingClientRect(),A.attr("width",b.width),A.attr("height",b.height)}e.classData.annotations[0]&&(d+=b.height+n,h+=b.width);let x=e.classData.id;e.classData.type!==void 0&&e.classData.type!==""&&(nt().flowchart.htmlLabels?x+="<"+e.classData.type+">":x+="<"+e.classData.type+">");const k=f.node().appendChild(xn(x,e.labelStyle,!0,!0));St(k).attr("class","classTitle");let T=k.getBBox();if(Mr(nt().flowchart.htmlLabels)){const R=k.children[0],A=St(k);T=R.getBoundingClientRect(),A.attr("width",T.width),A.attr("height",T.height)}d+=T.height+n,T.width>h&&(h=T.width);const C=[];e.classData.members.forEach(R=>{const A=z1(R);let L=A.displayText;nt().flowchart.htmlLabels&&(L=L.replace(/</g,"<").replace(/>/g,">"));const v=f.node().appendChild(xn(L,A.cssStyle?A.cssStyle:e.labelStyle,!0,!0));let B=v.getBBox();if(Mr(nt().flowchart.htmlLabels)){const w=v.children[0],D=St(v);B=w.getBoundingClientRect(),D.attr("width",B.width),D.attr("height",B.height)}B.width>h&&(h=B.width),d+=B.height+n,C.push(v)}),d+=i;const M=[];if(e.classData.methods.forEach(R=>{const A=z1(R);let L=A.displayText;nt().flowchart.htmlLabels&&(L=L.replace(/</g,"<").replace(/>/g,">"));const v=f.node().appendChild(xn(L,A.cssStyle?A.cssStyle:e.labelStyle,!0,!0));let B=v.getBBox();if(Mr(nt().flowchart.htmlLabels)){const w=v.children[0],D=St(v);B=w.getBoundingClientRect(),D.attr("width",B.width),D.attr("height",B.height)}B.width>h&&(h=B.width),d+=B.height+n,M.push(v)}),d+=i,m){let R=(h-b.width)/2;St(y).attr("transform","translate( "+(-1*h/2+R)+", "+-1*d/2+")"),p=b.height+n}let S=(h-T.width)/2;return St(k).attr("transform","translate( "+(-1*h/2+S)+", 
"+(-1*d/2+p)+")"),p+=T.height+n,l.attr("class","divider").attr("x1",-h/2-r).attr("x2",h/2+r).attr("y1",-d/2-r+i+p).attr("y2",-d/2-r+i+p),p+=i,C.forEach(R=>{St(R).attr("transform","translate( "+-h/2+", "+(-1*d/2+p+i/2)+")"),p+=T.height+n}),p+=i,u.attr("class","divider").attr("x1",-h/2-r).attr("x2",h/2+r).attr("y1",-d/2-r+i+p).attr("y2",-d/2-r+i+p),p+=i,M.forEach(R=>{St(R).attr("transform","translate( "+-h/2+", "+(-1*d/2+p)+")"),p+=T.height+n}),o.attr("class","outer title-state").attr("x",-h/2-r).attr("y",-(d/2)-r).attr("width",h+e.padding).attr("height",d+e.padding),ur(e,o),e.intersect=function(R){return ir.rect(e,R)},s}};let nl={};const bet=(t,e,r)=>{let n,i;if(e.link){let a;nt().securityLevel==="sandbox"?a="_top":e.linkTarget&&(a=e.linkTarget||"_blank"),n=t.insert("svg:a").attr("xlink:href",e.link).attr("target",a),i=XA[e.shape](n,e,r)}else i=XA[e.shape](t,e,r),n=i;e.tooltip&&i.attr("title",e.tooltip),e.class&&i.attr("class","node default "+e.class),nl[e.id]=n,e.haveCallback&&nl[e.id].attr("class",nl[e.id].attr("class")+" clickable")},_et=(t,e)=>{nl[e.id]=t},vet=()=>{nl={}},KA=t=>{const e=nl[t.id];H.trace("Transforming node",t.diff,t,"translate("+(t.x-t.width/2-5)+", "+t.width/2+")");const r=8,n=t.diff||0;return t.clusterNode?e.attr("transform","translate("+(t.x+n-t.width/2)+", "+(t.y-t.height/2-r)+")"):e.attr("transform","translate("+t.x+", "+t.y+")"),n},xet={rect:(t,e)=>{H.trace("Creating subgraph rect for ",e.id,e);const r=t.insert("g").attr("class","cluster"+(e.class?" 
"+e.class:"")).attr("id",e.id),n=r.insert("rect",":first-child"),i=r.insert("g").attr("class","cluster-label"),a=i.node().appendChild(xn(e.labelText,e.labelStyle,void 0,!0));let s=a.getBBox();if(Mr(nt().flowchart.htmlLabels)){const d=a.children[0],f=St(a);s=d.getBoundingClientRect(),f.attr("width",s.width),f.attr("height",s.height)}const o=0*e.padding,l=o/2,u=e.width<=s.width+o?s.width+o:e.width;e.width<=s.width+o?e.diff=(s.width-e.width)/2-e.padding/2:e.diff=-e.padding/2,H.trace("Data ",e,JSON.stringify(e)),n.attr("style",e.style).attr("rx",e.rx).attr("ry",e.ry).attr("x",e.x-u/2).attr("y",e.y-e.height/2-l).attr("width",u).attr("height",e.height+o),i.attr("transform","translate("+(e.x-s.width/2)+", "+(e.y-e.height/2+e.padding/3)+")");const h=n.node().getBBox();return e.width=h.width,e.height=h.height,e.intersect=function(d){return Jc(e,d)},r},roundedWithTitle:(t,e)=>{const r=t.insert("g").attr("class",e.classes).attr("id",e.id),n=r.insert("rect",":first-child"),i=r.insert("g").attr("class","cluster-label"),a=r.append("rect"),s=i.node().appendChild(xn(e.labelText,e.labelStyle,void 0,!0));let o=s.getBBox();if(Mr(nt().flowchart.htmlLabels)){const f=s.children[0],p=St(s);o=f.getBoundingClientRect(),p.attr("width",o.width),p.attr("height",o.height)}o=s.getBBox();const l=0*e.padding,u=l/2,h=e.width<=o.width+e.padding?o.width+e.padding:e.width;e.width<=o.width+e.padding?e.diff=(o.width+e.padding*0-e.width)/2:e.diff=-e.padding/2,n.attr("class","outer").attr("x",e.x-h/2-u).attr("y",e.y-e.height/2-u).attr("width",h+l).attr("height",e.height+l),a.attr("class","inner").attr("x",e.x-h/2-u).attr("y",e.y-e.height/2-u+o.height-1).attr("width",h+l).attr("height",e.height+l-o.height-3),i.attr("transform","translate("+(e.x-o.width/2)+", "+(e.y-e.height/2-e.padding/3+(Mr(nt().flowchart.htmlLabels)?5:3))+")");const d=n.node().getBBox();return e.height=d.height,e.intersect=function(f){return Jc(e,f)},r},noteGroup:(t,e)=>{const 
r=t.insert("g").attr("class","note-cluster").attr("id",e.id),n=r.insert("rect",":first-child"),i=0*e.padding,a=i/2;n.attr("rx",e.rx).attr("ry",e.ry).attr("x",e.x-e.width/2-a).attr("y",e.y-e.height/2-a).attr("width",e.width+i).attr("height",e.height+i).attr("fill","none");const s=n.node().getBBox();return e.width=s.width,e.height=s.height,e.intersect=function(o){return Jc(e,o)},r},divider:(t,e)=>{const r=t.insert("g").attr("class",e.classes).attr("id",e.id),n=r.insert("rect",":first-child"),i=0*e.padding,a=i/2;n.attr("class","divider").attr("x",e.x-e.width/2-a).attr("y",e.y-e.height/2).attr("width",e.width+i).attr("height",e.height+i);const s=n.node().getBBox();return e.width=s.width,e.height=s.height,e.diff=-e.padding/2,e.intersect=function(o){return Jc(e,o)},r}};let ZA={};const ket=(t,e)=>{H.trace("Inserting cluster");const r=e.shape||"rect";ZA[e.id]=xet[r](t,e)},wet=()=>{ZA={}};let G1={},Nr={};const Tet=()=>{G1={},Nr={}},Eet=(t,e)=>{const r=xn(e.label,e.labelStyle),n=t.insert("g").attr("class","edgeLabel"),i=n.insert("g").attr("class","label");i.node().appendChild(r);let a=r.getBBox();if(Mr(nt().flowchart.htmlLabels)){const o=r.children[0],l=St(r);a=o.getBoundingClientRect(),l.attr("width",a.width),l.attr("height",a.height)}i.attr("transform","translate("+-a.width/2+", "+-a.height/2+")"),G1[e.id]=n,e.width=a.width,e.height=a.height;let s;if(e.startLabelLeft){const o=xn(e.startLabelLeft,e.labelStyle),l=t.insert("g").attr("class","edgeTerminals"),u=l.insert("g").attr("class","inner");s=u.node().appendChild(o);const h=o.getBBox();u.attr("transform","translate("+-h.width/2+", "+-h.height/2+")"),Nr[e.id]||(Nr[e.id]={}),Nr[e.id].startLeft=l,j1(s,e.startLabelLeft)}if(e.startLabelRight){const o=xn(e.startLabelRight,e.labelStyle),l=t.insert("g").attr("class","edgeTerminals"),u=l.insert("g").attr("class","inner");s=l.node().appendChild(o),u.node().appendChild(o);const h=o.getBBox();u.attr("transform","translate("+-h.width/2+", 
"+-h.height/2+")"),Nr[e.id]||(Nr[e.id]={}),Nr[e.id].startRight=l,j1(s,e.startLabelRight)}if(e.endLabelLeft){const o=xn(e.endLabelLeft,e.labelStyle),l=t.insert("g").attr("class","edgeTerminals"),u=l.insert("g").attr("class","inner");s=u.node().appendChild(o);const h=o.getBBox();u.attr("transform","translate("+-h.width/2+", "+-h.height/2+")"),l.node().appendChild(o),Nr[e.id]||(Nr[e.id]={}),Nr[e.id].endLeft=l,j1(s,e.endLabelLeft)}if(e.endLabelRight){const o=xn(e.endLabelRight,e.labelStyle),l=t.insert("g").attr("class","edgeTerminals"),u=l.insert("g").attr("class","inner");s=u.node().appendChild(o);const h=o.getBBox();u.attr("transform","translate("+-h.width/2+", "+-h.height/2+")"),l.node().appendChild(o),Nr[e.id]||(Nr[e.id]={}),Nr[e.id].endRight=l,j1(s,e.endLabelRight)}};function j1(t,e){nt().flowchart.htmlLabels&&t&&(t.style.width=e.length*9+"px",t.style.height="12px")}const Cet=(t,e)=>{H.info("Moving label abc78 ",t.id,t.label,G1[t.id]);let r=e.updatedPath?e.updatedPath:e.originalPath;if(t.label){const n=G1[t.id];let i=t.x,a=t.y;if(r){const s=Se.calcLabelPosition(r);H.info("Moving label from (",i,",",a,") to (",s.x,",",s.y,") abc78")}n.attr("transform","translate("+i+", "+a+")")}if(t.startLabelLeft){const n=Nr[t.id].startLeft;let i=t.x,a=t.y;if(r){const s=Se.calcTerminalLabelPosition(t.arrowTypeStart?10:0,"start_left",r);i=s.x,a=s.y}n.attr("transform","translate("+i+", "+a+")")}if(t.startLabelRight){const n=Nr[t.id].startRight;let i=t.x,a=t.y;if(r){const s=Se.calcTerminalLabelPosition(t.arrowTypeStart?10:0,"start_right",r);i=s.x,a=s.y}n.attr("transform","translate("+i+", "+a+")")}if(t.endLabelLeft){const n=Nr[t.id].endLeft;let i=t.x,a=t.y;if(r){const s=Se.calcTerminalLabelPosition(t.arrowTypeEnd?10:0,"end_left",r);i=s.x,a=s.y}n.attr("transform","translate("+i+", "+a+")")}if(t.endLabelRight){const n=Nr[t.id].endRight;let i=t.x,a=t.y;if(r){const s=Se.calcTerminalLabelPosition(t.arrowTypeEnd?10:0,"end_right",r);i=s.x,a=s.y}n.attr("transform","translate("+i+", 
"+a+")")}},Aet=(t,e)=>{const r=t.x,n=t.y,i=Math.abs(e.x-r),a=Math.abs(e.y-n),s=t.width/2,o=t.height/2;return i>=s||a>=o},Met=(t,e,r)=>{H.warn(`intersection calc abc89: - outsidePoint: ${JSON.stringify(e)} - insidePoint : ${JSON.stringify(r)} - node : x:${t.x} y:${t.y} w:${t.width} h:${t.height}`);const n=t.x,i=t.y,a=Math.abs(n-r.x),s=t.width/2;let o=r.x<e.x?s-a:s+a;const l=t.height/2,u=Math.abs(e.y-r.y),h=Math.abs(e.x-r.x);if(Math.abs(i-e.y)*s>Math.abs(n-e.x)*l){let d=r.y<e.y?e.y-l-i:i-l-e.y;o=h*d/u;const f={x:r.x<e.x?r.x+o:r.x-h+o,y:r.y<e.y?r.y+u-d:r.y-u+d};return o===0&&(f.x=e.x,f.y=e.y),h===0&&(f.x=e.x),u===0&&(f.y=e.y),H.warn(`abc89 topp/bott calc, Q ${u}, q ${d}, R ${h}, r ${o}`,f),f}else{r.x<e.x?o=e.x-s-n:o=n-s-e.x;let d=u*o/h,f=r.x<e.x?r.x+h-o:r.x-h+o,p=r.y<e.y?r.y+d:r.y-d;return H.warn(`sides calc abc89, Q ${u}, q ${d}, R ${h}, r ${o}`,{_x:f,_y:p}),o===0&&(f=e.x,p=e.y),h===0&&(f=e.x),u===0&&(p=e.y),{x:f,y:p}}},QA=(t,e)=>{H.warn("abc88 cutPathAtIntersect",t,e);let r=[],n=t[0],i=!1;return t.forEach(a=>{if(H.info("abc88 checking point",a,e),!Aet(e,a)&&!i){const s=Met(e,n,a);H.warn("abc88 inside",a,n,s),H.warn("abc88 intersection",s);let o=!1;r.forEach(l=>{o=o||l.x===s.x&&l.y===s.y}),r.find(l=>l.x===s.x&&l.y===s.y)?H.warn("abc88 no intersect",s,r):r.push(s),i=!0}else H.warn("abc88 outside",a,n),n=a,i||r.push(a)}),H.warn("abc88 returning points",r),r},Let=function(t,e,r,n,i,a){let s=r.points,o=!1;const l=a.node(e.v);var u=a.node(e.w);H.info("abc88 InsertEdge: ",r),u.intersect&&l.intersect&&(s=s.slice(1,r.points.length-1),s.unshift(l.intersect(s[0])),H.info("Last point",s[s.length-1],u,u.intersect(s[s.length-1])),s.push(u.intersect(s[s.length-1]))),r.toCluster&&(H.info("to cluster abc88",n[r.toCluster]),s=QA(r.points,n[r.toCluster].node),o=!0),r.fromCluster&&(H.info("from cluster abc88",n[r.fromCluster]),s=QA(s.reverse(),n[r.fromCluster].node).reverse(),o=!0);const h=s.filter(b=>!Number.isNaN(b.y));let d;i==="graph"||i==="flowchart"?d=r.curve||Os:d=Os;const 
f=Ua().x(function(b){return b.x}).y(function(b){return b.y}).curve(d);let p;switch(r.thickness){case"normal":p="edge-thickness-normal";break;case"thick":p="edge-thickness-thick";break;case"invisible":p="edge-thickness-thick";break;default:p=""}switch(r.pattern){case"solid":p+=" edge-pattern-solid";break;case"dotted":p+=" edge-pattern-dotted";break;case"dashed":p+=" edge-pattern-dashed";break}const m=t.append("path").attr("d",f(h)).attr("id",r.id).attr("class"," "+p+(r.classes?" "+r.classes:"")).attr("style",r.style);let _="";switch((nt().flowchart.arrowMarkerAbsolute||nt().state.arrowMarkerAbsolute)&&(_=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,_=_.replace(/\(/g,"\\("),_=_.replace(/\)/g,"\\)")),H.info("arrowTypeStart",r.arrowTypeStart),H.info("arrowTypeEnd",r.arrowTypeEnd),r.arrowTypeStart){case"arrow_cross":m.attr("marker-start","url("+_+"#"+i+"-crossStart)");break;case"arrow_point":m.attr("marker-start","url("+_+"#"+i+"-pointStart)");break;case"arrow_barb":m.attr("marker-start","url("+_+"#"+i+"-barbStart)");break;case"arrow_circle":m.attr("marker-start","url("+_+"#"+i+"-circleStart)");break;case"aggregation":m.attr("marker-start","url("+_+"#"+i+"-aggregationStart)");break;case"extension":m.attr("marker-start","url("+_+"#"+i+"-extensionStart)");break;case"composition":m.attr("marker-start","url("+_+"#"+i+"-compositionStart)");break;case"dependency":m.attr("marker-start","url("+_+"#"+i+"-dependencyStart)");break;case"lollipop":m.attr("marker-start","url("+_+"#"+i+"-lollipopStart)");break}switch(r.arrowTypeEnd){case"arrow_cross":m.attr("marker-end","url("+_+"#"+i+"-crossEnd)");break;case"arrow_point":m.attr("marker-end","url("+_+"#"+i+"-pointEnd)");break;case"arrow_barb":m.attr("marker-end","url("+_+"#"+i+"-barbEnd)");break;case"arrow_circle":m.attr("marker-end","url("+_+"#"+i+"-circleEnd)");break;case"aggregation":m.attr("marker-end","url("+_+"#"+i+"-aggregationEnd)");break;case"extension":m.attr("marker-end",
"url("+_+"#"+i+"-extensionEnd)");break;case"composition":m.attr("marker-end","url("+_+"#"+i+"-compositionEnd)");break;case"dependency":m.attr("marker-end","url("+_+"#"+i+"-dependencyEnd)");break;case"lollipop":m.attr("marker-end","url("+_+"#"+i+"-lollipopEnd)");break}let y={};return o&&(y.updatedPath=s),y.originalPath=r.points,y},JA=(t,e,r,n)=>{H.info("Graph in recursive render: XXX",cr.json.write(e),n);const i=e.graph().rankdir;H.trace("Dir in recursive render - dir:",i);const a=t.insert("g").attr("class","root");e.nodes()?H.info("Recursive render XXX",e.nodes()):H.info("No nodes found for",e),e.edges().length>0&&H.trace("Recursive edges",e.edge(e.edges()[0]));const s=a.insert("g").attr("class","clusters"),o=a.insert("g").attr("class","edgePaths"),l=a.insert("g").attr("class","edgeLabels"),u=a.insert("g").attr("class","nodes");e.nodes().forEach(function(d){const f=e.node(d);if(typeof n<"u"){const p=JSON.parse(JSON.stringify(n.clusterData));H.info("Setting data for cluster XXX (",d,") ",p,n),e.setNode(n.id,p),e.parent(d)||(H.trace("Setting parent",d,n.id),e.setParent(d,n.id,p))}if(H.info("(Insert) Node XXX"+d+": "+JSON.stringify(e.node(d))),f&&f.clusterNode){H.info("Cluster identified",d,f.width,e.node(d));const p=JA(u,f.graph,r,e.node(d)),m=p.elem;ur(f,m),f.diff=p.diff||0,H.info("Node bounds (abc123)",d,f,f.width,f.x,f.y),_et(m,f),H.warn("Recursive render complete ",m,f)}else e.children(d).length>0?(H.info("Cluster - the non recursive path XXX",d,f.id,f,e),H.info(Qc(f.id,e)),Re[f.id]={id:Qc(f.id,e),node:f}):(H.info("Node - the non recursive path",d,f.id,f),bet(u,e.node(d),i))}),e.edges().forEach(function(d){const f=e.edge(d.v,d.w,d.name);H.info("Edge "+d.v+" -> "+d.w+": "+JSON.stringify(d)),H.info("Edge "+d.v+" -> "+d.w+": ",d," ",JSON.stringify(e.edge(d))),H.info("Fix",Re,"ids:",d.v,d.w,"Translateing: ",Re[d.v],Re[d.w]),Eet(l,f)}),e.edges().forEach(function(d){H.info("Edge "+d.v+" -> "+d.w+": 
"+JSON.stringify(d))}),H.info("#############################################"),H.info("### Layout ###"),H.info("#############################################"),H.info(e),Zc.layout(e),H.info("Graph after layout:",cr.json.write(e));let h=0;return jtt(e).forEach(function(d){const f=e.node(d);H.info("Position "+d+": "+JSON.stringify(e.node(d))),H.info("Position "+d+": ("+f.x,","+f.y,") width: ",f.width," height: ",f.height),f&&f.clusterNode?KA(f):e.children(d).length>0?(ket(s,f),Re[f.id].node=f):KA(f)}),e.edges().forEach(function(d){const f=e.edge(d);H.info("Edge "+d.v+" -> "+d.w+": "+JSON.stringify(f),f);const p=Let(o,d,f,Re,r,e);Cet(f,p)}),e.nodes().forEach(function(d){const f=e.node(d);H.info(d,f.type,f.diff),f.type==="group"&&(h=f.diff)}),{elem:a,diff:h}},i4=(t,e,r,n,i)=>{Vtt(t,r,n,i),vet(),Tet(),wet(),Wtt(),H.warn("Graph at first:",cr.json.write(e)),Gtt(e),H.warn("Graph after:",cr.json.write(e)),JA(t,e,n)},Ret=t=>pe.sanitizeText(t,nt()),Iet=function(t,e,r,n){const i=Object.keys(t);H.info("keys:",i),H.info(t),i.forEach(function(a){const s=t[a];let o="";s.cssClasses.length>0&&(o=o+" "+s.cssClasses.join(" "));const l={labelStyle:""};let u=s.text!==void 0?s.text:s.id,h=0,d="";switch(s.type){case"class":d="class_box";break;default:d="class_box"}e.setNode(s.id,{labelStyle:l.labelStyle,shape:d,labelText:Ret(u),classData:s,rx:h,ry:h,class:o,style:l.style,id:s.id,domId:s.domId,tooltip:n.db.getTooltip(s.id)||"",haveCallback:s.haveCallback,link:s.link,width:s.type==="group"?500:void 0,type:s.type,padding:nt().flowchart.padding}),H.info("setNode",{labelStyle:l.labelStyle,shape:d,labelText:u,rx:h,ry:h,class:o,style:l.style,id:s.id,width:s.type==="group"?500:void 0,type:s.type,padding:nt().flowchart.padding})})},Net=function(t,e){const r=nt().flowchart;let n=0;t.forEach(function(i){n++;const 
a={};a.classes="relation",a.pattern=i.relation.lineType==1?"dashed":"solid",a.id="id"+n,i.type==="arrow_open"?a.arrowhead="none":a.arrowhead="normal",H.info(a,i),a.startLabelRight=i.relationTitle1==="none"?"":i.relationTitle1,a.endLabelLeft=i.relationTitle2==="none"?"":i.relationTitle2,a.arrowTypeStart=tM(i.relation.type1),a.arrowTypeEnd=tM(i.relation.type2);let s="",o="";if(typeof i.style<"u"){const l=Ka(i.style);s=l.style,o=l.labelStyle}else s="fill:none";a.style=s,a.labelStyle=o,typeof i.interpolate<"u"?a.curve=Ni(i.interpolate,yn):typeof t.defaultInterpolate<"u"?a.curve=Ni(t.defaultInterpolate,yn):a.curve=Ni(r.curve,yn),i.text=i.title,typeof i.text>"u"?typeof i.style<"u"&&(a.arrowheadStyle="fill: #333"):(a.arrowheadStyle="fill: #333",a.labelpos="c",nt().flowchart.htmlLabels?(a.labelType="html",a.label='<span class="edgeLabel">'+i.text+"</span>"):(a.labelType="text",a.label=i.text.replace(pe.lineBreakRegex,` -`),typeof i.style>"u"&&(a.style=a.style||"stroke: #333; stroke-width: 1.5px;fill:none"),a.labelStyle=a.labelStyle.replace("color:","fill:"))),e.setEdge(i.id1,i.id2,a,n)})},Bet=function(t){Object.keys(t).forEach(function(r){t[r]})},Det=function(t,e,r,n){H.info("Drawing class - ",e);const i=nt().flowchart,a=nt().securityLevel;H.info("config:",i);const s=i.nodeSpacing||50,o=i.rankSpacing||50,l=new cr.Graph({multigraph:!0,compound:!0}).setGraph({rankdir:n.db.getDirection(),nodesep:s,ranksep:o,marginx:8,marginy:8}).setDefaultEdgeLabel(function(){return{}}),u=n.db.getClasses(),h=n.db.getRelations();H.info(h),Iet(u,l,e,n),Net(h,l);let d;a==="sandbox"&&(d=St("#i"+e));const f=St(a==="sandbox"?d.nodes()[0].contentDocument.body:"body"),p=f.select(`[id="${e}"]`),m=f.select("#"+e+" g");if(i4(m,l,["aggregation","extension","composition","dependency","lollipop"],"classDiagram",e),i1(l,p,i.diagramPadding,i.useMaxWidth),!i.htmlLabels){const _=a==="sandbox"?d.nodes()[0].contentDocument:document,y=_.querySelectorAll('[id="'+e+'"] .edgeLabel .label');for(let 
b=0;b<y.length;b++){const x=y[b],k=x.getBBox(),T=_.createElementNS("http://www.w3.org/2000/svg","rect");T.setAttribute("rx",0),T.setAttribute("ry",0),T.setAttribute("width",k.width),T.setAttribute("height",k.height),x.insertBefore(T,x.firstChild)}}bn(n.db,p,e)};function tM(t){let e;switch(t){case 0:e="aggregation";break;case 1:e="extension";break;case 2:e="composition";break;case 3:e="dependency";break;case 4:e="lollipop";break;default:e="none"}return e}const Oet={setConf:Bet,draw:Det};var a4=function(){var t=function(A,L,v,B){for(v=v||{},B=A.length;B--;v[A[B]]=L);return v},e=[1,2],r=[1,5],n=[6,9,11,23,25,27,29,30,31,49],i=[1,17],a=[1,18],s=[1,19],o=[1,20],l=[1,21],u=[1,22],h=[1,25],d=[1,30],f=[1,31],p=[1,32],m=[1,33],_=[6,9,11,15,20,23,25,27,29,30,31,42,43,44,45,49],y=[1,45],b=[30,31,46,47],x=[4,6,9,11,23,25,27,29,30,31,49],k=[42,43,44,45],T=[22,37],C=[1,64],M={trace:function(){},yy:{},symbols_:{error:2,start:3,ER_DIAGRAM:4,document:5,EOF:6,directive:7,line:8,SPACE:9,statement:10,NEWLINE:11,openDirective:12,typeDirective:13,closeDirective:14,":":15,argDirective:16,entityName:17,relSpec:18,role:19,BLOCK_START:20,attributes:21,BLOCK_STOP:22,title:23,title_value:24,acc_title:25,acc_title_value:26,acc_descr:27,acc_descr_value:28,acc_descr_multiline_value:29,ALPHANUM:30,ENTITY_NAME:31,attribute:32,attributeType:33,attributeName:34,attributeKeyType:35,attributeComment:36,ATTRIBUTE_WORD:37,ATTRIBUTE_KEY:38,COMMENT:39,cardinality:40,relType:41,ZERO_OR_ONE:42,ZERO_OR_MORE:43,ONE_OR_MORE:44,ONLY_ONE:45,NON_IDENTIFYING:46,IDENTIFYING:47,WORD:48,open_directive:49,type_directive:50,arg_directive:51,close_directive:52,$accept:0,$end:1},terminals_:{2:"error",4:"ER_DIAGRAM",6:"EOF",9:"SPACE",11:"NEWLINE",15:":",20:"BLOCK_START",22:"BLOCK_STOP",23:"title",24:"title_value",25:"acc_title",26:"acc_title_value",27:"acc_descr",28:"acc_descr_value",29:"acc_descr_multiline_value",30:"ALPHANUM",31:"ENTITY_NAME",37:"ATTRIBUTE_WORD",38:"ATTRIBUTE_KEY",39:"COMMENT",42:"ZERO_OR_ONE",43:"ZERO_O
R_MORE",44:"ONE_OR_MORE",45:"ONLY_ONE",46:"NON_IDENTIFYING",47:"IDENTIFYING",48:"WORD",49:"open_directive",50:"type_directive",51:"arg_directive",52:"close_directive"},productions_:[0,[3,3],[3,2],[5,0],[5,2],[8,2],[8,1],[8,1],[8,1],[7,4],[7,6],[10,1],[10,5],[10,4],[10,3],[10,1],[10,2],[10,2],[10,2],[10,1],[17,1],[17,1],[21,1],[21,2],[32,2],[32,3],[32,3],[32,4],[33,1],[34,1],[35,1],[36,1],[18,3],[40,1],[40,1],[40,1],[40,1],[41,1],[41,1],[19,1],[19,1],[19,1],[12,1],[13,1],[16,1],[14,1]],performAction:function(L,v,B,w,D,N,z){var X=N.length-1;switch(D){case 1:break;case 3:this.$=[];break;case 4:N[X-1].push(N[X]),this.$=N[X-1];break;case 5:case 6:this.$=N[X];break;case 7:case 8:this.$=[];break;case 12:w.addEntity(N[X-4]),w.addEntity(N[X-2]),w.addRelationship(N[X-4],N[X],N[X-2],N[X-3]);break;case 13:w.addEntity(N[X-3]),w.addAttributes(N[X-3],N[X-1]);break;case 14:w.addEntity(N[X-2]);break;case 15:w.addEntity(N[X]);break;case 16:case 17:this.$=N[X].trim(),w.setAccTitle(this.$);break;case 18:case 19:this.$=N[X].trim(),w.setAccDescription(this.$);break;case 20:case 41:this.$=N[X];break;case 21:case 39:case 40:this.$=N[X].replace(/"/g,"");break;case 22:this.$=[N[X]];break;case 23:N[X].push(N[X-1]),this.$=N[X];break;case 24:this.$={attributeType:N[X-1],attributeName:N[X]};break;case 25:this.$={attributeType:N[X-2],attributeName:N[X-1],attributeKeyType:N[X]};break;case 26:this.$={attributeType:N[X-2],attributeName:N[X-1],attributeComment:N[X]};break;case 27:this.$={attributeType:N[X-3],attributeName:N[X-2],attributeKeyType:N[X-1],attributeComment:N[X]};break;case 28:case 29:case 30:this.$=N[X];break;case 31:this.$=N[X].replace(/"/g,"");break;case 32:this.$={cardA:N[X],relType:N[X-1],cardB:N[X-2]};break;case 33:this.$=w.Cardinality.ZERO_OR_ONE;break;case 34:this.$=w.Cardinality.ZERO_OR_MORE;break;case 35:this.$=w.Cardinality.ONE_OR_MORE;break;case 36:this.$=w.Cardinality.ONLY_ONE;break;case 37:this.$=w.Identification.NON_IDENTIFYING;break;case 
38:this.$=w.Identification.IDENTIFYING;break;case 42:w.parseDirective("%%{","open_directive");break;case 43:w.parseDirective(N[X],"type_directive");break;case 44:N[X]=N[X].trim().replace(/'/g,'"'),w.parseDirective(N[X],"arg_directive");break;case 45:w.parseDirective("}%%","close_directive","er");break}},table:[{3:1,4:e,7:3,12:4,49:r},{1:[3]},t(n,[2,3],{5:6}),{3:7,4:e,7:3,12:4,49:r},{13:8,50:[1,9]},{50:[2,42]},{6:[1,10],7:15,8:11,9:[1,12],10:13,11:[1,14],12:4,17:16,23:i,25:a,27:s,29:o,30:l,31:u,49:r},{1:[2,2]},{14:23,15:[1,24],52:h},t([15,52],[2,43]),t(n,[2,8],{1:[2,1]}),t(n,[2,4]),{7:15,10:26,12:4,17:16,23:i,25:a,27:s,29:o,30:l,31:u,49:r},t(n,[2,6]),t(n,[2,7]),t(n,[2,11]),t(n,[2,15],{18:27,40:29,20:[1,28],42:d,43:f,44:p,45:m}),{24:[1,34]},{26:[1,35]},{28:[1,36]},t(n,[2,19]),t(_,[2,20]),t(_,[2,21]),{11:[1,37]},{16:38,51:[1,39]},{11:[2,45]},t(n,[2,5]),{17:40,30:l,31:u},{21:41,22:[1,42],32:43,33:44,37:y},{41:46,46:[1,47],47:[1,48]},t(b,[2,33]),t(b,[2,34]),t(b,[2,35]),t(b,[2,36]),t(n,[2,16]),t(n,[2,17]),t(n,[2,18]),t(x,[2,9]),{14:49,52:h},{52:[2,44]},{15:[1,50]},{22:[1,51]},t(n,[2,14]),{21:52,22:[2,22],32:43,33:44,37:y},{34:53,37:[1,54]},{37:[2,28]},{40:55,42:d,43:f,44:p,45:m},t(k,[2,37]),t(k,[2,38]),{11:[1,56]},{19:57,30:[1,60],31:[1,59],48:[1,58]},t(n,[2,13]),{22:[2,23]},t(T,[2,24],{35:61,36:62,38:[1,63],39:C}),t([22,37,38,39],[2,29]),t([30,31],[2,32]),t(x,[2,10]),t(n,[2,12]),t(n,[2,39]),t(n,[2,40]),t(n,[2,41]),t(T,[2,25],{36:65,39:C}),t(T,[2,26]),t([22,37,39],[2,30]),t(T,[2,31]),t(T,[2,27])],defaultActions:{5:[2,42],7:[2,2],25:[2,45],39:[2,44],45:[2,28],52:[2,23]},parseError:function(L,v){if(v.recoverable)this.trace(L);else{var B=new Error(L);throw B.hash=v,B}},parse:function(L){var v=this,B=[0],w=[],D=[null],N=[],z=this.table,X="",ct=0,J=0,Y=2,$=1,lt=N.slice.call(arguments,1),ut=Object.create(this.lexer),W={yy:{}};for(var tt in this.yy)Object.prototype.hasOwnProperty.call(this.yy,tt)&&(W.yy[tt]=this.yy[tt]);ut.setInput(L,W.yy),W.yy.lexer=ut,W.yy.parser=this,typeof 
ut.yylloc>"u"&&(ut.yylloc={});var K=ut.yylloc;N.push(K);var it=ut.options&&ut.options.ranges;typeof W.yy.parseError=="function"?this.parseError=W.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function Z(){var Lt;return Lt=w.pop()||ut.lex()||$,typeof Lt!="number"&&(Lt instanceof Array&&(w=Lt,Lt=w.pop()),Lt=v.symbols_[Lt]||Lt),Lt}for(var V,Q,q,U,F={},j,P,et,at;;){if(Q=B[B.length-1],this.defaultActions[Q]?q=this.defaultActions[Q]:((V===null||typeof V>"u")&&(V=Z()),q=z[Q]&&z[Q][V]),typeof q>"u"||!q.length||!q[0]){var It="";at=[];for(j in z[Q])this.terminals_[j]&&j>Y&&at.push("'"+this.terminals_[j]+"'");ut.showPosition?It="Parse error on line "+(ct+1)+`: -`+ut.showPosition()+` -Expecting `+at.join(", ")+", got '"+(this.terminals_[V]||V)+"'":It="Parse error on line "+(ct+1)+": Unexpected "+(V==$?"end of input":"'"+(this.terminals_[V]||V)+"'"),this.parseError(It,{text:ut.match,token:this.terminals_[V]||V,line:ut.yylineno,loc:K,expected:at})}if(q[0]instanceof Array&&q.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Q+", token: "+V);switch(q[0]){case 1:B.push(V),D.push(ut.yytext),N.push(ut.yylloc),B.push(q[1]),V=null,J=ut.yyleng,X=ut.yytext,ct=ut.yylineno,K=ut.yylloc;break;case 2:if(P=this.productions_[q[1]][1],F.$=D[D.length-P],F._$={first_line:N[N.length-(P||1)].first_line,last_line:N[N.length-1].last_line,first_column:N[N.length-(P||1)].first_column,last_column:N[N.length-1].last_column},it&&(F._$.range=[N[N.length-(P||1)].range[0],N[N.length-1].range[1]]),U=this.performAction.apply(F,[X,J,ct,W.yy,q[1],D,N].concat(lt)),typeof U<"u")return U;P&&(B=B.slice(0,-1*P*2),D=D.slice(0,-1*P),N=N.slice(0,-1*P)),B.push(this.productions_[q[1]][0]),D.push(F.$),N.push(F._$),et=z[B[B.length-2]][B[B.length-1]],B.push(et);break;case 3:return!0}}return!0}},S=function(){var A={EOF:1,parseError:function(v,B){if(this.yy.parser)this.yy.parser.parseError(v,B);else throw new Error(v)},setInput:function(L,v){return 
this.yy=v||this.yy||{},this._input=L,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var L=this._input[0];this.yytext+=L,this.yyleng++,this.offset++,this.match+=L,this.matched+=L;var v=L.match(/(?:\r\n?|\n).*/g);return v?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),L},unput:function(L){var v=L.length,B=L.split(/(?:\r\n?|\n)/g);this._input=L+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-v),this.offset-=v;var w=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),B.length-1&&(this.yylineno-=B.length-1);var D=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:B?(B.length===w.length?this.yylloc.first_column:0)+w[w.length-B.length].length-B[0].length:this.yylloc.first_column-v},this.options.ranges&&(this.yylloc.range=[D[0],D[0]+this.yyleng-v]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(L){this.unput(this.match.slice(L))},pastInput:function(){var L=this.matched.substr(0,this.matched.length-this.match.length);return(L.length>20?"...":"")+L.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var L=this.match;return L.length<20&&(L+=this._input.substr(0,20-L.length)),(L.substr(0,20)+(L.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var L=this.pastInput(),v=new Array(L.length+1).join("-");return L+this.upcomingInput()+` -`+v+"^"},test_match:function(L,v){var B,w,D;if(this.options.backtrack_lexer&&(D={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(D.yylloc.range=this.yylloc.range.slice(0))),w=L[0].match(/(?:\r\n?|\n).*/g),w&&(this.yylineno+=w.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:w?w[w.length-1].length-w[w.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+L[0].length},this.yytext+=L[0],this.match+=L[0],this.matches=L,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(L[0].length),this.matched+=L[0],B=this.performAction.call(this,this.yy,this,v,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),B)return B;if(this._backtrack){for(var N in D)this[N]=D[N];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var L,v,B,w;this._more||(this.yytext="",this.match="");for(var 
D=this._currentRules(),N=0;N<D.length;N++)if(B=this._input.match(this.rules[D[N]]),B&&(!v||B[0].length>v[0].length)){if(v=B,w=N,this.options.backtrack_lexer){if(L=this.test_match(B,D[N]),L!==!1)return L;if(this._backtrack){v=!1;continue}else return!1}else if(!this.options.flex)break}return v?(L=this.test_match(v,D[w]),L!==!1?L:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var v=this.next();return v||this.lex()},begin:function(v){this.conditionStack.push(v)},popState:function(){var v=this.conditionStack.length-1;return v>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(v){return v=this.conditionStack.length-1-Math.abs(v||0),v>=0?this.conditionStack[v]:"INITIAL"},pushState:function(v){this.begin(v)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(v,B,w,D){switch(w){case 0:return this.begin("acc_title"),25;case 1:return this.popState(),"acc_title_value";case 2:return this.begin("acc_descr"),27;case 3:return this.popState(),"acc_descr_value";case 4:this.begin("acc_descr_multiline");break;case 5:this.popState();break;case 6:return"acc_descr_multiline_value";case 7:return this.begin("open_directive"),49;case 8:return this.begin("type_directive"),50;case 9:return this.popState(),this.begin("arg_directive"),15;case 10:return this.popState(),this.popState(),52;case 11:return 51;case 12:break;case 13:break;case 14:return 11;case 15:break;case 16:return 9;case 17:return 31;case 18:return 48;case 19:return 4;case 20:return this.begin("block"),20;case 21:break;case 22:return 38;case 23:return 37;case 24:return 37;case 25:return 39;case 
26:break;case 27:return this.popState(),22;case 28:return B.yytext[0];case 29:return 42;case 30:return 43;case 31:return 44;case 32:return 45;case 33:return 42;case 34:return 43;case 35:return 44;case 36:return 46;case 37:return 47;case 38:return 46;case 39:return 46;case 40:return 30;case 41:return B.yytext[0];case 42:return 6}},rules:[/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:[\s]+)/i,/^(?:"[^"%\r\n\v\b\\]+")/i,/^(?:"[^"]*")/i,/^(?:erDiagram\b)/i,/^(?:\{)/i,/^(?:\s+)/i,/^(?:\b((?:PK)|(?:FK))\b)/i,/^(?:(.*?)[~](.*?)*[~])/i,/^(?:[A-Za-z][A-Za-z0-9\-_\[\]]*)/i,/^(?:"[^"]*")/i,/^(?:[\n]+)/i,/^(?:\})/i,/^(?:.)/i,/^(?:\|o\b)/i,/^(?:\}o\b)/i,/^(?:\}\|)/i,/^(?:\|\|)/i,/^(?:o\|)/i,/^(?:o\{)/i,/^(?:\|\{)/i,/^(?:\.\.)/i,/^(?:--)/i,/^(?:\.-)/i,/^(?:-\.)/i,/^(?:[A-Za-z][A-Za-z0-9\-_]*)/i,/^(?:.)/i,/^(?:$)/i],conditions:{acc_descr_multiline:{rules:[5,6],inclusive:!1},acc_descr:{rules:[3],inclusive:!1},acc_title:{rules:[1],inclusive:!1},open_directive:{rules:[8],inclusive:!1},type_directive:{rules:[9,10],inclusive:!1},arg_directive:{rules:[10,11],inclusive:!1},block:{rules:[21,22,23,24,25,26,27,28],inclusive:!1},INITIAL:{rules:[0,2,4,7,12,13,14,15,16,17,18,19,20,29,30,31,32,33,34,35,36,37,38,39,40,41,42],inclusive:!0}}};return A}();M.lexer=S;function R(){this.yy={}}return R.prototype=M,M.Parser=R,new R}();a4.parser=a4;const Fet=t=>t.match(/^\s*erDiagram/)!==null;let tu={},s4=[];const Pet={ZERO_OR_ONE:"ZERO_OR_ONE",ZERO_OR_MORE:"ZERO_OR_MORE",ONE_OR_MORE:"ONE_OR_MORE",ONLY_ONE:"ONLY_ONE"},qet={NON_IDENTIFYING:"NON_IDENTIFYING",IDENTIFYING:"IDENTIFYING"},Vet=function(t,e,r){Xe.parseDirective(this,t,e,r)},eM=function(t){return typeof tu[t]>"u"&&(tu[t]={attributes:[]},H.info("Added new entity 
:",t)),tu[t]},zet={Cardinality:Pet,Identification:qet,parseDirective:Vet,getConfig:()=>nt().er,addEntity:eM,addAttributes:function(t,e){let r=eM(t),n;for(n=e.length-1;n>=0;n--)r.attributes.push(e[n]),H.debug("Added attribute ",e[n].attributeName)},getEntities:()=>tu,addRelationship:function(t,e,r,n){let i={entityA:t,roleA:e,entityB:r,relSpec:n};s4.push(i),H.debug("Added new relationship :",i)},getRelationships:()=>s4,clear:function(){tu={},s4=[],ci()},setAccTitle:Yn,getAccTitle:ui,setAccDescription:hi,getAccDescription:fi},ua={ONLY_ONE_START:"ONLY_ONE_START",ONLY_ONE_END:"ONLY_ONE_END",ZERO_OR_ONE_START:"ZERO_OR_ONE_START",ZERO_OR_ONE_END:"ZERO_OR_ONE_END",ONE_OR_MORE_START:"ONE_OR_MORE_START",ONE_OR_MORE_END:"ONE_OR_MORE_END",ZERO_OR_MORE_START:"ZERO_OR_MORE_START",ZERO_OR_MORE_END:"ZERO_OR_MORE_END"},ha={ERMarkers:ua,insertMarkers:function(t,e){let r;t.append("defs").append("marker").attr("id",ua.ONLY_ONE_START).attr("refX",0).attr("refY",9).attr("markerWidth",18).attr("markerHeight",18).attr("orient","auto").append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M9,0 L9,18 M15,0 L15,18"),t.append("defs").append("marker").attr("id",ua.ONLY_ONE_END).attr("refX",18).attr("refY",9).attr("markerWidth",18).attr("markerHeight",18).attr("orient","auto").append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M3,0 L3,18 M9,0 L9,18"),r=t.append("defs").append("marker").attr("id",ua.ZERO_OR_ONE_START).attr("refX",0).attr("refY",9).attr("markerWidth",30).attr("markerHeight",18).attr("orient","auto"),r.append("circle").attr("stroke",e.stroke).attr("fill","white").attr("cx",21).attr("cy",9).attr("r",6),r.append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M9,0 
L9,18"),r=t.append("defs").append("marker").attr("id",ua.ZERO_OR_ONE_END).attr("refX",30).attr("refY",9).attr("markerWidth",30).attr("markerHeight",18).attr("orient","auto"),r.append("circle").attr("stroke",e.stroke).attr("fill","white").attr("cx",9).attr("cy",9).attr("r",6),r.append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M21,0 L21,18"),t.append("defs").append("marker").attr("id",ua.ONE_OR_MORE_START).attr("refX",18).attr("refY",18).attr("markerWidth",45).attr("markerHeight",36).attr("orient","auto").append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M0,18 Q 18,0 36,18 Q 18,36 0,18 M42,9 L42,27"),t.append("defs").append("marker").attr("id",ua.ONE_OR_MORE_END).attr("refX",27).attr("refY",18).attr("markerWidth",45).attr("markerHeight",36).attr("orient","auto").append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M3,9 L3,27 M9,18 Q27,0 45,18 Q27,36 9,18"),r=t.append("defs").append("marker").attr("id",ua.ZERO_OR_MORE_START).attr("refX",18).attr("refY",18).attr("markerWidth",57).attr("markerHeight",36).attr("orient","auto"),r.append("circle").attr("stroke",e.stroke).attr("fill","white").attr("cx",48).attr("cy",18).attr("r",6),r.append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M0,18 Q18,0 36,18 Q18,36 0,18"),r=t.append("defs").append("marker").attr("id",ua.ZERO_OR_MORE_END).attr("refX",39).attr("refY",18).attr("markerWidth",57).attr("markerHeight",36).attr("orient","auto"),r.append("circle").attr("stroke",e.stroke).attr("fill","white").attr("cx",9).attr("cy",18).attr("r",6),r.append("path").attr("stroke",e.stroke).attr("fill","none").attr("d","M21,18 Q39,0 57,18 Q39,36 21,18")}};let $1;const Yet=new Uint8Array(16);function Uet(){if(!$1&&($1=typeof crypto<"u"&&crypto.getRandomValues&&crypto.getRandomValues.bind(crypto),!$1))throw new Error("crypto.getRandomValues() not supported. 
See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");return $1(Yet)}const Br=[];for(let t=0;t<256;++t)Br.push((t+256).toString(16).slice(1));function Wet(t,e=0){return(Br[t[e+0]]+Br[t[e+1]]+Br[t[e+2]]+Br[t[e+3]]+"-"+Br[t[e+4]]+Br[t[e+5]]+"-"+Br[t[e+6]]+Br[t[e+7]]+"-"+Br[t[e+8]]+Br[t[e+9]]+"-"+Br[t[e+10]]+Br[t[e+11]]+Br[t[e+12]]+Br[t[e+13]]+Br[t[e+14]]+Br[t[e+15]]).toLowerCase()}const rM={randomUUID:typeof crypto<"u"&&crypto.randomUUID&&crypto.randomUUID.bind(crypto)};function Het(t,e,r){if(rM.randomUUID&&!e&&!t)return rM.randomUUID();t=t||{};const n=t.random||(t.rng||Uet)();if(n[6]=n[6]&15|64,n[8]=n[8]&63|128,e){r=r||0;for(let i=0;i<16;++i)e[r+i]=n[i];return e}return Wet(n)}const Get=/[^A-Za-z0-9]([\W])*/g;let Ye={},eu=new Map;const jet=function(t){const e=Object.keys(t);for(let r=0;r<e.length;r++)Ye[e[r]]=t[e[r]]},$et=(t,e,r)=>{const n=Ye.entityPadding/3,i=Ye.entityPadding/3,a=Ye.fontSize*.85,s=e.node().getBBox(),o=[];let l=!1,u=!1,h=0,d=0,f=0,p=0,m=s.height+n*2,_=1;r.forEach(k=>{k.attributeKeyType!==void 0&&(l=!0),k.attributeComment!==void 0&&(u=!0)}),r.forEach(k=>{const T=`${e.node().id}-attr-${_}`;let C=0;const M=ja(k.attributeType),S=t.append("text").attr("class","er entityLabel").attr("id",`${T}-type`).attr("x",0).attr("y",0).attr("dominant-baseline","middle").attr("text-anchor","left").attr("style","font-family: "+nt().fontFamily+"; font-size: "+a+"px").text(M),R=t.append("text").attr("class","er entityLabel").attr("id",`${T}-name`).attr("x",0).attr("y",0).attr("dominant-baseline","middle").attr("text-anchor","left").attr("style","font-family: "+nt().fontFamily+"; font-size: "+a+"px").text(k.attributeName),A={};A.tn=S,A.nn=R;const L=S.node().getBBox(),v=R.node().getBBox();if(h=Math.max(h,L.width),d=Math.max(d,v.width),C=Math.max(L.height,v.height),l){const B=t.append("text").attr("class","er entityLabel").attr("id",`${T}-key`).attr("x",0).attr("y",0).attr("dominant-baseline","middle").attr("text-anchor","left").attr("style","font-family: 
"+nt().fontFamily+"; font-size: "+a+"px").text(k.attributeKeyType||"");A.kn=B;const w=B.node().getBBox();f=Math.max(f,w.width),C=Math.max(C,w.height)}if(u){const B=t.append("text").attr("class","er entityLabel").attr("id",`${T}-comment`).attr("x",0).attr("y",0).attr("dominant-baseline","middle").attr("text-anchor","left").attr("style","font-family: "+nt().fontFamily+"; font-size: "+a+"px").text(k.attributeComment||"");A.cn=B;const w=B.node().getBBox();p=Math.max(p,w.width),C=Math.max(C,w.height)}A.height=C,o.push(A),m+=C+n*2,_+=1});let y=4;l&&(y+=2),u&&(y+=2);const b=h+d+f+p,x={width:Math.max(Ye.minEntityWidth,Math.max(s.width+Ye.entityPadding*2,b+i*y)),height:r.length>0?m:Math.max(Ye.minEntityHeight,s.height+Ye.entityPadding*2)};if(r.length>0){const k=Math.max(0,(x.width-b-i*y)/(y/2));e.attr("transform","translate("+x.width/2+","+(n+s.height/2)+")");let T=s.height+n*2,C="attributeBoxOdd";o.forEach(M=>{const S=T+n+M.height/2;M.tn.attr("transform","translate("+i+","+S+")");const R=t.insert("rect","#"+M.tn.node().id).attr("class",`er ${C}`).attr("fill",Ye.fill).attr("fill-opacity","100%").attr("stroke",Ye.stroke).attr("x",0).attr("y",T).attr("width",h+i*2+k).attr("height",M.height+n*2),A=parseFloat(R.attr("x"))+parseFloat(R.attr("width"));M.nn.attr("transform","translate("+(A+i)+","+S+")");const L=t.insert("rect","#"+M.nn.node().id).attr("class",`er ${C}`).attr("fill",Ye.fill).attr("fill-opacity","100%").attr("stroke",Ye.stroke).attr("x",A).attr("y",T).attr("width",d+i*2+k).attr("height",M.height+n*2);let v=parseFloat(L.attr("x"))+parseFloat(L.attr("width"));if(l){M.kn.attr("transform","translate("+(v+i)+","+S+")");const B=t.insert("rect","#"+M.kn.node().id).attr("class",`er 
${C}`).attr("fill",Ye.fill).attr("fill-opacity","100%").attr("stroke",Ye.stroke).attr("x",v).attr("y",T).attr("width",f+i*2+k).attr("height",M.height+n*2);v=parseFloat(B.attr("x"))+parseFloat(B.attr("width"))}u&&(M.cn.attr("transform","translate("+(v+i)+","+S+")"),t.insert("rect","#"+M.cn.node().id).attr("class",`er ${C}`).attr("fill",Ye.fill).attr("fill-opacity","100%").attr("stroke",Ye.stroke).attr("x",v).attr("y",T).attr("width",p+i*2+k).attr("height",M.height+n*2)),T+=M.height+n*2,C=C==="attributeBoxOdd"?"attributeBoxEven":"attributeBoxOdd"})}else x.height=Math.max(Ye.minEntityHeight,m),e.attr("transform","translate("+x.width/2+","+x.height/2+")");return x},Xet=function(t,e,r){const n=Object.keys(e);let i;return n.forEach(function(a){const s=trt(a,"entity");eu.set(a,s);const o=t.append("g").attr("id",s);i=i===void 0?s:i;const l="text-"+s,u=o.append("text").attr("class","er entityLabel").attr("id",l).attr("x",0).attr("y",0).attr("dominant-baseline","middle").attr("text-anchor","middle").attr("style","font-family: "+nt().fontFamily+"; font-size: "+Ye.fontSize+"px").text(a),{width:h,height:d}=$et(o,u,e[a].attributes),p=o.insert("rect","#"+l).attr("class","er entityBox").attr("fill",Ye.fill).attr("fill-opacity","100%").attr("stroke",Ye.stroke).attr("x",0).attr("y",0).attr("width",h).attr("height",d).node().getBBox();r.setNode(s,{width:p.width,height:p.height,shape:"rect",id:s})}),i},Ket=function(t,e){e.nodes().forEach(function(r){typeof r<"u"&&typeof e.node(r)<"u"&&t.select("#"+r).attr("transform","translate("+(e.node(r).x-e.node(r).width/2)+","+(e.node(r).y-e.node(r).height/2)+" )")})},nM=function(t){return(t.entityA+t.roleA+t.entityB).replace(/\s/g,"")},Zet=function(t,e){return t.forEach(function(r){e.setEdge(eu.get(r.entityA),eu.get(r.entityB),{relationship:r},nM(r))}),t};let iM=0;const Qet=function(t,e,r,n,i){iM++;const a=r.edge(eu.get(e.entityA),eu.get(e.entityB),nM(e)),s=Ua().x(function(m){return m.x}).y(function(m){return 
m.y}).curve(Os),o=t.insert("path","#"+n).attr("class","er relationshipLine").attr("d",s(a.points)).attr("stroke",Ye.stroke).attr("fill","none");e.relSpec.relType===i.db.Identification.NON_IDENTIFYING&&o.attr("stroke-dasharray","8,8");let l="";switch(Ye.arrowMarkerAbsolute&&(l=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,l=l.replace(/\(/g,"\\("),l=l.replace(/\)/g,"\\)")),e.relSpec.cardA){case i.db.Cardinality.ZERO_OR_ONE:o.attr("marker-end","url("+l+"#"+ha.ERMarkers.ZERO_OR_ONE_END+")");break;case i.db.Cardinality.ZERO_OR_MORE:o.attr("marker-end","url("+l+"#"+ha.ERMarkers.ZERO_OR_MORE_END+")");break;case i.db.Cardinality.ONE_OR_MORE:o.attr("marker-end","url("+l+"#"+ha.ERMarkers.ONE_OR_MORE_END+")");break;case i.db.Cardinality.ONLY_ONE:o.attr("marker-end","url("+l+"#"+ha.ERMarkers.ONLY_ONE_END+")");break}switch(e.relSpec.cardB){case i.db.Cardinality.ZERO_OR_ONE:o.attr("marker-start","url("+l+"#"+ha.ERMarkers.ZERO_OR_ONE_START+")");break;case i.db.Cardinality.ZERO_OR_MORE:o.attr("marker-start","url("+l+"#"+ha.ERMarkers.ZERO_OR_MORE_START+")");break;case i.db.Cardinality.ONE_OR_MORE:o.attr("marker-start","url("+l+"#"+ha.ERMarkers.ONE_OR_MORE_START+")");break;case i.db.Cardinality.ONLY_ONE:o.attr("marker-start","url("+l+"#"+ha.ERMarkers.ONLY_ONE_START+")");break}const u=o.node().getTotalLength(),h=o.node().getPointAtLength(u*.5),d="rel"+iM,p=t.append("text").attr("class","er relationshipLabel").attr("id",d).attr("x",h.x).attr("y",h.y).attr("text-anchor","middle").attr("dominant-baseline","middle").attr("style","font-family: "+nt().fontFamily+"; font-size: "+Ye.fontSize+"px").text(e.roleA).node().getBBox();t.insert("rect","#"+d).attr("class","er relationshipLabelBox").attr("x",h.x-p.width/2).attr("y",h.y-p.height/2).attr("width",p.width).attr("height",p.height).attr("fill","white").attr("fill-opacity","85%")},Jet=function(t,e,r,n){Ye=nt().er,H.info("Drawing ER diagram");const i=nt().securityLevel;let 
a;i==="sandbox"&&(a=St("#i"+e));const o=St(i==="sandbox"?a.nodes()[0].contentDocument.body:"body").select(`[id='${e}']`);ha.insertMarkers(o,Ye);let l;l=new cr.Graph({multigraph:!0,directed:!0,compound:!1}).setGraph({rankdir:Ye.layoutDirection,marginx:20,marginy:20,nodesep:100,edgesep:100,ranksep:100}).setDefaultEdgeLabel(function(){return{}});const u=Xet(o,n.db.getEntities(),l),h=Zet(n.db.getRelationships(),l);Zc.layout(l),Ket(o,l),h.forEach(function(_){Qet(o,_,l,u,n)});const d=Ye.diagramPadding,f=o.node().getBBox(),p=f.width+d*2,m=f.height+d*2;li(o,m,p,Ye.useMaxWidth),o.attr("viewBox",`${f.x-d} ${f.y-d} ${p} ${m}`),bn(n.db,o,e)};function trt(t="",e=""){const r=t.replace(Get,"");return`${aM(e)}${aM(r)}${Het()}`}function aM(t=""){return t.length>0?`${t}-`:""}const ert={setConf:jet,draw:Jet};var X1=function(){var t=function(Ln,Xt,ee,ce){for(ee=ee||{},ce=Ln.length;ce--;ee[Ln[ce]]=Xt);return ee},e=[1,9],r=[1,7],n=[1,6],i=[1,8],a=[1,20,21,22,23,38,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],s=[2,10],o=[1,20],l=[1,21],u=[1,22],h=[1,23],d=[1,30],f=[1,32],p=[1,33],m=[1,34],_=[1,62],y=[1,48],b=[1,52],x=[1,36],k=[1,37],T=[1,38],C=[1,39],M=[1,40],S=[1,56],R=[1,63],A=[1,51],L=[1,53],v=[1,55],B=[1,59],w=[1,60],D=[1,41],N=[1,42],z=[1,43],X=[1,44],ct=[1,61],J=[1,50],Y=[1,54],$=[1,57],lt=[1,58],ut=[1,49],W=[1,66],tt=[1,71],K=[1,20,21,22,23,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],it=[1,75],Z=[1,74],V=[1,76],Q=[20,21,23,81,82],q=[1,99],U=[1,104],F=[1,107],j=[1,108],P=[1,101],et=[1,106],at=[1,109],It=[1,102],Lt=[1,114],Rt=[1,113],Ct=[1,103],pt=[1,105],mt=[1,110],vt=[1,111],Tt=[1,112],ft=[1,115],le=[20,21,22,23,81,82],Dt=[20,21,22,23,53,81,82],Gt=[20,21,22,23,40,52,53,55,57,59,61,63,65,66,67,69,71,73,74,76,81,82,91,95,105,106,109,111,112,122,123,124,125,126,127],$t=[20,21,23],Qt=[20,21,23,52,66,67,81,82,91,95,105,106,109,111,112,122,123,124,125,126,127],we=[1,12,20,
21,22,23,24,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],jt=[52,66,67,91,95,105,106,109,111,112,122,123,124,125,126,127],Ft=[1,149],zt=[1,157],wt=[1,158],bt=[1,159],Et=[1,160],kt=[1,144],Ut=[1,145],gt=[1,141],he=[1,152],yt=[1,153],ne=[1,154],ve=[1,155],ye=[1,156],be=[1,161],Te=[1,162],Wt=[1,147],se=[1,150],me=[1,146],ue=[1,143],_a=[20,21,22,23,38,42,44,46,48,52,66,67,86,87,88,89,90,91,95,105,106,109,111,112,118,119,120,121,122,123,124,125,126,127],Hr=[1,165],Ie=[20,21,22,23,26,52,66,67,91,105,106,109,111,112,122,123,124,125,126,127],oe=[20,21,22,23,24,26,38,40,41,42,52,56,58,60,62,64,66,67,68,70,72,73,75,77,81,82,86,87,88,89,90,91,92,95,105,106,109,111,112,113,114,122,123,124,125,126,127],Ke=[12,21,22,24],wr=[22,106],Ge=[1,250],Ze=[1,245],qt=[1,246],st=[1,254],At=[1,251],Nt=[1,248],Jt=[1,247],ze=[1,249],Pe=[1,252],qe=[1,253],Tr=[1,255],Ve=[1,273],va=[20,21,23,106],Ce=[20,21,22,23,66,67,86,102,105,106,109,110,111,112,113],Wi={trace:function(){},yy:{},symbols_:{error:2,start:3,mermaidDoc:4,directive:5,openDirective:6,typeDirective:7,closeDirective:8,separator:9,":":10,argDirective:11,open_directive:12,type_directive:13,arg_directive:14,close_directive:15,graphConfig:16,document:17,line:18,statement:19,SEMI:20,NEWLINE:21,SPACE:22,EOF:23,GRAPH:24,NODIR:25,DIR:26,FirstStmtSeperator:27,ending:28,endToken:29,spaceList:30,spaceListNewline:31,verticeStatement:32,styleStatement:33,linkStyleStatement:34,classDefStatement:35,classStatement:36,clickStatement:37,subgraph:38,text:39,SQS:40,SQE:41,end:42,direction:43,acc_title:44,acc_title_value:45,acc_descr:46,acc_descr_value:47,acc_descr_multiline_value:48,link:49,node:50,vertex:51,AMP:52,STYLE_SEPARATOR:53,idString:54,DOUBLECIRCLESTART:55,DOUBLECIRCLEEND:56,PS:57,PE:58,"(-":59,"-)":60,STADIUMSTART:61,STADIUMEND:62,SUBROUTINESTART:63,SUBROUTINEEND:64,VERTEX_WITH_PROPS_START:65,ALPHA:66,COLON:67,PIPE:68,CYLINDERSTART:69,CYLINDEREND:70,DIAMOND_START:71,DIAMOND_STOP:72,TAG
END:73,TRAPSTART:74,TRAPEND:75,INVTRAPSTART:76,INVTRAPEND:77,linkStatement:78,arrowText:79,TESTSTR:80,START_LINK:81,LINK:82,textToken:83,STR:84,keywords:85,STYLE:86,LINKSTYLE:87,CLASSDEF:88,CLASS:89,CLICK:90,DOWN:91,UP:92,textNoTags:93,textNoTagsToken:94,DEFAULT:95,stylesOpt:96,alphaNum:97,CALLBACKNAME:98,CALLBACKARGS:99,HREF:100,LINK_TARGET:101,HEX:102,numList:103,INTERPOLATE:104,NUM:105,COMMA:106,style:107,styleComponent:108,MINUS:109,UNIT:110,BRKT:111,DOT:112,PCT:113,TAGSTART:114,alphaNumToken:115,idStringToken:116,alphaNumStatement:117,direction_tb:118,direction_bt:119,direction_rl:120,direction_lr:121,PUNCTUATION:122,UNICODE_TEXT:123,PLUS:124,EQUALS:125,MULT:126,UNDERSCORE:127,graphCodeTokens:128,ARROW_CROSS:129,ARROW_POINT:130,ARROW_CIRCLE:131,ARROW_OPEN:132,QUOTE:133,$accept:0,$end:1},terminals_:{2:"error",10:":",12:"open_directive",13:"type_directive",14:"arg_directive",15:"close_directive",20:"SEMI",21:"NEWLINE",22:"SPACE",23:"EOF",24:"GRAPH",25:"NODIR",26:"DIR",38:"subgraph",40:"SQS",41:"SQE",42:"end",44:"acc_title",45:"acc_title_value",46:"acc_descr",47:"acc_descr_value",48:"acc_descr_multiline_value",52:"AMP",53:"STYLE_SEPARATOR",55:"DOUBLECIRCLESTART",56:"DOUBLECIRCLEEND",57:"PS",58:"PE",59:"(-",60:"-)",61:"STADIUMSTART",62:"STADIUMEND",63:"SUBROUTINESTART",64:"SUBROUTINEEND",65:"VERTEX_WITH_PROPS_START",66:"ALPHA",67:"COLON",68:"PIPE",69:"CYLINDERSTART",70:"CYLINDEREND",71:"DIAMOND_START",72:"DIAMOND_STOP",73:"TAGEND",74:"TRAPSTART",75:"TRAPEND",76:"INVTRAPSTART",77:"INVTRAPEND",80:"TESTSTR",81:"START_LINK",82:"LINK",84:"STR",86:"STYLE",87:"LINKSTYLE",88:"CLASSDEF",89:"CLASS",90:"CLICK",91:"DOWN",92:"UP",95:"DEFAULT",98:"CALLBACKNAME",99:"CALLBACKARGS",100:"HREF",101:"LINK_TARGET",102:"HEX",104:"INTERPOLATE",105:"NUM",106:"COMMA",109:"MINUS",110:"UNIT",111:"BRKT",112:"DOT",113:"PCT",114:"TAGSTART",118:"direction_tb",119:"direction_bt",120:"direction_rl",121:"direction_lr",122:"PUNCTUATION",123:"UNICODE_TEXT",124:"PLUS",125:"EQUALS",126:"MULT",127:"UNDE
RSCORE",129:"ARROW_CROSS",130:"ARROW_POINT",131:"ARROW_CIRCLE",132:"ARROW_OPEN",133:"QUOTE"},productions_:[0,[3,1],[3,2],[5,4],[5,6],[6,1],[7,1],[11,1],[8,1],[4,2],[17,0],[17,2],[18,1],[18,1],[18,1],[18,1],[18,1],[16,2],[16,2],[16,2],[16,3],[28,2],[28,1],[29,1],[29,1],[29,1],[27,1],[27,1],[27,2],[31,2],[31,2],[31,1],[31,1],[30,2],[30,1],[19,2],[19,2],[19,2],[19,2],[19,2],[19,2],[19,9],[19,6],[19,4],[19,1],[19,2],[19,2],[19,1],[9,1],[9,1],[9,1],[32,3],[32,4],[32,2],[32,1],[50,1],[50,5],[50,3],[51,4],[51,4],[51,6],[51,4],[51,4],[51,4],[51,8],[51,4],[51,4],[51,4],[51,6],[51,4],[51,4],[51,4],[51,4],[51,4],[51,1],[49,2],[49,3],[49,3],[49,1],[49,3],[78,1],[79,3],[39,1],[39,2],[39,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[85,1],[93,1],[93,2],[35,5],[35,5],[36,5],[37,2],[37,4],[37,3],[37,5],[37,2],[37,4],[37,4],[37,6],[37,2],[37,4],[37,2],[37,4],[37,4],[37,6],[33,5],[33,5],[34,5],[34,5],[34,9],[34,9],[34,7],[34,7],[103,1],[103,3],[96,1],[96,3],[107,1],[107,2],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[108,1],[83,1],[83,1],[83,1],[83,1],[83,1],[83,1],[94,1],[94,1],[94,1],[94,1],[54,1],[54,2],[97,1],[97,2],[117,1],[117,1],[117,1],[117,1],[43,1],[43,1],[43,1],[43,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[115,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[116,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1],[128,1]],performAction:function(Xt,ee,ce,Pt,je,rt,Ks){var ot=rt.length-1;switch(je){case 5:Pt.parseDirective("%%{","open_directive");break;case 6:Pt.parseDirective(rt[ot],"type_directive");break;case 7:rt[ot]=rt[ot].trim().replace(/'/g,'"'),Pt.parseDirective(rt[ot],"arg_directive");break;case 
8:Pt.parseDirective("}%%","close_directive","flowchart");break;case 10:this.$=[];break;case 11:(!Array.isArray(rt[ot])||rt[ot].length>0)&&rt[ot-1].push(rt[ot]),this.$=rt[ot-1];break;case 12:case 82:case 84:case 96:case 152:case 154:case 155:this.$=rt[ot];break;case 19:Pt.setDirection("TB"),this.$="TB";break;case 20:Pt.setDirection(rt[ot-1]),this.$=rt[ot-1];break;case 35:this.$=rt[ot-1].nodes;break;case 36:case 37:case 38:case 39:case 40:this.$=[];break;case 41:this.$=Pt.addSubGraph(rt[ot-6],rt[ot-1],rt[ot-4]);break;case 42:this.$=Pt.addSubGraph(rt[ot-3],rt[ot-1],rt[ot-3]);break;case 43:this.$=Pt.addSubGraph(void 0,rt[ot-1],void 0);break;case 45:this.$=rt[ot].trim(),Pt.setAccTitle(this.$);break;case 46:case 47:this.$=rt[ot].trim(),Pt.setAccDescription(this.$);break;case 51:Pt.addLink(rt[ot-2].stmt,rt[ot],rt[ot-1]),this.$={stmt:rt[ot],nodes:rt[ot].concat(rt[ot-2].nodes)};break;case 52:Pt.addLink(rt[ot-3].stmt,rt[ot-1],rt[ot-2]),this.$={stmt:rt[ot-1],nodes:rt[ot-1].concat(rt[ot-3].nodes)};break;case 53:this.$={stmt:rt[ot-1],nodes:rt[ot-1]};break;case 54:this.$={stmt:rt[ot],nodes:rt[ot]};break;case 55:this.$=[rt[ot]];break;case 56:this.$=rt[ot-4].concat(rt[ot]);break;case 57:this.$=[rt[ot-2]],Pt.setClass(rt[ot-2],rt[ot]);break;case 58:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"square");break;case 59:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"doublecircle");break;case 60:this.$=rt[ot-5],Pt.addVertex(rt[ot-5],rt[ot-2],"circle");break;case 61:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"ellipse");break;case 62:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"stadium");break;case 63:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"subroutine");break;case 64:this.$=rt[ot-7],Pt.addVertex(rt[ot-7],rt[ot-1],"rect",void 0,void 0,void 0,Object.fromEntries([[rt[ot-5],rt[ot-3]]]));break;case 65:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"cylinder");break;case 66:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"round");break;case 
67:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"diamond");break;case 68:this.$=rt[ot-5],Pt.addVertex(rt[ot-5],rt[ot-2],"hexagon");break;case 69:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"odd");break;case 70:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"trapezoid");break;case 71:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"inv_trapezoid");break;case 72:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"lean_right");break;case 73:this.$=rt[ot-3],Pt.addVertex(rt[ot-3],rt[ot-1],"lean_left");break;case 74:this.$=rt[ot],Pt.addVertex(rt[ot]);break;case 75:rt[ot-1].text=rt[ot],this.$=rt[ot-1];break;case 76:case 77:rt[ot-2].text=rt[ot-1],this.$=rt[ot-2];break;case 78:this.$=rt[ot];break;case 79:var Gr=Pt.destructLink(rt[ot],rt[ot-2]);this.$={type:Gr.type,stroke:Gr.stroke,length:Gr.length,text:rt[ot-1]};break;case 80:var Gr=Pt.destructLink(rt[ot]);this.$={type:Gr.type,stroke:Gr.stroke,length:Gr.length};break;case 81:this.$=rt[ot-1];break;case 83:case 97:case 153:this.$=rt[ot-1]+""+rt[ot];break;case 98:case 99:this.$=rt[ot-4],Pt.addClass(rt[ot-2],rt[ot]);break;case 100:this.$=rt[ot-4],Pt.setClass(rt[ot-2],rt[ot]);break;case 101:case 109:this.$=rt[ot-1],Pt.setClickEvent(rt[ot-1],rt[ot]);break;case 102:case 110:this.$=rt[ot-3],Pt.setClickEvent(rt[ot-3],rt[ot-2]),Pt.setTooltip(rt[ot-3],rt[ot]);break;case 103:this.$=rt[ot-2],Pt.setClickEvent(rt[ot-2],rt[ot-1],rt[ot]);break;case 104:this.$=rt[ot-4],Pt.setClickEvent(rt[ot-4],rt[ot-3],rt[ot-2]),Pt.setTooltip(rt[ot-4],rt[ot]);break;case 105:case 111:this.$=rt[ot-1],Pt.setLink(rt[ot-1],rt[ot]);break;case 106:case 112:this.$=rt[ot-3],Pt.setLink(rt[ot-3],rt[ot-2]),Pt.setTooltip(rt[ot-3],rt[ot]);break;case 107:case 113:this.$=rt[ot-3],Pt.setLink(rt[ot-3],rt[ot-2],rt[ot]);break;case 108:case 114:this.$=rt[ot-5],Pt.setLink(rt[ot-5],rt[ot-4],rt[ot]),Pt.setTooltip(rt[ot-5],rt[ot-2]);break;case 115:this.$=rt[ot-4],Pt.addVertex(rt[ot-2],void 0,void 0,rt[ot]);break;case 116:case 
118:this.$=rt[ot-4],Pt.updateLink(rt[ot-2],rt[ot]);break;case 117:this.$=rt[ot-4],Pt.updateLink([rt[ot-2]],rt[ot]);break;case 119:this.$=rt[ot-8],Pt.updateLinkInterpolate([rt[ot-6]],rt[ot-2]),Pt.updateLink([rt[ot-6]],rt[ot]);break;case 120:this.$=rt[ot-8],Pt.updateLinkInterpolate(rt[ot-6],rt[ot-2]),Pt.updateLink(rt[ot-6],rt[ot]);break;case 121:this.$=rt[ot-6],Pt.updateLinkInterpolate([rt[ot-4]],rt[ot]);break;case 122:this.$=rt[ot-6],Pt.updateLinkInterpolate(rt[ot-4],rt[ot]);break;case 123:case 125:this.$=[rt[ot]];break;case 124:case 126:rt[ot-2].push(rt[ot]),this.$=rt[ot-2];break;case 128:this.$=rt[ot-1]+rt[ot];break;case 150:this.$=rt[ot];break;case 151:this.$=rt[ot-1]+""+rt[ot];break;case 156:this.$="v";break;case 157:this.$="-";break;case 158:this.$={stmt:"dir",value:"TB"};break;case 159:this.$={stmt:"dir",value:"BT"};break;case 160:this.$={stmt:"dir",value:"RL"};break;case 161:this.$={stmt:"dir",value:"LR"};break}},table:[{3:1,4:2,5:3,6:5,12:e,16:4,21:r,22:n,24:i},{1:[3]},{1:[2,1]},{3:10,4:2,5:3,6:5,12:e,16:4,21:r,22:n,24:i},t(a,s,{17:11}),{7:12,13:[1,13]},{16:14,21:r,22:n,24:i},{16:15,21:r,22:n,24:i},{25:[1,16],26:[1,17]},{13:[2,5]},{1:[2,2]},{1:[2,9],18:18,19:19,20:o,21:l,22:u,23:h,32:24,33:25,34:26,35:27,36:28,37:29,38:d,43:31,44:f,46:p,48:m,50:35,51:45,52:_,54:46,66:y,67:b,86:x,87:k,88:T,89:C,90:M,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,118:D,119:N,120:z,121:X,122:ct,123:J,124:Y,125:$,126:lt,127:ut},{8:64,10:[1,65],15:W},t([10,15],[2,6]),t(a,[2,17]),t(a,[2,18]),t(a,[2,19]),{20:[1,68],21:[1,69],22:tt,27:67,30:70},t(K,[2,11]),t(K,[2,12]),t(K,[2,13]),t(K,[2,14]),t(K,[2,15]),t(K,[2,16]),{9:72,20:it,21:Z,23:V,49:73,78:77,81:[1,78],82:[1,79]},{9:80,20:it,21:Z,23:V},{9:81,20:it,21:Z,23:V},{9:82,20:it,21:Z,23:V},{9:83,20:it,21:Z,23:V},{9:84,20:it,21:Z,23:V},{9:86,20:it,21:Z,22:[1,85],23:V},t(K,[2,44]),{45:[1,87]},{47:[1,88]},t(K,[2,47]),t(Q,[2,54],{30:89,22:tt}),{22:[1,90]},{22:[1,91]},{22:[1,92]},{22:[1,93]},{26:q,52:U,66:F,67:j,84:[1,97],91:P,97:96,98:[1,94
],100:[1,95],105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(K,[2,158]),t(K,[2,159]),t(K,[2,160]),t(K,[2,161]),t(le,[2,55],{53:[1,116]}),t(Dt,[2,74],{116:129,40:[1,117],52:_,55:[1,118],57:[1,119],59:[1,120],61:[1,121],63:[1,122],65:[1,123],66:y,67:b,69:[1,124],71:[1,125],73:[1,126],74:[1,127],76:[1,128],91:S,95:R,105:A,106:L,109:v,111:B,112:w,122:ct,123:J,124:Y,125:$,126:lt,127:ut}),t(Gt,[2,150]),t(Gt,[2,175]),t(Gt,[2,176]),t(Gt,[2,177]),t(Gt,[2,178]),t(Gt,[2,179]),t(Gt,[2,180]),t(Gt,[2,181]),t(Gt,[2,182]),t(Gt,[2,183]),t(Gt,[2,184]),t(Gt,[2,185]),t(Gt,[2,186]),t(Gt,[2,187]),t(Gt,[2,188]),t(Gt,[2,189]),t(Gt,[2,190]),{9:130,20:it,21:Z,23:V},{11:131,14:[1,132]},t($t,[2,8]),t(a,[2,20]),t(a,[2,26]),t(a,[2,27]),{21:[1,133]},t(Qt,[2,34],{30:134,22:tt}),t(K,[2,35]),{50:135,51:45,52:_,54:46,66:y,67:b,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,122:ct,123:J,124:Y,125:$,126:lt,127:ut},t(we,[2,48]),t(we,[2,49]),t(we,[2,50]),t(jt,[2,78],{79:136,68:[1,138],80:[1,137]}),{22:Ft,24:zt,26:wt,38:bt,39:139,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t([52,66,67,68,80,91,95,105,106,109,111,112,122,123,124,125,126,127],[2,80]),t(K,[2,36]),t(K,[2,37]),t(K,[2,38]),t(K,[2,39]),t(K,[2,40]),{22:Ft,24:zt,26:wt,38:bt,39:163,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(_a,s,{17:164}),t(K,[2,45]),t(K,[2,46]),t(Q,[2,53],{52:Hr}),{26:q,52:U,66:F,67:j,91:P,97:166,102:[1,167],105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{95:[1,168],103:169,105:[1,170]},{26:q,52:U,66:F,67:j,91:P,95:[1,171],97:172,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,1
27:ft},{26:q,52:U,66:F,67:j,91:P,97:173,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t($t,[2,101],{22:[1,174],99:[1,175]}),t($t,[2,105],{22:[1,176]}),t($t,[2,109],{115:100,117:178,22:[1,177],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft}),t($t,[2,111],{22:[1,179]}),t(Ie,[2,152]),t(Ie,[2,154]),t(Ie,[2,155]),t(Ie,[2,156]),t(Ie,[2,157]),t(oe,[2,162]),t(oe,[2,163]),t(oe,[2,164]),t(oe,[2,165]),t(oe,[2,166]),t(oe,[2,167]),t(oe,[2,168]),t(oe,[2,169]),t(oe,[2,170]),t(oe,[2,171]),t(oe,[2,172]),t(oe,[2,173]),t(oe,[2,174]),{52:_,54:180,66:y,67:b,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,122:ct,123:J,124:Y,125:$,126:lt,127:ut},{22:Ft,24:zt,26:wt,38:bt,39:181,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:182,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:184,42:Et,52:U,57:[1,183],66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:185,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:186,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:187,42:Et,52:U,66:F,67:j,73:k
t,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{66:[1,188]},{22:Ft,24:zt,26:wt,38:bt,39:189,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:190,42:Et,52:U,66:F,67:j,71:[1,191],73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:192,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:193,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:194,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(Gt,[2,151]),t(Ke,[2,3]),{8:195,15:W},{15:[2,7]},t(a,[2,28]),t(Qt,[2,33]),t(Q,[2,51],{30:196,22:tt}),t(jt,[2,75],{22:[1,197]}),{22:[1,198]},{22:Ft,24:zt,26:wt,38:bt,39:199,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,73:kt,81:Ut,82:[1,200],83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124
:mt,125:vt,126:Tt,127:ft},t(oe,[2,82]),t(oe,[2,84]),t(oe,[2,140]),t(oe,[2,141]),t(oe,[2,142]),t(oe,[2,143]),t(oe,[2,144]),t(oe,[2,145]),t(oe,[2,146]),t(oe,[2,147]),t(oe,[2,148]),t(oe,[2,149]),t(oe,[2,85]),t(oe,[2,86]),t(oe,[2,87]),t(oe,[2,88]),t(oe,[2,89]),t(oe,[2,90]),t(oe,[2,91]),t(oe,[2,92]),t(oe,[2,93]),t(oe,[2,94]),t(oe,[2,95]),{9:203,20:it,21:Z,22:Ft,23:V,24:zt,26:wt,38:bt,40:[1,202],42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{18:18,19:19,20:o,21:l,22:u,23:h,32:24,33:25,34:26,35:27,36:28,37:29,38:d,42:[1,204],43:31,44:f,46:p,48:m,50:35,51:45,52:_,54:46,66:y,67:b,86:x,87:k,88:T,89:C,90:M,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,118:D,119:N,120:z,121:X,122:ct,123:J,124:Y,125:$,126:lt,127:ut},{22:tt,30:205},{22:[1,206],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:178,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:[1,207]},{22:[1,208]},{22:[1,209],106:[1,210]},t(wr,[2,123]),{22:[1,211]},{22:[1,212],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:178,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:[1,213],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:178,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{84:[1,214]},t($t,[2,103],{22:[1,215]}),{84:[1,216],101:[1,217]},{84:[1,218]},t(Ie,[2,153]),{84:[1,219],101:[1,220]},t(le,[2,57],{116:129,52:_,66:y,67:b,91:S,95:R,105:A,106:L,109:v,111:B,112:w,122:ct,123:J,124:Y,125:$,126:lt,127:ut}),{22:Ft,24:zt,26:wt,38:bt,41:[1,221],42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,56:[1,222],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt
,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:223,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,58:[1,224],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,60:[1,225],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,62:[1,226],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,64:[1,227],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{67:[1,228]},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,70:[1,229],73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,72:[1,230],73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,39:231,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,
26:wt,38:bt,41:[1,232],42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,73:kt,75:[1,233],77:[1,234],81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,73:kt,75:[1,236],77:[1,235],81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{9:237,20:it,21:Z,23:V},t(Q,[2,52],{52:Hr}),t(jt,[2,77]),t(jt,[2,76]),{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,68:[1,238],73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(jt,[2,79]),t(oe,[2,83]),{22:Ft,24:zt,26:wt,38:bt,39:239,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(_a,s,{17:240}),t(K,[2,43]),{51:241,52:_,54:46,66:y,67:b,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,122:ct,123:J,124:Y,125:$,126:lt,127:ut},{22:Ge,66:Ze,67:qt,86:st,96:242,102:At,105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{22:Ge,66:Ze,67:qt,86:st,96:256,102:At,105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{22:Ge,66:Ze,67:qt,86:st,96:257,102:At,104:[1,258],105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{22:Ge,66:Ze,67:qt,86:st,96:259,102:At,104:[1,260],105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{105:[1,261]},{22:Ge,66:Ze,67:qt,86:st,96:262,102:At,105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{22:Ge,66:Ze,67:qt,86:st,96:26
3,102:At,105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{26:q,52:U,66:F,67:j,91:P,97:264,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t($t,[2,102]),{84:[1,265]},t($t,[2,106],{22:[1,266]}),t($t,[2,107]),t($t,[2,110]),t($t,[2,112],{22:[1,267]}),t($t,[2,113]),t(Dt,[2,58]),t(Dt,[2,59]),{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,58:[1,268],66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(Dt,[2,66]),t(Dt,[2,61]),t(Dt,[2,62]),t(Dt,[2,63]),{66:[1,269]},t(Dt,[2,65]),t(Dt,[2,67]),{22:Ft,24:zt,26:wt,38:bt,42:Et,52:U,66:F,67:j,72:[1,270],73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(Dt,[2,69]),t(Dt,[2,70]),t(Dt,[2,72]),t(Dt,[2,71]),t(Dt,[2,73]),t(Ke,[2,4]),t([22,52,66,67,91,95,105,106,109,111,112,122,123,124,125,126,127],[2,81]),{22:Ft,24:zt,26:wt,38:bt,41:[1,271],42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{18:18,19:19,20:o,21:l,22:u,23:h,32:24,33:25,34:26,35:27,36:28,37:29,38:d,42:[1,272],43:31,44:f,46:p,48:m,50:35,51:45,52:_,54:46,66:y,67:b,86:x,87:k,88:T,89:C,90:M,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,118:D,119:N,120:z,121:X,122:ct,123:J,124:Y,125:$,126:lt,127:ut},t(le,[2,56]),t($t,[2,115],{106:Ve}),t(va,[2,125],{108:274,22:Ge,66:Ze,67:qt,86:st,102:At,105:Nt,109:Jt,110:ze,111:Pe,112:qe,113:Tr}),t(Ce,[2,127]),t(Ce,[2,129]),t(Ce,[2,130]),t(Ce,[2,131]),t(Ce,[2,132]),t(Ce,[2,133]),t(Ce,[2,134]),t(Ce,[2,135]),t(Ce,[2,136]),t(Ce,[2,137]),t(Ce,[2,138]),t(Ce,[2,139]),t($t,[2,116],{106:Ve}),t($t,[2,117],{106:Ve}),{22:[1,275]},t($t,[2,118],{106:Ve}),{22:[1,276]},t(wr,[2,124]),t($t,[2,98],{106
:Ve}),t($t,[2,99],{106:Ve}),t($t,[2,100],{115:100,117:178,26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft}),t($t,[2,104]),{101:[1,277]},{101:[1,278]},{58:[1,279]},{68:[1,280]},{72:[1,281]},{9:282,20:it,21:Z,23:V},t(K,[2,42]),{22:Ge,66:Ze,67:qt,86:st,102:At,105:Nt,107:283,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},t(Ce,[2,128]),{26:q,52:U,66:F,67:j,91:P,97:284,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{26:q,52:U,66:F,67:j,91:P,97:285,105:et,106:at,109:It,111:Lt,112:Rt,115:100,117:98,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t($t,[2,108]),t($t,[2,114]),t(Dt,[2,60]),{22:Ft,24:zt,26:wt,38:bt,39:286,42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:140,84:gt,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},t(Dt,[2,68]),t(_a,s,{17:287}),t(va,[2,126],{108:274,22:Ge,66:Ze,67:qt,86:st,102:At,105:Nt,109:Jt,110:ze,111:Pe,112:qe,113:Tr}),t($t,[2,121],{115:100,117:178,22:[1,288],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft}),t($t,[2,122],{115:100,117:178,22:[1,289],26:q,52:U,66:F,67:j,91:P,105:et,106:at,109:It,111:Lt,112:Rt,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft}),{22:Ft,24:zt,26:wt,38:bt,41:[1,290],42:Et,52:U,66:F,67:j,73:kt,81:Ut,83:201,85:151,86:he,87:yt,88:ne,89:ve,90:ye,91:be,92:Te,94:142,95:Wt,105:et,106:at,109:se,111:Lt,112:Rt,113:me,114:ue,115:148,122:Ct,123:pt,124:mt,125:vt,126:Tt,127:ft},{18:18,19:19,20:o,21:l,22:u,23:h,32:24,33:25,34:26,35:27,36:28,37:29,38:d,42:[1,291],43:31,44:f,46:p,48:m,50:35,51:45,52:_,54:46,66:y,67:b,86:x,87:k,88:T,89:C,90:M,91:S,95:R,105:A,106:L,109:v,111:B,112:w,116:47,118:D,119:N,120:z,121:X,122:ct,123:J,124:Y,125:$,126:lt,127:ut},{22:Ge,66:Ze,67:qt,86:st,96:292,102:At,105:Nt,107:243,108:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},{22:Ge,66:Ze,67:qt,86:st,96:293,102:At,105:Nt,107:243,1
08:244,109:Jt,110:ze,111:Pe,112:qe,113:Tr},t(Dt,[2,64]),t(K,[2,41]),t($t,[2,119],{106:Ve}),t($t,[2,120],{106:Ve})],defaultActions:{2:[2,1],9:[2,5],10:[2,2],132:[2,7]},parseError:function(Xt,ee){if(ee.recoverable)this.trace(Xt);else{var ce=new Error(Xt);throw ce.hash=ee,ce}},parse:function(Xt){var ee=this,ce=[0],Pt=[],je=[null],rt=[],Ks=this.table,ot="",Gr=0,C0=0,l_=2,S0=1,A0=rt.slice.call(arguments,1),mr=Object.create(this.lexer),Hi={yy:{}};for(var Gi in this.yy)Object.prototype.hasOwnProperty.call(this.yy,Gi)&&(Hi.yy[Gi]=this.yy[Gi]);mr.setInput(Xt,Hi.yy),Hi.yy.lexer=mr,Hi.yy.parser=this,typeof mr.yylloc>"u"&&(mr.yylloc={});var Zs=mr.yylloc;rt.push(Zs);var vu=mr.options&&mr.options.ranges;typeof Hi.yy.parseError=="function"?this.parseError=Hi.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function M0(){var In;return In=Pt.pop()||mr.lex()||S0,typeof In!="number"&&(In instanceof Array&&(Pt=In,In=Pt.pop()),In=ee.symbols_[In]||In),In}for(var Dr,De,hn,xa,_i={},ka,Rn,xu,yl;;){if(De=ce[ce.length-1],this.defaultActions[De]?hn=this.defaultActions[De]:((Dr===null||typeof Dr>"u")&&(Dr=M0()),hn=Ks[De]&&Ks[De][Dr]),typeof hn>"u"||!hn.length||!hn[0]){var Qs="";yl=[];for(ka in Ks[De])this.terminals_[ka]&&ka>l_&&yl.push("'"+this.terminals_[ka]+"'");mr.showPosition?Qs="Parse error on line "+(Gr+1)+`: -`+mr.showPosition()+` -Expecting `+yl.join(", ")+", got '"+(this.terminals_[Dr]||Dr)+"'":Qs="Parse error on line "+(Gr+1)+": Unexpected "+(Dr==S0?"end of input":"'"+(this.terminals_[Dr]||Dr)+"'"),this.parseError(Qs,{text:mr.match,token:this.terminals_[Dr]||Dr,line:mr.yylineno,loc:Zs,expected:yl})}if(hn[0]instanceof Array&&hn.length>1)throw new Error("Parse Error: multiple actions possible at state: "+De+", token: "+Dr);switch(hn[0]){case 1:ce.push(Dr),je.push(mr.yytext),rt.push(mr.yylloc),ce.push(hn[1]),Dr=null,C0=mr.yyleng,ot=mr.yytext,Gr=mr.yylineno,Zs=mr.yylloc;break;case 
2:if(Rn=this.productions_[hn[1]][1],_i.$=je[je.length-Rn],_i._$={first_line:rt[rt.length-(Rn||1)].first_line,last_line:rt[rt.length-1].last_line,first_column:rt[rt.length-(Rn||1)].first_column,last_column:rt[rt.length-1].last_column},vu&&(_i._$.range=[rt[rt.length-(Rn||1)].range[0],rt[rt.length-1].range[1]]),xa=this.performAction.apply(_i,[ot,C0,Gr,Hi.yy,hn[1],je,rt].concat(A0)),typeof xa<"u")return xa;Rn&&(ce=ce.slice(0,-1*Rn*2),je=je.slice(0,-1*Rn),rt=rt.slice(0,-1*Rn)),ce.push(this.productions_[hn[1]][0]),je.push(_i.$),rt.push(_i._$),xu=Ks[ce[ce.length-2]][ce[ce.length-1]],ce.push(xu);break;case 3:return!0}}return!0}},E0=function(){var Ln={EOF:1,parseError:function(ee,ce){if(this.yy.parser)this.yy.parser.parseError(ee,ce);else throw new Error(ee)},setInput:function(Xt,ee){return this.yy=ee||this.yy||{},this._input=Xt,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var Xt=this._input[0];this.yytext+=Xt,this.yyleng++,this.offset++,this.match+=Xt,this.matched+=Xt;var ee=Xt.match(/(?:\r\n?|\n).*/g);return ee?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),Xt},unput:function(Xt){var ee=Xt.length,ce=Xt.split(/(?:\r\n?|\n)/g);this._input=Xt+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-ee),this.offset-=ee;var Pt=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),ce.length-1&&(this.yylineno-=ce.length-1);var je=this.yylloc.range;return 
this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:ce?(ce.length===Pt.length?this.yylloc.first_column:0)+Pt[Pt.length-ce.length].length-ce[0].length:this.yylloc.first_column-ee},this.options.ranges&&(this.yylloc.range=[je[0],je[0]+this.yyleng-ee]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). -`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(Xt){this.unput(this.match.slice(Xt))},pastInput:function(){var Xt=this.matched.substr(0,this.matched.length-this.match.length);return(Xt.length>20?"...":"")+Xt.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var Xt=this.match;return Xt.length<20&&(Xt+=this._input.substr(0,20-Xt.length)),(Xt.substr(0,20)+(Xt.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var Xt=this.pastInput(),ee=new Array(Xt.length+1).join("-");return Xt+this.upcomingInput()+` -`+ee+"^"},test_match:function(Xt,ee){var 
ce,Pt,je;if(this.options.backtrack_lexer&&(je={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(je.yylloc.range=this.yylloc.range.slice(0))),Pt=Xt[0].match(/(?:\r\n?|\n).*/g),Pt&&(this.yylineno+=Pt.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:Pt?Pt[Pt.length-1].length-Pt[Pt.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+Xt[0].length},this.yytext+=Xt[0],this.match+=Xt[0],this.matches=Xt,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(Xt[0].length),this.matched+=Xt[0],ce=this.performAction.call(this,this.yy,this,ee,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),ce)return ce;if(this._backtrack){for(var rt in je)this[rt]=je[rt];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var Xt,ee,ce,Pt;this._more||(this.yytext="",this.match="");for(var je=this._currentRules(),rt=0;rt<je.length;rt++)if(ce=this._input.match(this.rules[je[rt]]),ce&&(!ee||ce[0].length>ee[0].length)){if(ee=ce,Pt=rt,this.options.backtrack_lexer){if(Xt=this.test_match(ce,je[rt]),Xt!==!1)return Xt;if(this._backtrack){ee=!1;continue}else return!1}else if(!this.options.flex)break}return ee?(Xt=this.test_match(ee,je[Pt]),Xt!==!1?Xt:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var ee=this.next();return ee||this.lex()},begin:function(ee){this.conditionStack.push(ee)},popState:function(){var ee=this.conditionStack.length-1;return ee>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(ee){return ee=this.conditionStack.length-1-Math.abs(ee||0),ee>=0?this.conditionStack[ee]:"INITIAL"},pushState:function(ee){this.begin(ee)},stateStackSize:function(){return this.conditionStack.length},options:{},performAction:function(ee,ce,Pt,je){switch(Pt){case 0:return this.begin("open_directive"),12;case 1:return this.begin("type_directive"),13;case 2:return this.popState(),this.begin("arg_directive"),10;case 3:return this.popState(),this.popState(),15;case 4:return 14;case 5:break;case 6:break;case 7:return this.begin("acc_title"),44;case 8:return this.popState(),"acc_title_value";case 9:return this.begin("acc_descr"),46;case 10:return this.popState(),"acc_descr_value";case 11:this.begin("acc_descr_multiline");break;case 12:this.popState();break;case 13:return"acc_descr_multiline_value";case 14:this.begin("string");break;case 15:this.popState();break;case 16:return"STR";case 17:return 86;case 18:return 95;case 19:return 87;case 20:return 104;case 21:return 88;case 22:return 89;case 23:this.begin("href");break;case 24:this.popState();break;case 25:return 100;case 26:this.begin("callbackname");break;case 27:this.popState();break;case 28:this.popState(),this.begin("callbackargs");break;case 29:return 98;case 30:this.popState();break;case 31:return 99;case 32:this.begin("click");break;case 33:this.popState();break;case 34:return 90;case 35:return ee.lex.firstGraph()&&this.begin("dir"),24;case 36:return ee.lex.firstGraph()&&this.begin("dir"),24;case 
37:return 38;case 38:return 42;case 39:return 101;case 40:return 101;case 41:return 101;case 42:return 101;case 43:return this.popState(),25;case 44:return this.popState(),26;case 45:return this.popState(),26;case 46:return this.popState(),26;case 47:return this.popState(),26;case 48:return this.popState(),26;case 49:return this.popState(),26;case 50:return this.popState(),26;case 51:return this.popState(),26;case 52:return this.popState(),26;case 53:return this.popState(),26;case 54:return 118;case 55:return 119;case 56:return 120;case 57:return 121;case 58:return 105;case 59:return 111;case 60:return 53;case 61:return 67;case 62:return 52;case 63:return 20;case 64:return 106;case 65:return 126;case 66:return 82;case 67:return 82;case 68:return 82;case 69:return 82;case 70:return 81;case 71:return 81;case 72:return 81;case 73:return 59;case 74:return 60;case 75:return 61;case 76:return 62;case 77:return 63;case 78:return 64;case 79:return 65;case 80:return 69;case 81:return 70;case 82:return 55;case 83:return 56;case 84:return 109;case 85:return 112;case 86:return 127;case 87:return 124;case 88:return 113;case 89:return 125;case 90:return 125;case 91:return 114;case 92:return 73;case 93:return 92;case 94:return"SEP";case 95:return 91;case 96:return 66;case 97:return 75;case 98:return 74;case 99:return 77;case 100:return 76;case 101:return 122;case 102:return 123;case 103:return 68;case 104:return 57;case 105:return 58;case 106:return 40;case 107:return 41;case 108:return 71;case 109:return 72;case 110:return 133;case 111:return 21;case 112:return 22;case 113:return 
23}},rules:[/^(?:%%\{)/,/^(?:((?:(?!\}%%)[^:.])*))/,/^(?::)/,/^(?:\}%%)/,/^(?:((?:(?!\}%%).|\n)*))/,/^(?:%%(?!\{)[^\n]*)/,/^(?:[^\}]%%[^\n]*)/,/^(?:accTitle\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*:\s*)/,/^(?:(?!\n||)*[^\n]*)/,/^(?:accDescr\s*\{\s*)/,/^(?:[\}])/,/^(?:[^\}]*)/,/^(?:["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:style\b)/,/^(?:default\b)/,/^(?:linkStyle\b)/,/^(?:interpolate\b)/,/^(?:classDef\b)/,/^(?:class\b)/,/^(?:href[\s]+["])/,/^(?:["])/,/^(?:[^"]*)/,/^(?:call[\s]+)/,/^(?:\([\s]*\))/,/^(?:\()/,/^(?:[^(]*)/,/^(?:\))/,/^(?:[^)]*)/,/^(?:click[\s]+)/,/^(?:[\s\n])/,/^(?:[^\s\n]*)/,/^(?:graph\b)/,/^(?:flowchart\b)/,/^(?:subgraph\b)/,/^(?:end\b\s*)/,/^(?:_self\b)/,/^(?:_blank\b)/,/^(?:_parent\b)/,/^(?:_top\b)/,/^(?:(\r?\n)*\s*\n)/,/^(?:\s*LR\b)/,/^(?:\s*RL\b)/,/^(?:\s*TB\b)/,/^(?:\s*BT\b)/,/^(?:\s*TD\b)/,/^(?:\s*BR\b)/,/^(?:\s*<)/,/^(?:\s*>)/,/^(?:\s*\^)/,/^(?:\s*v\b)/,/^(?:.*direction\s+TB[^\n]*)/,/^(?:.*direction\s+BT[^\n]*)/,/^(?:.*direction\s+RL[^\n]*)/,/^(?:.*direction\s+LR[^\n]*)/,/^(?:[0-9]+)/,/^(?:#)/,/^(?::::)/,/^(?::)/,/^(?:&)/,/^(?:;)/,/^(?:,)/,/^(?:\*)/,/^(?:\s*[xo<]?--+[-xo>]\s*)/,/^(?:\s*[xo<]?==+[=xo>]\s*)/,/^(?:\s*[xo<]?-?\.+-[xo>]?\s*)/,/^(?:\s*~~[\~]+\s*)/,/^(?:\s*[xo<]?--\s*)/,/^(?:\s*[xo<]?==\s*)/,/^(?:\s*[xo<]?-\.\s*)/,/^(?:\(-)/,/^(?:-\))/,/^(?:\(\[)/,/^(?:\]\))/,/^(?:\[\[)/,/^(?:\]\])/,/^(?:\[\|)/,/^(?:\[\()/,/^(?:\)\])/,/^(?:\(\(\()/,/^(?:\)\)\))/,/^(?:-)/,/^(?:\.)/,/^(?:[\_])/,/^(?:\+)/,/^(?:%)/,/^(?:=)/,/^(?:=)/,/^(?:<)/,/^(?:>)/,/^(?:\^)/,/^(?:\\\|)/,/^(?:v\b)/,/^(?:[A-Za-z]+)/,/^(?:\\\])/,/^(?:\[\/)/,/^(?:\/\])/,/^(?:\[\\)/,/^(?:[!"#$%&'*+,-.`?\\_/])/,/^(?:[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6]|[\u00F8-\u02C1\u02C6-\u02D1\u02E0-\u02E4\u02EC\u02EE\u0370-\u0374\u0376\u0377]|[\u037A-\u037D\u0386\u0388-\u038A\u038C\u038E-\u03A1\u03A3-\u03F5]|[\u03F7-\u0481\u048A-\u0527\u0531-\u0556\u0559\u0561-\u0587\u05D0-\u05EA]|[\u05F0-\u05F2\u0620-\u064A\u066E\u066F\u0671-\u06D3\u06D5\u06E5\u06E6\u06EE]|[\u06EF\u06FA-\u06FC\u06FF\u0710\u071
2-\u072F\u074D-\u07A5\u07B1\u07CA-\u07EA]|[\u07F4\u07F5\u07FA\u0800-\u0815\u081A\u0824\u0828\u0840-\u0858\u08A0]|[\u08A2-\u08AC\u0904-\u0939\u093D\u0950\u0958-\u0961\u0971-\u0977]|[\u0979-\u097F\u0985-\u098C\u098F\u0990\u0993-\u09A8\u09AA-\u09B0\u09B2]|[\u09B6-\u09B9\u09BD\u09CE\u09DC\u09DD\u09DF-\u09E1\u09F0\u09F1\u0A05-\u0A0A]|[\u0A0F\u0A10\u0A13-\u0A28\u0A2A-\u0A30\u0A32\u0A33\u0A35\u0A36\u0A38\u0A39]|[\u0A59-\u0A5C\u0A5E\u0A72-\u0A74\u0A85-\u0A8D\u0A8F-\u0A91\u0A93-\u0AA8]|[\u0AAA-\u0AB0\u0AB2\u0AB3\u0AB5-\u0AB9\u0ABD\u0AD0\u0AE0\u0AE1\u0B05-\u0B0C]|[\u0B0F\u0B10\u0B13-\u0B28\u0B2A-\u0B30\u0B32\u0B33\u0B35-\u0B39\u0B3D\u0B5C]|[\u0B5D\u0B5F-\u0B61\u0B71\u0B83\u0B85-\u0B8A\u0B8E-\u0B90\u0B92-\u0B95\u0B99]|[\u0B9A\u0B9C\u0B9E\u0B9F\u0BA3\u0BA4\u0BA8-\u0BAA\u0BAE-\u0BB9\u0BD0]|[\u0C05-\u0C0C\u0C0E-\u0C10\u0C12-\u0C28\u0C2A-\u0C33\u0C35-\u0C39\u0C3D]|[\u0C58\u0C59\u0C60\u0C61\u0C85-\u0C8C\u0C8E-\u0C90\u0C92-\u0CA8\u0CAA-\u0CB3]|[\u0CB5-\u0CB9\u0CBD\u0CDE\u0CE0\u0CE1\u0CF1\u0CF2\u0D05-\u0D0C\u0D0E-\u0D10]|[\u0D12-\u0D3A\u0D3D\u0D4E\u0D60\u0D61\u0D7A-\u0D7F\u0D85-\u0D96\u0D9A-\u0DB1]|[\u0DB3-\u0DBB\u0DBD\u0DC0-\u0DC6\u0E01-\u0E30\u0E32\u0E33\u0E40-\u0E46\u0E81]|[\u0E82\u0E84\u0E87\u0E88\u0E8A\u0E8D\u0E94-\u0E97\u0E99-\u0E9F\u0EA1-\u0EA3]|[\u0EA5\u0EA7\u0EAA\u0EAB\u0EAD-\u0EB0\u0EB2\u0EB3\u0EBD\u0EC0-\u0EC4\u0EC6]|[\u0EDC-\u0EDF\u0F00\u0F40-\u0F47\u0F49-\u0F6C\u0F88-\u0F8C\u1000-\u102A]|[\u103F\u1050-\u1055\u105A-\u105D\u1061\u1065\u1066\u106E-\u1070\u1075-\u1081]|[\u108E\u10A0-\u10C5\u10C7\u10CD\u10D0-\u10FA\u10FC-\u1248\u124A-\u124D]|[\u1250-\u1256\u1258\u125A-\u125D\u1260-\u1288\u128A-\u128D\u1290-\u12B0]|[\u12B2-\u12B5\u12B8-\u12BE\u12C0\u12C2-\u12C5\u12C8-\u12D6\u12D8-\u1310]|[\u1312-\u1315\u1318-\u135A\u1380-\u138F\u13A0-\u13F4\u1401-\u166C]|[\u166F-\u167F\u1681-\u169A\u16A0-\u16EA\u1700-\u170C\u170E-\u1711]|[\u1720-\u1731\u1740-\u1751\u1760-\u176C\u176E-\u1770\u1780-\u17B3\u17D7]|[\u17DC\u1820-\u1877\u1880-\u18A8\u18AA\u18B0-\u18F5\u1900-\u191C]|[\u1950-\u196D\u1
970-\u1974\u1980-\u19AB\u19C1-\u19C7\u1A00-\u1A16]|[\u1A20-\u1A54\u1AA7\u1B05-\u1B33\u1B45-\u1B4B\u1B83-\u1BA0\u1BAE\u1BAF]|[\u1BBA-\u1BE5\u1C00-\u1C23\u1C4D-\u1C4F\u1C5A-\u1C7D\u1CE9-\u1CEC]|[\u1CEE-\u1CF1\u1CF5\u1CF6\u1D00-\u1DBF\u1E00-\u1F15\u1F18-\u1F1D]|[\u1F20-\u1F45\u1F48-\u1F4D\u1F50-\u1F57\u1F59\u1F5B\u1F5D\u1F5F-\u1F7D]|[\u1F80-\u1FB4\u1FB6-\u1FBC\u1FBE\u1FC2-\u1FC4\u1FC6-\u1FCC\u1FD0-\u1FD3]|[\u1FD6-\u1FDB\u1FE0-\u1FEC\u1FF2-\u1FF4\u1FF6-\u1FFC\u2071\u207F]|[\u2090-\u209C\u2102\u2107\u210A-\u2113\u2115\u2119-\u211D\u2124\u2126\u2128]|[\u212A-\u212D\u212F-\u2139\u213C-\u213F\u2145-\u2149\u214E\u2183\u2184]|[\u2C00-\u2C2E\u2C30-\u2C5E\u2C60-\u2CE4\u2CEB-\u2CEE\u2CF2\u2CF3]|[\u2D00-\u2D25\u2D27\u2D2D\u2D30-\u2D67\u2D6F\u2D80-\u2D96\u2DA0-\u2DA6]|[\u2DA8-\u2DAE\u2DB0-\u2DB6\u2DB8-\u2DBE\u2DC0-\u2DC6\u2DC8-\u2DCE]|[\u2DD0-\u2DD6\u2DD8-\u2DDE\u2E2F\u3005\u3006\u3031-\u3035\u303B\u303C]|[\u3041-\u3096\u309D-\u309F\u30A1-\u30FA\u30FC-\u30FF\u3105-\u312D]|[\u3131-\u318E\u31A0-\u31BA\u31F0-\u31FF\u3400-\u4DB5\u4E00-\u9FCC]|[\uA000-\uA48C\uA4D0-\uA4FD\uA500-\uA60C\uA610-\uA61F\uA62A\uA62B]|[\uA640-\uA66E\uA67F-\uA697\uA6A0-\uA6E5\uA717-\uA71F\uA722-\uA788]|[\uA78B-\uA78E\uA790-\uA793\uA7A0-\uA7AA\uA7F8-\uA801\uA803-\uA805]|[\uA807-\uA80A\uA80C-\uA822\uA840-\uA873\uA882-\uA8B3\uA8F2-\uA8F7\uA8FB]|[\uA90A-\uA925\uA930-\uA946\uA960-\uA97C\uA984-\uA9B2\uA9CF\uAA00-\uAA28]|[\uAA40-\uAA42\uAA44-\uAA4B\uAA60-\uAA76\uAA7A\uAA80-\uAAAF\uAAB1\uAAB5]|[\uAAB6\uAAB9-\uAABD\uAAC0\uAAC2\uAADB-\uAADD\uAAE0-\uAAEA\uAAF2-\uAAF4]|[\uAB01-\uAB06\uAB09-\uAB0E\uAB11-\uAB16\uAB20-\uAB26\uAB28-\uAB2E]|[\uABC0-\uABE2\uAC00-\uD7A3\uD7B0-\uD7C6\uD7CB-\uD7FB\uF900-\uFA6D]|[\uFA70-\uFAD9\uFB00-\uFB06\uFB13-\uFB17\uFB1D\uFB1F-\uFB28\uFB2A-\uFB36]|[\uFB38-\uFB3C\uFB3E\uFB40\uFB41\uFB43\uFB44\uFB46-\uFBB1\uFBD3-\uFD3D]|[\uFD50-\uFD8F\uFD92-\uFDC7\uFDF0-\uFDFB\uFE70-\uFE74\uFE76-\uFEFC]|[\uFF21-\uFF3A\uFF41-\uFF5A\uFF66-\uFFBE\uFFC2-\uFFC7\uFFCA-\uFFCF]|[\uFFD2-\uFFD7\uFFDA-\uFFDC])/,/^(?:\|)/,/^(?
:\()/,/^(?:\))/,/^(?:\[)/,/^(?:\])/,/^(?:\{)/,/^(?:\})/,/^(?:")/,/^(?:(\r?\n)+)/,/^(?:\s)/,/^(?:$)/],conditions:{close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},callbackargs:{rules:[30,31],inclusive:!1},callbackname:{rules:[27,28,29],inclusive:!1},href:{rules:[24,25],inclusive:!1},click:{rules:[33,34],inclusive:!1},vertex:{rules:[],inclusive:!1},dir:{rules:[43,44,45,46,47,48,49,50,51,52,53],inclusive:!1},acc_descr_multiline:{rules:[12,13],inclusive:!1},acc_descr:{rules:[10],inclusive:!1},acc_title:{rules:[8],inclusive:!1},string:{rules:[15,16],inclusive:!1},INITIAL:{rules:[0,5,6,7,9,11,14,17,18,19,20,21,22,23,26,32,35,36,37,38,39,40,41,42,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111,112,113],inclusive:!0}}};return Ln}();Wi.lexer=E0;function _u(){this.yy={}}return _u.prototype=Wi,Wi.Parser=_u,new _u}();X1.parser=X1;const rrt=(t,e)=>{var r;return((r=e==null?void 0:e.flowchart)==null?void 0:r.defaultRenderer)==="dagre-wrapper"?!1:t.match(/^\s*graph/)!==null},nrt=(t,e)=>{var r;return((r=e==null?void 0:e.flowchart)==null?void 0:r.defaultRenderer)==="dagre-wrapper"&&t.match(/^\s*graph/)!==null?!0:t.match(/^\s*flowchart/)!==null},irt="flowchart-";let sM=0,o4=nt(),ar={},js=[],il=[],qi=[],K1={},l4={},Z1=0,c4=!0,Vi,Q1,J1=[];const t0=t=>pe.sanitizeText(t,o4),art=function(t,e,r){Xe.parseDirective(this,t,e,r)},e0=function(t){const e=Object.keys(ar);for(let r=0;r<e.length;r++)if(ar[e[r]].id===t)return ar[e[r]].domId;return t},srt=function(t,e,r,n,i,a,s={}){let o,l=t;typeof l>"u"||l.trim().length!==0&&(typeof ar[l]>"u"&&(ar[l]={id:l,domId:irt+l+"-"+sM,styles:[],classes:[]}),sM++,typeof e<"u"?(o4=nt(),o=t0(e.trim()),o[0]==='"'&&o[o.length-1]==='"'&&(o=o.substring(1,o.length-1)),ar[l].text=o):typeof 
ar[l].text>"u"&&(ar[l].text=t),typeof r<"u"&&(ar[l].type=r),typeof n<"u"&&n!==null&&n.forEach(function(u){ar[l].styles.push(u)}),typeof i<"u"&&i!==null&&i.forEach(function(u){ar[l].classes.push(u)}),typeof a<"u"&&(ar[l].dir=a),ar[l].props=s)},ort=function(t,e,r,n){const s={start:t,end:e,type:void 0,text:""};n=r.text,typeof n<"u"&&(s.text=t0(n.trim()),s.text[0]==='"'&&s.text[s.text.length-1]==='"'&&(s.text=s.text.substring(1,s.text.length-1))),typeof r<"u"&&(s.type=r.type,s.stroke=r.stroke,s.length=r.length),js.push(s)},lrt=function(t,e,r,n){let i,a;for(i=0;i<t.length;i++)for(a=0;a<e.length;a++)ort(t[i],e[a],r,n)},crt=function(t,e){t.forEach(function(r){r==="default"?js.defaultInterpolate=e:js[r].interpolate=e})},urt=function(t,e){t.forEach(function(r){r==="default"?js.defaultStyle=e:(Se.isSubstringInArray("fill",e)===-1&&e.push("fill:none"),js[r].style=e)})},hrt=function(t,e){typeof il[t]>"u"&&(il[t]={id:t,styles:[],textStyles:[]}),typeof e<"u"&&e!==null&&e.forEach(function(r){if(r.match("color")){const i=r.replace("fill","bgFill").replace("color","fill");il[t].textStyles.push(i)}il[t].styles.push(r)})},frt=function(t){Vi=t,Vi.match(/.*</)&&(Vi="RL"),Vi.match(/.*\^/)&&(Vi="BT"),Vi.match(/.*>/)&&(Vi="LR"),Vi.match(/.*v/)&&(Vi="TB")},u4=function(t,e){t.split(",").forEach(function(r){let n=r;typeof ar[n]<"u"&&ar[n].classes.push(e),typeof K1[n]<"u"&&K1[n].classes.push(e)})},drt=function(t,e){t.split(",").forEach(function(r){typeof e<"u"&&(l4[Q1==="gen-1"?e0(r):r]=t0(e))})},prt=function(t,e,r){let n=e0(t);if(nt().securityLevel!=="loose"||typeof e>"u")return;let i=[];if(typeof r=="string"){i=r.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/);for(let a=0;a<i.length;a++){let s=i[a].trim();s.charAt(0)==='"'&&s.charAt(s.length-1)==='"'&&(s=s.substr(1,s.length-2)),i[a]=s}}i.length===0&&i.push(t),typeof ar[t]<"u"&&(ar[t].haveCallback=!0,J1.push(function(){const 
a=document.querySelector(`[id="${n}"]`);a!==null&&a.addEventListener("click",function(){Se.runFunc(e,...i)},!1)}))},grt=function(t,e,r){t.split(",").forEach(function(n){typeof ar[n]<"u"&&(ar[n].link=Se.formatUrl(e,o4),ar[n].linkTarget=r)}),u4(t,"clickable")},yrt=function(t){return l4[t]},mrt=function(t,e,r){t.split(",").forEach(function(n){prt(n,e,r)}),u4(t,"clickable")},brt=function(t){J1.forEach(function(e){e(t)})},_rt=function(){return Vi.trim()},vrt=function(){return ar},xrt=function(){return js},krt=function(){return il},oM=function(t){let e=St(".mermaidTooltip");(e._groups||e)[0][0]===null&&(e=St("body").append("div").attr("class","mermaidTooltip").style("opacity",0)),St(t).select("svg").selectAll("g.node").on("mouseover",function(){const i=St(this);if(i.attr("title")===null)return;const s=this.getBoundingClientRect();e.transition().duration(200).style("opacity",".9"),e.text(i.attr("title")).style("left",window.scrollX+s.left+(s.right-s.left)/2+"px").style("top",window.scrollY+s.top-14+document.body.scrollTop+"px"),e.html(e.html().replace(/<br\/>/g,"<br/>")),i.classed("hover",!0)}).on("mouseout",function(){e.transition().duration(500).style("opacity",0),St(this).classed("hover",!1)})};J1.push(oM);const wrt=function(t="gen-1"){ar={},il={},js=[],J1=[oM],qi=[],K1={},Z1=0,l4=[],c4=!0,Q1=t,ci()},Trt=t=>{Q1=t||"gen-1"},Ert=function(){return"fill:#ffa;stroke: #f66; stroke-width: 3px; stroke-dasharray: 5, 5;fill:#ffa;stroke: #666;"},Crt=function(t,e,r){let n=t.trim(),i=r.trim();n===i&&i.match(/\s/)&&(n=void 0);function a(h){const d={boolean:{},number:{},string:{}},f=[];let p;return{nodeList:h.filter(function(_){const y=typeof _;return _.stmt&&_.stmt==="dir"?(p=_.value,!1):_.trim()===""?!1:y in d?d[y].hasOwnProperty(_)?!1:d[y][_]=!0:f.indexOf(_)>=0?!1:f.push(_)}),dir:p}}let s=[];const{nodeList:o,dir:l}=a(s.concat.apply(s,e));if(s=o,Q1==="gen-1")for(let h=0;h<s.length;h++)s[h]=e0(s[h]);n=n||"subGraph"+Z1,i=i||"",i=t0(i),Z1=Z1+1;const 
u={id:n,nodes:s,title:i.trim(),classes:[],dir:l};return H.info("Adding",u.id,u.nodes,u.dir),u.nodes=hM(u,qi).nodes,qi.push(u),K1[n]=u,n},Srt=function(t){for(let e=0;e<qi.length;e++)if(qi[e].id===t)return e;return-1};let ru=-1;const lM=[],cM=function(t,e){const r=qi[e].nodes;if(ru=ru+1,ru>2e3)return;if(lM[ru]=e,qi[e].id===t)return{result:!0,count:0};let n=0,i=1;for(;n<r.length;){const a=Srt(r[n]);if(a>=0){const s=cM(t,a);if(s.result)return{result:!0,count:i+s.count};i=i+s.count}n=n+1}return{result:!1,count:i}},Art=function(t){return lM[t]},Mrt=function(){ru=-1,qi.length>0&&cM("none",qi.length-1)},Lrt=function(){return qi},Rrt=()=>c4?(c4=!1,!0):!1,Irt=t=>{let e=t.trim(),r="arrow_open";switch(e[0]){case"<":r="arrow_point",e=e.slice(1);break;case"x":r="arrow_cross",e=e.slice(1);break;case"o":r="arrow_circle",e=e.slice(1);break}let n="normal";return e.indexOf("=")!==-1&&(n="thick"),e.indexOf(".")!==-1&&(n="dotted"),{type:r,stroke:n}},Nrt=(t,e)=>{const r=e.length;let n=0;for(let i=0;i<r;++i)e[i]===t&&++n;return n},Brt=t=>{const e=t.trim();let r=e.slice(0,-1),n="arrow_open";switch(e.slice(-1)){case"x":n="arrow_cross",e[0]==="x"&&(n="double_"+n,r=r.slice(1));break;case">":n="arrow_point",e[0]==="<"&&(n="double_"+n,r=r.slice(1));break;case"o":n="arrow_circle",e[0]==="o"&&(n="double_"+n,r=r.slice(1));break}let i="normal",a=r.length-1;r[0]==="="&&(i="thick"),r[0]==="~"&&(i="invisible");let s=Nrt(".",r);return s&&(i="dotted",a=s),{type:n,stroke:i,length:a}},Drt=(t,e)=>{const r=Brt(t);let n;if(e){if(n=Irt(e),n.stroke!==r.stroke)return{type:"INVALID",stroke:"INVALID"};if(n.type==="arrow_open")n.type=r.type;else{if(n.type!==r.type)return{type:"INVALID",stroke:"INVALID"};n.type="double_"+n.type}return n.type==="double_arrow"&&(n.type="double_arrow_point"),n.length=r.length,n}return r},uM=(t,e)=>{let r=!1;return t.forEach(n=>{n.nodes.indexOf(e)>=0&&(r=!0)}),r},hM=(t,e)=>{const r=[];return 
t.nodes.forEach((n,i)=>{uM(e,n)||r.push(t.nodes[i])}),{nodes:r}},fa={parseDirective:art,defaultConfig:()=>Xo.flowchart,setAccTitle:Yn,getAccTitle:ui,getAccDescription:fi,setAccDescription:hi,addVertex:srt,lookUpDomId:e0,addLink:lrt,updateLinkInterpolate:crt,updateLink:urt,addClass:hrt,setDirection:frt,setClass:u4,setTooltip:drt,getTooltip:yrt,setClickEvent:mrt,setLink:grt,bindFunctions:brt,getDirection:_rt,getVertices:vrt,getEdges:xrt,getClasses:krt,clear:wrt,setGen:Trt,defaultStyle:Ert,addSubGraph:Crt,getDepthFirstPos:Art,indexNodes:Mrt,getSubGraphs:Lrt,destructLink:Drt,lex:{firstGraph:Rrt},exists:uM,makeUniq:hM};var r0;if(typeof fn=="function")try{r0=cr}catch{}r0||(r0=window.graphlib);var Ort=r0,n0;if(typeof fn=="function")try{n0=OA()}catch{}n0||(n0=window.dagre);var fM=n0,dM=Frt;function Frt(t,e){return t.intersect(e)}var h4=Prt;function Prt(t,e,r,n){var i=t.x,a=t.y,s=i-n.x,o=a-n.y,l=Math.sqrt(e*e*o*o+r*r*s*s),u=Math.abs(e*r*s/l);n.x<i&&(u=-u);var h=Math.abs(e*r*o/l);return n.y<a&&(h=-h),{x:i+u,y:a+h}}var qrt=h4,pM=Vrt;function Vrt(t,e,r){return qrt(t,e,e,r)}var zrt=Yrt;function Yrt(t,e,r,n){var i,a,s,o,l,u,h,d,f,p,m,_,y,b,x;if(i=e.y-t.y,s=t.x-e.x,l=e.x*t.y-t.x*e.y,f=i*r.x+s*r.y+l,p=i*n.x+s*n.y+l,!(f!==0&&p!==0&&gM(f,p))&&(a=n.y-r.y,o=r.x-n.x,u=n.x*r.y-r.x*n.y,h=a*t.x+o*t.y+u,d=a*e.x+o*e.y+u,!(h!==0&&d!==0&&gM(h,d))&&(m=i*o-a*s,m!==0)))return _=Math.abs(m/2),y=s*u-o*l,b=y<0?(y-_)/m:(y+_)/m,y=a*l-i*u,x=y<0?(y-_)/m:(y+_)/m,{x:b,y:x}}function gM(t,e){return t*e>0}var Urt=zrt,yM=Wrt;function Wrt(t,e,r){var n=t.x,i=t.y,a=[],s=Number.POSITIVE_INFINITY,o=Number.POSITIVE_INFINITY;e.forEach(function(m){s=Math.min(s,m.x),o=Math.min(o,m.y)});for(var l=n-t.width/2-s,u=i-t.height/2-o,h=0;h<e.length;h++){var d=e[h],f=e[h<e.length-1?h+1:0],p=Urt(t,r,{x:l+d.x,y:u+d.y},{x:l+f.x,y:u+f.y});p&&a.push(p)}return a.length?(a.length>1&&a.sort(function(m,_){var y=m.x-r.x,b=m.y-r.y,x=Math.sqrt(y*y+b*b),k=_.x-r.x,T=_.y-r.y,C=Math.sqrt(k*k+T*T);return 
x<C?-1:x===C?0:1}),a[0]):(console.log("NO INTERSECTION FOUND, RETURN NODE CENTER",t),t)}var mM=Hrt;function Hrt(t,e){var r=t.x,n=t.y,i=e.x-r,a=e.y-n,s=t.width/2,o=t.height/2,l,u;return Math.abs(a)*s>Math.abs(i)*o?(a<0&&(o=-o),l=a===0?0:o*i/a,u=o):(i<0&&(s=-s),l=s,u=i===0?0:s*a/i),{x:r+l,y:n+u}}var Grt={node:dM,circle:pM,ellipse:h4,polygon:yM,rect:mM},i0;if(typeof fn=="function")try{i0={defaults:oS(),each:am(),isFunction:Yo,isPlainObject:AS(),pick:GS(),has:$m(),range:KS(),uniqueId:nA()}}catch{}i0||(i0=window._);var al=i0;const jrt=wn(CH);var nu;if(!nu&&typeof fn=="function")try{nu=jrt}catch{}nu||(nu=window.d3);var es=nu,bM=al,Gn={isSubgraph:$rt,edgeToId:Xrt,applyStyle:Zrt,applyClass:Qrt,applyTransition:Jrt};function $rt(t,e){return!!t.children(e).length}function Xrt(t){return f4(t.v)+":"+f4(t.w)+":"+f4(t.name)}var Krt=/:/g;function f4(t){return t?String(t).replace(Krt,"\\:"):""}function Zrt(t,e){e&&t.attr("style",e)}function Qrt(t,e,r){e&&t.attr("class",e).attr("class",r+" "+t.attr("class"))}function Jrt(t,e){var r=e.graph();if(bM.isPlainObject(r)){var n=r.transition;if(bM.isFunction(n))return n(t)}return t}var d4,_M;function tnt(){if(_M)return d4;_M=1;var t=Gn;d4=e;function e(n,i){for(var a=n.append("text"),s=r(i.label).split(` -`),o=0;o<s.length;o++)a.append("tspan").attr("xml:space","preserve").attr("dy","1em").attr("x","1").text(s[o]);return t.applyStyle(a,i.labelStyle),a}function r(n){for(var i="",a=!1,s,o=0;o<n.length;++o)if(s=n[o],a){switch(s){case"n":i+=` -`;break;default:i+=s}a=!1}else s==="\\"?a=!0:i+=s;return i}return d4}var ent=Gn,p4=rnt;function rnt(t,e){var r=t.append("foreignObject").attr("width","100000"),n=r.append("xhtml:div");n.attr("xmlns","http://www.w3.org/1999/xhtml");var i=e.label;switch(typeof i){case"function":n.insert(i);break;case"object":n.insert(function(){return i});break;default:n.html(i)}ent.applyStyle(n,e.labelStyle),n.style("display","inline-block"),n.style("white-space","nowrap");var a=n.node().getBoundingClientRect();return 
r.attr("width",a.width).attr("height",a.height),r}var g4,vM;function nnt(){if(vM)return g4;vM=1;var t=Gn;g4=e;function e(r,n){var i=r;return i.node().appendChild(n.label),t.applyStyle(i,n.labelStyle),i}return g4}var y4,xM;function m4(){if(xM)return y4;xM=1;var t=tnt(),e=p4,r=nnt();y4=n;function n(i,a,s){var o=a.label,l=i.append("g");a.labelType==="svg"?r(l,a):typeof o!="string"||a.labelType==="html"?e(l,a):t(l,a);var u=l.node().getBBox(),h;switch(s){case"top":h=-a.height/2;break;case"bottom":h=a.height/2-u.height;break;default:h=-u.height/2}return l.attr("transform","translate("+-u.width/2+","+h+")"),l}return y4}var b4,kM;function int(){if(kM)return b4;kM=1;var t=al,e=m4(),r=Gn,n=es;b4=i;function i(a,s,o){var l=s.nodes().filter(function(d){return!r.isSubgraph(s,d)}),u=a.selectAll("g.node").data(l,function(d){return d}).classed("update",!0);u.exit().remove(),u.enter().append("g").attr("class","node").style("opacity",0),u=a.selectAll("g.node"),u.each(function(d){var f=s.node(d),p=n.select(this);r.applyClass(p,f.class,(p.classed("update")?"update ":"")+"node"),p.select("g.label").remove();var m=p.append("g").attr("class","label"),_=e(m,f),y=o[f.shape],b=t.pick(_.node().getBBox(),"width","height");f.elem=this,f.id&&p.attr("id",f.id),f.labelId&&m.attr("id",f.labelId),t.has(f,"width")&&(b.width=f.width),t.has(f,"height")&&(b.height=f.height),b.width+=f.paddingLeft+f.paddingRight,b.height+=f.paddingTop+f.paddingBottom,m.attr("transform","translate("+(f.paddingLeft-f.paddingRight)/2+","+(f.paddingTop-f.paddingBottom)/2+")");var x=n.select(this);x.select(".label-container").remove();var k=y(x,b,f).classed("label-container",!0);r.applyStyle(k,f.style);var T=k.node().getBBox();f.width=T.width,f.height=T.height});var h;return u.exit?h=u.exit():h=u.selectAll(null),r.applyTransition(h,s).style("opacity",0).remove(),u}return b4}var _4,wM;function ant(){if(wM)return _4;wM=1;var t=Gn,e=es,r=m4();_4=n;function n(i,a){var s=a.nodes().filter(function(u){return 
t.isSubgraph(a,u)}),o=i.selectAll("g.cluster").data(s,function(u){return u});o.selectAll("*").remove(),o.enter().append("g").attr("class","cluster").attr("id",function(u){var h=a.node(u);return h.id}).style("opacity",0),o=i.selectAll("g.cluster"),t.applyTransition(o,a).style("opacity",1),o.each(function(u){var h=a.node(u),d=e.select(this);e.select(this).append("rect");var f=d.append("g").attr("class","label");r(f,h,h.clusterLabelPos)}),o.selectAll("rect").each(function(u){var h=a.node(u),d=e.select(this);t.applyStyle(d,h.style)});var l;return o.exit?l=o.exit():l=o.selectAll(null),t.applyTransition(l,a).style("opacity",0).remove(),o}return _4}var v4,TM;function snt(){if(TM)return v4;TM=1;var t=al,e=m4(),r=Gn,n=es;v4=i;function i(a,s){var o=a.selectAll("g.edgeLabel").data(s.edges(),function(u){return r.edgeToId(u)}).classed("update",!0);o.exit().remove(),o.enter().append("g").classed("edgeLabel",!0).style("opacity",0),o=a.selectAll("g.edgeLabel"),o.each(function(u){var h=n.select(this);h.select(".label").remove();var d=s.edge(u),f=e(h,s.edge(u),0,0).classed("label",!0),p=f.node().getBBox();d.labelId&&f.attr("id",d.labelId),t.has(d,"width")||(d.width=p.width),t.has(d,"height")||(d.height=p.height)});var l;return o.exit?l=o.exit():l=o.selectAll(null),r.applyTransition(l,s).style("opacity",0).remove(),o}return v4}var x4,EM;function ont(){if(EM)return x4;EM=1;var t=al,e=dM,r=Gn,n=es;x4=i;function i(d,f,p){var m=d.selectAll("g.edgePath").data(f.edges(),function(b){return r.edgeToId(b)}).classed("update",!0),_=u(m,f);h(m,f);var y=m.merge!==void 0?m.merge(_):m;return r.applyTransition(y,f).style("opacity",1),y.each(function(b){var x=n.select(this),k=f.edge(b);k.elem=this,k.id&&x.attr("id",k.id),r.applyClass(x,k.class,(x.classed("update")?"update ":"")+"edgePath")}),y.selectAll("path.path").each(function(b){var x=f.edge(b);x.arrowheadId=t.uniqueId("arrowhead");var 
k=n.select(this).attr("marker-end",function(){return"url("+a(location.href,x.arrowheadId)+")"}).style("fill","none");r.applyTransition(k,f).attr("d",function(T){return s(f,T)}),r.applyStyle(k,x.style)}),y.selectAll("defs *").remove(),y.selectAll("defs").each(function(b){var x=f.edge(b),k=p[x.arrowhead];k(n.select(this),x.arrowheadId,x,"arrowhead")}),y}function a(d,f){var p=d.split("#")[0];return p+"#"+f}function s(d,f){var p=d.edge(f),m=d.node(f.v),_=d.node(f.w),y=p.points.slice(1,p.points.length-1);return y.unshift(e(m,y[0])),y.push(e(_,y[y.length-1])),o(p,y)}function o(d,f){var p=(n.line||n.svg.line)().x(function(m){return m.x}).y(function(m){return m.y});return(p.curve||p.interpolate)(d.curve),p(f)}function l(d){var f=d.getBBox(),p=d.ownerSVGElement.getScreenCTM().inverse().multiply(d.getScreenCTM()).translate(f.width/2,f.height/2);return{x:p.e,y:p.f}}function u(d,f){var p=d.enter().append("g").attr("class","edgePath").style("opacity",0);return p.append("path").attr("class","path").attr("d",function(m){var _=f.edge(m),y=f.node(m.v).elem,b=t.range(_.points.length).map(function(){return l(y)});return o(_,b)}),p.append("defs"),p}function h(d,f){var p=d.exit();r.applyTransition(p,f).style("opacity",0).remove()}return x4}var k4,CM;function lnt(){if(CM)return k4;CM=1;var t=Gn,e=es;k4=r;function r(n,i){var a=n.filter(function(){return!e.select(this).classed("update")});function s(o){var l=i.node(o);return"translate("+l.x+","+l.y+")"}a.attr("transform",s),t.applyTransition(n,i).style("opacity",1).attr("transform",s)}return k4}var w4,SM;function cnt(){if(SM)return w4;SM=1;var t=Gn,e=es,r=al;w4=n;function n(i,a){var s=i.filter(function(){return!e.select(this).classed("update")});function o(l){var u=a.edge(l);return r.has(u,"x")?"translate("+u.x+","+u.y+")":""}s.attr("transform",o),t.applyTransition(i,a).style("opacity",1).attr("transform",o)}return w4}var T4,AM;function unt(){if(AM)return T4;AM=1;var t=Gn,e=es;T4=r;function r(n,i){var 
a=n.filter(function(){return!e.select(this).classed("update")});function s(o){var l=i.node(o);return"translate("+l.x+","+l.y+")"}a.attr("transform",s),t.applyTransition(n,i).style("opacity",1).attr("transform",s),t.applyTransition(a.selectAll("rect"),i).attr("width",function(o){return i.node(o).width}).attr("height",function(o){return i.node(o).height}).attr("x",function(o){var l=i.node(o);return-l.width/2}).attr("y",function(o){var l=i.node(o);return-l.height/2})}return T4}var E4,MM;function hnt(){if(MM)return E4;MM=1;var t=mM,e=h4,r=pM,n=yM;E4={rect:i,ellipse:a,circle:s,diamond:o};function i(l,u,h){var d=l.insert("rect",":first-child").attr("rx",h.rx).attr("ry",h.ry).attr("x",-u.width/2).attr("y",-u.height/2).attr("width",u.width).attr("height",u.height);return h.intersect=function(f){return t(h,f)},d}function a(l,u,h){var d=u.width/2,f=u.height/2,p=l.insert("ellipse",":first-child").attr("x",-u.width/2).attr("y",-u.height/2).attr("rx",d).attr("ry",f);return h.intersect=function(m){return e(h,d,f,m)},p}function s(l,u,h){var d=Math.max(u.width,u.height)/2,f=l.insert("circle",":first-child").attr("x",-u.width/2).attr("y",-u.height/2).attr("r",d);return h.intersect=function(p){return r(h,d,p)},f}function o(l,u,h){var d=u.width*Math.SQRT2/2,f=u.height*Math.SQRT2/2,p=[{x:0,y:-f},{x:-d,y:0},{x:0,y:f},{x:d,y:0}],m=l.insert("polygon",":first-child").attr("points",p.map(function(_){return _.x+","+_.y}).join(" "));return h.intersect=function(_){return n(h,p,_)},m}return E4}var C4,LM;function fnt(){if(LM)return C4;LM=1;var t=Gn;C4={default:e,normal:e,vee:r,undirected:n};function e(i,a,s,o){var l=i.append("marker").attr("id",a).attr("viewBox","0 0 10 10").attr("refX",9).attr("refY",5).attr("markerUnits","strokeWidth").attr("markerWidth",8).attr("markerHeight",6).attr("orient","auto"),u=l.append("path").attr("d","M 0 0 L 10 5 L 0 10 z").style("stroke-width",1).style("stroke-dasharray","1,0");t.applyStyle(u,s[o+"Style"]),s[o+"Class"]&&u.attr("class",s[o+"Class"])}function 
r(i,a,s,o){var l=i.append("marker").attr("id",a).attr("viewBox","0 0 10 10").attr("refX",9).attr("refY",5).attr("markerUnits","strokeWidth").attr("markerWidth",8).attr("markerHeight",6).attr("orient","auto"),u=l.append("path").attr("d","M 0 0 L 10 5 L 0 10 L 4 5 z").style("stroke-width",1).style("stroke-dasharray","1,0");t.applyStyle(u,s[o+"Style"]),s[o+"Class"]&&u.attr("class",s[o+"Class"])}function n(i,a,s,o){var l=i.append("marker").attr("id",a).attr("viewBox","0 0 10 10").attr("refX",9).attr("refY",5).attr("markerUnits","strokeWidth").attr("markerWidth",8).attr("markerHeight",6).attr("orient","auto"),u=l.append("path").attr("d","M 0 5 L 10 5").style("stroke-width",1).style("stroke-dasharray","1,0");t.applyStyle(u,s[o+"Style"]),s[o+"Class"]&&u.attr("class",s[o+"Class"])}return C4}var Ur=al,dnt=es,pnt=fM.layout,gnt=ynt;function ynt(){var t=int(),e=ant(),r=snt(),n=ont(),i=lnt(),a=cnt(),s=unt(),o=hnt(),l=fnt(),u=function(h,d){_nt(d);var f=iu(h,"output"),p=iu(f,"clusters"),m=iu(f,"edgePaths"),_=r(iu(f,"edgeLabels"),d),y=t(iu(f,"nodes"),d,o);pnt(d),i(y,d),a(_,d),n(m,d,l);var b=e(p,d);s(b,d),vnt(d)};return u.createNodes=function(h){return arguments.length?(t=h,u):t},u.createClusters=function(h){return arguments.length?(e=h,u):e},u.createEdgeLabels=function(h){return arguments.length?(r=h,u):r},u.createEdgePaths=function(h){return arguments.length?(n=h,u):n},u.shapes=function(h){return arguments.length?(o=h,u):o},u.arrows=function(h){return arguments.length?(l=h,u):l},u}var mnt={paddingLeft:10,paddingRight:10,paddingTop:10,paddingBottom:10,rx:0,ry:0,shape:"rect"},bnt={arrowhead:"normal",curve:dnt.curveLinear};function _nt(t){t.nodes().forEach(function(e){var 
r=t.node(e);!Ur.has(r,"label")&&!t.children(e).length&&(r.label=e),Ur.has(r,"paddingX")&&Ur.defaults(r,{paddingLeft:r.paddingX,paddingRight:r.paddingX}),Ur.has(r,"paddingY")&&Ur.defaults(r,{paddingTop:r.paddingY,paddingBottom:r.paddingY}),Ur.has(r,"padding")&&Ur.defaults(r,{paddingLeft:r.padding,paddingRight:r.padding,paddingTop:r.padding,paddingBottom:r.padding}),Ur.defaults(r,mnt),Ur.each(["paddingLeft","paddingRight","paddingTop","paddingBottom"],function(n){r[n]=Number(r[n])}),Ur.has(r,"width")&&(r._prevWidth=r.width),Ur.has(r,"height")&&(r._prevHeight=r.height)}),t.edges().forEach(function(e){var r=t.edge(e);Ur.has(r,"label")||(r.label=""),Ur.defaults(r,bnt)})}function vnt(t){Ur.each(t.nodes(),function(e){var r=t.node(e);Ur.has(r,"_prevWidth")?r.width=r._prevWidth:delete r.width,Ur.has(r,"_prevHeight")?r.height=r._prevHeight:delete r.height,delete r._prevWidth,delete r._prevHeight})}function iu(t,e){var r=t.select("g."+e);return r.empty()&&(r=t.append("g").attr("class",e)),r}var xnt="0.6.4";/** - * @license - * Copyright (c) 2012-2013 Chris Pettitt - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to deal - * in the Software without restriction, including without limitation the rights - * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - * copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN - * THE SOFTWARE. - */var An={graphlib:Ort,dagre:fM,intersect:Grt,render:gnt,util:Gn,version:xnt};function RM(t,e,r){const n=e.width,i=e.height,a=(n+i)*.9,s=[{x:a/2,y:0},{x:a,y:-a/2},{x:a/2,y:-a},{x:0,y:-a/2}],o=da(t,a,a,s);return r.intersect=function(l){return An.intersect.polygon(r,s,l)},o}function IM(t,e,r){const i=e.height,a=i/4,s=e.width+2*a,o=[{x:a,y:0},{x:s-a,y:0},{x:s,y:-i/2},{x:s-a,y:-i},{x:a,y:-i},{x:0,y:-i/2}],l=da(t,s,i,o);return r.intersect=function(u){return An.intersect.polygon(r,o,u)},l}function NM(t,e,r){const n=e.width,i=e.height,a=[{x:-i/2,y:0},{x:n,y:0},{x:n,y:-i},{x:-i/2,y:-i},{x:0,y:-i/2}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function BM(t,e,r){const n=e.width,i=e.height,a=[{x:-2*i/6,y:0},{x:n-i/6,y:0},{x:n+2*i/6,y:-i},{x:i/6,y:-i}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function DM(t,e,r){const n=e.width,i=e.height,a=[{x:2*i/6,y:0},{x:n+i/6,y:0},{x:n-2*i/6,y:-i},{x:-i/6,y:-i}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function OM(t,e,r){const n=e.width,i=e.height,a=[{x:-2*i/6,y:0},{x:n+2*i/6,y:0},{x:n-i/6,y:-i},{x:i/6,y:-i}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function FM(t,e,r){const n=e.width,i=e.height,a=[{x:i/6,y:0},{x:n-i/6,y:0},{x:n+2*i/6,y:-i},{x:-2*i/6,y:-i}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function PM(t,e,r){const n=e.width,i=e.height,a=[{x:0,y:0},{x:n+i/2,y:0},{x:n,y:-i/2},{x:n+i/2,y:-i},{x:0,y:-i}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function qM(t,e,r){const 
n=e.height,i=e.width+n/4,a=t.insert("rect",":first-child").attr("rx",n/2).attr("ry",n/2).attr("x",-i/2).attr("y",-n/2).attr("width",i).attr("height",n);return r.intersect=function(s){return An.intersect.rect(r,s)},a}function VM(t,e,r){const n=e.width,i=e.height,a=[{x:0,y:0},{x:n,y:0},{x:n,y:-i},{x:0,y:-i},{x:0,y:0},{x:-8,y:0},{x:n+8,y:0},{x:n+8,y:-i},{x:-8,y:-i},{x:-8,y:0}],s=da(t,n,i,a);return r.intersect=function(o){return An.intersect.polygon(r,a,o)},s}function zM(t,e,r){const n=e.width,i=n/2,a=i/(2.5+n/50),s=e.height+a,o="M 0,"+a+" a "+i+","+a+" 0,0,0 "+n+" 0 a "+i+","+a+" 0,0,0 "+-n+" 0 l 0,"+s+" a "+i+","+a+" 0,0,0 "+n+" 0 l 0,"+-s,l=t.attr("label-offset-y",a).insert("path",":first-child").attr("d",o).attr("transform","translate("+-n/2+","+-(s/2+a)+")");return r.intersect=function(u){const h=An.intersect.rect(r,u),d=h.x-r.x;if(i!=0&&(Math.abs(d)<r.width/2||Math.abs(d)==r.width/2&&Math.abs(h.y-r.y)>r.height/2-a)){let f=a*a*(1-d*d/(i*i));f!=0&&(f=Math.sqrt(f)),f=a-f,u.y-r.y>0&&(f=-f),h.y+=f}return h},l}function knt(t){t.shapes().question=RM,t.shapes().hexagon=IM,t.shapes().stadium=qM,t.shapes().subroutine=VM,t.shapes().cylinder=zM,t.shapes().rect_left_inv_arrow=NM,t.shapes().lean_right=BM,t.shapes().lean_left=DM,t.shapes().trapezoid=OM,t.shapes().inv_trapezoid=FM,t.shapes().rect_right_inv_arrow=PM}function wnt(t){t({question:RM}),t({hexagon:IM}),t({stadium:qM}),t({subroutine:VM}),t({cylinder:zM}),t({rect_left_inv_arrow:NM}),t({lean_right:BM}),t({lean_left:DM}),t({trapezoid:OM}),t({inv_trapezoid:FM}),t({rect_right_inv_arrow:PM})}function da(t,e,r,n){return t.insert("polygon",":first-child").attr("points",n.map(function(i){return i.x+","+i.y}).join(" ")).attr("transform","translate("+-e/2+","+r/2+")")}const Tnt={addToRender:knt,addToRenderV2:wnt},YM={},Ent=function(t){const e=Object.keys(t);for(let r=0;r<e.length;r++)YM[e[r]]=t[e[r]]},UM=function(t,e,r,n,i,a){const 
s=n?n.select(`[id="${r}"]`):St(`[id="${r}"]`),o=i||document;Object.keys(t).forEach(function(u){const h=t[u];let d="default";h.classes.length>0&&(d=h.classes.join(" "));const f=Ka(h.styles);let p=h.text!==void 0?h.text:h.id,m;if(Mr(nt().flowchart.htmlLabels)){const b={label:p.replace(/fa[lrsb]?:fa-[\w-]+/g,x=>`<i class='${x.replace(":"," ")}'></i>`)};m=p4(s,b).node(),m.parentNode.removeChild(m)}else{const b=o.createElementNS("http://www.w3.org/2000/svg","text");b.setAttribute("style",f.labelStyle.replace("color:","fill:"));const x=p.split(pe.lineBreakRegex);for(let k=0;k<x.length;k++){const T=o.createElementNS("http://www.w3.org/2000/svg","tspan");T.setAttributeNS("http://www.w3.org/XML/1998/namespace","xml:space","preserve"),T.setAttribute("dy","1em"),T.setAttribute("x","1"),T.textContent=x[k],b.appendChild(T)}m=b}let _=0,y="";switch(h.type){case"round":_=5,y="rect";break;case"square":y="rect";break;case"diamond":y="question";break;case"hexagon":y="hexagon";break;case"odd":y="rect_left_inv_arrow";break;case"lean_right":y="lean_right";break;case"lean_left":y="lean_left";break;case"trapezoid":y="trapezoid";break;case"inv_trapezoid":y="inv_trapezoid";break;case"odd_right":y="rect_left_inv_arrow";break;case"circle":y="circle";break;case"ellipse":y="ellipse";break;case"stadium":y="stadium";break;case"subroutine":y="subroutine";break;case"cylinder":y="cylinder";break;case"group":y="rect";break;default:y="rect"}H.warn("Adding node",h.id,h.domId),e.setNode(a.db.lookUpDomId(h.id),{labelType:"svg",labelStyle:f.labelStyle,shape:y,label:m,rx:_,ry:_,class:d,style:f.style,id:a.db.lookUpDomId(h.id)})})},WM=function(t,e,r){let n=0,i,a;if(typeof t.defaultStyle<"u"){const s=Ka(t.defaultStyle);i=s.style,a=s.labelStyle}t.forEach(function(s){n++;var o="L-"+s.start+"-"+s.end,l="LS-"+s.start,u="LE-"+s.end;const h={};s.type==="arrow_open"?h.arrowhead="none":h.arrowhead="normal";let d="",f="";if(typeof s.style<"u"){const p=Ka(s.style);d=p.style,f=p.labelStyle}else
/* NOTE(review): minified generated bundle — do not hand-edit; regenerate from the
   original source. This region appears to be a flowchart renderer (diagram library).
   WM maps parsed edges onto a graph: the switch below selects an SVG stroke style
   from the edge "stroke" kind ("normal"/"dotted"/"thick"), and s.interpolate /
   t.defaultInterpolate choose the curve via Ni(...). */
switch(s.stroke){case"normal":d="fill:none",typeof i<"u"&&(d=i),typeof a<"u"&&(f=a);break;case"dotted":d="fill:none;stroke-width:2px;stroke-dasharray:3;";break;case"thick":d=" stroke-width: 3.5px;fill:none";break}h.style=d,h.labelStyle=f,typeof s.interpolate<"u"?h.curve=Ni(s.interpolate,yn):typeof t.defaultInterpolate<"u"?h.curve=Ni(t.defaultInterpolate,yn):h.curve=Ni(YM.curve,yn),typeof s.text>"u"?typeof s.style<"u"&&(h.arrowheadStyle="fill: #333"):(h.arrowheadStyle="fill: #333",h.labelpos="c",Mr(nt().flowchart.htmlLabels)?(h.labelType="html",h.label=`<span id="L-${o}" class="edgeLabel L-${l}' L-${u}" style="${h.labelStyle}">${s.text.replace(/fa[lrsb]?:fa-[\w-]+/g,p=>`<i class='${p.replace(":"," ")}'></i>`)}</span>`):(h.labelType="text",h.label=s.text.replace(pe.lineBreakRegex,`
-`),typeof s.style>"u"&&(h.style=h.style||"stroke: #333; stroke-width: 1.5px;fill:none"),h.labelStyle=h.labelStyle.replace("color:","fill:"))),h.id=o,h.class=l+" "+u,h.minlen=s.length||1,e.setEdge(r.db.lookUpDomId(s.start),r.db.lookUpDomId(s.end),h,n)})},S4={setConf:Ent,addVertices:UM,addEdges:WM,getClasses:function(t,e){H.info("Extracting classes"),e.db.clear();try{return e.parse(t),e.db.getClasses()}catch{return}},draw:function(t,e,r,n){H.info("Drawing flowchart"),n.db.clear();const{securityLevel:i,flowchart:a}=nt();let s;i==="sandbox"&&(s=St("#i"+e));const o=St(i==="sandbox"?s.nodes()[0].contentDocument.body:"body"),l=i==="sandbox"?s.nodes()[0].contentDocument:document;try{n.parser.parse(t)}catch{H.debug("Parsing failed")}let u=n.db.getDirection();typeof u>"u"&&(u="TD");const h=a.nodeSpacing||50,d=a.rankSpacing||50,f=new cr.Graph({multigraph:!0,compound:!0}).setGraph({rankdir:u,nodesep:h,ranksep:d,marginx:8,marginy:8}).setDefaultEdgeLabel(function(){return{}});let p;const m=n.db.getSubGraphs();for(let S=m.length-1;S>=0;S--)p=m[S],n.db.addVertex(p.id,p.title,"group",void 0,p.classes);const _=n.db.getVertices();H.warn("Get vertices",_);const y=n.db.getEdges();let
/* S4.draw (continues below): wires each subgraph's member nodes to their parent
   cluster via f.setParent, renders with An.render (custom "none"/"normal" arrow
   markers), then positions subgraph title labels from the rendered rect geometry. */
b=0;for(b=m.length-1;b>=0;b--){p=m[b],Nu("cluster").append("text");for(let S=0;S<p.nodes.length;S++)H.warn("Setting subgraph",p.nodes[S],n.db.lookUpDomId(p.nodes[S]),n.db.lookUpDomId(p.id)),f.setParent(n.db.lookUpDomId(p.nodes[S]),n.db.lookUpDomId(p.id))}UM(_,f,e,o,l,n),WM(y,f,n);const x=An.render,k=new x;Tnt.addToRender(k),k.arrows().none=function(R,A,L,v){const w=R.append("marker").attr("id",A).attr("viewBox","0 0 10 10").attr("refX",9).attr("refY",5).attr("markerUnits","strokeWidth").attr("markerWidth",8).attr("markerHeight",6).attr("orient","auto").append("path").attr("d","M 0 0 L 0 0 L 0 0 z");An.util.applyStyle(w,L[v+"Style"])},k.arrows().normal=function(R,A){R.append("marker").attr("id",A).attr("viewBox","0 0 10 10").attr("refX",9).attr("refY",5).attr("markerUnits","strokeWidth").attr("markerWidth",8).attr("markerHeight",6).attr("orient","auto").append("path").attr("d","M 0 0 L 10 5 L 0 10 z").attr("class","arrowheadPath").style("stroke-width",1).style("stroke-dasharray","1,0")};const T=o.select(`[id="${e}"]`);bn(n.db,T,e);const C=o.select("#"+e+" g");for(k(C,f),C.selectAll("g.node").attr("title",function(){return n.db.getTooltip(this.id)}),n.db.indexNodes("subGraph"+b),b=0;b<m.length;b++)if(p=m[b],p.title!=="undefined"){const S=l.querySelectorAll("#"+e+' [id="'+n.db.lookUpDomId(p.id)+'"] rect'),R=l.querySelectorAll("#"+e+' [id="'+n.db.lookUpDomId(p.id)+'"]'),A=S[0].x.baseVal.value,L=S[0].y.baseVal.value,v=S[0].width.baseVal.value,w=St(R[0]).select(".label");w.attr("transform",`translate(${A+v/2}, ${L+14})`),w.attr("id",e+"Text");for(let D=0;D<p.classes.length;D++)R[0].classList.add(p.classes[D])}if(!a.htmlLabels){const S=l.querySelectorAll('[id="'+e+'"] .edgeLabel .label');for(let R=0;R<S.length;R++){const
/* When htmlLabels is off: insert a sized background rect behind every edge label.
   Then, for vertices with a link, wrap the node contents in an SVG <a> element
   (rel="noopener"; target forced to "_top" under securityLevel "sandbox"). */
A=S[R],L=A.getBBox(),v=l.createElementNS("http://www.w3.org/2000/svg","rect");v.setAttribute("rx",0),v.setAttribute("ry",0),v.setAttribute("width",L.width),v.setAttribute("height",L.height),A.insertBefore(v,A.firstChild)}}i1(f,T,a.diagramPadding,a.useMaxWidth),Object.keys(_).forEach(function(S){const R=_[S];if(R.link){const A=o.select("#"+e+' [id="'+n.db.lookUpDomId(S)+'"]');if(A){const L=l.createElementNS("http://www.w3.org/2000/svg","a");L.setAttributeNS("http://www.w3.org/2000/svg","class",R.classes.join(" ")),L.setAttributeNS("http://www.w3.org/2000/svg","href",R.link),L.setAttributeNS("http://www.w3.org/2000/svg","rel","noopener"),i==="sandbox"?L.setAttributeNS("http://www.w3.org/2000/svg","target","_top"):R.linkTarget&&L.setAttributeNS("http://www.w3.org/2000/svg","target",R.linkTarget);const v=A.insert(function(){return L},":first-child"),B=A.select(".label-container");B&&v.append(function(){return B.node()});const w=A.select(".label");w&&v.append(function(){return w.node()})}}})}},HM={},Cnt=function(t){const e=Object.keys(t);for(let r=0;r<e.length;r++)HM[e[r]]=t[e[r]]},GM=function(t,e,r,n,i,a){const s=n.select(`[id="${r}"]`);Object.keys(t).forEach(function(l){const u=t[l];let h="default";u.classes.length>0&&(h=u.classes.join(" "));const d=Ka(u.styles);let f=u.text!==void 0?u.text:u.id,p;if(Mr(nt().flowchart.htmlLabels)){const y={label:f.replace(/fa[lrsb]?:fa-[\w-]+/g,b=>`<i class='${b.replace(":"," ")}'></i>`)};p=p4(s,y).node(),p.parentNode.removeChild(p)}else{const y=i.createElementNS("http://www.w3.org/2000/svg","text");y.setAttribute("style",d.labelStyle.replace("color:","fill:"));const b=f.split(pe.lineBreakRegex);for(let x=0;x<b.length;x++){const k=i.createElementNS("http://www.w3.org/2000/svg","tspan");k.setAttributeNS("http://www.w3.org/XML/1998/namespace","xml:space","preserve"),k.setAttribute("dy","1em"),k.setAttribute("x","1"),k.textContent=b[x],y.appendChild(k)}p=y}let
/* NOTE(review): minified generated bundle — do not hand-edit. GM (continued from
   above) maps a vertex "type" string to a shape name; note this variant also
   supports "doublecircle", unlike the earlier shape switch — presumably this is
   the second-generation flowchart renderer (fa.setGen("gen-2") below). */
m=0,_="";switch(u.type){case"round":m=5,_="rect";break;case"square":_="rect";break;case"diamond":_="question";break;case"hexagon":_="hexagon";break;case"odd":_="rect_left_inv_arrow";break;case"lean_right":_="lean_right";break;case"lean_left":_="lean_left";break;case"trapezoid":_="trapezoid";break;case"inv_trapezoid":_="inv_trapezoid";break;case"odd_right":_="rect_left_inv_arrow";break;case"circle":_="circle";break;case"ellipse":_="ellipse";break;case"stadium":_="stadium";break;case"subroutine":_="subroutine";break;case"cylinder":_="cylinder";break;case"group":_="rect";break;case"doublecircle":_="doublecircle";break;default:_="rect"}e.setNode(u.id,{labelStyle:d.labelStyle,shape:_,labelText:f,rx:m,ry:m,class:h,style:d.style,id:u.id,link:u.link,linkTarget:u.linkTarget,tooltip:a.db.getTooltip(u.id)||"",domId:a.db.lookUpDomId(u.id),haveCallback:u.haveCallback,width:u.type==="group"?500:void 0,dir:u.dir,type:u.type,props:u.props,padding:nt().flowchart.padding}),H.info("setNode",{labelStyle:d.labelStyle,shape:_,labelText:f,rx:m,ry:m,class:h,style:d.style,id:u.id,domId:a.db.lookUpDomId(u.id),width:u.type==="group"?500:void 0,type:u.type,dir:u.dir,props:u.props,padding:nt().flowchart.padding})})},jM=function(t,e,r){H.info("abc78 edges = ",t);let n=0,i={},a,s;if(typeof t.defaultStyle<"u"){const o=Ka(t.defaultStyle);a=o.style,s=o.labelStyle}t.forEach(function(o){n++;var l="L-"+o.start+"-"+o.end;typeof i[l]>"u"?(i[l]=0,H.info("abc78 new entry",l,i[l])):(i[l]++,H.info("abc78 new entry",l,i[l]));let u=l+"-"+i[l];H.info("abc78 new link id to be used is",l,u,i[l]);var h="LS-"+o.start,d="LE-"+o.end;const
/* jM (edge mapper): i[l] counts duplicate start/end pairs so each edge gets a
   unique id "L-start-end-<n>". In the arrow-type switch below the
   "double_arrow_*" cases deliberately fall through to their single-arrow
   counterparts so both ends get the marker. */
f={style:"",labelStyle:""};switch(f.minlen=o.length||1,o.type==="arrow_open"?f.arrowhead="none":f.arrowhead="normal",f.arrowTypeStart="arrow_open",f.arrowTypeEnd="arrow_open",o.type){case"double_arrow_cross":f.arrowTypeStart="arrow_cross";case"arrow_cross":f.arrowTypeEnd="arrow_cross";break;case"double_arrow_point":f.arrowTypeStart="arrow_point";case"arrow_point":f.arrowTypeEnd="arrow_point";break;case"double_arrow_circle":f.arrowTypeStart="arrow_circle";case"arrow_circle":f.arrowTypeEnd="arrow_circle";break}let p="",m="";switch(o.stroke){case"normal":p="fill:none;",typeof a<"u"&&(p=a),typeof s<"u"&&(m=s),f.thickness="normal",f.pattern="solid";break;case"dotted":f.thickness="normal",f.pattern="dotted",f.style="fill:none;stroke-width:2px;stroke-dasharray:3;";break;case"thick":f.thickness="thick",f.pattern="solid",f.style="stroke-width: 3.5px;fill:none;";break;case"invisible":f.thickness="invisible",f.pattern="solid",f.style="stroke-width: 0;fill:none;";break}if(typeof o.style<"u"){const _=Ka(o.style);p=_.style,m=_.labelStyle}f.style=f.style+=p,f.labelStyle=f.labelStyle+=m,typeof o.interpolate<"u"?f.curve=Ni(o.interpolate,yn):typeof t.defaultInterpolate<"u"?f.curve=Ni(t.defaultInterpolate,yn):f.curve=Ni(HM.curve,yn),typeof o.text>"u"?typeof o.style<"u"&&(f.arrowheadStyle="fill: #333"):(f.arrowheadStyle="fill: #333",f.labelpos="c"),f.labelType="text",f.label=o.text.replace(pe.lineBreakRegex,`
-`),typeof o.style>"u"&&(f.style=f.style||"stroke: #333; stroke-width: 1.5px;fill:none;"),f.labelStyle=f.labelStyle.replace("color:","fill:"),f.id=u,f.classes="flowchart-link "+h+" "+d,e.setEdge(o.start,o.end,f,n)})},A4={setConf:Cnt,addVertices:GM,addEdges:jM,getClasses:function(t,e){H.info("Extracting classes"),e.db.clear();try{return e.parse(t),e.db.getClasses()}catch{return}},draw:function(t,e,r,n){H.info("Drawing flowchart"),n.db.clear(),fa.setGen("gen-2"),n.parser.parse(t);let i=n.db.getDirection();typeof
/* A4.draw: parses the text, builds a multigraph/compound graph honoring
   nodeSpacing/rankSpacing, registers subgraphs as "group" vertices, renders via
   i4 with "point"/"circle"/"cross" markers, then (as in the legacy renderer)
   adds label background rects when htmlLabels is off and wraps linked nodes in
   SVG <a> elements. */
i>"u"&&(i="TD");const{securityLevel:a,flowchart:s}=nt(),o=s.nodeSpacing||50,l=s.rankSpacing||50;let u;a==="sandbox"&&(u=St("#i"+e));const h=St(a==="sandbox"?u.nodes()[0].contentDocument.body:"body"),d=a==="sandbox"?u.nodes()[0].contentDocument:document,f=new cr.Graph({multigraph:!0,compound:!0}).setGraph({rankdir:i,nodesep:o,ranksep:l,marginx:0,marginy:0}).setDefaultEdgeLabel(function(){return{}});let p;const m=n.db.getSubGraphs();H.info("Subgraphs - ",m);for(let C=m.length-1;C>=0;C--)p=m[C],H.info("Subgraph - ",p),n.db.addVertex(p.id,p.title,"group",void 0,p.classes,p.dir);const _=n.db.getVertices(),y=n.db.getEdges();H.info(y);let b=0;for(b=m.length-1;b>=0;b--){p=m[b],Nu("cluster").append("text");for(let C=0;C<p.nodes.length;C++)H.info("Setting up subgraphs",p.nodes[C],p.id),f.setParent(p.nodes[C],p.id)}GM(_,f,e,h,d,n),jM(y,f);const x=h.select(`[id="${e}"]`);bn(n.db,x,e);const k=h.select("#"+e+" g");if(i4(k,f,["point","circle","cross"],"flowchart",e),i1(f,x,s.diagramPadding,s.useMaxWidth),n.db.indexNodes("subGraph"+b),!s.htmlLabels){const C=d.querySelectorAll('[id="'+e+'"] .edgeLabel .label');for(let M=0;M<C.length;M++){const S=C[M],R=S.getBBox(),A=d.createElementNS("http://www.w3.org/2000/svg","rect");A.setAttribute("rx",0),A.setAttribute("ry",0),A.setAttribute("width",R.width),A.setAttribute("height",R.height),S.insertBefore(A,S.firstChild)}}Object.keys(_).forEach(function(C){const M=_[C];if(M.link){const S=St("#"+e+' [id="'+C+'"]');if(S){const R=d.createElementNS("http://www.w3.org/2000/svg","a");R.setAttributeNS("http://www.w3.org/2000/svg","class",M.classes.join(" ")),R.setAttributeNS("http://www.w3.org/2000/svg","href",M.link),R.setAttributeNS("http://www.w3.org/2000/svg","rel","noopener"),a==="sandbox"?R.setAttributeNS("http://www.w3.org/2000/svg","target","_top"):M.linkTarget&&R.setAttributeNS("http://www.w3.org/2000/svg","target",M.linkTarget);const A=S.insert(function(){return
R},":first-child"),L=S.select(".label-container");L&&A.append(function(){return L.node()});const v=S.select(".label");v&&A.append(function(){return v.node()})}}})}};var M4=function(){var t=function(S,R,A,L){for(A=A||{},L=S.length;L--;A[S[L]]=R);return A},e=[1,3],r=[1,5],n=[7,9,11,12,13,14,15,16,17,18,19,20,22,24,25,27,34,39],i=[1,15],a=[1,16],s=[1,17],o=[1,18],l=[1,19],u=[1,20],h=[1,21],d=[1,22],f=[1,23],p=[1,24],m=[1,25],_=[1,26],y=[1,28],b=[1,30],x=[1,33],k=[5,7,9,11,12,13,14,15,16,17,18,19,20,22,24,25,27,34,39],T={trace:function(){},yy:{},symbols_:{error:2,start:3,directive:4,gantt:5,document:6,EOF:7,line:8,SPACE:9,statement:10,NL:11,dateFormat:12,inclusiveEndDates:13,topAxis:14,axisFormat:15,excludes:16,includes:17,todayMarker:18,title:19,acc_title:20,acc_title_value:21,acc_descr:22,acc_descr_value:23,acc_descr_multiline_value:24,section:25,clickStatement:26,taskTxt:27,taskData:28,openDirective:29,typeDirective:30,closeDirective:31,":":32,argDirective:33,click:34,callbackname:35,callbackargs:36,href:37,clickStatementDebug:38,open_directive:39,type_directive:40,arg_directive:41,close_directive:42,$accept:0,$end:1},terminals_:{2:"error",5:"gantt",7:"EOF",9:"SPACE",11:"NL",12:"dateFormat",13:"inclusiveEndDates",14:"topAxis",15:"axisFormat",16:"excludes",17:"includes",18:"todayMarker",19:"title",20:"acc_title",21:"acc_title_value",22:"acc_descr",23:"acc_descr_value",24:"acc_descr_multiline_value",25:"section",27:"taskTxt",28:"taskData",32:":",34:"click",35:"callbackname",36:"callbackargs",37:"href",39:"open_directive",40:"type_directive",41:"arg_directive",42:"close_directive"},productions_:[0,[3,2],[3,3],[6,0],[6,2],[8,2],[8,1],[8,1],[8,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,1],[10,2],[10,2],[10,1],[10,1],[10,1],[10,2],[10,1],[4,4],[4,6],[26,2],[26,3],[26,3],[26,4],[26,3],[26,4],[26,2],[38,2],[38,3],[38,3],[38,4],[38,3],[38,4],[38,2],[29,1],[30,1],[33,1],[31,1]],performAction:function(R,A,L,v,B,w,D){var N=w.length-1;switch(B){case 2:return
w[N-1];case 3:this.$=[];break;case 4:w[N-1].push(w[N]),this.$=w[N-1];break;case 5:case 6:this.$=w[N];break;case 7:case 8:this.$=[];break;case 9:v.setDateFormat(w[N].substr(11)),this.$=w[N].substr(11);break;case 10:v.enableInclusiveEndDates(),this.$=w[N].substr(18);break;case 11:v.TopAxis(),this.$=w[N].substr(8);break;case 12:v.setAxisFormat(w[N].substr(11)),this.$=w[N].substr(11);break;case 13:v.setExcludes(w[N].substr(9)),this.$=w[N].substr(9);break;case 14:v.setIncludes(w[N].substr(9)),this.$=w[N].substr(9);break;case 15:v.setTodayMarker(w[N].substr(12)),this.$=w[N].substr(12);break;case 16:v.setDiagramTitle(w[N].substr(6)),this.$=w[N].substr(6);break;case 17:this.$=w[N].trim(),v.setAccTitle(this.$);break;case 18:case 19:this.$=w[N].trim(),v.setAccDescription(this.$);break;case 20:v.addSection(w[N].substr(8)),this.$=w[N].substr(8);break;case 22:v.addTask(w[N-1],w[N]),this.$="task";break;case 26:this.$=w[N-1],v.setClickEvent(w[N-1],w[N],null);break;case 27:this.$=w[N-2],v.setClickEvent(w[N-2],w[N-1],w[N]);break;case 28:this.$=w[N-2],v.setClickEvent(w[N-2],w[N-1],null),v.setLink(w[N-2],w[N]);break;case 29:this.$=w[N-3],v.setClickEvent(w[N-3],w[N-2],w[N-1]),v.setLink(w[N-3],w[N]);break;case 30:this.$=w[N-2],v.setClickEvent(w[N-2],w[N],null),v.setLink(w[N-2],w[N-1]);break;case 31:this.$=w[N-3],v.setClickEvent(w[N-3],w[N-1],w[N]),v.setLink(w[N-3],w[N-2]);break;case 32:this.$=w[N-1],v.setLink(w[N-1],w[N]);break;case 33:case 39:this.$=w[N-1]+" "+w[N];break;case 34:case 35:case 37:this.$=w[N-2]+" "+w[N-1]+" "+w[N];break;case 36:case 38:this.$=w[N-3]+" "+w[N-2]+" "+w[N-1]+" "+w[N];break;case 40:v.parseDirective("%%{","open_directive");break;case 41:v.parseDirective(w[N],"type_directive");break;case 42:w[N]=w[N].trim().replace(/'/g,'"'),v.parseDirective(w[N],"arg_directive");break;case
/* NOTE(review): M4 appears to be a generated LR parser (jison-style) for the
   gantt grammar — symbols_/terminals_/productions_/table are machine-produced;
   never edit by hand. The final semantic-action case below closes a "%%{ }%%"
   directive; "table" is the LR transition table and "parse" the standard
   shift/reduce driver. */
43:v.parseDirective("}%%","close_directive","gantt");break}},table:[{3:1,4:2,5:e,29:4,39:r},{1:[3]},{3:6,4:2,5:e,29:4,39:r},t(n,[2,3],{6:7}),{30:8,40:[1,9]},{40:[2,40]},{1:[2,1]},{4:29,7:[1,10],8:11,9:[1,12],10:13,11:[1,14],12:i,13:a,14:s,15:o,16:l,17:u,18:h,19:d,20:f,22:p,24:m,25:_,26:27,27:y,29:4,34:b,39:r},{31:31,32:[1,32],42:x},t([32,42],[2,41]),t(n,[2,8],{1:[2,2]}),t(n,[2,4]),{4:29,10:34,12:i,13:a,14:s,15:o,16:l,17:u,18:h,19:d,20:f,22:p,24:m,25:_,26:27,27:y,29:4,34:b,39:r},t(n,[2,6]),t(n,[2,7]),t(n,[2,9]),t(n,[2,10]),t(n,[2,11]),t(n,[2,12]),t(n,[2,13]),t(n,[2,14]),t(n,[2,15]),t(n,[2,16]),{21:[1,35]},{23:[1,36]},t(n,[2,19]),t(n,[2,20]),t(n,[2,21]),{28:[1,37]},t(n,[2,23]),{35:[1,38],37:[1,39]},{11:[1,40]},{33:41,41:[1,42]},{11:[2,43]},t(n,[2,5]),t(n,[2,17]),t(n,[2,18]),t(n,[2,22]),t(n,[2,26],{36:[1,43],37:[1,44]}),t(n,[2,32],{35:[1,45]}),t(k,[2,24]),{31:46,42:x},{42:[2,42]},t(n,[2,27],{37:[1,47]}),t(n,[2,28]),t(n,[2,30],{36:[1,48]}),{11:[1,49]},t(n,[2,29]),t(n,[2,31]),t(k,[2,25])],defaultActions:{5:[2,40],6:[2,1],33:[2,43],42:[2,42]},parseError:function(R,A){if(A.recoverable)this.trace(R);else{var L=new Error(R);throw L.hash=A,L}},parse:function(R){var A=this,L=[0],v=[],B=[null],w=[],D=this.table,N="",z=0,X=0,ct=2,J=1,Y=w.slice.call(arguments,1),$=Object.create(this.lexer),lt={yy:{}};for(var ut in this.yy)Object.prototype.hasOwnProperty.call(this.yy,ut)&&(lt.yy[ut]=this.yy[ut]);$.setInput(R,lt.yy),lt.yy.lexer=$,lt.yy.parser=this,typeof $.yylloc>"u"&&($.yylloc={});var W=$.yylloc;w.push(W);var tt=$.options&&$.options.ranges;typeof lt.yy.parseError=="function"?this.parseError=lt.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function K(){var at;return at=v.pop()||$.lex()||J,typeof at!="number"&&(at instanceof Array&&(v=at,at=v.pop()),at=A.symbols_[at]||at),at}for(var it,Z,V,Q,q={},U,F,j,P;;){if(Z=L[L.length-1],this.defaultActions[Z]?V=this.defaultActions[Z]:((it===null||typeof it>"u")&&(it=K()),V=D[Z]&&D[Z][it]),typeof
/* Error path of the parse loop: builds a "Parse error on line N" message with
   the list of expected terminals, then defers to parseError. Cases 1/2/3 below
   are the standard shift / reduce / accept actions. */
V>"u"||!V.length||!V[0]){var et="";P=[];for(U in D[Z])this.terminals_[U]&&U>ct&&P.push("'"+this.terminals_[U]+"'");$.showPosition?et="Parse error on line "+(z+1)+`:
-`+$.showPosition()+`
-Expecting `+P.join(", ")+", got '"+(this.terminals_[it]||it)+"'":et="Parse error on line "+(z+1)+": Unexpected "+(it==J?"end of input":"'"+(this.terminals_[it]||it)+"'"),this.parseError(et,{text:$.match,token:this.terminals_[it]||it,line:$.yylineno,loc:W,expected:P})}if(V[0]instanceof Array&&V.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Z+", token: "+it);switch(V[0]){case 1:L.push(it),B.push($.yytext),w.push($.yylloc),L.push(V[1]),it=null,X=$.yyleng,N=$.yytext,z=$.yylineno,W=$.yylloc;break;case 2:if(F=this.productions_[V[1]][1],q.$=B[B.length-F],q._$={first_line:w[w.length-(F||1)].first_line,last_line:w[w.length-1].last_line,first_column:w[w.length-(F||1)].first_column,last_column:w[w.length-1].last_column},tt&&(q._$.range=[w[w.length-(F||1)].range[0],w[w.length-1].range[1]]),Q=this.performAction.apply(q,[N,X,z,lt.yy,V[1],B,w].concat(Y)),typeof Q<"u")return Q;F&&(L=L.slice(0,-1*F*2),B=B.slice(0,-1*F),w=w.slice(0,-1*F)),L.push(this.productions_[V[1]][0]),B.push(q.$),w.push(q._$),j=D[L[L.length-2]][L[L.length-1]],L.push(j);break;case 3:return!0}}return!0}},C=function(){var S={EOF:1,parseError:function(A,L){if(this.yy.parser)this.yy.parser.parseError(A,L);else throw new Error(A)},setInput:function(R,A){return this.yy=A||this.yy||{},this._input=R,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var R=this._input[0];this.yytext+=R,this.yyleng++,this.offset++,this.match+=R,this.matched+=R;var A=R.match(/(?:\r\n?|\n).*/g);return
A?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),R},unput:function(R){var A=R.length,L=R.split(/(?:\r\n?|\n)/g);this._input=R+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-A),this.offset-=A;var v=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),L.length-1&&(this.yylineno-=L.length-1);var B=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:L?(L.length===v.length?this.yylloc.first_column:0)+v[v.length-L.length].length-L[0].length:this.yylloc.first_column-A},this.options.ranges&&(this.yylloc.range=[B[0],B[0]+this.yyleng-A]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(R){this.unput(this.match.slice(R))},pastInput:function(){var R=this.matched.substr(0,this.matched.length-this.match.length);return(R.length>20?"...":"")+R.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var R=this.match;return R.length<20&&(R+=this._input.substr(0,20-R.length)),(R.substr(0,20)+(R.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var R=this.pastInput(),A=new Array(R.length+1).join("-");return R+this.upcomingInput()+`
-`+A+"^"},test_match:function(R,A){var L,v,B;if(this.options.backtrack_lexer&&(B={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(B.yylloc.range=this.yylloc.range.slice(0))),v=R[0].match(/(?:\r\n?|\n).*/g),v&&(this.yylineno+=v.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:v?v[v.length-1].length-v[v.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+R[0].length},this.yytext+=R[0],this.match+=R[0],this.matches=R,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(R[0].length),this.matched+=R[0],L=this.performAction.call(this,this.yy,this,A,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),L)return L;if(this._backtrack){for(var w in B)this[w]=B[w];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var R,A,L,v;this._more||(this.yytext="",this.match="");for(var
/* Generated lexer next(): tries every rule active in the current start
   condition, keeping the longest match (unless options.flex stops at the first,
   or backtrack_lexer retries on reject). */
B=this._currentRules(),w=0;w<B.length;w++)if(L=this._input.match(this.rules[B[w]]),L&&(!A||L[0].length>A[0].length)){if(A=L,v=w,this.options.backtrack_lexer){if(R=this.test_match(L,B[w]),R!==!1)return R;if(this._backtrack){A=!1;continue}else return!1}else if(!this.options.flex)break}return A?(R=this.test_match(A,B[v]),R!==!1?R:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text.
-`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var A=this.next();return A||this.lex()},begin:function(A){this.conditionStack.push(A)},popState:function(){var A=this.conditionStack.length-1;return A>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(A){return A=this.conditionStack.length-1-Math.abs(A||0),A>=0?this.conditionStack[A]:"INITIAL"},pushState:function(A){this.begin(A)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(A,L,v,B){switch(v){case 0:return this.begin("open_directive"),39;case 1:return this.begin("type_directive"),40;case 2:return this.popState(),this.begin("arg_directive"),32;case 3:return this.popState(),this.popState(),42;case 4:return 41;case 5:return this.begin("acc_title"),20;case 6:return this.popState(),"acc_title_value";case 7:return this.begin("acc_descr"),22;case 8:return this.popState(),"acc_descr_value";case 9:this.begin("acc_descr_multiline");break;case 10:this.popState();break;case 11:return"acc_descr_multiline_value";case 12:break;case 13:break;case 14:break;case 15:return 11;case 16:break;case 17:break;case 18:break;case 19:this.begin("href");break;case 20:this.popState();break;case 21:return 37;case 22:this.begin("callbackname");break;case 23:this.popState();break;case
/* Lexer actions continued: each case index corresponds positionally to an entry
   in the generated "rules" regex array below (gantt keywords such as
   dateFormat/axisFormat/includes/excludes/todayMarker/section, click/href
   handling, etc.); "conditions" maps start conditions to their active rules. */
24:this.popState(),this.begin("callbackargs");break;case 25:return 35;case 26:this.popState();break;case 27:return 36;case 28:this.begin("click");break;case 29:this.popState();break;case 30:return 34;case 31:return 5;case 32:return 12;case 33:return 13;case 34:return 14;case 35:return 15;case 36:return 17;case 37:return 16;case 38:return 18;case 39:return"date";case 40:return 19;case 41:return"accDescription";case 42:return 25;case 43:return 27;case 44:return 28;case 45:return 32;case 46:return 7;case 47:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:%%(?!\{)*[^\n]*)/i,/^(?:[^\}]%%*[^\n]*)/i,/^(?:%%*[^\n]*[\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:href[\s]+["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:call[\s]+)/i,/^(?:\([\s]*\))/i,/^(?:\()/i,/^(?:[^(]*)/i,/^(?:\))/i,/^(?:[^)]*)/i,/^(?:click[\s]+)/i,/^(?:[\s\n])/i,/^(?:[^\s\n]*)/i,/^(?:gantt\b)/i,/^(?:dateFormat\s[^#\n;]+)/i,/^(?:inclusiveEndDates\b)/i,/^(?:topAxis\b)/i,/^(?:axisFormat\s[^#\n;]+)/i,/^(?:includes\s[^#\n;]+)/i,/^(?:excludes\s[^#\n;]+)/i,/^(?:todayMarker\s[^\n;]+)/i,/^(?:\d\d\d\d-\d\d-\d\d\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:accDescription\s[^#\n;]+)/i,/^(?:section\s[^#:\n;]+)/i,/^(?:[^#:\n;]+)/i,/^(?::[^#\n;]+)/i,/^(?::)/i,/^(?:$)/i,/^(?:.)/i],conditions:{acc_descr_multiline:{rules:[10,11],inclusive:!1},acc_descr:{rules:[8],inclusive:!1},acc_title:{rules:[6],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},callbackargs:{rules:[26,27],inclusive:!1},callbackname:{rules:[23,24,25],inclusive:!1},href:{rules:[20,21],inclusive:!1},click:{rules:[29,30],inclusive:!1},INITIAL:{rules:[0,5,7,9,12,13,14,15,16,17,18,19,22,28,31,32,33,34,35,36,37,38,
/* Continuation of the INITIAL condition's rule-index list. */
39,40,41,42,43,44,45,46,47],inclusive:!0}}};return S}();T.lexer=C;function M(){this.yy={}}return M.prototype=T,T.Parser=M,new M}();M4.parser=M4;const Snt=t=>t.match(/^\s*gantt/)!==null;let zi="",L4="",R4="",au=[],su=[],I4={},N4=[],a0=[],sl="";const $M=["active","done","crit","milestone"];let s0=[],ou=!1,B4=!1,D4=0;const Ant=function(t,e,r){Xe.parseDirective(this,t,e,r)},Mnt=function(){N4=[],a0=[],sl="",s0=[],o0=0,F4=void 0,l0=void 0,yr=[],zi="",L4="",R4="",au=[],su=[],ou=!1,B4=!1,D4=0,I4={},ci()},Lnt=function(t){L4=t},Rnt=function(){return L4},Int=function(t){R4=t},Nnt=function(){return R4},Bnt=function(t){zi=t},Dnt=function(){ou=!0},Ont=function(){return ou},Fnt=function(){B4=!0},Pnt=function(){return B4},qnt=function(){return zi},Vnt=function(t){au=t.toLowerCase().split(/[\s,]+/)},znt=function(){return au},Ynt=function(t){su=t.toLowerCase().split(/[\s,]+/)},Unt=function(){return su},Wnt=function(){return I4},Hnt=function(t){sl=t,N4.push(t)},Gnt=function(){return N4},jnt=function(){let t=tL();const e=10;let r=0;for(;!t&&r<e;)t=tL(),r++;return a0=yr,a0},XM=function(t,e,r,n){return n.indexOf(t.format(e.trim()))>=0?!1:t.isoWeekday()>=6&&r.indexOf("weekends")>=0||r.indexOf(t.format("dddd").toLowerCase())>=0?!0:r.indexOf(t.format(e.trim()))>=0},KM=function(t,e,r,n){if(!r.length||t.manualEndTime)return;let i=Xn(t.startTime,e,!0);i.add(1,"d");let a=Xn(t.endTime,e,!0),s=$nt(i,a,e,r,n);t.endTime=a.toDate(),t.renderEndTime=s},$nt=function(t,e,r,n,i){let a=!1,s=null;for(;t<=e;)a||(s=e.toDate()),a=XM(t,r,n,i),a&&e.add(1,"d"),t.add(1,"d");return s},O4=function(t,e,r){r=r.trim();const i=/^after\s+([\d\w- ]+)/.exec(r.trim());if(i!==null){let s=null;if(i[1].split(" ").forEach(function(o){let l=ll(o);typeof l<"u"&&(s?l.endTime>s.endTime&&(s=l):s=l)}),s)return s.endTime;{const o=new Date;return o.setHours(0,0,0,0),o}}let a=Xn(r,e.trim(),!0);if(a.isValid())return a.toDate();{H.debug("Invalid date:"+r),H.debug("With date format:"+e.trim());const s=new Date(r);if(typeof
/* NOTE(review): gantt task database (minified). O4's fallback (immediately
   below) tries a native Date parse and throws "Invalid date:" when that fails.
   ZM parses "<number><unit>" durations via Xn.duration (Xn presumably a
   moment-like date library — confirm); QM resolves a task end from either an
   absolute date or a start+duration. */
s>"u"||isNaN(s.getTime()))throw new Error("Invalid date:"+r);return s}},ZM=function(t){const e=/^(\d+(?:\.\d+)?)([yMwdhms]|ms)$/.exec(t.trim());return e!==null?Xn.duration(Number.parseFloat(e[1]),e[2]):Xn.duration.invalid()},QM=function(t,e,r,n){n=n||!1,r=r.trim();let i=Xn(r,e.trim(),!0);if(i.isValid())return n&&i.add(1,"d"),i.toDate();const a=Xn(t),s=ZM(r);return s.isValid()&&a.add(s),a.toDate()};let o0=0;const ol=function(t){return typeof t>"u"?(o0=o0+1,"task"+o0):t},Xnt=function(t,e){let r;e.substr(0,1)===":"?r=e.substr(1,e.length):r=e;const n=r.split(","),i={};nL(n,i,$M);for(let s=0;s<n.length;s++)n[s]=n[s].trim();let a="";switch(n.length){case 1:i.id=ol(),i.startTime=t.endTime,a=n[0];break;case 2:i.id=ol(),i.startTime=O4(void 0,zi,n[0]),a=n[1];break;case 3:i.id=ol(n[0]),i.startTime=O4(void 0,zi,n[1]),a=n[2];break}return a&&(i.endTime=QM(i.startTime,zi,a,ou),i.manualEndTime=Xn(a,"YYYY-MM-DD",!0).isValid(),KM(i,zi,su,au)),i},Knt=function(t,e){let r;e.substr(0,1)===":"?r=e.substr(1,e.length):r=e;const n=r.split(","),i={};nL(n,i,$M);for(let a=0;a<n.length;a++)n[a]=n[a].trim();switch(n.length){case 1:i.id=ol(),i.startTime={type:"prevTaskEnd",id:t},i.endTime={data:n[0]};break;case 2:i.id=ol(),i.startTime={type:"getStartDate",startData:n[0]},i.endTime={data:n[1]};break;case 3:i.id=ol(n[0]),i.startTime={type:"getStartDate",startData:n[1]},i.endTime={data:n[2]};break}return i};let F4,l0,yr=[];const JM={},Znt=function(t,e){const r={section:sl,type:sl,processed:!1,manualEndTime:!1,renderEndTime:null,raw:{data:e},task:t,classes:[]},n=Knt(l0,e);r.raw.startTime=n.startTime,r.raw.endTime=n.endTime,r.id=n.id,r.prevTaskId=l0,r.active=n.active,r.done=n.done,r.crit=n.crit,r.milestone=n.milestone,r.order=D4,D4++;const i=yr.push(r);l0=r.id,JM[r.id]=i-1},ll=function(t){const e=JM[t];return yr[e]},Qnt=function(t,e){const
/* Qnt builds a fully-resolved task record immediately (addTaskOrg in the
   exported API), whereas Znt above stores raw start/end descriptors that tL
   later compiles: "prevTaskEnd" chains a task to its predecessor's end, and
   "getStartDate" resolves via O4 (which also supports "after <ids>"). */
r={section:sl,type:sl,description:t,task:t,classes:[]},n=Xnt(F4,e);r.startTime=n.startTime,r.endTime=n.endTime,r.id=n.id,r.active=n.active,r.done=n.done,r.crit=n.crit,r.milestone=n.milestone,F4=r,a0.push(r)},tL=function(){const t=function(r){const n=yr[r];let i="";switch(yr[r].raw.startTime.type){case"prevTaskEnd":{const a=ll(n.prevTaskId);n.startTime=a.endTime;break}case"getStartDate":i=O4(void 0,zi,yr[r].raw.startTime.startData),i&&(yr[r].startTime=i);break}return yr[r].startTime&&(yr[r].endTime=QM(yr[r].startTime,zi,yr[r].raw.endTime.data,ou),yr[r].endTime&&(yr[r].processed=!0,yr[r].manualEndTime=Xn(yr[r].raw.endTime.data,"YYYY-MM-DD",!0).isValid(),KM(yr[r],zi,su,au))),yr[r].processed};let e=!0;for(let r=0;r<yr.length;r++)t(r),e=e&&yr[r].processed;return e},Jnt=function(t,e){let r=e;nt().securityLevel!=="loose"&&(r=ki(e)),t.split(",").forEach(function(n){typeof ll(n)<"u"&&(rL(n,()=>{window.open(r,"_self")}),I4[n]=r)}),eL(t,"clickable")},eL=function(t,e){t.split(",").forEach(function(r){let n=ll(r);typeof n<"u"&&n.classes.push(e)})},tit=function(t,e,r){if(nt().securityLevel!=="loose"||typeof e>"u")return;let n=[];if(typeof r=="string"){n=r.split(/,(?=(?:(?:[^"]*"){2})*[^"]*$)/);for(let a=0;a<n.length;a++){let s=n[a].trim();s.charAt(0)==='"'&&s.charAt(s.length-1)==='"'&&(s=s.substr(1,s.length-2)),n[a]=s}}n.length===0&&n.push(t),typeof ll(t)<"u"&&rL(t,()=>{Se.runFunc(e,...n)})},rL=function(t,e){s0.push(function(){const r=document.querySelector(`[id="${t}"]`);r!==null&&r.addEventListener("click",function(){e()})}),s0.push(function(){const
/* rL defers binding: it queues closures (run later by bindFunctions) that attach
   click handlers to the task's bar element and its "<id>-text" label. P4 is the
   exported gantt DB API surface; nL strips leading tag keywords
   (active/done/crit/milestone) from task data into boolean flags. */
r=document.querySelector(`[id="${t}-text"]`);r!==null&&r.addEventListener("click",function(){e()})})},P4={parseDirective:Ant,getConfig:()=>nt().gantt,clear:Mnt,setDateFormat:Bnt,getDateFormat:qnt,enableInclusiveEndDates:Dnt,endDatesAreInclusive:Ont,enableTopAxis:Fnt,topAxisEnabled:Pnt,setAxisFormat:Lnt,getAxisFormat:Rnt,setTodayMarker:Int,getTodayMarker:Nnt,setAccTitle:Yn,getAccTitle:ui,setDiagramTitle:c1,getDiagramTitle:u1,setAccDescription:hi,getAccDescription:fi,addSection:Hnt,getSections:Gnt,getTasks:jnt,addTask:Znt,findTaskById:ll,addTaskOrg:Qnt,setIncludes:Vnt,getIncludes:znt,setExcludes:Ynt,getExcludes:Unt,setClickEvent:function(t,e,r){t.split(",").forEach(function(n){tit(n,e,r)}),eL(t,"clickable")},setLink:Jnt,getLinks:Wnt,bindFunctions:function(t){s0.forEach(function(e){e(t)})},parseDuration:ZM,isInvalidDate:XM};function nL(t,e,r){let n=!0;for(;n;)n=!1,r.forEach(function(i){const a="^\\s*"+i+"\\s*$",s=new RegExp(a);t[0].match(s)&&(e[i]=!0,t.shift(1),n=!0)})}const eit=function(){H.debug("Something is calling, setConf, remove the call")};let pa;const rit={setConf:eit,draw:function(t,e,r,n){const i=nt().gantt,a=nt().securityLevel;let s;a==="sandbox"&&(s=St("#i"+e));const o=St(a==="sandbox"?s.nodes()[0].contentDocument.body:"body"),l=a==="sandbox"?s.nodes()[0].contentDocument:document,u=l.getElementById(e);pa=u.parentElement.offsetWidth,typeof pa>"u"&&(pa=1200),typeof i.useWidth<"u"&&(pa=i.useWidth);const h=n.db.getTasks(),d=h.length*(i.barHeight+i.barGap)+2*i.topPadding;u.setAttribute("viewBox","0 0 "+pa+" "+d);const f=o.select(`[id="${e}"]`),p=X8().domain([Tl(h,function(L){return L.startTime}),lo(h,function(L){return L.endTime})]).rangeRound([0,pa-i.leftPadding-i.rightPadding]);let m=[];for(let L=0;L<h.length;L++)m.push(h[L].type);const _=m;m=S(m);function y(L,v){const B=L.startTime,w=v.startTime;let D=0;return
B>w?D=1:B<w&&(D=-1),D}h.sort(y),b(h,pa,d),li(f,d,pa,i.useMaxWidth),f.append("text").text(n.db.getDiagramTitle()).attr("x",pa/2).attr("y",i.titleTopMargin).attr("class","titleText"),bn(n.db,f,e);function b(L,v,B){const w=i.barHeight,D=w+i.barGap,N=i.topPadding,z=i.leftPadding,X=sp().domain([0,m.length]).range(["#00B9FA","#F95002"]).interpolate($5);k(D,N,z,v,B,L,n.db.getExcludes(),n.db.getIncludes()),T(z,N,v,B),x(L,D,N,z,w,X,v),C(D,N),M(z,N,v,B)}function x(L,v,B,w,D,N,z){f.append("g").selectAll("rect").data(L).enter().append("rect").attr("x",0).attr("y",function(Y,$){return $=Y.order,$*v+B-2}).attr("width",function(){return z-i.rightPadding/2}).attr("height",v).attr("class",function(Y){for(let $=0;$<m.length;$++)if(Y.type===m[$])return"section section"+$%i.numberSectionStyles;return"section section0"});const X=f.append("g").selectAll("rect").data(L).enter(),ct=n.db.getLinks();if(X.append("rect").attr("id",function(Y){return Y.id}).attr("rx",3).attr("ry",3).attr("x",function(Y){return Y.milestone?p(Y.startTime)+w+.5*(p(Y.endTime)-p(Y.startTime))-.5*D:p(Y.startTime)+w}).attr("y",function(Y,$){return $=Y.order,$*v+B}).attr("width",function(Y){return Y.milestone?D:p(Y.renderEndTime||Y.endTime)-p(Y.startTime)}).attr("height",D).attr("transform-origin",function(Y,$){return $=Y.order,(p(Y.startTime)+w+.5*(p(Y.endTime)-p(Y.startTime))).toString()+"px "+($*v+B+.5*D).toString()+"px"}).attr("class",function(Y){const $="task";let lt="";Y.classes.length>0&&(lt=Y.classes.join(" "));let ut=0;for(let tt=0;tt<m.length;tt++)Y.type===m[tt]&&(ut=tt%i.numberSectionStyles);let W="";return Y.active?Y.crit?W+=" activeCrit":W=" active":Y.done?Y.crit?W=" doneCrit":W=" done":Y.crit&&(W+=" crit"),W.length===0&&(W=" task"),Y.milestone&&(W=" milestone "+W),W+=ut,W+=" "+lt,$+W}),X.append("text").attr("id",function(Y){return Y.id+"-text"}).text(function(Y){return Y.task}).attr("font-size",i.fontSize).attr("x",function(Y){let 
$=p(Y.startTime),lt=p(Y.renderEndTime||Y.endTime);Y.milestone&&($+=.5*(p(Y.endTime)-p(Y.startTime))-.5*D),Y.milestone&&(lt=$+D);const ut=this.getBBox().width;return ut>lt-$?lt+ut+1.5*i.leftPadding>z?$+w-5:lt+w+5:(lt-$)/2+$+w}).attr("y",function(Y,$){return $=Y.order,$*v+i.barHeight/2+(i.fontSize/2-2)+B}).attr("text-height",D).attr("class",function(Y){const $=p(Y.startTime);let lt=p(Y.endTime);Y.milestone&&(lt=$+D);const ut=this.getBBox().width;let W="";Y.classes.length>0&&(W=Y.classes.join(" "));let tt=0;for(let it=0;it<m.length;it++)Y.type===m[it]&&(tt=it%i.numberSectionStyles);let K="";return Y.active&&(Y.crit?K="activeCritText"+tt:K="activeText"+tt),Y.done?Y.crit?K=K+" doneCritText"+tt:K=K+" doneText"+tt:Y.crit&&(K=K+" critText"+tt),Y.milestone&&(K+=" milestoneText"),ut>lt-$?lt+ut+1.5*i.leftPadding>z?W+" taskTextOutsideLeft taskTextOutside"+tt+" "+K:W+" taskTextOutsideRight taskTextOutside"+tt+" "+K+" width-"+ut:W+" taskText taskText"+tt+" "+K+" width-"+ut}),nt().securityLevel==="sandbox"){let Y;Y=St("#i"+e);const $=Y.nodes()[0].contentDocument;X.filter(function(lt){return typeof ct[lt.id]<"u"}).each(function(lt){var ut=$.querySelector("#"+lt.id),W=$.querySelector("#"+lt.id+"-text");const tt=ut.parentNode;var K=$.createElement("a");K.setAttribute("xlink:href",ct[lt.id]),K.setAttribute("target","_top"),tt.appendChild(K),K.appendChild(ut),K.appendChild(W)})}}function k(L,v,B,w,D,N,z,X){const ct=N.reduce((tt,{startTime:K})=>tt?Math.min(tt,K):K,0),J=N.reduce((tt,{endTime:K})=>tt?Math.max(tt,K):K,0),Y=n.db.getDateFormat();if(!ct||!J)return;const $=[];let lt=null,ut=Xn(ct);for(;ut.valueOf()<=J;)n.db.isInvalidDate(ut,Y,z,X)?lt?lt.end=ut.clone():lt={start:ut.clone(),end:ut.clone()}:lt&&($.push(lt),lt=null),ut.add(1,"d");f.append("g").selectAll("rect").data($).enter().append("rect").attr("id",function(tt){return"exclude-"+tt.start.format("YYYY-MM-DD")}).attr("x",function(tt){return p(tt.start)+B}).attr("y",i.gridLineStartPadding).attr("width",function(tt){const 
K=tt.end.clone().add(1,"day");return p(K)-p(tt.start)}).attr("height",D-v-i.gridLineStartPadding).attr("transform-origin",function(tt,K){return(p(tt.start)+B+.5*(p(tt.end)-p(tt.start))).toString()+"px "+(K*L+.5*D).toString()+"px"}).attr("class","exclude-range")}function T(L,v,B,w){let D=$_(p).tickSize(-w+v+i.gridLineStartPadding).tickFormat(vc(n.db.getAxisFormat()||i.axisFormat||"%Y-%m-%d"));if(f.append("g").attr("class","grid").attr("transform","translate("+L+", "+(w-50)+")").call(D).selectAll("text").style("text-anchor","middle").attr("fill","#000").attr("stroke","none").attr("font-size",10).attr("dy","1em"),n.db.topAxisEnabled()||i.topAxis){let N=j_(p).tickSize(-w+v+i.gridLineStartPadding).tickFormat(vc(n.db.getAxisFormat()||i.axisFormat||"%Y-%m-%d"));f.append("g").attr("class","grid").attr("transform","translate("+L+", "+v+")").call(N).selectAll("text").style("text-anchor","middle").attr("fill","#000").attr("stroke","none").attr("font-size",10)}}function C(L,v){const B=[];let w=0;for(let D=0;D<m.length;D++)B[D]=[m[D],A(m[D],_)];f.append("g").selectAll("text").data(B).enter().append(function(D){const N=D[0].split(pe.lineBreakRegex),z=-(N.length-1)/2,X=l.createElementNS("http://www.w3.org/2000/svg","text");X.setAttribute("dy",z+"em");for(let ct=0;ct<N.length;ct++){const J=l.createElementNS("http://www.w3.org/2000/svg","tspan");J.setAttribute("alignment-baseline","central"),J.setAttribute("x","10"),ct>0&&J.setAttribute("dy","1em"),J.textContent=N[ct],X.appendChild(J)}return X}).attr("x",10).attr("y",function(D,N){if(N>0)for(let z=0;z<N;z++)return w+=B[N-1][1],D[1]*L/2+w*L+v;else return D[1]*L/2+v}).attr("font-size",i.sectionFontSize).attr("font-size",i.sectionFontSize).attr("class",function(D){for(let N=0;N<m.length;N++)if(D[0]===m[N])return"sectionTitle sectionTitle"+N%i.numberSectionStyles;return"sectionTitle"})}function M(L,v,B,w){const D=n.db.getTodayMarker();if(D==="off")return;const N=f.append("g").attr("class","today"),z=new 
Date,X=N.append("line");X.attr("x1",p(z)+L).attr("x2",p(z)+L).attr("y1",i.titleTopMargin).attr("y2",w-i.titleTopMargin).attr("class","today"),D!==""&&X.attr("style",D.replace(/,/g,";"))}function S(L){const v={},B=[];for(let w=0,D=L.length;w<D;++w)Object.prototype.hasOwnProperty.call(v,L[w])||(v[L[w]]=!0,B.push(L[w]));return B}function R(L){let v=L.length;const B={};for(;v;)B[L[--v]]=(B[L[v]]||0)+1;return B}function A(L,v){return R(v)[L]||0}}};var q4=function(){var t=function(a,s,o,l){for(o=o||{},l=a.length;l--;o[a[l]]=s);return o},e=[6,9,10],r={trace:function(){},yy:{},symbols_:{error:2,start:3,info:4,document:5,EOF:6,line:7,statement:8,NL:9,showInfo:10,$accept:0,$end:1},terminals_:{2:"error",4:"info",6:"EOF",9:"NL",10:"showInfo"},productions_:[0,[3,3],[5,0],[5,2],[7,1],[7,1],[8,1]],performAction:function(s,o,l,u,h,d,f){switch(d.length-1,h){case 1:return u;case 4:break;case 6:u.setInfo(!0);break}},table:[{3:1,4:[1,2]},{1:[3]},t(e,[2,2],{5:3}),{6:[1,4],7:5,8:6,9:[1,7],10:[1,8]},{1:[2,1]},t(e,[2,3]),t(e,[2,4]),t(e,[2,5]),t(e,[2,6])],defaultActions:{4:[2,1]},parseError:function(s,o){if(o.recoverable)this.trace(s);else{var l=new Error(s);throw l.hash=o,l}},parse:function(s){var o=this,l=[0],u=[],h=[null],d=[],f=this.table,p="",m=0,_=0,y=2,b=1,x=d.slice.call(arguments,1),k=Object.create(this.lexer),T={yy:{}};for(var C in this.yy)Object.prototype.hasOwnProperty.call(this.yy,C)&&(T.yy[C]=this.yy[C]);k.setInput(s,T.yy),T.yy.lexer=k,T.yy.parser=this,typeof k.yylloc>"u"&&(k.yylloc={});var M=k.yylloc;d.push(M);var S=k.options&&k.options.ranges;typeof T.yy.parseError=="function"?this.parseError=T.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function R(){var J;return J=u.pop()||k.lex()||b,typeof J!="number"&&(J instanceof Array&&(u=J,J=u.pop()),J=o.symbols_[J]||J),J}for(var A,L,v,B,w={},D,N,z,X;;){if(L=l[l.length-1],this.defaultActions[L]?v=this.defaultActions[L]:((A===null||typeof A>"u")&&(A=R()),v=f[L]&&f[L][A]),typeof v>"u"||!v.length||!v[0]){var 
ct="";X=[];for(D in f[L])this.terminals_[D]&&D>y&&X.push("'"+this.terminals_[D]+"'");k.showPosition?ct="Parse error on line "+(m+1)+`: -`+k.showPosition()+` -Expecting `+X.join(", ")+", got '"+(this.terminals_[A]||A)+"'":ct="Parse error on line "+(m+1)+": Unexpected "+(A==b?"end of input":"'"+(this.terminals_[A]||A)+"'"),this.parseError(ct,{text:k.match,token:this.terminals_[A]||A,line:k.yylineno,loc:M,expected:X})}if(v[0]instanceof Array&&v.length>1)throw new Error("Parse Error: multiple actions possible at state: "+L+", token: "+A);switch(v[0]){case 1:l.push(A),h.push(k.yytext),d.push(k.yylloc),l.push(v[1]),A=null,_=k.yyleng,p=k.yytext,m=k.yylineno,M=k.yylloc;break;case 2:if(N=this.productions_[v[1]][1],w.$=h[h.length-N],w._$={first_line:d[d.length-(N||1)].first_line,last_line:d[d.length-1].last_line,first_column:d[d.length-(N||1)].first_column,last_column:d[d.length-1].last_column},S&&(w._$.range=[d[d.length-(N||1)].range[0],d[d.length-1].range[1]]),B=this.performAction.apply(w,[p,_,m,T.yy,v[1],h,d].concat(x)),typeof B<"u")return B;N&&(l=l.slice(0,-1*N*2),h=h.slice(0,-1*N),d=d.slice(0,-1*N)),l.push(this.productions_[v[1]][0]),h.push(w.$),d.push(w._$),z=f[l[l.length-2]][l[l.length-1]],l.push(z);break;case 3:return!0}}return!0}},n=function(){var a={EOF:1,parseError:function(o,l){if(this.yy.parser)this.yy.parser.parseError(o,l);else throw new Error(o)},setInput:function(s,o){return this.yy=o||this.yy||{},this._input=s,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var s=this._input[0];this.yytext+=s,this.yyleng++,this.offset++,this.match+=s,this.matched+=s;var o=s.match(/(?:\r\n?|\n).*/g);return 
o?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),s},unput:function(s){var o=s.length,l=s.split(/(?:\r\n?|\n)/g);this._input=s+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-o),this.offset-=o;var u=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),l.length-1&&(this.yylineno-=l.length-1);var h=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:l?(l.length===u.length?this.yylloc.first_column:0)+u[u.length-l.length].length-l[0].length:this.yylloc.first_column-o},this.options.ranges&&(this.yylloc.range=[h[0],h[0]+this.yyleng-o]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(s){this.unput(this.match.slice(s))},pastInput:function(){var s=this.matched.substr(0,this.matched.length-this.match.length);return(s.length>20?"...":"")+s.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var s=this.match;return s.length<20&&(s+=this._input.substr(0,20-s.length)),(s.substr(0,20)+(s.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var s=this.pastInput(),o=new Array(s.length+1).join("-");return s+this.upcomingInput()+` -`+o+"^"},test_match:function(s,o){var l,u,h;if(this.options.backtrack_lexer&&(h={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(h.yylloc.range=this.yylloc.range.slice(0))),u=s[0].match(/(?:\r\n?|\n).*/g),u&&(this.yylineno+=u.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:u?u[u.length-1].length-u[u.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+s[0].length},this.yytext+=s[0],this.match+=s[0],this.matches=s,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(s[0].length),this.matched+=s[0],l=this.performAction.call(this,this.yy,this,o,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),l)return l;if(this._backtrack){for(var d in h)this[d]=h[d];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var s,o,l,u;this._more||(this.yytext="",this.match="");for(var 
h=this._currentRules(),d=0;d<h.length;d++)if(l=this._input.match(this.rules[h[d]]),l&&(!o||l[0].length>o[0].length)){if(o=l,u=d,this.options.backtrack_lexer){if(s=this.test_match(l,h[d]),s!==!1)return s;if(this._backtrack){o=!1;continue}else return!1}else if(!this.options.flex)break}return o?(s=this.test_match(o,h[u]),s!==!1?s:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var o=this.next();return o||this.lex()},begin:function(o){this.conditionStack.push(o)},popState:function(){var o=this.conditionStack.length-1;return o>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(o){return o=this.conditionStack.length-1-Math.abs(o||0),o>=0?this.conditionStack[o]:"INITIAL"},pushState:function(o){this.begin(o)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(o,l,u,h){switch(u){case 0:return 4;case 1:return 9;case 2:return"space";case 3:return 10;case 4:return 6;case 5:return"TXT"}},rules:[/^(?:info\b)/i,/^(?:[\s\n\r]+)/i,/^(?:[\s]+)/i,/^(?:showInfo\b)/i,/^(?:$)/i,/^(?:.)/i],conditions:{INITIAL:{rules:[0,1,2,3,4,5],inclusive:!0}}};return a}();r.lexer=n;function i(){this.yy={}}return i.prototype=r,r.Parser=i,new i}();q4.parser=q4;var iL="",aL=!1;const nit={setMessage:t=>{H.debug("Setting message to: "+t),iL=t},getMessage:()=>iL,setInfo:t=>{aL=t},getInfo:()=>aL,clear:ci},iit={draw:(t,e,r,n)=>{try{H.debug(`Rendering info diagram -`+t);const i=nt().securityLevel;let a;i==="sandbox"&&(a=St("#i"+e));const 
o=St(i==="sandbox"?a.nodes()[0].contentDocument.body:"body").select("#"+e);o.append("g").append("text").attr("x",100).attr("y",40).attr("class","version").attr("font-size","32px").style("text-anchor","middle").text("v "+r),o.attr("height",100),o.attr("width",400)}catch(i){H.error("Error while rendering info diagram"),H.error(i.message)}}},ait=t=>t.match(/^\s*info/)!==null;var V4=function(){var t=function(M,S,R,A){for(R=R||{},A=M.length;A--;R[M[A]]=S);return R},e=[1,4],r=[1,5],n=[1,6],i=[1,7],a=[1,9],s=[1,11,13,15,17,19,20,26,27,28,29],o=[2,5],l=[1,6,11,13,15,17,19,20,26,27,28,29],u=[26,27,28],h=[2,8],d=[1,18],f=[1,19],p=[1,20],m=[1,21],_=[1,22],y=[1,23],b=[1,28],x=[6,26,27,28,29],k={trace:function(){},yy:{},symbols_:{error:2,start:3,eol:4,directive:5,PIE:6,document:7,showData:8,line:9,statement:10,txt:11,value:12,title:13,title_value:14,acc_title:15,acc_title_value:16,acc_descr:17,acc_descr_value:18,acc_descr_multiline_value:19,section:20,openDirective:21,typeDirective:22,closeDirective:23,":":24,argDirective:25,NEWLINE:26,";":27,EOF:28,open_directive:29,type_directive:30,arg_directive:31,close_directive:32,$accept:0,$end:1},terminals_:{2:"error",6:"PIE",8:"showData",11:"txt",12:"value",13:"title",14:"title_value",15:"acc_title",16:"acc_title_value",17:"acc_descr",18:"acc_descr_value",19:"acc_descr_multiline_value",20:"section",24:":",26:"NEWLINE",27:";",28:"EOF",29:"open_directive",30:"type_directive",31:"arg_directive",32:"close_directive"},productions_:[0,[3,2],[3,2],[3,2],[3,3],[7,0],[7,2],[9,2],[10,0],[10,2],[10,2],[10,2],[10,2],[10,1],[10,1],[10,1],[5,3],[5,5],[4,1],[4,1],[4,1],[21,1],[22,1],[25,1],[23,1]],performAction:function(S,R,A,L,v,B,w){var D=B.length-1;switch(v){case 4:L.setShowData(!0);break;case 7:this.$=B[D-1];break;case 9:L.addSection(B[D-1],L.cleanupValue(B[D]));break;case 10:this.$=B[D].trim(),L.setDiagramTitle(this.$);break;case 11:this.$=B[D].trim(),L.setAccTitle(this.$);break;case 12:case 
13:this.$=B[D].trim(),L.setAccDescription(this.$);break;case 14:L.addSection(B[D].substr(8)),this.$=B[D].substr(8);break;case 21:L.parseDirective("%%{","open_directive");break;case 22:L.parseDirective(B[D],"type_directive");break;case 23:B[D]=B[D].trim().replace(/'/g,'"'),L.parseDirective(B[D],"arg_directive");break;case 24:L.parseDirective("}%%","close_directive","pie");break}},table:[{3:1,4:2,5:3,6:e,21:8,26:r,27:n,28:i,29:a},{1:[3]},{3:10,4:2,5:3,6:e,21:8,26:r,27:n,28:i,29:a},{3:11,4:2,5:3,6:e,21:8,26:r,27:n,28:i,29:a},t(s,o,{7:12,8:[1,13]}),t(l,[2,18]),t(l,[2,19]),t(l,[2,20]),{22:14,30:[1,15]},{30:[2,21]},{1:[2,1]},{1:[2,2]},t(u,h,{21:8,9:16,10:17,5:24,1:[2,3],11:d,13:f,15:p,17:m,19:_,20:y,29:a}),t(s,o,{7:25}),{23:26,24:[1,27],32:b},t([24,32],[2,22]),t(s,[2,6]),{4:29,26:r,27:n,28:i},{12:[1,30]},{14:[1,31]},{16:[1,32]},{18:[1,33]},t(u,[2,13]),t(u,[2,14]),t(u,[2,15]),t(u,h,{21:8,9:16,10:17,5:24,1:[2,4],11:d,13:f,15:p,17:m,19:_,20:y,29:a}),t(x,[2,16]),{25:34,31:[1,35]},t(x,[2,24]),t(s,[2,7]),t(u,[2,9]),t(u,[2,10]),t(u,[2,11]),t(u,[2,12]),{23:36,32:b},{32:[2,23]},t(x,[2,17])],defaultActions:{9:[2,21],10:[2,1],11:[2,2],35:[2,23]},parseError:function(S,R){if(R.recoverable)this.trace(S);else{var A=new Error(S);throw A.hash=R,A}},parse:function(S){var R=this,A=[0],L=[],v=[null],B=[],w=this.table,D="",N=0,z=0,X=2,ct=1,J=B.slice.call(arguments,1),Y=Object.create(this.lexer),$={yy:{}};for(var lt in this.yy)Object.prototype.hasOwnProperty.call(this.yy,lt)&&($.yy[lt]=this.yy[lt]);Y.setInput(S,$.yy),$.yy.lexer=Y,$.yy.parser=this,typeof Y.yylloc>"u"&&(Y.yylloc={});var ut=Y.yylloc;B.push(ut);var W=Y.options&&Y.options.ranges;typeof $.yy.parseError=="function"?this.parseError=$.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function tt(){var et;return et=L.pop()||Y.lex()||ct,typeof et!="number"&&(et instanceof Array&&(L=et,et=L.pop()),et=R.symbols_[et]||et),et}for(var 
K,it,Z,V,Q={},q,U,F,j;;){if(it=A[A.length-1],this.defaultActions[it]?Z=this.defaultActions[it]:((K===null||typeof K>"u")&&(K=tt()),Z=w[it]&&w[it][K]),typeof Z>"u"||!Z.length||!Z[0]){var P="";j=[];for(q in w[it])this.terminals_[q]&&q>X&&j.push("'"+this.terminals_[q]+"'");Y.showPosition?P="Parse error on line "+(N+1)+`: -`+Y.showPosition()+` -Expecting `+j.join(", ")+", got '"+(this.terminals_[K]||K)+"'":P="Parse error on line "+(N+1)+": Unexpected "+(K==ct?"end of input":"'"+(this.terminals_[K]||K)+"'"),this.parseError(P,{text:Y.match,token:this.terminals_[K]||K,line:Y.yylineno,loc:ut,expected:j})}if(Z[0]instanceof Array&&Z.length>1)throw new Error("Parse Error: multiple actions possible at state: "+it+", token: "+K);switch(Z[0]){case 1:A.push(K),v.push(Y.yytext),B.push(Y.yylloc),A.push(Z[1]),K=null,z=Y.yyleng,D=Y.yytext,N=Y.yylineno,ut=Y.yylloc;break;case 2:if(U=this.productions_[Z[1]][1],Q.$=v[v.length-U],Q._$={first_line:B[B.length-(U||1)].first_line,last_line:B[B.length-1].last_line,first_column:B[B.length-(U||1)].first_column,last_column:B[B.length-1].last_column},W&&(Q._$.range=[B[B.length-(U||1)].range[0],B[B.length-1].range[1]]),V=this.performAction.apply(Q,[D,z,N,$.yy,Z[1],v,B].concat(J)),typeof V<"u")return V;U&&(A=A.slice(0,-1*U*2),v=v.slice(0,-1*U),B=B.slice(0,-1*U)),A.push(this.productions_[Z[1]][0]),v.push(Q.$),B.push(Q._$),F=w[A[A.length-2]][A[A.length-1]],A.push(F);break;case 3:return!0}}return!0}},T=function(){var M={EOF:1,parseError:function(R,A){if(this.yy.parser)this.yy.parser.parseError(R,A);else throw new Error(R)},setInput:function(S,R){return this.yy=R||this.yy||{},this._input=S,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var 
S=this._input[0];this.yytext+=S,this.yyleng++,this.offset++,this.match+=S,this.matched+=S;var R=S.match(/(?:\r\n?|\n).*/g);return R?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),S},unput:function(S){var R=S.length,A=S.split(/(?:\r\n?|\n)/g);this._input=S+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-R),this.offset-=R;var L=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),A.length-1&&(this.yylineno-=A.length-1);var v=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:A?(A.length===L.length?this.yylloc.first_column:0)+L[L.length-A.length].length-A[0].length:this.yylloc.first_column-R},this.options.ranges&&(this.yylloc.range=[v[0],v[0]+this.yyleng-R]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(S){this.unput(this.match.slice(S))},pastInput:function(){var S=this.matched.substr(0,this.matched.length-this.match.length);return(S.length>20?"...":"")+S.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var S=this.match;return S.length<20&&(S+=this._input.substr(0,20-S.length)),(S.substr(0,20)+(S.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var S=this.pastInput(),R=new Array(S.length+1).join("-");return S+this.upcomingInput()+` -`+R+"^"},test_match:function(S,R){var A,L,v;if(this.options.backtrack_lexer&&(v={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(v.yylloc.range=this.yylloc.range.slice(0))),L=S[0].match(/(?:\r\n?|\n).*/g),L&&(this.yylineno+=L.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:L?L[L.length-1].length-L[L.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+S[0].length},this.yytext+=S[0],this.match+=S[0],this.matches=S,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(S[0].length),this.matched+=S[0],A=this.performAction.call(this,this.yy,this,R,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),A)return A;if(this._backtrack){for(var B in v)this[B]=v[B];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var S,R,A,L;this._more||(this.yytext="",this.match="");for(var 
v=this._currentRules(),B=0;B<v.length;B++)if(A=this._input.match(this.rules[v[B]]),A&&(!R||A[0].length>R[0].length)){if(R=A,L=B,this.options.backtrack_lexer){if(S=this.test_match(A,v[B]),S!==!1)return S;if(this._backtrack){R=!1;continue}else return!1}else if(!this.options.flex)break}return R?(S=this.test_match(R,v[L]),S!==!1?S:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var R=this.next();return R||this.lex()},begin:function(R){this.conditionStack.push(R)},popState:function(){var R=this.conditionStack.length-1;return R>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(R){return R=this.conditionStack.length-1-Math.abs(R||0),R>=0?this.conditionStack[R]:"INITIAL"},pushState:function(R){this.begin(R)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(R,A,L,v){switch(L){case 0:return this.begin("open_directive"),29;case 1:return this.begin("type_directive"),30;case 2:return this.popState(),this.begin("arg_directive"),24;case 3:return this.popState(),this.popState(),32;case 4:return 31;case 5:break;case 6:break;case 7:return 26;case 8:break;case 9:break;case 10:return this.begin("title"),13;case 11:return this.popState(),"title_value";case 12:return this.begin("acc_title"),15;case 13:return this.popState(),"acc_title_value";case 14:return this.begin("acc_descr"),17;case 15:return this.popState(),"acc_descr_value";case 16:this.begin("acc_descr_multiline");break;case 17:this.popState();break;case 18:return"acc_descr_multiline_value";case 19:this.begin("string");break;case 20:this.popState();break;case 21:return"txt";case 22:return 
6;case 23:return 8;case 24:return"value";case 25:return 28}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n\r]+)/i,/^(?:%%[^\n]*)/i,/^(?:[\s]+)/i,/^(?:title\b)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:pie\b)/i,/^(?:showData\b)/i,/^(?::[\s]*[\d]+(?:\.[\d]+)?)/i,/^(?:$)/i],conditions:{acc_descr_multiline:{rules:[17,18],inclusive:!1},acc_descr:{rules:[15],inclusive:!1},acc_title:{rules:[13],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_directive:{rules:[1],inclusive:!1},title:{rules:[11],inclusive:!1},string:{rules:[20,21],inclusive:!1},INITIAL:{rules:[0,5,6,7,8,9,10,12,14,16,19,22,23,24,25],inclusive:!0}}};return M}();k.lexer=T;function C(){this.yy={}}return C.prototype=k,k.Parser=C,new C}();V4.parser=V4;const sit=t=>t.match(/^\s*pie/)!==null;let c0={},z4=!1;const oit={parseDirective:function(t,e,r){Xe.parseDirective(this,t,e,r)},getConfig:()=>nt().pie,addSection:function(t,e){t=pe.sanitizeText(t,nt()),typeof c0[t]>"u"&&(c0[t]=e,H.debug("Added new section :",t))},getSections:()=>c0,cleanupValue:function(t){return t.substring(0,1)===":"&&(t=t.substring(1).trim()),Number(t.trim())},clear:function(){c0={},z4=!1,ci()},setAccTitle:Yn,getAccTitle:ui,setDiagramTitle:c1,getDiagramTitle:u1,setShowData:function(t){z4=t},getShowData:function(){return z4},getAccDescription:fi,setAccDescription:hi};let ga=nt(),ya;const lu=450,lit={draw:(t,e,r,n)=>{try{ga=nt(),H.debug(`Rendering info diagram -`+t);const b=nt().securityLevel;let x;b==="sandbox"&&(x=St("#i"+e));const 
k=St(b==="sandbox"?x.nodes()[0].contentDocument.body:"body"),T=b==="sandbox"?x.nodes()[0].contentDocument:document;n.db.clear(),n.parser.parse(t),H.debug("Parsed info diagram");const C=T.getElementById(e);ya=C.parentElement.offsetWidth,typeof ya>"u"&&(ya=1200),typeof ga.useWidth<"u"&&(ya=ga.useWidth),typeof ga.pie.useWidth<"u"&&(ya=ga.pie.useWidth);const M=k.select("#"+e);li(M,lu,ya,ga.pie.useMaxWidth),bn(n.db,M,e),C.setAttribute("viewBox","0 0 "+ya+" "+lu);var i=40,a=18,s=4,o=Math.min(ya,lu)/2-i,l=M.append("g").attr("transform","translate("+ya/2+","+lu/2+")"),u=n.db.getSections(),h=0;Object.keys(u).forEach(function(R){h+=u[R]});const S=ga.themeVariables;var d=[S.pie1,S.pie2,S.pie3,S.pie4,S.pie5,S.pie6,S.pie7,S.pie8,S.pie9,S.pie10,S.pie11,S.pie12],f=nf().range(d),p=B7().value(function(R){return R[1]}),m=p(Object.entries(u)),_=yf().innerRadius(0).outerRadius(o);l.selectAll("mySlices").data(m).enter().append("path").attr("d",_).attr("fill",function(R){return f(R.data[0])}).attr("class","pieCircle"),l.selectAll("mySlices").data(m).enter().append("text").text(function(R){return(R.data[1]/h*100).toFixed(0)+"%"}).attr("transform",function(R){return"translate("+_.centroid(R)+")"}).style("text-anchor","middle").attr("class","slice"),l.append("text").text(n.db.getDiagramTitle()).attr("x",0).attr("y",-(lu-50)/2).attr("class","pieTitleText");var y=l.selectAll(".legend").data(f.domain()).enter().append("g").attr("class","legend").attr("transform",function(R,A){var L=a+s,v=L*f.domain().length/2,B=12*a,w=A*L-v;return"translate("+B+","+w+")"});y.append("rect").attr("width",a).attr("height",a).style("fill",f).style("stroke",f),y.data(m).append("text").attr("x",a+s).attr("y",a-s).text(function(R){return n.db.getShowData()||ga.showData||ga.pie.showData?R.data[0]+" ["+R.data[1]+"]":R.data[0]})}catch(b){H.error("Error while rendering info diagram"),H.error(b)}}};var Y4=function(){var t=function(it,Z,V,Q){for(V=V||{},Q=it.length;Q--;V[it[Q]]=Z);return 
V},e=[1,3],r=[1,5],n=[1,6],i=[1,7],a=[1,8],s=[5,6,8,14,16,18,19,40,41,42,43,44,45,53,71,72],o=[1,22],l=[2,13],u=[1,26],h=[1,27],d=[1,28],f=[1,29],p=[1,30],m=[1,31],_=[1,24],y=[1,32],b=[1,33],x=[1,36],k=[71,72],T=[5,8,14,16,18,19,40,41,42,43,44,45,53,60,62,71,72],C=[1,56],M=[1,57],S=[1,58],R=[1,59],A=[1,60],L=[1,61],v=[1,62],B=[62,63],w=[1,74],D=[1,70],N=[1,71],z=[1,72],X=[1,73],ct=[1,75],J=[1,79],Y=[1,80],$=[1,77],lt=[1,78],ut=[5,8,14,16,18,19,40,41,42,43,44,45,53,71,72],W={trace:function(){},yy:{},symbols_:{error:2,start:3,directive:4,NEWLINE:5,RD:6,diagram:7,EOF:8,openDirective:9,typeDirective:10,closeDirective:11,":":12,argDirective:13,acc_title:14,acc_title_value:15,acc_descr:16,acc_descr_value:17,acc_descr_multiline_value:18,open_directive:19,type_directive:20,arg_directive:21,close_directive:22,requirementDef:23,elementDef:24,relationshipDef:25,requirementType:26,requirementName:27,STRUCT_START:28,requirementBody:29,ID:30,COLONSEP:31,id:32,TEXT:33,text:34,RISK:35,riskLevel:36,VERIFYMTHD:37,verifyType:38,STRUCT_STOP:39,REQUIREMENT:40,FUNCTIONAL_REQUIREMENT:41,INTERFACE_REQUIREMENT:42,PERFORMANCE_REQUIREMENT:43,PHYSICAL_REQUIREMENT:44,DESIGN_CONSTRAINT:45,LOW_RISK:46,MED_RISK:47,HIGH_RISK:48,VERIFY_ANALYSIS:49,VERIFY_DEMONSTRATION:50,VERIFY_INSPECTION:51,VERIFY_TEST:52,ELEMENT:53,elementName:54,elementBody:55,TYPE:56,type:57,DOCREF:58,ref:59,END_ARROW_L:60,relationship:61,LINE:62,END_ARROW_R:63,CONTAINS:64,COPIES:65,DERIVES:66,SATISFIES:67,VERIFIES:68,REFINES:69,TRACES:70,unqString:71,qString:72,$accept:0,$end:1},terminals_:{2:"error",5:"NEWLINE",6:"RD",8:"EOF",12:":",14:"acc_title",15:"acc_title_value",16:"acc_descr",17:"acc_descr_value",18:"acc_descr_multiline_value",19:"open_directive",20:"type_directive",21:"arg_directive",22:"close_directive",28:"STRUCT_START",30:"ID",31:"COLONSEP",33:"TEXT",35:"RISK",37:"VERIFYMTHD",39:"STRUCT_STOP",40:"REQUIREMENT",41:"FUNCTIONAL_REQUIREMENT",42:"INTERFACE_REQUIREMENT",43:"PERFORMANCE_REQUIREMENT",44:"PHYSICAL_REQUIREMENT
",45:"DESIGN_CONSTRAINT",46:"LOW_RISK",47:"MED_RISK",48:"HIGH_RISK",49:"VERIFY_ANALYSIS",50:"VERIFY_DEMONSTRATION",51:"VERIFY_INSPECTION",52:"VERIFY_TEST",53:"ELEMENT",56:"TYPE",58:"DOCREF",60:"END_ARROW_L",62:"LINE",63:"END_ARROW_R",64:"CONTAINS",65:"COPIES",66:"DERIVES",67:"SATISFIES",68:"VERIFIES",69:"REFINES",70:"TRACES",71:"unqString",72:"qString"},productions_:[0,[3,3],[3,2],[3,4],[4,3],[4,5],[4,2],[4,2],[4,1],[9,1],[10,1],[13,1],[11,1],[7,0],[7,2],[7,2],[7,2],[7,2],[7,2],[23,5],[29,5],[29,5],[29,5],[29,5],[29,2],[29,1],[26,1],[26,1],[26,1],[26,1],[26,1],[26,1],[36,1],[36,1],[36,1],[38,1],[38,1],[38,1],[38,1],[24,5],[55,5],[55,5],[55,2],[55,1],[25,5],[25,5],[61,1],[61,1],[61,1],[61,1],[61,1],[61,1],[61,1],[27,1],[27,1],[32,1],[32,1],[34,1],[34,1],[54,1],[54,1],[57,1],[57,1],[59,1],[59,1]],performAction:function(Z,V,Q,q,U,F,j){var P=F.length-1;switch(U){case 6:this.$=F[P].trim(),q.setAccTitle(this.$);break;case 7:case 8:this.$=F[P].trim(),q.setAccDescription(this.$);break;case 9:q.parseDirective("%%{","open_directive");break;case 10:q.parseDirective(F[P],"type_directive");break;case 11:F[P]=F[P].trim().replace(/'/g,'"'),q.parseDirective(F[P],"arg_directive");break;case 12:q.parseDirective("}%%","close_directive","pie");break;case 13:this.$=[];break;case 19:q.addRequirement(F[P-3],F[P-4]);break;case 20:q.setNewReqId(F[P-2]);break;case 21:q.setNewReqText(F[P-2]);break;case 22:q.setNewReqRisk(F[P-2]);break;case 23:q.setNewReqVerifyMethod(F[P-2]);break;case 26:this.$=q.RequirementType.REQUIREMENT;break;case 27:this.$=q.RequirementType.FUNCTIONAL_REQUIREMENT;break;case 28:this.$=q.RequirementType.INTERFACE_REQUIREMENT;break;case 29:this.$=q.RequirementType.PERFORMANCE_REQUIREMENT;break;case 30:this.$=q.RequirementType.PHYSICAL_REQUIREMENT;break;case 31:this.$=q.RequirementType.DESIGN_CONSTRAINT;break;case 32:this.$=q.RiskLevel.LOW_RISK;break;case 33:this.$=q.RiskLevel.MED_RISK;break;case 34:this.$=q.RiskLevel.HIGH_RISK;break;case 
35:this.$=q.VerifyType.VERIFY_ANALYSIS;break;case 36:this.$=q.VerifyType.VERIFY_DEMONSTRATION;break;case 37:this.$=q.VerifyType.VERIFY_INSPECTION;break;case 38:this.$=q.VerifyType.VERIFY_TEST;break;case 39:q.addElement(F[P-3]);break;case 40:q.setNewElementType(F[P-2]);break;case 41:q.setNewElementDocRef(F[P-2]);break;case 44:q.addRelationship(F[P-2],F[P],F[P-4]);break;case 45:q.addRelationship(F[P-2],F[P-4],F[P]);break;case 46:this.$=q.Relationships.CONTAINS;break;case 47:this.$=q.Relationships.COPIES;break;case 48:this.$=q.Relationships.DERIVES;break;case 49:this.$=q.Relationships.SATISFIES;break;case 50:this.$=q.Relationships.VERIFIES;break;case 51:this.$=q.Relationships.REFINES;break;case 52:this.$=q.Relationships.TRACES;break}},table:[{3:1,4:2,6:e,9:4,14:r,16:n,18:i,19:a},{1:[3]},{3:10,4:2,5:[1,9],6:e,9:4,14:r,16:n,18:i,19:a},{5:[1,11]},{10:12,20:[1,13]},{15:[1,14]},{17:[1,15]},t(s,[2,8]),{20:[2,9]},{3:16,4:2,6:e,9:4,14:r,16:n,18:i,19:a},{1:[2,2]},{4:21,5:o,7:17,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{11:34,12:[1,35],22:x},t([12,22],[2,10]),t(s,[2,6]),t(s,[2,7]),{1:[2,1]},{8:[1,37]},{4:21,5:o,7:38,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{4:21,5:o,7:39,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{4:21,5:o,7:40,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{4:21,5:o,7:41,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{4:21,5:o,7:42,8:l,9:4,14:r,16:n,18:i,19:a,23:18,24:19,25:20,26:23,32:25,40:u,41:h,42:d,43:f,44:p,45:m,53:_,71:y,72:b},{27:43,71:[1,44],72:[1,45]},{54:46,71:[1,47],72:[1,48]},{60:[1,49],62:[1,50]},t(k,[2,26]),t(k,[2,27]),t(k,[2,28]),t(k,[2,29]),t(k,[2,30]),t(k,[2,31]),t(T,[2,55]),t(T,[2,56]),t(s,[2,4]),{13:51,21:[1,52]},t(s,[2,12]),{1:[2,3]},{8:[2,14]},
{8:[2,15]},{8:[2,16]},{8:[2,17]},{8:[2,18]},{28:[1,53]},{28:[2,53]},{28:[2,54]},{28:[1,54]},{28:[2,59]},{28:[2,60]},{61:55,64:C,65:M,66:S,67:R,68:A,69:L,70:v},{61:63,64:C,65:M,66:S,67:R,68:A,69:L,70:v},{11:64,22:x},{22:[2,11]},{5:[1,65]},{5:[1,66]},{62:[1,67]},t(B,[2,46]),t(B,[2,47]),t(B,[2,48]),t(B,[2,49]),t(B,[2,50]),t(B,[2,51]),t(B,[2,52]),{63:[1,68]},t(s,[2,5]),{5:w,29:69,30:D,33:N,35:z,37:X,39:ct},{5:J,39:Y,55:76,56:$,58:lt},{32:81,71:y,72:b},{32:82,71:y,72:b},t(ut,[2,19]),{31:[1,83]},{31:[1,84]},{31:[1,85]},{31:[1,86]},{5:w,29:87,30:D,33:N,35:z,37:X,39:ct},t(ut,[2,25]),t(ut,[2,39]),{31:[1,88]},{31:[1,89]},{5:J,39:Y,55:90,56:$,58:lt},t(ut,[2,43]),t(ut,[2,44]),t(ut,[2,45]),{32:91,71:y,72:b},{34:92,71:[1,93],72:[1,94]},{36:95,46:[1,96],47:[1,97],48:[1,98]},{38:99,49:[1,100],50:[1,101],51:[1,102],52:[1,103]},t(ut,[2,24]),{57:104,71:[1,105],72:[1,106]},{59:107,71:[1,108],72:[1,109]},t(ut,[2,42]),{5:[1,110]},{5:[1,111]},{5:[2,57]},{5:[2,58]},{5:[1,112]},{5:[2,32]},{5:[2,33]},{5:[2,34]},{5:[1,113]},{5:[2,35]},{5:[2,36]},{5:[2,37]},{5:[2,38]},{5:[1,114]},{5:[2,61]},{5:[2,62]},{5:[1,115]},{5:[2,63]},{5:[2,64]},{5:w,29:116,30:D,33:N,35:z,37:X,39:ct},{5:w,29:117,30:D,33:N,35:z,37:X,39:ct},{5:w,29:118,30:D,33:N,35:z,37:X,39:ct},{5:w,29:119,30:D,33:N,35:z,37:X,39:ct},{5:J,39:Y,55:120,56:$,58:lt},{5:J,39:Y,55:121,56:$,58:lt},t(ut,[2,20]),t(ut,[2,21]),t(ut,[2,22]),t(ut,[2,23]),t(ut,[2,40]),t(ut,[2,41])],defaultActions:{8:[2,9],10:[2,2],16:[2,1],37:[2,3],38:[2,14],39:[2,15],40:[2,16],41:[2,17],42:[2,18],44:[2,53],45:[2,54],47:[2,59],48:[2,60],52:[2,11],93:[2,57],94:[2,58],96:[2,32],97:[2,33],98:[2,34],100:[2,35],101:[2,36],102:[2,37],103:[2,38],105:[2,61],106:[2,62],108:[2,63],109:[2,64]},parseError:function(Z,V){if(V.recoverable)this.trace(Z);else{var Q=new Error(Z);throw Q.hash=V,Q}},parse:function(Z){var V=this,Q=[0],q=[],U=[null],F=[],j=this.table,P="",et=0,at=0,It=2,Lt=1,Rt=F.slice.call(arguments,1),Ct=Object.create(this.lexer),pt={yy:{}};for(var mt in 
this.yy)Object.prototype.hasOwnProperty.call(this.yy,mt)&&(pt.yy[mt]=this.yy[mt]);Ct.setInput(Z,pt.yy),pt.yy.lexer=Ct,pt.yy.parser=this,typeof Ct.yylloc>"u"&&(Ct.yylloc={});var vt=Ct.yylloc;F.push(vt);var Tt=Ct.options&&Ct.options.ranges;typeof pt.yy.parseError=="function"?this.parseError=pt.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function ft(){var bt;return bt=q.pop()||Ct.lex()||Lt,typeof bt!="number"&&(bt instanceof Array&&(q=bt,bt=q.pop()),bt=V.symbols_[bt]||bt),bt}for(var le,Dt,Gt,$t,Qt={},we,jt,Ft,zt;;){if(Dt=Q[Q.length-1],this.defaultActions[Dt]?Gt=this.defaultActions[Dt]:((le===null||typeof le>"u")&&(le=ft()),Gt=j[Dt]&&j[Dt][le]),typeof Gt>"u"||!Gt.length||!Gt[0]){var wt="";zt=[];for(we in j[Dt])this.terminals_[we]&&we>It&&zt.push("'"+this.terminals_[we]+"'");Ct.showPosition?wt="Parse error on line "+(et+1)+`: -`+Ct.showPosition()+` -Expecting `+zt.join(", ")+", got '"+(this.terminals_[le]||le)+"'":wt="Parse error on line "+(et+1)+": Unexpected "+(le==Lt?"end of input":"'"+(this.terminals_[le]||le)+"'"),this.parseError(wt,{text:Ct.match,token:this.terminals_[le]||le,line:Ct.yylineno,loc:vt,expected:zt})}if(Gt[0]instanceof Array&&Gt.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Dt+", token: "+le);switch(Gt[0]){case 1:Q.push(le),U.push(Ct.yytext),F.push(Ct.yylloc),Q.push(Gt[1]),le=null,at=Ct.yyleng,P=Ct.yytext,et=Ct.yylineno,vt=Ct.yylloc;break;case 2:if(jt=this.productions_[Gt[1]][1],Qt.$=U[U.length-jt],Qt._$={first_line:F[F.length-(jt||1)].first_line,last_line:F[F.length-1].last_line,first_column:F[F.length-(jt||1)].first_column,last_column:F[F.length-1].last_column},Tt&&(Qt._$.range=[F[F.length-(jt||1)].range[0],F[F.length-1].range[1]]),$t=this.performAction.apply(Qt,[P,at,et,pt.yy,Gt[1],U,F].concat(Rt)),typeof $t<"u")return 
$t;jt&&(Q=Q.slice(0,-1*jt*2),U=U.slice(0,-1*jt),F=F.slice(0,-1*jt)),Q.push(this.productions_[Gt[1]][0]),U.push(Qt.$),F.push(Qt._$),Ft=j[Q[Q.length-2]][Q[Q.length-1]],Q.push(Ft);break;case 3:return!0}}return!0}},tt=function(){var it={EOF:1,parseError:function(V,Q){if(this.yy.parser)this.yy.parser.parseError(V,Q);else throw new Error(V)},setInput:function(Z,V){return this.yy=V||this.yy||{},this._input=Z,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var Z=this._input[0];this.yytext+=Z,this.yyleng++,this.offset++,this.match+=Z,this.matched+=Z;var V=Z.match(/(?:\r\n?|\n).*/g);return V?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),Z},unput:function(Z){var V=Z.length,Q=Z.split(/(?:\r\n?|\n)/g);this._input=Z+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-V),this.offset-=V;var q=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),Q.length-1&&(this.yylineno-=Q.length-1);var U=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:Q?(Q.length===q.length?this.yylloc.first_column:0)+q[q.length-Q.length].length-Q[0].length:this.yylloc.first_column-V},this.options.ranges&&(this.yylloc.range=[U[0],U[0]+this.yyleng-V]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. 
You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). -`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(Z){this.unput(this.match.slice(Z))},pastInput:function(){var Z=this.matched.substr(0,this.matched.length-this.match.length);return(Z.length>20?"...":"")+Z.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var Z=this.match;return Z.length<20&&(Z+=this._input.substr(0,20-Z.length)),(Z.substr(0,20)+(Z.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var Z=this.pastInput(),V=new Array(Z.length+1).join("-");return Z+this.upcomingInput()+` -`+V+"^"},test_match:function(Z,V){var Q,q,U;if(this.options.backtrack_lexer&&(U={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(U.yylloc.range=this.yylloc.range.slice(0))),q=Z[0].match(/(?:\r\n?|\n).*/g),q&&(this.yylineno+=q.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:q?q[q.length-1].length-q[q.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+Z[0].length},this.yytext+=Z[0],this.match+=Z[0],this.matches=Z,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(Z[0].length),this.matched+=Z[0],Q=this.performAction.call(this,this.yy,this,V,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),Q)return Q;if(this._backtrack){for(var F in U)this[F]=U[F];return!1}return!1},next:function(){if(this.done)return 
this.EOF;this._input||(this.done=!0);var Z,V,Q,q;this._more||(this.yytext="",this.match="");for(var U=this._currentRules(),F=0;F<U.length;F++)if(Q=this._input.match(this.rules[U[F]]),Q&&(!V||Q[0].length>V[0].length)){if(V=Q,q=F,this.options.backtrack_lexer){if(Z=this.test_match(Q,U[F]),Z!==!1)return Z;if(this._backtrack){V=!1;continue}else return!1}else if(!this.options.flex)break}return V?(Z=this.test_match(V,U[q]),Z!==!1?Z:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var V=this.next();return V||this.lex()},begin:function(V){this.conditionStack.push(V)},popState:function(){var V=this.conditionStack.length-1;return V>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(V){return V=this.conditionStack.length-1-Math.abs(V||0),V>=0?this.conditionStack[V]:"INITIAL"},pushState:function(V){this.begin(V)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(V,Q,q,U){switch(q){case 0:return this.begin("open_directive"),19;case 1:return this.begin("type_directive"),20;case 2:return this.popState(),this.begin("arg_directive"),12;case 3:return this.popState(),this.popState(),22;case 4:return 21;case 5:return"title";case 6:return this.begin("acc_title"),14;case 7:return this.popState(),"acc_title_value";case 8:return this.begin("acc_descr"),16;case 9:return this.popState(),"acc_descr_value";case 10:this.begin("acc_descr_multiline");break;case 11:this.popState();break;case 12:return"acc_descr_multiline_value";case 13:return 5;case 14:break;case 15:break;case 16:break;case 17:return 8;case 18:return 6;case 19:return 28;case 20:return 39;case 
21:return 31;case 22:return 30;case 23:return 33;case 24:return 35;case 25:return 37;case 26:return 40;case 27:return 41;case 28:return 42;case 29:return 43;case 30:return 44;case 31:return 45;case 32:return 46;case 33:return 47;case 34:return 48;case 35:return 49;case 36:return 50;case 37:return 51;case 38:return 52;case 39:return 53;case 40:return 64;case 41:return 65;case 42:return 66;case 43:return 67;case 44:return 68;case 45:return 69;case 46:return 70;case 47:return 56;case 48:return 58;case 49:return 60;case 50:return 63;case 51:return 62;case 52:this.begin("string");break;case 53:this.popState();break;case 54:return"qString";case 55:return Q.yytext=Q.yytext.trim(),71}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:title\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:(\r?\n)+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:$)/i,/^(?:requirementDiagram\b)/i,/^(?:\{)/i,/^(?:\})/i,/^(?::)/i,/^(?:id\b)/i,/^(?:text\b)/i,/^(?:risk\b)/i,/^(?:verifyMethod\b)/i,/^(?:requirement\b)/i,/^(?:functionalRequirement\b)/i,/^(?:interfaceRequirement\b)/i,/^(?:performanceRequirement\b)/i,/^(?:physicalRequirement\b)/i,/^(?:designConstraint\b)/i,/^(?:low\b)/i,/^(?:medium\b)/i,/^(?:high\b)/i,/^(?:analysis\b)/i,/^(?:demonstration\b)/i,/^(?:inspection\b)/i,/^(?:test\b)/i,/^(?:element\b)/i,/^(?:contains\b)/i,/^(?:copies\b)/i,/^(?:derives\b)/i,/^(?:satisfies\b)/i,/^(?:verifies\b)/i,/^(?:refines\b)/i,/^(?:traces\b)/i,/^(?:type\b)/i,/^(?:docref\b)/i,/^(?:<-)/i,/^(?:->)/i,/^(?:-)/i,/^(?:["])/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[\w][^\r\n\{\<\>\-\=]*)/i],conditions:{acc_descr_multiline:{rules:[11,12],inclusive:!1},acc_descr:{rules:[9],inclusive:!1},acc_title:{rules:[7],inclusive:!1},close_directive:{rules:[],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},open_dir
ective:{rules:[1],inclusive:!1},unqString:{rules:[],inclusive:!1},token:{rules:[],inclusive:!1},string:{rules:[53,54],inclusive:!1},INITIAL:{rules:[0,5,6,8,10,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,55],inclusive:!0}}};return it}();W.lexer=tt;function K(){this.yy={}}return K.prototype=W,W.Parser=K,new K}();Y4.parser=Y4;const cit=t=>t.match(/^\s*requirement(Diagram)?/)!==null;let U4=[],kn={},cu={},rs={},uu={};const uit={RequirementType:{REQUIREMENT:"Requirement",FUNCTIONAL_REQUIREMENT:"Functional Requirement",INTERFACE_REQUIREMENT:"Interface Requirement",PERFORMANCE_REQUIREMENT:"Performance Requirement",PHYSICAL_REQUIREMENT:"Physical Requirement",DESIGN_CONSTRAINT:"Design Constraint"},RiskLevel:{LOW_RISK:"Low",MED_RISK:"Medium",HIGH_RISK:"High"},VerifyType:{VERIFY_ANALYSIS:"Analysis",VERIFY_DEMONSTRATION:"Demonstration",VERIFY_INSPECTION:"Inspection",VERIFY_TEST:"Test"},Relationships:{CONTAINS:"contains",COPIES:"copies",DERIVES:"derives",SATISFIES:"satisfies",VERIFIES:"verifies",REFINES:"refines",TRACES:"traces"},parseDirective:function(t,e,r){Xe.parseDirective(this,t,e,r)},getConfig:()=>nt().req,addRequirement:(t,e)=>(typeof cu[t]>"u"&&(cu[t]={name:t,type:e,id:kn.id,text:kn.text,risk:kn.risk,verifyMethod:kn.verifyMethod}),kn={},cu[t]),getRequirements:()=>cu,setNewReqId:t=>{typeof kn<"u"&&(kn.id=t)},setNewReqText:t=>{typeof kn<"u"&&(kn.text=t)},setNewReqRisk:t=>{typeof kn<"u"&&(kn.risk=t)},setNewReqVerifyMethod:t=>{typeof kn<"u"&&(kn.verifyMethod=t)},setAccTitle:Yn,getAccTitle:ui,setAccDescription:hi,getAccDescription:fi,addElement:t=>(typeof uu[t]>"u"&&(uu[t]={name:t,type:rs.type,docRef:rs.docRef},H.info("Added new requirement: ",t)),rs={},uu[t]),getElements:()=>uu,setNewElementType:t=>{typeof rs<"u"&&(rs.type=t)},setNewElementDocRef:t=>{typeof 
rs<"u"&&(rs.docRef=t)},addRelationship:(t,e,r)=>{U4.push({type:t,src:e,dst:r})},getRelationships:()=>U4,clear:()=>{U4=[],kn={},cu={},rs={},uu={},ci()}},W4={CONTAINS:"contains",ARROW:"arrow"},sL={ReqMarkers:W4,insertLineEndings:(t,e)=>{let r=t.append("defs").append("marker").attr("id",W4.CONTAINS+"_line_ending").attr("refX",0).attr("refY",e.line_height/2).attr("markerWidth",e.line_height).attr("markerHeight",e.line_height).attr("orient","auto").append("g");r.append("circle").attr("cx",e.line_height/2).attr("cy",e.line_height/2).attr("r",e.line_height/2).attr("fill","none"),r.append("line").attr("x1",0).attr("x2",e.line_height).attr("y1",e.line_height/2).attr("y2",e.line_height/2).attr("stroke-width",1),r.append("line").attr("y1",0).attr("y2",e.line_height).attr("x1",e.line_height/2).attr("x2",e.line_height/2).attr("stroke-width",1),t.append("defs").append("marker").attr("id",W4.ARROW+"_line_ending").attr("refX",e.line_height).attr("refY",.5*e.line_height).attr("markerWidth",e.line_height).attr("markerHeight",e.line_height).attr("orient","auto").append("path").attr("d",`M0,0 - L${e.line_height},${e.line_height/2} - M${e.line_height},${e.line_height/2} - L0,${e.line_height}`).attr("stroke-width",1)}};let sr={},oL=0;const lL=(t,e)=>t.insert("rect","#"+e).attr("class","req reqBox").attr("x",0).attr("y",0).attr("width",sr.rect_min_width+"px").attr("height",sr.rect_min_height+"px"),cL=(t,e,r)=>{let n=sr.rect_min_width/2,i=t.append("text").attr("class","req reqLabel reqTitle").attr("id",e).attr("x",n).attr("y",sr.rect_padding).attr("dominant-baseline","hanging"),a=0;r.forEach(u=>{a==0?i.append("tspan").attr("text-anchor","middle").attr("x",sr.rect_min_width/2).attr("dy",0).text(u):i.append("tspan").attr("text-anchor","middle").attr("x",sr.rect_min_width/2).attr("dy",sr.line_height*.75).text(u),a++});let s=1.5*sr.rect_padding,o=a*sr.line_height*.75,l=s+o;return 
t.append("line").attr("class","req-title-line").attr("x1","0").attr("x2",sr.rect_min_width).attr("y1",l).attr("y2",l),{titleNode:i,y:l}},uL=(t,e,r,n)=>{let i=t.append("text").attr("class","req reqLabel").attr("id",e).attr("x",sr.rect_padding).attr("y",n).attr("dominant-baseline","hanging"),a=0;const s=30;let o=[];return r.forEach(l=>{let u=l.length;for(;u>s&&a<3;){let h=l.substring(0,s);l=l.substring(s,l.length),u=l.length,o[o.length]=h,a++}if(a==3){let h=o[o.length-1];o[o.length-1]=h.substring(0,h.length-4)+"..."}else o[o.length]=l;a=0}),o.forEach(l=>{i.append("tspan").attr("x",sr.rect_padding).attr("dy",sr.line_height).text(l)}),i},hit=(t,e,r,n)=>{const i=e.node().getTotalLength(),a=e.node().getPointAtLength(i*.5),s="rel"+oL;oL++;const l=t.append("text").attr("class","req relationshipLabel").attr("id",s).attr("x",a.x).attr("y",a.y).attr("text-anchor","middle").attr("dominant-baseline","middle").text(n).node().getBBox();t.insert("rect","#"+s).attr("class","req reqLabelBox").attr("x",a.x-l.width/2).attr("y",a.y-l.height/2).attr("width",l.width).attr("height",l.height).attr("fill","white").attr("fill-opacity","85%")},fit=function(t,e,r,n,i){const a=r.edge(cl(e.src),cl(e.dst)),s=Ua().x(function(l){return l.x}).y(function(l){return l.y}),o=t.insert("path","#"+n).attr("class","er relationshipLine").attr("d",s(a.points)).attr("fill","none");e.type==i.db.Relationships.CONTAINS?o.attr("marker-start","url("+pe.getUrl(sr.arrowMarkerAbsolute)+"#"+e.type+"_line_ending)"):(o.attr("stroke-dasharray","10,7"),o.attr("marker-end","url("+pe.getUrl(sr.arrowMarkerAbsolute)+"#"+sL.ReqMarkers.ARROW+"_line_ending)")),hit(t,o,sr,`<<${e.type}>>`)},dit=(t,e,r)=>{Object.keys(t).forEach(n=>{let i=t[n];n=cl(n),H.info("Added new requirement: ",n);const a=r.append("g").attr("id",n),s="req-"+n,o=lL(a,s);let l=cL(a,n+"_title",[`<<${i.type}>>`,`${i.name}`]);uL(a,n+"_body",[`Id: ${i.id}`,`Text: ${i.text}`,`Risk: ${i.risk}`,`Verification: ${i.verifyMethod}`],l.y);const 
u=o.node().getBBox();e.setNode(n,{width:u.width,height:u.height,shape:"rect",id:n})})},pit=(t,e,r)=>{Object.keys(t).forEach(n=>{let i=t[n];const a=cl(n),s=r.append("g").attr("id",a),o="element-"+a,l=lL(s,o);let u=cL(s,o+"_title",["<<Element>>",`${n}`]);uL(s,o+"_body",[`Type: ${i.type||"Not Specified"}`,`Doc Ref: ${i.docRef||"None"}`],u.y);const h=l.node().getBBox();e.setNode(a,{width:h.width,height:h.height,shape:"rect",id:a})})},git=(t,e)=>(t.forEach(function(r){let n=cl(r.src),i=cl(r.dst);e.setEdge(n,i,{relationship:r})}),t),yit=function(t,e){e.nodes().forEach(function(r){typeof r<"u"&&typeof e.node(r)<"u"&&(t.select("#"+r),t.select("#"+r).attr("transform","translate("+(e.node(r).x-e.node(r).width/2)+","+(e.node(r).y-e.node(r).height/2)+" )"))})},cl=t=>t.replace(/\s/g,"").replace(/\./g,"_"),mit={draw:(t,e,r,n)=>{sr=nt().requirement,n.db.clear(),n.parser.parse(t);const i=sr.securityLevel;let a;i==="sandbox"&&(a=St("#i"+e));const o=St(i==="sandbox"?a.nodes()[0].contentDocument.body:"body").select(`[id='${e}']`);sL.insertLineEndings(o,sr);const l=new cr.Graph({multigraph:!1,compound:!1,directed:!0}).setGraph({rankdir:sr.layoutDirection,marginx:20,marginy:20,nodesep:100,edgesep:100,ranksep:100}).setDefaultEdgeLabel(function(){return{}});let u=n.db.getRequirements(),h=n.db.getElements(),d=n.db.getRelationships();dit(u,l,o),pit(h,l,o),git(d,l),Zc.layout(l),yit(o,l),d.forEach(function(y){fit(o,y,l,e,n)});const f=sr.rect_padding,p=o.node().getBBox(),m=p.width+f*2,_=p.height+f*2;li(o,_,m,sr.useMaxWidth),o.attr("viewBox",`${p.x-f} ${p.y-f} ${m} ${_}`),bn(n.db,o,e)}};var H4=function(){var t=function(it,Z,V,Q){for(V=V||{},Q=it.length;Q--;V[it[Q]]=Z);return 
V},e=[1,2],r=[1,3],n=[1,5],i=[1,7],a=[2,5],s=[1,15],o=[1,17],l=[1,18],u=[1,19],h=[1,21],d=[1,22],f=[1,23],p=[1,29],m=[1,30],_=[1,31],y=[1,32],b=[1,33],x=[1,34],k=[1,35],T=[1,36],C=[1,37],M=[1,38],S=[1,39],R=[1,40],A=[1,43],L=[1,44],v=[1,45],B=[1,46],w=[1,47],D=[1,48],N=[1,51],z=[1,4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,50,51,52,53,58,59,60,61,69,79],X=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,53,58,59,60,61,69,79],ct=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,52,53,58,59,60,61,69,79],J=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,51,53,58,59,60,61,69,79],Y=[4,5,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,50,53,58,59,60,61,69,79],$=[67,68,69],lt=[1,121],ut=[1,4,5,7,16,20,22,25,26,32,33,34,36,38,39,40,41,42,43,45,47,49,50,51,52,53,58,59,60,61,69,79],W={trace:function(){},yy:{},symbols_:{error:2,start:3,SPACE:4,NEWLINE:5,directive:6,SD:7,document:8,line:9,statement:10,openDirective:11,typeDirective:12,closeDirective:13,":":14,argDirective:15,participant:16,actor:17,AS:18,restOfLine:19,participant_actor:20,signal:21,autonumber:22,NUM:23,off:24,activate:25,deactivate:26,note_statement:27,links_statement:28,link_statement:29,properties_statement:30,details_statement:31,title:32,legacy_title:33,acc_title:34,acc_title_value:35,acc_descr:36,acc_descr_value:37,acc_descr_multiline_value:38,loop:39,end:40,rect:41,opt:42,alt:43,else_sections:44,par:45,par_sections:46,critical:47,option_sections:48,break:49,option:50,and:51,else:52,note:53,placement:54,text2:55,over:56,actor_pair:57,links:58,link:59,properties:60,details:61,spaceList:62,",":63,left_of:64,right_of:65,signaltype:66,"+":67,"-":68,ACTOR:69,SOLID_OPEN_ARROW:70,DOTTED_OPEN_ARROW:71,SOLID_ARROW:72,DOTTED_ARROW:73,SOLID_CROSS:74,DOTTED_CROSS:75,SOLID_POINT:76,DOTTED_POINT:77,TXT:78,open_directive:79,type_directive:80,arg_directive:81,close_directive:82,$accept:0,$end:1},terminals_:{2:"error",4:"SPACE",5:"NEWLINE",7:"SD",14:":",16:"participant",18
:"AS",19:"restOfLine",20:"participant_actor",22:"autonumber",23:"NUM",24:"off",25:"activate",26:"deactivate",32:"title",33:"legacy_title",34:"acc_title",35:"acc_title_value",36:"acc_descr",37:"acc_descr_value",38:"acc_descr_multiline_value",39:"loop",40:"end",41:"rect",42:"opt",43:"alt",45:"par",47:"critical",49:"break",50:"option",51:"and",52:"else",53:"note",56:"over",58:"links",59:"link",60:"properties",61:"details",63:",",64:"left_of",65:"right_of",67:"+",68:"-",69:"ACTOR",70:"SOLID_OPEN_ARROW",71:"DOTTED_OPEN_ARROW",72:"SOLID_ARROW",73:"DOTTED_ARROW",74:"SOLID_CROSS",75:"DOTTED_CROSS",76:"SOLID_POINT",77:"DOTTED_POINT",78:"TXT",79:"open_directive",80:"type_directive",81:"arg_directive",82:"close_directive"},productions_:[0,[3,2],[3,2],[3,2],[3,2],[8,0],[8,2],[9,2],[9,1],[9,1],[6,4],[6,6],[10,5],[10,3],[10,5],[10,3],[10,2],[10,4],[10,3],[10,3],[10,2],[10,3],[10,3],[10,2],[10,2],[10,2],[10,2],[10,2],[10,1],[10,1],[10,2],[10,2],[10,1],[10,4],[10,4],[10,4],[10,4],[10,4],[10,4],[10,4],[10,1],[48,1],[48,4],[46,1],[46,4],[44,1],[44,4],[27,4],[27,4],[28,3],[29,3],[30,3],[31,3],[62,2],[62,1],[57,3],[57,1],[54,1],[54,1],[21,5],[21,5],[21,4],[17,1],[66,1],[66,1],[66,1],[66,1],[66,1],[66,1],[66,1],[66,1],[55,1],[11,1],[12,1],[15,1],[13,1]],performAction:function(Z,V,Q,q,U,F,j){var P=F.length-1;switch(U){case 4:return q.apply(F[P]),F[P];case 5:this.$=[];break;case 6:F[P-1].push(F[P]),this.$=F[P-1];break;case 7:case 8:this.$=F[P];break;case 9:this.$=[];break;case 12:F[P-3].type="addParticipant",F[P-3].description=q.parseMessage(F[P-1]),this.$=F[P-3];break;case 13:F[P-1].type="addParticipant",this.$=F[P-1];break;case 14:F[P-3].type="addActor",F[P-3].description=q.parseMessage(F[P-1]),this.$=F[P-3];break;case 15:F[P-1].type="addActor",this.$=F[P-1];break;case 17:this.$={type:"sequenceIndex",sequenceIndex:Number(F[P-2]),sequenceIndexStep:Number(F[P-1]),sequenceVisible:!0,signalType:q.LINETYPE.AUTONUMBER};break;case 
18:this.$={type:"sequenceIndex",sequenceIndex:Number(F[P-1]),sequenceIndexStep:1,sequenceVisible:!0,signalType:q.LINETYPE.AUTONUMBER};break;case 19:this.$={type:"sequenceIndex",sequenceVisible:!1,signalType:q.LINETYPE.AUTONUMBER};break;case 20:this.$={type:"sequenceIndex",sequenceVisible:!0,signalType:q.LINETYPE.AUTONUMBER};break;case 21:this.$={type:"activeStart",signalType:q.LINETYPE.ACTIVE_START,actor:F[P-1]};break;case 22:this.$={type:"activeEnd",signalType:q.LINETYPE.ACTIVE_END,actor:F[P-1]};break;case 28:q.setDiagramTitle(F[P].substring(6)),this.$=F[P].substring(6);break;case 29:q.setDiagramTitle(F[P].substring(7)),this.$=F[P].substring(7);break;case 30:this.$=F[P].trim(),q.setAccTitle(this.$);break;case 31:case 32:this.$=F[P].trim(),q.setAccDescription(this.$);break;case 33:F[P-1].unshift({type:"loopStart",loopText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.LOOP_START}),F[P-1].push({type:"loopEnd",loopText:F[P-2],signalType:q.LINETYPE.LOOP_END}),this.$=F[P-1];break;case 34:F[P-1].unshift({type:"rectStart",color:q.parseMessage(F[P-2]),signalType:q.LINETYPE.RECT_START}),F[P-1].push({type:"rectEnd",color:q.parseMessage(F[P-2]),signalType:q.LINETYPE.RECT_END}),this.$=F[P-1];break;case 35:F[P-1].unshift({type:"optStart",optText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.OPT_START}),F[P-1].push({type:"optEnd",optText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.OPT_END}),this.$=F[P-1];break;case 36:F[P-1].unshift({type:"altStart",altText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.ALT_START}),F[P-1].push({type:"altEnd",signalType:q.LINETYPE.ALT_END}),this.$=F[P-1];break;case 37:F[P-1].unshift({type:"parStart",parText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.PAR_START}),F[P-1].push({type:"parEnd",signalType:q.LINETYPE.PAR_END}),this.$=F[P-1];break;case 
38:F[P-1].unshift({type:"criticalStart",criticalText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.CRITICAL_START}),F[P-1].push({type:"criticalEnd",signalType:q.LINETYPE.CRITICAL_END}),this.$=F[P-1];break;case 39:F[P-1].unshift({type:"breakStart",breakText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.BREAK_START}),F[P-1].push({type:"breakEnd",optText:q.parseMessage(F[P-2]),signalType:q.LINETYPE.BREAK_END}),this.$=F[P-1];break;case 42:this.$=F[P-3].concat([{type:"option",optionText:q.parseMessage(F[P-1]),signalType:q.LINETYPE.CRITICAL_OPTION},F[P]]);break;case 44:this.$=F[P-3].concat([{type:"and",parText:q.parseMessage(F[P-1]),signalType:q.LINETYPE.PAR_AND},F[P]]);break;case 46:this.$=F[P-3].concat([{type:"else",altText:q.parseMessage(F[P-1]),signalType:q.LINETYPE.ALT_ELSE},F[P]]);break;case 47:this.$=[F[P-1],{type:"addNote",placement:F[P-2],actor:F[P-1].actor,text:F[P]}];break;case 48:F[P-2]=[].concat(F[P-1],F[P-1]).slice(0,2),F[P-2][0]=F[P-2][0].actor,F[P-2][1]=F[P-2][1].actor,this.$=[F[P-1],{type:"addNote",placement:q.PLACEMENT.OVER,actor:F[P-2].slice(0,2),text:F[P]}];break;case 49:this.$=[F[P-1],{type:"addLinks",actor:F[P-1].actor,text:F[P]}];break;case 50:this.$=[F[P-1],{type:"addALink",actor:F[P-1].actor,text:F[P]}];break;case 51:this.$=[F[P-1],{type:"addProperties",actor:F[P-1].actor,text:F[P]}];break;case 52:this.$=[F[P-1],{type:"addDetails",actor:F[P-1].actor,text:F[P]}];break;case 55:this.$=[F[P-2],F[P]];break;case 56:this.$=F[P];break;case 57:this.$=q.PLACEMENT.LEFTOF;break;case 58:this.$=q.PLACEMENT.RIGHTOF;break;case 59:this.$=[F[P-4],F[P-1],{type:"addMessage",from:F[P-4].actor,to:F[P-1].actor,signalType:F[P-3],msg:F[P]},{type:"activeStart",signalType:q.LINETYPE.ACTIVE_START,actor:F[P-1]}];break;case 60:this.$=[F[P-4],F[P-1],{type:"addMessage",from:F[P-4].actor,to:F[P-1].actor,signalType:F[P-3],msg:F[P]},{type:"activeEnd",signalType:q.LINETYPE.ACTIVE_END,actor:F[P-4]}];break;case 
61:this.$=[F[P-3],F[P-1],{type:"addMessage",from:F[P-3].actor,to:F[P-1].actor,signalType:F[P-2],msg:F[P]}];break;case 62:this.$={type:"addParticipant",actor:F[P]};break;case 63:this.$=q.LINETYPE.SOLID_OPEN;break;case 64:this.$=q.LINETYPE.DOTTED_OPEN;break;case 65:this.$=q.LINETYPE.SOLID;break;case 66:this.$=q.LINETYPE.DOTTED;break;case 67:this.$=q.LINETYPE.SOLID_CROSS;break;case 68:this.$=q.LINETYPE.DOTTED_CROSS;break;case 69:this.$=q.LINETYPE.SOLID_POINT;break;case 70:this.$=q.LINETYPE.DOTTED_POINT;break;case 71:this.$=q.parseMessage(F[P].trim().substring(1));break;case 72:q.parseDirective("%%{","open_directive");break;case 73:q.parseDirective(F[P],"type_directive");break;case 74:F[P]=F[P].trim().replace(/'/g,'"'),q.parseDirective(F[P],"arg_directive");break;case 75:q.parseDirective("}%%","close_directive","sequence");break}},table:[{3:1,4:e,5:r,6:4,7:n,11:6,79:i},{1:[3]},{3:8,4:e,5:r,6:4,7:n,11:6,79:i},{3:9,4:e,5:r,6:4,7:n,11:6,79:i},{3:10,4:e,5:r,6:4,7:n,11:6,79:i},t([1,4,5,16,20,22,25,26,32,33,34,36,38,39,41,42,43,45,47,49,53,58,59,60,61,69,79],a,{8:11}),{12:12,80:[1,13]},{80:[2,72]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{1:[2,4],4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},{13:49,14:[1,50],82:N},t([14,82],[2,73]),t(z,[2,6]),{6:41,10:52,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},t(z,[2,8]),t(z,[2,9]),{17:53,69:D},{17:54,69:D},{5:[1,55]},{5:[1,58],23:[1,56],24:[1,57]},{17:59,69:D},{17:60,69:D},{5:[1,61]},{5:[1,62]},{5:[1,63]},{5:[1,64]},{5:[1,65]},t(z,[2,28]),t(z,[2,29]),{35:[1,66]},{37:[1,67]},t(z,[2,32]),{19:[1,68]},{19:[1,69]},{19:[1,70]},{19:[1,71]},{19:[1,72]},{19:[1,73]},{19:[1,74]},t(z,[2,40]),{66:75,70:[1,76],71:[1,77],72:[1,78],73:[1,79],74:[1,80],75:[1,81],76:[1,82],77:[1,83]},{54:84,56:[1,85],
64:[1,86],65:[1,87]},{17:88,69:D},{17:89,69:D},{17:90,69:D},{17:91,69:D},t([5,18,63,70,71,72,73,74,75,76,77,78],[2,62]),{5:[1,92]},{15:93,81:[1,94]},{5:[2,75]},t(z,[2,7]),{5:[1,96],18:[1,95]},{5:[1,98],18:[1,97]},t(z,[2,16]),{5:[1,100],23:[1,99]},{5:[1,101]},t(z,[2,20]),{5:[1,102]},{5:[1,103]},t(z,[2,23]),t(z,[2,24]),t(z,[2,25]),t(z,[2,26]),t(z,[2,27]),t(z,[2,30]),t(z,[2,31]),t(X,a,{8:104}),t(X,a,{8:105}),t(X,a,{8:106}),t(ct,a,{44:107,8:108}),t(J,a,{46:109,8:110}),t(Y,a,{48:111,8:112}),t(X,a,{8:113}),{17:116,67:[1,114],68:[1,115],69:D},t($,[2,63]),t($,[2,64]),t($,[2,65]),t($,[2,66]),t($,[2,67]),t($,[2,68]),t($,[2,69]),t($,[2,70]),{17:117,69:D},{17:119,57:118,69:D},{69:[2,57]},{69:[2,58]},{55:120,78:lt},{55:122,78:lt},{55:123,78:lt},{55:124,78:lt},t(ut,[2,10]),{13:125,82:N},{82:[2,74]},{19:[1,126]},t(z,[2,13]),{19:[1,127]},t(z,[2,15]),{5:[1,128]},t(z,[2,18]),t(z,[2,19]),t(z,[2,21]),t(z,[2,22]),{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[1,129],41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[1,130],41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[1,131],41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},{40:[1,132]},{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[2,45],41:k,42:T,43:C,45:M,47:S,49:R,52:[1,133],53:A,58:L,59:v,60:B,61:w,69:D,79:i},{40:[1,134]},{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[2,43],41:k,42:T,43:C,45:M,47:S,49:R,51:[1,135],53:A,58:L,59:v,60:B,61:w,69:D,79:i},{40:[1,136]},{4:s,5:o,6:41,9
:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[2,41],41:k,42:T,43:C,45:M,47:S,49:R,50:[1,137],53:A,58:L,59:v,60:B,61:w,69:D,79:i},{4:s,5:o,6:41,9:14,10:16,11:6,16:l,17:42,20:u,21:20,22:h,25:d,26:f,27:24,28:25,29:26,30:27,31:28,32:p,33:m,34:_,36:y,38:b,39:x,40:[1,138],41:k,42:T,43:C,45:M,47:S,49:R,53:A,58:L,59:v,60:B,61:w,69:D,79:i},{17:139,69:D},{17:140,69:D},{55:141,78:lt},{55:142,78:lt},{55:143,78:lt},{63:[1,144],78:[2,56]},{5:[2,49]},{5:[2,71]},{5:[2,50]},{5:[2,51]},{5:[2,52]},{5:[1,145]},{5:[1,146]},{5:[1,147]},t(z,[2,17]),t(z,[2,33]),t(z,[2,34]),t(z,[2,35]),t(z,[2,36]),{19:[1,148]},t(z,[2,37]),{19:[1,149]},t(z,[2,38]),{19:[1,150]},t(z,[2,39]),{55:151,78:lt},{55:152,78:lt},{5:[2,61]},{5:[2,47]},{5:[2,48]},{17:153,69:D},t(ut,[2,11]),t(z,[2,12]),t(z,[2,14]),t(ct,a,{8:108,44:154}),t(J,a,{8:110,46:155}),t(Y,a,{8:112,48:156}),{5:[2,59]},{5:[2,60]},{78:[2,55]},{40:[2,46]},{40:[2,44]},{40:[2,42]}],defaultActions:{7:[2,72],8:[2,1],9:[2,2],10:[2,3],51:[2,75],86:[2,57],87:[2,58],94:[2,74],120:[2,49],121:[2,71],122:[2,50],123:[2,51],124:[2,52],141:[2,61],142:[2,47],143:[2,48],151:[2,59],152:[2,60],153:[2,55],154:[2,46],155:[2,44],156:[2,42]},parseError:function(Z,V){if(V.recoverable)this.trace(Z);else{var Q=new Error(Z);throw Q.hash=V,Q}},parse:function(Z){var V=this,Q=[0],q=[],U=[null],F=[],j=this.table,P="",et=0,at=0,It=2,Lt=1,Rt=F.slice.call(arguments,1),Ct=Object.create(this.lexer),pt={yy:{}};for(var mt in this.yy)Object.prototype.hasOwnProperty.call(this.yy,mt)&&(pt.yy[mt]=this.yy[mt]);Ct.setInput(Z,pt.yy),pt.yy.lexer=Ct,pt.yy.parser=this,typeof Ct.yylloc>"u"&&(Ct.yylloc={});var vt=Ct.yylloc;F.push(vt);var Tt=Ct.options&&Ct.options.ranges;typeof pt.yy.parseError=="function"?this.parseError=pt.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function ft(){var bt;return bt=q.pop()||Ct.lex()||Lt,typeof bt!="number"&&(bt instanceof 
Array&&(q=bt,bt=q.pop()),bt=V.symbols_[bt]||bt),bt}for(var le,Dt,Gt,$t,Qt={},we,jt,Ft,zt;;){if(Dt=Q[Q.length-1],this.defaultActions[Dt]?Gt=this.defaultActions[Dt]:((le===null||typeof le>"u")&&(le=ft()),Gt=j[Dt]&&j[Dt][le]),typeof Gt>"u"||!Gt.length||!Gt[0]){var wt="";zt=[];for(we in j[Dt])this.terminals_[we]&&we>It&&zt.push("'"+this.terminals_[we]+"'");Ct.showPosition?wt="Parse error on line "+(et+1)+`: -`+Ct.showPosition()+` -Expecting `+zt.join(", ")+", got '"+(this.terminals_[le]||le)+"'":wt="Parse error on line "+(et+1)+": Unexpected "+(le==Lt?"end of input":"'"+(this.terminals_[le]||le)+"'"),this.parseError(wt,{text:Ct.match,token:this.terminals_[le]||le,line:Ct.yylineno,loc:vt,expected:zt})}if(Gt[0]instanceof Array&&Gt.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Dt+", token: "+le);switch(Gt[0]){case 1:Q.push(le),U.push(Ct.yytext),F.push(Ct.yylloc),Q.push(Gt[1]),le=null,at=Ct.yyleng,P=Ct.yytext,et=Ct.yylineno,vt=Ct.yylloc;break;case 2:if(jt=this.productions_[Gt[1]][1],Qt.$=U[U.length-jt],Qt._$={first_line:F[F.length-(jt||1)].first_line,last_line:F[F.length-1].last_line,first_column:F[F.length-(jt||1)].first_column,last_column:F[F.length-1].last_column},Tt&&(Qt._$.range=[F[F.length-(jt||1)].range[0],F[F.length-1].range[1]]),$t=this.performAction.apply(Qt,[P,at,et,pt.yy,Gt[1],U,F].concat(Rt)),typeof $t<"u")return $t;jt&&(Q=Q.slice(0,-1*jt*2),U=U.slice(0,-1*jt),F=F.slice(0,-1*jt)),Q.push(this.productions_[Gt[1]][0]),U.push(Qt.$),F.push(Qt._$),Ft=j[Q[Q.length-2]][Q[Q.length-1]],Q.push(Ft);break;case 3:return!0}}return!0}},tt=function(){var it={EOF:1,parseError:function(V,Q){if(this.yy.parser)this.yy.parser.parseError(V,Q);else throw new Error(V)},setInput:function(Z,V){return 
this.yy=V||this.yy||{},this._input=Z,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var Z=this._input[0];this.yytext+=Z,this.yyleng++,this.offset++,this.match+=Z,this.matched+=Z;var V=Z.match(/(?:\r\n?|\n).*/g);return V?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),Z},unput:function(Z){var V=Z.length,Q=Z.split(/(?:\r\n?|\n)/g);this._input=Z+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-V),this.offset-=V;var q=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),Q.length-1&&(this.yylineno-=Q.length-1);var U=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:Q?(Q.length===q.length?this.yylloc.first_column:0)+q[q.length-Q.length].length-Q[0].length:this.yylloc.first_column-V},this.options.ranges&&(this.yylloc.range=[U[0],U[0]+this.yyleng-V]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(Z){this.unput(this.match.slice(Z))},pastInput:function(){var Z=this.matched.substr(0,this.matched.length-this.match.length);return(Z.length>20?"...":"")+Z.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var Z=this.match;return Z.length<20&&(Z+=this._input.substr(0,20-Z.length)),(Z.substr(0,20)+(Z.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var Z=this.pastInput(),V=new Array(Z.length+1).join("-");return Z+this.upcomingInput()+` -`+V+"^"},test_match:function(Z,V){var Q,q,U;if(this.options.backtrack_lexer&&(U={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(U.yylloc.range=this.yylloc.range.slice(0))),q=Z[0].match(/(?:\r\n?|\n).*/g),q&&(this.yylineno+=q.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:q?q[q.length-1].length-q[q.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+Z[0].length},this.yytext+=Z[0],this.match+=Z[0],this.matches=Z,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(Z[0].length),this.matched+=Z[0],Q=this.performAction.call(this,this.yy,this,V,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),Q)return Q;if(this._backtrack){for(var F in U)this[F]=U[F];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var Z,V,Q,q;this._more||(this.yytext="",this.match="");for(var 
U=this._currentRules(),F=0;F<U.length;F++)if(Q=this._input.match(this.rules[U[F]]),Q&&(!V||Q[0].length>V[0].length)){if(V=Q,q=F,this.options.backtrack_lexer){if(Z=this.test_match(Q,U[F]),Z!==!1)return Z;if(this._backtrack){V=!1;continue}else return!1}else if(!this.options.flex)break}return V?(Z=this.test_match(V,U[q]),Z!==!1?Z:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. -`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var V=this.next();return V||this.lex()},begin:function(V){this.conditionStack.push(V)},popState:function(){var V=this.conditionStack.length-1;return V>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(V){return V=this.conditionStack.length-1-Math.abs(V||0),V>=0?this.conditionStack[V]:"INITIAL"},pushState:function(V){this.begin(V)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(V,Q,q,U){switch(q){case 0:return this.begin("open_directive"),79;case 1:return this.begin("type_directive"),80;case 2:return this.popState(),this.begin("arg_directive"),14;case 3:return this.popState(),this.popState(),82;case 4:return 81;case 5:return 5;case 6:break;case 7:break;case 8:break;case 9:break;case 10:break;case 11:return 23;case 12:return this.begin("ID"),16;case 13:return this.begin("ID"),20;case 14:return Q.yytext=Q.yytext.trim(),this.begin("ALIAS"),69;case 15:return this.popState(),this.popState(),this.begin("LINE"),18;case 16:return this.popState(),this.popState(),5;case 17:return this.begin("LINE"),39;case 18:return this.begin("LINE"),41;case 19:return this.begin("LINE"),42;case 20:return this.begin("LINE"),43;case 21:return this.begin("LINE"),52;case 22:return 
this.begin("LINE"),45;case 23:return this.begin("LINE"),51;case 24:return this.begin("LINE"),47;case 25:return this.begin("LINE"),50;case 26:return this.begin("LINE"),49;case 27:return this.popState(),19;case 28:return 40;case 29:return 64;case 30:return 65;case 31:return 58;case 32:return 59;case 33:return 60;case 34:return 61;case 35:return 56;case 36:return 53;case 37:return this.begin("ID"),25;case 38:return this.begin("ID"),26;case 39:return 32;case 40:return 33;case 41:return this.begin("acc_title"),34;case 42:return this.popState(),"acc_title_value";case 43:return this.begin("acc_descr"),36;case 44:return this.popState(),"acc_descr_value";case 45:this.begin("acc_descr_multiline");break;case 46:this.popState();break;case 47:return"acc_descr_multiline_value";case 48:return 7;case 49:return 22;case 50:return 24;case 51:return 63;case 52:return 5;case 53:return Q.yytext=Q.yytext.trim(),69;case 54:return 72;case 55:return 73;case 56:return 70;case 57:return 71;case 58:return 74;case 59:return 75;case 60:return 76;case 61:return 77;case 62:return 78;case 63:return 67;case 64:return 68;case 65:return 5;case 66:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:((?!\n)\s)+)/i,/^(?:#[^\n]*)/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[0-9]+(?=[ \n]+))/i,/^(?:participant\b)/i,/^(?:actor\b)/i,/^(?:[^\->:\n,;]+?([\-]*[^\->:\n,;]+?)*?(?=((?!\n)\s)+as(?!\n)\s|[#\n;]|$))/i,/^(?:as\b)/i,/^(?:(?:))/i,/^(?:loop\b)/i,/^(?:rect\b)/i,/^(?:opt\b)/i,/^(?:alt\b)/i,/^(?:else\b)/i,/^(?:par\b)/i,/^(?:and\b)/i,/^(?:critical\b)/i,/^(?:option\b)/i,/^(?:break\b)/i,/^(?:(?:[:]?(?:no)?wrap)?[^#\n;]*)/i,/^(?:end\b)/i,/^(?:left of\b)/i,/^(?:right 
of\b)/i,/^(?:links\b)/i,/^(?:link\b)/i,/^(?:properties\b)/i,/^(?:details\b)/i,/^(?:over\b)/i,/^(?:note\b)/i,/^(?:activate\b)/i,/^(?:deactivate\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:title:\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:sequenceDiagram\b)/i,/^(?:autonumber\b)/i,/^(?:off\b)/i,/^(?:,)/i,/^(?:;)/i,/^(?:[^\+\->:\n,;]+((?!(-x|--x|-\)|--\)))[\-]*[^\+\->:\n,;]+)*)/i,/^(?:->>)/i,/^(?:-->>)/i,/^(?:->)/i,/^(?:-->)/i,/^(?:-[x])/i,/^(?:--[x])/i,/^(?:-[\)])/i,/^(?:--[\)])/i,/^(?::(?:(?:no)?wrap)?[^#\n;]+)/i,/^(?:\+)/i,/^(?:-)/i,/^(?:$)/i,/^(?:.)/i],conditions:{acc_descr_multiline:{rules:[46,47],inclusive:!1},acc_descr:{rules:[44],inclusive:!1},acc_title:{rules:[42],inclusive:!1},open_directive:{rules:[1,8],inclusive:!1},type_directive:{rules:[2,3,8],inclusive:!1},arg_directive:{rules:[3,4,8],inclusive:!1},ID:{rules:[7,8,14],inclusive:!1},ALIAS:{rules:[7,8,15,16],inclusive:!1},LINE:{rules:[7,8,27],inclusive:!1},INITIAL:{rules:[0,5,6,8,9,10,11,12,13,17,18,19,20,21,22,23,24,25,26,28,29,30,31,32,33,34,35,36,37,38,39,40,41,43,45,48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66],inclusive:!0}}};return it}();W.lexer=tt;function K(){this.yy={}}return K.prototype=W,W.Parser=K,new K}();H4.parser=H4;const bit=t=>t.match(/^\s*sequenceDiagram/)!==null;let hu,ns={},bi=[],u0=!1,G4;const _it=function(t,e,r){Xe.parseDirective(this,t,e,r)},j4=function(t,e,r,n){const i=ns[t];i&&e===i.name&&r==null||((r==null||r.text==null)&&(r={text:e,wrap:null,type:n}),(n==null||r.text==null)&&(r={text:e,wrap:null,type:n}),ns[t]={name:e,description:r.text,wrap:r.wrap===void 0&&ul()||!!r.wrap,prevActor:hu,links:{},properties:{},actorCnt:null,rectData:null,type:n||"participant"},hu&&ns[hu]&&(ns[hu].nextActor=t),hu=t)},vit=t=>{let e,r=0;for(e=0;e<bi.length;e++)bi[e].type===du.ACTIVE_START&&bi[e].from.actor===t&&r++,bi[e].type===du.ACTIVE_END&&bi[e].from.actor===t&&r--;return 
r},xit=function(t,e,r,n){bi.push({from:t,to:e,message:r.text,wrap:r.wrap===void 0&&ul()||!!r.wrap,answer:n})},hr=function(t,e,r={text:void 0,wrap:void 0},n){if(n===du.ACTIVE_END&&vit(t.actor)<1){let a=new Error("Trying to inactivate an inactive participant ("+t.actor+")");throw a.hash={text:"->>-",token:"->>-",line:"1",loc:{first_line:1,last_line:1,first_column:1,last_column:1},expected:["'ACTIVE_PARTICIPANT'"]},a}return bi.push({from:t,to:e,message:r.text,wrap:r.wrap===void 0&&ul()||!!r.wrap,type:n}),!0},kit=function(){return bi},wit=function(){return ns},fu=function(t){return ns[t]},Tit=function(){return Object.keys(ns)},Eit=function(){u0=!0},Cit=function(){u0=!1},Sit=()=>u0,Ait=function(t){G4=t},ul=()=>typeof G4<"u"?G4:nt().sequence.wrap,Mit=function(){ns={},bi=[],u0=!1,ci()},Lit=function(t){const e=t.trim(),r={text:e.replace(/^[:]?(?:no)?wrap:/,"").trim(),wrap:e.match(/^[:]?wrap:/)!==null?!0:e.match(/^[:]?nowrap:/)!==null?!1:void 0};return H.debug("parseMessage:",r),r},du={SOLID:0,DOTTED:1,NOTE:2,SOLID_CROSS:3,DOTTED_CROSS:4,SOLID_OPEN:5,DOTTED_OPEN:6,LOOP_START:10,LOOP_END:11,ALT_START:12,ALT_ELSE:13,ALT_END:14,OPT_START:15,OPT_END:16,ACTIVE_START:17,ACTIVE_END:18,PAR_START:19,PAR_AND:20,PAR_END:21,RECT_START:22,RECT_END:23,SOLID_POINT:24,DOTTED_POINT:25,AUTONUMBER:26,CRITICAL_START:27,CRITICAL_OPTION:28,CRITICAL_END:29,BREAK_START:30,BREAK_END:31},Rit={FILLED:0,OPEN:1},Iit={LEFTOF:0,RIGHTOF:1,OVER:2},hL=function(t,e,r){r.text,r.wrap===void 0&&ul()||r.wrap;const n=[].concat(t,t);bi.push({from:n[0],to:n[1],message:r.text,wrap:r.wrap===void 0&&ul()||!!r.wrap,type:du.NOTE,placement:e})},fL=function(t,e){const r=fu(t);try{let n=ai(e.text,nt());n=n.replace(/&/g,"&"),n=n.replace(/=/g,"=");const i=JSON.parse(n);$4(r,i)}catch(n){H.error("error while parsing actor link text",n)}},Nit=function(t,e){const r=fu(t);try{const s={};let o=ai(e.text,nt());var n=o.indexOf("@");o=o.replace(/&/g,"&"),o=o.replace(/=/g,"=");var 
i=o.slice(0,n-1).trim(),a=o.slice(n+1).trim();s[i]=a,$4(r,s)}catch(s){H.error("error while parsing actor link text",s)}};function $4(t,e){if(t.links==null)t.links=e;else for(let r in e)t.links[r]=e[r]}const dL=function(t,e){const r=fu(t);try{let n=ai(e.text,nt());const i=JSON.parse(n);pL(r,i)}catch(n){H.error("error while parsing actor properties text",n)}};function pL(t,e){if(t.properties==null)t.properties=e;else for(let r in e)t.properties[r]=e[r]}const gL=function(t,e){const r=fu(t),n=document.getElementById(e.text);try{const i=n.innerHTML,a=JSON.parse(i);a.properties&&pL(r,a.properties),a.links&&$4(r,a.links)}catch(i){H.error("error while parsing actor details text",i)}},Bit=function(t,e){if(typeof t<"u"&&typeof t.properties<"u")return t.properties[e]},yL=function(t){if(t instanceof Array)t.forEach(function(e){yL(e)});else switch(t.type){case"sequenceIndex":bi.push({from:void 0,to:void 0,message:{start:t.sequenceIndex,step:t.sequenceIndexStep,visible:t.sequenceVisible},wrap:!1,type:t.signalType});break;case"addParticipant":j4(t.actor,t.actor,t.description,"participant");break;case"addActor":j4(t.actor,t.actor,t.description,"actor");break;case"activeStart":hr(t.actor,void 0,void 0,t.signalType);break;case"activeEnd":hr(t.actor,void 0,void 0,t.signalType);break;case"addNote":hL(t.actor,t.placement,t.text);break;case"addLinks":fL(t.actor,t.text);break;case"addALink":Nit(t.actor,t.text);break;case"addProperties":dL(t.actor,t.text);break;case"addDetails":gL(t.actor,t.text);break;case"addMessage":hr(t.from,t.to,t.msg,t.signalType);break;case"loopStart":hr(void 0,void 0,t.loopText,t.signalType);break;case"loopEnd":hr(void 0,void 0,void 0,t.signalType);break;case"rectStart":hr(void 0,void 0,t.color,t.signalType);break;case"rectEnd":hr(void 0,void 0,void 0,t.signalType);break;case"optStart":hr(void 0,void 0,t.optText,t.signalType);break;case"optEnd":hr(void 0,void 0,void 0,t.signalType);break;case"altStart":hr(void 0,void 
0,t.altText,t.signalType);break;case"else":hr(void 0,void 0,t.altText,t.signalType);break;case"altEnd":hr(void 0,void 0,void 0,t.signalType);break;case"setAccTitle":Yn(t.text);break;case"parStart":hr(void 0,void 0,t.parText,t.signalType);break;case"and":hr(void 0,void 0,t.parText,t.signalType);break;case"parEnd":hr(void 0,void 0,void 0,t.signalType);break;case"criticalStart":hr(void 0,void 0,t.criticalText,t.signalType);break;case"option":hr(void 0,void 0,t.optionText,t.signalType);break;case"criticalEnd":hr(void 0,void 0,void 0,t.signalType);break;case"breakStart":hr(void 0,void 0,t.breakText,t.signalType);break;case"breakEnd":hr(void 0,void 0,void 0,t.signalType);break}},mL={addActor:j4,addMessage:xit,addSignal:hr,addLinks:fL,addDetails:gL,addProperties:dL,autoWrap:ul,setWrap:Ait,enableSequenceNumbers:Eit,disableSequenceNumbers:Cit,showSequenceNumbers:Sit,getMessages:kit,getActors:wit,getActor:fu,getActorKeys:Tit,getActorProperty:Bit,getAccTitle:ui,getDiagramTitle:u1,setDiagramTitle:c1,parseDirective:_it,getConfig:()=>nt().sequence,clear:Mit,parseMessage:Lit,LINETYPE:du,ARROWTYPE:Rit,PLACEMENT:Iit,addNote:hL,setAccTitle:Yn,apply:yL,setAccDescription:hi,getAccDescription:fi};let X4=[];const Dit=t=>{X4.push(t)},bL=()=>{X4.forEach(t=>{t()}),X4=[]},h0=function(t,e){const r=t.append("rect");return r.attr("x",e.x),r.attr("y",e.y),r.attr("fill",e.fill),r.attr("stroke",e.stroke),r.attr("width",e.width),r.attr("height",e.height),r.attr("rx",e.rx),r.attr("ry",e.ry),typeof e.class<"u"&&r.attr("class",e.class),r},_L=(t,e)=>{Dit(()=>{const r=document.querySelectorAll(t);r.length!==0&&(r[0].addEventListener("mouseover",function(){qit("actor"+e+"_popup")}),r[0].addEventListener("mouseout",function(){Vit("actor"+e+"_popup")}))})},Oit=function(t,e,r,n,i){if(e.links===void 0||e.links===null||Object.keys(e.links).length===0)return{height:0,width:0};const a=e.links,s=e.actorCnt,o=e.rectData;var l="none";i&&(l="block !important");const 
u=t.append("g");u.attr("id","actor"+s+"_popup"),u.attr("class","actorPopupMenu"),u.attr("display",l),_L("#actor"+s+"_popup",s);var h="";typeof o.class<"u"&&(h=" "+o.class);let d=o.width>r?o.width:r;const f=u.append("rect");if(f.attr("class","actorPopupMenuPanel"+h),f.attr("x",o.x),f.attr("y",o.height),f.attr("fill",o.fill),f.attr("stroke",o.stroke),f.attr("width",d),f.attr("height",o.height),f.attr("rx",o.rx),f.attr("ry",o.ry),a!=null){var p=20;for(let y in a){var m=u.append("a"),_=ki(a[y]);m.attr("xlink:href",_),m.attr("target","_blank"),eat(n)(y,m,o.x+10,o.height+p,d,20,{class:"actor"},n),p+=30}}return f.attr("height",p),{height:o.height+p,width:d}},vL=function(t,e,r,n){const i=t.append("image");i.attr("x",e),i.attr("y",r);var a=ki(n);i.attr("xlink:href",a)},xL=function(t,e,r,n){const i=t.append("use");i.attr("x",e),i.attr("y",r);var a=ki(n);i.attr("xlink:href","#"+a)},Fit=function(t){return"var pu = document.getElementById('"+t+"'); if (pu != null) { pu.style.display = 'block'; }"},Pit=function(t){return"var pu = document.getElementById('"+t+"'); if (pu != null) { pu.style.display = 'none'; }"},qit=function(t){var e=document.getElementById(t);e!=null&&(e.style.display="block")},Vit=function(t){var e=document.getElementById(t);e!=null&&(e.style.display="none")},hl=function(t,e){let r=0,n=0;const i=e.text.split(pe.lineBreakRegex);let a=[],s=0,o=()=>e.y;if(typeof e.valign<"u"&&typeof e.textMargin<"u"&&e.textMargin>0)switch(e.valign){case"top":case"start":o=()=>Math.round(e.y+e.textMargin);break;case"middle":case"center":o=()=>Math.round(e.y+(r+n+e.textMargin)/2);break;case"bottom":case"end":o=()=>Math.round(e.y+(r+n+2*e.textMargin)-e.textMargin);break}if(typeof e.anchor<"u"&&typeof e.textMargin<"u"&&typeof 
e.width<"u")switch(e.anchor){case"left":case"start":e.x=Math.round(e.x+e.textMargin),e.anchor="start",e.dominantBaseline="middle",e.alignmentBaseline="middle";break;case"middle":case"center":e.x=Math.round(e.x+e.width/2),e.anchor="middle",e.dominantBaseline="middle",e.alignmentBaseline="middle";break;case"right":case"end":e.x=Math.round(e.x+e.width-e.textMargin),e.anchor="end",e.dominantBaseline="middle",e.alignmentBaseline="middle";break}for(let l=0;l<i.length;l++){let u=i[l];typeof e.textMargin<"u"&&e.textMargin===0&&typeof e.fontSize<"u"&&(s=l*e.fontSize);const h=t.append("text");if(h.attr("x",e.x),h.attr("y",o()),typeof e.anchor<"u"&&h.attr("text-anchor",e.anchor).attr("dominant-baseline",e.dominantBaseline).attr("alignment-baseline",e.alignmentBaseline),typeof e.fontFamily<"u"&&h.style("font-family",e.fontFamily),typeof e.fontSize<"u"&&h.style("font-size",e.fontSize),typeof e.fontWeight<"u"&&h.style("font-weight",e.fontWeight),typeof e.fill<"u"&&h.attr("fill",e.fill),typeof e.class<"u"&&h.attr("class",e.class),typeof e.dy<"u"?h.attr("dy",e.dy):s!==0&&h.attr("dy",s),e.tspan){const d=h.append("tspan");d.attr("x",e.x),typeof e.fill<"u"&&d.attr("fill",e.fill),d.text(u)}else h.text(u);typeof e.valign<"u"&&typeof e.textMargin<"u"&&e.textMargin>0&&(n+=(h._groups||h)[0][0].getBBox().height,r=n),a.push(h)}return a},kL=function(t,e){function r(i,a,s,o,l){return i+","+a+" "+(i+s)+","+a+" "+(i+s)+","+(a+o-l)+" "+(i+s-l*1.2)+","+(a+o)+" "+i+","+(a+o)}const n=t.append("polygon");return n.attr("points",r(e.x,e.y,e.width,e.height,7)),n.attr("class","labelBox"),e.y=e.y+e.height/2,hl(t,e),n};let Yi=-1;const wL=(t,e)=>{!t.selectAll||t.selectAll(".actor-line").attr("class","200").attr("y2",e-55)},zit=function(t,e,r){const n=e.x+e.width/2,i=t.append("g");var 
a=i;e.y===0&&(Yi++,a.append("line").attr("id","actor"+Yi).attr("x1",n).attr("y1",5).attr("x2",n).attr("y2",2e3).attr("class","actor-line").attr("stroke-width","0.5px").attr("stroke","#999"),a=i.append("g"),e.actorCnt=Yi,e.links!=null&&(a.attr("id","root-"+Yi),_L("#root-"+Yi,Yi)));const s=f0();var o="actor";e.properties!=null&&e.properties.class?o=e.properties.class:s.fill="#eaeaea",s.x=e.x,s.y=e.y,s.width=e.width,s.height=e.height,s.class=o,s.rx=3,s.ry=3;const l=h0(a,s);if(e.rectData=s,e.properties!=null&&e.properties.icon){const h=e.properties.icon.trim();h.charAt(0)==="@"?xL(a,s.x+s.width-20,s.y+10,h.substr(1)):vL(a,s.x+s.width-20,s.y+10,h)}TL(r)(e.description,a,s.x,s.y,s.width,s.height,{class:"actor"},r);let u=e.height;if(l.node){const h=l.node().getBBox();e.height=h.height,u=h.height}return u},Yit=function(t,e,r){const n=e.x+e.width/2;e.y===0&&(Yi++,t.append("line").attr("id","actor"+Yi).attr("x1",n).attr("y1",80).attr("x2",n).attr("y2",2e3).attr("class","actor-line").attr("stroke-width","0.5px").attr("stroke","#999"));const i=t.append("g");i.attr("class","actor-man");const a=f0();a.x=e.x,a.y=e.y,a.fill="#eaeaea",a.width=e.width,a.height=e.height,a.class="actor",a.rx=3,a.ry=3,i.append("line").attr("id","actor-man-torso"+Yi).attr("x1",n).attr("y1",e.y+25).attr("x2",n).attr("y2",e.y+45),i.append("line").attr("id","actor-man-arms"+Yi).attr("x1",n-18).attr("y1",e.y+33).attr("x2",n+18).attr("y2",e.y+33),i.append("line").attr("x1",n-18).attr("y1",e.y+60).attr("x2",n).attr("y2",e.y+45),i.append("line").attr("x1",n).attr("y1",e.y+45).attr("x2",n+16).attr("y2",e.y+60);const s=i.append("circle");s.attr("cx",e.x+e.width/2),s.attr("cy",e.y+10),s.attr("r",15),s.attr("width",e.width),s.attr("height",e.height);const o=i.node().getBBox();return e.height=o.height,TL(r)(e.description,i,a.x,a.y+35,a.width,a.height,{class:"actor"},r),e.height},Uit=function(t,e,r){switch(e.type){case"actor":return Yit(t,e,r);case"participant":return zit(t,e,r)}},Wit=function(t){return 
t.append("g")},Hit=function(t,e,r,n,i){const a=f0(),s=e.anchored;a.x=e.startx,a.y=e.starty,a.class="activation"+i%3,a.width=e.stopx-e.startx,a.height=r-e.starty,h0(s,a)},Git=function(t,e,r,n){const{boxMargin:i,boxTextMargin:a,labelBoxHeight:s,labelBoxWidth:o,messageFontFamily:l,messageFontSize:u,messageFontWeight:h}=n,d=t.append("g"),f=function(_,y,b,x){return d.append("line").attr("x1",_).attr("y1",y).attr("x2",b).attr("y2",x).attr("class","loopLine")};f(e.startx,e.starty,e.stopx,e.starty),f(e.stopx,e.starty,e.stopx,e.stopy),f(e.startx,e.stopy,e.stopx,e.stopy),f(e.startx,e.starty,e.startx,e.stopy),typeof e.sections<"u"&&e.sections.forEach(function(_){f(e.startx,_.y,e.stopx,_.y).style("stroke-dasharray","3, 3")});let p=K4();p.text=r,p.x=e.startx,p.y=e.starty,p.fontFamily=l,p.fontSize=u,p.fontWeight=h,p.anchor="middle",p.valign="middle",p.tspan=!1,p.width=o||50,p.height=s||20,p.textMargin=a,p.class="labelText",kL(d,p),p=K4(),p.text=e.title,p.x=e.startx+o/2+(e.stopx-e.startx)/2,p.y=e.starty+i+a,p.anchor="middle",p.valign="middle",p.textMargin=a,p.class="loopText",p.fontFamily=l,p.fontSize=u,p.fontWeight=h,p.wrap=!0;let m=hl(d,p);return typeof e.sectionTitles<"u"&&e.sectionTitles.forEach(function(_,y){if(_.message){p.text=_.message,p.x=e.startx+(e.stopx-e.startx)/2,p.y=e.sections[y].y+i+a,p.class="loopText",p.anchor="middle",p.valign="middle",p.tspan=!1,p.fontFamily=l,p.fontSize=u,p.fontWeight=h,p.wrap=e.wrap,m=hl(d,p);let 
b=Math.round(m.map(x=>(x._groups||x)[0][0].getBBox().height).reduce((x,k)=>x+k));e.sections[y].height+=b-(i+a)}}),e.height=Math.round(e.stopy-e.starty),d},jit=function(t,e){h0(t,{x:e.startx,y:e.starty,width:e.stopx-e.startx,height:e.stopy-e.starty,fill:e.fill,class:"rect"}).lower()},$it=function(t){t.append("defs").append("symbol").attr("id","database").attr("fill-rule","evenodd").attr("clip-rule","evenodd").append("path").attr("transform","scale(.5)").attr("d","M12.258.001l.256.004.255.005.253.008.251.01.249.012.247.015.246.016.242.019.241.02.239.023.236.024.233.027.231.028.229.031.225.032.223.034.22.036.217.038.214.04.211.041.208.043.205.045.201.046.198.048.194.05.191.051.187.053.183.054.18.056.175.057.172.059.168.06.163.061.16.063.155.064.15.066.074.033.073.033.071.034.07.034.069.035.068.035.067.035.066.035.064.036.064.036.062.036.06.036.06.037.058.037.058.037.055.038.055.038.053.038.052.038.051.039.05.039.048.039.047.039.045.04.044.04.043.04.041.04.04.041.039.041.037.041.036.041.034.041.033.042.032.042.03.042.029.042.027.042.026.043.024.043.023.043.021.043.02.043.018.044.017.043.015.044.013.044.012.044.011.045.009.044.007.045.006.045.004.045.002.045.001.045v17l-.001.045-.002.045-.004.045-.006.045-.007.045-.009.044-.011.045-.012.044-.013.044-.015.044-.017.043-.018.044-.02.043-.021.043-.023.043-.024.043-.026.043-.027.042-.029.042-.03.042-.032.042-.033.042-.034.041-.036.041-.037.041-.039.041-.04.041-.041.04-.043.04-.044.04-.045.04-.047.039-.048.039-.05.039-.051.039-.052.038-.053.038-.055.038-.055.038-.058.037-.058.037-.06.037-.06.036-.062.036-.064.036-.064.036-.066.035-.067.035-.068.035-.069.035-.07.034-.071.034-.073.033-.074.033-.15.066-.155.064-.16.063-.163.061-.168.06-.172.059-.175.057-.18.056-.183.054-.187.053-.191.051-.194.05-.198.048-.201.046-.205.045-.208.043-.211.041-.214.04-.217.038-.22.036-.223.034-.225.032-.229.031-.231.028-.233.027-.236.024-.239.023-.241.02-.242.019-.246.016-.247.015-.249.012-.251.01-.253.008-.255.005-.256.004-.258.001-.258-.001-.256-.0
04-.255-.005-.253-.008-.251-.01-.249-.012-.247-.015-.245-.016-.243-.019-.241-.02-.238-.023-.236-.024-.234-.027-.231-.028-.228-.031-.226-.032-.223-.034-.22-.036-.217-.038-.214-.04-.211-.041-.208-.043-.204-.045-.201-.046-.198-.048-.195-.05-.19-.051-.187-.053-.184-.054-.179-.056-.176-.057-.172-.059-.167-.06-.164-.061-.159-.063-.155-.064-.151-.066-.074-.033-.072-.033-.072-.034-.07-.034-.069-.035-.068-.035-.067-.035-.066-.035-.064-.036-.063-.036-.062-.036-.061-.036-.06-.037-.058-.037-.057-.037-.056-.038-.055-.038-.053-.038-.052-.038-.051-.039-.049-.039-.049-.039-.046-.039-.046-.04-.044-.04-.043-.04-.041-.04-.04-.041-.039-.041-.037-.041-.036-.041-.034-.041-.033-.042-.032-.042-.03-.042-.029-.042-.027-.042-.026-.043-.024-.043-.023-.043-.021-.043-.02-.043-.018-.044-.017-.043-.015-.044-.013-.044-.012-.044-.011-.045-.009-.044-.007-.045-.006-.045-.004-.045-.002-.045-.001-.045v-17l.001-.045.002-.045.004-.045.006-.045.007-.045.009-.044.011-.045.012-.044.013-.044.015-.044.017-.043.018-.044.02-.043.021-.043.023-.043.024-.043.026-.043.027-.042.029-.042.03-.042.032-.042.033-.042.034-.041.036-.041.037-.041.039-.041.04-.041.041-.04.043-.04.044-.04.046-.04.046-.039.049-.039.049-.039.051-.039.052-.038.053-.038.055-.038.056-.038.057-.037.058-.037.06-.037.061-.036.062-.036.063-.036.064-.036.066-.035.067-.035.068-.035.069-.035.07-.034.072-.034.072-.033.074-.033.151-.066.155-.064.159-.063.164-.061.167-.06.172-.059.176-.057.179-.056.184-.054.187-.053.19-.051.195-.05.198-.048.201-.046.204-.045.208-.043.211-.041.214-.04.217-.038.22-.036.223-.034.226-.032.228-.031.231-.028.234-.027.236-.024.238-.023.241-.02.243-.019.245-.016.247-.015.249-.012.251-.01.253-.008.255-.005.256-.004.258-.001.258.001zm-9.258 
20.499v.01l.001.021.003.021.004.022.005.021.006.022.007.022.009.023.01.022.011.023.012.023.013.023.015.023.016.024.017.023.018.024.019.024.021.024.022.025.023.024.024.025.052.049.056.05.061.051.066.051.07.051.075.051.079.052.084.052.088.052.092.052.097.052.102.051.105.052.11.052.114.051.119.051.123.051.127.05.131.05.135.05.139.048.144.049.147.047.152.047.155.047.16.045.163.045.167.043.171.043.176.041.178.041.183.039.187.039.19.037.194.035.197.035.202.033.204.031.209.03.212.029.216.027.219.025.222.024.226.021.23.02.233.018.236.016.24.015.243.012.246.01.249.008.253.005.256.004.259.001.26-.001.257-.004.254-.005.25-.008.247-.011.244-.012.241-.014.237-.016.233-.018.231-.021.226-.021.224-.024.22-.026.216-.027.212-.028.21-.031.205-.031.202-.034.198-.034.194-.036.191-.037.187-.039.183-.04.179-.04.175-.042.172-.043.168-.044.163-.045.16-.046.155-.046.152-.047.148-.048.143-.049.139-.049.136-.05.131-.05.126-.05.123-.051.118-.052.114-.051.11-.052.106-.052.101-.052.096-.052.092-.052.088-.053.083-.051.079-.052.074-.052.07-.051.065-.051.06-.051.056-.05.051-.05.023-.024.023-.025.021-.024.02-.024.019-.024.018-.024.017-.024.015-.023.014-.024.013-.023.012-.023.01-.023.01-.022.008-.022.006-.022.006-.022.004-.022.004-.021.001-.021.001-.021v-4.127l-.077.055-.08.053-.083.054-.085.053-.087.052-.09.052-.093.051-.095.05-.097.05-.1.049-.102.049-.105.048-.106.047-.109.047-.111.046-.114.045-.115.045-.118.044-.12.043-.122.042-.124.042-.126.041-.128.04-.13.04-.132.038-.134.038-.135.037-.138.037-.139.035-.142.035-.143.034-.144.033-.147.032-.148.031-.15.03-.151.03-.153.029-.154.027-.156.027-.158.026-.159.025-.161.024-.162.023-.163.022-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.011-.178.01-.179.008-.179.008-.181.006-.182.005-.182.004-.184.003-.184.002h-.37l-.184-.002-.184-.003-.182-.004-.182-.005-.181-.006-.179-.008-.179-.008-.178-.01-.176-.011-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.022
-.162-.023-.161-.024-.159-.025-.157-.026-.156-.027-.155-.027-.153-.029-.151-.03-.15-.03-.148-.031-.146-.032-.145-.033-.143-.034-.141-.035-.14-.035-.137-.037-.136-.037-.134-.038-.132-.038-.13-.04-.128-.04-.126-.041-.124-.042-.122-.042-.12-.044-.117-.043-.116-.045-.113-.045-.112-.046-.109-.047-.106-.047-.105-.048-.102-.049-.1-.049-.097-.05-.095-.05-.093-.052-.09-.051-.087-.052-.085-.053-.083-.054-.08-.054-.077-.054v4.127zm0-5.654v.011l.001.021.003.021.004.021.005.022.006.022.007.022.009.022.01.022.011.023.012.023.013.023.015.024.016.023.017.024.018.024.019.024.021.024.022.024.023.025.024.024.052.05.056.05.061.05.066.051.07.051.075.052.079.051.084.052.088.052.092.052.097.052.102.052.105.052.11.051.114.051.119.052.123.05.127.051.131.05.135.049.139.049.144.048.147.048.152.047.155.046.16.045.163.045.167.044.171.042.176.042.178.04.183.04.187.038.19.037.194.036.197.034.202.033.204.032.209.03.212.028.216.027.219.025.222.024.226.022.23.02.233.018.236.016.24.014.243.012.246.01.249.008.253.006.256.003.259.001.26-.001.257-.003.254-.006.25-.008.247-.01.244-.012.241-.015.237-.016.233-.018.231-.02.226-.022.224-.024.22-.025.216-.027.212-.029.21-.03.205-.032.202-.033.198-.035.194-.036.191-.037.187-.039.183-.039.179-.041.175-.042.172-.043.168-.044.163-.045.16-.045.155-.047.152-.047.148-.048.143-.048.139-.05.136-.049.131-.05.126-.051.123-.051.118-.051.114-.052.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.051.07-.052.065-.051.06-.05.056-.051.051-.049.023-.025.023-.024.021-.025.02-.024.019-.024.018-.024.017-.024.015-.023.014-.023.013-.024.012-.022.01-.023.01-.023.008-.022.006-.022.006-.022.004-.021.004-.022.001-.021.001-.021v-4.139l-.077.054-.08.054-.083.054-.085.052-.087.053-.09.051-.093.051-.095.051-.097.05-.1.049-.102.049-.105.048-.106.047-.109.047-.111.046-.114.045-.115.044-.118.044-.12.044-.122.042-.124.042-.126.041-.128.04-.13.039-.132.039-.134.038-.135.037-.138.036-.139.036-.142.035-.143.033-.144.033-.147.033-.148.031-.15.03-.151.03-.153.028-.154.028
-.156.027-.158.026-.159.025-.161.024-.162.023-.163.022-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.011-.178.009-.179.009-.179.007-.181.007-.182.005-.182.004-.184.003-.184.002h-.37l-.184-.002-.184-.003-.182-.004-.182-.005-.181-.007-.179-.007-.179-.009-.178-.009-.176-.011-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.022-.162-.023-.161-.024-.159-.025-.157-.026-.156-.027-.155-.028-.153-.028-.151-.03-.15-.03-.148-.031-.146-.033-.145-.033-.143-.033-.141-.035-.14-.036-.137-.036-.136-.037-.134-.038-.132-.039-.13-.039-.128-.04-.126-.041-.124-.042-.122-.043-.12-.043-.117-.044-.116-.044-.113-.046-.112-.046-.109-.046-.106-.047-.105-.048-.102-.049-.1-.049-.097-.05-.095-.051-.093-.051-.09-.051-.087-.053-.085-.052-.083-.054-.08-.054-.077-.054v4.139zm0-5.666v.011l.001.02.003.022.004.021.005.022.006.021.007.022.009.023.01.022.011.023.012.023.013.023.015.023.016.024.017.024.018.023.019.024.021.025.022.024.023.024.024.025.052.05.056.05.061.05.066.051.07.051.075.052.079.051.084.052.088.052.092.052.097.052.102.052.105.051.11.052.114.051.119.051.123.051.127.05.131.05.135.05.139.049.144.048.147.048.152.047.155.046.16.045.163.045.167.043.171.043.176.042.178.04.183.04.187.038.19.037.194.036.197.034.202.033.204.032.209.03.212.028.216.027.219.025.222.024.226.021.23.02.233.018.236.017.24.014.243.012.246.01.249.008.253.006.256.003.259.001.26-.001.257-.003.254-.006.25-.008.247-.01.244-.013.241-.014.237-.016.233-.018.231-.02.226-.022.224-.024.22-.025.216-.027.212-.029.21-.03.205-.032.202-.033.198-.035.194-.036.191-.037.187-.039.183-.039.179-.041.175-.042.172-.043.168-.044.163-.045.16-.045.155-.047.152-.047.148-.048.143-.049.139-.049.136-.049.131-.051.126-.05.123-.051.118-.052.114-.051.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.052.07-.051.065-.051.06-.051.056-.05.051-.049.023-.025.023-.025.021-.024.02-.024.019-.024.018-.024.017-.024.015-.023.014-.024.013-.0
23.012-.023.01-.022.01-.023.008-.022.006-.022.006-.022.004-.022.004-.021.001-.021.001-.021v-4.153l-.077.054-.08.054-.083.053-.085.053-.087.053-.09.051-.093.051-.095.051-.097.05-.1.049-.102.048-.105.048-.106.048-.109.046-.111.046-.114.046-.115.044-.118.044-.12.043-.122.043-.124.042-.126.041-.128.04-.13.039-.132.039-.134.038-.135.037-.138.036-.139.036-.142.034-.143.034-.144.033-.147.032-.148.032-.15.03-.151.03-.153.028-.154.028-.156.027-.158.026-.159.024-.161.024-.162.023-.163.023-.165.021-.166.02-.167.019-.169.018-.169.017-.171.016-.173.015-.173.014-.175.013-.175.012-.177.01-.178.01-.179.009-.179.007-.181.006-.182.006-.182.004-.184.003-.184.001-.185.001-.185-.001-.184-.001-.184-.003-.182-.004-.182-.006-.181-.006-.179-.007-.179-.009-.178-.01-.176-.01-.176-.012-.175-.013-.173-.014-.172-.015-.171-.016-.17-.017-.169-.018-.167-.019-.166-.02-.165-.021-.163-.023-.162-.023-.161-.024-.159-.024-.157-.026-.156-.027-.155-.028-.153-.028-.151-.03-.15-.03-.148-.032-.146-.032-.145-.033-.143-.034-.141-.034-.14-.036-.137-.036-.136-.037-.134-.038-.132-.039-.13-.039-.128-.041-.126-.041-.124-.041-.122-.043-.12-.043-.117-.044-.116-.044-.113-.046-.112-.046-.109-.046-.106-.048-.105-.048-.102-.048-.1-.05-.097-.049-.095-.051-.093-.051-.09-.052-.087-.052-.085-.053-.083-.053-.08-.054-.077-.054v4.153zm8.74-8.179l-.257.004-.254.005-.25.008-.247.011-.244.012-.241.014-.237.016-.233.018-.231.021-.226.022-.224.023-.22.026-.216.027-.212.028-.21.031-.205.032-.202.033-.198.034-.194.036-.191.038-.187.038-.183.04-.179.041-.175.042-.172.043-.168.043-.163.045-.16.046-.155.046-.152.048-.148.048-.143.048-.139.049-.136.05-.131.05-.126.051-.123.051-.118.051-.114.052-.11.052-.106.052-.101.052-.096.052-.092.052-.088.052-.083.052-.079.052-.074.051-.07.052-.065.051-.06.05-.056.05-.051.05-.023.025-.023.024-.021.024-.02.025-.019.024-.018.024-.017.023-.015.024-.014.023-.013.023-.012.023-.01.023-.01.022-.008.022-.006.023-.006.021-.004.022-.004.021-.001.021-.001.021.001.021.001.021.004.021.004.022.006.021.006.023.008.02
2.01.022.01.023.012.023.013.023.014.023.015.024.017.023.018.024.019.024.02.025.021.024.023.024.023.025.051.05.056.05.06.05.065.051.07.052.074.051.079.052.083.052.088.052.092.052.096.052.101.052.106.052.11.052.114.052.118.051.123.051.126.051.131.05.136.05.139.049.143.048.148.048.152.048.155.046.16.046.163.045.168.043.172.043.175.042.179.041.183.04.187.038.191.038.194.036.198.034.202.033.205.032.21.031.212.028.216.027.22.026.224.023.226.022.231.021.233.018.237.016.241.014.244.012.247.011.25.008.254.005.257.004.26.001.26-.001.257-.004.254-.005.25-.008.247-.011.244-.012.241-.014.237-.016.233-.018.231-.021.226-.022.224-.023.22-.026.216-.027.212-.028.21-.031.205-.032.202-.033.198-.034.194-.036.191-.038.187-.038.183-.04.179-.041.175-.042.172-.043.168-.043.163-.045.16-.046.155-.046.152-.048.148-.048.143-.048.139-.049.136-.05.131-.05.126-.051.123-.051.118-.051.114-.052.11-.052.106-.052.101-.052.096-.052.092-.052.088-.052.083-.052.079-.052.074-.051.07-.052.065-.051.06-.05.056-.05.051-.05.023-.025.023-.024.021-.024.02-.025.019-.024.018-.024.017-.023.015-.024.014-.023.013-.023.012-.023.01-.023.01-.022.008-.022.006-.023.006-.021.004-.022.004-.021.001-.021.001-.021-.001-.021-.001-.021-.004-.021-.004-.022-.006-.021-.006-.023-.008-.022-.01-.022-.01-.023-.012-.023-.013-.023-.014-.023-.015-.024-.017-.023-.018-.024-.019-.024-.02-.025-.021-.024-.023-.024-.023-.025-.051-.05-.056-.05-.06-.05-.065-.051-.07-.052-.074-.051-.079-.052-.083-.052-.088-.052-.092-.052-.096-.052-.101-.052-.106-.052-.11-.052-.114-.052-.118-.051-.123-.051-.126-.051-.131-.05-.136-.05-.139-.049-.143-.048-.148-.048-.152-.048-.155-.046-.16-.046-.163-.045-.168-.043-.172-.043-.175-.042-.179-.041-.183-.04-.187-.038-.191-.038-.194-.036-.198-.034-.202-.033-.205-.032-.21-.031-.212-.028-.216-.027-.22-.026-.224-.023-.226-.022-.231-.021-.233-.018-.237-.016-.241-.014-.244-.012-.247-.011-.25-.008-.254-.005-.257-.004-.26-.001-.26.001z")},Xit=function(t){t.append("defs").append("symbol").attr("id","computer").attr("width","24").attr
("height","24").append("path").attr("transform","scale(.5)").attr("d","M2 2v13h20v-13h-20zm18 11h-16v-9h16v9zm-10.228 6l.466-1h3.524l.467 1h-4.457zm14.228 3h-24l2-6h2.104l-1.33 4h18.45l-1.297-4h2.073l2 6zm-5-10h-14v-7h14v7z")},Kit=function(t){t.append("defs").append("symbol").attr("id","clock").attr("width","24").attr("height","24").append("path").attr("transform","scale(.5)").attr("d","M12 2c5.514 0 10 4.486 10 10s-4.486 10-10 10-10-4.486-10-10 4.486-10 10-10zm0-2c-6.627 0-12 5.373-12 12s5.373 12 12 12 12-5.373 12-12-5.373-12-12-12zm5.848 12.459c.202.038.202.333.001.372-1.907.361-6.045 1.111-6.547 1.111-.719 0-1.301-.582-1.301-1.301 0-.512.77-5.447 1.125-7.445.034-.192.312-.181.343.014l.985 6.238 5.394 1.011z")},Zit=function(t){t.append("defs").append("marker").attr("id","arrowhead").attr("refX",9).attr("refY",5).attr("markerUnits","userSpaceOnUse").attr("markerWidth",12).attr("markerHeight",12).attr("orient","auto").append("path").attr("d","M 0 0 L 10 5 L 0 10 z")},Qit=function(t){t.append("defs").append("marker").attr("id","filled-head").attr("refX",18).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 18,7 L9,13 L14,7 L9,1 Z")},Jit=function(t){t.append("defs").append("marker").attr("id","sequencenumber").attr("refX",15).attr("refY",15).attr("markerWidth",60).attr("markerHeight",40).attr("orient","auto").append("circle").attr("cx",15).attr("cy",15).attr("r",6)},tat=function(t){const r=t.append("defs").append("marker").attr("id","crosshead").attr("markerWidth",15).attr("markerHeight",8).attr("orient","auto").attr("refX",16).attr("refY",4);r.append("path").attr("fill","black").attr("stroke","#000000").style("stroke-dasharray","0, 0").attr("stroke-width","1px").attr("d","M 9,2 V 6 L16,4 Z"),r.append("path").attr("fill","none").attr("stroke","#000000").style("stroke-dasharray","0, 0").attr("stroke-width","1px").attr("d","M 0,1 L 6,7 M 6,1 L 0,7")},K4=function(){return{x:0,y:0,fill:void 0,anchor:void 
0,style:"#666",width:void 0,height:void 0,textMargin:0,rx:0,ry:0,tspan:!0,valign:void 0}},f0=function(){return{x:0,y:0,fill:"#EDF2AE",stroke:"#666",width:100,anchor:"start",height:100,rx:0,ry:0}},TL=function(){function t(i,a,s,o,l,u,h){const d=a.append("text").attr("x",s+l/2).attr("y",o+u/2+5).style("text-anchor","middle").text(i);n(d,h)}function e(i,a,s,o,l,u,h,d){const{actorFontSize:f,actorFontFamily:p,actorFontWeight:m}=d;let _=f&&f.replace?f.replace("px",""):f;const y=i.split(pe.lineBreakRegex);for(let b=0;b<y.length;b++){const x=b*_-_*(y.length-1)/2,k=a.append("text").attr("x",s+l/2).attr("y",o).style("text-anchor","middle").style("font-size",f).style("font-weight",m).style("font-family",p);k.append("tspan").attr("x",s+l/2).attr("dy",x).text(y[b]),k.attr("y",o+u/2).attr("dominant-baseline","central").attr("alignment-baseline","central"),n(k,h)}}function r(i,a,s,o,l,u,h,d){const f=a.append("switch"),m=f.append("foreignObject").attr("x",s).attr("y",o).attr("width",l).attr("height",u).append("xhtml:div").style("display","table").style("height","100%").style("width","100%");m.append("div").style("display","table-cell").style("text-align","center").style("vertical-align","middle").text(i),e(i,f,s,o,l,u,h,d),n(m,h)}function n(i,a){for(const s in a)a.hasOwnProperty(s)&&i.attr(s,a[s])}return function(i){return i.textPlacement==="fo"?r:i.textPlacement==="old"?t:e}}(),eat=function(){function t(i,a,s,o,l,u,h){const d=a.append("text").attr("x",s).attr("y",o).style("text-anchor","start").text(i);n(d,h)}function e(i,a,s,o,l,u,h,d){const{actorFontSize:f,actorFontFamily:p,actorFontWeight:m}=d,_=i.split(pe.lineBreakRegex);for(let y=0;y<_.length;y++){const b=y*f-f*(_.length-1)/2,x=a.append("text").attr("x",s).attr("y",o).style("text-anchor","start").style("font-size",f).style("font-weight",m).style("font-family",p);x.append("tspan").attr("x",s).attr("dy",b).text(_[y]),x.attr("y",o+u/2).attr("dominant-baseline","central").attr("alignment-baseline","central"),n(x,h)}}function 
r(i,a,s,o,l,u,h,d){const f=a.append("switch"),m=f.append("foreignObject").attr("x",s).attr("y",o).attr("width",l).attr("height",u).append("xhtml:div").style("display","table").style("height","100%").style("width","100%");m.append("div").style("display","table-cell").style("text-align","center").style("vertical-align","middle").text(i),e(i,f,s,o,l,u,h,d),n(m,h)}function n(i,a){for(const s in a)a.hasOwnProperty(s)&&i.attr(s,a[s])}return function(i){return i.textPlacement==="fo"?r:i.textPlacement==="old"?t:e}}(),or={drawRect:h0,drawText:hl,drawLabel:kL,drawActor:Uit,drawPopup:Oit,drawImage:vL,drawEmbeddedImage:xL,anchorElement:Wit,drawActivation:Hit,drawLoop:Git,drawBackgroundRect:jit,insertArrowHead:Zit,insertArrowFilledHead:Qit,insertSequenceNumber:Jit,insertArrowCrossHead:tat,insertDatabaseIcon:$it,insertComputerIcon:Xit,insertClockIcon:Kit,getTextObj:K4,getNoteRect:f0,popupMenu:Fit,popdownMenu:Pit,fixLifeLineHeights:wL,sanitizeUrl:ki};let dt={};const Bt={data:{startx:void 0,stopx:void 0,starty:void 0,stopy:void 0},verticalPos:0,sequenceItems:[],activations:[],models:{getHeight:function(){return Math.max.apply(null,this.actors.length===0?[0]:this.actors.map(t=>t.height||0))+(this.loops.length===0?0:this.loops.map(t=>t.height||0).reduce((t,e)=>t+e))+(this.messages.length===0?0:this.messages.map(t=>t.height||0).reduce((t,e)=>t+e))+(this.notes.length===0?0:this.notes.map(t=>t.height||0).reduce((t,e)=>t+e))},clear:function(){this.actors=[],this.loops=[],this.messages=[],this.notes=[]},addActor:function(t){this.actors.push(t)},addLoop:function(t){this.loops.push(t)},addMessage:function(t){this.messages.push(t)},addNote:function(t){this.notes.push(t)},lastActor:function(){return this.actors[this.actors.length-1]},lastLoop:function(){return this.loops[this.loops.length-1]},lastMessage:function(){return this.messages[this.messages.length-1]},lastNote:function(){return 
this.notes[this.notes.length-1]},actors:[],loops:[],messages:[],notes:[]},init:function(){this.sequenceItems=[],this.activations=[],this.models.clear(),this.data={startx:void 0,stopx:void 0,starty:void 0,stopy:void 0},this.verticalPos=0,CL(nt())},updateVal:function(t,e,r,n){typeof t[e]>"u"?t[e]=r:t[e]=n(r,t[e])},updateBounds:function(t,e,r,n){const i=this;let a=0;function s(o){return function(u){a++;const h=i.sequenceItems.length-a+1;i.updateVal(u,"starty",e-h*dt.boxMargin,Math.min),i.updateVal(u,"stopy",n+h*dt.boxMargin,Math.max),i.updateVal(Bt.data,"startx",t-h*dt.boxMargin,Math.min),i.updateVal(Bt.data,"stopx",r+h*dt.boxMargin,Math.max),o!=="activation"&&(i.updateVal(u,"startx",t-h*dt.boxMargin,Math.min),i.updateVal(u,"stopx",r+h*dt.boxMargin,Math.max),i.updateVal(Bt.data,"starty",e-h*dt.boxMargin,Math.min),i.updateVal(Bt.data,"stopy",n+h*dt.boxMargin,Math.max))}}this.sequenceItems.forEach(s()),this.activations.forEach(s("activation"))},insert:function(t,e,r,n){const i=Math.min(t,r),a=Math.max(t,r),s=Math.min(e,n),o=Math.max(e,n);this.updateVal(Bt.data,"startx",i,Math.min),this.updateVal(Bt.data,"starty",s,Math.min),this.updateVal(Bt.data,"stopx",a,Math.max),this.updateVal(Bt.data,"stopy",o,Math.max),this.updateBounds(i,s,a,o)},newActivation:function(t,e,r){const n=r[t.from.actor],i=d0(t.from.actor).length||0,a=n.x+n.width/2+(i-1)*dt.activationWidth/2;this.activations.push({startx:a,starty:this.verticalPos+2,stopx:a+dt.activationWidth,stopy:void 0,actor:t.from.actor,anchored:or.anchorElement(e)})},endActivation:function(t){const e=this.activations.map(function(r){return r.actor}).lastIndexOf(t.from.actor);return this.activations.splice(e,1)[0]},createLoop:function(t={message:void 0,wrap:!1,width:void 0},e){return{startx:void 0,starty:this.verticalPos,stopx:void 0,stopy:void 0,title:t.message,wrap:t.wrap,width:t.width,height:0,fill:e}},newLoop:function(t={message:void 0,wrap:!1,width:void 
0},e){this.sequenceItems.push(this.createLoop(t,e))},endLoop:function(){return this.sequenceItems.pop()},addSectionToLoop:function(t){const e=this.sequenceItems.pop();e.sections=e.sections||[],e.sectionTitles=e.sectionTitles||[],e.sections.push({y:Bt.getVerticalPos(),height:0}),e.sectionTitles.push(t),this.sequenceItems.push(e)},bumpVerticalPos:function(t){this.verticalPos=this.verticalPos+t,this.data.stopy=this.verticalPos},getVerticalPos:function(){return this.verticalPos},getBounds:function(){return{bounds:this.data,models:this.models}}},rat=function(t,e){Bt.bumpVerticalPos(dt.boxMargin),e.height=dt.boxMargin,e.starty=Bt.getVerticalPos();const r=or.getNoteRect();r.x=e.startx,r.y=e.starty,r.width=e.width||dt.width,r.class="note";const n=t.append("g"),i=or.drawRect(n,r),a=or.getTextObj();a.x=e.startx,a.y=e.starty,a.width=r.width,a.dy="1em",a.text=e.message,a.class="noteText",a.fontFamily=dt.noteFontFamily,a.fontSize=dt.noteFontSize,a.fontWeight=dt.noteFontWeight,a.anchor=dt.noteAlign,a.textMargin=dt.noteMargin,a.valign="center";const s=hl(n,a),o=Math.round(s.map(l=>(l._groups||l)[0][0].getBBox().height).reduce((l,u)=>l+u));i.attr("height",o+2*dt.noteMargin),e.height+=o+2*dt.noteMargin,Bt.bumpVerticalPos(o+2*dt.noteMargin),e.stopy=e.starty+o+2*dt.noteMargin,e.stopx=e.startx+r.width,Bt.insert(e.startx,e.starty,e.stopx,e.stopy),Bt.models.addNote(e)},fl=t=>({fontFamily:t.messageFontFamily,fontSize:t.messageFontSize,fontWeight:t.messageFontWeight}),dl=t=>({fontFamily:t.noteFontFamily,fontSize:t.noteFontSize,fontWeight:t.noteFontWeight}),Z4=t=>({fontFamily:t.actorFontFamily,fontSize:t.actorFontSize,fontWeight:t.actorFontWeight}),nat=function(t,e){Bt.bumpVerticalPos(10);const{startx:r,stopx:n,message:i}=e,a=pe.splitBreaks(i).length,s=Se.calculateTextDimensions(i,fl(dt)),o=s.height/a;e.height+=o,Bt.bumpVerticalPos(o);let l,u=s.height-10;const h=s.width;if(r===n){l=Bt.getVerticalPos()+u,dt.rightAngles||(u+=dt.boxMargin,l=Bt.getVerticalPos()+u),u+=30;const 
d=Math.max(h/2,dt.width/2);Bt.insert(r-d,Bt.getVerticalPos()-10+u,n+d,Bt.getVerticalPos()+30+u)}else u+=dt.boxMargin,l=Bt.getVerticalPos()+u,Bt.insert(r,l-10,n,l);return Bt.bumpVerticalPos(u),e.height+=u,e.stopy=e.starty+e.height,Bt.insert(e.fromBounds,e.starty,e.toBounds,e.stopy),l},iat=function(t,e,r,n){const{startx:i,stopx:a,starty:s,message:o,type:l,sequenceIndex:u,sequenceVisible:h}=e,d=Se.calculateTextDimensions(o,fl(dt)),f=or.getTextObj();f.x=i,f.y=s+10,f.width=a-i,f.class="messageText",f.dy="1em",f.text=o,f.fontFamily=dt.messageFontFamily,f.fontSize=dt.messageFontSize,f.fontWeight=dt.messageFontWeight,f.anchor=dt.messageAlign,f.valign="center",f.textMargin=dt.wrapPadding,f.tspan=!1,hl(t,f);const p=d.width;let m;i===a?dt.rightAngles?m=t.append("path").attr("d",`M ${i},${r} H ${i+Math.max(dt.width/2,p/2)} V ${r+25} H ${i}`):m=t.append("path").attr("d","M "+i+","+r+" C "+(i+60)+","+(r-10)+" "+(i+60)+","+(r+30)+" "+i+","+(r+20)):(m=t.append("line"),m.attr("x1",i),m.attr("y1",r),m.attr("x2",a),m.attr("y2",r)),l===n.db.LINETYPE.DOTTED||l===n.db.LINETYPE.DOTTED_CROSS||l===n.db.LINETYPE.DOTTED_POINT||l===n.db.LINETYPE.DOTTED_OPEN?(m.style("stroke-dasharray","3, 3"),m.attr("class","messageLine1")):m.attr("class","messageLine0");let 
_="";dt.arrowMarkerAbsolute&&(_=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,_=_.replace(/\(/g,"\\("),_=_.replace(/\)/g,"\\)")),m.attr("stroke-width",2),m.attr("stroke","none"),m.style("fill","none"),(l===n.db.LINETYPE.SOLID||l===n.db.LINETYPE.DOTTED)&&m.attr("marker-end","url("+_+"#arrowhead)"),(l===n.db.LINETYPE.SOLID_POINT||l===n.db.LINETYPE.DOTTED_POINT)&&m.attr("marker-end","url("+_+"#filled-head)"),(l===n.db.LINETYPE.SOLID_CROSS||l===n.db.LINETYPE.DOTTED_CROSS)&&m.attr("marker-end","url("+_+"#crosshead)"),(h||dt.showSequenceNumbers)&&(m.attr("marker-start","url("+_+"#sequencenumber)"),t.append("text").attr("x",i).attr("y",r+4).attr("font-family","sans-serif").attr("font-size","12px").attr("text-anchor","middle").attr("class","sequenceNumber").text(u))},Q4=function(t,e,r,n,i,a){if(i.hideUnusedParticipants===!0){const u=new Set;a.forEach(h=>{u.add(h.from),u.add(h.to)}),r=r.filter(h=>u.has(h))}let s=0,o=0,l=0;for(let u=0;u<r.length;u++){const h=e[r[u]];h.width=h.width||dt.width,h.height=Math.max(h.height||dt.height,dt.height),h.margin=h.margin||dt.actorMargin,h.x=s+o,h.y=n;const d=or.drawActor(t,h,dt);l=Math.max(l,d),Bt.insert(h.x,n,h.x+h.width,h.height),s+=h.width,o+=h.margin,Bt.models.addActor(h)}Bt.bumpVerticalPos(l)},EL=function(t,e,r,n){let i=0,a=0;for(let s=0;s<r.length;s++){const o=e[r[s]],l=oat(o),u=or.drawPopup(t,o,l,dt,dt.forceMenus,n);u.height>i&&(i=u.height),u.width+o.x>a&&(a=u.width+o.x)}return{maxHeight:i,maxWidth:a}},CL=function(t){fr(dt,t),t.fontFamily&&(dt.actorFontFamily=dt.noteFontFamily=dt.messageFontFamily=t.fontFamily),t.fontSize&&(dt.actorFontSize=dt.noteFontSize=dt.messageFontSize=t.fontSize),t.fontWeight&&(dt.actorFontWeight=dt.noteFontWeight=dt.messageFontWeight=t.fontWeight)},d0=function(t){return Bt.activations.filter(function(e){return e.actor===t})},SL=function(t,e){const r=e[t],n=d0(t),i=n.reduce(function(s,o){return 
Math.min(s,o.startx)},r.x+r.width/2),a=n.reduce(function(s,o){return Math.max(s,o.stopx)},r.x+r.width/2);return[i,a]};function Ui(t,e,r,n,i){Bt.bumpVerticalPos(r);let a=n;if(e.id&&e.message&&t[e.id]){const s=t[e.id].width,o=fl(dt);e.message=Se.wrapLabel(`[${e.message}]`,s-2*dt.wrapPadding,o),e.width=s,e.wrap=!0;const l=Se.calculateTextDimensions(e.message,o),u=Math.max(l.height,dt.labelBoxHeight);a=n+u,H.debug(`${u} - ${e.message}`)}i(e),Bt.bumpVerticalPos(a)}const aat=function(t,e,r,n){const{securityLevel:i,sequence:a}=nt();dt=a;let s;i==="sandbox"&&(s=St("#i"+e));const o=St(i==="sandbox"?s.nodes()[0].contentDocument.body:"body"),l=i==="sandbox"?s.nodes()[0].contentDocument:document;Bt.init(),H.debug(n.db);const u=i==="sandbox"?o.select(`[id="${e}"]`):St(`[id="${e}"]`),h=n.db.getActors(),d=n.db.getActorKeys(),f=n.db.getMessages(),p=n.db.getDiagramTitle(),m=sat(h,f,n);dt.height=lat(h,m),or.insertComputerIcon(u),or.insertDatabaseIcon(u),or.insertClockIcon(u),Q4(u,h,d,0,dt,f);const _=hat(f,h,m,n);or.insertArrowHead(u),or.insertArrowCrossHead(u),or.insertArrowFilledHead(u),or.insertSequenceNumber(u);function y(B,w){const D=Bt.endActivation(B);D.starty+18>w&&(D.starty=w-6,w+=12),or.drawActivation(u,D,w,dt,d0(B.from.actor).length),Bt.insert(D.startx,w-10,D.stopx,w)}let b=1,x=1;const k=[];f.forEach(function(B){let w,D,N;switch(B.type){case n.db.LINETYPE.NOTE:D=B.noteModel,rat(u,D);break;case n.db.LINETYPE.ACTIVE_START:Bt.newActivation(B,u,h);break;case n.db.LINETYPE.ACTIVE_END:y(B,Bt.getVerticalPos());break;case n.db.LINETYPE.LOOP_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case n.db.LINETYPE.LOOP_END:w=Bt.endLoop(),or.drawLoop(u,w,"loop",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;case n.db.LINETYPE.RECT_START:Ui(_,B,dt.boxMargin,dt.boxMargin,z=>Bt.newLoop(void 0,z.message));break;case 
n.db.LINETYPE.RECT_END:w=Bt.endLoop(),or.drawBackgroundRect(u,w),Bt.models.addLoop(w),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos());break;case n.db.LINETYPE.OPT_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case n.db.LINETYPE.OPT_END:w=Bt.endLoop(),or.drawLoop(u,w,"opt",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;case n.db.LINETYPE.ALT_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case n.db.LINETYPE.ALT_ELSE:Ui(_,B,dt.boxMargin+dt.boxTextMargin,dt.boxMargin,z=>Bt.addSectionToLoop(z));break;case n.db.LINETYPE.ALT_END:w=Bt.endLoop(),or.drawLoop(u,w,"alt",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;case n.db.LINETYPE.PAR_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case n.db.LINETYPE.PAR_AND:Ui(_,B,dt.boxMargin+dt.boxTextMargin,dt.boxMargin,z=>Bt.addSectionToLoop(z));break;case n.db.LINETYPE.PAR_END:w=Bt.endLoop(),or.drawLoop(u,w,"par",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;case n.db.LINETYPE.AUTONUMBER:b=B.message.start||b,x=B.message.step||x,B.message.visible?n.db.enableSequenceNumbers():n.db.disableSequenceNumbers();break;case n.db.LINETYPE.CRITICAL_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case n.db.LINETYPE.CRITICAL_OPTION:Ui(_,B,dt.boxMargin+dt.boxTextMargin,dt.boxMargin,z=>Bt.addSectionToLoop(z));break;case n.db.LINETYPE.CRITICAL_END:w=Bt.endLoop(),or.drawLoop(u,w,"critical",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;case n.db.LINETYPE.BREAK_START:Ui(_,B,dt.boxMargin,dt.boxMargin+dt.boxTextMargin,z=>Bt.newLoop(z));break;case 
n.db.LINETYPE.BREAK_END:w=Bt.endLoop(),or.drawLoop(u,w,"break",dt),Bt.bumpVerticalPos(w.stopy-Bt.getVerticalPos()),Bt.models.addLoop(w);break;default:try{N=B.msgModel,N.starty=Bt.getVerticalPos(),N.sequenceIndex=b,N.sequenceVisible=n.db.showSequenceNumbers();const z=nat(u,N);k.push({messageModel:N,lineStarty:z}),Bt.models.addMessage(N)}catch(z){H.error("error while drawing message",z)}}[n.db.LINETYPE.SOLID_OPEN,n.db.LINETYPE.DOTTED_OPEN,n.db.LINETYPE.SOLID,n.db.LINETYPE.DOTTED,n.db.LINETYPE.SOLID_CROSS,n.db.LINETYPE.DOTTED_CROSS,n.db.LINETYPE.SOLID_POINT,n.db.LINETYPE.DOTTED_POINT].includes(B.type)&&(b=b+x)}),k.forEach(B=>iat(u,B.messageModel,B.lineStarty,n)),dt.mirrorActors&&(Bt.bumpVerticalPos(dt.boxMargin*2),Q4(u,h,d,Bt.getVerticalPos(),dt,f),Bt.bumpVerticalPos(dt.boxMargin),wL(u,Bt.getVerticalPos()));const T=EL(u,h,d,l),{bounds:C}=Bt.getBounds();H.debug("For line height fix Querying: #"+e+" .actor-line"),Nu("#"+e+" .actor-line").attr("y2",C.stopy);let S=C.stopy-C.starty;S<T.maxHeight&&(S=T.maxHeight);let R=S+2*dt.diagramMarginY;dt.mirrorActors&&(R=R-dt.boxMargin+dt.bottomMarginAdj);let A=C.stopx-C.startx;A<T.maxWidth&&(A=T.maxWidth);const L=A+2*dt.diagramMarginX;p&&u.append("text").text(p).attr("x",(C.stopx-C.startx)/2-2*dt.diagramMarginX).attr("y",-25),li(u,R,L,dt.useMaxWidth);const v=p?40:0;u.attr("viewBox",C.startx-dt.diagramMarginX+" -"+(dt.diagramMarginY+v)+" "+L+" "+(R+v)),bn(n.db,u,e),H.debug("models:",Bt.models)},sat=function(t,e,r){const n={};return e.forEach(function(i){if(t[i.to]&&t[i.from]){const a=t[i.to];if(i.placement===r.db.PLACEMENT.LEFTOF&&!a.prevActor||i.placement===r.db.PLACEMENT.RIGHTOF&&!a.nextActor)return;const s=i.placement!==void 
0,o=!s,l=s?dl(dt):fl(dt),u=i.wrap?Se.wrapLabel(i.message,dt.width-2*dt.wrapPadding,l):i.message,d=Se.calculateTextDimensions(u,l).width+2*dt.wrapPadding;o&&i.from===a.nextActor?n[i.to]=Math.max(n[i.to]||0,d):o&&i.from===a.prevActor?n[i.from]=Math.max(n[i.from]||0,d):o&&i.from===i.to?(n[i.from]=Math.max(n[i.from]||0,d/2),n[i.to]=Math.max(n[i.to]||0,d/2)):i.placement===r.db.PLACEMENT.RIGHTOF?n[i.from]=Math.max(n[i.from]||0,d):i.placement===r.db.PLACEMENT.LEFTOF?n[a.prevActor]=Math.max(n[a.prevActor]||0,d):i.placement===r.db.PLACEMENT.OVER&&(a.prevActor&&(n[a.prevActor]=Math.max(n[a.prevActor]||0,d/2)),a.nextActor&&(n[i.from]=Math.max(n[i.from]||0,d/2)))}}),H.debug("maxMessageWidthPerActor:",n),n},oat=function(t){let e=0;const r=Z4(dt);for(const n in t.links){const a=Se.calculateTextDimensions(n,r).width+2*dt.wrapPadding+2*dt.boxMargin;e<a&&(e=a)}return e},lat=function(t,e){let r=0;Object.keys(t).forEach(n=>{const i=t[n];i.wrap&&(i.description=Se.wrapLabel(i.description,dt.width-2*dt.wrapPadding,Z4(dt)));const a=Se.calculateTextDimensions(i.description,Z4(dt));i.width=i.wrap?dt.width:Math.max(dt.width,a.width+2*dt.wrapPadding),i.height=i.wrap?Math.max(a.height,dt.height):dt.height,r=Math.max(r,i.height)});for(const n in e){const i=t[n];if(!i)continue;const a=t[i.nextActor];if(!a)continue;const o=e[n]+dt.actorMargin-i.width/2-a.width/2;i.margin=Math.max(o,dt.actorMargin)}return Math.max(r,dt.height)},cat=function(t,e,r){const n=e[t.from].x,i=e[t.to].x,a=t.wrap&&t.message;let s=Se.calculateTextDimensions(a?Se.wrapLabel(t.message,dt.width,dl(dt)):t.message,dl(dt));const o={width:a?dt.width:Math.max(dt.width,s.width+2*dt.noteMargin),height:0,startx:e[t.from].x,stopx:0,starty:0,stopy:0,message:t.message};return 
t.placement===r.db.PLACEMENT.RIGHTOF?(o.width=a?Math.max(dt.width,s.width):Math.max(e[t.from].width/2+e[t.to].width/2,s.width+2*dt.noteMargin),o.startx=n+(e[t.from].width+dt.actorMargin)/2):t.placement===r.db.PLACEMENT.LEFTOF?(o.width=Math.max(a?dt.width:e[t.from].width/2+e[t.to].width/2,s.width+2*dt.noteMargin),o.startx=n-o.width+(e[t.from].width-dt.actorMargin)/2):t.to===t.from?(s=Se.calculateTextDimensions(a?Se.wrapLabel(t.message,Math.max(dt.width,e[t.from].width),dl(dt)):t.message,dl(dt)),o.width=a?Math.max(dt.width,e[t.from].width):Math.max(e[t.from].width,dt.width,s.width+2*dt.noteMargin),o.startx=n+(e[t.from].width-o.width)/2):(o.width=Math.abs(n+e[t.from].width/2-(i+e[t.to].width/2))+dt.actorMargin,o.startx=n<i?n+e[t.from].width/2-dt.actorMargin/2:i+e[t.to].width/2-dt.actorMargin/2),a&&(o.message=Se.wrapLabel(t.message,o.width-2*dt.wrapPadding,dl(dt))),H.debug(`NM:[${o.startx},${o.stopx},${o.starty},${o.stopy}:${o.width},${o.height}=${t.message}]`),o},uat=function(t,e,r){let n=!1;if([r.db.LINETYPE.SOLID_OPEN,r.db.LINETYPE.DOTTED_OPEN,r.db.LINETYPE.SOLID,r.db.LINETYPE.DOTTED,r.db.LINETYPE.SOLID_CROSS,r.db.LINETYPE.DOTTED_CROSS,r.db.LINETYPE.SOLID_POINT,r.db.LINETYPE.DOTTED_POINT].includes(t.type)&&(n=!0),!n)return{};const i=SL(t.from,e),a=SL(t.to,e),s=i[0]<=a[0]?1:0,o=i[0]<a[0]?0:1,l=i.concat(a),u=Math.abs(a[o]-i[s]);t.wrap&&t.message&&(t.message=Se.wrapLabel(t.message,Math.max(u+2*dt.wrapPadding,dt.width),fl(dt)));const h=Se.calculateTextDimensions(t.message,fl(dt));return{width:Math.max(t.wrap?0:h.width+2*dt.wrapPadding,u+2*dt.wrapPadding,dt.width),height:0,startx:i[s],stopx:a[o],starty:0,stopy:0,message:t.message,type:t.type,wrap:t.wrap,fromBounds:Math.min.apply(null,l),toBounds:Math.max.apply(null,l)}},hat=function(t,e,r,n){const i={},a=[];let s,o,l;return t.forEach(function(u){switch(u.id=Se.random({length:10}),u.type){case n.db.LINETYPE.LOOP_START:case n.db.LINETYPE.ALT_START:case n.db.LINETYPE.OPT_START:case n.db.LINETYPE.PAR_START:case 
n.db.LINETYPE.CRITICAL_START:case n.db.LINETYPE.BREAK_START:a.push({id:u.id,msg:u.message,from:Number.MAX_SAFE_INTEGER,to:Number.MIN_SAFE_INTEGER,width:0});break;case n.db.LINETYPE.ALT_ELSE:case n.db.LINETYPE.PAR_AND:case n.db.LINETYPE.CRITICAL_OPTION:u.message&&(s=a.pop(),i[s.id]=s,i[u.id]=s,a.push(s));break;case n.db.LINETYPE.LOOP_END:case n.db.LINETYPE.ALT_END:case n.db.LINETYPE.OPT_END:case n.db.LINETYPE.PAR_END:case n.db.LINETYPE.CRITICAL_END:case n.db.LINETYPE.BREAK_END:s=a.pop(),i[s.id]=s;break;case n.db.LINETYPE.ACTIVE_START:{const d=e[u.from?u.from.actor:u.to.actor],f=d0(u.from?u.from.actor:u.to.actor).length,p=d.x+d.width/2+(f-1)*dt.activationWidth/2,m={startx:p,stopx:p+dt.activationWidth,actor:u.from.actor,enabled:!0};Bt.activations.push(m)}break;case n.db.LINETYPE.ACTIVE_END:{const d=Bt.activations.map(f=>f.actor).lastIndexOf(u.from.actor);delete Bt.activations.splice(d,1)[0]}break}u.placement!==void 0?(o=cat(u,e,n),u.noteModel=o,a.forEach(d=>{s=d,s.from=Math.min(s.from,o.startx),s.to=Math.max(s.to,o.startx+o.width),s.width=Math.max(s.width,Math.abs(s.from-s.to))-dt.labelBoxWidth})):(l=uat(u,e,n),u.msgModel=l,l.startx&&l.stopx&&a.length>0&&a.forEach(d=>{if(s=d,l.startx===l.stopx){const f=e[u.from],p=e[u.to];s.from=Math.min(f.x-l.width/2,f.x-f.width/2,s.from),s.to=Math.max(p.x+l.width/2,p.x+f.width/2,s.to),s.width=Math.max(s.width,Math.abs(s.to-s.from))-dt.labelBoxWidth}else s.from=Math.min(l.startx,s.from),s.to=Math.max(l.stopx,s.to),s.width=Math.max(s.width,l.width)-dt.labelBoxWidth}))}),Bt.activations=[],H.debug("Loop type widths:",i),i},AL={bounds:Bt,drawActors:Q4,drawActorsPopup:EL,setConf:CL,draw:aat};var p0=function(){var t=function(ct,J,Y,$){for(Y=Y||{},$=ct.length;$--;Y[ct[$]]=J);return 
Y},e=[1,2],r=[1,3],n=[1,5],i=[1,7],a=[2,5],s=[1,15],o=[1,17],l=[1,19],u=[1,20],h=[1,21],d=[1,22],f=[1,33],p=[1,23],m=[1,24],_=[1,25],y=[1,26],b=[1,27],x=[1,30],k=[1,31],T=[1,32],C=[1,35],M=[1,36],S=[1,37],R=[1,38],A=[1,34],L=[1,41],v=[1,4,5,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],B=[1,4,5,12,13,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],w=[1,4,5,7,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],D=[4,5,14,15,17,19,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],N={trace:function(){},yy:{},symbols_:{error:2,start:3,SPACE:4,NL:5,directive:6,SD:7,document:8,line:9,statement:10,idStatement:11,DESCR:12,"-->":13,HIDE_EMPTY:14,scale:15,WIDTH:16,COMPOSIT_STATE:17,STRUCT_START:18,STRUCT_STOP:19,STATE_DESCR:20,AS:21,ID:22,FORK:23,JOIN:24,CHOICE:25,CONCURRENT:26,note:27,notePosition:28,NOTE_TEXT:29,direction:30,acc_title:31,acc_title_value:32,acc_descr:33,acc_descr_value:34,acc_descr_multiline_value:35,openDirective:36,typeDirective:37,closeDirective:38,":":39,argDirective:40,direction_tb:41,direction_bt:42,direction_rl:43,direction_lr:44,eol:45,";":46,EDGE_STATE:47,left_of:48,right_of:49,open_directive:50,type_directive:51,arg_directive:52,close_directive:53,$accept:0,$end:1},terminals_:{2:"error",4:"SPACE",5:"NL",7:"SD",12:"DESCR",13:"-->",14:"HIDE_EMPTY",15:"scale",16:"WIDTH",17:"COMPOSIT_STATE",18:"STRUCT_START",19:"STRUCT_STOP",20:"STATE_DESCR",21:"AS",22:"ID",23:"FORK",24:"JOIN",25:"CHOICE",26:"CONCURRENT",27:"note",29:"NOTE_TEXT",31:"acc_title",32:"acc_title_value",33:"acc_descr",34:"acc_descr_value",35:"acc_descr_multiline_value",39:":",41:"direction_tb",42:"direction_bt",43:"direction_rl",44:"direction_lr",46:";",47:"EDGE_STATE",48:"left_of",49:"right_of",50:"open_directive",51:"type_directive",52:"arg_directive",53:"close_directive"},productions_:[0,[3,2],[3,2],[3,2],[3,2],[8,0],[8,2],[9,2],[9,1],[9,1],[10,1],[10,2],[10,3],[10,4],[10,1],[10,2],[10,1],[10,4],[10,3],[10,6],[10,1],[10,1],[10,1],[10,1],[10,4],[10,4],[10
,1],[10,1],[10,2],[10,2],[10,1],[6,3],[6,5],[30,1],[30,1],[30,1],[30,1],[45,1],[45,1],[11,1],[11,1],[28,1],[28,1],[36,1],[37,1],[40,1],[38,1]],performAction:function(J,Y,$,lt,ut,W,tt){var K=W.length-1;switch(ut){case 4:return lt.setRootDoc(W[K]),W[K];case 5:this.$=[];break;case 6:W[K]!="nl"&&(W[K-1].push(W[K]),this.$=W[K-1]);break;case 7:case 8:this.$=W[K];break;case 9:this.$="nl";break;case 10:this.$={stmt:"state",id:W[K],type:"default",description:""};break;case 11:this.$={stmt:"state",id:W[K-1],type:"default",description:lt.trimColon(W[K])};break;case 12:this.$={stmt:"relation",state1:{stmt:"state",id:W[K-2],type:"default",description:""},state2:{stmt:"state",id:W[K],type:"default",description:""}};break;case 13:this.$={stmt:"relation",state1:{stmt:"state",id:W[K-3],type:"default",description:""},state2:{stmt:"state",id:W[K-1],type:"default",description:""},description:W[K].substr(1).trim()};break;case 17:this.$={stmt:"state",id:W[K-3],type:"default",description:"",doc:W[K-1]};break;case 18:var it=W[K],Z=W[K-2].trim();if(W[K].match(":")){var V=W[K].split(":");it=V[0],Z=[Z,V[1]]}this.$={stmt:"state",id:it,type:"default",description:Z};break;case 19:this.$={stmt:"state",id:W[K-3],type:"default",description:W[K-5],doc:W[K-1]};break;case 20:this.$={stmt:"state",id:W[K],type:"fork"};break;case 21:this.$={stmt:"state",id:W[K],type:"join"};break;case 22:this.$={stmt:"state",id:W[K],type:"choice"};break;case 23:this.$={stmt:"state",id:lt.getDividerId(),type:"divider"};break;case 24:this.$={stmt:"state",id:W[K-1].trim(),note:{position:W[K-2].trim(),text:W[K].trim()}};break;case 28:this.$=W[K].trim(),lt.setAccTitle(this.$);break;case 29:case 30:this.$=W[K].trim(),lt.setAccDescription(this.$);break;case 33:lt.setDirection("TB"),this.$={stmt:"dir",value:"TB"};break;case 34:lt.setDirection("BT"),this.$={stmt:"dir",value:"BT"};break;case 35:lt.setDirection("RL"),this.$={stmt:"dir",value:"RL"};break;case 36:lt.setDirection("LR"),this.$={stmt:"dir",value:"LR"};break;case 
39:case 40:this.$=W[K];break;case 43:lt.parseDirective("%%{","open_directive");break;case 44:lt.parseDirective(W[K],"type_directive");break;case 45:W[K]=W[K].trim().replace(/'/g,'"'),lt.parseDirective(W[K],"arg_directive");break;case 46:lt.parseDirective("}%%","close_directive","state");break}},table:[{3:1,4:e,5:r,6:4,7:n,36:6,50:i},{1:[3]},{3:8,4:e,5:r,6:4,7:n,36:6,50:i},{3:9,4:e,5:r,6:4,7:n,36:6,50:i},{3:10,4:e,5:r,6:4,7:n,36:6,50:i},t([1,4,5,14,15,17,20,22,23,24,25,26,27,31,33,35,41,42,43,44,47,50],a,{8:11}),{37:12,51:[1,13]},{51:[2,43]},{1:[2,1]},{1:[2,2]},{1:[2,3]},{1:[2,4],4:s,5:o,6:28,9:14,10:16,11:18,14:l,15:u,17:h,20:d,22:f,23:p,24:m,25:_,26:y,27:b,30:29,31:x,33:k,35:T,36:6,41:C,42:M,43:S,44:R,47:A,50:i},{38:39,39:[1,40],53:L},t([39,53],[2,44]),t(v,[2,6]),{6:28,10:42,11:18,14:l,15:u,17:h,20:d,22:f,23:p,24:m,25:_,26:y,27:b,30:29,31:x,33:k,35:T,36:6,41:C,42:M,43:S,44:R,47:A,50:i},t(v,[2,8]),t(v,[2,9]),t(v,[2,10],{12:[1,43],13:[1,44]}),t(v,[2,14]),{16:[1,45]},t(v,[2,16],{18:[1,46]}),{21:[1,47]},t(v,[2,20]),t(v,[2,21]),t(v,[2,22]),t(v,[2,23]),{28:48,29:[1,49],48:[1,50],49:[1,51]},t(v,[2,26]),t(v,[2,27]),{32:[1,52]},{34:[1,53]},t(v,[2,30]),t(B,[2,39]),t(B,[2,40]),t(v,[2,33]),t(v,[2,34]),t(v,[2,35]),t(v,[2,36]),t(w,[2,31]),{40:54,52:[1,55]},t(w,[2,46]),t(v,[2,7]),t(v,[2,11]),{11:56,22:f,47:A},t(v,[2,15]),t(D,a,{8:57}),{22:[1,58]},{22:[1,59]},{21:[1,60]},{22:[2,41]},{22:[2,42]},t(v,[2,28]),t(v,[2,29]),{38:61,53:L},{53:[2,45]},t(v,[2,12],{12:[1,62]}),{4:s,5:o,6:28,9:14,10:16,11:18,14:l,15:u,17:h,19:[1,63],20:d,22:f,23:p,24:m,25:_,26:y,27:b,30:29,31:x,33:k,35:T,36:6,41:C,42:M,43:S,44:R,47:A,50:i},t(v,[2,18],{18:[1,64]}),{29:[1,65]},{22:[1,66]},t(w,[2,32]),t(v,[2,13]),t(v,[2,17]),t(D,a,{8:67}),t(v,[2,24]),t(v,[2,25]),{4:s,5:o,6:28,9:14,10:16,11:18,14:l,15:u,17:h,19:[1,68],20:d,22:f,23:p,24:m,25:_,26:y,27:b,30:29,31:x,33:k,35:T,36:6,41:C,42:M,43:S,44:R,47:A,50:i},t(v,[2,19])],defaultActions:{7:[2,43],8:[2,1],9:[2,2],10:[2,3],50:[2,41],51:[2,42],55:[2,45]},parseError:f
unction(J,Y){if(Y.recoverable)this.trace(J);else{var $=new Error(J);throw $.hash=Y,$}},parse:function(J){var Y=this,$=[0],lt=[],ut=[null],W=[],tt=this.table,K="",it=0,Z=0,V=2,Q=1,q=W.slice.call(arguments,1),U=Object.create(this.lexer),F={yy:{}};for(var j in this.yy)Object.prototype.hasOwnProperty.call(this.yy,j)&&(F.yy[j]=this.yy[j]);U.setInput(J,F.yy),F.yy.lexer=U,F.yy.parser=this,typeof U.yylloc>"u"&&(U.yylloc={});var P=U.yylloc;W.push(P);var et=U.options&&U.options.ranges;typeof F.yy.parseError=="function"?this.parseError=F.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function at(){var Dt;return Dt=lt.pop()||U.lex()||Q,typeof Dt!="number"&&(Dt instanceof Array&&(lt=Dt,Dt=lt.pop()),Dt=Y.symbols_[Dt]||Dt),Dt}for(var It,Lt,Rt,Ct,pt={},mt,vt,Tt,ft;;){if(Lt=$[$.length-1],this.defaultActions[Lt]?Rt=this.defaultActions[Lt]:((It===null||typeof It>"u")&&(It=at()),Rt=tt[Lt]&&tt[Lt][It]),typeof Rt>"u"||!Rt.length||!Rt[0]){var le="";ft=[];for(mt in tt[Lt])this.terminals_[mt]&&mt>V&&ft.push("'"+this.terminals_[mt]+"'");U.showPosition?le="Parse error on line "+(it+1)+`: -`+U.showPosition()+` -Expecting `+ft.join(", ")+", got '"+(this.terminals_[It]||It)+"'":le="Parse error on line "+(it+1)+": Unexpected "+(It==Q?"end of input":"'"+(this.terminals_[It]||It)+"'"),this.parseError(le,{text:U.match,token:this.terminals_[It]||It,line:U.yylineno,loc:P,expected:ft})}if(Rt[0]instanceof Array&&Rt.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Lt+", token: "+It);switch(Rt[0]){case 1:$.push(It),ut.push(U.yytext),W.push(U.yylloc),$.push(Rt[1]),It=null,Z=U.yyleng,K=U.yytext,it=U.yylineno,P=U.yylloc;break;case 
2:if(vt=this.productions_[Rt[1]][1],pt.$=ut[ut.length-vt],pt._$={first_line:W[W.length-(vt||1)].first_line,last_line:W[W.length-1].last_line,first_column:W[W.length-(vt||1)].first_column,last_column:W[W.length-1].last_column},et&&(pt._$.range=[W[W.length-(vt||1)].range[0],W[W.length-1].range[1]]),Ct=this.performAction.apply(pt,[K,Z,it,F.yy,Rt[1],ut,W].concat(q)),typeof Ct<"u")return Ct;vt&&($=$.slice(0,-1*vt*2),ut=ut.slice(0,-1*vt),W=W.slice(0,-1*vt)),$.push(this.productions_[Rt[1]][0]),ut.push(pt.$),W.push(pt._$),Tt=tt[$[$.length-2]][$[$.length-1]],$.push(Tt);break;case 3:return!0}}return!0}},z=function(){var ct={EOF:1,parseError:function(Y,$){if(this.yy.parser)this.yy.parser.parseError(Y,$);else throw new Error(Y)},setInput:function(J,Y){return this.yy=Y||this.yy||{},this._input=J,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var J=this._input[0];this.yytext+=J,this.yyleng++,this.offset++,this.match+=J,this.matched+=J;var Y=J.match(/(?:\r\n?|\n).*/g);return Y?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),J},unput:function(J){var Y=J.length,$=J.split(/(?:\r\n?|\n)/g);this._input=J+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-Y),this.offset-=Y;var lt=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),$.length-1&&(this.yylineno-=$.length-1);var ut=this.yylloc.range;return 
this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:$?($.length===lt.length?this.yylloc.first_column:0)+lt[lt.length-$.length].length-$[0].length:this.yylloc.first_column-Y},this.options.ranges&&(this.yylloc.range=[ut[0],ut[0]+this.yyleng-Y]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). -`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(J){this.unput(this.match.slice(J))},pastInput:function(){var J=this.matched.substr(0,this.matched.length-this.match.length);return(J.length>20?"...":"")+J.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var J=this.match;return J.length<20&&(J+=this._input.substr(0,20-J.length)),(J.substr(0,20)+(J.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var J=this.pastInput(),Y=new Array(J.length+1).join("-");return J+this.upcomingInput()+` -`+Y+"^"},test_match:function(J,Y){var 
$,lt,ut;if(this.options.backtrack_lexer&&(ut={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(ut.yylloc.range=this.yylloc.range.slice(0))),lt=J[0].match(/(?:\r\n?|\n).*/g),lt&&(this.yylineno+=lt.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:lt?lt[lt.length-1].length-lt[lt.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+J[0].length},this.yytext+=J[0],this.match+=J[0],this.matches=J,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(J[0].length),this.matched+=J[0],$=this.performAction.call(this,this.yy,this,Y,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),$)return $;if(this._backtrack){for(var W in ut)this[W]=ut[W];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var J,Y,$,lt;this._more||(this.yytext="",this.match="");for(var ut=this._currentRules(),W=0;W<ut.length;W++)if($=this._input.match(this.rules[ut[W]]),$&&(!Y||$[0].length>Y[0].length)){if(Y=$,lt=W,this.options.backtrack_lexer){if(J=this.test_match($,ut[W]),J!==!1)return J;if(this._backtrack){Y=!1;continue}else return!1}else if(!this.options.flex)break}return Y?(J=this.test_match(Y,ut[lt]),J!==!1?J:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var Y=this.next();return Y||this.lex()},begin:function(Y){this.conditionStack.push(Y)},popState:function(){var Y=this.conditionStack.length-1;return Y>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(Y){return Y=this.conditionStack.length-1-Math.abs(Y||0),Y>=0?this.conditionStack[Y]:"INITIAL"},pushState:function(Y){this.begin(Y)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(Y,$,lt,ut){switch(lt){case 0:return 41;case 1:return 42;case 2:return 43;case 3:return 44;case 4:return this.begin("open_directive"),50;case 5:return this.begin("type_directive"),51;case 6:return this.popState(),this.begin("arg_directive"),39;case 7:return this.popState(),this.popState(),53;case 8:return 52;case 9:break;case 10:break;case 11:return 5;case 12:break;case 13:break;case 14:break;case 15:break;case 16:return this.pushState("SCALE"),15;case 17:return 16;case 18:this.popState();break;case 19:return this.begin("acc_title"),31;case 20:return this.popState(),"acc_title_value";case 21:return this.begin("acc_descr"),33;case 22:return this.popState(),"acc_descr_value";case 23:this.begin("acc_descr_multiline");break;case 24:this.popState();break;case 25:return"acc_descr_multiline_value";case 26:this.pushState("STATE");break;case 27:return this.popState(),$.yytext=$.yytext.slice(0,-8).trim(),23;case 28:return this.popState(),$.yytext=$.yytext.slice(0,-8).trim(),24;case 29:return this.popState(),$.yytext=$.yytext.slice(0,-10).trim(),25;case 30:return this.popState(),$.yytext=$.yytext.slice(0,-8).trim(),23;case 31:return this.popState(),$.yytext=$.yytext.slice(0,-8).trim(),24;case 32:return 
this.popState(),$.yytext=$.yytext.slice(0,-10).trim(),25;case 33:return 41;case 34:return 42;case 35:return 43;case 36:return 44;case 37:this.begin("STATE_STRING");break;case 38:return this.popState(),this.pushState("STATE_ID"),"AS";case 39:return this.popState(),"ID";case 40:this.popState();break;case 41:return"STATE_DESCR";case 42:return 17;case 43:this.popState();break;case 44:return this.popState(),this.pushState("struct"),18;case 45:return this.popState(),19;case 46:break;case 47:return this.begin("NOTE"),27;case 48:return this.popState(),this.pushState("NOTE_ID"),48;case 49:return this.popState(),this.pushState("NOTE_ID"),49;case 50:this.popState(),this.pushState("FLOATING_NOTE");break;case 51:return this.popState(),this.pushState("FLOATING_NOTE_ID"),"AS";case 52:break;case 53:return"NOTE_TEXT";case 54:return this.popState(),"ID";case 55:return this.popState(),this.pushState("NOTE_TEXT"),22;case 56:return this.popState(),$.yytext=$.yytext.substr(2).trim(),29;case 57:return this.popState(),$.yytext=$.yytext.slice(0,-8).trim(),29;case 58:return 7;case 59:return 7;case 60:return 14;case 61:return 47;case 62:return 22;case 63:return $.yytext=$.yytext.trim(),12;case 64:return 13;case 65:return 26;case 66:return 5;case 
67:return"INVALID"}},rules:[/^(?:.*direction\s+TB[^\n]*)/i,/^(?:.*direction\s+BT[^\n]*)/i,/^(?:.*direction\s+RL[^\n]*)/i,/^(?:.*direction\s+LR[^\n]*)/i,/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:[\s]+)/i,/^(?:((?!\n)\s)+)/i,/^(?:#[^\n]*)/i,/^(?:%[^\n]*)/i,/^(?:scale\s+)/i,/^(?:\d+)/i,/^(?:\s+width\b)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:state\s+)/i,/^(?:.*<<fork>>)/i,/^(?:.*<<join>>)/i,/^(?:.*<<choice>>)/i,/^(?:.*\[\[fork\]\])/i,/^(?:.*\[\[join\]\])/i,/^(?:.*\[\[choice\]\])/i,/^(?:.*direction\s+TB[^\n]*)/i,/^(?:.*direction\s+BT[^\n]*)/i,/^(?:.*direction\s+RL[^\n]*)/i,/^(?:.*direction\s+LR[^\n]*)/i,/^(?:["])/i,/^(?:\s*as\s+)/i,/^(?:[^\n\{]*)/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[^\n\s\{]+)/i,/^(?:\n)/i,/^(?:\{)/i,/^(?:\})/i,/^(?:[\n])/i,/^(?:note\s+)/i,/^(?:left of\b)/i,/^(?:right of\b)/i,/^(?:")/i,/^(?:\s*as\s*)/i,/^(?:["])/i,/^(?:[^"]*)/i,/^(?:[^\n]*)/i,/^(?:\s*[^:\n\s\-]+)/i,/^(?:\s*:[^:\n;]+)/i,/^(?:[\s\S]*?end note\b)/i,/^(?:stateDiagram\s+)/i,/^(?:stateDiagram-v2\s+)/i,/^(?:hide empty 
description\b)/i,/^(?:\[\*\])/i,/^(?:[^:\n\s\-\{]+)/i,/^(?:\s*:[^:\n;]+)/i,/^(?:-->)/i,/^(?:--)/i,/^(?:$)/i,/^(?:.)/i],conditions:{LINE:{rules:[13,14],inclusive:!1},close_directive:{rules:[13,14],inclusive:!1},arg_directive:{rules:[7,8,13,14],inclusive:!1},type_directive:{rules:[6,7,13,14],inclusive:!1},open_directive:{rules:[5,13,14],inclusive:!1},struct:{rules:[13,14,26,33,34,35,36,45,46,47,61,62,63,64,65],inclusive:!1},FLOATING_NOTE_ID:{rules:[54],inclusive:!1},FLOATING_NOTE:{rules:[51,52,53],inclusive:!1},NOTE_TEXT:{rules:[56,57],inclusive:!1},NOTE_ID:{rules:[55],inclusive:!1},NOTE:{rules:[48,49,50],inclusive:!1},acc_descr_multiline:{rules:[24,25],inclusive:!1},acc_descr:{rules:[22],inclusive:!1},acc_title:{rules:[20],inclusive:!1},SCALE:{rules:[17,18],inclusive:!1},ALIAS:{rules:[],inclusive:!1},STATE_ID:{rules:[39],inclusive:!1},STATE_STRING:{rules:[40,41],inclusive:!1},FORK_STATE:{rules:[],inclusive:!1},STATE:{rules:[13,14,27,28,29,30,31,32,37,38,42,43,44],inclusive:!1},ID:{rules:[13,14],inclusive:!1},INITIAL:{rules:[0,1,2,3,4,9,10,11,12,14,15,16,19,21,23,26,44,47,58,59,60,61,62,63,64,66,67],inclusive:!0}}};return ct}();N.lexer=z;function X(){this.yy={}}return X.prototype=N,N.Parser=X,new X}();p0.parser=p0;const fat=(t,e)=>{var r;return((r=e==null?void 0:e.state)==null?void 0:r.defaultRenderer)==="dagre-wrapper"?!1:t.match(/^\s*stateDiagram/)!==null},dat=(t,e)=>{var r;return!!(t.match(/^\s*stateDiagram-v2/)!==null||t.match(/^\s*stateDiagram/)&&((r=e==null?void 0:e.state)==null?void 0:r.defaultRenderer)==="dagre-wrapper")},g0=t=>JSON.parse(JSON.stringify(t));let y0=[];const pat=function(t,e,r){Xe.parseDirective(this,t,e,r)},gat=t=>{H.info("Setting root doc",t),y0=t},yat=()=>y0,m0=(t,e,r)=>{if(e.stmt==="relation")m0(t,e.state1,!0),m0(t,e.state2,!1);else if(e.stmt==="state"&&e.id==="[*]"&&(e.id=r?t.id+"_start":t.id+"_end",e.start=r),e.doc){const n=[];let i=0,a=[];for(i=0;i<e.doc.length;i++)if(e.doc[i].type==="divider"){const 
s=g0(e.doc[i]);s.doc=g0(a),n.push(s),a=[]}else a.push(e.doc[i]);if(n.length>0&&a.length>0){const s={stmt:"state",id:ow(),type:"divider",doc:g0(a)};n.push(g0(s)),e.doc=n}e.doc.forEach(s=>m0(e,s,!0))}},mat=()=>(m0({id:"root"},{id:"root",doc:y0},!0),{id:"root",doc:y0}),bat=t=>{let e;t.doc?e=t.doc:e=t,H.info(e),LL(!0),H.info("Extract",e),e.forEach(r=>{r.stmt==="state"&&_0(r.id,r.type,r.doc,r.description,r.note),r.stmt==="relation"&&RL(r.state1.id,r.state2.id,r.description)})},ML=()=>({relations:[],states:{},documents:{}});let pu={root:ML()},Wr=pu.root,b0=0;const _0=function(t,e,r,n,i){typeof Wr.states[t]>"u"?Wr.states[t]={id:t,descriptions:[],type:e,doc:r,note:i}:(Wr.states[t].doc||(Wr.states[t].doc=r),Wr.states[t].type||(Wr.states[t].type=e)),n&&(H.info("Adding state ",t,n),typeof n=="string"&&IL(t,n.trim()),typeof n=="object"&&n.forEach(a=>IL(t,a.trim()))),i&&(Wr.states[t].note=i,Wr.states[t].note.text=pe.sanitizeText(Wr.states[t].note.text,nt()))},LL=function(t){pu={root:ML()},Wr=pu.root,Wr=pu.root,b0=0,BL=[],t||ci()},_at=function(t){return Wr.states[t]},vat=function(){return Wr.states},xat=function(){H.info("Documents = ",pu)},kat=function(){return Wr.relations},RL=function(t,e,r){let n=t,i=e,a="default",s="default";t==="[*]"&&(b0++,n="start"+b0,a="start"),e==="[*]"&&(i="end"+b0,s="end"),_0(n,a),_0(i,s),Wr.relations.push({id1:n,id2:i,title:pe.sanitizeText(r,nt())})},IL=function(t,e){const r=Wr.states[t];let n=e;n[0]===":"&&(n=n.substr(1).trim()),r.descriptions.push(pe.sanitizeText(n,nt()))},wat=function(t){return t.substring(0,1)===":"?t.substr(2).trim():t.trim()},Tat={LINE:0,DOTTED_LINE:1};let NL=0;const Eat=()=>(NL++,"divider-id-"+NL);let BL=[];const Cat=()=>BL;let DL="TB";const 
ma={parseDirective:pat,getConfig:()=>nt().state,addState:_0,clear:LL,getState:_at,getStates:vat,getRelations:kat,getClasses:Cat,getDirection:()=>DL,addRelation:RL,getDividerId:Eat,setDirection:t=>{DL=t},cleanupLabel:wat,lineType:Tat,relationType:{AGGREGATION:0,EXTENSION:1,COMPOSITION:2,DEPENDENCY:3},logDocuments:xat,getRootDoc:yat,setRootDoc:gat,getRootDocV2:mat,extract:bat,trimColon:t=>t&&t[0]===":"?t.substr(1).trim():t.trim(),getAccTitle:ui,setAccTitle:Yn,getAccDescription:fi,setAccDescription:hi},Sat=t=>t.append("circle").attr("class","start-state").attr("r",nt().state.sizeUnit).attr("cx",nt().state.padding+nt().state.sizeUnit).attr("cy",nt().state.padding+nt().state.sizeUnit),Aat=t=>t.append("line").style("stroke","grey").style("stroke-dasharray","3").attr("x1",nt().state.textHeight).attr("class","divider").attr("x2",nt().state.textHeight*2).attr("y1",0).attr("y2",0),Mat=(t,e)=>{const r=t.append("text").attr("x",2*nt().state.padding).attr("y",nt().state.textHeight+2*nt().state.padding).attr("font-size",nt().state.fontSize).attr("class","state-title").text(e.id),n=r.node().getBBox();return t.insert("rect",":first-child").attr("x",nt().state.padding).attr("y",nt().state.padding).attr("width",n.width+2*nt().state.padding).attr("height",n.height+2*nt().state.padding).attr("rx",nt().state.radius),r},Lat=(t,e)=>{const r=function(f,p,m){const _=f.append("tspan").attr("x",2*nt().state.padding).text(p);m||_.attr("dy",nt().state.textHeight)},i=t.append("text").attr("x",2*nt().state.padding).attr("y",nt().state.textHeight+1.3*nt().state.padding).attr("font-size",nt().state.fontSize).attr("class","state-title").text(e.descriptions[0]).node().getBBox(),a=i.height,s=t.append("text").attr("x",nt().state.padding).attr("y",a+nt().state.padding*.4+nt().state.dividerMargin+nt().state.textHeight).attr("class","state-description");let o=!0,l=!0;e.descriptions.forEach(function(f){o||(r(s,f,l),l=!1),o=!1});const 
u=t.append("line").attr("x1",nt().state.padding).attr("y1",nt().state.padding+a+nt().state.dividerMargin/2).attr("y2",nt().state.padding+a+nt().state.dividerMargin/2).attr("class","descr-divider"),h=s.node().getBBox(),d=Math.max(h.width,i.width);return u.attr("x2",d+3*nt().state.padding),t.insert("rect",":first-child").attr("x",nt().state.padding).attr("y",nt().state.padding).attr("width",d+2*nt().state.padding).attr("height",h.height+a+2*nt().state.padding).attr("rx",nt().state.radius),t},Rat=(t,e,r)=>{const n=nt().state.padding,i=2*nt().state.padding,a=t.node().getBBox(),s=a.width,o=a.x,l=t.append("text").attr("x",0).attr("y",nt().state.titleShift).attr("font-size",nt().state.fontSize).attr("class","state-title").text(e.id),h=l.node().getBBox().width+i;let d=Math.max(h,s);d===s&&(d=d+i);let f;const p=t.node().getBBox();e.doc,f=o-n,h>s&&(f=(s-d)/2+n),Math.abs(o-p.x)<n&&h>s&&(f=o-(h-s)/2);const m=1-nt().state.textHeight;return t.insert("rect",":first-child").attr("x",f).attr("y",m).attr("class",r?"alt-composit":"composit").attr("width",d).attr("height",p.height+nt().state.textHeight+nt().state.titleShift+1).attr("rx","0"),l.attr("x",f+n),h<=s&&l.attr("x",o+(d-i)/2-h/2+n),t.insert("rect",":first-child").attr("x",f).attr("y",nt().state.titleShift-nt().state.textHeight-nt().state.padding).attr("width",d).attr("height",nt().state.textHeight*3).attr("rx",nt().state.radius),t.insert("rect",":first-child").attr("x",f).attr("y",nt().state.titleShift-nt().state.textHeight-nt().state.padding).attr("width",d).attr("height",p.height+3+2*nt().state.textHeight).attr("rx",nt().state.radius),t},Iat=t=>(t.append("circle").attr("class","end-state-outer").attr("r",nt().state.sizeUnit+nt().state.miniPadding).attr("cx",nt().state.padding+nt().state.sizeUnit+nt().state.miniPadding).attr("cy",nt().state.padding+nt().state.sizeUnit+nt().state.miniPadding),t.append("circle").attr("class","end-state-inner").attr("r",nt().state.sizeUnit).attr("cx",nt().state.padding+nt().state.sizeUnit+2).att
r("cy",nt().state.padding+nt().state.sizeUnit+2)),Nat=(t,e)=>{let r=nt().state.forkWidth,n=nt().state.forkHeight;if(e.parentId){let i=r;r=n,n=i}return t.append("rect").style("stroke","black").style("fill","black").attr("width",r).attr("height",n).attr("x",nt().state.padding).attr("y",nt().state.padding)},Bat=(t,e,r,n)=>{let i=0;const a=n.append("text");a.style("text-anchor","start"),a.attr("class","noteText");let s=t.replace(/\r\n/g,"<br/>");s=s.replace(/\n/g,"<br/>");const o=s.split(pe.lineBreakRegex);let l=1.25*nt().state.noteMargin;for(const u of o){const h=u.trim();if(h.length>0){const d=a.append("tspan");d.text(h),l===0&&(l+=d.node().getBBox().height),i+=l,d.attr("x",e+nt().state.noteMargin),d.attr("y",r+i+1.25*nt().state.noteMargin)}}return{textWidth:a.node().getBBox().width,textHeight:i}},Dat=(t,e)=>{e.attr("class","state-note");const r=e.append("rect").attr("x",0).attr("y",nt().state.padding),n=e.append("g"),{textWidth:i,textHeight:a}=Bat(t,0,0,n);return r.attr("height",a+2*nt().state.noteMargin),r.attr("width",i+nt().state.noteMargin*2),r},OL=function(t,e){const r=e.id,n={id:r,label:e.id,width:0,height:0},i=t.append("g").attr("id",r).attr("class","stateGroup");e.type==="start"&&Sat(i),e.type==="end"&&Iat(i),(e.type==="fork"||e.type==="join")&&Nat(i,e),e.type==="note"&&Dat(e.note.text,i),e.type==="divider"&&Aat(i),e.type==="default"&&e.descriptions.length===0&&Mat(i,e),e.type==="default"&&e.descriptions.length>0&&Lat(i,e);const a=i.node().getBBox();return n.width=a.width+2*nt().state.padding,n.height=a.height+2*nt().state.padding,n};let FL=0;const Oat=function(t,e,r){const n=function(l){switch(l){case ma.relationType.AGGREGATION:return"aggregation";case ma.relationType.EXTENSION:return"extension";case ma.relationType.COMPOSITION:return"composition";case ma.relationType.DEPENDENCY:return"dependency"}};e.points=e.points.filter(l=>!Number.isNaN(l.y));const i=e.points,a=Ua().x(function(l){return l.x}).y(function(l){return 
l.y}).curve(Os),s=t.append("path").attr("d",a(i)).attr("id","edge"+FL).attr("class","transition");let o="";if(nt().state.arrowMarkerAbsolute&&(o=window.location.protocol+"//"+window.location.host+window.location.pathname+window.location.search,o=o.replace(/\(/g,"\\("),o=o.replace(/\)/g,"\\)")),s.attr("marker-end","url("+o+"#"+n(ma.relationType.DEPENDENCY)+"End)"),typeof r.title<"u"){const l=t.append("g").attr("class","stateLabel"),{x:u,y:h}=Se.calcLabelPosition(e.points),d=pe.getRows(r.title);let f=0;const p=[];let m=0,_=0;for(let x=0;x<=d.length;x++){const k=l.append("text").attr("text-anchor","middle").text(d[x]).attr("x",u).attr("y",h+f),T=k.node().getBBox();m=Math.max(m,T.width),_=Math.min(_,T.x),H.info(T.x,u,h+f),f===0&&(f=k.node().getBBox().height,H.info("Title height",f,h)),p.push(k)}let y=f*d.length;if(d.length>1){const x=(d.length-1)*f*.5;p.forEach((k,T)=>k.attr("y",h+T*f-x)),y=f*d.length}const b=l.node().getBBox();l.insert("rect",":first-child").attr("class","box").attr("x",u-m/2-nt().state.padding/2).attr("y",h-y/2-nt().state.padding/2-3.5).attr("width",m+nt().state.padding).attr("height",y+nt().state.padding),H.info(b)}FL++};let Mn;const J4={},Fat=function(){},Pat=function(t){t.append("defs").append("marker").attr("id","dependencyEnd").attr("refX",19).attr("refY",7).attr("markerWidth",20).attr("markerHeight",28).attr("orient","auto").append("path").attr("d","M 19,7 L9,13 L14,7 L9,1 Z")},qat=function(t,e,r,n){Mn=nt().state;const i=nt().securityLevel;let a;i==="sandbox"&&(a=St("#i"+e));const s=St(i==="sandbox"?a.nodes()[0].contentDocument.body:"body"),o=i==="sandbox"?a.nodes()[0].contentDocument:document;H.debug("Rendering diagram "+t);const l=s.select(`[id='${e}']`);Pat(l),new cr.Graph({multigraph:!0,compound:!0,rankdir:"RL"}).setDefaultEdgeLabel(function(){return{}});const h=n.db.getRootDoc();PL(h,l,void 0,!1,s,o,n);const d=Mn.padding,f=l.node().getBBox(),p=f.width+d*2,m=f.height+d*2,_=p*1.75;li(l,m,_,Mn.useMaxWidth),l.attr("viewBox",`${f.x-Mn.padding} 
${f.y-Mn.padding} `+p+" "+m),bn(n.db,l,e)},Vat=t=>t?t.length*Mn.fontSizeFactor:1,PL=(t,e,r,n,i,a,s)=>{const o=new cr.Graph({compound:!0,multigraph:!0});let l,u=!0;for(l=0;l<t.length;l++)if(t[l].stmt==="relation"){u=!1;break}r?o.setGraph({rankdir:"LR",multigraph:!0,compound:!0,ranker:"tight-tree",ranksep:u?1:Mn.edgeLengthFactor,nodeSep:u?1:50,isMultiGraph:!0}):o.setGraph({rankdir:"TB",multigraph:!0,compound:!0,ranksep:u?1:Mn.edgeLengthFactor,nodeSep:u?1:50,ranker:"tight-tree",isMultiGraph:!0}),o.setDefaultEdgeLabel(function(){return{}}),s.db.extract(t);const h=s.db.getStates(),d=s.db.getRelations(),f=Object.keys(h);for(let b=0;b<f.length;b++){const x=h[f[b]];r&&(x.parentId=r);let k;if(x.doc){let T=e.append("g").attr("id",x.id).attr("class","stateGroup");k=PL(x.doc,T,x.id,!n,i,a,s);{T=Rat(T,x,n);let C=T.node().getBBox();k.width=C.width,k.height=C.height+Mn.padding/2,J4[x.id]={y:Mn.compositTitleSize}}}else k=OL(e,x);if(x.note){const T={descriptions:[],id:x.id+"-note",note:x.note,type:"note"},C=OL(e,T);x.note.position==="left of"?(o.setNode(k.id+"-note",C),o.setNode(k.id,k)):(o.setNode(k.id,k),o.setNode(k.id+"-note",C)),o.setParent(k.id,k.id+"-group"),o.setParent(k.id+"-note",k.id+"-group")}else o.setNode(k.id,k)}H.debug("Count=",o.nodeCount(),o);let p=0;d.forEach(function(b){p++,H.debug("Setting edge",b),o.setEdge(b.id1,b.id2,{relation:b,width:Vat(b.title),height:Mn.labelHeight*pe.getRows(b.title).length,labelpos:"c"},"id"+p)}),Zc.layout(o),H.debug("Graph after layout",o.nodes());const m=e.node();o.nodes().forEach(function(b){typeof b<"u"&&typeof o.node(b)<"u"?(H.warn("Node "+b+": "+JSON.stringify(o.node(b))),i.select("#"+m.id+" #"+b).attr("transform","translate("+(o.node(b).x-o.node(b).width/2)+","+(o.node(b).y+(J4[b]?J4[b].y:0)-o.node(b).height/2)+" )"),i.select("#"+m.id+" #"+b).attr("data-x-shift",o.node(b).x-o.node(b).width/2),a.querySelectorAll("#"+m.id+" #"+b+" .divider").forEach(k=>{const T=k.parentElement;let 
C=0,M=0;T&&(T.parentElement&&(C=T.parentElement.getBBox().width),M=parseInt(T.getAttribute("data-x-shift"),10),Number.isNaN(M)&&(M=0)),k.setAttribute("x1",0-M+8),k.setAttribute("x2",C-M-8)})):H.debug("No Node "+b+": "+JSON.stringify(o.node(b)))});let _=m.getBBox();o.edges().forEach(function(b){typeof b<"u"&&typeof o.edge(b)<"u"&&(H.debug("Edge "+b.v+" -> "+b.w+": "+JSON.stringify(o.edge(b))),Oat(e,o.edge(b),o.edge(b).relation))}),_=m.getBBox();const y={id:r||"root",label:r||"root",width:0,height:0};return y.width=_.width+2*Mn.padding,y.height=_.height+2*Mn.padding,H.debug("Doc rendered",y,o),y},zat={setConf:Fat,draw:qat},Yat={},Uat=function(t){const e=Object.keys(t);for(let r=0;r<e.length;r++)Yat[e[r]]=t[e[r]]};let Fe={};const Wat=function(t,e){return H.trace("Extracting classes"),e.sb.clear(),e.parser.parse(t),e.sb.getClasses()},v0=(t,e,r,n)=>{if(r.id!=="root"){let i="rect";r.start===!0&&(i="start"),r.start===!1&&(i="end"),r.type!=="default"&&(i=r.type),Fe[r.id]||(Fe[r.id]={id:r.id,shape:i,description:pe.sanitizeText(r.id,nt()),classes:"statediagram-state"}),r.description&&(Array.isArray(Fe[r.id].description)?(Fe[r.id].shape="rectWithTitle",Fe[r.id].description.push(r.description)):Fe[r.id].description.length>0?(Fe[r.id].shape="rectWithTitle",Fe[r.id].description===r.id?Fe[r.id].description=[r.description]:Fe[r.id].description=[Fe[r.id].description,r.description]):(Fe[r.id].shape="rect",Fe[r.id].description=r.description),Fe[r.id].description=pe.sanitizeTextOrArray(Fe[r.id].description,nt())),Fe[r.id].description.length===1&&Fe[r.id].shape==="rectWithTitle"&&(Fe[r.id].shape="rect"),!Fe[r.id].type&&r.doc&&(H.info("Setting cluster for ",r.id,t_(r)),Fe[r.id].type="group",Fe[r.id].dir=t_(r),Fe[r.id].shape=r.type==="divider"?"divider":"roundedWithTitle",Fe[r.id].classes=Fe[r.id].classes+" "+(n?"statediagram-cluster statediagram-cluster-alt":"statediagram-cluster"));const 
a={labelStyle:"",shape:Fe[r.id].shape,labelText:Fe[r.id].description,classes:Fe[r.id].classes,style:"",id:r.id,dir:Fe[r.id].dir,domId:"state-"+r.id+"-"+is,type:Fe[r.id].type,padding:15};if(r.note){const s={labelStyle:"",shape:"note",labelText:r.note.text,classes:"statediagram-note",style:"",id:r.id+"----note-"+is,domId:"state-"+r.id+"----note-"+is,type:Fe[r.id].type,padding:15},o={labelStyle:"",shape:"noteGroup",labelText:r.note.text,classes:Fe[r.id].classes,style:"",id:r.id+"----parent",domId:"state-"+r.id+"----parent-"+is,type:"group",padding:0};is++,t.setNode(r.id+"----parent",o),t.setNode(s.id,s),t.setNode(r.id,a),t.setParent(r.id,r.id+"----parent"),t.setParent(s.id,r.id+"----parent");let l=r.id,u=s.id;r.note.position==="left of"&&(l=s.id,u=r.id),t.setEdge(l,u,{arrowhead:"none",arrowType:"",style:"fill:none",labelStyle:"",classes:"transition note-edge",arrowheadStyle:"fill: #333",labelpos:"c",labelType:"text",thickness:"normal"})}else t.setNode(r.id,a)}e&&e.id!=="root"&&(H.trace("Setting node ",r.id," to be child of its parent ",e.id),t.setParent(r.id,e.id)),r.doc&&(H.trace("Adding nodes children "),Hat(t,r,r.doc,!n))};let is=0;const Hat=(t,e,r,n)=>{H.trace("items",r),r.forEach(i=>{if(i.stmt==="state"||i.stmt==="default")v0(t,e,i,n);else if(i.stmt==="relation"){v0(t,e,i.state1,n),v0(t,e,i.state2,n);const a={id:"edge"+is,arrowhead:"normal",arrowTypeEnd:"arrow_barb",style:"fill:none",labelStyle:"",label:pe.sanitizeText(i.description,nt()),arrowheadStyle:"fill: #333",labelpos:"c",labelType:"text",thickness:"normal",classes:"transition"};let s=i.state1.id,o=i.state2.id;t.setEdge(s,o,a,is),is++}})},t_=(t,e)=>{let r=e||"TB";if(t.doc)for(let n=0;n<t.doc.length;n++){const i=t.doc[n];i.stmt==="dir"&&(r=i.value)}return r},Gat={setConf:Uat,getClasses:Wat,draw:function(t,e,r,n){H.info("Drawing state diagram 
(v2)",e),Fe={},n.db.getDirection();const{securityLevel:i,state:a}=nt(),s=a.nodeSpacing||50,o=a.rankSpacing||50;H.info(n.db.getRootDocV2()),n.db.extract(n.db.getRootDocV2()),H.info(n.db.getRootDocV2());const l=new cr.Graph({multigraph:!0,compound:!0}).setGraph({rankdir:t_(n.db.getRootDocV2()),nodesep:s,ranksep:o,marginx:8,marginy:8}).setDefaultEdgeLabel(function(){return{}});v0(l,void 0,n.db.getRootDocV2(),!0);let u;i==="sandbox"&&(u=St("#i"+e));const h=St(i==="sandbox"?u.nodes()[0].contentDocument.body:"body"),d=h.select(`[id="${e}"]`),f=h.select("#"+e+" g");i4(f,l,["barb"],"statediagram",e);const p=8,m=d.node().getBBox(),_=m.width+p*2,y=m.height+p*2;d.attr("class","statediagram");const b=d.node().getBBox();li(d,y,_,a.useMaxWidth);const x=`${b.x-p} ${b.y-p} ${_} ${y}`;H.debug(`viewBox ${x}`),d.attr("viewBox",x);const k=document.querySelectorAll('[id="'+e+'"] .edgeLabel .label');for(let T=0;T<k.length;T++){const C=k[T],M=C.getBBox(),S=document.createElementNS("http://www.w3.org/2000/svg","rect");S.setAttribute("rx",0),S.setAttribute("ry",0),S.setAttribute("width",M.width),S.setAttribute("height",M.height),C.insertBefore(S,C.firstChild)}bn(n.db,d,e)}};var e_=function(){var t=function(_,y,b,x){for(b=b||{},x=_.length;x--;b[_[x]]=y);return 
b},e=[1,2],r=[1,5],n=[6,9,11,17,18,20,22,23,24,26],i=[1,15],a=[1,16],s=[1,17],o=[1,18],l=[1,19],u=[1,20],h=[1,24],d=[4,6,9,11,17,18,20,22,23,24,26],f={trace:function(){},yy:{},symbols_:{error:2,start:3,journey:4,document:5,EOF:6,directive:7,line:8,SPACE:9,statement:10,NEWLINE:11,openDirective:12,typeDirective:13,closeDirective:14,":":15,argDirective:16,title:17,acc_title:18,acc_title_value:19,acc_descr:20,acc_descr_value:21,acc_descr_multiline_value:22,section:23,taskName:24,taskData:25,open_directive:26,type_directive:27,arg_directive:28,close_directive:29,$accept:0,$end:1},terminals_:{2:"error",4:"journey",6:"EOF",9:"SPACE",11:"NEWLINE",15:":",17:"title",18:"acc_title",19:"acc_title_value",20:"acc_descr",21:"acc_descr_value",22:"acc_descr_multiline_value",23:"section",24:"taskName",25:"taskData",26:"open_directive",27:"type_directive",28:"arg_directive",29:"close_directive"},productions_:[0,[3,3],[3,2],[5,0],[5,2],[8,2],[8,1],[8,1],[8,1],[7,4],[7,6],[10,1],[10,2],[10,2],[10,1],[10,1],[10,2],[10,1],[12,1],[13,1],[16,1],[14,1]],performAction:function(y,b,x,k,T,C,M){var S=C.length-1;switch(T){case 1:return C[S-1];case 3:this.$=[];break;case 4:C[S-1].push(C[S]),this.$=C[S-1];break;case 5:case 6:this.$=C[S];break;case 7:case 8:this.$=[];break;case 11:k.setDiagramTitle(C[S].substr(6)),this.$=C[S].substr(6);break;case 12:this.$=C[S].trim(),k.setAccTitle(this.$);break;case 13:case 14:this.$=C[S].trim(),k.setAccDescription(this.$);break;case 15:k.addSection(C[S].substr(8)),this.$=C[S].substr(8);break;case 16:k.addTask(C[S-1],C[S]),this.$="task";break;case 18:k.parseDirective("%%{","open_directive");break;case 19:k.parseDirective(C[S],"type_directive");break;case 20:C[S]=C[S].trim().replace(/'/g,'"'),k.parseDirective(C[S],"arg_directive");break;case 
21:k.parseDirective("}%%","close_directive","journey");break}},table:[{3:1,4:e,7:3,12:4,26:r},{1:[3]},t(n,[2,3],{5:6}),{3:7,4:e,7:3,12:4,26:r},{13:8,27:[1,9]},{27:[2,18]},{6:[1,10],7:21,8:11,9:[1,12],10:13,11:[1,14],12:4,17:i,18:a,20:s,22:o,23:l,24:u,26:r},{1:[2,2]},{14:22,15:[1,23],29:h},t([15,29],[2,19]),t(n,[2,8],{1:[2,1]}),t(n,[2,4]),{7:21,10:25,12:4,17:i,18:a,20:s,22:o,23:l,24:u,26:r},t(n,[2,6]),t(n,[2,7]),t(n,[2,11]),{19:[1,26]},{21:[1,27]},t(n,[2,14]),t(n,[2,15]),{25:[1,28]},t(n,[2,17]),{11:[1,29]},{16:30,28:[1,31]},{11:[2,21]},t(n,[2,5]),t(n,[2,12]),t(n,[2,13]),t(n,[2,16]),t(d,[2,9]),{14:32,29:h},{29:[2,20]},{11:[1,33]},t(d,[2,10])],defaultActions:{5:[2,18],7:[2,2],24:[2,21],31:[2,20]},parseError:function(y,b){if(b.recoverable)this.trace(y);else{var x=new Error(y);throw x.hash=b,x}},parse:function(y){var b=this,x=[0],k=[],T=[null],C=[],M=this.table,S="",R=0,A=0,L=2,v=1,B=C.slice.call(arguments,1),w=Object.create(this.lexer),D={yy:{}};for(var N in this.yy)Object.prototype.hasOwnProperty.call(this.yy,N)&&(D.yy[N]=this.yy[N]);w.setInput(y,D.yy),D.yy.lexer=w,D.yy.parser=this,typeof w.yylloc>"u"&&(w.yylloc={});var z=w.yylloc;C.push(z);var X=w.options&&w.options.ranges;typeof D.yy.parseError=="function"?this.parseError=D.yy.parseError:this.parseError=Object.getPrototypeOf(this).parseError;function ct(){var V;return V=k.pop()||w.lex()||v,typeof V!="number"&&(V instanceof Array&&(k=V,V=k.pop()),V=b.symbols_[V]||V),V}for(var J,Y,$,lt,ut={},W,tt,K,it;;){if(Y=x[x.length-1],this.defaultActions[Y]?$=this.defaultActions[Y]:((J===null||typeof J>"u")&&(J=ct()),$=M[Y]&&M[Y][J]),typeof $>"u"||!$.length||!$[0]){var Z="";it=[];for(W in M[Y])this.terminals_[W]&&W>L&&it.push("'"+this.terminals_[W]+"'");w.showPosition?Z="Parse error on line "+(R+1)+`: -`+w.showPosition()+` -Expecting `+it.join(", ")+", got '"+(this.terminals_[J]||J)+"'":Z="Parse error on line "+(R+1)+": Unexpected "+(J==v?"end of 
input":"'"+(this.terminals_[J]||J)+"'"),this.parseError(Z,{text:w.match,token:this.terminals_[J]||J,line:w.yylineno,loc:z,expected:it})}if($[0]instanceof Array&&$.length>1)throw new Error("Parse Error: multiple actions possible at state: "+Y+", token: "+J);switch($[0]){case 1:x.push(J),T.push(w.yytext),C.push(w.yylloc),x.push($[1]),J=null,A=w.yyleng,S=w.yytext,R=w.yylineno,z=w.yylloc;break;case 2:if(tt=this.productions_[$[1]][1],ut.$=T[T.length-tt],ut._$={first_line:C[C.length-(tt||1)].first_line,last_line:C[C.length-1].last_line,first_column:C[C.length-(tt||1)].first_column,last_column:C[C.length-1].last_column},X&&(ut._$.range=[C[C.length-(tt||1)].range[0],C[C.length-1].range[1]]),lt=this.performAction.apply(ut,[S,A,R,D.yy,$[1],T,C].concat(B)),typeof lt<"u")return lt;tt&&(x=x.slice(0,-1*tt*2),T=T.slice(0,-1*tt),C=C.slice(0,-1*tt)),x.push(this.productions_[$[1]][0]),T.push(ut.$),C.push(ut._$),K=M[x[x.length-2]][x[x.length-1]],x.push(K);break;case 3:return!0}}return!0}},p=function(){var _={EOF:1,parseError:function(b,x){if(this.yy.parser)this.yy.parser.parseError(b,x);else throw new Error(b)},setInput:function(y,b){return this.yy=b||this.yy||{},this._input=y,this._more=this._backtrack=this.done=!1,this.yylineno=this.yyleng=0,this.yytext=this.matched=this.match="",this.conditionStack=["INITIAL"],this.yylloc={first_line:1,first_column:0,last_line:1,last_column:0},this.options.ranges&&(this.yylloc.range=[0,0]),this.offset=0,this},input:function(){var y=this._input[0];this.yytext+=y,this.yyleng++,this.offset++,this.match+=y,this.matched+=y;var b=y.match(/(?:\r\n?|\n).*/g);return b?(this.yylineno++,this.yylloc.last_line++):this.yylloc.last_column++,this.options.ranges&&this.yylloc.range[1]++,this._input=this._input.slice(1),y},unput:function(y){var b=y.length,x=y.split(/(?:\r\n?|\n)/g);this._input=y+this._input,this.yytext=this.yytext.substr(0,this.yytext.length-b),this.offset-=b;var 
k=this.match.split(/(?:\r\n?|\n)/g);this.match=this.match.substr(0,this.match.length-1),this.matched=this.matched.substr(0,this.matched.length-1),x.length-1&&(this.yylineno-=x.length-1);var T=this.yylloc.range;return this.yylloc={first_line:this.yylloc.first_line,last_line:this.yylineno+1,first_column:this.yylloc.first_column,last_column:x?(x.length===k.length?this.yylloc.first_column:0)+k[k.length-x.length].length-x[0].length:this.yylloc.first_column-b},this.options.ranges&&(this.yylloc.range=[T[0],T[0]+this.yyleng-b]),this.yyleng=this.yytext.length,this},more:function(){return this._more=!0,this},reject:function(){if(this.options.backtrack_lexer)this._backtrack=!0;else return this.parseError("Lexical error on line "+(this.yylineno+1)+`. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true). -`+this.showPosition(),{text:"",token:null,line:this.yylineno});return this},less:function(y){this.unput(this.match.slice(y))},pastInput:function(){var y=this.matched.substr(0,this.matched.length-this.match.length);return(y.length>20?"...":"")+y.substr(-20).replace(/\n/g,"")},upcomingInput:function(){var y=this.match;return y.length<20&&(y+=this._input.substr(0,20-y.length)),(y.substr(0,20)+(y.length>20?"...":"")).replace(/\n/g,"")},showPosition:function(){var y=this.pastInput(),b=new Array(y.length+1).join("-");return y+this.upcomingInput()+` -`+b+"^"},test_match:function(y,b){var 
x,k,T;if(this.options.backtrack_lexer&&(T={yylineno:this.yylineno,yylloc:{first_line:this.yylloc.first_line,last_line:this.last_line,first_column:this.yylloc.first_column,last_column:this.yylloc.last_column},yytext:this.yytext,match:this.match,matches:this.matches,matched:this.matched,yyleng:this.yyleng,offset:this.offset,_more:this._more,_input:this._input,yy:this.yy,conditionStack:this.conditionStack.slice(0),done:this.done},this.options.ranges&&(T.yylloc.range=this.yylloc.range.slice(0))),k=y[0].match(/(?:\r\n?|\n).*/g),k&&(this.yylineno+=k.length),this.yylloc={first_line:this.yylloc.last_line,last_line:this.yylineno+1,first_column:this.yylloc.last_column,last_column:k?k[k.length-1].length-k[k.length-1].match(/\r?\n?/)[0].length:this.yylloc.last_column+y[0].length},this.yytext+=y[0],this.match+=y[0],this.matches=y,this.yyleng=this.yytext.length,this.options.ranges&&(this.yylloc.range=[this.offset,this.offset+=this.yyleng]),this._more=!1,this._backtrack=!1,this._input=this._input.slice(y[0].length),this.matched+=y[0],x=this.performAction.call(this,this.yy,this,b,this.conditionStack[this.conditionStack.length-1]),this.done&&this._input&&(this.done=!1),x)return x;if(this._backtrack){for(var C in T)this[C]=T[C];return!1}return!1},next:function(){if(this.done)return this.EOF;this._input||(this.done=!0);var y,b,x,k;this._more||(this.yytext="",this.match="");for(var T=this._currentRules(),C=0;C<T.length;C++)if(x=this._input.match(this.rules[T[C]]),x&&(!b||x[0].length>b[0].length)){if(b=x,k=C,this.options.backtrack_lexer){if(y=this.test_match(x,T[C]),y!==!1)return y;if(this._backtrack){b=!1;continue}else return!1}else if(!this.options.flex)break}return b?(y=this.test_match(b,T[k]),y!==!1?y:!1):this._input===""?this.EOF:this.parseError("Lexical error on line "+(this.yylineno+1)+`. Unrecognized text. 
-`+this.showPosition(),{text:"",token:null,line:this.yylineno})},lex:function(){var b=this.next();return b||this.lex()},begin:function(b){this.conditionStack.push(b)},popState:function(){var b=this.conditionStack.length-1;return b>0?this.conditionStack.pop():this.conditionStack[0]},_currentRules:function(){return this.conditionStack.length&&this.conditionStack[this.conditionStack.length-1]?this.conditions[this.conditionStack[this.conditionStack.length-1]].rules:this.conditions.INITIAL.rules},topState:function(b){return b=this.conditionStack.length-1-Math.abs(b||0),b>=0?this.conditionStack[b]:"INITIAL"},pushState:function(b){this.begin(b)},stateStackSize:function(){return this.conditionStack.length},options:{"case-insensitive":!0},performAction:function(b,x,k,T){switch(k){case 0:return this.begin("open_directive"),26;case 1:return this.begin("type_directive"),27;case 2:return this.popState(),this.begin("arg_directive"),15;case 3:return this.popState(),this.popState(),29;case 4:return 28;case 5:break;case 6:break;case 7:return 11;case 8:break;case 9:break;case 10:return 4;case 11:return 17;case 12:return this.begin("acc_title"),18;case 13:return this.popState(),"acc_title_value";case 14:return this.begin("acc_descr"),20;case 15:return this.popState(),"acc_descr_value";case 16:this.begin("acc_descr_multiline");break;case 17:this.popState();break;case 18:return"acc_descr_multiline_value";case 19:return 23;case 20:return 24;case 21:return 25;case 22:return 15;case 23:return 6;case 
24:return"INVALID"}},rules:[/^(?:%%\{)/i,/^(?:((?:(?!\}%%)[^:.])*))/i,/^(?::)/i,/^(?:\}%%)/i,/^(?:((?:(?!\}%%).|\n)*))/i,/^(?:%(?!\{)[^\n]*)/i,/^(?:[^\}]%%[^\n]*)/i,/^(?:[\n]+)/i,/^(?:\s+)/i,/^(?:#[^\n]*)/i,/^(?:journey\b)/i,/^(?:title\s[^#\n;]+)/i,/^(?:accTitle\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*:\s*)/i,/^(?:(?!\n||)*[^\n]*)/i,/^(?:accDescr\s*\{\s*)/i,/^(?:[\}])/i,/^(?:[^\}]*)/i,/^(?:section\s[^#:\n;]+)/i,/^(?:[^#:\n;]+)/i,/^(?::[^#\n;]+)/i,/^(?::)/i,/^(?:$)/i,/^(?:.)/i],conditions:{open_directive:{rules:[1],inclusive:!1},type_directive:{rules:[2,3],inclusive:!1},arg_directive:{rules:[3,4],inclusive:!1},acc_descr_multiline:{rules:[17,18],inclusive:!1},acc_descr:{rules:[15],inclusive:!1},acc_title:{rules:[13],inclusive:!1},INITIAL:{rules:[0,5,6,7,8,9,10,11,12,14,16,19,20,21,22,23,24],inclusive:!0}}};return _}();f.lexer=p;function m(){this.yy={}}return m.prototype=f,f.Parser=m,new m}();e_.parser=e_;const jat=t=>t.match(/^\s*journey/)!==null;let pl="";const r_=[],gu=[],gl=[],$at=function(t,e,r){Xe.parseDirective(this,t,e,r)},Xat=function(){r_.length=0,gu.length=0,pl="",gl.length=0,ci()},Kat=function(t){pl=t,r_.push(t)},Zat=function(){return r_},Qat=function(){let t=qL();const e=100;let r=0;for(;!t&&r<e;)t=qL(),r++;return gu.push(...gl),gu},Jat=function(){const t=[];return gu.forEach(r=>{r.people&&t.push(...r.people)}),[...new Set(t)].sort()},tst=function(t,e){const r=e.substr(1).split(":");let n=0,i=[];r.length===1?(n=Number(r[0]),i=[]):(n=Number(r[0]),i=r[1].split(","));const a=i.map(o=>o.trim()),s={section:pl,type:pl,people:a,task:t,score:n};gl.push(s)},est=function(t){const e={section:pl,type:pl,description:t,task:t,classes:[]};gu.push(e)},qL=function(){const t=function(r){return gl[r].processed};let e=!0;for(let r=0;r<gl.length;r++)t(r),e=e&&gl[r].processed;return 
e},VL={parseDirective:$at,getConfig:()=>nt().journey,clear:Xat,setDiagramTitle:c1,getDiagramTitle:u1,setAccTitle:Yn,getAccTitle:ui,setAccDescription:hi,getAccDescription:fi,addSection:Kat,getSections:Zat,getTasks:Qat,addTask:tst,addTaskOrg:est,getActors:function(){return Jat()}},x0=function(t,e){const r=t.append("rect");return r.attr("x",e.x),r.attr("y",e.y),r.attr("fill",e.fill),r.attr("stroke",e.stroke),r.attr("width",e.width),r.attr("height",e.height),r.attr("rx",e.rx),r.attr("ry",e.ry),typeof e.class<"u"&&r.attr("class",e.class),r},rst=function(t,e){const n=t.append("circle").attr("cx",e.cx).attr("cy",e.cy).attr("class","face").attr("r",15).attr("stroke-width",2).attr("overflow","visible"),i=t.append("g");i.append("circle").attr("cx",e.cx-15/3).attr("cy",e.cy-15/3).attr("r",1.5).attr("stroke-width",2).attr("fill","#666").attr("stroke","#666"),i.append("circle").attr("cx",e.cx+15/3).attr("cy",e.cy-15/3).attr("r",1.5).attr("stroke-width",2).attr("fill","#666").attr("stroke","#666");function a(l){const u=yf().startAngle(Math.PI/2).endAngle(3*(Math.PI/2)).innerRadius(7.5).outerRadius(6.8181818181818175);l.append("path").attr("class","mouth").attr("d",u).attr("transform","translate("+e.cx+","+(e.cy+2)+")")}function s(l){const u=yf().startAngle(3*Math.PI/2).endAngle(5*(Math.PI/2)).innerRadius(7.5).outerRadius(6.8181818181818175);l.append("path").attr("class","mouth").attr("d",u).attr("transform","translate("+e.cx+","+(e.cy+7)+")")}function o(l){l.append("line").attr("class","mouth").attr("stroke",2).attr("x1",e.cx-5).attr("y1",e.cy+7).attr("x2",e.cx+5).attr("y2",e.cy+7).attr("class","mouth").attr("stroke-width","1px").attr("stroke","#666")}return e.score>3?a(i):e.score<3?s(i):o(i),n},zL=function(t,e){const r=t.append("circle");return r.attr("cx",e.cx),r.attr("cy",e.cy),r.attr("class","actor-"+e.pos),r.attr("fill",e.fill),r.attr("stroke",e.stroke),r.attr("r",e.r),typeof r.class<"u"&&r.attr("class",r.class),typeof 
e.title<"u"&&r.append("title").text(e.title),r},YL=function(t,e){const r=e.text.replace(/<br\s*\/?>/gi," "),n=t.append("text");n.attr("x",e.x),n.attr("y",e.y),n.attr("class","legend"),n.style("text-anchor",e.anchor),typeof e.class<"u"&&n.attr("class",e.class);const i=n.append("tspan");return i.attr("x",e.x+e.textMargin*2),i.text(r),n},nst=function(t,e){function r(i,a,s,o,l){return i+","+a+" "+(i+s)+","+a+" "+(i+s)+","+(a+o-l)+" "+(i+s-l*1.2)+","+(a+o)+" "+i+","+(a+o)}const n=t.append("polygon");n.attr("points",r(e.x,e.y,50,20,7)),n.attr("class","labelBox"),e.y=e.y+e.labelMargin,e.x=e.x+.5*e.labelMargin,YL(t,e)},ist=function(t,e,r){const n=t.append("g"),i=n_();i.x=e.x,i.y=e.y,i.fill=e.fill,i.width=r.width,i.height=r.height,i.class="journey-section section-type-"+e.num,i.rx=3,i.ry=3,x0(n,i),WL(r)(e.text,n,i.x,i.y,i.width,i.height,{class:"journey-section section-type-"+e.num},r,e.colour)};let UL=-1;const ast=function(t,e,r){const n=e.x+r.width/2,i=t.append("g");UL++;const a=300+5*30;i.append("line").attr("id","task"+UL).attr("x1",n).attr("y1",e.y).attr("x2",n).attr("y2",a).attr("class","task-line").attr("stroke-width","1px").attr("stroke-dasharray","4 2").attr("stroke","#666"),rst(i,{cx:n,cy:300+(5-e.score)*30,score:e.score});const s=n_();s.x=e.x,s.y=e.y,s.fill=e.fill,s.width=r.width,s.height=r.height,s.class="task task-type-"+e.num,s.rx=3,s.ry=3,x0(i,s);let o=e.x+14;e.people.forEach(l=>{const u=e.actors[l].color,h={cx:o,cy:e.y,r:7,fill:u,stroke:"#000",title:l,pos:e.actors[l].position};zL(i,h),o+=10}),WL(r)(e.task,i,s.x,s.y,s.width,s.height,{class:"task"},r,e.colour)},sst=function(t,e){x0(t,{x:e.startx,y:e.starty,width:e.stopx-e.startx,height:e.stopy-e.starty,fill:e.fill,class:"rect"}).lower()},ost=function(){return{x:0,y:0,fill:void 0,"text-anchor":"start",width:100,height:100,textMargin:0,rx:0,ry:0}},n_=function(){return{x:0,y:0,width:100,anchor:"start",height:100,rx:0,ry:0}},WL=function(){function t(i,a,s,o,l,u,h,d){const 
f=a.append("text").attr("x",s+l/2).attr("y",o+u/2+5).style("font-color",d).style("text-anchor","middle").text(i);n(f,h)}function e(i,a,s,o,l,u,h,d,f){const{taskFontSize:p,taskFontFamily:m}=d,_=i.split(/<br\s*\/?>/gi);for(let y=0;y<_.length;y++){const b=y*p-p*(_.length-1)/2,x=a.append("text").attr("x",s+l/2).attr("y",o).attr("fill",f).style("text-anchor","middle").style("font-size",p).style("font-family",m);x.append("tspan").attr("x",s+l/2).attr("dy",b).text(_[y]),x.attr("y",o+u/2).attr("dominant-baseline","central").attr("alignment-baseline","central"),n(x,h)}}function r(i,a,s,o,l,u,h,d){const f=a.append("switch"),m=f.append("foreignObject").attr("x",s).attr("y",o).attr("width",l).attr("height",u).attr("position","fixed").append("xhtml:div").style("display","table").style("height","100%").style("width","100%");m.append("div").attr("class","label").style("display","table-cell").style("text-align","center").style("vertical-align","middle").text(i),e(i,f,s,o,l,u,h,d),n(m,h)}function n(i,a){for(const s in a)s in a&&i.attr(s,a[s])}return function(i){return i.textPlacement==="fo"?r:i.textPlacement==="old"?t:e}}(),yu={drawRect:x0,drawCircle:zL,drawSection:ist,drawText:YL,drawLabel:nst,drawTask:ast,drawBackgroundRect:sst,getTextObj:ost,getNoteRect:n_,initGraphics:function(t){t.append("defs").append("marker").attr("id","arrowhead").attr("refX",5).attr("refY",2).attr("markerWidth",6).attr("markerHeight",4).attr("orient","auto").append("path").attr("d","M 0,0 V 4 L6,2 Z")}},lst=function(t){Object.keys(t).forEach(function(r){k0[r]=t[r]})},ba={};function cst(t){const e=nt().journey;let r=60;Object.keys(ba).forEach(n=>{const i=ba[n].color,a={cx:20,cy:r,r:7,fill:i,stroke:"#000",pos:ba[n].position};yu.drawCircle(t,a);const s={x:40,y:r+7,fill:"#666",text:n,textMargin:e.boxTextMargin|5};yu.drawText(t,s),r+=20})}const k0=nt().journey,$s=k0.leftMargin,ust=function(t,e,r,n){const i=nt().journey;n.db.clear(),n.parser.parse(t+` -`);const a=nt().securityLevel;let 
s;a==="sandbox"&&(s=St("#i"+e));const o=St(a==="sandbox"?s.nodes()[0].contentDocument.body:"body");jn.init();const l=o.select("#"+e);yu.initGraphics(l);const u=n.db.getTasks(),h=n.db.getDiagramTitle(),d=n.db.getActors();for(const b in ba)delete ba[b];let f=0;d.forEach(b=>{ba[b]={color:i.actorColours[f%i.actorColours.length],position:f},f++}),cst(l),jn.insert(0,0,$s,Object.keys(ba).length*50),hst(l,u,0);const p=jn.getBounds();h&&l.append("text").text(h).attr("x",$s).attr("font-size","4ex").attr("font-weight","bold").attr("y",25);const m=p.stopy-p.starty+2*i.diagramMarginY,_=$s+p.stopx+2*i.diagramMarginX;li(l,m,_,i.useMaxWidth),l.append("line").attr("x1",$s).attr("y1",i.height*4).attr("x2",_-$s-4).attr("y2",i.height*4).attr("stroke-width",4).attr("stroke","black").attr("marker-end","url(#arrowhead)");const y=h?70:0;l.attr("viewBox",`${p.startx} -25 ${_} ${m+y}`),l.attr("preserveAspectRatio","xMinYMin meet"),l.attr("height",m+y+25),bn(n.db,l,e)},jn={data:{startx:void 0,stopx:void 0,starty:void 0,stopy:void 0},verticalPos:0,sequenceItems:[],init:function(){this.sequenceItems=[],this.data={startx:void 0,stopx:void 0,starty:void 0,stopy:void 0},this.verticalPos=0},updateVal:function(t,e,r,n){typeof t[e]>"u"?t[e]=r:t[e]=n(r,t[e])},updateBounds:function(t,e,r,n){const i=nt().journey,a=this;let s=0;function o(l){return function(h){s++;const d=a.sequenceItems.length-s+1;a.updateVal(h,"starty",e-d*i.boxMargin,Math.min),a.updateVal(h,"stopy",n+d*i.boxMargin,Math.max),a.updateVal(jn.data,"startx",t-d*i.boxMargin,Math.min),a.updateVal(jn.data,"stopx",r+d*i.boxMargin,Math.max),l!=="activation"&&(a.updateVal(h,"startx",t-d*i.boxMargin,Math.min),a.updateVal(h,"stopx",r+d*i.boxMargin,Math.max),a.updateVal(jn.data,"starty",e-d*i.boxMargin,Math.min),a.updateVal(jn.data,"stopy",n+d*i.boxMargin,Math.max))}}this.sequenceItems.forEach(o())},insert:function(t,e,r,n){const 
i=Math.min(t,r),a=Math.max(t,r),s=Math.min(e,n),o=Math.max(e,n);this.updateVal(jn.data,"startx",i,Math.min),this.updateVal(jn.data,"starty",s,Math.min),this.updateVal(jn.data,"stopx",a,Math.max),this.updateVal(jn.data,"stopy",o,Math.max),this.updateBounds(i,s,a,o)},bumpVerticalPos:function(t){this.verticalPos=this.verticalPos+t,this.data.stopy=this.verticalPos},getVerticalPos:function(){return this.verticalPos},getBounds:function(){return this.data}},i_=k0.sectionFills,HL=k0.sectionColours,hst=function(t,e,r){const n=nt().journey;let i="";const a=n.height*2+n.diagramMarginY,s=r+a;let o=0,l="#CCC",u="black",h=0;for(let d=0;d<e.length;d++){const f=e[d];if(i!==f.section){l=i_[o%i_.length],h=o%i_.length,u=HL[o%HL.length];const m={x:d*n.taskMargin+d*n.width+$s,y:50,text:f.section,fill:l,num:h,colour:u};yu.drawSection(t,m,n),i=f.section,o++}const p=f.people.reduce((m,_)=>(ba[_]&&(m[_]=ba[_]),m),{});f.x=d*n.taskMargin+d*n.width+$s,f.y=s,f.width=n.diagramMarginX,f.height=n.diagramMarginY,f.colour=u,f.fill=l,f.num=h,f.actors=p,yu.drawTask(t,f,n),jn.insert(f.x,f.y,f.x+f.width+n.taskMargin,300+5*30)}},GL={setConf:lst,draw:ust};let jL={};const a_={setConf:function(t){jL={...jL,...t}},draw:(t,e,r)=>{try{H.debug(`Renering svg for syntax error -`);const n=St("#"+e),i=n.append("g");i.append("path").attr("class","error-icon").attr("d","m411.313,123.313c6.25-6.25 6.25-16.375 0-22.625s-16.375-6.25-22.625,0l-32,32-9.375,9.375-20.688-20.688c-12.484-12.5-32.766-12.5-45.25,0l-16,16c-1.261,1.261-2.304,2.648-3.31,4.051-21.739-8.561-45.324-13.426-70.065-13.426-105.867,0-192,86.133-192,192s86.133,192 192,192 192-86.133 192-192c0-24.741-4.864-48.327-13.426-70.065 1.402-1.007 2.79-2.049 4.051-3.31l16-16c12.5-12.492 12.5-32.758 0-45.25l-20.688-20.688 9.375-9.375 32.001-31.999zm-219.313,100.687c-52.938,0-96,43.063-96,96 0,8.836-7.164,16-16,16s-16-7.164-16-16c0-70.578 57.422-128 128-128 8.836,0 16,7.164 
16,16s-7.164,16-16,16z"),i.append("path").attr("class","error-icon").attr("d","m459.02,148.98c-6.25-6.25-16.375-6.25-22.625,0s-6.25,16.375 0,22.625l16,16c3.125,3.125 7.219,4.688 11.313,4.688 4.094,0 8.188-1.563 11.313-4.688 6.25-6.25 6.25-16.375 0-22.625l-16.001-16z"),i.append("path").attr("class","error-icon").attr("d","m340.395,75.605c3.125,3.125 7.219,4.688 11.313,4.688 4.094,0 8.188-1.563 11.313-4.688 6.25-6.25 6.25-16.375 0-22.625l-16-16c-6.25-6.25-16.375-6.25-22.625,0s-6.25,16.375 0,22.625l15.999,16z"),i.append("path").attr("class","error-icon").attr("d","m400,64c8.844,0 16-7.164 16-16v-32c0-8.836-7.156-16-16-16-8.844,0-16,7.164-16,16v32c0,8.836 7.156,16 16,16z"),i.append("path").attr("class","error-icon").attr("d","m496,96.586h-32c-8.844,0-16,7.164-16,16 0,8.836 7.156,16 16,16h32c8.844,0 16-7.164 16-16 0-8.836-7.156-16-16-16z"),i.append("path").attr("class","error-icon").attr("d","m436.98,75.605c3.125,3.125 7.219,4.688 11.313,4.688 4.094,0 8.188-1.563 11.313-4.688l32-32c6.25-6.25 6.25-16.375 0-22.625s-16.375-6.25-22.625,0l-32,32c-6.251,6.25-6.251,16.375-0.001,22.625z"),i.append("text").attr("class","error-text").attr("x",1440).attr("y",250).attr("font-size","150px").style("text-anchor","middle").text("Syntax error in graph"),i.append("text").attr("class","error-text").attr("x",1250).attr("y",400).attr("font-size","100px").style("text-anchor","middle").text("mermaid version "+r),n.attr("height",100),n.attr("width",500),n.attr("viewBox","768 0 912 512")}catch(n){H.error("Error while rendering info diagram"),H.error(hX(n))}}};let $L=!1;const 
mu=()=>{$L||($L=!0,Lr("error",{db:{clear:()=>{}},styles:Aw,renderer:a_,parser:{parser:{yy:{}},parse:()=>{}},init:()=>{}},t=>t.toLowerCase().trim()==="error"),Lr("c4",{parser:qc,db:Xw,renderer:a9,styles:Dw,init:t=>{a9.setConf(t.c4)}},lK),Lr("class",{parser:_1,db:Jo,renderer:qtt,styles:Nc,init:t=>{t.class||(t.class={}),t.class.arrowMarkerAbsolute=t.arrowMarkerAbsolute,Jo.clear()}},ZK),Lr("classDiagram",{parser:_1,db:Jo,renderer:Oet,styles:Nc,init:t=>{t.class||(t.class={}),t.class.arrowMarkerAbsolute=t.arrowMarkerAbsolute,Jo.clear()}},QK),Lr("er",{parser:a4,db:zet,renderer:ert,styles:Sw},Fet),Lr("gantt",{parser:M4,db:P4,renderer:rit,styles:Mw},Snt),Lr("info",{parser:q4,db:nit,renderer:iit,styles:Lw},ait),Lr("pie",{parser:V4,db:oit,renderer:lit,styles:Rw},sit),Lr("requirement",{parser:Y4,db:uit,renderer:mit,styles:Iw},cit),Lr("sequence",{parser:H4,db:mL,renderer:AL,styles:Nw,init:t=>{if(t.sequence||(t.sequence={}),t.sequence.arrowMarkerAbsolute=t.arrowMarkerAbsolute,"sequenceDiagram"in t)throw new Error("`mermaid config.sequenceDiagram` has been renamed to `config.sequence`. 
Please update your mermaid config.");mL.setWrap(t.wrap),AL.setConf(t.sequence)}},bit),Lr("state",{parser:p0,db:ma,renderer:zat,styles:s1,init:t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute,ma.clear()}},fat),Lr("stateDiagram",{parser:p0,db:ma,renderer:Gat,styles:s1,init:t=>{t.state||(t.state={}),t.state.arrowMarkerAbsolute=t.arrowMarkerAbsolute,ma.clear()}},dat),Lr("journey",{parser:e_,db:VL,renderer:GL,styles:Bw,init:t=>{GL.setConf(t.journey),VL.clear()}},jat),Lr("flowchart",{parser:X1,db:fa,renderer:A4,styles:a1,init:t=>{t.flowchart||(t.flowchart={}),t.flowchart.arrowMarkerAbsolute=t.arrowMarkerAbsolute,S4.setConf(t.flowchart),fa.clear(),fa.setGen("gen-1")}},rrt),Lr("flowchart-v2",{parser:X1,db:fa,renderer:A4,styles:a1,init:t=>{t.flowchart||(t.flowchart={}),t.flowchart.arrowMarkerAbsolute=t.arrowMarkerAbsolute,kw({flowchart:{arrowMarkerAbsolute:t.arrowMarkerAbsolute}}),A4.setConf(t.flowchart),fa.clear(),fa.setGen("gen-2")}},nrt),Lr("gitGraph",{parser:hg,db:JX,renderer:sK,styles:oK},BX))};class bu{constructor(e,r){vl(this,"type","graph");vl(this,"parser");vl(this,"renderer");vl(this,"db");vl(this,"detectTypeFailed",!1);var a,s;this.txt=e;const n=nt();this.txt=e;try{this.type=Xp(e,n)}catch(o){this.handleError(o,r),this.type="error",this.detectTypeFailed=!0}const i=Pw(this.type);H.debug("Type "+this.type),this.db=i.db,(s=(a=this.db).clear)==null||s.call(a),this.renderer=i.renderer,this.parser=i.parser,this.parser.parser.yy=this.db,i.init&&(i.init(n),H.debug("Initialized diagram "+this.type,n)),this.txt+=` -`,this.parse(this.txt,r)}parse(e,r){if(this.detectTypeFailed)return!1;try{return e=e+` -`,this.db.clear(),this.parser.parse(e),!0}catch(n){this.handleError(n,r)}return!1}handleError(e,r){if(r)ng(e)?r(e.str,e.hash):r(e);else throw e}getParser(){return this.parser}getType(){return this.type}}const s_=(t,e)=>{const r=Xp(t,nt());try{return Pw(r),new bu(t,e)}catch(n){if(!(n instanceof qw))throw H.error(n),n;const i=wG(r);if(!i)throw new 
Error(`Loader for ${r} not found.`);return i().then(({diagram:a})=>(Lr(r,a,void 0),new bu(t,e)))}};function fst(t,e){return mu(),new bu(t,e).parse(t,e)}async function dst(t,e){return mu(),(await s_(t,e)).parse(t,e)}const XL=function(t){let e=t;return e=e.replace(/style.*:\S*#.*;/g,function(r){return r.substring(0,r.length-1)}),e=e.replace(/classDef.*:\S*#.*;/g,function(r){return r.substring(0,r.length-1)}),e=e.replace(/#\w+;/g,function(r){const n=r.substring(1,r.length-1);return/^\+?\d+$/.test(n)?"\uFB02\xB0\xB0"+n+"\xB6\xDF":"\uFB02\xB0"+n+"\xB6\xDF"}),e},w0=function(t){let e=t;return e=e.replace(/fl°°/g,function(){return"&#"}),e=e.replace(/fl°/g,function(){return"&"}),e=e.replace(/¶ß/g,function(){return";"}),e},pst=function(t,e,r,n){var T;mu(),Ic(),e=e.replace(/\r\n?/g,` -`);const i=Se.detectInit(e);i&&(Vs(i),ug(i));const a=nt();H.debug(a),e.length>a.maxTextSize&&(e="graph TB;a[Maximum text size in diagram exceeded];style a fill:#faa");let s=St("body");if(typeof n<"u"){if(n&&(n.innerHTML=""),a.securityLevel==="sandbox"){const C=St(n).append("iframe").attr("id","i"+t).attr("style","width: 100%; height: 100%;").attr("sandbox","");s=St(C.nodes()[0].contentDocument.body),s.node().style.margin=0}else s=St(n);s.append("div").attr("id","d"+t).attr("style","font-family: "+a.fontFamily).append("svg").attr("id",t).attr("width","100%").attr("xmlns","http://www.w3.org/2000/svg").attr("xmlns:xlink","http://www.w3.org/1999/xlink").append("g")}else{const C=document.getElementById(t);C&&C.remove();let M;if(a.securityLevel==="sandbox"?M=document.querySelector("#i"+t):M=document.querySelector("#d"+t),M&&M.remove(),a.securityLevel==="sandbox"){const S=St("body").append("iframe").attr("id","i"+t).attr("style","width: 100%; height: 100%;").attr("sandbox","");s=St(S.nodes()[0].contentDocument.body),s.node().style.margin=0}else s=St("body");s.append("div").attr("id","d"+t).append("svg").attr("id",t).attr("width","100%").attr("xmlns","http://www.w3.org/2000/svg").append("g")}e=XL(e);let 
o,l;try{if(o=s_(e),"then"in o)throw new Error("Diagram is a promise")}catch(C){o=new bu("error"),l=C}const u=s.select("#d"+t).node(),h=o.type,d=u.firstChild,f=d.firstChild;let p="";if(a.themeCSS!==void 0&&(p+=` -${a.themeCSS}`),a.fontFamily!==void 0&&(p+=` -:root { --mermaid-font-family: ${a.fontFamily}}`),a.altFontFamily!==void 0&&(p+=` -:root { --mermaid-alt-font-family: ${a.altFontFamily}}`),h==="flowchart"||h==="flowchart-v2"||h==="graph"){const C=S4.getClasses(e,o),M=a.htmlLabels||((T=a.flowchart)==null?void 0:T.htmlLabels);for(const S in C)M?(p+=` -.${S} > * { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} span { ${C[S].styles.join(" !important; ")} !important; }`):(p+=` -.${S} path { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} rect { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} polygon { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} ellipse { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} circle { ${C[S].styles.join(" !important; ")} !important; }`,C[S].textStyles&&(p+=` -.${S} tspan { ${C[S].textStyles.join(" !important; ")} !important; }`))}const _=((C,M)=>e1(yw(`${C}{${M}}`),_w))(`#${t}`,Ow(h,p,a.themeVariables)),y=document.createElement("style");y.innerHTML=`#${t} `+_,d.insertBefore(y,f);try{o.renderer.draw(e,t,r1.version,o)}catch(C){throw a_.draw(e,t,r1.version),C}s.select(`[id="${t}"]`).selectAll("foreignobject > *").attr("xmlns","http://www.w3.org/1999/xhtml");let b=s.select("#d"+t).node().innerHTML;if(H.debug("cnf.arrowMarkerAbsolute",a.arrowMarkerAbsolute),!Mr(a.arrowMarkerAbsolute)&&a.securityLevel!=="sandbox"&&(b=b.replace(/marker-end="url\(.*?#/g,'marker-end="url(#',"g")),b=w0(b),b=b.replace(/<br>/g,"<br/>"),a.securityLevel==="sandbox"){const C=s.select("#d"+t+" svg").node(),M="100%";let S="100%";C&&(S=C.viewBox.baseVal.height+"px"),b=`<iframe style="width:${M};height:${S};border:0;margin:0;" src="data:text/html;base64,${btoa('<body 
style="margin:0">'+b+"</body>")}" sandbox="allow-top-navigation-by-user-activation allow-popups"> - The \u201Ciframe\u201D tag is not supported by your browser. -</iframe>`}else a.securityLevel!=="loose"&&(b=Ec.sanitize(b,{ADD_TAGS:["foreignobject"],ADD_ATTR:["dominant-baseline"]}));if(typeof r<"u")switch(h){case"flowchart":case"flowchart-v2":r(b,fa.bindFunctions);break;case"gantt":r(b,P4.bindFunctions);break;case"class":case"classDiagram":r(b,Jo.bindFunctions);break;default:r(b)}else H.debug("CB = undefined!");bL();const x=a.securityLevel==="sandbox"?"#i"+t:"#d"+t,k=St(x).node();if(k&&"remove"in k&&k.remove(),l)throw l;return b},gst=async function(t,e,r,n){var T;mu(),Ic(),e=e.replace(/\r\n?/g,` -`);const i=Se.detectInit(e);i&&(Vs(i),ug(i));const a=nt();H.debug(a),e.length>a.maxTextSize&&(e="graph TB;a[Maximum text size in diagram exceeded];style a fill:#faa");let s=St("body");if(typeof n<"u"){if(n&&(n.innerHTML=""),a.securityLevel==="sandbox"){const C=St(n).append("iframe").attr("id","i"+t).attr("style","width: 100%; height: 100%;").attr("sandbox","");s=St(C.nodes()[0].contentDocument.body),s.node().style.margin=0}else s=St(n);s.append("div").attr("id","d"+t).attr("style","font-family: "+a.fontFamily).append("svg").attr("id",t).attr("width","100%").attr("xmlns","http://www.w3.org/2000/svg").attr("xmlns:xlink","http://www.w3.org/1999/xlink").append("g")}else{const C=document.getElementById(t);C&&C.remove();let M;if(a.securityLevel==="sandbox"?M=document.querySelector("#i"+t):M=document.querySelector("#d"+t),M&&M.remove(),a.securityLevel==="sandbox"){const S=St("body").append("iframe").attr("id","i"+t).attr("style","width: 100%; height: 100%;").attr("sandbox","");s=St(S.nodes()[0].contentDocument.body),s.node().style.margin=0}else s=St("body");s.append("div").attr("id","d"+t).append("svg").attr("id",t).attr("width","100%").attr("xmlns","http://www.w3.org/2000/svg").append("g")}e=XL(e);let o,l;try{o=await s_(e)}catch(C){o=new bu("error"),l=C}const 
u=s.select("#d"+t).node(),h=o.type,d=u.firstChild,f=d.firstChild;let p="";if(a.themeCSS!==void 0&&(p+=` -${a.themeCSS}`),a.fontFamily!==void 0&&(p+=` -:root { --mermaid-font-family: ${a.fontFamily}}`),a.altFontFamily!==void 0&&(p+=` -:root { --mermaid-alt-font-family: ${a.altFontFamily}}`),h==="flowchart"||h==="flowchart-v2"||h==="graph"){const C=S4.getClasses(e,o),M=a.htmlLabels||((T=a.flowchart)==null?void 0:T.htmlLabels);for(const S in C)M?(p+=` -.${S} > * { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} span { ${C[S].styles.join(" !important; ")} !important; }`):(p+=` -.${S} path { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} rect { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} polygon { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} ellipse { ${C[S].styles.join(" !important; ")} !important; }`,p+=` -.${S} circle { ${C[S].styles.join(" !important; ")} !important; }`,C[S].textStyles&&(p+=` -.${S} tspan { ${C[S].textStyles.join(" !important; ")} !important; }`))}const _=((C,M)=>e1(yw(`${C}{${M}}`),_w))(`#${t}`,Ow(h,p,a.themeVariables)),y=document.createElement("style");y.innerHTML=`#${t} `+_,d.insertBefore(y,f);try{await o.renderer.draw(e,t,r1.version,o)}catch(C){throw a_.draw(e,t,r1.version),C}s.select(`[id="${t}"]`).selectAll("foreignobject > *").attr("xmlns","http://www.w3.org/1999/xhtml");let b=s.select("#d"+t).node().innerHTML;if(H.debug("cnf.arrowMarkerAbsolute",a.arrowMarkerAbsolute),!Mr(a.arrowMarkerAbsolute)&&a.securityLevel!=="sandbox"&&(b=b.replace(/marker-end="url\(.*?#/g,'marker-end="url(#',"g")),b=w0(b),b=b.replace(/<br>/g,"<br/>"),a.securityLevel==="sandbox"){const C=s.select("#d"+t+" svg").node(),M="100%";let S="100%";C&&(S=C.viewBox.baseVal.height+"px"),b=`<iframe style="width:${M};height:${S};border:0;margin:0;" src="data:text/html;base64,${btoa('<body style="margin:0">'+b+"</body>")}" sandbox="allow-top-navigation-by-user-activation allow-popups"> - The \u201Ciframe\u201D 
tag is not supported by your browser. -</iframe>`}else a.securityLevel!=="loose"&&(b=Ec.sanitize(b,{ADD_TAGS:["foreignobject"],ADD_ATTR:["dominant-baseline"]}));if(typeof r<"u")switch(h){case"flowchart":case"flowchart-v2":r(b,fa.bindFunctions);break;case"gantt":r(b,P4.bindFunctions);break;case"class":case"classDiagram":r(b,Jo.bindFunctions);break;default:r(b)}else H.debug("CB = undefined!");bL();const x=a.securityLevel==="sandbox"?"#i"+t:"#d"+t,k=St(x).node();if(k&&"remove"in k&&k.remove(),l)throw l;return b};let Xs={};const yst=function(t,e,r,n){try{if(e!==void 0)switch(e=e.trim(),r){case"open_directive":Xs={};break;case"type_directive":if(!Xs)throw new Error("currentDirective is undefined");Xs.type=e.toLowerCase();break;case"arg_directive":if(!Xs)throw new Error("currentDirective is undefined");Xs.args=JSON.parse(e);break;case"close_directive":mst(t,Xs,n),Xs=void 0;break}}catch(i){H.error(`Error while rendering sequenceDiagram directive: ${e} jison context: ${r}`),H.error(i.message)}},mst=function(t,e,r){switch(H.debug(`Directive type=${e.type} with args:`,e.args),e.type){case"init":case"initialize":{["config"].forEach(n=>{typeof e.args[n]<"u"&&(r==="flowchart-v2"&&(r="flowchart"),e.args[r]=e.args[n],delete e.args[n])}),H.debug("sanitize in handleDirective",e.args),Vs(e.args),H.debug("sanitize in handleDirective (done)",e.args),ug(e.args);break}case"wrap":case"nowrap":t&&t.setWrap&&t.setWrap(e.type==="wrap");break;case"themeCss":H.warn("themeCss encountered");break;default:H.warn(`Unhandled directive: source: '%%{${e.type}: ${JSON.stringify(e.args?e.args:{})}}%%`,e);break}};function bst(t={}){t.fontFamily&&(t.themeVariables||(t.themeVariables={}),t.themeVariables.fontFamily=t.fontFamily),CX(t),(t==null?void 0:t.theme)&&t.theme in aa?t.themeVariables=aa[t.theme].getThemeVariables(t.themeVariables):t&&(t.themeVariables=aa.default.getThemeVariables(t.themeVariables));const e=typeof t=="object"?EX(t):xw();D0(e.logLevel),mu()}const 
Xe=Object.freeze({render:pst,renderAsync:gst,parse:fst,parseAsync:dst,parseDirective:yst,initialize:bst,getConfig:nt,setConfig:kw,getSiteConfig:xw,updateSiteConfig:SX,reset:()=>{Ic()},globalReset:()=>{Ic(Xo)},defaultConfig:Xo});D0(nt().logLevel),Ic(nt());let KL=!1;const _st=async function(t,e,r){try{KL?await JL(t,e,r):QL(t,e,r)}catch(n){H.warn("Syntax Error rendering"),ng(n)&&H.warn(n.str),$n.parseError&&$n.parseError(n)}},ZL=(t,e,r)=>{H.warn(t),ng(t)?(r&&r(t.str,t.hash),e.push({...t,message:t.str,error:t})):(r&&r(t),t instanceof Error&&e.push({str:t.message,message:t.message,hash:t.name,error:t}))},QL=function(t,e,r){const n=Xe.getConfig();t&&($n.sequenceConfig=t),H.debug(`${r?"":"No "}Callback function found`);let i;if(typeof e>"u")i=document.querySelectorAll(".mermaid");else if(typeof e=="string")i=document.querySelectorAll(e);else if(e instanceof HTMLElement)i=[e];else if(e instanceof NodeList)i=e;else throw new Error("Invalid argument nodes for mermaid.init");H.debug(`Found ${i.length} diagrams`),typeof(t==null?void 0:t.startOnLoad)<"u"&&(H.debug("Start On Load: "+(t==null?void 0:t.startOnLoad)),Xe.updateSiteConfig({startOnLoad:t==null?void 0:t.startOnLoad}));const a=new Se.initIdGenerator(n.deterministicIds,n.deterministicIDSeed);let s;const o=[];for(const l of Array.from(i)){H.info("Rendering diagram: "+l.id);/*! 
Check if previously processed */if(l.getAttribute("data-processed"))continue;l.setAttribute("data-processed","true");const u=`mermaid-${a.next()}`;s=l.innerHTML,s=Se.entityDecode(s).trim().replace(/<br\s*\/?>/gi,"<br/>");const h=Se.detectInit(s);h&&H.debug("Detected early reinit: ",h);try{Xe.render(u,s,(d,f)=>{l.innerHTML=d,typeof r<"u"&&r(u),f&&f(l)},l)}catch(d){ZL(d,o,$n.parseError)}}if(o.length>0)throw o[0]},vst=t=>{for(const{id:e,detector:r,loader:n}of t)$k(e,r,n)},xst=async t=>{H.debug(`Loading ${t.length} external diagrams`);const r=(await Promise.allSettled(t.map(async({id:n,detector:i,loader:a})=>{const{diagram:s}=await a();Lr(n,s,i)}))).filter(n=>n.status==="rejected");if(r.length>0){H.error(`Failed to load ${r.length} external diagrams`);for(const n of r)H.error(n);throw new Error(`Failed to load ${r.length} external diagrams`)}},JL=async function(t,e,r){const n=Xe.getConfig();t&&($n.sequenceConfig=t),H.debug(`${r?"":"No "}Callback function found`);let i;if(typeof e>"u")i=document.querySelectorAll(".mermaid");else if(typeof e=="string")i=document.querySelectorAll(e);else if(e instanceof HTMLElement)i=[e];else if(e instanceof NodeList)i=e;else throw new Error("Invalid argument nodes for mermaid.init");H.debug(`Found ${i.length} diagrams`),typeof(t==null?void 0:t.startOnLoad)<"u"&&(H.debug("Start On Load: "+(t==null?void 0:t.startOnLoad)),Xe.updateSiteConfig({startOnLoad:t==null?void 0:t.startOnLoad}));const a=new Se.initIdGenerator(n.deterministicIds,n.deterministicIDSeed);let s;const o=[];for(const l of Array.from(i)){H.info("Rendering diagram: "+l.id);/*! 
Check if previously processed */if(l.getAttribute("data-processed"))continue;l.setAttribute("data-processed","true");const u=`mermaid-${a.next()}`;s=l.innerHTML,s=Se.entityDecode(s).trim().replace(/<br\s*\/?>/gi,"<br/>");const h=Se.detectInit(s);h&&H.debug("Detected early reinit: ",h);try{await Xe.renderAsync(u,s,(d,f)=>{l.innerHTML=d,typeof r<"u"&&r(u),f&&f(l)},l)}catch(d){ZL(d,o,$n.parseError)}}if(o.length>0)throw o[0]},kst=function(t){Xe.initialize(t)},wst=async(t,{lazyLoad:e=!0}={})=>{e?vst(t):await xst(t),KL=!0},tR=function(){if($n.startOnLoad){const{startOnLoad:t}=Xe.getConfig();t&&$n.init()}};if(typeof document<"u"){/*! - * Wait for document loaded before starting the execution - */window.addEventListener("load",tR,!1)}const Tst=function(t){$n.parseError=t},Est=t=>Xe.parse(t,$n.parseError),T0=[];let o_=!1;const eR=async()=>{if(!o_){for(o_=!0;T0.length>0;){const t=T0.shift();if(t)try{await t()}catch(e){H.error("Error executing queue",e)}}o_=!1}},Cst=t=>new Promise((e,r)=>{const n=()=>new Promise((i,a)=>{Xe.parseAsync(t,$n.parseError).then(s=>{i(s),e(s)},s=>{H.error("Error parsing",s),a(s),r(s)})});T0.push(n),eR()}),Sst=(t,e,r,n)=>new Promise((i,a)=>{const s=()=>new Promise((o,l)=>{Xe.renderAsync(t,e,r,n).then(u=>{o(u),i(u)},u=>{H.error("Error parsing",u),l(u),a(u)})});T0.push(s),eR()}),$n={startOnLoad:!0,diagrams:{},mermaidAPI:Xe,parse:Est,parseAsync:Cst,render:Xe.render,renderAsync:Sst,init:_st,initThrowsErrors:QL,initThrowsErrorsAsync:JL,registerExternalDiagrams:wst,initialize:kst,parseError:void 0,contentLoaded:tR,setParseErrorHandler:Tst};return $n}); -//# sourceMappingURL=mermaid.min.js.map diff --git a/op-alloy/book/src/CONTRIBUTING.md b/op-alloy/book/src/CONTRIBUTING.md deleted file mode 100644 index 2a8da08cb42..00000000000 --- a/op-alloy/book/src/CONTRIBUTING.md +++ /dev/null @@ -1,66 +0,0 @@ -# Contributing - -Thank you for wanting to contribute! 
Before contributing to this repository, -please read through this document and discuss the change you wish to make via issue. - - -## Dependencies - -Before working with this repository locally, you'll need to install a few dependencies: - -- [Just](https://github.com/casey/just) for our command-runner scripts. -- [The Rust toolchain](https://rustup.rs/). - -**Optional** - -- [mdbook](https://github.com/rust-lang/mdBook) to contribute to the [book](/) - - [mdbook-template](https://github.com/sgoudham/mdbook-template) - - [mdbook-mermaid](https://github.com/badboy/mdbook-mermaid) - - -## Pull Request Process - -1. [Create an issue][new-issue] for any significant changes. Trivial changes may skip this step. -1. Once the change is implemented, ensure that all checks are passing before creating a PR. - The full CI pipeline can be run locally via the `Justfile`s in the repository. -1. Be sure to update any documentation that has gone stale as a result of the change, - in the `README` files, the [book][book], and in rustdoc comments. -1. Once your PR is approved by a maintainer, you may merge your pull request yourself - if you have permissions to do so. Otherwise, the maintainer who approves your pull - request will add it to the merge queue. - - -## Working with OP Stack Specs - -The [OP Stack][op-stack] is a set of standardized open-source specifications -that powers Optimism, developed by the Optimism Collective. - -`op-alloy` is a rust implementation of core OP Stack types, transports, -middleware and more. Not all types and implementation details in `op-alloy` -are present in the OP Stack [specs][specs], and on the flipside, not all -specifications are implemented by `op-alloy`. That said, `op-alloy` is -entirely _based off_ of the [specs][specs], and new functionality or -core modifications to `op-alloy` must be reflected in the [specs][specs]. 
- -As such, the first step for introducing changes to the OP Stack is to -[open a pr][specs-pr] in the [specs repository][specs-repo]. These -changes should target a [protocol upgrade][upgrades] so that all -implementations of the OP Stack are able to synchronize and implement -the changes. - -Once changes are merged in the OP Stack [specs][specs] repo, they -may be added to `op-alloy` in a **backwards-compatible** way such -that pre-upgrade functionality persists. The primary way to enable -backwards-compatibility is by using timestamp-based activation for -protocol upgrades. - - -<!-- Links --> - -[upgrades]: https://specs.optimism.io/protocol/isthmus/overview.html -[specs-repo]: https://github.com/ethereum-optimism/specs -[specs-pr]: https://github.com/ethereum-optimism/specs/pulls -[specs]: https://specs.optimism.io/ -[op-stack]: https://docs.optimism.io/stack/getting-started -[book]: https://github.com/alloy-rs/op-alloy/tree/main/book -[new-issue]: https://github.com/alloy-rs/op-alloy/issues/new diff --git a/op-alloy/book/src/LICENSE.md b/op-alloy/book/src/LICENSE.md deleted file mode 100644 index 414b0e98ab8..00000000000 --- a/op-alloy/book/src/LICENSE.md +++ /dev/null @@ -1,10 +0,0 @@ -# Licensing - -op-alloy is dually licensed under the [Apache 2.0][apache] and the [MIT][mit] license. - -The [SNAPPY license][snappy] is added for the use of [snap][snap] in `op-alloy-rpc-types-engine`. 
- -[snap]: https://github.com/BurntSushi/rust-snappy -[snappy]: https://github.com/alloy-rs/op-alloy/blob/main/SNAPPY-LICENSE -[apache]: https://github.com/alloy-rs/op-alloy/blob/main/LICENSE-APACHE -[mit]: https://github.com/alloy-rs/op-alloy/blob/main/LICENSE-MIT diff --git a/op-alloy/book/src/SUMMARY.md b/op-alloy/book/src/SUMMARY.md deleted file mode 100644 index f5d463cc6b2..00000000000 --- a/op-alloy/book/src/SUMMARY.md +++ /dev/null @@ -1,10 +0,0 @@ -# Summary - -- [Introduction](./intro.md) -- [Getting Started](./starting.md) -- [Building](./building/README.md) - - [Consensus](./building/consensus.md) - - [Engine RPC Types](./building/engine.md) -- [Contributing](./CONTRIBUTING.md) -- [Licensing](./LICENSE.md) -- [Glossary](./glossary.md) diff --git a/op-alloy/book/src/building/README.md b/op-alloy/book/src/building/README.md deleted file mode 100644 index 65019865b72..00000000000 --- a/op-alloy/book/src/building/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Building - -This section offers in-depth documentation into the various `op-alloy` crates. -Some of the primary crates and their types are listed below. - - [`RollupConfig`][rollup-config] and [`SystemConfig`][system-config] types. -- [`op-alloy-consensus`][op-alloy-consensus] provides [`OpBlock`][op-block], - [`OpTxEnvelope`][op-tx-envelope], [`OpReceiptEnvelope`][op-rx-envelope], - and more. -- [`op-alloy-rpc-types-engine`][op-alloy-rpc-types-engine] provides the - [`OpPayloadAttributes`][op-payload-attributes]. 
- - -<!-- Links --> - -[op-block]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/type.OpBlock.html -[op-tx-envelope]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxEnvelope.html -[op-rx-envelope]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/enum.OpReceiptEnvelope.html - -[op-payload-attributes]: https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/struct.OpPayloadAttributes.html - -[op-alloy-consensus]: https://crates.io/crates/op-alloy-consensus -[op-alloy-rpc-types-engine]: https://crates.io/crates/op-alloy-rpc-types-engine diff --git a/op-alloy/book/src/building/consensus.md b/op-alloy/book/src/building/consensus.md deleted file mode 100644 index 312a1b29516..00000000000 --- a/op-alloy/book/src/building/consensus.md +++ /dev/null @@ -1,71 +0,0 @@ -# Consensus - -<a href="https://crates.io/crates/op-alloy-consensus"><img src="https://img.shields.io/crates/v/op-alloy-consensus.svg" alt="op-alloy-consensus crate"></a> - -The `op-alloy-consensus` crate provides an Optimism consensus interface. -It contains constants, types, and functions for implementing Optimism EL -consensus and communication. This includes an extended `OpTxEnvelope` type -with [deposit transactions][deposit], and receipts containing OP Stack -specific fields (`deposit_nonce` + `deposit_receipt_version`). - -In general a type belongs in this crate if it exists in the -`alloy-consensus` crate, but was modified from the base Ethereum protocol -in the OP Stack. For consensus types that are not modified by the OP Stack, -the `alloy-consensus` types should be used instead. - - -## Block - -[`op-alloy-consensus`][consensus] exports an Optimism block type, [`OpBlock`][op-block]. - -This type simply re-uses the `alloy-consensus` block type, with `OpTxEnvelope` -as the type of transactions in the block. 
- - -## Transactions - -Optimism extends the Ethereum [EIP-2718][2718] transaction envelope to include a -deposit variant. - -### [`OpTxEnvelope`][envelope] - -The [`OpTxEnvelope`][envelope] type is based on [Alloy][alloy]'s -[`TxEnvelope`][tx-envelope] type. - -Optimism modifies the `TxEnvelope` to the following. -- Legacy -- EIP-2930 -- EIP-1559 -- EIP-7702 -- Deposit - -Deposit is a custom transaction type that is either an L1 attributes -deposit transaction or a user-submitted deposit transaction. Read more -about deposit transactions in [the specs][specs]. - -### Transaction Types ([`OpTxType`][ty]) - -The [`OpTxType`][ty] enumerates the transaction types using their byte identifier, -represents as a `u8` in rust. - - -## Receipt Types - -Just like [`op-alloy-consensus`][consensus] defines transaction types, -it also defines associated receipt types. - -[`OpReceiptEnvelope`][ore] defines an [Eip-2718][2718] receipt envelope type -modified for the OP Stack. It contains the following variants - mapping -directly to the `OpTxEnvelope` variants defined above. - -- Legacy -- EIP-2930 -- EIP-1559 -- EIP-7702 -- Deposit - -There is also an [`OpDepositReceipt`][odr] type, extending the alloy receipt -type with a deposit nonce and deposit receipt version. - - -{{#include ../links.md}} diff --git a/op-alloy/book/src/building/engine.md b/op-alloy/book/src/building/engine.md deleted file mode 100644 index 34b1c8172b8..00000000000 --- a/op-alloy/book/src/building/engine.md +++ /dev/null @@ -1,25 +0,0 @@ -# RPC Engine Types - -<a href="https://crates.io/crates/op-alloy-rpc-types-engine"><img src="https://img.shields.io/crates/v/op-alloy-rpc-types-engine.svg" alt="op-alloy-rpc-types-engine crate"></a> - -The [`op-alloy-rpc-types-engine`][engine] crate provides Optimism types for interfacing -with the Engine API in the OP Stack. - -Optimism defines a custom payload attributes type called [`OpPayloadAttributes`][attributes]. 
-`OpPayloadAttributes` extends alloy's [`PayloadAttributes`][pa] with a few fields: transactions, -a flag for enabling the tx pool, the gas limit, and EIP 1559 parameters. - -Optimism also returns a custom type for the `engine_getPayload` request for both V3 and -V4 payload envelopes. These are the [`OpExecutionPayloadEnvelopeV3`][v3] and -[`OpExecutionPayloadEnvelopeV4`][v4] types, which both wrap payload envelope types -from [`alloy-rpc-types-engine`][alloy-engine]. - - -<!-- Links --> - -[alloy-engine]: https://crates.io/crates/alloy-rpc-types-engine -[v3]: https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/payload/v3/struct.OpExecutionPayloadEnvelopeV3.html -[v4]: https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/payload/v4/struct.OpExecutionPayloadEnvelopeV4.html -[pa]: https://docs.rs/alloy-rpc-types-engine/latest/alloy_rpc_types_engine/payload/struct.PayloadAttributes.html -[attributes]: https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/struct.OpPayloadAttributes.html -[engine]: https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/ diff --git a/op-alloy/book/src/building/rpc_types.md b/op-alloy/book/src/building/rpc_types.md deleted file mode 100644 index 79997ac2753..00000000000 --- a/op-alloy/book/src/building/rpc_types.md +++ /dev/null @@ -1,20 +0,0 @@ -# RPC Types - -The [`op-alloy-rpc-types`][rpc] crate contains RPC-related types. - -The [`OpTransactionRequest`][req] type acts as a builder for -[`OpTypedTransaction`][typed]. - -[`Transaction`][tx] is a transaction type. - -Related to receipts, [`op-alloy-rpc-types`][rpc] contains the -[`OpTransactionReceipt`][receipt] type and it's field types. 
- - -<!-- Links --> - -[rpc]: https://crates.io/crates/op-alloy-rpc-types -[typed]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTypedTransaction.html -[tx]: https://docs.rs/op-alloy-rpc-types/latest/op_alloy_rpc_types/transaction/struct.Transaction.html -[req]: https://docs.rs/op-alloy-rpc-types/latest/op_alloy_rpc_types/receipt/struct.OpTransactionReceipt.html -[receipt]: https://docs.rs/op-alloy-rpc-types/latest/op_alloy_rpc_types/receipt/struct.OpTransactionReceipt.html diff --git a/op-alloy/book/src/glossary.md b/op-alloy/book/src/glossary.md deleted file mode 100644 index 62c194a5633..00000000000 --- a/op-alloy/book/src/glossary.md +++ /dev/null @@ -1,5 +0,0 @@ -# Glossary - -*This document contains definitions for terms used throughout the op-alloy book.* - - diff --git a/op-alloy/book/src/intro.md b/op-alloy/book/src/intro.md deleted file mode 100644 index fe712bdf667..00000000000 --- a/op-alloy/book/src/intro.md +++ /dev/null @@ -1,43 +0,0 @@ -# op-alloy - -<a href="https://github.com/alloy-rs/op-alloy"><img src="https://img.shields.io/github/stars/alloy-rs/op-alloy"></a> - -Welcome to the hands-on guide for getting started with `op-alloy`! - -`op-alloy` connects applications to the OP Stack, leveraging high -performance types, traits, and middleware from [Alloy][alloy]. - -> 📖 Development Status -> -> `op-alloy` is in active development, and is not yet ready for use in production. -> During development, this book will evolve quickly and may contain inaccuracies. -> -> Please [open an issue][new-issue] if you find any errors or have any suggestions for -> improvements, and also feel free to [contribute][contributing] to the project! - -## Sections - -### [Getting Started](./starting.md) - -To get started with op-alloy, add its crates as a dependency and take your first steps. - -### [Building with op-alloy](./building/README.md) - -Walk through types and functionality available in different `op-alloy` crates. 
- -### [Contributing](./CONTRIBUTING.md) - -Contributors are welcome! It is built and maintained by Alloy contributors, -members of [OP Labs][op-labs], and the broader open source community. - -`op-alloy` follows and expands the OP Stack standards set in the [specs]. -The [contributing guide][contributing] breaks down how the [specs] -integrate with `op-alloy` and how to contribute to `op-alloy`. - -### [Licensing](./LICENSE.md) - -`op-alloy` is licensed under the combined Apache 2.0 and MIT License, along -with a SNAPPY license for snappy encoding use. - - -{{#include ./links.md}} diff --git a/op-alloy/book/src/links.md b/op-alloy/book/src/links.md deleted file mode 100644 index c7f2edee274..00000000000 --- a/op-alloy/book/src/links.md +++ /dev/null @@ -1,68 +0,0 @@ -<!-- op-alloy --> - -[check-no-std]: https://github.com/alloy-rs/op-alloy/blob/main/scripts/check_no_std.sh -[contributing]: https://alloy-rs.github.io/op-alloy -[op-alloy-crate]: https://crates.io/crates/op-alloy -[op-alloy-ff]: https://docs.rs/crate/op-alloy/latest/features - -[op-alloy-consensus]: https://crates.io/crates/op-alloy-consensus -[op-alloy-network]: https://crates.io/crates/op-alloy-network -[op-alloy-provider]: https://crates.io/crates/op-alloy-provider -[op-alloy-rpc-jsonrpsee]: https://crates.io/crates/op-alloy-rpc-jsonrpsee -[op-alloy-rpc-types-engine]: https://crates.io/crates/op-alloy-rpc-types-engine -[op-alloy-rpc-types]: https://crates.io/crates/op-alloy-rpc-types - -<!-- op-alloy-consensus --> - -[consensus]: https://crates.io/crates/op-alloy-consensus -[odr]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/struct.OpDepositReceipt.html -[ore]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/enum.OpReceiptEnvelope.html -[op-block]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/type.OpBlock.html -[ty]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxType.html -[envelope]: 
https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxEnvelope.html - -<!-- OP Stack Specs --> - -[specs]: https://specs.optimism.io -[deposit]: https://specs.optimism.io/protocol/deposits.html -[derivation]: https://specs.optimism.io/protocol/derivation.html -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[spec-configurability]: https://specs.optimism.io/protocol/configurability.html -[system-config-specs]: https://specs.optimism.io/protocol/system-config.html#system-config -[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction - - -<!-- External --> - -[maili]: https://github.com/op-rs/maili -[revm]: https://github.com/bluealloy/revm -[2718]: https://eips.ethereum.org/EIPS/eip-2718 -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-succinct]: https://github.com/succinctlabs/op-succinct -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon - -<!-- Alloy --> - -[alloy]: https://github.com/alloy-rs/alloy -[op-alloy]: https://github.com/alloy-rs/op-alloy -[b64]: https://docs.rs/alloy-primitives/latest/alloy_primitives/aliases/type.B64.html -[tx-envelope]: https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxType.html - -<!-- Kona links --> - -[kona]: https://github.com/op-rs/kona -[book]: https://op-rs.github.io/kona/ -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: 
https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -<!-- People --> - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/op-alloy/book/src/starting.md b/op-alloy/book/src/starting.md deleted file mode 100644 index f46fa63afbf..00000000000 --- a/op-alloy/book/src/starting.md +++ /dev/null @@ -1,83 +0,0 @@ -# Installation - -[op-alloy][op-alloy] consists of a number of crates that provide a range of functionality -essential for interfacing with any OP Stack chain. - -The most succinct way to work with `op-alloy` is to add the [`op-alloy`][op-alloy-crate] crate -with the `full` feature flag from the command-line using Cargo. - -```txt -cargo add op-alloy --features full -``` - -Alternatively, you can add the following to your `Cargo.toml` file. - -```txt -op-alloy = { version = "0.5", features = ["full"] } -``` - -For more fine-grained control over the features you wish to include, you can add the individual -crates to your `Cargo.toml` file, or use the `op-alloy` crate with the features you need. - -After `op-alloy` is added as a dependency, crates re-exported by `op-alloy` are now available. - -```rust -use op_alloy::{ - genesis::{RollupConfig, SystemConfig}, - consensus::OpBlock, - protocol::BlockInfo, - network::Optimism, - provider::ext::engine::OpEngineApi, - rpc_types::OpTransactionReceipt, - rpc_jsonrpsee::traits::RollupNode, - rpc_types_engine::OpAttributesWithParent, -}; -``` - -## Features - -The [`op-alloy`][op-alloy-crate] defines many [feature flags][op-alloy-ff] including the following. - -Default -- `std` -- `k256` -- `serde` - -Full enables the most commonly used crates. -- `full` - -The `k256` feature flag enables the `k256` feature on the `op-alloy-consensus` crate. -- `k256` - -Arbitrary enables arbitrary features on crates, deriving the `Arbitrary` trait on types. 
-- `arbitrary` - -Serde derives serde's Serialize and Deserialize traits on types. -- `serde` - -Additionally, individual crates can be enabled using their shorthand names. -For example, the `consensus` feature flag provides the `op-alloy-consensus` re-export -so `op-alloy-consensus` types can be used from `op-alloy` through `op_alloy::consensus::InsertTypeHere`. - -## Crates - -- [`op-alloy-network`][op-alloy-network] -- [`op-alloy-provider`][op-alloy-provider] -- [`op-alloy-consensus`][op-alloy-consensus] (supports `no_std`) -- [`op-alloy-rpc-jsonrpsee`][op-alloy-rpc-jsonrpsee] -- [`op-alloy-rpc-types`][op-alloy-rpc-types] (supports `no_std`) -- [`op-alloy-rpc-types-engine`][op-alloy-rpc-types-engine] (supports `no_std`) - -## `no_std` - -As noted above, the following crates are `no_std` compatible. - -- [`op-alloy-consensus`][op-alloy-consensus] -- [`op-alloy-rpc-types-engine`][op-alloy-rpc-types-engine] -- [`op-alloy-rpc-types`][op-alloy-rpc-types] - -To add `no_std` support to a crate, ensure the [check_no_std][check-no-std] -script is updated to include this crate once `no_std` compatible. 
- - -{{#include ./links.md}} diff --git a/op-alloy/book/templates/glossary-link.md b/op-alloy/book/templates/glossary-link.md deleted file mode 100644 index 560fcf252f8..00000000000 --- a/op-alloy/book/templates/glossary-link.md +++ /dev/null @@ -1 +0,0 @@ -[[[ #text ]]]([[ #root ]]glossary.md#[[ #ref ]]) diff --git a/op-alloy/book/theme/index.hbs b/op-alloy/book/theme/index.hbs deleted file mode 100644 index c4fd6f237de..00000000000 --- a/op-alloy/book/theme/index.hbs +++ /dev/null @@ -1,364 +0,0 @@ -<!DOCTYPE HTML> -<html lang="{{ language }}" class="{{ default_theme }}" dir="{{ text_direction }}"> - -<head> - <!-- Book generated using mdBook --> - <meta charset="UTF-8"> - <title>{{ title }} - {{#if is_print }} - - {{/if}} - {{#if base_url}} - - {{/if}} - - - - {{> head}} - - - - - - {{#if favicon_svg}} - - {{/if}} - {{#if favicon_png}} - - {{/if}} - - - - {{#if print_enable}} - - {{/if}} - - - - {{#if copy_fonts}} - - {{/if}} - - - - - - - - {{#each additional_css}} - - {{/each}} - - {{#if mathjax_support}} - - - {{/if}} - - - -
- - - - - - - - - - - - - - - - - - - -
- -
- {{> header}} - - - - {{#if search_enabled}} - - {{/if}} - - - - -
-
- {{{ content }}} -
- - -
-
- - - -
- - {{#if live_reload_endpoint}} - - - {{/if}} - - {{#if google_analytics}} - - - {{/if}} - - {{#if playground_line_numbers}} - - {{/if}} - - {{#if playground_copyable}} - - {{/if}} - - {{#if playground_js}} - - - - - - {{/if}} - - {{#if search_js}} - - - - {{/if}} - - - - - - - {{#each additional_js}} - - {{/each}} - - {{#if is_print}} - {{#if mathjax_support}} - - {{else}} - - {{/if}} - {{/if}} - -
- - - diff --git a/op-alloy/cliff.toml b/op-alloy/cliff.toml deleted file mode 100644 index dcb0f4b3d8d..00000000000 --- a/op-alloy/cliff.toml +++ /dev/null @@ -1,53 +0,0 @@ -# Configuration file for [`git-cliff`](https://github.com/orhun/git-cliff) -# See https://git-cliff.org/docs/configuration - -[changelog] -header = """ -# Changelog - -All notable changes to this project will be documented in this file. - -The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), -and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n -""" -# https://tera.netlify.app/docs/#introduction -body = """ -{% if version %}\ - ## [{{ version | trim_start_matches(pat="v") }}](https://github.com/alloy-rs/op-alloy -/releases/tag/v{{ version | trim_start_matches(pat="v") }}) - {{ timestamp | date(format="%Y-%m-%d") }} -{% endif %}\ -{% for group, commits in commits | group_by(attribute="group") %} - ### {{ group | title }} - {% for commit in commits %} - - {% if commit.scope %}[{{ commit.scope }}] {% endif %}{{ commit.message | upper_first | split(pat="\\n") | first }}\ - {% endfor %} -{% endfor %}\n -""" -trim = true -footer = "" - -[git] -conventional_commits = true -filter_unconventional = false -commit_preprocessors = [ - { pattern = '#(\d+)', replace = "[#$1](https://github.com/alloy-rs/op-alloy/issues/$1)" }, -] -commit_parsers = [ - { message = "^[Ff]eat", group = "Features" }, - { message = "^[Ff]ix", group = "Bug Fixes" }, - { message = "^[Dd]oc", group = "Documentation" }, - { message = ".*\\b([Dd]eps|[Dd]ependencies|[Bb]ump)\\b", group = "Dependencies" }, - { message = "^[Pp]erf", group = "Performance" }, - { message = "^[Rr]efactor", group = "Refactor" }, - { message = ".*\\b([Ss]tyle|[Ff]mt|[Ff]ormat)\\b", group = "Styling" }, - { message = "^[Tt]est", group = "Testing" }, - { message = "^[Cc]hore", group = "Miscellaneous Tasks" }, - - { message = ".*", group = "Other" }, -] -protect_breaking_commits = false 
-filter_commits = false -tag_pattern = "v[0-9]*" -skip_tags = "beta|alpha" -ignore_tags = "rc" -sort_commits = "newest" diff --git a/op-alloy/crates/consensus/Cargo.toml b/op-alloy/crates/consensus/Cargo.toml deleted file mode 100644 index d730a4604bd..00000000000 --- a/op-alloy/crates/consensus/Cargo.toml +++ /dev/null @@ -1,70 +0,0 @@ -[package] -name = "op-alloy-consensus" -description = "Optimism alloy consensus types" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Alloy -alloy-rlp.workspace = true -alloy-eips.workspace = true -alloy-consensus.workspace = true -alloy-primitives = { workspace = true, features = ["rlp"] } - -# compat -alloy-network = { workspace = true, optional = true } -alloy-rpc-types-eth = { workspace = true, optional = true } - -# misc -thiserror.workspace = true -derive_more = { workspace = true, features = ["display"] } - -# arbitrary -arbitrary = { workspace = true, features = ["derive"], optional = true } - -# serde -serde_with = { workspace = true, optional = true } -alloy-serde = { workspace = true, optional = true } -serde = { workspace = true, features = ["derive"], optional = true } - -[dev-dependencies] -rand.workspace = true -bincode = { workspace = true, features = ["serde"] } -serde_json.workspace = true -alloy-signer.workspace = true -tokio = { workspace = true, features = ["macros"] } -arbitrary = { workspace = true, features = ["derive"] } -alloy-primitives = { workspace = true, features = ["rand", "arbitrary"] } - -[features] -default = ["std"] -std = ["alloy-eips/std", "alloy-consensus/std", "derive_more/std"] -alloy-compat = ["serde", "dep:alloy-network", "dep:alloy-rpc-types-eth"] -k256 = ["alloy-primitives/k256", "alloy-consensus/k256"] -kzg = ["alloy-eips/kzg", "alloy-consensus/kzg", "std"] -arbitrary 
= [ - "std", - "dep:arbitrary", - "alloy-consensus/arbitrary", - "alloy-eips/arbitrary", - "alloy-primitives/rand", - "alloy-primitives/arbitrary", -] -serde = [ - "dep:serde", - "dep:alloy-serde", - "alloy-primitives/serde", - "alloy-consensus/serde", - "alloy-eips/serde", -] -serde-bincode-compat = ["serde_with", "alloy-consensus/serde-bincode-compat"] diff --git a/op-alloy/crates/consensus/README.md b/op-alloy/crates/consensus/README.md deleted file mode 100644 index 67451d3b143..00000000000 --- a/op-alloy/crates/consensus/README.md +++ /dev/null @@ -1,25 +0,0 @@ -## `op-alloy-consensus` - -CI -op-alloy-consensus crate -MIT License -Apache License -Book - - -Optimism consensus interface. - -This crate contains constants, types, and functions for implementing Optimism EL consensus and communication. This -includes an extended `OpTxEnvelope` type with [deposit transactions][deposit], and receipts containing OP Stack -specific fields (`deposit_nonce` + `deposit_receipt_version`). - -In general a type belongs in this crate if it exists in the `alloy-consensus` crate, but was modified from the base Ethereum protocol in the OP Stack. -For consensus types that are not modified by the OP Stack, the `alloy-consensus` types should be used instead. - -[deposit]: https://specs.optimism.io/protocol/deposits.html - -### Provenance - -Much of this code was ported from [reth-primitives] as part of ongoing alloy migrations. - -[reth-primitives]: https://github.com/paradigmxyz/reth/tree/main/crates/primitives diff --git a/op-alloy/crates/consensus/src/interop.rs b/op-alloy/crates/consensus/src/interop.rs deleted file mode 100644 index 1fd989215b7..00000000000 --- a/op-alloy/crates/consensus/src/interop.rs +++ /dev/null @@ -1,95 +0,0 @@ -//! Commonly used types for interop. - -use alloc::string::{String, ToString}; -use alloy_primitives::{Address, address}; -use core::str::FromStr; -use derive_more::Display; - -/// The address of the L2 cross chain inbox predeploy proxy. 
-pub const CROSS_L2_INBOX_ADDRESS: Address = address!("0x4200000000000000000000000000000000000022"); - -/// The safety level of a message. -#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "kebab-case"))] -pub enum SafetyLevel { - /// The message is finalized. - Finalized, - /// The message is safe across chains. - #[cfg_attr(feature = "serde", serde(rename = "safe"))] - CrossSafe, - /// The message is safe locally. - LocalSafe, - /// The message is unsafe across chains. - CrossUnsafe, - /// The message is unsafe locally. - #[cfg_attr(feature = "serde", serde(rename = "unsafe"))] - LocalUnsafe, - /// The message is invalid. - Invalid, -} - -impl FromStr for SafetyLevel { - type Err = SafetyLevelParseError; - - fn from_str(s: &str) -> Result { - match s.to_lowercase().as_str() { - "finalized" => Ok(Self::Finalized), - "safe" => Ok(Self::CrossSafe), - "local-safe" | "localsafe" => Ok(Self::LocalSafe), - "cross-unsafe" | "crossunsafe" => Ok(Self::CrossUnsafe), - "unsafe" => Ok(Self::LocalUnsafe), - "invalid" => Ok(Self::Invalid), - _ => Err(SafetyLevelParseError(s.to_string())), - } - } -} - -/// Error when parsing SafetyLevel from string. 
-#[derive(thiserror::Error, Debug)] -#[error("Invalid SafetyLevel, error: {0}")] -pub struct SafetyLevelParseError(pub String); - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - #[cfg(feature = "serde")] - fn test_safety_level_serde() { - let level = SafetyLevel::Finalized; - let json = serde_json::to_string(&level).unwrap(); - assert_eq!(json, r#""finalized""#); - - let level: SafetyLevel = serde_json::from_str(&json).unwrap(); - assert_eq!(level, SafetyLevel::Finalized); - } - - #[test] - #[cfg(feature = "serde")] - fn test_serde_safety_level_fails() { - let json = r#""failed""#; - let level: Result = serde_json::from_str(json); - assert!(level.is_err()); - } - - #[test] - fn test_safety_level_from_str_valid() { - assert_eq!(SafetyLevel::from_str("finalized").unwrap(), SafetyLevel::Finalized); - assert_eq!(SafetyLevel::from_str("safe").unwrap(), SafetyLevel::CrossSafe); - assert_eq!(SafetyLevel::from_str("local-safe").unwrap(), SafetyLevel::LocalSafe); - assert_eq!(SafetyLevel::from_str("localsafe").unwrap(), SafetyLevel::LocalSafe); - assert_eq!(SafetyLevel::from_str("cross-unsafe").unwrap(), SafetyLevel::CrossUnsafe); - assert_eq!(SafetyLevel::from_str("crossunsafe").unwrap(), SafetyLevel::CrossUnsafe); - assert_eq!(SafetyLevel::from_str("unsafe").unwrap(), SafetyLevel::LocalUnsafe); - assert_eq!(SafetyLevel::from_str("invalid").unwrap(), SafetyLevel::Invalid); - } - - #[test] - fn test_safety_level_from_str_invalid() { - assert!(SafetyLevel::from_str("unknown").is_err()); - assert!(SafetyLevel::from_str("123").is_err()); - assert!(SafetyLevel::from_str("").is_err()); - assert!(SafetyLevel::from_str("safe ").is_err()); - } -} diff --git a/op-alloy/crates/consensus/src/receipts/deposit.rs b/op-alloy/crates/consensus/src/receipts/deposit.rs deleted file mode 100644 index 0bff384972f..00000000000 --- a/op-alloy/crates/consensus/src/receipts/deposit.rs +++ /dev/null @@ -1,534 +0,0 @@ -//! Transaction receipt types for Optimism. 
- -use super::OpTxReceipt; -use crate::transaction::OpDepositInfo; -use alloy_consensus::{ - Eip658Value, Receipt, ReceiptWithBloom, RlpDecodableReceipt, RlpEncodableReceipt, TxReceipt, -}; -use alloy_primitives::{Bloom, Log}; -use alloy_rlp::{Buf, BufMut, Decodable, Encodable, Header}; - -/// [`OpDepositReceipt`] with calculated bloom filter, modified for the OP Stack. -/// -/// This convenience type allows us to lazily calculate the bloom filter for a -/// receipt, similar to [`Sealed`]. -/// -/// [`Sealed`]: alloy_consensus::Sealed -pub type OpDepositReceiptWithBloom = ReceiptWithBloom>; - -/// Receipt containing result of transaction execution. -#[derive(Clone, Debug, PartialEq, Eq, Default)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -pub struct OpDepositReceipt { - /// The inner receipt type. - #[cfg_attr(feature = "serde", serde(flatten))] - pub inner: Receipt, - /// Deposit nonce for Optimism deposit transactions - #[cfg_attr( - feature = "serde", - serde( - default, - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - ) - )] - pub deposit_nonce: Option, - /// Deposit receipt version for Optimism deposit transactions - /// - /// The deposit receipt version was introduced in Canyon to indicate an update to how - /// receipt hashes should be computed when set. The state transition process - /// ensures this is only set for post-Canyon deposit transactions. - #[cfg_attr( - feature = "serde", - serde( - default, - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - ) - )] - pub deposit_receipt_version: Option, -} - -impl OpDepositReceipt { - /// Calculates [`Log`]'s bloom filter. this is slow operation and [OpDepositReceiptWithBloom] - /// can be used to cache this value. 
- pub fn bloom_slow(&self) -> Bloom { - self.inner.logs.iter().collect() - } - - /// Calculates the bloom filter for the receipt and returns the [OpDepositReceiptWithBloom] - /// container type. - pub fn with_bloom(self) -> OpDepositReceiptWithBloom { - self.into() - } -} - -impl OpDepositReceipt { - /// Maps the inner receipt value of this receipt. - /// - /// This is mainly useful for mapping the receipt log type to the rpc variant. - pub fn map_inner(self, f: F) -> OpDepositReceipt - where - F: FnOnce(Receipt) -> Receipt, - { - OpDepositReceipt { - inner: f(self.inner), - deposit_nonce: self.deposit_nonce, - deposit_receipt_version: self.deposit_receipt_version, - } - } - - /// Attaches the given bloom to the receipt returning [`ReceiptWithBloom`]. - pub const fn with_bloom_unchecked(self, bloom: Bloom) -> ReceiptWithBloom { - ReceiptWithBloom::new(self, bloom) - } - - /// Consumes the type and returns the inner [`Receipt`]. - pub fn into_inner(self) -> Receipt { - self.inner - } - - /// Returns the deposit info for this receipt. - pub const fn deposit_info(&self) -> OpDepositInfo { - OpDepositInfo { - deposit_nonce: self.deposit_nonce, - deposit_receipt_version: self.deposit_receipt_version, - } - } - - /// Converts the receipt's log type by applying a function to each log. - /// - /// Returns the receipt with the new log type - pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpDepositReceipt { - self.map_inner(|r| r.map_logs(f)) - } -} - -impl OpDepositReceipt { - /// Returns length of RLP-encoded receipt fields with the given [`Bloom`] without an RLP header. - pub fn rlp_encoded_fields_length_with_bloom(&self, bloom: &Bloom) -> usize { - self.inner.rlp_encoded_fields_length_with_bloom(bloom) - + self.deposit_nonce.map_or(0, |nonce| nonce.length()) - + self.deposit_receipt_version.map_or(0, |version| version.length()) - } - - /// RLP-encodes receipt fields with the given [`Bloom`] without an RLP header. 
- pub fn rlp_encode_fields_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { - self.inner.rlp_encode_fields_with_bloom(bloom, out); - - if let Some(nonce) = self.deposit_nonce { - nonce.encode(out); - } - if let Some(version) = self.deposit_receipt_version { - version.encode(out); - } - } - - /// Returns RLP header for this receipt encoding with the given [`Bloom`]. - pub fn rlp_header_with_bloom(&self, bloom: &Bloom) -> Header { - Header { list: true, payload_length: self.rlp_encoded_fields_length_with_bloom(bloom) } - } -} - -impl OpDepositReceipt { - /// RLP-decodes receipt's field with a [`Bloom`]. - /// - /// Does not expect an RLP header. - pub fn rlp_decode_fields_with_bloom( - buf: &mut &[u8], - ) -> alloy_rlp::Result> { - let ReceiptWithBloom { receipt: inner, logs_bloom } = - Receipt::rlp_decode_fields_with_bloom(buf)?; - - let deposit_nonce = (!buf.is_empty()).then(|| Decodable::decode(buf)).transpose()?; - let deposit_receipt_version = - (!buf.is_empty()).then(|| Decodable::decode(buf)).transpose()?; - - Ok(ReceiptWithBloom { - logs_bloom, - receipt: Self { inner, deposit_nonce, deposit_receipt_version }, - }) - } -} - -impl AsRef> for OpDepositReceipt { - fn as_ref(&self) -> &Receipt { - &self.inner - } -} - -impl From> for Receipt { - fn from(value: OpDepositReceipt) -> Self { - value.into_inner() - } -} - -impl TxReceipt for OpDepositReceipt -where - T: AsRef + Clone + core::fmt::Debug + PartialEq + Eq + Send + Sync, -{ - type Log = T; - - fn status_or_post_state(&self) -> Eip658Value { - self.inner.status_or_post_state() - } - - fn status(&self) -> bool { - self.inner.status() - } - - fn bloom(&self) -> Bloom { - self.inner.bloom_slow() - } - - fn cumulative_gas_used(&self) -> u64 { - self.inner.cumulative_gas_used() - } - - fn logs(&self) -> &[Self::Log] { - self.inner.logs() - } -} - -impl RlpEncodableReceipt for OpDepositReceipt { - fn rlp_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { - 
self.rlp_header_with_bloom(bloom).length_with_payload() - } - - fn rlp_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { - self.rlp_header_with_bloom(bloom).encode(out); - self.rlp_encode_fields_with_bloom(bloom, out); - } -} - -impl RlpDecodableReceipt for OpDepositReceipt { - fn rlp_decode_with_bloom(buf: &mut &[u8]) -> alloy_rlp::Result> { - let header = Header::decode(buf)?; - if !header.list { - return Err(alloy_rlp::Error::UnexpectedString); - } - - if buf.len() < header.payload_length { - return Err(alloy_rlp::Error::InputTooShort); - } - - // Note: we pass a separate buffer to `rlp_decode_fields_with_bloom` to allow it decode - // optional fields based on the remaining length. - let mut fields_buf = &buf[..header.payload_length]; - let this = Self::rlp_decode_fields_with_bloom(&mut fields_buf)?; - - if !fields_buf.is_empty() { - return Err(alloy_rlp::Error::UnexpectedLength); - } - - buf.advance(header.payload_length); - - Ok(this) - } -} - -impl OpTxReceipt for OpDepositReceipt { - fn deposit_nonce(&self) -> Option { - self.deposit_nonce - } - - fn deposit_receipt_version(&self) -> Option { - self.deposit_receipt_version - } -} - -impl From> for OpDepositReceipt { - fn from(value: ReceiptWithBloom) -> Self { - value.receipt - } -} - -#[cfg(feature = "arbitrary")] -impl<'a, T> arbitrary::Arbitrary<'a> for OpDepositReceipt -where - T: arbitrary::Arbitrary<'a>, -{ - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - use alloc::vec::Vec; - let deposit_nonce = Option::::arbitrary(u)?; - let deposit_receipt_version = - deposit_nonce.is_some().then(|| u64::arbitrary(u)).transpose()?; - Ok(Self { - inner: Receipt { - status: Eip658Value::arbitrary(u)?, - cumulative_gas_used: u64::arbitrary(u)?, - logs: Vec::::arbitrary(u)?, - }, - deposit_nonce, - deposit_receipt_version, - }) - } -} - -/// Bincode-compatible [`OpDepositReceipt`] serde implementation. 
-#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] -pub(crate) mod serde_bincode_compat { - use alloc::borrow::Cow; - use alloy_consensus::Receipt; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use serde_with::{DeserializeAs, SerializeAs}; - - /// Bincode-compatible [`super::OpDepositReceipt`] serde implementation. - /// - /// Intended to use with the [`serde_with::serde_as`] macro in the following way: - /// ```rust - /// use op_alloy_consensus::{OpDepositReceipt, serde_bincode_compat}; - /// use serde::{Deserialize, Serialize, de::DeserializeOwned}; - /// use serde_with::serde_as; - /// - /// #[serde_as] - /// #[derive(Serialize, Deserialize)] - /// struct Data { - /// #[serde_as(as = "serde_bincode_compat::OpDepositReceipt<'_, T>")] - /// receipt: OpDepositReceipt, - /// } - /// ``` - #[derive(Debug, Serialize, Deserialize)] - pub struct OpDepositReceipt<'a, T: Clone> { - logs: Cow<'a, [T]>, - status: bool, - cumulative_gas_used: u64, - deposit_nonce: Option, - deposit_receipt_version: Option, - } - - impl<'a, T: Clone> From<&'a super::OpDepositReceipt> for OpDepositReceipt<'a, T> { - fn from(value: &'a super::OpDepositReceipt) -> Self { - Self { - logs: Cow::Borrowed(&value.inner.logs), - // OP has no post state root variant - status: value.inner.status.coerce_status(), - cumulative_gas_used: value.inner.cumulative_gas_used, - deposit_nonce: value.deposit_nonce, - deposit_receipt_version: value.deposit_receipt_version, - } - } - } - - impl<'a, T: Clone> From> for super::OpDepositReceipt { - fn from(value: OpDepositReceipt<'a, T>) -> Self { - Self { - inner: Receipt { - status: value.status.into(), - cumulative_gas_used: value.cumulative_gas_used, - logs: value.logs.into_owned(), - }, - deposit_nonce: value.deposit_nonce, - deposit_receipt_version: value.deposit_receipt_version, - } - } - } - - impl SerializeAs> for OpDepositReceipt<'_, T> { - fn serialize_as( - source: &super::OpDepositReceipt, - serializer: S, - ) -> Result 
- where - S: Serializer, - { - OpDepositReceipt::<'_, T>::from(source).serialize(serializer) - } - } - - impl<'de, T: Deserialize<'de> + Clone> DeserializeAs<'de, super::OpDepositReceipt> - for OpDepositReceipt<'de, T> - { - fn deserialize_as(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - OpDepositReceipt::<'_, T>::deserialize(deserializer).map(Into::into) - } - } - - #[cfg(test)] - mod tests { - use super::super::{OpDepositReceipt, serde_bincode_compat}; - use alloy_primitives::Log; - use arbitrary::Arbitrary; - use rand::Rng; - use serde::{Deserialize, Serialize, de::DeserializeOwned}; - use serde_with::serde_as; - - #[test] - fn test_tx_deposit_bincode_roundtrip() { - #[serde_as] - #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] - struct Data { - #[serde_as(as = "serde_bincode_compat::OpDepositReceipt<'_,T>")] - transaction: OpDepositReceipt, - } - - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - let mut data = Data { - transaction: OpDepositReceipt::arbitrary(&mut arbitrary::Unstructured::new(&bytes)) - .unwrap(), - }; - // ensure we don't have an invalid poststate variant - data.transaction.inner.status = data.transaction.inner.status.coerce_status().into(); - - let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); - let (decoded, _) = bincode::serde::decode_from_slice::, _>( - &encoded, - bincode::config::legacy(), - ) - .unwrap(); - assert_eq!(decoded, data); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::Receipt; - use alloy_primitives::{Bytes, Log, LogData, address, b256, bytes, hex}; - use alloy_rlp::{Decodable, Encodable}; - - #[cfg(not(feature = "std"))] - use alloc::{vec, vec::Vec}; - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn decode_legacy_receipt() { - let data = hex!( - 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - // EIP658Receipt - let expected = OpDepositReceiptWithBloom { - receipt: OpDepositReceipt { - inner: Receipt { - status: false.into(), - cumulative_gas_used: 0x1, - logs: vec![Log { - address: address!("0000000000000000000000000000000000000011"), - data: LogData::new_unchecked( - vec![ - b256!( - "000000000000000000000000000000000000000000000000000000000000dead" - ), - b256!( - "000000000000000000000000000000000000000000000000000000000000beef" - ), - ], - bytes!("0100ff"), - ), - }], - }, - deposit_nonce: None, - deposit_receipt_version: None, - }, - logs_bloom: [0; 256].into(), - }; - - let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - } - - #[test] - fn gigantic_receipt() { - let receipt = OpDepositReceipt { - inner: Receipt { - cumulative_gas_used: 16747627, - status: true.into(), - logs: vec![ - Log { - address: address!("4bf56695415f725e43c3e04354b604bcfb6dfb6e"), - data: LogData::new_unchecked( - vec![b256!( - "c69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" - )], - Bytes::from(vec![1; 0xffffff]), - ), - }, - Log { - address: address!("faca325c86bf9c2d5b413cd7b90b209be92229c2"), - data: LogData::new_unchecked( - vec![b256!( - 
"8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" - )], - Bytes::from(vec![1; 0xffffff]), - ), - }, - ], - }, - deposit_nonce: None, - deposit_receipt_version: None, - } - .with_bloom(); - - let mut data = vec![]; - - receipt.encode(&mut data); - let decoded = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); - - // receipt.clone().to_compact(&mut data); - // let (decoded, _) = Receipt::from_compact(&data[..], data.len()); - assert_eq!(decoded, receipt); - } - - #[test] - fn regolith_receipt_roundtrip() { - let data = hex!( - "f9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" - ); - - // Deposit Receipt (post-regolith) - let expected = OpDepositReceiptWithBloom { - receipt: OpDepositReceipt { - inner: Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: None, - }, - logs_bloom: [0; 256].into(), - }; - - let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::new(); - receipt.encode(&mut buf); - assert_eq!(buf, &data[..]); - } - - #[test] - fn post_canyon_receipt_roundtrip() { - let data = hex!( - 
"f9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" - ); - - // Deposit Receipt (post-regolith) - let expected = OpDepositReceiptWithBloom { - receipt: OpDepositReceipt { - inner: Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: Some(1), - }, - logs_bloom: [0; 256].into(), - }; - - let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::new(); - expected.encode(&mut buf); - assert_eq!(buf, &data[..]); - } -} diff --git a/op-alloy/crates/consensus/src/receipts/envelope.rs b/op-alloy/crates/consensus/src/receipts/envelope.rs deleted file mode 100644 index 0a8995cca93..00000000000 --- a/op-alloy/crates/consensus/src/receipts/envelope.rs +++ /dev/null @@ -1,434 +0,0 @@ -//! Receipt envelope types for Optimism. - -use crate::{OpDepositReceipt, OpDepositReceiptWithBloom, OpTxType}; -use alloc::vec::Vec; -use alloy_consensus::{Eip658Value, Receipt, ReceiptWithBloom, TxReceipt}; -use alloy_eips::{ - Typed2718, - eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718, IsTyped2718}, -}; -use alloy_primitives::{Bloom, Log, logs_bloom}; -use alloy_rlp::{BufMut, Decodable, Encodable, length_of_length}; - -/// Receipt envelope, as defined in [EIP-2718], modified for OP Stack chains. 
-/// -/// This enum distinguishes between tagged and untagged legacy receipts, as the -/// in-protocol merkle tree may commit to EITHER 0-prefixed or raw. Therefore -/// we must ensure that encoding returns the precise byte-array that was -/// decoded, preserving the presence or absence of the `TransactionType` flag. -/// -/// Transaction receipt payloads are specified in their respective EIPs. -/// -/// [EIP-2718]: https://eips.ethereum.org/EIPS/eip-2718 -#[derive(Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(tag = "type"))] -pub enum OpReceiptEnvelope { - /// Receipt envelope with no type flag. - #[cfg_attr(feature = "serde", serde(rename = "0x0", alias = "0x00"))] - Legacy(ReceiptWithBloom>), - /// Receipt envelope with type flag 1, containing a [EIP-2930] receipt. - /// - /// [EIP-2930]: https://eips.ethereum.org/EIPS/eip-2930 - #[cfg_attr(feature = "serde", serde(rename = "0x1", alias = "0x01"))] - Eip2930(ReceiptWithBloom>), - /// Receipt envelope with type flag 2, containing a [EIP-1559] receipt. - /// - /// [EIP-1559]: https://eips.ethereum.org/EIPS/eip-1559 - #[cfg_attr(feature = "serde", serde(rename = "0x2", alias = "0x02"))] - Eip1559(ReceiptWithBloom>), - /// Receipt envelope with type flag 4, containing a [EIP-7702] receipt. - /// - /// [EIP-7702]: https://eips.ethereum.org/EIPS/eip-7702 - #[cfg_attr(feature = "serde", serde(rename = "0x4", alias = "0x04"))] - Eip7702(ReceiptWithBloom>), - /// Receipt envelope with type flag 126, containing a [deposit] receipt. - /// - /// [deposit]: https://specs.optimism.io/protocol/deposits.html - #[cfg_attr(feature = "serde", serde(rename = "0x7e", alias = "0x7E"))] - Deposit(ReceiptWithBloom>), -} - -impl OpReceiptEnvelope { - /// Creates a new [`OpReceiptEnvelope`] from the given parts. 
- pub fn from_parts<'a>( - status: bool, - cumulative_gas_used: u64, - logs: impl IntoIterator, - tx_type: OpTxType, - deposit_nonce: Option, - deposit_receipt_version: Option, - ) -> Self { - let logs = logs.into_iter().cloned().collect::>(); - let logs_bloom = logs_bloom(&logs); - let inner_receipt = - Receipt { status: Eip658Value::Eip658(status), cumulative_gas_used, logs }; - match tx_type { - OpTxType::Legacy => { - Self::Legacy(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) - } - OpTxType::Eip2930 => { - Self::Eip2930(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) - } - OpTxType::Eip1559 => { - Self::Eip1559(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) - } - OpTxType::Eip7702 => { - Self::Eip7702(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) - } - OpTxType::Deposit => { - let inner = OpDepositReceiptWithBloom { - receipt: OpDepositReceipt { - inner: inner_receipt, - deposit_nonce, - deposit_receipt_version, - }, - logs_bloom, - }; - Self::Deposit(inner) - } - } - } -} - -impl OpReceiptEnvelope { - /// Return the [`OpTxType`] of the inner receipt. - pub const fn tx_type(&self) -> OpTxType { - match self { - Self::Legacy(_) => OpTxType::Legacy, - Self::Eip2930(_) => OpTxType::Eip2930, - Self::Eip1559(_) => OpTxType::Eip1559, - Self::Eip7702(_) => OpTxType::Eip7702, - Self::Deposit(_) => OpTxType::Deposit, - } - } - - /// Return true if the transaction was successful. - pub const fn is_success(&self) -> bool { - self.status() - } - - /// Returns the success status of the receipt's transaction. - pub const fn status(&self) -> bool { - self.as_receipt().unwrap().status.coerce_status() - } - - /// Returns the cumulative gas used at this receipt. - pub const fn cumulative_gas_used(&self) -> u64 { - self.as_receipt().unwrap().cumulative_gas_used - } - - /// Converts the receipt's log type by applying a function to each log. - /// - /// Returns the receipt with the new log type. 
- pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpReceiptEnvelope { - match self { - Self::Legacy(r) => OpReceiptEnvelope::Legacy(r.map_logs(f)), - Self::Eip2930(r) => OpReceiptEnvelope::Eip2930(r.map_logs(f)), - Self::Eip1559(r) => OpReceiptEnvelope::Eip1559(r.map_logs(f)), - Self::Eip7702(r) => OpReceiptEnvelope::Eip7702(r.map_logs(f)), - Self::Deposit(r) => OpReceiptEnvelope::Deposit(r.map_receipt(|r| r.map_logs(f))), - } - } - - /// Return the receipt logs. - pub fn logs(&self) -> &[T] { - &self.as_receipt().unwrap().logs - } - - /// Consumes the type and returns the logs. - pub fn into_logs(self) -> Vec { - self.into_receipt().logs - } - - /// Return the receipt's bloom. - pub const fn logs_bloom(&self) -> &Bloom { - match self { - Self::Legacy(t) => &t.logs_bloom, - Self::Eip2930(t) => &t.logs_bloom, - Self::Eip1559(t) => &t.logs_bloom, - Self::Eip7702(t) => &t.logs_bloom, - Self::Deposit(t) => &t.logs_bloom, - } - } - - /// Return the receipt's deposit_nonce if it is a deposit receipt. - pub fn deposit_nonce(&self) -> Option { - self.as_deposit_receipt().and_then(|r| r.deposit_nonce) - } - - /// Return the receipt's deposit version if it is a deposit receipt. - pub fn deposit_receipt_version(&self) -> Option { - self.as_deposit_receipt().and_then(|r| r.deposit_receipt_version) - } - - /// Returns the deposit receipt if it is a deposit receipt. - pub const fn as_deposit_receipt_with_bloom(&self) -> Option<&OpDepositReceiptWithBloom> { - match self { - Self::Deposit(t) => Some(t), - _ => None, - } - } - - /// Returns the deposit receipt if it is a deposit receipt. - pub const fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt> { - match self { - Self::Deposit(t) => Some(&t.receipt), - _ => None, - } - } - - /// Consumes the type and returns the underlying [`Receipt`]. 
- pub fn into_receipt(self) -> Receipt { - match self { - Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => t.receipt, - Self::Deposit(t) => t.receipt.into_inner(), - } - } - - /// Return the inner receipt. Currently this is infallible, however, future - /// receipt types may be added. - pub const fn as_receipt(&self) -> Option<&Receipt> { - match self { - Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => { - Some(&t.receipt) - } - Self::Deposit(t) => Some(&t.receipt.inner), - } - } -} - -impl OpReceiptEnvelope { - /// Get the length of the inner receipt in the 2718 encoding. - pub fn inner_length(&self) -> usize { - match self { - Self::Legacy(t) => t.length(), - Self::Eip2930(t) => t.length(), - Self::Eip1559(t) => t.length(), - Self::Eip7702(t) => t.length(), - Self::Deposit(t) => t.length(), - } - } - - /// Calculate the length of the rlp payload of the network encoded receipt. - pub fn rlp_payload_length(&self) -> usize { - let length = self.inner_length(); - match self { - Self::Legacy(_) => length, - _ => length + 1, - } - } -} - -impl TxReceipt for OpReceiptEnvelope -where - T: Clone + core::fmt::Debug + PartialEq + Eq + Send + Sync, -{ - type Log = T; - - fn status_or_post_state(&self) -> Eip658Value { - self.as_receipt().unwrap().status - } - - fn status(&self) -> bool { - self.as_receipt().unwrap().status.coerce_status() - } - - /// Return the receipt's bloom. - fn bloom(&self) -> Bloom { - *self.logs_bloom() - } - - fn bloom_cheap(&self) -> Option { - Some(self.bloom()) - } - - /// Returns the cumulative gas used at this receipt. - fn cumulative_gas_used(&self) -> u64 { - self.as_receipt().unwrap().cumulative_gas_used - } - - /// Return the receipt logs. 
- fn logs(&self) -> &[T] { - &self.as_receipt().unwrap().logs - } -} - -impl Encodable for OpReceiptEnvelope { - fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { - self.network_encode(out) - } - - fn length(&self) -> usize { - let mut payload_length = self.rlp_payload_length(); - if !self.is_legacy() { - payload_length += length_of_length(payload_length); - } - payload_length - } -} - -impl Decodable for OpReceiptEnvelope { - fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { - Self::network_decode(buf) - .map_or_else(|_| Err(alloy_rlp::Error::Custom("Unexpected type")), Ok) - } -} - -impl Typed2718 for OpReceiptEnvelope { - fn ty(&self) -> u8 { - let ty = match self { - Self::Legacy(_) => OpTxType::Legacy, - Self::Eip2930(_) => OpTxType::Eip2930, - Self::Eip1559(_) => OpTxType::Eip1559, - Self::Eip7702(_) => OpTxType::Eip7702, - Self::Deposit(_) => OpTxType::Deposit, - }; - ty as u8 - } -} - -impl IsTyped2718 for OpReceiptEnvelope { - fn is_type(type_id: u8) -> bool { - ::is_type(type_id) - } -} - -impl Encodable2718 for OpReceiptEnvelope { - fn encode_2718_len(&self) -> usize { - self.inner_length() + !self.is_legacy() as usize - } - - fn encode_2718(&self, out: &mut dyn BufMut) { - match self.type_flag() { - None => {} - Some(ty) => out.put_u8(ty), - } - match self { - Self::Deposit(t) => t.encode(out), - Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => { - t.encode(out) - } - } - } -} - -impl Decodable2718 for OpReceiptEnvelope { - fn typed_decode(ty: u8, buf: &mut &[u8]) -> Eip2718Result { - match ty.try_into().map_err(|_| Eip2718Error::UnexpectedType(ty))? 
{ - OpTxType::Legacy => { - Err(alloy_rlp::Error::Custom("type-0 eip2718 transactions are not supported") - .into()) - } - OpTxType::Eip1559 => Ok(Self::Eip1559(Decodable::decode(buf)?)), - OpTxType::Eip7702 => Ok(Self::Eip7702(Decodable::decode(buf)?)), - OpTxType::Eip2930 => Ok(Self::Eip2930(Decodable::decode(buf)?)), - OpTxType::Deposit => Ok(Self::Deposit(Decodable::decode(buf)?)), - } - } - - fn fallback_decode(buf: &mut &[u8]) -> Eip2718Result { - Ok(Self::Legacy(Decodable::decode(buf)?)) - } -} - -impl From for OpReceiptEnvelope -where - T: Into>>, -{ - fn from(value: T) -> Self { - Self::Deposit(value.into()) - } -} - -impl From> for Receipt { - fn from(receipt: OpReceiptEnvelope) -> Self { - receipt.into_receipt() - } -} - -#[cfg(all(test, feature = "arbitrary"))] -impl<'a, T> arbitrary::Arbitrary<'a> for OpReceiptEnvelope -where - T: arbitrary::Arbitrary<'a>, -{ - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - match u.int_in_range(0..=4)? { - 0 => Ok(Self::Legacy(ReceiptWithBloom::arbitrary(u)?)), - 1 => Ok(Self::Eip2930(ReceiptWithBloom::arbitrary(u)?)), - 2 => Ok(Self::Eip1559(ReceiptWithBloom::arbitrary(u)?)), - _ => Ok(Self::Deposit(OpDepositReceiptWithBloom::arbitrary(u)?)), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::{Receipt, ReceiptWithBloom}; - use alloy_eips::eip2718::Encodable2718; - use alloy_primitives::{Log, LogData, address, b256, bytes, hex}; - use alloy_rlp::Encodable; - - #[cfg(not(feature = "std"))] - use alloc::vec; - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn encode_legacy_receipt() { - let expected = hex!( - 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - let mut data = vec![]; - let receipt = OpReceiptEnvelope::Legacy(ReceiptWithBloom { - receipt: Receipt { - status: false.into(), - cumulative_gas_used: 0x1, - logs: vec![Log { - address: address!("0000000000000000000000000000000000000011"), - data: LogData::new_unchecked( - vec![ - b256!( - "000000000000000000000000000000000000000000000000000000000000dead" - ), - b256!( - "000000000000000000000000000000000000000000000000000000000000beef" - ), - ], - bytes!("0100ff"), - ), - }], - }, - logs_bloom: [0; 256].into(), - }); - - receipt.network_encode(&mut data); - - // check that the rlp length equals the length of the expected rlp - assert_eq!(receipt.length(), expected.len()); - assert_eq!(data, expected); - } - - #[test] - fn legacy_receipt_from_parts() { - let receipt = - OpReceiptEnvelope::from_parts(true, 100, vec![], OpTxType::Legacy, None, None); - assert!(receipt.status()); - assert_eq!(receipt.cumulative_gas_used(), 100); - assert_eq!(receipt.logs().len(), 0); - assert_eq!(receipt.tx_type(), OpTxType::Legacy); - } - - #[test] - fn deposit_receipt_from_parts() { - let receipt = - OpReceiptEnvelope::from_parts(true, 100, vec![], OpTxType::Deposit, Some(1), Some(2)); - assert!(receipt.status()); - assert_eq!(receipt.cumulative_gas_used(), 
100); - assert_eq!(receipt.logs().len(), 0); - assert_eq!(receipt.tx_type(), OpTxType::Deposit); - assert_eq!(receipt.deposit_nonce(), Some(1)); - assert_eq!(receipt.deposit_receipt_version(), Some(2)); - } -} diff --git a/op-alloy/crates/consensus/src/receipts/receipt.rs b/op-alloy/crates/consensus/src/receipts/receipt.rs deleted file mode 100644 index 9777a174b28..00000000000 --- a/op-alloy/crates/consensus/src/receipts/receipt.rs +++ /dev/null @@ -1,769 +0,0 @@ -//! Optimism receipt type for execution and storage. - -use core::fmt::Debug; - -use super::{OpDepositReceipt, OpTxReceipt}; -use crate::{OpReceiptEnvelope, OpTxType}; -use alloc::vec::Vec; -use alloy_consensus::{ - Eip658Value, Eip2718DecodableReceipt, Eip2718EncodableReceipt, Receipt, ReceiptWithBloom, - RlpDecodableReceipt, RlpEncodableReceipt, TxReceipt, Typed2718, -}; -use alloy_eips::eip2718::{Eip2718Error, Eip2718Result, IsTyped2718}; -use alloy_primitives::{Bloom, Log}; -use alloy_rlp::{Buf, BufMut, Decodable, Encodable, Header}; - -/// Typed Optimism transaction receipt. -/// -/// Receipt containing result of transaction execution. 
-#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[cfg_attr(feature = "serde", serde(tag = "type"))] -pub enum OpReceipt { - /// Legacy receipt - #[cfg_attr(feature = "serde", serde(rename = "0x0", alias = "0x00"))] - Legacy(Receipt), - /// EIP-2930 receipt - #[cfg_attr(feature = "serde", serde(rename = "0x1", alias = "0x01"))] - Eip2930(Receipt), - /// EIP-1559 receipt - #[cfg_attr(feature = "serde", serde(rename = "0x2", alias = "0x02"))] - Eip1559(Receipt), - /// EIP-7702 receipt - #[cfg_attr(feature = "serde", serde(rename = "0x4", alias = "0x04"))] - Eip7702(Receipt), - /// Deposit receipt - #[cfg_attr(feature = "serde", serde(rename = "0x7e", alias = "0x7E"))] - Deposit(OpDepositReceipt), -} - -impl OpReceipt { - /// Returns [`OpTxType`] of the receipt. - pub const fn tx_type(&self) -> OpTxType { - match self { - Self::Legacy(_) => OpTxType::Legacy, - Self::Eip2930(_) => OpTxType::Eip2930, - Self::Eip1559(_) => OpTxType::Eip1559, - Self::Eip7702(_) => OpTxType::Eip7702, - Self::Deposit(_) => OpTxType::Deposit, - } - } - - /// Returns inner [`Receipt`]. - pub const fn as_receipt(&self) -> &Receipt { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt, - Self::Deposit(receipt) => &receipt.inner, - } - } - - /// Returns a mutable reference to the inner [`Receipt`]. - pub const fn as_receipt_mut(&mut self) -> &mut Receipt { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt, - Self::Deposit(receipt) => &mut receipt.inner, - } - } - - /// Consumes this and returns the inner [`Receipt`]. 
- pub fn into_receipt(self) -> Receipt { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt, - Self::Deposit(receipt) => receipt.inner, - } - } - - /// Converts the receipt's log type by applying a function to each log. - /// - /// Returns the receipt with the new log type - pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpReceipt { - match self { - Self::Legacy(receipt) => OpReceipt::Legacy(receipt.map_logs(f)), - Self::Eip2930(receipt) => OpReceipt::Eip2930(receipt.map_logs(f)), - Self::Eip1559(receipt) => OpReceipt::Eip1559(receipt.map_logs(f)), - Self::Eip7702(receipt) => OpReceipt::Eip7702(receipt.map_logs(f)), - Self::Deposit(receipt) => OpReceipt::Deposit(receipt.map_logs(f)), - } - } - - /// Returns length of RLP-encoded receipt fields with the given [`Bloom`] without an RLP header. - pub fn rlp_encoded_fields_length(&self, bloom: &Bloom) -> usize - where - T: Encodable, - { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt.rlp_encoded_fields_length_with_bloom(bloom), - Self::Deposit(receipt) => receipt.rlp_encoded_fields_length_with_bloom(bloom), - } - } - - /// RLP-encodes receipt fields with the given [`Bloom`] without an RLP header. - pub fn rlp_encode_fields(&self, bloom: &Bloom, out: &mut dyn BufMut) - where - T: Encodable, - { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt.rlp_encode_fields_with_bloom(bloom, out), - Self::Deposit(receipt) => receipt.rlp_encode_fields_with_bloom(bloom, out), - } - } - - /// Returns RLP header for inner encoding. - pub fn rlp_header_inner(&self, bloom: &Bloom) -> Header - where - T: Encodable, - { - Header { list: true, payload_length: self.rlp_encoded_fields_length(bloom) } - } - - /// Returns RLP header for inner encoding without bloom. 
- pub fn rlp_header_without_bloom(&self) -> Header - where - T: Encodable, - { - Header { list: true, payload_length: self.rlp_encoded_fields_length_without_bloom() } - } - - /// RLP-decodes the receipt from the provided buffer. This does not expect a type byte or - /// network header. - pub fn rlp_decode_inner( - buf: &mut &[u8], - tx_type: OpTxType, - ) -> alloy_rlp::Result> - where - T: Decodable, - { - match tx_type { - OpTxType::Legacy => { - let ReceiptWithBloom { receipt, logs_bloom } = - RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; - Ok(ReceiptWithBloom { receipt: Self::Legacy(receipt), logs_bloom }) - } - OpTxType::Eip2930 => { - let ReceiptWithBloom { receipt, logs_bloom } = - RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; - Ok(ReceiptWithBloom { receipt: Self::Eip2930(receipt), logs_bloom }) - } - OpTxType::Eip1559 => { - let ReceiptWithBloom { receipt, logs_bloom } = - RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; - Ok(ReceiptWithBloom { receipt: Self::Eip1559(receipt), logs_bloom }) - } - OpTxType::Eip7702 => { - let ReceiptWithBloom { receipt, logs_bloom } = - RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; - Ok(ReceiptWithBloom { receipt: Self::Eip7702(receipt), logs_bloom }) - } - OpTxType::Deposit => { - let ReceiptWithBloom { receipt, logs_bloom } = - RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; - Ok(ReceiptWithBloom { receipt: Self::Deposit(receipt), logs_bloom }) - } - } - } - - /// RLP-encodes receipt fields without an RLP header. 
- pub fn rlp_encode_fields_without_bloom(&self, out: &mut dyn BufMut) - where - T: Encodable, - { - self.tx_type().encode(out); - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => { - receipt.status.encode(out); - receipt.cumulative_gas_used.encode(out); - receipt.logs.encode(out); - } - Self::Deposit(receipt) => { - receipt.inner.status.encode(out); - receipt.inner.cumulative_gas_used.encode(out); - receipt.inner.logs.encode(out); - if let Some(nonce) = receipt.deposit_nonce { - nonce.encode(out); - } - if let Some(version) = receipt.deposit_receipt_version { - version.encode(out); - } - } - } - } - - /// Returns length of RLP-encoded receipt fields without an RLP header. - pub fn rlp_encoded_fields_length_without_bloom(&self) -> usize - where - T: Encodable, - { - self.tx_type().length() - + match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => { - receipt.status.length() - + receipt.cumulative_gas_used.length() - + receipt.logs.length() - } - Self::Deposit(receipt) => { - receipt.inner.status.length() - + receipt.inner.cumulative_gas_used.length() - + receipt.inner.logs.length() - + receipt.deposit_nonce.map_or(0, |nonce| nonce.length()) - + receipt.deposit_receipt_version.map_or(0, |version| version.length()) - } - } - } - - /// RLP-decodes the receipt from the provided buffer without bloom. 
- pub fn rlp_decode_fields_without_bloom(buf: &mut &[u8]) -> alloy_rlp::Result - where - T: Decodable, - { - let tx_type = OpTxType::decode(buf)?; - let status = Decodable::decode(buf)?; - let cumulative_gas_used = Decodable::decode(buf)?; - let logs = Decodable::decode(buf)?; - - let mut deposit_nonce = None; - let mut deposit_receipt_version = None; - - // For deposit receipts, try to decode nonce and version if they exist - if tx_type == OpTxType::Deposit && !buf.is_empty() { - deposit_nonce = Some(Decodable::decode(buf)?); - if !buf.is_empty() { - deposit_receipt_version = Some(Decodable::decode(buf)?); - } - } - - match tx_type { - OpTxType::Legacy => Ok(Self::Legacy(Receipt { status, cumulative_gas_used, logs })), - OpTxType::Eip2930 => Ok(Self::Eip2930(Receipt { status, cumulative_gas_used, logs })), - OpTxType::Eip1559 => Ok(Self::Eip1559(Receipt { status, cumulative_gas_used, logs })), - OpTxType::Eip7702 => Ok(Self::Eip7702(Receipt { status, cumulative_gas_used, logs })), - OpTxType::Deposit => Ok(Self::Deposit(OpDepositReceipt { - inner: Receipt { status, cumulative_gas_used, logs }, - deposit_nonce, - deposit_receipt_version, - })), - } - } -} - -impl Eip2718EncodableReceipt for OpReceipt { - fn eip2718_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { - !self.tx_type().is_legacy() as usize + self.rlp_header_inner(bloom).length_with_payload() - } - - fn eip2718_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { - if !self.tx_type().is_legacy() { - out.put_u8(self.tx_type() as u8); - } - self.rlp_header_inner(bloom).encode(out); - self.rlp_encode_fields(bloom, out); - } -} - -impl Eip2718DecodableReceipt for OpReceipt { - fn typed_decode_with_bloom(ty: u8, buf: &mut &[u8]) -> Eip2718Result> { - let tx_type = OpTxType::try_from(ty).map_err(|_| Eip2718Error::UnexpectedType(ty))?; - Ok(Self::rlp_decode_inner(buf, tx_type)?) 
- } - - fn fallback_decode_with_bloom(buf: &mut &[u8]) -> Eip2718Result> { - Ok(Self::rlp_decode_inner(buf, OpTxType::Legacy)?) - } -} - -impl RlpEncodableReceipt for OpReceipt { - fn rlp_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { - let mut len = self.eip2718_encoded_length_with_bloom(bloom); - if !self.tx_type().is_legacy() { - len += Header { - list: false, - payload_length: self.eip2718_encoded_length_with_bloom(bloom), - } - .length(); - } - - len - } - - fn rlp_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { - if !self.tx_type().is_legacy() { - Header { list: false, payload_length: self.eip2718_encoded_length_with_bloom(bloom) } - .encode(out); - } - self.eip2718_encode_with_bloom(bloom, out); - } -} - -impl RlpDecodableReceipt for OpReceipt { - fn rlp_decode_with_bloom(buf: &mut &[u8]) -> alloy_rlp::Result> { - let header_buf = &mut &**buf; - let header = Header::decode(header_buf)?; - - // Legacy receipt, reuse initial buffer without advancing - if header.list { - return Self::rlp_decode_inner(buf, OpTxType::Legacy); - } - - // Otherwise, advance the buffer and try decoding type flag followed by receipt - *buf = *header_buf; - - let remaining = buf.len(); - let tx_type = OpTxType::decode(buf)?; - let this = Self::rlp_decode_inner(buf, tx_type)?; - - if buf.len() + header.payload_length != remaining { - return Err(alloy_rlp::Error::UnexpectedLength); - } - - Ok(this) - } -} - -impl Encodable for OpReceipt { - fn encode(&self, out: &mut dyn BufMut) { - self.rlp_header_without_bloom().encode(out); - self.rlp_encode_fields_without_bloom(out); - } - - fn length(&self) -> usize { - self.rlp_header_without_bloom().length_with_payload() - } -} - -impl Decodable for OpReceipt { - fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { - let header = Header::decode(buf)?; - if !header.list { - return Err(alloy_rlp::Error::UnexpectedString); - } - - if buf.len() < header.payload_length { - return Err(alloy_rlp::Error::InputTooShort); - } - let 
mut fields_buf = &buf[..header.payload_length]; - let this = Self::rlp_decode_fields_without_bloom(&mut fields_buf)?; - - if !fields_buf.is_empty() { - return Err(alloy_rlp::Error::UnexpectedLength); - } - - buf.advance(header.payload_length); - - Ok(this) - } -} - -impl> TxReceipt for OpReceipt { - type Log = T; - - fn status_or_post_state(&self) -> Eip658Value { - self.as_receipt().status_or_post_state() - } - - fn status(&self) -> bool { - self.as_receipt().status() - } - - fn bloom(&self) -> Bloom { - self.as_receipt().bloom() - } - - fn cumulative_gas_used(&self) -> u64 { - self.as_receipt().cumulative_gas_used() - } - - fn logs(&self) -> &[Self::Log] { - self.as_receipt().logs() - } - - fn into_logs(self) -> Vec { - match self { - Self::Legacy(receipt) - | Self::Eip2930(receipt) - | Self::Eip1559(receipt) - | Self::Eip7702(receipt) => receipt.logs, - Self::Deposit(receipt) => receipt.inner.logs, - } - } -} - -impl Typed2718 for OpReceipt { - fn ty(&self) -> u8 { - self.tx_type().into() - } -} - -impl IsTyped2718 for OpReceipt { - fn is_type(type_id: u8) -> bool { - ::is_type(type_id) - } -} - -impl> OpTxReceipt for OpReceipt { - fn deposit_nonce(&self) -> Option { - match self { - Self::Deposit(receipt) => receipt.deposit_nonce, - _ => None, - } - } - - fn deposit_receipt_version(&self) -> Option { - match self { - Self::Deposit(receipt) => receipt.deposit_receipt_version, - _ => None, - } - } -} - -impl From for OpReceipt { - fn from(envelope: super::OpReceiptEnvelope) -> Self { - match envelope { - super::OpReceiptEnvelope::Legacy(receipt) => Self::Legacy(receipt.receipt), - super::OpReceiptEnvelope::Eip2930(receipt) => Self::Eip2930(receipt.receipt), - super::OpReceiptEnvelope::Eip1559(receipt) => Self::Eip1559(receipt.receipt), - super::OpReceiptEnvelope::Eip7702(receipt) => Self::Eip7702(receipt.receipt), - super::OpReceiptEnvelope::Deposit(receipt) => Self::Deposit(OpDepositReceipt { - deposit_nonce: receipt.receipt.deposit_nonce, - 
deposit_receipt_version: receipt.receipt.deposit_receipt_version, - inner: receipt.receipt.inner, - }), - } - } -} - -impl From>> for OpReceiptEnvelope { - fn from(value: ReceiptWithBloom>) -> Self { - let (receipt, logs_bloom) = value.into_components(); - match receipt { - OpReceipt::Legacy(receipt) => Self::Legacy(ReceiptWithBloom { receipt, logs_bloom }), - OpReceipt::Eip2930(receipt) => Self::Eip2930(ReceiptWithBloom { receipt, logs_bloom }), - OpReceipt::Eip1559(receipt) => Self::Eip1559(ReceiptWithBloom { receipt, logs_bloom }), - OpReceipt::Eip7702(receipt) => Self::Eip7702(ReceiptWithBloom { receipt, logs_bloom }), - OpReceipt::Deposit(receipt) => Self::Deposit(ReceiptWithBloom { receipt, logs_bloom }), - } - } -} - -/// Bincode-compatible serde implementations for opreceipt type. -#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] -pub(crate) mod serde_bincode_compat { - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use serde_with::{DeserializeAs, SerializeAs}; - - /// Bincode-compatible [`super::OpReceipt`] serde implementation. 
- /// - /// Intended to use with the [`serde_with::serde_as`] macro in the following way: - /// ```rust - /// use op_alloy_consensus::{OpReceipt, serde_bincode_compat}; - /// use serde::{Deserialize, Serialize, de::DeserializeOwned}; - /// use serde_with::serde_as; - /// - /// #[serde_as] - /// #[derive(Serialize, Deserialize)] - /// struct Data { - /// #[serde_as(as = "serde_bincode_compat::OpReceipt<'_>")] - /// receipt: OpReceipt, - /// } - /// ``` - #[derive(Debug, Serialize, Deserialize)] - pub enum OpReceipt<'a> { - /// Legacy receipt - Legacy(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-2930 receipt - Eip2930(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-1559 receipt - Eip1559(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-7702 receipt - Eip7702(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// Deposit receipt - Deposit(crate::serde_bincode_compat::OpDepositReceipt<'a, alloy_primitives::Log>), - } - - impl<'a> From<&'a super::OpReceipt> for OpReceipt<'a> { - fn from(value: &'a super::OpReceipt) -> Self { - match value { - super::OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), - super::OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), - super::OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), - super::OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), - super::OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), - } - } - } - - impl<'a> From> for super::OpReceipt { - fn from(value: OpReceipt<'a>) -> Self { - match value { - OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), - OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), - OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), - OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), - OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), - } - } - } - - impl 
SerializeAs for OpReceipt<'_> { - fn serialize_as(source: &super::OpReceipt, serializer: S) -> Result - where - S: Serializer, - { - OpReceipt::<'_>::from(source).serialize(serializer) - } - } - - impl<'de> DeserializeAs<'de, super::OpReceipt> for OpReceipt<'de> { - fn deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - OpReceipt::<'_>::deserialize(deserializer).map(Into::into) - } - } - - #[cfg(test)] - mod tests { - use crate::OpReceipt; - use arbitrary::Arbitrary; - use rand::Rng; - use serde::{Deserialize, Serialize}; - use serde_with::serde_as; - - #[test] - fn test_tx_bincode_roundtrip() { - #[serde_as] - #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] - struct Data { - #[serde_as(as = "super::OpReceipt<'_>")] - receipt: OpReceipt, - } - - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - let mut data = Data { - receipt: OpReceipt::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), - }; - let success = data.receipt.as_receipt_mut().status.coerce_status(); - // // ensure we don't have an invalid poststate variant - data.receipt.as_receipt_mut().status = success.into(); - - let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); - let (decoded, _) = - bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) - .unwrap(); - assert_eq!(decoded, data); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - use alloy_eips::Encodable2718; - use alloy_primitives::{Bytes, address, b256, bytes, hex_literal::hex}; - use alloy_rlp::Encodable; - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn encode_legacy_receipt() { - let expected = hex!( - 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - let mut data = Vec::with_capacity(expected.length()); - let receipt = ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt { - status: Eip658Value::Eip658(false), - cumulative_gas_used: 0x1, - logs: vec![Log::new_unchecked( - address!("0x0000000000000000000000000000000000000011"), - vec![ - b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), - b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), - ], - bytes!("0100ff"), - )], - }), - logs_bloom: [0; 256].into(), - }; - - receipt.encode(&mut data); - - // check that the rlp length equals the length of the expected rlp - assert_eq!(receipt.length(), expected.len()); - assert_eq!(data, expected); - } - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn decode_legacy_receipt() { - let data = hex!( - 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - // EIP658Receipt - let expected = ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt { - status: Eip658Value::Eip658(false), - cumulative_gas_used: 0x1, - logs: vec![Log::new_unchecked( - address!("0x0000000000000000000000000000000000000011"), - vec![ - b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), - b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), - ], - bytes!("0100ff"), - )], - }), - logs_bloom: [0; 256].into(), - }; - - let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - } - - #[test] - fn decode_deposit_receipt_regolith_roundtrip() { - let data = hex!( - "b901107ef9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" - ); - - // Deposit Receipt 
(post-regolith) - let expected: ReceiptWithBloom = ReceiptWithBloom { - receipt: OpReceipt::Deposit(OpDepositReceipt { - inner: Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 46913, - logs: vec![], - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: None, - }), - logs_bloom: [0; 256].into(), - }; - - let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::with_capacity(data.len()); - receipt.encode(&mut buf); - assert_eq!(buf, &data[..]); - } - - #[test] - fn decode_deposit_receipt_canyon_roundtrip() { - let data = hex!( - "b901117ef9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" - ); - - // Deposit Receipt (post-canyon) - let expected: ReceiptWithBloom = ReceiptWithBloom { - receipt: OpReceipt::Deposit(OpDepositReceipt { - inner: Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 46913, - logs: vec![], - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: Some(1), - }), - logs_bloom: [0; 256].into(), - }; - - let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::with_capacity(data.len()); - expected.encode(&mut buf); - assert_eq!(buf, &data[..]); - } - - #[test] - fn gigantic_receipt() { - let receipt = OpReceipt::Legacy(Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 16747627, - logs: vec![ - Log::new_unchecked( - 
address!("0x4bf56695415f725e43c3e04354b604bcfb6dfb6e"), - vec![b256!( - "0xc69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" - )], - Bytes::from(vec![1; 0xffffff]), - ), - Log::new_unchecked( - address!("0xfaca325c86bf9c2d5b413cd7b90b209be92229c2"), - vec![b256!( - "0x8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" - )], - Bytes::from(vec![1; 0xffffff]), - ), - ], - }); - - let _bloom = receipt.bloom(); - let mut encoded = vec![]; - receipt.encode(&mut encoded); - - let decoded = OpReceipt::decode(&mut &encoded[..]).unwrap(); - assert_eq!(decoded, receipt); - } - - #[test] - fn test_encode_2718_length() { - let receipt: ReceiptWithBloom = ReceiptWithBloom { - receipt: OpReceipt::Eip1559(Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 21000, - logs: vec![], - }), - logs_bloom: Bloom::default(), - }; - - let encoded = receipt.encoded_2718(); - assert_eq!( - encoded.len(), - receipt.encode_2718_len(), - "Encoded length should match the actual encoded data length" - ); - - // Test for legacy receipt as well - let legacy_receipt: ReceiptWithBloom = ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 21000, - logs: vec![], - }), - logs_bloom: Bloom::default(), - }; - - let legacy_encoded = legacy_receipt.encoded_2718(); - assert_eq!( - legacy_encoded.len(), - legacy_receipt.encode_2718_len(), - "Encoded length for legacy receipt should match the actual encoded data length" - ); - } -} diff --git a/op-alloy/crates/consensus/src/transaction/deposit.rs b/op-alloy/crates/consensus/src/transaction/deposit.rs deleted file mode 100644 index 01c5c83fa7f..00000000000 --- a/op-alloy/crates/consensus/src/transaction/deposit.rs +++ /dev/null @@ -1,733 +0,0 @@ -//! Deposit Transaction type. 
- -use super::OpTxType; -use alloc::vec::Vec; -use alloy_consensus::{Sealable, Transaction, Typed2718}; -use alloy_eips::{ - eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718, IsTyped2718}, - eip2930::AccessList, -}; -use alloy_primitives::{Address, B256, Bytes, ChainId, Signature, TxHash, TxKind, U256, keccak256}; -use alloy_rlp::{BufMut, Decodable, Encodable, Header}; -use core::mem; - -/// Deposit transactions, also known as deposits are initiated on L1, and executed on L2. -#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -pub struct TxDeposit { - /// Hash that uniquely identifies the source of the deposit. - pub source_hash: B256, - /// The address of the sender account. - pub from: Address, - /// The address of the recipient account, or the null (zero-length) address if the deposited - /// transaction is a contract creation. - #[cfg_attr(feature = "serde", serde(default, skip_serializing_if = "TxKind::is_create"))] - pub to: TxKind, - /// The ETH value to mint on L2. - #[cfg_attr(feature = "serde", serde(default, with = "alloy_serde::quantity"))] - pub mint: u128, - /// The ETH value to send to the recipient account. - pub value: U256, - /// The gas limit for the L2 transaction. - #[cfg_attr(feature = "serde", serde(with = "alloy_serde::quantity", rename = "gas"))] - pub gas_limit: u64, - /// Field indicating if this transaction is exempt from the L2 gas limit. - #[cfg_attr( - feature = "serde", - serde( - default, - with = "alloy_serde::quantity", - rename = "isSystemTx", - skip_serializing_if = "core::ops::Not::not" - ) - )] - pub is_system_transaction: bool, - /// Input has two uses depending if transaction is Create or Call (if `to` field is None or - /// Some). 
- pub input: Bytes, -} - -impl TxDeposit { - /// Decodes the inner [TxDeposit] fields from RLP bytes. - /// - /// NOTE: This assumes a RLP header has already been decoded, and _just_ decodes the following - /// RLP fields in the following order: - /// - /// - `source_hash` - /// - `from` - /// - `to` - /// - `mint` - /// - `value` - /// - `gas_limit` - /// - `is_system_transaction` - /// - `input` - pub fn rlp_decode_fields(buf: &mut &[u8]) -> alloy_rlp::Result { - Ok(Self { - source_hash: Decodable::decode(buf)?, - from: Decodable::decode(buf)?, - to: Decodable::decode(buf)?, - mint: Decodable::decode(buf)?, - value: Decodable::decode(buf)?, - gas_limit: Decodable::decode(buf)?, - is_system_transaction: Decodable::decode(buf)?, - input: Decodable::decode(buf)?, - }) - } - - /// Decodes the transaction from RLP bytes. - pub fn rlp_decode(buf: &mut &[u8]) -> alloy_rlp::Result { - let header = Header::decode(buf)?; - if !header.list { - return Err(alloy_rlp::Error::UnexpectedString); - } - let remaining = buf.len(); - - if header.payload_length > remaining { - return Err(alloy_rlp::Error::InputTooShort); - } - - let this = Self::rlp_decode_fields(buf)?; - - if buf.len() + header.payload_length != remaining { - return Err(alloy_rlp::Error::UnexpectedLength); - } - - Ok(this) - } - - /// Outputs the length of the transaction's fields, without a RLP header or length of the - /// eip155 fields. - pub(crate) fn rlp_encoded_fields_length(&self) -> usize { - self.source_hash.length() - + self.from.length() - + self.to.length() - + self.mint.length() - + self.value.length() - + self.gas_limit.length() - + self.is_system_transaction.length() - + self.input.0.length() - } - - /// Encodes only the transaction's fields into the desired buffer, without a RLP header. 
- /// - pub(crate) fn rlp_encode_fields(&self, out: &mut dyn alloy_rlp::BufMut) { - self.source_hash.encode(out); - self.from.encode(out); - self.to.encode(out); - self.mint.encode(out); - self.value.encode(out); - self.gas_limit.encode(out); - self.is_system_transaction.encode(out); - self.input.encode(out); - } - - /// Calculates a heuristic for the in-memory size of the [TxDeposit] transaction. - #[inline] - pub fn size(&self) -> usize { - mem::size_of::() + // source_hash - mem::size_of::
() + // from - self.to.size() + // to - mem::size_of::() + // mint - mem::size_of::() + // value - mem::size_of::() + // gas_limit - mem::size_of::() + // is_system_transaction - self.input.len() // input - } - - /// Get the transaction type - pub(crate) const fn tx_type(&self) -> OpTxType { - OpTxType::Deposit - } - - /// Create an rlp header for the transaction. - fn rlp_header(&self) -> Header { - Header { list: true, payload_length: self.rlp_encoded_fields_length() } - } - - /// RLP encodes the transaction. - pub fn rlp_encode(&self, out: &mut dyn BufMut) { - self.rlp_header().encode(out); - self.rlp_encode_fields(out); - } - - /// Get the length of the transaction when RLP encoded. - pub fn rlp_encoded_length(&self) -> usize { - self.rlp_header().length_with_payload() - } - - /// Get the length of the transaction when EIP-2718 encoded. This is the - /// 1 byte type flag + the length of the RLP encoded transaction. - pub fn eip2718_encoded_length(&self) -> usize { - self.rlp_encoded_length() + 1 - } - - fn network_header(&self) -> Header { - Header { list: false, payload_length: self.eip2718_encoded_length() } - } - - /// Get the length of the transaction when network encoded. This is the - /// EIP-2718 encoded length with an outer RLP header. - pub fn network_encoded_length(&self) -> usize { - self.network_header().length_with_payload() - } - - /// Network encode the transaction with the given signature. - pub fn network_encode(&self, out: &mut dyn BufMut) { - self.network_header().encode(out); - self.encode_2718(out); - } - - /// Calculate the transaction hash. - pub fn tx_hash(&self) -> TxHash { - let mut buf = Vec::with_capacity(self.eip2718_encoded_length()); - self.encode_2718(&mut buf); - keccak256(&buf) - } - - /// Returns the signature for the optimism deposit transactions, which don't include a - /// signature. 
- pub const fn signature() -> Signature { - Signature::new(U256::ZERO, U256::ZERO, false) - } -} - -impl Typed2718 for TxDeposit { - fn ty(&self) -> u8 { - OpTxType::Deposit as u8 - } -} - -impl IsTyped2718 for TxDeposit { - fn is_type(ty: u8) -> bool { - OpTxType::Deposit as u8 == ty - } -} - -impl Transaction for TxDeposit { - fn chain_id(&self) -> Option { - None - } - - fn nonce(&self) -> u64 { - 0u64 - } - - fn gas_limit(&self) -> u64 { - self.gas_limit - } - - fn gas_price(&self) -> Option { - None - } - - fn max_fee_per_gas(&self) -> u128 { - 0 - } - - fn max_priority_fee_per_gas(&self) -> Option { - None - } - - fn max_fee_per_blob_gas(&self) -> Option { - None - } - - fn priority_fee_or_price(&self) -> u128 { - 0 - } - - fn effective_gas_price(&self, _: Option) -> u128 { - 0 - } - - fn is_dynamic_fee(&self) -> bool { - false - } - - fn kind(&self) -> TxKind { - self.to - } - - fn is_create(&self) -> bool { - self.to.is_create() - } - - fn value(&self) -> U256 { - self.value - } - - fn input(&self) -> &Bytes { - &self.input - } - - fn access_list(&self) -> Option<&AccessList> { - None - } - - fn blob_versioned_hashes(&self) -> Option<&[B256]> { - None - } - - fn authorization_list(&self) -> Option<&[alloy_eips::eip7702::SignedAuthorization]> { - None - } -} - -impl Encodable2718 for TxDeposit { - fn type_flag(&self) -> Option { - Some(OpTxType::Deposit as u8) - } - - fn encode_2718_len(&self) -> usize { - self.eip2718_encoded_length() - } - - fn encode_2718(&self, out: &mut dyn alloy_rlp::BufMut) { - out.put_u8(self.tx_type() as u8); - self.rlp_encode(out); - } -} - -impl Decodable2718 for TxDeposit { - fn typed_decode(ty: u8, data: &mut &[u8]) -> Eip2718Result { - let ty: OpTxType = ty.try_into().map_err(|_| Eip2718Error::UnexpectedType(ty))?; - if ty != OpTxType::Deposit as u8 { - return Err(Eip2718Error::UnexpectedType(ty as u8)); - } - let tx = Self::decode(data)?; - Ok(tx) - } - - fn fallback_decode(data: &mut &[u8]) -> Eip2718Result { - let tx = 
Self::decode(data)?; - Ok(tx) - } -} - -impl Encodable for TxDeposit { - fn encode(&self, out: &mut dyn BufMut) { - Header { list: true, payload_length: self.rlp_encoded_fields_length() }.encode(out); - self.rlp_encode_fields(out); - } - - fn length(&self) -> usize { - let payload_length = self.rlp_encoded_fields_length(); - Header { list: true, payload_length }.length() + payload_length - } -} - -impl Decodable for TxDeposit { - fn decode(data: &mut &[u8]) -> alloy_rlp::Result { - Self::rlp_decode(data) - } -} - -impl Sealable for TxDeposit { - fn hash_slow(&self) -> B256 { - self.tx_hash() - } -} - -#[cfg(feature = "alloy-compat")] -impl From for alloy_rpc_types_eth::TransactionRequest { - fn from(tx: TxDeposit) -> Self { - let TxDeposit { - source_hash: _, - from, - to, - mint: _, - value, - gas_limit, - is_system_transaction: _, - input, - } = tx; - - Self { - from: Some(from), - to: Some(to), - value: Some(value), - gas: Some(gas_limit), - input: input.into(), - ..Default::default() - } - } -} - -/// A trait representing a deposit transaction with specific attributes. -pub trait DepositTransaction: Transaction { - /// Returns the hash that uniquely identifies the source of the deposit. - /// - /// # Returns - /// An `Option` containing the source hash if available. - fn source_hash(&self) -> Option; - - /// Returns the optional mint value of the deposit transaction. - /// - /// # Returns - /// An `u128` representing the ETH value to mint on L2, if any. - fn mint(&self) -> u128; - - /// Indicates whether the transaction is exempt from the L2 gas limit. - /// - /// # Returns - /// A `bool` indicating if the transaction is a system transaction. 
- fn is_system_transaction(&self) -> bool; -} - -impl DepositTransaction for TxDeposit { - #[inline] - fn source_hash(&self) -> Option { - Some(self.source_hash) - } - - #[inline] - fn mint(&self) -> u128 { - self.mint - } - - #[inline] - fn is_system_transaction(&self) -> bool { - self.is_system_transaction - } -} - -/// Deposit transactions don't have a signature, however, we include an empty signature in the -/// response for better compatibility. -/// -/// This function can be used as `serialize_with` serde attribute for the [`TxDeposit`] and will -/// flatten [`TxDeposit::signature`] into response. -#[cfg(feature = "serde")] -pub fn serde_deposit_tx_rpc( - value: &T, - serializer: S, -) -> Result { - use serde::Serialize; - - #[derive(Serialize)] - struct SerdeHelper<'a, T> { - #[serde(flatten)] - value: &'a T, - #[serde(flatten)] - signature: Signature, - } - - SerdeHelper { value, signature: TxDeposit::signature() }.serialize(serializer) -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::hex; - use alloy_rlp::BytesMut; - - #[test] - fn test_deposit_transaction_trait() { - let tx = TxDeposit { - source_hash: B256::with_last_byte(42), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::from(1000), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - assert_eq!(tx.source_hash(), Some(B256::with_last_byte(42))); - assert_eq!(tx.mint(), 100); - assert!(tx.is_system_transaction()); - } - - #[test] - fn test_deposit_transaction_without_mint() { - let tx = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 0, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: false, - input: Bytes::default(), - }; - - assert_eq!(tx.source_hash(), Some(B256::default())); - assert_eq!(tx.mint(), 0); - assert!(!tx.is_system_transaction()); - } - - #[test] - fn test_deposit_transaction_to_contract() { - let contract_address = 
Address::with_last_byte(0xFF); - let tx = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::Call(contract_address), - mint: 200, - value: U256::from(500), - gas_limit: 100000, - is_system_transaction: false, - input: Bytes::from_static(&[1, 2, 3]), - }; - - assert_eq!(tx.source_hash(), Some(B256::default())); - assert_eq!(tx.mint(), 200); - assert!(!tx.is_system_transaction()); - assert_eq!(tx.kind(), TxKind::Call(contract_address)); - } - - #[test] - fn test_rlp_roundtrip() { - let bytes = Bytes::from_static(&hex!( - "7ef9015aa044bae9d41b8380d781187b426c6fe43df5fb2fb57bd4466ef6a701e1f01e015694deaddeaddeaddeaddeaddeaddeaddeaddead000194420000000000000000000000000000000000001580808408f0d18001b90104015d8eb900000000000000000000000000000000000000000000000000000000008057650000000000000000000000000000000000000000000000000000000063d96d10000000000000000000000000000000000000000000000000000000000009f35273d89754a1e0387b89520d989d3be9c37c1f32495a88faf1ea05c61121ab0d1900000000000000000000000000000000000000000000000000000000000000010000000000000000000000002d679b567db6187c0c8323fa982cfb88b74dbcc7000000000000000000000000000000000000000000000000000000000000083400000000000000000000000000000000000000000000000000000000000f4240" - )); - let tx_a = TxDeposit::decode(&mut bytes[1..].as_ref()).unwrap(); - let mut buf_a = BytesMut::default(); - tx_a.encode(&mut buf_a); - assert_eq!(&buf_a[..], &bytes[1..]); - } - - #[test] - fn test_encode_decode_fields() { - let original = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - let mut buffer = BytesMut::new(); - original.rlp_encode_fields(&mut buffer); - let decoded = TxDeposit::rlp_decode_fields(&mut &buffer[..]).expect("Failed to decode"); - - assert_eq!(original, decoded); - } - - #[test] - fn test_encode_with_and_without_header() { - 
let tx_deposit = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - let mut buffer_with_header = BytesMut::new(); - tx_deposit.encode(&mut buffer_with_header); - - let mut buffer_without_header = BytesMut::new(); - tx_deposit.rlp_encode_fields(&mut buffer_without_header); - - assert!(buffer_with_header.len() > buffer_without_header.len()); - } - - #[test] - fn test_payload_length() { - let tx_deposit = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - assert!(tx_deposit.size() > tx_deposit.rlp_encoded_fields_length()); - } - - #[test] - fn test_encode_inner_with_and_without_header() { - let tx_deposit = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - let mut buffer_with_header = BytesMut::new(); - tx_deposit.network_encode(&mut buffer_with_header); - - let mut buffer_without_header = BytesMut::new(); - tx_deposit.encode_2718(&mut buffer_without_header); - - assert!(buffer_with_header.len() > buffer_without_header.len()); - } - - #[test] - fn test_payload_length_header() { - let tx_deposit = TxDeposit { - source_hash: B256::default(), - from: Address::default(), - to: TxKind::default(), - mint: 100, - value: U256::default(), - gas_limit: 50000, - is_system_transaction: true, - input: Bytes::default(), - }; - - let total_len = tx_deposit.network_encoded_length(); - let len_without_header = tx_deposit.eip2718_encoded_length(); - - assert!(total_len > len_without_header); - } - #[test] - fn test_deposit_tx_roundtrip() { - let raw_txs = [ - 
"7ef8f8a0871ec5fb6afe7e5ae950bbb4cfd7d7cb277b413e67da806d50834a814b14c9f494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c12000000000000000400000000681c941f0000000001566261000000000000000000000000000000000000000000000000000000005f629c020000000000000000000000000000000000000000000000000000000000000001937badfbcce566e0ba932a3f7659644aa0c6ef019541d3134a1d8cb9f84d45c70000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9", - ]; - - for raw_tx_hex in raw_txs { - let raw_tx = hex::decode(raw_tx_hex).unwrap(); - - let tx = TxDeposit::decode_2718(&mut raw_tx.as_ref()).unwrap(); - let mut encoded = BytesMut::new(); - tx.encode_2718(&mut encoded); - assert_eq!(&encoded[..], &raw_tx[..], "Encoded bytes don't match original"); - - let tx_from_fields = TxDeposit::rlp_decode(&mut &raw_tx[1..]).unwrap(); - let mut encoded_fields = BytesMut::new(); - tx_from_fields.rlp_encode(&mut encoded_fields); - assert_eq!( - &encoded_fields[..], - &raw_tx[1..], - "RLP encoded fields don't match original" - ); - } - } -} - -/// Bincode-compatible [`TxDeposit`] serde implementation. -#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] -pub(super) mod serde_bincode_compat { - use alloc::borrow::Cow; - use alloy_primitives::{Address, B256, Bytes, TxKind, U256}; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use serde_with::{DeserializeAs, SerializeAs}; - - /// Bincode-compatible [`super::TxDeposit`] serde implementation. 
- /// - /// Intended to use with the [`serde_with::serde_as`] macro in the following way: - /// ```rust - /// use op_alloy_consensus::{TxDeposit, serde_bincode_compat}; - /// use serde::{Deserialize, Serialize}; - /// use serde_with::serde_as; - /// - /// #[serde_as] - /// #[derive(Serialize, Deserialize)] - /// struct Data { - /// #[serde_as(as = "serde_bincode_compat::TxDeposit")] - /// transaction: TxDeposit, - /// } - /// ``` - #[derive(Debug, Serialize, Deserialize)] - pub struct TxDeposit<'a> { - source_hash: B256, - from: Address, - #[serde(default)] - to: TxKind, - #[serde(default)] - mint: u128, - value: U256, - gas_limit: u64, - is_system_transaction: bool, - input: Cow<'a, Bytes>, - } - - impl<'a> From<&'a super::TxDeposit> for TxDeposit<'a> { - fn from(value: &'a super::TxDeposit) -> Self { - Self { - source_hash: value.source_hash, - from: value.from, - to: value.to, - mint: value.mint, - value: value.value, - gas_limit: value.gas_limit, - is_system_transaction: value.is_system_transaction, - input: Cow::Borrowed(&value.input), - } - } - } - - impl<'a> From> for super::TxDeposit { - fn from(value: TxDeposit<'a>) -> Self { - Self { - source_hash: value.source_hash, - from: value.from, - to: value.to, - mint: value.mint, - value: value.value, - gas_limit: value.gas_limit, - is_system_transaction: value.is_system_transaction, - input: value.input.into_owned(), - } - } - } - - impl SerializeAs for TxDeposit<'_> { - fn serialize_as(source: &super::TxDeposit, serializer: S) -> Result - where - S: Serializer, - { - TxDeposit::from(source).serialize(serializer) - } - } - - impl<'de> DeserializeAs<'de, super::TxDeposit> for TxDeposit<'de> { - fn deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - TxDeposit::deserialize(deserializer).map(Into::into) - } - } - - #[cfg(test)] - mod tests { - use arbitrary::Arbitrary; - use rand::Rng; - use serde::{Deserialize, Serialize}; - use serde_with::serde_as; - - use super::super::{TxDeposit, 
serde_bincode_compat}; - - #[test] - fn test_tx_deposit_bincode_roundtrip() { - #[serde_as] - #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] - struct Data { - #[serde_as(as = "serde_bincode_compat::TxDeposit")] - transaction: TxDeposit, - } - - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - let data = Data { - transaction: TxDeposit::arbitrary(&mut arbitrary::Unstructured::new(&bytes)) - .unwrap(), - }; - - let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); - let (decoded, _) = - bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) - .unwrap(); - assert_eq!(decoded, data); - } - } -} diff --git a/op-alloy/crates/consensus/src/transaction/envelope.rs b/op-alloy/crates/consensus/src/transaction/envelope.rs deleted file mode 100644 index 2f2e0cffc33..00000000000 --- a/op-alloy/crates/consensus/src/transaction/envelope.rs +++ /dev/null @@ -1,786 +0,0 @@ -use crate::{ - OpPooledTransaction, TxDeposit, - transaction::{OpDepositInfo, OpTransactionInfo}, -}; -use alloy_consensus::{ - EthereumTxEnvelope, Extended, Sealable, Sealed, SignableTransaction, Signed, - TransactionEnvelope, TxEip1559, TxEip2930, TxEip7702, TxEnvelope, TxLegacy, - error::ValueError, - transaction::{TransactionInfo, TxHashRef}, -}; -use alloy_eips::eip2718::Encodable2718; -use alloy_primitives::{B256, Bytes, Signature, TxHash}; - -/// The Ethereum [EIP-2718] Transaction Envelope, modified for OP Stack chains. -/// -/// # Note: -/// -/// This enum distinguishes between tagged and untagged legacy transactions, as -/// the in-protocol merkle tree may commit to EITHER 0-prefixed or raw. -/// Therefore we must ensure that encoding returns the precise byte-array that -/// was decoded, preserving the presence or absence of the `TransactionType` -/// flag. 
-/// -/// [EIP-2718]: https://eips.ethereum.org/EIPS/eip-2718 -#[derive(Debug, Clone, TransactionEnvelope)] -#[envelope(tx_type_name = OpTxType, typed = OpTypedTransaction, serde_cfg(feature = "serde"))] -pub enum OpTxEnvelope { - /// An untagged [`TxLegacy`]. - #[envelope(ty = 0)] - Legacy(Signed), - /// A [`TxEip2930`] tagged with type 1. - #[envelope(ty = 1)] - Eip2930(Signed), - /// A [`TxEip1559`] tagged with type 2. - #[envelope(ty = 2)] - Eip1559(Signed), - /// A [`TxEip7702`] tagged with type 4. - #[envelope(ty = 4)] - Eip7702(Signed), - /// A [`TxDeposit`] tagged with type 0x7E. - #[envelope(ty = 126)] - #[serde(serialize_with = "crate::serde_deposit_tx_rpc")] - Deposit(Sealed), -} - -/// Represents an Optimism transaction envelope. -/// -/// Compared to Ethereum it can tell whether the transaction is a deposit. -pub trait OpTransaction { - /// Returns `true` if the transaction is a deposit. - fn is_deposit(&self) -> bool; - - /// Returns `Some` if the transaction is a deposit. 
- fn as_deposit(&self) -> Option<&Sealed>; -} - -impl OpTransaction for OpTxEnvelope { - fn is_deposit(&self) -> bool { - self.is_deposit() - } - - fn as_deposit(&self) -> Option<&Sealed> { - self.as_deposit() - } -} - -impl OpTransaction for Extended -where - B: OpTransaction, - T: OpTransaction, -{ - fn is_deposit(&self) -> bool { - match self { - Self::BuiltIn(b) => b.is_deposit(), - Self::Other(t) => t.is_deposit(), - } - } - - fn as_deposit(&self) -> Option<&Sealed> { - match self { - Self::BuiltIn(b) => b.as_deposit(), - Self::Other(t) => t.as_deposit(), - } - } -} - -impl AsRef for OpTxEnvelope { - fn as_ref(&self) -> &Self { - self - } -} - -impl From> for OpTxEnvelope { - fn from(v: Signed) -> Self { - Self::Legacy(v) - } -} - -impl From> for OpTxEnvelope { - fn from(v: Signed) -> Self { - Self::Eip2930(v) - } -} - -impl From> for OpTxEnvelope { - fn from(v: Signed) -> Self { - Self::Eip1559(v) - } -} - -impl From> for OpTxEnvelope { - fn from(v: Signed) -> Self { - Self::Eip7702(v) - } -} - -impl From for OpTxEnvelope { - fn from(v: TxDeposit) -> Self { - v.seal_slow().into() - } -} - -impl From> for OpTxEnvelope { - fn from(value: Signed) -> Self { - let (tx, sig, hash) = value.into_parts(); - match tx { - OpTypedTransaction::Legacy(tx_legacy) => { - let tx = Signed::new_unchecked(tx_legacy, sig, hash); - Self::Legacy(tx) - } - OpTypedTransaction::Eip2930(tx_eip2930) => { - let tx = Signed::new_unchecked(tx_eip2930, sig, hash); - Self::Eip2930(tx) - } - OpTypedTransaction::Eip1559(tx_eip1559) => { - let tx = Signed::new_unchecked(tx_eip1559, sig, hash); - Self::Eip1559(tx) - } - OpTypedTransaction::Eip7702(tx_eip7702) => { - let tx = Signed::new_unchecked(tx_eip7702, sig, hash); - Self::Eip7702(tx) - } - OpTypedTransaction::Deposit(tx) => Self::Deposit(Sealed::new_unchecked(tx, hash)), - } - } -} - -impl From<(OpTypedTransaction, Signature)> for OpTxEnvelope { - fn from(value: (OpTypedTransaction, Signature)) -> Self { - Self::new_unhashed(value.0, 
value.1) - } -} - -impl From> for OpTxEnvelope { - fn from(v: Sealed) -> Self { - Self::Deposit(v) - } -} - -impl From for Extended { - fn from(value: OpTxEnvelope) -> Self { - Self::BuiltIn(value) - } -} - -impl TryFrom> for OpTxEnvelope { - type Error = EthereumTxEnvelope; - - fn try_from(value: EthereumTxEnvelope) -> Result { - Self::try_from_eth_envelope(value) - } -} - -impl TryFrom for TxEnvelope { - type Error = ValueError; - - fn try_from(value: OpTxEnvelope) -> Result { - value.try_into_eth_envelope() - } -} - -#[cfg(feature = "alloy-compat")] -impl From for alloy_rpc_types_eth::TransactionRequest { - fn from(value: OpTxEnvelope) -> Self { - match value { - OpTxEnvelope::Eip2930(tx) => tx.into_parts().0.into(), - OpTxEnvelope::Eip1559(tx) => tx.into_parts().0.into(), - OpTxEnvelope::Eip7702(tx) => tx.into_parts().0.into(), - OpTxEnvelope::Deposit(tx) => tx.into_inner().into(), - OpTxEnvelope::Legacy(tx) => tx.into_parts().0.into(), - } - } -} - -impl OpTxEnvelope { - /// Creates a new enveloped transaction from the given transaction, signature and hash. - /// - /// Caution: This assumes the given hash is the correct transaction hash. - pub fn new_unchecked( - transaction: OpTypedTransaction, - signature: Signature, - hash: B256, - ) -> Self { - Signed::new_unchecked(transaction, signature, hash).into() - } - - /// Creates a new signed transaction from the given typed transaction and signature without the - /// hash. - /// - /// Note: this only calculates the hash on the first [`OpTxEnvelope::hash`] call. - pub fn new_unhashed(transaction: OpTypedTransaction, signature: Signature) -> Self { - transaction.into_signed(signature).into() - } - - /// Returns true if the transaction is a legacy transaction. - #[inline] - pub const fn is_legacy(&self) -> bool { - matches!(self, Self::Legacy(_)) - } - - /// Returns true if the transaction is an EIP-2930 transaction. 
- #[inline] - pub const fn is_eip2930(&self) -> bool { - matches!(self, Self::Eip2930(_)) - } - - /// Returns true if the transaction is an EIP-1559 transaction. - #[inline] - pub const fn is_eip1559(&self) -> bool { - matches!(self, Self::Eip1559(_)) - } - - /// Returns true if the transaction is a system transaction. - #[inline] - pub const fn is_system_transaction(&self) -> bool { - match self { - Self::Deposit(tx) => tx.inner().is_system_transaction, - _ => false, - } - } - - /// Attempts to convert the envelope into the pooled variant. - /// - /// Returns an error if the envelope's variant is incompatible with the pooled format: - /// [`TxDeposit`]. - pub fn try_into_pooled(self) -> Result> { - match self { - Self::Legacy(tx) => Ok(tx.into()), - Self::Eip2930(tx) => Ok(tx.into()), - Self::Eip1559(tx) => Ok(tx.into()), - Self::Eip7702(tx) => Ok(tx.into()), - Self::Deposit(tx) => { - Err(ValueError::new(tx.into(), "Deposit transactions cannot be pooled")) - } - } - } - - /// Attempts to convert the envelope into the ethereum pooled variant. - /// - /// Returns an error if the envelope's variant is incompatible with the pooled format: - /// [`TxDeposit`]. - pub fn try_into_eth_pooled( - self, - ) -> Result> { - self.try_into_pooled().map(Into::into) - } - - /// Attempts to convert the optimism variant into an ethereum [`TxEnvelope`]. - /// - /// Returns the envelope as error if it is a variant unsupported on ethereum: [`TxDeposit`] - pub fn try_into_eth_envelope(self) -> Result> { - match self { - Self::Legacy(tx) => Ok(tx.into()), - Self::Eip2930(tx) => Ok(tx.into()), - Self::Eip1559(tx) => Ok(tx.into()), - Self::Eip7702(tx) => Ok(tx.into()), - tx @ Self::Deposit(_) => Err(ValueError::new( - tx, - "Deposit transactions cannot be converted to ethereum transaction", - )), - } - } - - /// Helper that creates [`OpTransactionInfo`] by adding [`OpDepositInfo`] obtained from the - /// given closure if this transaction is a deposit and return the [`OpTransactionInfo`]. 
- pub fn try_to_tx_info( - &self, - tx_info: TransactionInfo, - f: F, - ) -> Result - where - F: FnOnce(TxHash) -> Result, E>, - { - let deposit_meta = - if self.is_deposit() { f(self.tx_hash())? } else { None }.unwrap_or_default(); - - Ok(OpTransactionInfo::new(tx_info, deposit_meta)) - } - - /// Attempts to convert an ethereum [`TxEnvelope`] into the optimism variant. - /// - /// Returns the given envelope as error if [`OpTxEnvelope`] doesn't support the variant - /// (EIP-4844) - pub fn try_from_eth_envelope( - tx: EthereumTxEnvelope, - ) -> Result> { - match tx { - EthereumTxEnvelope::Legacy(tx) => Ok(tx.into()), - EthereumTxEnvelope::Eip2930(tx) => Ok(tx.into()), - EthereumTxEnvelope::Eip1559(tx) => Ok(tx.into()), - tx @ EthereumTxEnvelope::::Eip4844(_) => Err(tx), - EthereumTxEnvelope::Eip7702(tx) => Ok(tx.into()), - } - } - - /// Returns mutable access to the input bytes. - /// - /// Caution: modifying this will cause side-effects on the hash. - #[doc(hidden)] - pub const fn input_mut(&mut self) -> &mut Bytes { - match self { - Self::Eip1559(tx) => &mut tx.tx_mut().input, - Self::Eip2930(tx) => &mut tx.tx_mut().input, - Self::Legacy(tx) => &mut tx.tx_mut().input, - Self::Eip7702(tx) => &mut tx.tx_mut().input, - Self::Deposit(tx) => &mut tx.inner_mut().input, - } - } - - /// Attempts to convert an ethereum [`TxEnvelope`] into the optimism variant. 
- /// - /// Returns the given envelope as error if [`OpTxEnvelope`] doesn't support the variant - /// (EIP-4844) - #[cfg(feature = "alloy-compat")] - pub fn try_from_any_envelope( - tx: alloy_network::AnyTxEnvelope, - ) -> Result { - match tx.try_into_envelope() { - Ok(eth) => { - Self::try_from_eth_envelope(eth).map_err(alloy_network::AnyTxEnvelope::Ethereum) - } - Err(err) => match err.into_value() { - alloy_network::AnyTxEnvelope::Unknown(unknown) => { - let Ok(deposit) = unknown.inner.clone().try_into() else { - return Err(alloy_network::AnyTxEnvelope::Unknown(unknown)); - }; - Ok(Self::Deposit(Sealed::new_unchecked(deposit, unknown.hash))) - } - unsupported => Err(unsupported), - }, - } - } - - /// Returns true if the transaction is a deposit transaction. - #[inline] - pub const fn is_deposit(&self) -> bool { - matches!(self, Self::Deposit(_)) - } - - /// Returns the [`TxLegacy`] variant if the transaction is a legacy transaction. - pub const fn as_legacy(&self) -> Option<&Signed> { - match self { - Self::Legacy(tx) => Some(tx), - _ => None, - } - } - - /// Returns the [`TxEip2930`] variant if the transaction is an EIP-2930 transaction. - pub const fn as_eip2930(&self) -> Option<&Signed> { - match self { - Self::Eip2930(tx) => Some(tx), - _ => None, - } - } - - /// Returns the [`TxEip1559`] variant if the transaction is an EIP-1559 transaction. - pub const fn as_eip1559(&self) -> Option<&Signed> { - match self { - Self::Eip1559(tx) => Some(tx), - _ => None, - } - } - - /// Returns the [`TxEip1559`] variant if the transaction is an EIP-1559 transaction. - pub const fn as_deposit(&self) -> Option<&Sealed> { - match self { - Self::Deposit(tx) => Some(tx), - _ => None, - } - } - - /// Return the reference to signature. - /// - /// Returns `None` if this is a deposit variant. 
- pub const fn signature(&self) -> Option<&Signature> { - match self { - Self::Legacy(tx) => Some(tx.signature()), - Self::Eip2930(tx) => Some(tx.signature()), - Self::Eip1559(tx) => Some(tx.signature()), - Self::Eip7702(tx) => Some(tx.signature()), - Self::Deposit(_) => None, - } - } - - /// Return the [`OpTxType`] of the inner txn. - pub const fn tx_type(&self) -> OpTxType { - match self { - Self::Legacy(_) => OpTxType::Legacy, - Self::Eip2930(_) => OpTxType::Eip2930, - Self::Eip1559(_) => OpTxType::Eip1559, - Self::Eip7702(_) => OpTxType::Eip7702, - Self::Deposit(_) => OpTxType::Deposit, - } - } - - /// Returns the inner transaction hash. - pub fn hash(&self) -> &B256 { - match self { - Self::Legacy(tx) => tx.hash(), - Self::Eip1559(tx) => tx.hash(), - Self::Eip2930(tx) => tx.hash(), - Self::Eip7702(tx) => tx.hash(), - Self::Deposit(tx) => tx.hash_ref(), - } - } - - /// Returns the inner transaction hash. - pub fn tx_hash(&self) -> B256 { - *self.hash() - } - - /// Return the length of the inner txn, including type byte length - pub fn eip2718_encoded_length(&self) -> usize { - match self { - Self::Legacy(t) => t.eip2718_encoded_length(), - Self::Eip2930(t) => t.eip2718_encoded_length(), - Self::Eip1559(t) => t.eip2718_encoded_length(), - Self::Eip7702(t) => t.eip2718_encoded_length(), - Self::Deposit(t) => t.eip2718_encoded_length(), - } - } -} - -impl TxHashRef for OpTxEnvelope { - fn tx_hash(&self) -> &B256 { - Self::hash(self) - } -} - -#[cfg(feature = "k256")] -impl alloy_consensus::transaction::SignerRecoverable for OpTxEnvelope { - fn recover_signer( - &self, - ) -> Result { - let signature_hash = match self { - Self::Legacy(tx) => tx.signature_hash(), - Self::Eip2930(tx) => tx.signature_hash(), - Self::Eip1559(tx) => tx.signature_hash(), - Self::Eip7702(tx) => tx.signature_hash(), - // Optimism's Deposit transaction does not have a signature. Directly return the - // `from` address. 
- Self::Deposit(tx) => return Ok(tx.from), - }; - let signature = match self { - Self::Legacy(tx) => tx.signature(), - Self::Eip2930(tx) => tx.signature(), - Self::Eip1559(tx) => tx.signature(), - Self::Eip7702(tx) => tx.signature(), - Self::Deposit(_) => unreachable!("Deposit transactions should not be handled here"), - }; - alloy_consensus::crypto::secp256k1::recover_signer(signature, signature_hash) - } - - fn recover_signer_unchecked( - &self, - ) -> Result { - let signature_hash = match self { - Self::Legacy(tx) => tx.signature_hash(), - Self::Eip2930(tx) => tx.signature_hash(), - Self::Eip1559(tx) => tx.signature_hash(), - Self::Eip7702(tx) => tx.signature_hash(), - // Optimism's Deposit transaction does not have a signature. Directly return the - // `from` address. - Self::Deposit(tx) => return Ok(tx.from), - }; - let signature = match self { - Self::Legacy(tx) => tx.signature(), - Self::Eip2930(tx) => tx.signature(), - Self::Eip1559(tx) => tx.signature(), - Self::Eip7702(tx) => tx.signature(), - Self::Deposit(_) => unreachable!("Deposit transactions should not be handled here"), - }; - alloy_consensus::crypto::secp256k1::recover_signer_unchecked(signature, signature_hash) - } - - fn recover_unchecked_with_buf( - &self, - buf: &mut alloc::vec::Vec, - ) -> Result { - match self { - Self::Legacy(tx) => { - alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) - } - Self::Eip2930(tx) => { - alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) - } - Self::Eip1559(tx) => { - alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) - } - Self::Eip7702(tx) => { - alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) - } - Self::Deposit(tx) => Ok(tx.from), - } - } -} - -/// Bincode-compatible serde implementation for OpTxEnvelope. 
-#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] -pub mod serde_bincode_compat { - use crate::serde_bincode_compat::TxDeposit; - use alloy_consensus::{ - Sealed, Signed, - transaction::serde_bincode_compat::{TxEip1559, TxEip2930, TxEip7702, TxLegacy}, - }; - use alloy_primitives::{B256, Signature}; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use serde_with::{DeserializeAs, SerializeAs}; - - /// Bincode-compatible representation of an OpTxEnvelope. - #[derive(Debug, Serialize, Deserialize)] - pub enum OpTxEnvelope<'a> { - /// Legacy variant. - Legacy { - /// Transaction signature. - signature: Signature, - /// Borrowed legacy transaction data. - transaction: TxLegacy<'a>, - }, - /// EIP-2930 variant. - Eip2930 { - /// Transaction signature. - signature: Signature, - /// Borrowed EIP-2930 transaction data. - transaction: TxEip2930<'a>, - }, - /// EIP-1559 variant. - Eip1559 { - /// Transaction signature. - signature: Signature, - /// Borrowed EIP-1559 transaction data. - transaction: TxEip1559<'a>, - }, - /// EIP-7702 variant. - Eip7702 { - /// Transaction signature. - signature: Signature, - /// Borrowed EIP-7702 transaction data. - transaction: TxEip7702<'a>, - }, - /// Deposit variant. - Deposit { - /// Precomputed hash. - hash: B256, - /// Borrowed deposit transaction data. 
- transaction: TxDeposit<'a>, - }, - } - - impl<'a> From<&'a super::OpTxEnvelope> for OpTxEnvelope<'a> { - fn from(value: &'a super::OpTxEnvelope) -> Self { - match value { - super::OpTxEnvelope::Legacy(signed_legacy) => Self::Legacy { - signature: *signed_legacy.signature(), - transaction: signed_legacy.tx().into(), - }, - super::OpTxEnvelope::Eip2930(signed_2930) => Self::Eip2930 { - signature: *signed_2930.signature(), - transaction: signed_2930.tx().into(), - }, - super::OpTxEnvelope::Eip1559(signed_1559) => Self::Eip1559 { - signature: *signed_1559.signature(), - transaction: signed_1559.tx().into(), - }, - super::OpTxEnvelope::Eip7702(signed_7702) => Self::Eip7702 { - signature: *signed_7702.signature(), - transaction: signed_7702.tx().into(), - }, - super::OpTxEnvelope::Deposit(sealed_deposit) => Self::Deposit { - hash: sealed_deposit.seal(), - transaction: sealed_deposit.inner().into(), - }, - } - } - } - - impl<'a> From> for super::OpTxEnvelope { - fn from(value: OpTxEnvelope<'a>) -> Self { - match value { - OpTxEnvelope::Legacy { signature, transaction } => { - Self::Legacy(Signed::new_unhashed(transaction.into(), signature)) - } - OpTxEnvelope::Eip2930 { signature, transaction } => { - Self::Eip2930(Signed::new_unhashed(transaction.into(), signature)) - } - OpTxEnvelope::Eip1559 { signature, transaction } => { - Self::Eip1559(Signed::new_unhashed(transaction.into(), signature)) - } - OpTxEnvelope::Eip7702 { signature, transaction } => { - Self::Eip7702(Signed::new_unhashed(transaction.into(), signature)) - } - OpTxEnvelope::Deposit { hash, transaction } => { - Self::Deposit(Sealed::new_unchecked(transaction.into(), hash)) - } - } - } - } - - impl SerializeAs for OpTxEnvelope<'_> { - fn serialize_as(source: &super::OpTxEnvelope, serializer: S) -> Result - where - S: Serializer, - { - let borrowed = OpTxEnvelope::from(source); - borrowed.serialize(serializer) - } - } - - impl<'de> DeserializeAs<'de, super::OpTxEnvelope> for OpTxEnvelope<'de> { - fn 
deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let borrowed = OpTxEnvelope::deserialize(deserializer)?; - Ok(borrowed.into()) - } - } - - #[cfg(test)] - mod tests { - use super::*; - use arbitrary::Arbitrary; - use rand::Rng; - use serde::{Deserialize, Serialize}; - use serde_with::serde_as; - - /// Tests a bincode round-trip for OpTxEnvelope using an arbitrary instance. - #[test] - fn test_op_tx_envelope_bincode_roundtrip_arbitrary() { - #[serde_as] - #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] - struct Data { - // Use the bincode-compatible representation defined in this module. - #[serde_as(as = "OpTxEnvelope<'_>")] - envelope: super::super::OpTxEnvelope, - } - - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - let data = Data { - envelope: super::super::OpTxEnvelope::arbitrary(&mut arbitrary::Unstructured::new( - &bytes, - )) - .unwrap(), - }; - - let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); - let (decoded, _) = - bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) - .unwrap(); - assert_eq!(decoded, data); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::vec; - use alloy_consensus::{SignableTransaction, Transaction}; - use alloy_primitives::{Address, B256, Bytes, Signature, TxKind, U256, hex}; - - #[test] - fn test_tx_gas_limit() { - let tx = TxDeposit { gas_limit: 1, ..Default::default() }; - let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); - assert_eq!(tx_envelope.gas_limit(), 1); - } - - #[test] - fn test_deposit() { - let tx = TxDeposit { is_system_transaction: true, ..Default::default() }; - let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); - assert!(tx_envelope.is_deposit()); - - let tx = TxEip1559::default(); - let sig = Signature::test_signature(); - let tx_envelope = OpTxEnvelope::Eip1559(tx.into_signed(sig)); - assert!(!tx_envelope.is_system_transaction()); - } - - #[test] - fn 
test_system_transaction() { - let mut tx = TxDeposit { is_system_transaction: true, ..Default::default() }; - let tx_envelope = OpTxEnvelope::Deposit(tx.clone().seal_slow()); - assert!(tx_envelope.is_system_transaction()); - - tx.is_system_transaction = false; - let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); - assert!(!tx_envelope.is_system_transaction()); - } - - #[test] - fn test_encode_decode_deposit() { - let tx = TxDeposit { - source_hash: B256::left_padding_from(&[0xde, 0xad]), - from: Address::left_padding_from(&[0xbe, 0xef]), - mint: 1, - gas_limit: 2, - to: TxKind::Call(Address::left_padding_from(&[3])), - value: U256::from(4_u64), - input: Bytes::from(vec![5]), - is_system_transaction: false, - }; - let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); - let encoded = tx_envelope.encoded_2718(); - let decoded = OpTxEnvelope::decode_2718(&mut encoded.as_ref()).unwrap(); - assert_eq!(encoded.len(), tx_envelope.encode_2718_len()); - assert_eq!(decoded, tx_envelope); - } - - #[test] - #[cfg(feature = "serde")] - fn test_serde_roundtrip_deposit() { - let tx = TxDeposit { - gas_limit: u64::MAX, - to: TxKind::Call(Address::random()), - value: U256::MAX, - input: Bytes::new(), - source_hash: U256::MAX.into(), - from: Address::random(), - mint: u128::MAX, - is_system_transaction: false, - }; - let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); - - let serialized = serde_json::to_string(&tx_envelope).unwrap(); - let deserialized: OpTxEnvelope = serde_json::from_str(&serialized).unwrap(); - - assert_eq!(tx_envelope, deserialized); - } - - #[test] - fn eip2718_deposit_decode() { - // - let b = hex!( - 
"7ef8f8a0417d134467f4737fcdf2475f0ecdd2a0ed6d87ecffc888ba9f60ee7e3b8ac26a94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c1200000000000000040000000066c352bb000000000139c4f500000000000000000000000000000000000000000000000000000000c0cff1460000000000000000000000000000000000000000000000000000000000000001d4c88f4065ac9671e8b1329b90773e89b5ddff9cf8675b2b5e9c1b28320609930000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9" - ); - - let tx = OpTxEnvelope::decode_2718(&mut b[..].as_ref()).unwrap(); - let deposit = tx.as_deposit().unwrap(); - assert_eq!(deposit.mint, 0); - } - - #[test] - fn eip1559_decode() { - let tx = TxEip1559 { - chain_id: 1u64, - nonce: 2, - max_fee_per_gas: 3, - max_priority_fee_per_gas: 4, - gas_limit: 5, - to: Address::left_padding_from(&[6]).into(), - value: U256::from(7_u64), - input: vec![8].into(), - access_list: Default::default(), - }; - let sig = Signature::test_signature(); - let tx_signed = tx.into_signed(sig); - let envelope: OpTxEnvelope = tx_signed.into(); - let encoded = envelope.encoded_2718(); - let mut slice = encoded.as_slice(); - let decoded = OpTxEnvelope::decode_2718(&mut slice).unwrap(); - assert!(matches!(decoded, OpTxEnvelope::Eip1559(_))); - } -} diff --git a/op-alloy/crates/network/Cargo.toml b/op-alloy/crates/network/Cargo.toml deleted file mode 100644 index 398616d9bdc..00000000000 --- a/op-alloy/crates/network/Cargo.toml +++ /dev/null @@ -1,32 +0,0 @@ -[package] -name = "op-alloy-network" -description = "Optimism blockchain RPC behavior abstraction" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -op-alloy-consensus = { workspace = true, features = ["alloy-compat"] } -op-alloy-rpc-types.workspace = true - -# 
Alloy -alloy-consensus.workspace = true -alloy-network.workspace = true -alloy-primitives.workspace = true -alloy-provider.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-signer.workspace = true - -[features] -std = ["op-alloy-consensus/std", "op-alloy-rpc-types/std"] -serde = ["op-alloy-consensus/serde", "op-alloy-rpc-types/serde"] diff --git a/op-alloy/crates/network/README.md b/op-alloy/crates/network/README.md deleted file mode 100644 index 916d740ac36..00000000000 --- a/op-alloy/crates/network/README.md +++ /dev/null @@ -1,15 +0,0 @@ -## `op-alloy-network` - -CI -op-alloy-network crate -MIT License -Apache License -Book - - -Optimism blockchain RPC behavior abstraction. - -This crate contains a simple abstraction of the RPC behavior of an -Op-stack blockchain. It is intended to be used by the Alloy client to -provide a consistent interface to the rest of the library, regardless of -changes the underlying blockchain makes to the RPC interface. diff --git a/op-alloy/crates/op-alloy/Cargo.toml b/op-alloy/crates/op-alloy/Cargo.toml deleted file mode 100644 index 198d7d16626..00000000000 --- a/op-alloy/crates/op-alloy/Cargo.toml +++ /dev/null @@ -1,74 +0,0 @@ -[package] -name = "op-alloy" -description = "Connect applications to the OP Stack" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -authors.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[package.metadata.docs.rs] -all-features = true -rustdoc-args = ["--cfg", "docsrs"] - -[lints] -workspace = true - -[dependencies] -# Workspace -op-alloy-consensus = { workspace = true, optional = true } -op-alloy-provider = { workspace = true, optional = true } -op-alloy-network = { workspace = true, optional = true } -op-alloy-rpc-jsonrpsee = { workspace = true, optional = true } -op-alloy-rpc-types-engine = { workspace = true, optional = true } -op-alloy-rpc-types = { workspace = true, optional 
= true } - -[features] -default = ["std", "k256", "serde"] - -std = [ - "op-alloy-consensus?/std", - "op-alloy-rpc-types?/std", - "op-alloy-rpc-types-engine?/std", - "op-alloy-network?/std", - "op-alloy-provider?/std" -] - -full = [ - "consensus", - "network", - "rpc-types", - "rpc-types-engine", - "rpc-jsonrpsee", -] - -k256 = [ - "op-alloy-consensus?/k256", -] - -arbitrary = [ - "op-alloy-consensus?/arbitrary", - "op-alloy-rpc-types?/arbitrary", - "op-alloy-rpc-types-engine?/arbitrary", -] - -serde = [ - "op-alloy-consensus?/serde", - "op-alloy-rpc-types-engine?/serde", - "op-alloy-network?/serde", - "op-alloy-provider?/serde", - "op-alloy-rpc-types?/serde" -] - -# `no_std` support -consensus = ["dep:op-alloy-consensus"] -rpc-types = ["dep:op-alloy-rpc-types"] -rpc-types-engine = ["dep:op-alloy-rpc-types-engine"] - -# std features -network = ["dep:op-alloy-network"] -rpc-jsonrpsee = ["dep:op-alloy-rpc-jsonrpsee"] -provider = ["dep:op-alloy-provider"] diff --git a/op-alloy/crates/op-alloy/README.md b/op-alloy/crates/op-alloy/README.md deleted file mode 100644 index cbc299caa7b..00000000000 --- a/op-alloy/crates/op-alloy/README.md +++ /dev/null @@ -1,92 +0,0 @@ -## `op-alloy` - -CI -op-alloy crate -License -License -Book - - -Built on [Alloy][alloy], `op-alloy` connects applications to the OP Stack. - - -### Usage - -To use `op-alloy`, add the crate as a dependency to a `Cargo.toml`. - -```toml -op-alloy = "0.6" -``` - -### Development Status - -`op-alloy` is currently in active development, and is not yet ready for use in production. - - -### Supported Rust Versions (MSRV) - -The current MSRV (minimum supported rust version) is 1.86. - -Unlike Alloy, op-alloy may use the latest stable release, -to benefit from the latest features. - -The MSRV is not increased automatically, and will be updated -only as part of a patch (pre-1.0) or minor (post-1.0) release. 
- - -### Contributing - -op-alloy is built by open source contributors like you, thank you for improving the project! - -A [contributing guide][contributing] is available that sets guidelines for contributing. - -Pull requests will not be merged unless CI passes, so please ensure that your contribution follows the -linting rules and passes clippy. - - -### `no_std` - -op-alloy is intended to be `no_std` compatible, initially for use in [kona][kona]. - -The following crates support `no_std`. -Notice, provider crates do not support `no_std` compatibility. - -- [`op-alloy-consensus`][op-alloy-consensus] -- [`op-alloy-rpc-types-engine`][op-alloy-rpc-types-engine] -- [`op-alloy-rpc-types`][op-alloy-rpc-types] - -If you would like to add no_std support to a crate, -please make sure to update [scripts/check_no_std.sh][check-no-std]. - - -### Credits - -op-alloy is inspired by the work of several teams and projects, most notably [the Alloy project][alloy]. - -This would not be possible without the hard work from open source contributors. Thank you. - - -### License - -Licensed under either of Apache License, Version -2.0 or MIT license at your option. - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in these crates by you, as defined in the Apache-2.0 license, -shall be dual licensed as above, without any additional terms or conditions. 
- - - - -[check-no-std]: https://github.com/alloy-rs/op-alloy/blob/main/scripts/check_no_std.sh - -[maili]: https://github.com/op-rs/maili -[kona]: https://github.com/op-rs/kona -[alloy]: https://github.com/alloy-rs/alloy -[contributing]: https://alloy-rs.github.io/op-alloy - -[op-alloy-consensus]: https://crates.io/crates/op-alloy-consensus -[op-alloy-network]: https://crates.io/crates/op-alloy-network -[op-alloy-rpc-jsonrpsee]: https://crates.io/crates/op-alloy-rpc-jsonrpsee -[op-alloy-rpc-types-engine]: https://crates.io/crates/op-alloy-rpc-types-engine -[op-alloy-rpc-types]: https://crates.io/crates/op-alloy-rpc-types diff --git a/op-alloy/crates/provider/Cargo.toml b/op-alloy/crates/provider/Cargo.toml deleted file mode 100644 index baaca2ddd14..00000000000 --- a/op-alloy/crates/provider/Cargo.toml +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "op-alloy-provider" -description = "Interface with an OP Stack blockchain" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -authors.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } - -# Alloy -alloy-network.workspace = true -alloy-provider.workspace = true -alloy-transport.workspace = true -alloy-primitives = { workspace = true, features = ["rlp", "serde"] } -alloy-rpc-types-engine = { workspace = true, features = ["serde"] } - -# misc -async-trait.workspace = true - -[features] -std = [ - "op-alloy-rpc-types-engine/std" -] -serde = [ - "op-alloy-rpc-types-engine/serde" -] diff --git a/op-alloy/crates/provider/README.md b/op-alloy/crates/provider/README.md deleted file mode 100644 index 9fececea984..00000000000 --- a/op-alloy/crates/provider/README.md +++ /dev/null @@ -1,12 +0,0 @@ -## `op-alloy-provider` - -CI -op-alloy-provider crate -MIT License -Apache License -Book - - 
-Optimism providers to interface with the engine API, adopted from L1, and [OP-unique engine API][op-api]. - -[op-api]: https://github.com/op-rs/maili/blob/main/crates/provider/README.md diff --git a/op-alloy/crates/provider/src/ext/engine.rs b/op-alloy/crates/provider/src/ext/engine.rs deleted file mode 100644 index 855927a27c0..00000000000 --- a/op-alloy/crates/provider/src/ext/engine.rs +++ /dev/null @@ -1,318 +0,0 @@ -use alloy_network::Network; -use alloy_primitives::{B256, BlockHash, Bytes}; -use alloy_provider::Provider; -use alloy_rpc_types_engine::{ - ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, - ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, -}; -use alloy_transport::{Transport, TransportResult}; -use op_alloy_rpc_types_engine::{ - OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, - OpPayloadAttributes, ProtocolVersion, -}; - -/// Extension trait that gives access to Optimism engine API RPC methods. -/// -/// Note: -/// > The provider should use a JWT authentication layer. -/// -/// This follows the Optimism specs that can be found at: -/// -#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] -pub trait OpEngineApi { - /// Sends the given payload to the execution layer client, as specified for the Shanghai fork. - /// - /// See also - /// - /// No modifications needed for OP compatibility. - async fn new_payload_v2( - &self, - payload: ExecutionPayloadInputV2, - ) -> TransportResult; - - /// Sends the given payload to the execution layer client, as specified for the Cancun fork. - /// - /// See also - /// - /// OP modifications: - /// - expected versioned hashes MUST be an empty array: therefore the `versioned_hashes` - /// parameter is removed. 
- /// - parent beacon block root MUST be the parent beacon block root from the L1 origin block of - /// the L2 block. - async fn new_payload_v3( - &self, - payload: ExecutionPayloadV3, - parent_beacon_block_root: B256, - ) -> TransportResult; - - /// Sends the given payload to the execution layer client, as specified for the Prague fork. - /// - /// See also - /// - /// OP modifications: TODO - async fn new_payload_v4( - &self, - payload: OpExecutionPayloadV4, - parent_beacon_block_root: B256, - ) -> TransportResult; - - /// Updates the execution layer client with the given fork choice, as specified for the Shanghai - /// fork. - /// - /// Caution: This should not accept the `parentBeaconBlockRoot` field in the payload attributes. - /// - /// See also - /// - /// OP modifications: - /// - The `payload_attributes` parameter is extended with the [`OpPayloadAttributes`] type - /// as described in - async fn fork_choice_updated_v2( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> TransportResult; - - /// Updates the execution layer client with the given fork choice, as specified for the Cancun - /// fork. - /// - /// See also - /// - /// OP modifications: - /// - Must be called with an Ecotone payload - /// - Attributes must contain the parent beacon block root field - /// - The `payload_attributes` parameter is extended with the [`OpPayloadAttributes`] type - /// as described in - async fn fork_choice_updated_v3( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> TransportResult; - - /// Retrieves an execution payload from a previously started build process, as specified for the - /// Shanghai fork. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// No modifications needed for OP compatibility. 
- async fn get_payload_v2( - &self, - payload_id: PayloadId, - ) -> TransportResult; - - /// Retrieves an execution payload from a previously started build process, as specified for the - /// Cancun fork. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// OP modifications: - /// - the response type is extended to [`OpExecutionPayloadEnvelopeV3`]. - async fn get_payload_v3( - &self, - payload_id: PayloadId, - ) -> TransportResult; - - /// Returns the most recent version of the payload that is available in the corresponding - /// payload build process at the time of receiving this call. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// OP modifications: - /// - the response type is extended to [`OpExecutionPayloadEnvelopeV4`]. - async fn get_payload_v4( - &self, - payload_id: PayloadId, - ) -> TransportResult; - - /// Returns the execution payload bodies by the given hash. - /// - /// See also - async fn get_payload_bodies_by_hash_v1( - &self, - block_hashes: Vec, - ) -> TransportResult; - - /// Returns the execution payload bodies by the range starting at `start`, containing `count` - /// blocks. - /// - /// WARNING: This method is associated with the BeaconBlocksByRange message in the consensus - /// layer p2p specification, meaning the input should be treated as untrusted or potentially - /// adversarial. - /// - /// Implementers should take care when acting on the input to this method, specifically - /// ensuring that the range is limited properly, and that the range boundaries are computed - /// correctly and without panics. - /// - /// See also - async fn get_payload_bodies_by_range_v1( - &self, - start: u64, - count: u64, - ) -> TransportResult; - - /// Returns the execution client version information. 
- /// - /// Note: - /// > The `client_version` parameter identifies the consensus client. - /// - /// See also - async fn get_client_version_v1( - &self, - client_version: ClientVersionV1, - ) -> TransportResult>; - - /// Optional extension to the Engine API. - /// - /// Signals superchain information to the Engine: V1 signals which protocol version is - /// recommended and required. - /// - /// See : - async fn signal_superchain_v1( - &self, - recommended: ProtocolVersion, - required: ProtocolVersion, - ) -> TransportResult; - - /// Returns the list of Engine API methods supported by the execution layer client software. - /// - /// See also - async fn exchange_capabilities( - &self, - capabilities: Vec, - ) -> TransportResult>; -} - -#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] -#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] -impl OpEngineApi for P -where - N: Network, - T: Transport + Clone, - P: Provider, -{ - async fn new_payload_v2( - &self, - payload: ExecutionPayloadInputV2, - ) -> TransportResult { - self.client().request("engine_newPayloadV2", (payload,)).await - } - - async fn new_payload_v3( - &self, - payload: ExecutionPayloadV3, - parent_beacon_block_root: B256, - ) -> TransportResult { - // Note: The `versioned_hashes` parameter is always an empty array for OP chains. - let versioned_hashes: Vec = vec![]; - - self.client() - .request("engine_newPayloadV3", (payload, versioned_hashes, parent_beacon_block_root)) - .await - } - - async fn new_payload_v4( - &self, - payload: OpExecutionPayloadV4, - parent_beacon_block_root: B256, - ) -> TransportResult { - // Note: The `versioned_hashes`, `execution_requests` parameters are always an empty array - // for OP chains. 
- let versioned_hashes: Vec = vec![]; - let execution_requests: Vec = vec![]; - - self.client() - .request( - "engine_newPayloadV4", - (payload, versioned_hashes, parent_beacon_block_root, execution_requests), - ) - .await - } - - async fn fork_choice_updated_v2( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> TransportResult { - self.client() - .request("engine_forkchoiceUpdatedV2", (fork_choice_state, payload_attributes)) - .await - } - - async fn fork_choice_updated_v3( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> TransportResult { - self.client() - .request("engine_forkchoiceUpdatedV3", (fork_choice_state, payload_attributes)) - .await - } - - async fn get_payload_v2( - &self, - payload_id: PayloadId, - ) -> TransportResult { - self.client().request("engine_getPayloadV2", (payload_id,)).await - } - - async fn get_payload_v3( - &self, - payload_id: PayloadId, - ) -> TransportResult { - self.client().request("engine_getPayloadV3", (payload_id,)).await - } - - async fn get_payload_v4( - &self, - payload_id: PayloadId, - ) -> TransportResult { - self.client().request("engine_getPayloadV4", (payload_id,)).await - } - - async fn get_payload_bodies_by_hash_v1( - &self, - block_hashes: Vec, - ) -> TransportResult { - self.client().request("engine_getPayloadBodiesByHashV1", (block_hashes,)).await - } - - async fn get_payload_bodies_by_range_v1( - &self, - start: u64, - count: u64, - ) -> TransportResult { - self.client().request("engine_getPayloadBodiesByRangeV1", (start, count)).await - } - - async fn get_client_version_v1( - &self, - client_version: ClientVersionV1, - ) -> TransportResult> { - self.client().request("engine_getClientVersionV1", (client_version,)).await - } - - async fn signal_superchain_v1( - &self, - recommended: ProtocolVersion, - required: ProtocolVersion, - ) -> TransportResult { - let signal = op_alloy_rpc_types_engine::SuperchainSignal { recommended, required }; - 
self.client().request("engine_signalSuperchainV1", (signal,)).await - } - - async fn exchange_capabilities( - &self, - capabilities: Vec, - ) -> TransportResult> { - self.client().request("engine_exchangeCapabilities", (capabilities,)).await - } -} diff --git a/op-alloy/crates/rpc-jsonrpsee/Cargo.toml b/op-alloy/crates/rpc-jsonrpsee/Cargo.toml deleted file mode 100644 index 943665c3b24..00000000000 --- a/op-alloy/crates/rpc-jsonrpsee/Cargo.toml +++ /dev/null @@ -1,28 +0,0 @@ -[package] -name = "op-alloy-rpc-jsonrpsee" -description = "Optimism RPC Client" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -authors.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Alloy -alloy-primitives = { workspace = true, features = ["serde"] } - -# rpc -jsonrpsee.workspace = true - -[features] -client = [ - "jsonrpsee/client", - "jsonrpsee/async-client", -] diff --git a/op-alloy/crates/rpc-jsonrpsee/README.md b/op-alloy/crates/rpc-jsonrpsee/README.md deleted file mode 100644 index 6eb7eb9eaa4..00000000000 --- a/op-alloy/crates/rpc-jsonrpsee/README.md +++ /dev/null @@ -1,10 +0,0 @@ -## `op-alloy-rpc-jsonrpsee` - -CI -op-alloy-rpc-jsonrpsee crate -MIT License -Apache License -Book - - -Low-level Optimism JSON-RPC server and client implementations. 
diff --git a/op-alloy/crates/rpc-types-engine/Cargo.toml b/op-alloy/crates/rpc-types-engine/Cargo.toml deleted file mode 100644 index 7cab2286fbc..00000000000 --- a/op-alloy/crates/rpc-types-engine/Cargo.toml +++ /dev/null @@ -1,76 +0,0 @@ -[package] -name = "op-alloy-rpc-types-engine" -description = "Optimism RPC types for the `engine` namespace" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -authors.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -op-alloy-consensus.workspace = true - -# Alloy -alloy-primitives.workspace = true -alloy-eips.workspace = true -alloy-rpc-types-engine.workspace = true -alloy-rlp.workspace = true -alloy-consensus.workspace = true - -# Encoding -snap = { workspace = true, optional = true } -ethereum_ssz = { workspace = true, optional = true } -ethereum_ssz_derive = { workspace = true, optional = true } - -# serde -serde = { workspace = true, optional = true } -alloy-serde = { workspace = true, optional = true } - -# misc -thiserror.workspace = true -arbitrary = { workspace = true, features = ["derive"], optional = true } - -# hashing -sha2.workspace = true - -derive_more = { workspace = true, features = ["as_ref", "deref_mut"] } - -[dev-dependencies] -arbtest.workspace = true -serde_json.workspace = true -arbitrary = { workspace = true, features = ["derive"] } -alloy-primitives = { workspace = true, features = ["arbitrary", "getrandom"] } - -[features] -default = ["std", "serde"] -std = [ - "dep:snap", - "dep:ethereum_ssz", - "dep:ethereum_ssz_derive", - "alloy-rpc-types-engine/ssz", - "alloy-primitives/std", - "alloy-rpc-types-engine/std", - "op-alloy-consensus/std", -] -serde = [ - "dep:serde", - "dep:alloy-serde", - "alloy-rpc-types-engine/serde", - "op-alloy-consensus/serde" -] -k256 = ["op-alloy-consensus/k256"] -arbitrary = [ - "std", - "dep:arbitrary", - 
"alloy-primitives/arbitrary", - "alloy-primitives/rand", - "op-alloy-consensus/arbitrary" -] diff --git a/op-alloy/crates/rpc-types-engine/README.md b/op-alloy/crates/rpc-types-engine/README.md deleted file mode 100644 index 9a0a84f0cd5..00000000000 --- a/op-alloy/crates/rpc-types-engine/README.md +++ /dev/null @@ -1,10 +0,0 @@ -## `op-alloy-rpc-types-engine` - -CI -op-alloy-rpc-types-engine crate -MIT License -Apache License -Book - - -Optimism RPC types for the `engine` namespace. diff --git a/op-alloy/crates/rpc-types-engine/src/envelope.rs b/op-alloy/crates/rpc-types-engine/src/envelope.rs deleted file mode 100644 index f87afb2f839..00000000000 --- a/op-alloy/crates/rpc-types-engine/src/envelope.rs +++ /dev/null @@ -1,944 +0,0 @@ -//! Optimism execution payload envelope in network format and related types. -//! -//! This module uses the `snappy` compression algorithm to decompress the payload. -//! The license for snappy can be found in the `SNAPPY-LICENSE` at the root of the repository. - -use crate::{ - OpExecutionPayload, OpExecutionPayloadSidecar, OpExecutionPayloadV4, OpFlashblockError, - OpFlashblockPayload, -}; -use alloc::vec::Vec; -use alloy_consensus::{Block, BlockHeader, Sealable, Transaction}; -use alloy_eips::{Encodable2718, eip4895::Withdrawal, eip7685::Requests}; -use alloy_primitives::{B256, Signature, keccak256}; -use alloy_rpc_types_engine::{ - CancunPayloadFields, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, PraguePayloadFields, -}; - -/// A thin wrapper around [`OpExecutionPayload`] that includes the parent beacon block root. -#[derive(Debug, Clone, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] -pub struct OpExecutionPayloadEnvelope { - /// The parent beacon block root, if any. - pub parent_beacon_block_root: Option, - /// The execution payload. 
- pub execution_payload: OpExecutionPayload, -} - -impl OpExecutionPayloadEnvelope { - /// Returns the payload hash over the ssz encoded payload envelope data. - /// - /// - #[cfg(feature = "std")] - pub fn payload_hash(&self) -> crate::PayloadHash { - use ssz::Encode; - let ssz_bytes = self.as_ssz_bytes(); - crate::PayloadHash::from(ssz_bytes.as_slice()) - } -} - -#[cfg(feature = "std")] -impl ssz::Encode for OpExecutionPayloadEnvelope { - fn is_ssz_fixed_len() -> bool { - false - } - - fn ssz_append(&self, buf: &mut Vec) { - // Write parent beacon block root only if the payload is not a v1 or v2 payload. - // - if !matches!(self.execution_payload, OpExecutionPayload::V1(_) | OpExecutionPayload::V2(_)) - { - buf.extend_from_slice(self.parent_beacon_block_root.unwrap_or_default().as_slice()); - } - - // Write payload - self.execution_payload.ssz_append(buf); - } - - fn ssz_bytes_len(&self) -> usize { - let mut len = 0; - len += B256::ssz_fixed_len(); // parent_beacon_block_root is always 32 bytes - len += self.execution_payload.ssz_bytes_len(); - len - } -} - -#[cfg(feature = "std")] -impl ssz::Decode for OpExecutionPayloadEnvelope { - fn is_ssz_fixed_len() -> bool { - false - } - - fn from_ssz_bytes(bytes: &[u8]) -> Result { - if bytes.len() < B256::ssz_fixed_len() { - return Err(ssz::DecodeError::InvalidByteLength { - len: bytes.len(), - expected: B256::ssz_fixed_len(), - }); - } - - // Decode parent_beacon_block_root - let parent_beacon_block_root = { - let root_bytes = &bytes[..B256::ssz_fixed_len()]; - if root_bytes.iter().all(|&b| b == 0) { - None - } else { - Some(B256::from_slice(root_bytes)) - } - }; - - // Decode payload - let execution_payload = - OpExecutionPayload::from_ssz_bytes(&bytes[B256::ssz_fixed_len()..])?; - - Ok(Self { parent_beacon_block_root, execution_payload }) - } -} - -impl From for OpExecutionPayloadEnvelope { - fn from(envelope: OpNetworkPayloadEnvelope) -> Self { - Self { - execution_payload: envelope.payload, - 
parent_beacon_block_root: envelope.parent_beacon_block_root, - } - } -} - -/// Struct aggregating [`OpExecutionPayload`] and [`OpExecutionPayloadSidecar`] and encapsulating -/// complete payload supplied for execution. -#[derive(Debug, Clone)] -#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] -pub struct OpExecutionData { - /// Execution payload. - pub payload: OpExecutionPayload, - /// Additional fork-specific fields. - pub sidecar: OpExecutionPayloadSidecar, -} - -impl OpExecutionData { - /// Creates new instance of [`OpExecutionData`]. - pub const fn new(payload: OpExecutionPayload, sidecar: OpExecutionPayloadSidecar) -> Self { - Self { payload, sidecar } - } - - /// Conversion from [`alloy_consensus::Block`]. Also returns the [`OpExecutionPayloadSidecar`] - /// extracted from the block. - /// - /// See also [`from_block_unchecked`](OpExecutionPayload::from_block_slow). - /// - /// Note: This re-calculates the block hash. - pub fn from_block_slow(block: &Block) -> Self - where - T: Encodable2718 + Transaction, - H: BlockHeader + Sealable, - { - let (payload, sidecar) = OpExecutionPayload::from_block_slow(block); - - Self::new(payload, sidecar) - } - - /// Conversion from [`alloy_consensus::Block`]. Also returns the [`OpExecutionPayloadSidecar`] - /// extracted from the block. - /// - /// See also [`OpExecutionPayload::from_block_unchecked`]. - pub fn from_block_unchecked(block_hash: B256, block: &Block) -> Self - where - T: Encodable2718 + Transaction, - H: BlockHeader, - { - let (payload, sidecar) = OpExecutionPayload::from_block_unchecked(block_hash, block); - - Self::new(payload, sidecar) - } - - /// Conversion from a vec of [`OpFlashblockPayload`]. Also returns the - /// [`OpExecutionPayloadSidecar`] extracted from the payloads. 
- /// - /// # Validation - /// - /// This method performs the following validations: - /// - At least one flashblock must be present - /// - Indices must be sequential starting from 0 - /// - First flashblock (index 0) must have a base payload - /// - Only the first flashblock may have a base payload - /// - /// # Errors - /// - /// Returns an error if any validation fails. - pub fn from_flashblocks( - flashblocks: &[OpFlashblockPayload], - ) -> Result { - // Validate we have at least one flashblock - if flashblocks.is_empty() { - return Err(OpFlashblockError::MissingPayload); - } - - // Validate indices are sequential starting from 0 - for (i, fb) in flashblocks.iter().enumerate() { - if fb.index as usize != i { - return Err(OpFlashblockError::InvalidIndex); - } - } - - // Validate first flashblock has base and extract it - let first = flashblocks.first().unwrap(); // Safe: checked empty above - if first.base.is_none() { - return Err(OpFlashblockError::MissingBasePayload); - } - - // Validate no other flashblocks have base (only first should have it) - for fb in flashblocks.iter().skip(1) { - if fb.base.is_some() { - return Err(OpFlashblockError::UnexpectedBasePayload); - } - } - - Ok(Self::from_flashblocks_unchecked(flashblocks)) - } - - /// Conversion from a vec of [`OpFlashblockPayload`] without validation. - /// - /// This is a faster alternative to [`Self::from_flashblocks`] that skips all validation - /// checks. Use this method only when you are certain the input data is valid. - /// - /// # Safety Requirements - /// - /// The caller must ensure: - /// - At least one flashblock is present - /// - Indices are sequential starting from 0 - /// - First flashblock (index 0) has a base payload - /// - Only the first flashblock has a base payload - /// - /// # Panics - /// - /// Panics if any of the safety requirements are violated. 
- pub fn from_flashblocks_unchecked(flashblocks: &[OpFlashblockPayload]) -> Self { - // Extract base from first flashblock - // SAFETY: Caller guarantees at least one flashblock exists with base payload - let first = flashblocks.first().expect("flashblocks must not be empty"); - let base = first.base.as_ref().expect("first flashblock must have base payload"); - - // Get the final state from the last flashblock - // SAFETY: Caller guarantees at least one flashblock exists - let diff = &flashblocks.last().expect("flashblocks must not be empty").diff; - - // Collect all transactions and withdrawals from all flashblocks - let (transactions, withdrawals) = - flashblocks.iter().fold((Vec::new(), Vec::new()), |(mut txs, mut withdrawals), p| { - txs.extend(p.diff.transactions.iter().cloned()); - withdrawals.extend(p.diff.withdrawals.iter().cloned()); - (txs, withdrawals) - }); - - let v3 = ExecutionPayloadV3 { - blob_gas_used: diff.blob_gas_used.unwrap_or(0), - excess_blob_gas: 0, - payload_inner: ExecutionPayloadV2 { - withdrawals, - payload_inner: ExecutionPayloadV1 { - parent_hash: base.parent_hash, - fee_recipient: base.fee_recipient, - state_root: diff.state_root, - receipts_root: diff.receipts_root, - logs_bloom: diff.logs_bloom, - prev_randao: base.prev_randao, - block_number: base.block_number, - gas_limit: base.gas_limit, - gas_used: diff.gas_used, - timestamp: base.timestamp, - extra_data: base.extra_data.clone(), - base_fee_per_gas: base.base_fee_per_gas, - block_hash: diff.block_hash, - transactions, - }, - }, - }; - - // Before Isthmus hardfork, withdrawals_root was not included. - // A zero withdrawals_root indicates a pre-Isthmus flashblock. 
- if diff.withdrawals_root == B256::ZERO { - return Self::v3(v3, Vec::new(), base.parent_beacon_block_root); - } - - let v4 = - OpExecutionPayloadV4 { withdrawals_root: diff.withdrawals_root, payload_inner: v3 }; - - Self::v4(v4, Vec::new(), base.parent_beacon_block_root, Default::default()) - } - - /// Creates a new instance from args to engine API method `newPayloadV2`. - /// - /// Spec: - pub fn v2(payload: ExecutionPayloadInputV2) -> Self { - Self::new(OpExecutionPayload::v2(payload), OpExecutionPayloadSidecar::default()) - } - - /// Creates a new instance from args to engine API method `newPayloadV3`. - /// - /// Spec: - pub fn v3( - payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - ) -> Self { - Self::new( - OpExecutionPayload::v3(payload), - OpExecutionPayloadSidecar::v3(CancunPayloadFields::new( - parent_beacon_block_root, - versioned_hashes, - )), - ) - } - - /// Creates a new instance from args to engine API method `newPayloadV4`. - /// - /// Spec: - pub fn v4( - payload: OpExecutionPayloadV4, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - execution_requests: Requests, - ) -> Self { - Self::new( - OpExecutionPayload::v4(payload), - OpExecutionPayloadSidecar::v4( - CancunPayloadFields::new(parent_beacon_block_root, versioned_hashes), - PraguePayloadFields::new(execution_requests), - ), - ) - } - - /// Returns the parent beacon block root, if any. - pub fn parent_beacon_block_root(&self) -> Option { - self.sidecar.parent_beacon_block_root() - } - - /// Return the withdrawals for the payload or attributes. 
- pub const fn withdrawals(&self) -> Option<&Vec> { - match &self.payload { - OpExecutionPayload::V1(_) => None, - OpExecutionPayload::V2(execution_payload_v2) => Some(&execution_payload_v2.withdrawals), - OpExecutionPayload::V3(execution_payload_v3) => { - Some(execution_payload_v3.withdrawals()) - } - OpExecutionPayload::V4(op_execution_payload_v4) => { - Some(op_execution_payload_v4.payload_inner.withdrawals()) - } - } - } - - /// Returns the parent hash of the block. - pub const fn parent_hash(&self) -> B256 { - self.payload.parent_hash() - } - - /// Returns the hash of the block. - pub const fn block_hash(&self) -> B256 { - self.payload.block_hash() - } - - /// Returns the number of the block. - pub const fn block_number(&self) -> u64 { - self.payload.block_number() - } -} - -/// Optimism execution payload envelope in network format. -/// -/// This struct is used to represent payloads that are sent over the Optimism -/// CL p2p network in a snappy-compressed format. -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct OpNetworkPayloadEnvelope { - /// The execution payload. - pub payload: OpExecutionPayload, - /// A signature for the payload. - pub signature: Signature, - /// The hash of the payload. - pub payload_hash: PayloadHash, - /// The parent beacon block root. - pub parent_beacon_block_root: Option, -} - -impl OpNetworkPayloadEnvelope { - /// Decode a payload envelope from a snappy-compressed byte array. - /// The payload version decoded is `ExecutionPayloadV1` from SSZ bytes. 
- #[cfg(feature = "std")] - pub fn decode_v1(data: &[u8]) -> Result { - use ssz::Decode; - let mut decoder = snap::raw::Decoder::new(); - let decompressed = decoder.decompress_vec(data)?; - - if decompressed.len() < 66 { - return Err(PayloadEnvelopeError::InvalidLength); - } - - let sig_data = &decompressed[..65]; - let block_data = &decompressed[65..]; - - let signature = Signature::try_from(sig_data)?; - let hash = PayloadHash::from(block_data); - - let payload = OpExecutionPayload::V1( - alloy_rpc_types_engine::ExecutionPayloadV1::from_ssz_bytes(block_data)?, - ); - - Ok(Self { payload, signature, payload_hash: hash, parent_beacon_block_root: None }) - } - - /// Encodes a payload envelope as a snappy-compressed byte array. - #[cfg(feature = "std")] - pub fn encode_v1(&self) -> Result, PayloadEnvelopeEncodeError> { - use ssz::Encode; - let execution_payload_v1 = match &self.payload { - OpExecutionPayload::V1(execution_payload_v1) => execution_payload_v1, - _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), - }; - - let mut data = Vec::new(); - let mut sig = self.signature.as_bytes(); - sig[64] = self.signature.v() as u8; - data.extend_from_slice(&sig[..]); - let block_data = execution_payload_v1.as_ssz_bytes(); - data.extend_from_slice(block_data.as_slice()); - - Ok(snap::raw::Encoder::new().compress_vec(&data)?) - } - - /// Decode a payload envelope from a snappy-compressed byte array. - /// The payload version decoded is `ExecutionPayloadV2` from SSZ bytes. 
- #[cfg(feature = "std")] - pub fn decode_v2(data: &[u8]) -> Result { - use ssz::Decode; - let mut decoder = snap::raw::Decoder::new(); - let decompressed = decoder.decompress_vec(data)?; - - if decompressed.len() < 66 { - return Err(PayloadEnvelopeError::InvalidLength); - } - - let sig_data = &decompressed[..65]; - let block_data = &decompressed[65..]; - - let signature = Signature::try_from(sig_data)?; - let hash = PayloadHash::from(block_data); - - let payload = OpExecutionPayload::V2( - alloy_rpc_types_engine::ExecutionPayloadV2::from_ssz_bytes(block_data)?, - ); - - Ok(Self { payload, signature, payload_hash: hash, parent_beacon_block_root: None }) - } - - /// Encodes a payload envelope as a snappy-compressed byte array. - #[cfg(feature = "std")] - pub fn encode_v2(&self) -> Result, PayloadEnvelopeEncodeError> { - use ssz::Encode; - let execution_payload_v2 = match &self.payload { - OpExecutionPayload::V2(execution_payload_v2) => execution_payload_v2, - _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), - }; - - let mut data = Vec::new(); - let mut sig = self.signature.as_bytes(); - sig[64] = self.signature.v() as u8; - data.extend_from_slice(&sig[..]); - let block_data = execution_payload_v2.as_ssz_bytes(); - data.extend_from_slice(block_data.as_slice()); - - Ok(snap::raw::Encoder::new().compress_vec(&data)?) - } - - /// Decode a payload envelope from a snappy-compressed byte array. - /// The payload version decoded is `ExecutionPayloadV3` from SSZ bytes. 
- #[cfg(feature = "std")] - pub fn decode_v3(data: &[u8]) -> Result { - use ssz::Decode; - let mut decoder = snap::raw::Decoder::new(); - let decompressed = decoder.decompress_vec(data)?; - - if decompressed.len() < 98 { - return Err(PayloadEnvelopeError::InvalidLength); - } - - let sig_data = &decompressed[..65]; - let parent_beacon_block_root = &decompressed[65..97]; - let block_data = &decompressed[97..]; - - let signature = Signature::try_from(sig_data)?; - let parent_beacon_block_root = B256::from_slice(parent_beacon_block_root); - let hash = PayloadHash::from( - [parent_beacon_block_root.as_slice(), block_data].concat().as_slice(), - ); - - let payload = OpExecutionPayload::V3( - alloy_rpc_types_engine::ExecutionPayloadV3::from_ssz_bytes(block_data)?, - ); - - Ok(Self { - payload, - signature, - payload_hash: hash, - parent_beacon_block_root: Some(parent_beacon_block_root), - }) - } - - /// Encodes a payload envelope as a snappy-compressed byte array. - #[cfg(feature = "std")] - pub fn encode_v3(&self) -> Result, PayloadEnvelopeEncodeError> { - use ssz::Encode; - let execution_payload_v3 = match &self.payload { - OpExecutionPayload::V3(execution_payload_v3) => execution_payload_v3, - _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), - }; - - let mut data = Vec::new(); - let mut sig = self.signature.as_bytes(); - sig[64] = self.signature.v() as u8; - data.extend_from_slice(&sig[..]); - data.extend_from_slice(self.parent_beacon_block_root.as_ref().unwrap().as_slice()); - let block_data = execution_payload_v3.as_ssz_bytes(); - data.extend_from_slice(block_data.as_slice()); - - Ok(snap::raw::Encoder::new().compress_vec(&data)?) - } - - /// Decode a payload envelope from a snappy-compressed byte array. - /// The payload version decoded is `ExecutionPayloadV4` from SSZ bytes. 
- #[cfg(feature = "std")] - pub fn decode_v4(data: &[u8]) -> Result { - use ssz::Decode; - let mut decoder = snap::raw::Decoder::new(); - let decompressed = decoder.decompress_vec(data)?; - - if decompressed.len() < 98 { - return Err(PayloadEnvelopeError::InvalidLength); - } - - let sig_data = &decompressed[..65]; - let parent_beacon_block_root = &decompressed[65..97]; - let block_data = &decompressed[97..]; - - let signature = Signature::try_from(sig_data)?; - let parent_beacon_block_root = B256::from_slice(parent_beacon_block_root); - let hash = PayloadHash::from( - [parent_beacon_block_root.as_slice(), block_data].concat().as_slice(), - ); - - let payload = OpExecutionPayload::V4(OpExecutionPayloadV4::from_ssz_bytes(block_data)?); - - Ok(Self { - payload, - signature, - payload_hash: hash, - parent_beacon_block_root: Some(parent_beacon_block_root), - }) - } - - /// Encodes a payload envelope as a snappy-compressed byte array. - #[cfg(feature = "std")] - pub fn encode_v4(&self) -> Result, PayloadEnvelopeEncodeError> { - use ssz::Encode; - let execution_payload_v4 = match &self.payload { - OpExecutionPayload::V4(execution_payload_v4) => execution_payload_v4, - _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), - }; - - let mut data = Vec::new(); - let mut sig = self.signature.as_bytes(); - sig[64] = self.signature.v() as u8; - data.extend_from_slice(&sig[..]); - data.extend_from_slice(self.parent_beacon_block_root.as_ref().unwrap().as_slice()); - let block_data = execution_payload_v4.as_ssz_bytes(); - data.extend_from_slice(block_data.as_slice()); - - Ok(snap::raw::Encoder::new().compress_vec(&data)?) - } -} - -/// Errors that can occur when encoding a payload envelope. -#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] -pub enum PayloadEnvelopeEncodeError { - /// Wrong versions of the payload. - #[error("Wrong version of the payload")] - WrongVersion, - /// An error occurred during snap encoding. 
- #[error(transparent)] - #[cfg(feature = "std")] - SnapEncoding(#[from] snap::Error), -} - -/// Errors that can occur when decoding a payload envelope. -#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] -pub enum PayloadEnvelopeError { - /// The snappy encoding is broken. - #[error("Broken snappy encoding")] - BrokenSnappyEncoding, - /// The signature is invalid. - #[error("Invalid signature")] - InvalidSignature, - /// The SSZ encoding is broken. - #[error("Broken SSZ encoding")] - BrokenSszEncoding, - /// The payload envelope is of invalid length. - #[error("Invalid length")] - InvalidLength, -} - -impl From for PayloadEnvelopeError { - fn from(_: alloy_primitives::SignatureError) -> Self { - Self::InvalidSignature - } -} - -#[cfg(feature = "std")] -impl From for PayloadEnvelopeError { - fn from(_: snap::Error) -> Self { - Self::BrokenSnappyEncoding - } -} - -#[cfg(feature = "std")] -impl From for PayloadEnvelopeError { - fn from(_: ssz::DecodeError) -> Self { - Self::BrokenSszEncoding - } -} - -/// Represents the Keccak256 hash of the block -#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] -#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] -pub struct PayloadHash(pub B256); - -impl From<&[u8]> for PayloadHash { - /// Returns the Keccak256 hash of a sequence of bytes - fn from(value: &[u8]) -> Self { - Self(keccak256(value)) - } -} - -impl PayloadHash { - /// The expected message that should be signed by the unsafe block signer. 
- pub fn signature_message(&self, chain_id: u64) -> B256 { - let domain = B256::ZERO.as_slice(); - let chain_id = B256::left_padding_from(&chain_id.to_be_bytes()[..]); - let payload_hash = self.0.as_slice(); - keccak256([domain, chain_id.as_slice(), payload_hash].concat()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::b256; - - #[test] - #[cfg(feature = "std")] - fn test_roundtrip_encode_rpc_execution_payload_envelope() { - use alloy_primitives::hex; - use ssz::{Decode, Encode}; - let data = hex!( - "00000000000000000000000000000000000000000000000000000000000001230000000000000000000000000000000000000000000000000000000000000123000000000000000000000000000000000000045600000000000000000000000000000000000000000000000000000000000007890000000000000000000000000000000000000000000000000000000000000abc0d0e0f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111de000000000000004d01000000000000bc010000000000002b02000000000000300200000903000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000088832020000380200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001236666040000009999" - ); - - let payload = OpExecutionPayloadEnvelope::from_ssz_bytes(&data).unwrap(); - let serialized = payload.as_ssz_bytes(); - assert_eq!(data, &serialized[..]); - } - - #[test] - #[cfg(feature = "serde")] - fn 
test_serde_roundtrip_op_execution_payload_envelope() { - let envelope_str = r#"{ - "executionPayload": {"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0","withdrawalsRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119"}, - "parentBeaconBlockRoot": "0x9999999999999999999999999999999999999999999999999999999999999999" - }"#; - - let envelope: OpExecutionPayloadEnvelope = serde_json::from_str(envelope_str).unwrap(); - let expected = b256!("9999999999999999999999999999999999999999999999999999999999999999"); - assert_eq!(envelope.parent_beacon_block_root.unwrap(), expected); - let _ = serde_json::to_string(&envelope).unwrap(); - } - - #[test] - fn test_signature_message() { - let inner = b256!("9999999999999999999999999999999999999999999999999999999999999999"); - let hash 
= PayloadHash::from(inner.as_slice()); - let chain_id = 10; - let expected = b256!("44a0e2b1aba1aae1771eddae1dcd2ad18a8cdac8891517153f03253e49d3f206"); - assert_eq!(hash.signature_message(chain_id), expected); - } - - #[test] - fn test_inner_payload_hash() { - arbtest::arbtest(|u| { - let inner = B256::from(u.arbitrary::<[u8; 32]>()?); - let hash = PayloadHash::from(inner.as_slice()); - assert_eq!(hash.0, keccak256(inner.as_slice())); - Ok(()) - }); - } - - #[test] - #[cfg(feature = "std")] - fn test_roundtrip_encode_envelope_v1() { - use alloy_primitives::hex; - let data = hex::decode("0xbd04f043128457c6ccf35128497167442bcc0f8cce78cda8b366e6a12e526d938d1e4c1046acffffbfc542a7e212bb7d80d3a4b2f84f7b196d935398a24eb84c519789b401000000fe0300fe0300fe0300fe0300fe0300fe0300a203000c4a8fd56621ad04fc0101067601008ce60be0005b220117c32c0f3b394b346c2aa42cfa8157cd41f891aa0bec485a62fc010000").unwrap(); - let payload_envelop = OpNetworkPayloadEnvelope::decode_v1(&data).unwrap(); - assert_eq!(1725271882, payload_envelop.payload.timestamp()); - let encoded = payload_envelop.encode_v1().unwrap(); - assert_eq!(data, encoded); - } - - #[test] - #[cfg(feature = "std")] - fn test_roundtrip_encode_envelope_v2() { - use alloy_primitives::hex; - let data = hex::decode("0xc104f0433805080eb36c0b130a7cc1dc74c3f721af4e249aa6f61bb89d1557143e971bb738a3f3b98df7c457e74048e9d2d7e5cd82bb45e3760467e2270e9db86d1271a700000000fe0300fe0300fe0300fe0300fe0300fe0300a203000c6b89d46525ad000205067201009cda69cb5b9b73fc4eb2458b37d37f04ff507fe6c9cd2ab704a05ea9dae3cd61760002000000020000").unwrap(); - let payload_envelop = OpNetworkPayloadEnvelope::decode_v2(&data).unwrap(); - assert_eq!(1708427627, payload_envelop.payload.timestamp()); - let encoded = payload_envelop.encode_v2().unwrap(); - assert_eq!(data, encoded); - } - - #[test] - #[cfg(feature = "std")] - fn test_roundtrip_encode_envelope_v3() { - use alloy_primitives::hex; - let data = 
hex::decode("0xf104f0434442b9eb38b259f5b23826e6b623e829d2fb878dac70187a1aecf42a3f9bedfd29793d1fcb5822324be0d3e12340a95855553a65d64b83e5579dffb31470df5d010000006a03000412346a1d00fe0100fe0100fe0100fe0100fe0100fe01004201000cc588d465219504100201067601007cfece77b89685f60e3663b6e0faf2de0734674eb91339700c4858c773a8ff921e014401043e0100").unwrap(); - let payload_envelop = OpNetworkPayloadEnvelope::decode_v3(&data).unwrap(); - assert_eq!(1708427461, payload_envelop.payload.timestamp()); - let encoded = payload_envelop.encode_v3().unwrap(); - assert_eq!(data, encoded); - } - - #[test] - #[cfg(feature = "std")] - fn test_roundtrip_encode_envelope_v4() { - use alloy_primitives::hex; - let data = hex::decode("0x9105f043cee25401b6853202950d1d8a082f31a80c4fef5782c049a731f5d104b1b9b9aa7618605b420438ae98b44c8aaaebd482854473c2ae57c079286bb634bece5210000000006a03000412346a1d00fe0100fe0100fe0100fe0100fe0100fe01004201000c5766d26721950430020106f6010001440104b60100049876").unwrap(); - let payload_envelop = OpNetworkPayloadEnvelope::decode_v4(&data).unwrap(); - assert_eq!(1741842007, payload_envelop.payload.timestamp()); - let encoded = payload_envelop.encode_v4().unwrap(); - assert_eq!(data, encoded); - } - - // Helper function to create a test flashblock - #[cfg(test)] - fn create_test_flashblock(index: u64, with_base: bool) -> OpFlashblockPayload { - use crate::flashblock::{ - OpFlashblockPayloadBase, OpFlashblockPayloadDelta, OpFlashblockPayloadMetadata, - }; - use alloc::collections::BTreeMap; - use alloy_primitives::{Address, Bloom, Bytes, U256}; - use alloy_rpc_types_engine::PayloadId; - - let base = if with_base { - Some(OpFlashblockPayloadBase { - parent_beacon_block_root: B256::ZERO, - parent_hash: B256::ZERO, - fee_recipient: Address::ZERO, - prev_randao: B256::ZERO, - block_number: 100, - gas_limit: 30_000_000, - timestamp: 1234567890, - extra_data: Bytes::default(), - base_fee_per_gas: U256::from(1000000000u64), - }) - } else { - None - }; - - let diff = 
OpFlashblockPayloadDelta { - state_root: B256::ZERO, - receipts_root: B256::ZERO, - logs_bloom: Bloom::ZERO, - gas_used: 21000, - block_hash: B256::ZERO, - transactions: Vec::new(), - withdrawals: Vec::new(), - withdrawals_root: B256::from([1u8; 32]), // Non-zero for Isthmus - blob_gas_used: Some(0), - }; - - let metadata = OpFlashblockPayloadMetadata { - block_number: 100, - new_account_balances: BTreeMap::new(), - receipts: BTreeMap::new(), - }; - - OpFlashblockPayload { payload_id: PayloadId::new([1u8; 8]), index, base, diff, metadata } - } - - #[test] - fn test_from_flashblocks_empty_vec() { - let result = OpExecutionData::from_flashblocks(&[]); - assert!(matches!(result, Err(OpFlashblockError::MissingPayload))); - } - - #[test] - fn test_from_flashblocks_non_sequential_indices() { - let fb1 = create_test_flashblock(0, true); - let fb2 = create_test_flashblock(2, false); // Skip index 1 - - let result = OpExecutionData::from_flashblocks(&[fb1, fb2]); - assert!(matches!(result, Err(OpFlashblockError::InvalidIndex))); - } - - #[test] - fn test_from_flashblocks_missing_base_in_first() { - let fb1 = create_test_flashblock(0, false); // First should have base - - let result = OpExecutionData::from_flashblocks(&[fb1]); - assert!(matches!(result, Err(OpFlashblockError::MissingBasePayload))); - } - - #[test] - fn test_from_flashblocks_unexpected_base_in_second() { - let fb1 = create_test_flashblock(0, true); - let fb2 = create_test_flashblock(1, true); // Should not have base - - let result = OpExecutionData::from_flashblocks(&[fb1, fb2]); - assert!(matches!(result, Err(OpFlashblockError::UnexpectedBasePayload))); - } - - #[test] - fn test_from_flashblocks_single_valid_flashblock() { - let fb1 = create_test_flashblock(0, true); - - let result = OpExecutionData::from_flashblocks(&[fb1]); - assert!(result.is_ok(), "Single valid flashblock should succeed"); - } - - #[test] - fn test_from_flashblocks_multiple_valid_flashblocks() { - let fb1 = create_test_flashblock(0, 
true); - let fb2 = create_test_flashblock(1, false); - let fb3 = create_test_flashblock(2, false); - - let result = OpExecutionData::from_flashblocks(&[fb1, fb2, fb3]); - assert!(result.is_ok(), "Multiple valid flashblocks should succeed"); - } - - #[test] - fn test_from_flashblocks_wrong_first_index() { - let fb1 = create_test_flashblock(1, true); // Should be index 0 - let result = OpExecutionData::from_flashblocks(&[fb1]); - assert!(matches!(result, Err(OpFlashblockError::InvalidIndex))); - } - - // Real-world test case from Unichain Sepolia - // - #[test] - #[cfg(feature = "serde")] - fn test_from_flashblocks_unichain_sepolia_block() { - use alloy_primitives::{address, b256}; - - let raw_sequence = r#"[{"payload_id":"0x03c446f063e3735a","index":0,"base":{"parent_beacon_block_root":"0xf6d335a6b2b4fd8fb539cd51a49769df4d53c31a90c54dd270e54542638ff101","parent_hash":"0x06ff95a9cd23b0328da74a984aa986b2e01d377dab1825f1029e39ece6c4a3ea","fee_recipient":"0x4200000000000000000000000000000000000011","prev_randao":"0x8beee738d20a9d77c5f27e9cb799ebe5b536f0985efad5f7d77ebff47f092c4a","block_number":"0x21e3b52","gas_limit":"0x3938700","timestamp":"0x690be89e","extra_data":"0x00000000320000000c","base_fee_per_gas":"0x33"},"diff":{"state_root":"0xb29a9bcae8cf3ae6d68985fcd70db80b3818cd629c9d5da0bb116451739b2078","receipts_root":"0x91d8ad10740ccfc1bd848fba0e02668d95769c08eeea30f10698692ba86c6159","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x10994","block_hash":"0xa66f8562a861f906a2438d7d6ba
79495640d98d9c6922b9605c54b57f97a345c","transactions":["0x7ef90104a035dd2ec802504a143048c7830f8f570e0d6cf5147217af869939c6b4ba710a3694deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8b0098999be000007d0000dbba0000000000000000800000000690be848000000000092042e000000000000000000000000000000000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000010ffd7e2fb2c36e5f27c015872ce733a7b4f3fc0f4ee668d7469c557c48f8250f0000000000000000000000004ab3387810ef500bfe05a49dc53a44c222cbab3e000000000000000000000000","0x02f87e8205158401c8ea9180338255789400000000000000000000000000000000000000008096426c6f636b204e756d6265723a203335353335363938c080a091f83058c881d9ad71c179ce680326501702eb68150d20b2bf7786e388f954a2a0180185d83e503f11bf3c265c1f9296ed8d3d7c04031cd8bb30509ad188ce7bbc"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":1,"base":null,"diff":{"state_root":"0xfb1794f74d405b345672c57a5053c6105cc55c8e63f96fb0db5b0260df42413a","receipts_root":"0x1eaaaeb9d43bead7d32b90f1b320589174c63d2fa8f5fd366f841a205b1eb2e0","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x18f7d","block_hash":"0x67b0521ebfcb03d6ce2b6e1bad9c9c66795365f63ad8dc51e1e8f582a5ab7821","transactions":["0x02f86c8205158401c8ea92803382880994f878f0340bf132c28f3211e8b46c569edf81749580843f
d553e8c001a0d73ce313aafea312e0b7244767e45f8b05d50305e0f4e4c3c564ddc751666815a02ee015ce2363311823c0b2e96bfb0e8090fd53c6cdd99be8cf343af123036dfc"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":2,"base":null,"diff":{"state_root":"0x90dd105c4a2a0dd9ffe994204bfa3e2b4f70f7ea760d5cb9a4263f26a89f91b4","receipts_root":"0x0fff0488aa3732c34018b938839ab2f0caa96018221e4ffaeca011fb06ba288f","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x21566","block_hash":"0x720feb7457110a565b479fafbaa89cc984f5d673846a27d44bbb8cf5200b32fe","transactions":["0x02f86c8205158401c8ea93803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c001a0f8cd94080642e116bc772f36a02d002505227aa542e1c13e5129ab40b8b037fba00608318d3895388e39b218bcb275380cebc566e68f26d3d434e32b8b58366cdf"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":3,"base":null,"diff":{"state_root":"0x71f8c60fdfdd84cffda3b0b6af7c8ff92195918f4fc2abae750a7306521ac0dc","receipts_root":"0xa62d1d98f56ffb1464a2beb185484253df68208004306e155c0bd1519137afe6","logs_bloom":"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000100
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x29b4f","block_hash":"0x670844e30f7325d4f290ea375e01f7e819afca317fc7db9723e6867a184984fa","transactions":["0x02f86c8205158401c8ea94803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c080a04368492ec1d087703aaf6f5fefe4427b3bf382e5cd07133f638bb6701f15fe61a05e28757fbdc7e744118be36d5a1548eb7c009eefcb5dc5c5040e09c2fc6de9d8"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":4,"base":null,"diff":{"state_root":"0x5615e4342d231c352438f0ba6a8f0f641459f67961961764b781a909969b28ad","receipts_root":"0x588e1d47b0618d7e935b20c3945cba3b7b8c00141904f79ceed20312ea502e63","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x32138","block_hash":"0xc463a3120c35268f610d969f5608b479332ef10953af77c7a6be806195831196","transactions":["0x02f86c8205158401c8ea95803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c080a0802ba6d4f37e3b8de96095bd0b216144f276171d16dc62a004f1a89009af5deea00f0c6250cfd1a062a1bc2bc353a5c227a980cac0f23
3b7be8932f2192342ec4f"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}}]"#; - - let flashblocks: Vec = serde_json::from_str(raw_sequence).unwrap(); - let execution_data = OpExecutionData::from_flashblocks(&flashblocks).unwrap(); - - // Validate against expected final block state - assert_eq!( - execution_data.payload.parent_hash(), - b256!("06ff95a9cd23b0328da74a984aa986b2e01d377dab1825f1029e39ece6c4a3ea") - ); - assert_eq!( - execution_data.payload.block_hash(), - b256!("c463a3120c35268f610d969f5608b479332ef10953af77c7a6be806195831196") - ); - assert_eq!(execution_data.payload.block_number(), 0x21E3B52); - assert_eq!(execution_data.payload.timestamp(), 0x690be89e); - assert_eq!( - execution_data.payload.fee_recipient(), - address!("4200000000000000000000000000000000000011") - ); - assert_eq!(execution_data.payload.gas_limit(), 0x3938700); - assert_eq!(execution_data.payload.as_v1().gas_used, 0x32138); - assert_eq!( - execution_data.payload.as_v1().state_root, - b256!("5615e4342d231c352438f0ba6a8f0f641459f67961961764b781a909969b28ad") - ); - assert_eq!( - execution_data.payload.as_v1().receipts_root, - b256!("588e1d47b0618d7e935b20c3945cba3b7b8c00141904f79ceed20312ea502e63") - ); - assert_eq!(execution_data.payload.transactions().len(), 6); - assert_eq!( - execution_data.payload.as_v4().unwrap().withdrawals_root, - b256!("62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2") - ); - - // Verify parent beacon block root - assert_eq!( - execution_data.parent_beacon_block_root(), - Some(b256!("f6d335a6b2b4fd8fb539cd51a49769df4d53c31a90c54dd270e54542638ff101")) - ); - } - - // Real-world test case from Base Sepolia - // Block #33439826 with 11 flashblocks (indices 0-10) - #[test] - #[cfg(feature = "serde")] - fn test_from_flashblocks_base_sepolia_block() { - use alloy_primitives::{address, b256}; - - let raw_sequence = 
r#"[{"payload_id":"0x03c33cc62b81edb6","index":0,"base":{"parent_beacon_block_root":"0xf058b1e43890ed5f838bd07e77db06d075d894343d1b31f6099a345b0d8f7d1b","parent_hash":"0x6ffd2714d5af6c412c57db3f664a5a127516573bbd987fd242d06f71ea662741","fee_recipient":"0x4200000000000000000000000000000000000011","prev_randao":"0x9985c1f8ec25b468cbf2b727a8371b4554b7e7adb059c08abf7a7d51d86ceee5","block_number":"0x1fe4052","gas_limit":"0x3938700","timestamp":"0x690fdf84","extra_data":"0x000000003200000004","base_fee_per_gas":"0x34"},"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x1b2fa5e4cbbc1f8c01a7c7204571ebe339dbdfadc666451d8e70d5c10c99830f","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0xb41c","block_hash":"0x87c6775cc427caf4c0ffe0d4b6d76627536f38d77d23f105f9f104ef3e5541c7","transactions":["0x7ef90104a01c055ffd19ea027da4a8aae0a2734c6bf17c3f487d4cc22931d7dbe261409cda94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8b0098999be0000044d000a118b000000000000000400000000690fde3c00000000009252e3000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000014f1595c3798e3082aa093e433bd5cbd102a11f9619d20e6e821c1a30fb56b12b000000000000000000000000fc56e7272eebbba5bc6c544e159483c4a38f8ba3000000000000000000000000"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block
_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":1,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xe38b2090ddfa6ee25b15a8ebcdd7ecc0f1ee9128ec98cb24f47909e29e11832e","logs_bloom":"0x00000000000000000000000020000000040080000000000000020005000000004000000040040000000080000000000000000000000000000002000000000000008000000000000000000000000000014000000000800000000000000000000000000000000000040100000000000000000000000100000000000380008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000400801080000000000005000000400000000000000000000000110000000000000000000000000100200021004400010000000010000000400000008002000004080000000000000","gas_used":"0x9d2f2","block_hash":"0x4548d5014de4883cec380838f1b225996fa3c08c176f2f63d98d8c23169fab44","transactions":["0x02f89283014a348202ea830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000001236efcbcbb340000c001a0742ff606597cda39751dd369e66e9978946ce8f4eb578a8d73314535a2df4388a06a6f83c3606c32e1677f62408b8ec69b09a82f499395b26eaefea567deb83843","0x02f9101583014a34830597bd830f4240830f42aa8306aecc9442826e92e6418877459f0920cb058e462ac6a0a480b90fa4dbaa1e6400000000000000000000000000a739e4479c97289801654ec1a52a67077613c000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000691d0e7f4f6ae70adc2708ec4857d3d5ca54a11710c9ac11989b1cb3d3d8d3298a78f6a50000000000000000000000000000000000000000000000000000000000000f200000000000000000000000000000000000000000000000000000000000000e44b653f0c300000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000033bea0000000000000000000000000000000000000000000000000000000000000038000000000000
0000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000004747970650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026f6b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000086f6b2e746f6b656e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003657468000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000086f6b2e74785f69640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046626173653a3078343865643835396232636630633962366261633864373134653162363436313264313232346436643a38343533323a33333433393832323a3333393131333600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a2000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000
0000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000004c000000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000007c000000000000000000000000000000000000000000000000000000000000008c00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000004747970650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000087769746864726177000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001977697468647261772e73656e6465722e636861696e5f7569640000000000000000000000000000000000000000000000000000000000000000000000000000046261736500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001777697468647261772e73656e6465722e61646472657373000000000000000000000000000000000000000000000000000000000000000000000000000000002a307834386564383539623263663063396236626163386437313465316236343631326431323234643664000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000e77697468647261772e746f6b656e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000036574680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000f77697468647261772e616d6f756e740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001431303030303030303030303030303030303030300000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000002f77697468647261772e63726f73735f636861696e5f6164647265737365732e302e757365722e636861696e5f756964000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000077365706f6c6961000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000002d77697468647261772e63726f73735f636861696e5f6164647265737365732e302e757365722e6164647265737300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002a3078343865643835396232636630633962366261633864373134653162363436313264313232346436640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000003977697468647261772e63726f73735f636861696e5f6164647265737365732e302e6c696d69742e6c6573735f7468616e5f6f725f657175616c0000000000000000000000000000000000000000000000000000000000000000000000000000143130303030303030303030303030303030303030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000e77697468647261772e74785f69640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046626173653a3078343865643835396232636630633962366261633864373134653162363436313264313232346436643a38343533323a33333433393832323a333339313133360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041ffb578b6e9ab1699e4d9cd0078d9f28e7f0ef2136a11596aa7b6d7fe7f896dd353b7b786bf155c924f35d5099f0df90650e74a5858b75673835d24ac6dc8f1e41b00000000000000000000000000000000000000000000000000000000000000c080a09c4f42d262ed1f1bee31461fd10d8d8fbac6e340d9bc2b8035df5faa30f88d4da06d832693c1e28d4f647a6ff08f5d037d08ad2599964a9f3600396efdaec07e4a"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":2,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xda7caba0b5682eda3aed5f47132da84aa2c2757499c23d609aa73dd3a449be1d","logs_bloom":"0x0000000000000000000000002000000004008000000000000002000500000000400000004004000000008000000000000000
0000000000000002000000000040008000000000000000000000000000014000000800800000000004000000000000000000000400040100000000000000000002800100000000000b80008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000440801080200000000005000000400000000000000000000000110000000000008000000000000100200021004400010000000110000000400000008002000004080010000000000","gas_used":"0xd6a91","block_hash":"0x17e106bfeebb2ff0123cf2e1f555e0441ed308773224513dc4ac6257d943e52c","transactions":["0x02f89283014a3482015f830f4275830f427583045dc694a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000000c249fdd327780000c001a098b7dd6d4454a8d31170b5b2d1461bc8a74eed745eddc982232b2c1483cba322a07d3acfe989366b2729aa728ebca7009c15dc908954a9fb5459b75cff1bfd103f"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":3,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xda7caba0b5682eda3aed5f47132da84aa2c2757499c23d609aa73dd3a449be1d","logs_bloom":"0x00000000000000000000000020000000040080000000000000020005000000004000000040040000000080000000000000000000000000000002000000000040008000000000000000000000000000014000000800800000000004000000000000000000000400040100000000000000000002800100000000000b80008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000440801080200000000005000000400000000000000000000000110000000000008000000000000100200021004400010000000110000000400000008002000004080010000000000","gas_used":"0xd6a91","block_hash":"0x17e106bfeebb2ff0123cf2e1f555e0441ed308773224513dc4ac6257d943e52c","transactions":[],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"m
etadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":4,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xaff50907a173fc423a499319437afffb8abc2071ce36b6f040dc487579a5d4c3","logs_bloom":"0x0002800000000000002000012000040004008000000010000012000500000000480000004004000000918000000000000000000000000000000200821000806000800010000000000000000800000001c000000800800000000004202000000800000000000400040100020100000000000002800100000000000b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000000001500a000400000000000000000000100110000020000008000000000000100a00221004400010000000110100000400100008002100004280010000000000","gas_used":"0x1498a3","block_hash":"0x4764a20ee262986e45d29251db593320bd4bf6de1133de553b6363a5691e7644","transactions":["0x02f89283014a348203af830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000002017a67f731740000c001a04ce59ff67dc25a76f3027441513f916b809f55b29d5de4fecd4aa0136a3a1a4fa02c1b32b3a1600f6bb2365130797238162cbc797843169a4cfb1ebb41465877c7","0x02f8d483014a348309087a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000022e40d0a0c0bb77b570445fb59d39bcf14790b660000000000000000000000000000000000000000000000004a61b425a5ee98000000000000000000000000000000000000000000000000000006431e74449860c001a002c2402941acdc25bcaae67c62d58f1a942b32723827f77972c74b159b2c174ea04772118ec71bc7fbe0c9f1c9ef90f58927126480ca769d73704365bfbac65db3","0x02f8d483014a348308c06b830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000005643a7772017c8544d3841894c1f7c264cd05ffe0000000000000000000000000000000000000000000000000b035a61b2e8be000000000000000000000000000000000000000000000000000006431e7446c578c001a0ac31a5ad06a38
97a0c1a909770badf8cec728abd2daf4d125a551778fa597124a013b1de6f741139d957f299bf22de0a91c1d8a4f2ade6743ddcec89bcc9e8b07d","0x02f8d483014a34830922b1830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000576831e77af4b5425b39efb23528441b79ee71e20000000000000000000000000000000000000000000000002bed26c4505ca4000000000000000000000000000000000000000000000000000006431e7446f712c080a0c105ef2c930e95694d112028a642399e5a56ce6416f9b8df9ad27baa26244483a064f6e5881fa728b7afaa2e2ddd62c3182789cb247f90b6276c14f8bfc1b4f2cf"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":5,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x6d12b13dcae85ef97ec3756b317ac9d33752bcd231a9323046ecd5a65e8ca8a2","logs_bloom":"0x0002800000000000002000012000040004008000000010000012000500000000480000004004000000918000000000000000000000000100000200821000806000800010000000000000000800000001c000000800800000000004202000000800000000000400040100020100000000000002800100000000004b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000000001500a000400000000000000000000100110004020000008000000000000100a00221004400010000000110100000400100008002100004280010000000000","gas_used":"0x153998","block_hash":"0x810679ccd05f90093eb0e88549d52ad196214f3a4a555cf0b06201f30aa61a2d","transactions":["0x02f8d483014a34830966ae830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000046195a8573f2610bba630bb0bd5c21c064594f3a0000000000000000000000000000000000000000000000002c94bc176f7cb4000000000000000000000000000000000000000000000000000006431e743d37eac080a053f1881c67ad8fa9838d83943afe83b6498dae96a13a019704f25e0df515dbdba05eef8e08269eaafd63ba7e
14e13d73e03ec5e7fad5bcdbaaabc124da41e8e32c"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":6,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x1b76e086c31a8a08d1c4a93b868b00238faabd4d52d9e75e55a4abf3a75e65d8","logs_bloom":"0x0002800000000000002000012000048004008000000010000012000500000000480000004004000000918000000000000000000000000100000200821000806000800010000000000000000800000001c008000800800000000004202000000800000000000400040100020100000000000002800100000000004b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000010001500a000400000000000001000000100110004021000008000000000000100a00221004400010000000110100000400100008002100004280010000000001","gas_used":"0x189dc4","block_hash":"0xfdf2cbb452a36c9c4033d1c0bc2b3dd9cee7ba91d0ca5488aa3d9a23b127b79f","transactions":["0x02f89383014a348304e447830f4240830f42a8830226b494cd997aef0b9a1d8c02a16204ccce354844edeeff80a4f7a308060000000000000000000000000000000000000000000000000000000000016636c001a07dc2c0285cd2c53657c87826a698de9ae5bb38e2580657fe1772fc08ab53a9f2a05a183dac1ed51f6aac2eff4add4510fd76d71f9dce59a3536fc00bfbb2ac750c","0x02f8d483014a3483096a27830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000fde9b0be445930f929705125fe24049093e628e4000000000000000000000000000000000000000000000001517fd24c7f6670000000000000000000000000000000000000000000000000000006431e74408803c080a036f0e0df96ee863041cc41fad376f2f88364225ff6c10c2e492da014d71ab530a03cca82dd065d09a150f75103ea2e1f2867210c604fd82592ec49fae02cadc20a","0x02f8d483014a348309a03d830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000088c7e4701045571734e2147bad80e3d8
c56500d300000000000000000000000000000000000000000000000023e284d65ede20000000000000000000000000000000000000000000000000000006431e7441d02ac080a03ee196fff4a614411f9d41431f0b174141ae6f62246df4e54117205bb19c4f64a022f123e006139ae334de3bf7b62c06b72045ba7dc0a508d137bcd056d950da33"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":7,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x065878c1c4d88295544c04fec2e74c9dd8b5d656e196a1b7b09ce8cadbb8f979","logs_bloom":"0x0002800000000010002000052000048004008000000010000012000500000000490000004004000000918000010000000000008000000100040200821000806800800010000000000000000800000001c008000800800000000004202000000804000020000400040100020100000000020002800100000000004b90008a02000000180000480200000100800010080400000000000004011000224080000000000000008c0002040080000840000200000000000100c000c4080108020000012001500a000400000000000001000000100110004021000008000000000000100a00221104400010000000110100000400100008002100004280010000000001","gas_used":"0x1bc281","block_hash":"0xcc9c18ed55c91e97f32353e253c69766cd0d2e0acb0e7f92098d01e1d7761ce3","transactions":["0x02f8d483014a3483091e1f830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000b501c0a0f800e68d980f5253650d0cf3a69d16c00000000000000000000000000000000000000000000000000b87d57d89ffe7800000000000000000000000000000000000000000000000000006431e7442365fc001a0b276c68f59bcfb78fe7905a720e9418130d5c87d60da4b6d55faf07e1b1724aba03425daae2e51a061a26bedcd89cf6ead44146ac97f831371ec36a0192728d204","0x02f8d483014a3483094dde830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000000097cc7164250c464fea5f9f91d1abec7718814a0000000000000000000000000000000000000000000000004c40d37c20f44000000
0000000000000000000000000000000000000000000000006431e744372abc001a01f3e58f3baa5e472c08097dafe1e756163c61e7200dc90751f167e796d542f20a02c10596de8b29462c0953a023a8b6c06f74fe77ea66a24598df920d542edab3b","0x02f8d483014a3483094ece830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000083fe74125ec8ffaeee4b2371d7ea17f6ad6f9ba2000000000000000000000000000000000000000000000000f9e4840a6e4938000000000000000000000000000000000000000000000000000006431e744362dec001a066724129c4de96e835cd1377b55541b4582bf4ebcd7c2a3faa4231ade86b14d8a03736bce9203cc0c92878fcc28ee8710961eaddde92bf6a2158c602b4d1bbdbd7","0x02f8d483014a348303750a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000414d9179c5d2207a6e0efeb0319b6c556265974600000000000000000000000000000000000000000000000033979a45ffefac000000000000000000000000000000000000000000000000000006431e74442677c001a0682d2489ba1d9666324060a006f0abe06830cecdeed4398169dc9fbf7199eb59a02e971034255d087d02b25f45a7962b31360bbed70e3aa30e69ee8f64dd6afdb4","0x02f8d483014a348308acf2830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000097c152d0fa30c49603e0e3e013e36c4e29bf7fea0000000000000000000000000000000000000000000000001d58bdca2addf5000000000000000000000000000000000000000000000000000006431e744447a9c001a030e423ab3697fe4ccc5ce92232d7a642a8295f489f2e52b3c3ba2f110c828e7ca057fd4d3d0e700734568b0be067deda7927188f6a67f9600bae3d6c75d201fe57"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":8,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x7bf525f832aecc6bf7f7b7e329779640bb4477cb47bf1bde512934c5ed45519b","logs_bloom":"0x000380000000021000200005200004800400800000001000001200050000000049000000400
4000000918000010000000000008000000100040200821000806800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000000000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000000100a00221104400010010000110100000400100008002140004280010000000001","gas_used":"0x213d0b","block_hash":"0x5f9c957cde671b50c5661b328b7f3f8a0e56e194a954d8d7cc4274eb1e014a1e","transactions":["0x02f89283014a34820392830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000002017a67f731740000c001a05c4f86d9218cfab447e6ead7abb27444f7e8d3a185a1fbfb6860a36513c89d93a01d4b9b74f049bfc10feeabcb101a18a14e774e89de35ac246e6452c05e94bc98","0x02f8d483014a348309087a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000022e40d0a0c0bb77b570445fb59d39bcf14790b660000000000000000000000000000000000000000000000004a61b425a5ee98000000000000000000000000000000000000000000000000000006431e74449860c001a002c2402941acdc25bcaae67c62d58f1a942b32723827f77972c74b159b2c174ea04772118ec71bc7fbe0c9f1c9ef90f58927126480ca769d73704365bfbac65db3","0x02f8d483014a348308c06b830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000005643a7772017c8544d3841894c1f7c264cd05ffe0000000000000000000000000000000000000000000000000b035a61b2e8be000000000000000000000000000000000000000000000000000006431e7446c578c001a0ac31a5ad06a3897a0c1a909770badf8cec728abd2daf4d125a551778fa597124a013b1de6f741139d957f299bf22de0a91c1d8a4f2ade6743ddcec89bcc9e8b07d","0x02f8d483014a34830922b1830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000576831e77af4b5425b39efb23528441b79ee71e20000000000000000000000000000000000000000000000002bed26c4505ca40000000000000000000000000000000000000
00000000000000006431e7446f712c080a0c105ef2c930e95694d112028a642399e5a56ce6416f9b8df9ad27baa26244483a064f6e5881fa728b7afaa2e2ddd62c3182789cb247f90b6276c14f8bfc1b4f2cf"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":9,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xeb419bf069b8bf9738adcb7fad118724a1d4d6a83821bc532983a2949aa0910d","logs_bloom":"0x000380000000021000200005200004800400800000001000001a000500001000490000004004000000918000010000000000008000000100040200821000806800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000000000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000000100a00221104400010010004110100000400100008002140004280010000000001","gas_used":"0x21de0c","block_hash":"0xb802c08c65bdefdd507fe07634ea29eeaad1859b33ffac2c426dc7b620d22b19","transactions":["0x02f8d483014a3483095beb830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000f73c129529caa024337c39e467c720cfc45874220000000000000000000000000000000000000000000000000de4f04092790e800000000000000000000000000000000000000000000000000006431e74489081c080a0a100818c4c3ec3b0bced80f81f09fc878b23274266b45e2043956562b6714dcfa023dbcbc4df92ed5817fcc9bcd238a038aad806c69585dc8cf582e6012d012d28"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":10,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xaa2
80e93aa4a7d3f616ad391404411abbeebe8bc8fb1ed9b3ef4d0a42bf64ccd","logs_bloom":"0x000380000000021000200005200004800400800000001000001a000500001000490000204004000000918000010000000000008000000100040200821020886800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000020000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000200100a10221104400010010004110100000400100008002140004280010000000001","gas_used":"0x49f43c","block_hash":"0x2b440a266840a96993d85d45d1de1e81f7a859aaac4654dcd5a990ffa2ef947b","transactions":["0x02f90fb583014a34831d4797830f4240830f42a88327fdba94ebaff6d578733e4603b99cbdbb221482f29a78e180b90f4484779f44000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003c00000000000000000000000000000000000000000000000000000000000000460000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006e00000000000000000000000000000000000000000000000000000000000000780000000000000000000000000000000000000000000000000000000000000082000000000000000000000000000000000000000000000000000000000000008c000000000000000000000000000000000000000000000000000000000000009600000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000aa00000000000000000000000000000000000000000000000000000000000000b400000000000000000000000000000000000000000000000000000000000000be000000000000000000000000000
00000000000000000000000000000000000c800000000000000000000000000000000000000000000000000000000000000d200000000000000000000000000000000000000000000000000000000000000dc00000000000000000000000000000000000000000000000000000000000000e600000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce75532be4cf5bacb01e018950b5be900eafa59f2431fed6b869799529ab39fe0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce76343a51197104ee22e37cf9c48a9eb5c99031a25196c2f1264deb5d4d3ff80000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce770a821c08f4e200bf42a148754153d78e977260a213094b521b5625618ec70000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce78bb3bcd3592df48dcd3a6383c8f61d8434b6058f61a587dfb0c37134294420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce79b35d157e36939c03df12e39599530f615a90e624610d8d023eaf2f8329030000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7a6370bb580180c882bf7214d1f701529ea455f8567b
2be79496c9437a2ce30000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7bf53208371925c87cacbb0bbfbf330fc8a02818e1d73c56760a9fded7f8c80000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7cac670fbf544ec6d7360aacecd6e3fb35ea8a6ebef6161c9563a6d16a4a200000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7d91406552fdfe569345c8561328604a63912a36d21cafa1efed0275ce6b190000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7e6e0b5ccd73c9cea553a19e7ab6e533bc253f552e6b9145dd5470d2612f8d0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7fc72e52aaff88c842a2092b7ce047cf47a8f56da1035142a41b6a59b856420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce80fcaa166cc2fd1353b40f3071a491cd7ca2746c8943caaa6c024c8df0131f00000000000000000000000000000000000000000000000000000000000000
00008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce812aabb780f12ed0c0c5dc6932220d8c5f730c54ee63384fbfe1e7fa90a5090000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce82c1dea3b99a38cf0743f31402eba0d22c4da43e715d37533da9bc5f8ca4ae0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce835d696b1a6f5089cf9bc4c2c529e181678fa2f2feb745223e7520d885a2260000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce8400f527b7b931ddfe77007be944f58173dfc1c5928eb433ae71e96f61a8420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce85b6dcd2b462f2d1c72e4b46ea316f9183fb9ea40866724b7eef10211a83390000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce860986c742f73c595e7cf75d5014bdccde828c0fa3891f8a7e77cbaf974e7d0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce879d5711ffb11c2d9fe9737837f55726ba0609c21d62e2783cc38db59edafa0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce882976c03e7cf30e96a5a578eff196e4062258f3d859abdf161bcb5fd18356000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000c080a0c7ccb6ec845a35639b2905d243be7a6cf2ee1412331d348a4bf65f53ae89cde8a06ecc40e8297c75e86332c2924b96c6bf2334a6d1b1ef803e27c9de692906b138","0x02f8b183014a3481ad830ecd10830ecdaf82b6a994af33add7918f685b2a82c1077bd8c07d220ffa0480b844095ea7b3000000000000000000000000a449bc031fa0b815ca14fafd0c5edb75ccd9c80f00000000000000000000000000000000000000000000000c6a036eb4bc740000c001a0d1877e98821074c02cf20dc84d31d70fbc00027d404fe99f3e887a33082bb6cda016f8a55aea1573b3834180e43d90eb6c4b1ffb321d2a0be8b3aa71eeaed5104a"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}}]"#; - - let flashblocks: Vec = serde_json::from_str(raw_sequence).unwrap(); - let execution_data = OpExecutionData::from_flashblocks(&flashblocks).unwrap(); - - // Validate against expected final block state from base payload (index 0) - assert_eq!( - execution_data.payload.parent_hash(), - b256!("6ffd2714d5af6c412c57db3f664a5a127516573bbd987fd242d06f71ea662741") - ); - assert_eq!(execution_data.payload.block_number(), 0x1fe4052); - assert_eq!(execution_data.payload.timestamp(), 0x690fdf84); - assert_eq!( - execution_data.payload.fee_recipient(), - address!("4200000000000000000000000000000000000011") - ); - assert_eq!(execution_data.payload.gas_limit(), 0x3938700); - 
assert_eq!(execution_data.payload.as_v1().gas_used, 0x49f43c); - - // Base skipped state root calculation thus state root is expected to be zeros. - // And subsequently the last flashblocks' block hash is not the final block's block hash. - // Real block hash: 0x0c3c3ff081d8a5ea1239bfb8a0593f641154a06b783fa142809880e011cd6a3f - assert_eq!( - execution_data.payload.as_v1().state_root, - b256!("0000000000000000000000000000000000000000000000000000000000000000") - ); - assert_eq!( - execution_data.payload.block_hash(), - // last flashblock block hash - b256!("2b440a266840a96993d85d45d1de1e81f7a859aaac4654dcd5a990ffa2ef947b") - ); - - // Verify receipts root from last flashblock (index 10) - assert_eq!( - execution_data.payload.as_v1().receipts_root, - b256!("aa280e93aa4a7d3f616ad391404411abbeebe8bc8fb1ed9b3ef4d0a42bf64ccd") - ); - - // Verify total transaction count across all 11 flashblocks - // Index 0: 1, Index 1: 2, Index 2: 1, Index 3: 0, Index 4: 4, Index 5: 1 - // Index 6: 3, Index 7: 5, Index 8: 4, Index 9: 1, Index 10: 2 - // Total: 24 transactions - assert_eq!(execution_data.payload.transactions().len(), 24); - - // Verify withdrawals root from last flashblock - assert_eq!( - execution_data.payload.as_v4().unwrap().withdrawals_root, - b256!("77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44") - ); - - // Verify parent beacon block root from base payload - assert_eq!( - execution_data.parent_beacon_block_root(), - Some(b256!("f058b1e43890ed5f838bd07e77db06d075d894343d1b31f6099a345b0d8f7d1b")) - ); - } -} diff --git a/op-alloy/crates/rpc-types-engine/src/payload/mod.rs b/op-alloy/crates/rpc-types-engine/src/payload/mod.rs deleted file mode 100644 index 4367784f5a1..00000000000 --- a/op-alloy/crates/rpc-types-engine/src/payload/mod.rs +++ /dev/null @@ -1,817 +0,0 @@ -//! 
Versioned Optimism execution payloads - -pub mod error; -pub mod v3; -pub mod v4; - -use crate::{OpExecutionPayloadSidecar, OpExecutionPayloadV4}; -use alloc::vec::Vec; -use alloy_consensus::{Block, BlockHeader, HeaderInfo, Transaction}; -use alloy_eips::{Decodable2718, Encodable2718, Typed2718, eip7685::EMPTY_REQUESTS_HASH}; -use alloy_primitives::{Address, B256, Bytes, Sealable, U256}; -use alloy_rpc_types_engine::{ - ExecutionPayload, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV2, - ExecutionPayloadV3, PayloadError, -}; -use error::OpPayloadError; - -/// An execution payload, which can be either [`ExecutionPayloadV2`], [`ExecutionPayloadV3`], or -/// [`OpExecutionPayloadV4`]. -#[derive(Clone, Debug, PartialEq, Eq)] -#[cfg_attr(feature = "serde", derive(serde::Serialize))] -#[cfg_attr(feature = "std", derive(ssz_derive::Encode, ssz_derive::Decode))] -#[cfg_attr(feature = "std", ssz(enum_behaviour = "transparent"))] -#[cfg_attr(feature = "serde", serde(untagged))] -pub enum OpExecutionPayload { - /// V1 payload - V1(ExecutionPayloadV1), - /// V2 payload - V2(ExecutionPayloadV2), - /// V3 payload - V3(ExecutionPayloadV3), - /// V4 payload - V4(OpExecutionPayloadV4), -} - -#[cfg(feature = "serde")] -impl<'de> serde::Deserialize<'de> for OpExecutionPayload { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - struct ExecutionPayloadVisitor; - - impl<'de> serde::de::Visitor<'de> for ExecutionPayloadVisitor { - type Value = OpExecutionPayload; - - fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - formatter.write_str("a valid OpExecutionPayload object") - } - - fn visit_map(self, mut map: A) -> Result - where - A: serde::de::MapAccess<'de>, - { - use alloc::string::String; - use alloy_primitives::{U64, map::HashMap}; - use alloy_rpc_types_engine::ExecutionPayloadV1; - - enum Fields { - ParentHash, - FeeRecipient, - StateRoot, - ReceiptsRoot, - LogsBloom, - PrevRandao, - 
BlockNumber, - GasLimit, - GasUsed, - Timestamp, - ExtraData, - BaseFeePerGas, - BlockHash, - Transactions, - Withdrawals, - BlobGasUsed, - ExcessBlobGas, - WithdrawalsRoot, - Unknown(alloc::string::String), - } - - impl<'de> serde::Deserialize<'de> for Fields { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - struct FieldVisitor; - - impl serde::de::Visitor<'_> for FieldVisitor { - type Value = Fields; - - fn expecting( - &self, - formatter: &mut core::fmt::Formatter<'_>, - ) -> core::fmt::Result { - formatter.write_str("a known field") - } - - fn visit_str(self, value: &str) -> Result - where - E: serde::de::Error, - { - Ok(match value { - "parentHash" => Fields::ParentHash, - "feeRecipient" => Fields::FeeRecipient, - "stateRoot" => Fields::StateRoot, - "receiptsRoot" => Fields::ReceiptsRoot, - "logsBloom" => Fields::LogsBloom, - "prevRandao" => Fields::PrevRandao, - "blockNumber" => Fields::BlockNumber, - "gasLimit" => Fields::GasLimit, - "gasUsed" => Fields::GasUsed, - "timestamp" => Fields::Timestamp, - "extraData" => Fields::ExtraData, - "baseFeePerGas" => Fields::BaseFeePerGas, - "blockHash" => Fields::BlockHash, - "transactions" => Fields::Transactions, - "withdrawals" => Fields::Withdrawals, - "blobGasUsed" => Fields::BlobGasUsed, - "excessBlobGas" => Fields::ExcessBlobGas, - "withdrawalsRoot" => Fields::WithdrawalsRoot, - _ => Fields::Unknown(value.into()), - }) - } - } - - deserializer.deserialize_str(FieldVisitor) - } - } - - let mut parent_hash = None; - let mut fee_recipient = None; - let mut state_root = None; - let mut receipts_root = None; - let mut logs_bloom = None; - let mut prev_randao = None; - let mut block_number = None; - let mut gas_limit = None; - let mut gas_used = None; - let mut timestamp = None; - let mut extra_data = None; - let mut base_fee_per_gas = None; - let mut block_hash = None; - let mut transactions = None; - let mut withdrawals = None; - let mut blob_gas_used = None; - let mut 
excess_blob_gas = None; - let mut withdrawals_root = None; - - let mut extra_fields = HashMap::new(); - - while let Some(key) = map.next_key()? { - match key { - Fields::ParentHash => parent_hash = Some(map.next_value()?), - Fields::FeeRecipient => fee_recipient = Some(map.next_value()?), - Fields::StateRoot => state_root = Some(map.next_value()?), - Fields::ReceiptsRoot => receipts_root = Some(map.next_value()?), - Fields::LogsBloom => logs_bloom = Some(map.next_value()?), - Fields::PrevRandao => prev_randao = Some(map.next_value()?), - Fields::BlockNumber => { - let raw = map.next_value::()?; - block_number = Some(raw.to()); - } - Fields::GasLimit => { - let raw = map.next_value::()?; - gas_limit = Some(raw.to()); - } - Fields::GasUsed => { - let raw = map.next_value::()?; - gas_used = Some(raw.to()); - } - Fields::Timestamp => { - let raw = map.next_value::()?; - timestamp = Some(raw.to()); - } - Fields::ExtraData => extra_data = Some(map.next_value()?), - Fields::BaseFeePerGas => base_fee_per_gas = Some(map.next_value()?), - Fields::BlockHash => block_hash = Some(map.next_value()?), - Fields::Transactions => transactions = Some(map.next_value()?), - Fields::Withdrawals => withdrawals = Some(map.next_value()?), - Fields::BlobGasUsed => { - let raw = map.next_value::()?; - blob_gas_used = Some(raw.to()); - } - Fields::ExcessBlobGas => { - let raw = map.next_value::()?; - excess_blob_gas = Some(raw.to()); - } - Fields::WithdrawalsRoot => withdrawals_root = Some(map.next_value()?), - Fields::Unknown(field) => { - let raw = map.next_value::()?; - extra_fields.insert(field, raw); - } - } - } - - let v1 = ExecutionPayloadV1 { - parent_hash: parent_hash - .ok_or_else(|| serde::de::Error::missing_field("parentHash"))?, - fee_recipient: fee_recipient - .ok_or_else(|| serde::de::Error::missing_field("feeRecipient"))?, - state_root: state_root - .ok_or_else(|| serde::de::Error::missing_field("stateRoot"))?, - receipts_root: receipts_root - .ok_or_else(|| 
serde::de::Error::missing_field("receiptsRoot"))?, - logs_bloom: logs_bloom - .ok_or_else(|| serde::de::Error::missing_field("logsBloom"))?, - prev_randao: prev_randao - .ok_or_else(|| serde::de::Error::missing_field("prevRandao"))?, - block_number: block_number - .ok_or_else(|| serde::de::Error::missing_field("blockNumber"))?, - gas_limit: gas_limit - .ok_or_else(|| serde::de::Error::missing_field("gasLimit"))?, - gas_used: gas_used.ok_or_else(|| serde::de::Error::missing_field("gasUsed"))?, - timestamp: timestamp - .ok_or_else(|| serde::de::Error::missing_field("timestamp"))?, - extra_data: extra_data - .ok_or_else(|| serde::de::Error::missing_field("extraData"))?, - base_fee_per_gas: base_fee_per_gas - .ok_or_else(|| serde::de::Error::missing_field("baseFeePerGas"))?, - block_hash: block_hash - .ok_or_else(|| serde::de::Error::missing_field("blockHash"))?, - transactions: transactions - .ok_or_else(|| serde::de::Error::missing_field("transactions"))?, - }; - - // Ensure `withdrawals` is present before proceeding - let withdrawals = - withdrawals.ok_or_else(|| serde::de::Error::missing_field("withdrawals"))?; - - // Construct base V2 payload - let payload_v2 = ExecutionPayloadV2 { payload_inner: v1, withdrawals }; - - // Ensure `blob_gas_used` and `excess_blob_gas` are either both present or both - // absent - match (blob_gas_used, excess_blob_gas) { - // If both are present, create V3 - (Some(blob_gas_used), Some(excess_blob_gas)) => { - let payload_v3 = ExecutionPayloadV3 { - payload_inner: payload_v2, - blob_gas_used, - excess_blob_gas, - }; - - // If `withdrawals_root` is present, wrap into V4; otherwise, return V3 - if let Some(withdrawals_root) = withdrawals_root { - Ok(OpExecutionPayload::V4(OpExecutionPayloadV4 { - payload_inner: payload_v3, - withdrawals_root, - })) - } else { - Ok(OpExecutionPayload::V3(payload_v3)) - } - } - // If one is missing, reject as invalid - (Some(_), None) | (None, Some(_)) => { - Err(serde::de::Error::custom("invalid enum 
variant")) - } - // If neither are present, return V2 - (None, None) => Ok(OpExecutionPayload::V2(payload_v2)), - } - } - } - - const FIELDS: &[&str] = &[ - "parentHash", - "feeRecipient", - "stateRoot", - "receiptsRoot", - "logsBloom", - "prevRandao", - "blockNumber", - "gasLimit", - "gasUsed", - "timestamp", - "extraData", - "baseFeePerGas", - "blockHash", - "transactions", - "withdrawals", - "blobGasUsed", - "excessBlobGas", - "withdrawalsRoot", - ]; - - deserializer.deserialize_struct("OpExecutionPayload", FIELDS, ExecutionPayloadVisitor) - } -} - -impl OpExecutionPayload { - /// Conversion from [`alloy_consensus::Block`]. Also returns the - /// [`OpExecutionPayloadSidecar`] extracted from the block. - /// - /// See also [`from_block_unchecked`](OpExecutionPayload::from_block_unchecked). - /// - /// Note: This re-calculates the block hash. - pub fn from_block_slow(block: &Block) -> (Self, OpExecutionPayloadSidecar) - where - T: Encodable2718 + Transaction, - H: BlockHeader + Sealable, - { - Self::from_block_unchecked(block.hash_slow(), block) - } - - /// Conversion from [`alloy_consensus::Block`]. Also returns the - /// [`OpExecutionPayloadSidecar`] extracted from the block. - /// - /// See also [`ExecutionPayload::from_block_unchecked`]. - /// See also [`OpExecutionPayloadSidecar::from_block`]. 
- pub fn from_block_unchecked( - block_hash: B256, - block: &Block, - ) -> (Self, OpExecutionPayloadSidecar) - where - T: Encodable2718 + Transaction, - H: BlockHeader, - { - let sidecar = OpExecutionPayloadSidecar::from_block(block); - - let execution_payload = match block.withdrawals_root() { - Some(withdrawals_root) if sidecar.isthmus().is_some() => { - // block with (empty) request hashes: V4 - Self::V4(OpExecutionPayloadV4::from_v3_with_withdrawals_root( - ExecutionPayloadV3::from_block_unchecked(block_hash, block), - withdrawals_root, - )) - } - Some(_) if block.header.parent_beacon_block_root().is_some() => { - // block with parent beacon block root: at least V3 - Self::V3(ExecutionPayloadV3::from_block_unchecked(block_hash, block)) - } - Some(_) => { - // block with withdrawals root: at least V2 - Self::V2(ExecutionPayloadV2::from_block_unchecked(block_hash, block)) - } - None => { - // otherwise V1 - Self::V1(ExecutionPayloadV1::from_block_unchecked(block_hash, block)) - } - }; - - (execution_payload, sidecar) - } - - /// Creates a new instance from `newPayloadV2` payload, i.e. [`V1`](Self::V1) or - /// [`V2`](Self::V2) variant. - /// - /// Spec: - pub fn v2(payload: ExecutionPayloadInputV2) -> Self { - match payload.into_payload() { - ExecutionPayload::V1(payload) => Self::V1(payload), - ExecutionPayload::V2(payload) => Self::V2(payload), - _ => unreachable!(), - } - } - - /// Creates a new instance from `newPayloadV3` payload, i.e. [`V3`](Self::V3) variant. - /// - /// Spec: - pub const fn v3(payload: ExecutionPayloadV3) -> Self { - Self::V3(payload) - } - - /// Creates a new instance from `newPayloadV4` payload, i.e. [`V4`](Self::V4) variant. - /// - /// Spec: - pub const fn v4(payload: OpExecutionPayloadV4) -> Self { - Self::V4(payload) - } - - /// Returns a reference to the V1 payload. 
- pub const fn as_v1(&self) -> &ExecutionPayloadV1 { - match self { - Self::V1(payload) => payload, - Self::V2(payload) => &payload.payload_inner, - Self::V3(payload) => &payload.payload_inner.payload_inner, - Self::V4(payload) => &payload.payload_inner.payload_inner.payload_inner, - } - } - - /// Returns a mutable reference to the V1 payload. - pub const fn as_v1_mut(&mut self) -> &mut ExecutionPayloadV1 { - match self { - Self::V1(payload) => payload, - Self::V2(payload) => &mut payload.payload_inner, - Self::V3(payload) => &mut payload.payload_inner.payload_inner, - Self::V4(payload) => &mut payload.payload_inner.payload_inner.payload_inner, - } - } - - /// Returns a reference to the V2 payload, if any. - pub const fn as_v2(&self) -> Option<&ExecutionPayloadV2> { - match self { - Self::V1(_) => None, - Self::V2(payload) => Some(payload), - Self::V3(payload) => Some(&payload.payload_inner), - Self::V4(payload) => Some(&payload.payload_inner.payload_inner), - } - } - - /// Returns a mutable reference to the V2 payload, if any. - pub const fn as_v2_mut(&mut self) -> Option<&mut ExecutionPayloadV2> { - match self { - Self::V1(_) => None, - Self::V2(payload) => Some(payload), - Self::V3(payload) => Some(&mut payload.payload_inner), - Self::V4(payload) => Some(&mut payload.payload_inner.payload_inner), - } - } - - /// Returns a reference to the V3 payload, if any. - pub const fn as_v3(&self) -> Option<&ExecutionPayloadV3> { - match self { - Self::V1(_) | Self::V2(_) => None, - Self::V3(payload) => Some(payload), - Self::V4(payload) => Some(&payload.payload_inner), - } - } - - /// Returns a mutable reference to the V3 payload, if any. - pub const fn as_v3_mut(&mut self) -> Option<&mut ExecutionPayloadV3> { - match self { - Self::V1(_) | Self::V2(_) => None, - Self::V3(payload) => Some(payload), - Self::V4(payload) => Some(&mut payload.payload_inner), - } - } - - /// Returns a reference to the V4 payload, if any. 
- pub const fn as_v4(&self) -> Option<&OpExecutionPayloadV4> { - match self { - Self::V1(_) | Self::V2(_) | Self::V3(_) => None, - Self::V4(payload) => Some(payload), - } - } - - /// Returns a mutable reference to the V4 payload, if any. - pub const fn as_v4_mut(&mut self) -> Option<&mut OpExecutionPayloadV4> { - match self { - Self::V1(_) | Self::V2(_) | Self::V3(_) => None, - Self::V4(payload) => Some(payload), - } - } - - /// Returns the transactions for the payload. - pub const fn transactions(&self) -> &Vec { - &self.as_v1().transactions - } - - /// Returns a mutable reference to the transactions for the payload. - pub const fn transactions_mut(&mut self) -> &mut Vec { - &mut self.as_v1_mut().transactions - } - - /// Returns the parent hash for the payload. - pub const fn parent_hash(&self) -> B256 { - self.as_v1().parent_hash - } - - /// Returns the block hash for the payload. - pub const fn block_hash(&self) -> B256 { - self.as_v1().block_hash - } - - /// Returns the block number for this payload. - pub const fn block_number(&self) -> u64 { - self.as_v1().block_number - } - - /// Returns the timestamp for this payload. - pub const fn timestamp(&self) -> u64 { - self.as_v1().timestamp - } - - /// Returns the fee recipient for this payload. - pub const fn fee_recipient(&self) -> Address { - self.as_v1().fee_recipient - } - - /// Returns the gas limit for this payload. - pub const fn gas_limit(&self) -> u64 { - self.as_v1().gas_limit - } - - /// Returns the saturated base fee per gas for this payload. - pub fn saturated_base_fee_per_gas(&self) -> u64 { - self.as_v1().base_fee_per_gas.saturating_to() - } - - /// Returns the excess blob gas for this payload. - pub fn excess_blob_gas(&self) -> Option { - self.as_v3().map(|payload| payload.excess_blob_gas) - } - - /// Returns the blob gas used for this payload. - pub fn blob_gas_used(&self) -> Option { - self.as_v3().map(|payload| payload.blob_gas_used) - } - - /// Returns the prev randao for this payload. 
- pub const fn prev_randao(&self) -> B256 { - self.as_v1().prev_randao - } - - /// Extracts essential information into one container type. - pub fn header_info(&self) -> HeaderInfo { - HeaderInfo { - number: self.block_number(), - beneficiary: self.fee_recipient(), - timestamp: self.timestamp(), - gas_limit: self.gas_limit(), - base_fee_per_gas: Some(self.saturated_base_fee_per_gas()), - excess_blob_gas: self.excess_blob_gas(), - blob_gas_used: self.blob_gas_used(), - difficulty: U256::ZERO, - mix_hash: Some(self.prev_randao()), - } - } - - /// Converts [`OpExecutionPayload`] to [`Block`] with raw transactions. - /// - /// Caution: This does not set fields that are not part of the payload and only part of the - /// [`OpExecutionPayloadSidecar`]: - /// - parent_beacon_block_root - /// - /// See also: [`OpExecutionPayload::into_block_with_sidecar_raw`] - pub fn into_block_raw(self) -> Result, PayloadError> { - match self { - Self::V1(payload) => payload.into_block_raw(), - Self::V2(payload) => payload.into_block_raw(), - Self::V3(payload) => payload.into_block_raw(), - Self::V4(payload) => payload.into_block_raw(), - } - } - - /// Creates a new unsealed block from the given payload and payload sidecar with raw - /// transactions. - /// - /// This sets the `parent_beacon_block_root` and `requests_hash` if present in the sidecar. - /// Also validates that L1 withdrawals are empty. 
- /// - /// See also: [`OpExecutionPayload::try_into_block_with_sidecar`] - pub fn into_block_with_sidecar_raw( - self, - sidecar: &OpExecutionPayloadSidecar, - ) -> Result, OpPayloadError> { - if let Some(payload) = self.as_v2() - && !payload.withdrawals.is_empty() - { - return Err(OpPayloadError::NonEmptyL1Withdrawals); - } - - let mut block = self.into_block_raw()?; - - if let Some(blobs_hashes) = sidecar.versioned_hashes() - && !blobs_hashes.is_empty() - { - return Err(OpPayloadError::NonEmptyBlobVersionedHashes); - } - if let Some(reqs_hash) = sidecar.requests_hash() { - if reqs_hash != EMPTY_REQUESTS_HASH { - return Err(OpPayloadError::NonEmptyELRequests); - } - block.header.requests_hash = Some(EMPTY_REQUESTS_HASH) - } - block.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); - - Ok(block) - } - - #[allow(rustdoc::broken_intra_doc_links)] - /// Converts [`OpExecutionPayload`] to [`Block`]. - /// - /// Checks that payload doesn't contain: - /// - blob transactions - /// - L1 withdrawals - /// - /// Caution: This does not set fields that are not part of the payload and only part of the - /// [`OpExecutionPayloadSidecar`]: - /// - parent_beacon_block_root - /// - /// See also: [`OpExecutionPayload::try_into_block_with_sidecar`] - pub fn try_into_block(self) -> Result, OpPayloadError> { - self.try_into_block_with(|tx| { - T::decode_2718_exact(tx.as_ref()) - .map_err(alloy_rlp::Error::from) - .map_err(PayloadError::from) - }) - } - - #[allow(rustdoc::broken_intra_doc_links)] - /// Converts [`OpExecutionPayload`] to [`Block`] with a custom transaction mapper. 
- /// - /// Checks that payload doesn't contain: - /// - blob transactions - /// - L1 withdrawals - /// - /// Caution: This does not set fields that are not part of the payload and only part of the - /// [`OpExecutionPayloadSidecar`]: - /// - parent_beacon_block_root - /// - /// See also: [`OpExecutionPayload::try_into_block_with_sidecar_with`] - pub fn try_into_block_with(self, f: F) -> Result, OpPayloadError> - where - T: Typed2718, - F: FnMut(alloy_primitives::Bytes) -> Result, - E: Into, - { - if let Some(payload) = self.as_v2() - && !payload.withdrawals.is_empty() - { - return Err(OpPayloadError::NonEmptyL1Withdrawals); - } - let block = match self { - Self::V1(payload) => return Ok(payload.try_into_block_with(f)?), - Self::V2(payload) => return Ok(payload.try_into_block_with(f)?), - Self::V3(payload) => payload.try_into_block_with(f)?, - Self::V4(payload) => payload.try_into_block_with(f)?, - }; - if block.body.has_eip4844_transactions() { - return Err(OpPayloadError::BlobTransaction); - } - - Ok(block) - } - - /// Tries to create a new unsealed block from the given payload and payload sidecar. - /// - /// Additional to checks performed in [`OpExecutionPayload::try_into_block`], which is called - /// under the hood, also checks that sidecar doesn't contain: - /// - blob versioned hashes - /// - execution layer requests - /// - /// See also docs for - /// [`ExecutionPayload::try_into_block_with_sidecar`](alloy_rpc_types_engine::ExecutionPayload::try_into_block_with_sidecar). - pub fn try_into_block_with_sidecar( - self, - sidecar: &OpExecutionPayloadSidecar, - ) -> Result, OpPayloadError> { - self.try_into_block_with_sidecar_with(sidecar, |tx| { - T::decode_2718_exact(tx.as_ref()) - .map_err(alloy_rlp::Error::from) - .map_err(PayloadError::from) - }) - } - - /// Tries to create a new unsealed block from the given payload and payload sidecar with a - /// custom transaction mapper. 
- /// - /// Additional to checks performed in [`OpExecutionPayload::try_into_block_with`], which is - /// called under the hood, also checks that sidecar doesn't contain: - /// - blob versioned hashes - /// - execution layer requests - /// - /// See also docs for - /// [`ExecutionPayload::try_into_block_with_sidecar_with`](alloy_rpc_types_engine::ExecutionPayload::try_into_block_with_sidecar_with). - pub fn try_into_block_with_sidecar_with( - self, - sidecar: &OpExecutionPayloadSidecar, - f: F, - ) -> Result, OpPayloadError> - where - T: Typed2718, - F: FnMut(alloy_primitives::Bytes) -> Result, - E: Into, - { - let mut base_payload = self.try_into_block_with(f)?; - if let Some(blobs_hashes) = sidecar.versioned_hashes() - && !blobs_hashes.is_empty() - { - return Err(OpPayloadError::NonEmptyBlobVersionedHashes); - } - if let Some(reqs_hash) = sidecar.requests_hash() { - if reqs_hash != EMPTY_REQUESTS_HASH { - return Err(OpPayloadError::NonEmptyELRequests); - } - base_payload.header.requests_hash = Some(EMPTY_REQUESTS_HASH) - } - base_payload.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); - - Ok(base_payload) - } - - /// Returns an iterator over the decoded transactions in this payload. - /// - /// This iterator will decode transactions on the fly. - pub fn decoded_transactions( - &self, - ) -> impl Iterator> + '_ { - self.transactions().iter().map(|tx_bytes| T::decode_2718_exact(tx_bytes.as_ref())) - } - - /// Returns iterator over decoded transactions with their original encoded bytes. - /// - /// This iterator will decode transactions on the fly and return them with their bytes. - pub fn decoded_transactions_with_encoded( - &self, - ) -> impl Iterator>> + '_ - { - self.transactions().iter().map(|tx_bytes| { - T::decode_2718_exact(tx_bytes.as_ref()) - .map(|tx| alloy_eips::eip2718::WithEncoded::new(tx_bytes.clone(), tx)) - }) - } - - /// Returns an iterator over the recovered transactions in this payload. 
- /// - /// This iterator will decode and recover signer addresses for transactions on the fly. - pub fn recovered_transactions( - &self, - ) -> impl Iterator< - Item = Result< - alloy_consensus::transaction::Recovered, - alloy_consensus::crypto::RecoveryError, - >, - > + '_ - where - T: Decodable2718 + alloy_consensus::transaction::SignerRecoverable, - { - self.decoded_transactions::().map(|res| { - res.map_err(alloy_consensus::crypto::RecoveryError::from_source) - .and_then(|tx| tx.try_into_recovered()) - }) - } - - /// Returns an iterator over the recovered transactions in this payload with their - /// original encoded bytes. - /// - /// This iterator will decode and recover signer addresses for transactions on the fly - /// and return them with their bytes. - pub fn recovered_transactions_with_encoded( - &self, - ) -> impl Iterator< - Item = Result< - alloy_eips::eip2718::WithEncoded>, - alloy_consensus::crypto::RecoveryError, - >, - > + '_ - where - T: Decodable2718 + alloy_consensus::transaction::SignerRecoverable, - { - self.transactions().iter().map(|tx_bytes| { - T::decode_2718_exact(tx_bytes.as_ref()) - .map_err(alloy_consensus::crypto::RecoveryError::from_source) - .and_then(|tx| { - tx.try_into_recovered().map(|recovered| { - alloy_eips::eip2718::WithEncoded::new(tx_bytes.clone(), recovered) - }) - }) - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - #[cfg(feature = "serde")] - fn serde_payload_input_enum_v4() { - let response_v4 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0","withdrawalsRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119"}"#; - - let payload: OpExecutionPayload = serde_json::from_str(response_v4).unwrap(); - assert!(payload.as_v4().is_some()); - assert_eq!(serde_json::to_string(&payload).unwrap(), response_v4); - - let payload_v4: OpExecutionPayloadV4 = serde_json::from_str(response_v4).unwrap(); - assert_eq!(payload.as_v4().unwrap(), &payload_v4); - } - - #[test] - #[cfg(feature = "serde")] - fn serde_payload_input_enum_v3() { - let response_v3 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0"}"#; - - let payload: OpExecutionPayload = serde_json::from_str(response_v3).unwrap(); - assert!(payload.as_v3().is_some()); - assert_eq!(serde_json::to_string(&payload).unwrap(), response_v3); - - let payload_v3: ExecutionPayloadV3 = serde_json::from_str(response_v3).unwrap(); - assert_eq!(payload.as_v3().unwrap(), &payload_v3); - } - - #[test] - #[cfg(feature = "serde")] - fn serde_payload_input_enum_v2() { - let response_v2 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[]}"#; - - let payload: OpExecutionPayload = serde_json::from_str(response_v2).unwrap(); - assert!(payload.as_v3().is_none()); - assert_eq!(serde_json::to_string(&payload).unwrap(), response_v2); - - let payload_v2: ExecutionPayloadV2 = serde_json::from_str(response_v2).unwrap(); - assert_eq!(payload.as_v2(), Some(&payload_v2)); - } - - #[test] - #[cfg(feature = "serde")] - fn serde_payload_input_enum_faulty_v2() { - // incomplete V3 payload should be rejected even if it has all V2 fields - let response_faulty = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[], "blobGasUsed": "0x0"}"#; - - let payload: Result = - serde_json::from_str(response_faulty); - assert!(payload.is_err()); - } - - #[test] - #[cfg(feature = "serde")] - fn serde_payload_input_enum_faulty_v1() { - // incomplete V3 payload should be rejected even if it has all V1 fields - let response_faulty = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"blobGasUsed": "0x0"}"#; - - let payload: Result = - serde_json::from_str(response_faulty); - assert!(payload.is_err()); - } -} diff --git a/op-alloy/crates/rpc-types/Cargo.toml b/op-alloy/crates/rpc-types/Cargo.toml deleted file mode 100644 index 501906adbb6..00000000000 --- a/op-alloy/crates/rpc-types/Cargo.toml +++ /dev/null @@ -1,69 +0,0 @@ -[package] -name = "op-alloy-rpc-types" -description = "Optimism RPC types" - -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -authors.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# Workspace -op-alloy-consensus = { workspace = true, features = ["serde"] } - -# Alloy -alloy-serde.workspace = 
true -alloy-consensus.workspace = true -alloy-network-primitives.workspace = true -alloy-eips = { workspace = true, features = ["serde"] } -alloy-rpc-types-eth = { workspace = true, features = ["serde"] } -alloy-primitives = { workspace = true, features = ["map", "rlp", "serde"] } - -# Serde -serde_json.workspace = true -serde = { workspace = true, features = ["derive"] } - -# RPC -jsonrpsee = { workspace = true, optional = true } - -# arbitrary -arbitrary = { workspace = true, features = ["derive"], optional = true } - -# misc -derive_more = { workspace = true, features = ["as_ref", "deref_mut", "try_from"] } -thiserror.workspace = true - -[dev-dependencies] -rand.workspace = true -arbitrary = { workspace = true, features = ["derive"] } -alloy-consensus = { workspace = true, features = ["arbitrary"] } -alloy-primitives = { workspace = true, features = ["arbitrary"] } -alloy-rpc-types-eth = { workspace = true, features = ["arbitrary"] } -similar-asserts.workspace = true - -[features] -default = ["std"] -std = [ - "alloy-network-primitives/std", - "alloy-eips/std", - "alloy-primitives/std", - "alloy-rpc-types-eth/std", - "op-alloy-consensus/std", -] -arbitrary = [ - "std", - "dep:arbitrary", - "alloy-primitives/arbitrary", - "alloy-rpc-types-eth/arbitrary", - "op-alloy-consensus/arbitrary", -] -k256 = ["alloy-rpc-types-eth/k256", "op-alloy-consensus/k256"] -serde = ["op-alloy-consensus/serde"] -jsonrpsee = ["dep:jsonrpsee"] diff --git a/op-alloy/crates/rpc-types/README.md b/op-alloy/crates/rpc-types/README.md deleted file mode 100644 index 1c52207617e..00000000000 --- a/op-alloy/crates/rpc-types/README.md +++ /dev/null @@ -1,10 +0,0 @@ -## `op-alloy-rpc-types` - -CI -op-alloy-rpc-types crate -MIT License -Apache License -Book - - -Optimism RPC-related types. 
diff --git a/op-alloy/crates/rpc-types/src/receipt.rs b/op-alloy/crates/rpc-types/src/receipt.rs deleted file mode 100644 index 3148f948145..00000000000 --- a/op-alloy/crates/rpc-types/src/receipt.rs +++ /dev/null @@ -1,335 +0,0 @@ -//! Receipt types for RPC - -use alloy_consensus::{Receipt, ReceiptWithBloom, TxReceipt}; -use alloy_rpc_types_eth::Log; -use alloy_serde::OtherFields; -use op_alloy_consensus::{ - OpDepositReceipt, OpDepositReceiptWithBloom, OpReceipt, OpReceiptEnvelope, -}; -use serde::{Deserialize, Serialize}; - -/// OP Transaction Receipt type -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[doc(alias = "OpTxReceipt")] -pub struct OpTransactionReceipt { - /// Regular eth transaction receipt including deposit receipts - #[serde(flatten)] - pub inner: alloy_rpc_types_eth::TransactionReceipt>>, - /// L1 block info of the transaction. - #[serde(flatten)] - pub l1_block_info: L1BlockInfo, -} - -impl alloy_network_primitives::ReceiptResponse for OpTransactionReceipt { - fn contract_address(&self) -> Option { - self.inner.contract_address - } - - fn status(&self) -> bool { - self.inner.inner.status() - } - - fn block_hash(&self) -> Option { - self.inner.block_hash - } - - fn block_number(&self) -> Option { - self.inner.block_number - } - - fn transaction_hash(&self) -> alloy_primitives::TxHash { - self.inner.transaction_hash - } - - fn transaction_index(&self) -> Option { - self.inner.transaction_index() - } - - fn gas_used(&self) -> u64 { - self.inner.gas_used() - } - - fn effective_gas_price(&self) -> u128 { - self.inner.effective_gas_price() - } - - fn blob_gas_used(&self) -> Option { - self.inner.blob_gas_used() - } - - fn blob_gas_price(&self) -> Option { - self.inner.blob_gas_price() - } - - fn from(&self) -> alloy_primitives::Address { - self.inner.from() - } - - fn to(&self) -> Option { - self.inner.to() - } - - fn cumulative_gas_used(&self) -> u64 { - self.inner.cumulative_gas_used() - } - - fn 
state_root(&self) -> Option { - self.inner.state_root() - } -} - -/// Additional fields for Optimism transaction receipts: -#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -#[doc(alias = "OptimismTxReceiptFields")] -pub struct OpTransactionReceiptFields { - /// L1 block info. - #[serde(flatten)] - pub l1_block_info: L1BlockInfo, - /* --------------------------------------- Regolith --------------------------------------- */ - /// Deposit nonce for deposit transactions. - /// - /// Always null prior to the Regolith hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub deposit_nonce: Option, - /* ---------------------------------------- Canyon ---------------------------------------- */ - /// Deposit receipt version for deposit transactions. - /// - /// Always null prior to the Canyon hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub deposit_receipt_version: Option, -} - -/// Serialize/Deserialize l1FeeScalar to/from string -mod l1_fee_scalar_serde { - use serde::{Deserialize, de}; - - pub(super) fn serialize(value: &Option, s: S) -> Result - where - S: serde::Serializer, - { - use alloc::string::ToString; - if let Some(v) = value { - return s.serialize_str(&v.to_string()); - } - s.serialize_none() - } - - pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: serde::Deserializer<'de>, - { - use alloc::string::String; - let s: Option = Option::deserialize(deserializer)?; - if let Some(s) = s { - return Ok(Some(s.parse::().map_err(de::Error::custom)?)); - } - - Ok(None) - } -} - -impl From for OtherFields { - fn from(value: OpTransactionReceiptFields) -> Self { - serde_json::to_value(value).unwrap().try_into().unwrap() - } -} - -/// L1 block info extracted from input of first transaction in every block. 
-/// -/// The subset of [`OpTransactionReceiptFields`], that encompasses L1 block -/// info: -/// -#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct L1BlockInfo { - /// L1 base fee is the minimum price per unit of gas. - /// - /// Present from pre-bedrock as de facto L1 price per unit of gas. L1 base fee after Bedrock. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_gas_price: Option, - /// L1 gas used. - /// - /// Present from pre-bedrock, deprecated as of Fjord. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_gas_used: Option, - /// L1 fee for the transaction. - /// - /// Present from pre-bedrock. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_fee: Option, - /// L1 fee scalar for the transaction - /// - /// Present from pre-bedrock to Ecotone. Null after Ecotone. - #[serde(default, skip_serializing_if = "Option::is_none", with = "l1_fee_scalar_serde")] - pub l1_fee_scalar: Option, - /* ---------------------------------------- Ecotone ---------------------------------------- */ - /// L1 base fee scalar. Applied to base fee to compute weighted gas price multiplier. - /// - /// Always null prior to the Ecotone hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_base_fee_scalar: Option, - /// L1 blob base fee. - /// - /// Always null prior to the Ecotone hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_blob_base_fee: Option, - /// L1 blob base fee scalar. Applied to blob base fee to compute weighted gas price multiplier. - /// - /// Always null prior to the Ecotone hardfork. 
- #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub l1_blob_base_fee_scalar: Option, - /* ---------------------------------------- Isthmus ---------------------------------------- */ - /// Operator fee scalar. - /// - /// Always null prior to the Isthmus hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub operator_fee_scalar: Option, - /// Operator fee constant. - /// - /// Always null prior to the Isthmus hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub operator_fee_constant: Option, - /* ---------------------------------------- Jovian ---------------------------------------- */ - /// DA footprint gas scalar. Used to set the DA footprint block limit on the L2. - /// - /// Always null prior to the Jovian hardfork. - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub da_footprint_gas_scalar: Option, -} - -impl Eq for L1BlockInfo {} - -impl From for OpReceiptEnvelope { - fn from(value: OpTransactionReceipt) -> Self { - let inner_envelope = value.inner.inner.into(); - - /// Helper function to convert the inner logs within a [ReceiptWithBloom] from RPC to - /// consensus types. 
- #[inline(always)] - fn convert_standard_receipt( - receipt: ReceiptWithBloom>, - ) -> ReceiptWithBloom> { - let ReceiptWithBloom { logs_bloom, receipt } = receipt; - - let consensus_logs = receipt.logs.into_iter().map(|log| log.inner).collect(); - ReceiptWithBloom { - receipt: Receipt { - status: receipt.status, - cumulative_gas_used: receipt.cumulative_gas_used, - logs: consensus_logs, - }, - logs_bloom, - } - } - - match inner_envelope { - OpReceiptEnvelope::Legacy(receipt) => Self::Legacy(convert_standard_receipt(receipt)), - OpReceiptEnvelope::Eip2930(receipt) => Self::Eip2930(convert_standard_receipt(receipt)), - OpReceiptEnvelope::Eip1559(receipt) => Self::Eip1559(convert_standard_receipt(receipt)), - OpReceiptEnvelope::Eip7702(receipt) => Self::Eip7702(convert_standard_receipt(receipt)), - OpReceiptEnvelope::Deposit(OpDepositReceiptWithBloom { logs_bloom, receipt }) => { - let consensus_logs = receipt.inner.logs.into_iter().map(|log| log.inner).collect(); - let consensus_receipt = OpDepositReceiptWithBloom { - receipt: OpDepositReceipt { - inner: Receipt { - status: receipt.inner.status, - cumulative_gas_used: receipt.inner.cumulative_gas_used, - logs: consensus_logs, - }, - deposit_nonce: receipt.deposit_nonce, - deposit_receipt_version: receipt.deposit_receipt_version, - }, - logs_bloom, - }; - Self::Deposit(consensus_receipt) - } - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::string::ToString; - use serde_json::{Value, json}; - - // - #[test] - fn parse_rpc_receipt() { - let s = r#"{ - "blockHash": "0x9e6a0fb7e22159d943d760608cc36a0fb596d1ab3c997146f5b7c55c8c718c67", - "blockNumber": "0x6cfef89", - "contractAddress": null, - "cumulativeGasUsed": "0xfa0d", - "depositNonce": "0x8a2d11", - "effectiveGasPrice": "0x0", - "from": "0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001", - "gasUsed": "0xfa0d", - "logs": [], - "logsBloom": 
"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "status": "0x1", - "to": "0x4200000000000000000000000000000000000015", - "transactionHash": "0xb7c74afdeb7c89fb9de2c312f49b38cb7a850ba36e064734c5223a477e83fdc9", - "transactionIndex": "0x0", - "type": "0x7e", - "l1GasPrice": "0x3ef12787", - "l1GasUsed": "0x1177", - "l1Fee": "0x5bf1ab43d", - "l1BaseFeeScalar": "0x1", - "l1BlobBaseFee": "0x600ab8f05e64", - "l1BlobBaseFeeScalar": "0x1", - "operatorFeeScalar": "0x1", - "operatorFeeConstant": "0x1", - "daFootprintGasScalar": "0x1" - }"#; - - let receipt: OpTransactionReceipt = serde_json::from_str(s).unwrap(); - let value = serde_json::to_value(&receipt).unwrap(); - let expected_value = serde_json::from_str::(s).unwrap(); - assert_eq!(value, expected_value); - } - - #[test] - fn serialize_empty_optimism_transaction_receipt_fields_struct() { - let op_fields = OpTransactionReceiptFields::default(); - - let json = serde_json::to_value(op_fields).unwrap(); - assert_eq!(json, json!({})); - } - - #[test] - fn serialize_l1_fee_scalar() { - let op_fields = OpTransactionReceiptFields { - l1_block_info: L1BlockInfo { l1_fee_scalar: Some(0.678), ..Default::default() }, - ..Default::default() - }; - - let json = serde_json::to_value(op_fields).unwrap(); - - assert_eq!(json["l1FeeScalar"], serde_json::Value::String("0.678".to_string())); - } - - #[test] - fn deserialize_l1_fee_scalar() { - let json = json!({ - "l1FeeScalar": "0.678" - }); - - let op_fields: OpTransactionReceiptFields = 
serde_json::from_value(json).unwrap(); - assert_eq!(op_fields.l1_block_info.l1_fee_scalar, Some(0.678f64)); - - let json = json!({ - "l1FeeScalar": Value::Null - }); - - let op_fields: OpTransactionReceiptFields = serde_json::from_value(json).unwrap(); - assert_eq!(op_fields.l1_block_info.l1_fee_scalar, None); - - let json = json!({}); - - let op_fields: OpTransactionReceiptFields = serde_json::from_value(json).unwrap(); - assert_eq!(op_fields.l1_block_info.l1_fee_scalar, None); - } -} diff --git a/op-alloy/crates/rpc-types/src/transaction.rs b/op-alloy/crates/rpc-types/src/transaction.rs deleted file mode 100644 index 5b1dac05275..00000000000 --- a/op-alloy/crates/rpc-types/src/transaction.rs +++ /dev/null @@ -1,359 +0,0 @@ -//! Optimism specific types related to transactions. - -use alloy_consensus::{Transaction as TransactionTrait, Typed2718, transaction::Recovered}; -use alloy_eips::{Encodable2718, eip2930::AccessList, eip7702::SignedAuthorization}; -use alloy_primitives::{Address, B256, BlockHash, Bytes, ChainId, TxKind, U256}; -use alloy_serde::OtherFields; -use op_alloy_consensus::{OpTransaction, OpTxEnvelope, transaction::OpTransactionInfo}; -use serde::{Deserialize, Serialize}; - -mod request; -pub use request::OpTransactionRequest; - -/// OP Transaction type -#[derive( - Clone, Debug, PartialEq, Eq, Serialize, Deserialize, derive_more::Deref, derive_more::DerefMut, -)] -#[cfg_attr(all(any(test, feature = "arbitrary"), feature = "k256"), derive(arbitrary::Arbitrary))] -#[serde( - try_from = "tx_serde::TransactionSerdeHelper", - into = "tx_serde::TransactionSerdeHelper", - bound = "T: TransactionTrait + OpTransaction + Clone + serde::Serialize + serde::de::DeserializeOwned" -)] -pub struct Transaction { - /// Ethereum Transaction Types - #[deref] - #[deref_mut] - pub inner: alloy_rpc_types_eth::Transaction, - - /// Nonce for deposit transactions. Only present in RPC responses. 
- pub deposit_nonce: Option, - - /// Deposit receipt version for deposit transactions post-canyon - pub deposit_receipt_version: Option, -} - -impl Transaction { - /// Converts a consensus `tx` with an additional context `tx_info` into an RPC [`Transaction`]. - pub fn from_transaction(tx: Recovered, tx_info: OpTransactionInfo) -> Self { - let base_fee = tx_info.inner.base_fee; - let effective_gas_price = if tx.is_deposit() { - // For deposits, we must always set the `gasPrice` field to 0 in rpc - // deposit tx don't have a gas price field, but serde of `Transaction` will take care of - // it - 0 - } else { - base_fee - .map(|base_fee| { - tx.effective_tip_per_gas(base_fee).unwrap_or_default() + base_fee as u128 - }) - .unwrap_or_else(|| tx.max_fee_per_gas()) - }; - - Self { - inner: alloy_rpc_types_eth::Transaction { - inner: tx, - block_hash: tx_info.inner.block_hash, - block_number: tx_info.inner.block_number, - transaction_index: tx_info.inner.index, - effective_gas_price: Some(effective_gas_price), - }, - deposit_nonce: tx_info.deposit_meta.deposit_nonce, - deposit_receipt_version: tx_info.deposit_meta.deposit_receipt_version, - } - } -} - -impl Typed2718 for Transaction { - fn ty(&self) -> u8 { - self.inner.ty() - } -} - -impl TransactionTrait for Transaction { - fn chain_id(&self) -> Option { - self.inner.chain_id() - } - - fn nonce(&self) -> u64 { - self.inner.nonce() - } - - fn gas_limit(&self) -> u64 { - self.inner.gas_limit() - } - - fn gas_price(&self) -> Option { - self.inner.gas_price() - } - - fn max_fee_per_gas(&self) -> u128 { - self.inner.max_fee_per_gas() - } - - fn max_priority_fee_per_gas(&self) -> Option { - self.inner.max_priority_fee_per_gas() - } - - fn max_fee_per_blob_gas(&self) -> Option { - self.inner.max_fee_per_blob_gas() - } - - fn priority_fee_or_price(&self) -> u128 { - self.inner.priority_fee_or_price() - } - - fn effective_gas_price(&self, base_fee: Option) -> u128 { - self.inner.effective_gas_price(base_fee) - } - - fn 
is_dynamic_fee(&self) -> bool { - self.inner.is_dynamic_fee() - } - - fn kind(&self) -> TxKind { - self.inner.kind() - } - - fn is_create(&self) -> bool { - self.inner.is_create() - } - - fn to(&self) -> Option
{ - self.inner.to() - } - - fn value(&self) -> U256 { - self.inner.value() - } - - fn input(&self) -> &Bytes { - self.inner.input() - } - - fn access_list(&self) -> Option<&AccessList> { - self.inner.access_list() - } - - fn blob_versioned_hashes(&self) -> Option<&[B256]> { - self.inner.blob_versioned_hashes() - } - - fn authorization_list(&self) -> Option<&[SignedAuthorization]> { - self.inner.authorization_list() - } -} - -impl alloy_network_primitives::TransactionResponse - for Transaction -{ - fn tx_hash(&self) -> alloy_primitives::TxHash { - self.inner.tx_hash() - } - - fn block_hash(&self) -> Option { - self.inner.block_hash() - } - - fn block_number(&self) -> Option { - self.inner.block_number() - } - - fn transaction_index(&self) -> Option { - self.inner.transaction_index() - } - - fn from(&self) -> Address { - self.inner.from() - } -} - -/// Optimism specific transaction fields -#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] -#[doc(alias = "OptimismTxFields")] -#[serde(rename_all = "camelCase")] -pub struct OpTransactionFields { - /// The ETH value to mint on L2 - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub mint: Option, - /// Hash that uniquely identifies the source of the deposit. - #[serde(default, skip_serializing_if = "Option::is_none")] - pub source_hash: Option, - /// Field indicating whether the transaction is a system transaction, and therefore - /// exempt from the L2 gas limit. 
- #[serde(default, skip_serializing_if = "Option::is_none")] - pub is_system_tx: Option, - /// Deposit receipt version for deposit transactions post-canyon - #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - pub deposit_receipt_version: Option, -} - -impl From for OtherFields { - fn from(value: OpTransactionFields) -> Self { - serde_json::to_value(value).unwrap().try_into().unwrap() - } -} - -impl AsRef for Transaction { - fn as_ref(&self) -> &T { - self.inner.as_ref() - } -} - -mod tx_serde { - //! Helper module for serializing and deserializing OP [`Transaction`]. - //! - //! This is needed because we might need to deserialize the `from` field into both - //! [`alloy_consensus::transaction::Recovered::signer`] which resides in - //! [`alloy_rpc_types_eth::Transaction::inner`] and [`op_alloy_consensus::TxDeposit::from`]. - //! - //! Additionally, we need similar logic for the `gasPrice` field - use super::*; - use alloy_consensus::transaction::Recovered; - use op_alloy_consensus::OpTransaction; - use serde::de::Error; - - /// Helper struct which will be flattened into the transaction and will only contain `from` - /// field if inner [`OpTxEnvelope`] did not consume it. - #[derive(Serialize, Deserialize)] - struct OptionalFields { - #[serde(default, skip_serializing_if = "Option::is_none")] - from: Option
, - #[serde( - default, - rename = "gasPrice", - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - )] - effective_gas_price: Option, - #[serde( - default, - rename = "nonce", - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - )] - deposit_nonce: Option, - } - - #[derive(Serialize, Deserialize)] - #[serde(rename_all = "camelCase")] - pub(crate) struct TransactionSerdeHelper { - #[serde(flatten)] - inner: T, - #[serde(default)] - block_hash: Option, - #[serde(default, with = "alloy_serde::quantity::opt")] - block_number: Option, - #[serde(default, with = "alloy_serde::quantity::opt")] - transaction_index: Option, - #[serde( - default, - skip_serializing_if = "Option::is_none", - with = "alloy_serde::quantity::opt" - )] - deposit_receipt_version: Option, - - #[serde(flatten)] - other: OptionalFields, - } - - impl From> for TransactionSerdeHelper { - fn from(value: Transaction) -> Self { - let Transaction { - inner: - alloy_rpc_types_eth::Transaction { - inner, - block_hash, - block_number, - transaction_index, - effective_gas_price, - }, - deposit_receipt_version, - deposit_nonce, - } = value; - - // if inner transaction is a deposit, then don't serialize `from` directly - let from = if inner.as_deposit().is_some() { None } else { Some(inner.signer()) }; - - // if inner transaction has its own `gasPrice` don't serialize it in this struct. 
- let effective_gas_price = effective_gas_price.filter(|_| inner.gas_price().is_none()); - - Self { - inner: inner.into_inner(), - block_hash, - block_number, - transaction_index, - deposit_receipt_version, - other: OptionalFields { from, effective_gas_price, deposit_nonce }, - } - } - } - - impl TryFrom> for Transaction { - type Error = serde_json::Error; - - fn try_from(value: TransactionSerdeHelper) -> Result { - let TransactionSerdeHelper { - inner, - block_hash, - block_number, - transaction_index, - deposit_receipt_version, - other, - } = value; - - // Try to get `from` field from inner envelope or from `MaybeFrom`, otherwise return - // error - let from = if let Some(from) = other.from { - from - } else { - inner - .as_deposit() - .map(|v| v.from) - .ok_or_else(|| serde_json::Error::custom("missing `from` field"))? - }; - - // Only serialize deposit_nonce if inner transaction is deposit to avoid duplicated keys - let deposit_nonce = other.deposit_nonce.filter(|_| inner.is_deposit()); - - let effective_gas_price = other.effective_gas_price.or(inner.gas_price()); - - Ok(Self { - inner: alloy_rpc_types_eth::Transaction { - inner: Recovered::new_unchecked(inner, from), - block_hash, - block_number, - transaction_index, - effective_gas_price, - }, - deposit_receipt_version, - deposit_nonce, - }) - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_deserialize_deposit() { - // cast rpc eth_getTransactionByHash - // 0xbc9329afac05556497441e2b3ee4c5d4da7ca0b2a4c212c212d0739e94a24df9 --rpc-url optimism - let rpc_tx = 
r#"{"blockHash":"0x9d86bb313ebeedf4f9f82bf8a19b426be656a365648a7c089b618771311db9f9","blockNumber":"0x798ad0b","hash":"0xbc9329afac05556497441e2b3ee4c5d4da7ca0b2a4c212c212d0739e94a24df9","transactionIndex":"0x0","type":"0x7e","nonce":"0x152ea95","input":"0x440a5e200000146b000f79c50000000000000003000000006725333f000000000141e287000000000000000000000000000000000000000000000000000000012439ee7e0000000000000000000000000000000000000000000000000000000063f363e973e96e7145ff001c81b9562cba7b6104eeb12a2bc4ab9f07c27d45cd81a986620000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985","mint":"0x0","sourceHash":"0x04e9a69416471ead93b02f0c279ab11ca0b635db5c1726a56faf22623bafde52","r":"0x0","s":"0x0","v":"0x0","yParity":"0x0","gas":"0xf4240","from":"0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001","to":"0x4200000000000000000000000000000000000015","depositReceiptVersion":"0x1","value":"0x0","gasPrice":"0x0"}"#; - - let tx = serde_json::from_str::(rpc_tx).unwrap(); - - let OpTxEnvelope::Deposit(inner) = tx.as_ref() else { - panic!("Expected deposit transaction"); - }; - assert_eq!(tx.inner.inner.signer(), inner.from); - assert_eq!(tx.deposit_nonce, Some(22211221)); - assert_eq!(tx.inner.effective_gas_price, Some(0)); - - let deserialized = serde_json::to_value(&tx).unwrap(); - let expected = serde_json::from_str::(rpc_tx).unwrap(); - similar_asserts::assert_eq!(deserialized, expected); - } -} diff --git a/op-alloy/deny.toml b/op-alloy/deny.toml deleted file mode 100644 index 91a662c5e38..00000000000 --- a/op-alloy/deny.toml +++ /dev/null @@ -1,62 +0,0 @@ -[graph] -targets = [] -all-features = false -no-default-features = false - -[output] -feature-depth = 1 - -[advisories] -ignore = [ - # paste crate is no longer maintained. - "RUSTSEC-2024-0436", - # bincode is unmaintained, but only used as a dev-dependency for testing. 
- "RUSTSEC-2025-0141", -] - -[licenses] -allow = [ - "MIT", - "ISC", - "MPL-2.0", - "Apache-2.0", - "Apache-2.0 WITH LLVM-exception", - "BSD-3-Clause", - "Unlicense", - "Unicode-3.0", - "Zlib", - # webpki-root-certs - "CDLA-Permissive-2.0", -] -confidence-threshold = 0.8 -exceptions = [ - # CC0 is a permissive license but somewhat unclear status for source code - # so we prefer to not have dependencies using it - # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal - { allow = ["CC0-1.0"], name = "tiny-keccak" }, -] - -[licenses.private] -ignore = false -registries = [] - -[bans] -multiple-versions = "warn" -wildcards = "allow" -highlight = "all" -workspace-default-features = "allow" -external-default-features = "allow" -allow = [] -deny = [] -skip = [] -skip-tree = [] - -[sources] -unknown-registry = "deny" -unknown-git = "deny" -allow-git = [] - -[sources.allow-org] -github = [] -gitlab = [] -bitbucket = [] diff --git a/op-alloy/rustfmt.toml b/op-alloy/rustfmt.toml deleted file mode 100644 index 3063df707a6..00000000000 --- a/op-alloy/rustfmt.toml +++ /dev/null @@ -1,12 +0,0 @@ -reorder_imports = true -use_field_init_shorthand = true -use_small_heuristics = "Max" - -# Nightly -max_width = 100 -comment_width = 100 -imports_granularity = "Crate" -wrap_comments = true -format_code_in_doc_comments = true -doc_comment_code_block_width = 100 -format_macro_matchers = true diff --git a/op-alloy/typos.toml b/op-alloy/typos.toml deleted file mode 100644 index 0345f80a04c..00000000000 --- a/op-alloy/typos.toml +++ /dev/null @@ -1,90 +0,0 @@ -[files] -extend-exclude = [ - ".git", - "target", - "Cargo.lock", - "book/mermaid*.js", # Minified JS files - "book/dist/", # Built documentation - "book/theme/", # Theme files may have intentional spellings - "*.min.js", # All minified JS files - "CHANGELOG.md", # Changelog may contain intentional typos in historical entries -] - -[default] -extend-ignore-re = [ - # Hex strings of various lengths (common in blockchain 
projects) - "(?i)0x[0-9a-f]{8,}", # 8+ hex chars with 0x prefix - "(?i)[0-9a-f]{32,}", # 32+ hex chars without 0x (hashes, addresses) - "(?i)[0-9a-f]{8,}", # 8+ hex chars without 0x (shorter hex values) - # Ordinals in identifiers - "[0-9]+nd", - "[0-9]+th", - "[0-9]+st", - "[0-9]+rd", - # Base64 encoded strings (common in tests and configs) - "[A-Za-z0-9+/]{20,}={0,2}", -] - -[default.extend-words] -# Valid Rust/Cargo terms -crate = "crate" -crates = "crates" - -# Blockchain/Ethereum specific terms -alloy = "alloy" -asterisc = "asterisc" # Valid project name in Optimism ecosystem -bedrock = "bedrock" # Optimism upgrade name -batcher = "batcher" # OP Stack component -bootnode = "bootnode" -chainid = "chainid" -codegen = "codegen" -derivation = "derivation" -enr = "enr" -ethereum = "ethereum" -fpvm = "fpvm" # Fault Proof Virtual Machine -geth = "geth" -interop = "interop" -libmdbx = "libmdbx" -merkle = "merkle" -mips = "mips" -mpsc = "mpsc" -optimism = "optimism" -preimage = "preimage" -revm = "revm" -risc = "risc" -rollup = "rollup" -rpc = "rpc" -sequencer = "sequencer" -ser = "ser" # Serialization abbreviation -serde = "serde" -superchain = "superchain" # Optimism Superchain -trie = "trie" -txs = "txs" # Transactions abbreviation -usize = "usize" -workspaces = "workspaces" - -# Technical abbreviations and acronyms -api = "api" -cli = "cli" -cfg = "cfg" -const = "const" -env = "env" -impl = "impl" -io = "io" -lru = "lru" -mpt = "mpt" # Merkle Patricia Trie -msg = "msg" -mut = "mut" -nums = "nums" -num = "num" -ok = "ok" -std = "std" -structs = "structs" -ty = "ty" # Type abbreviation -vec = "vec" - -# Additional project-specific terms that might be flagged as typos -jsonrpsee = "jsonrpsee" # JSON-RPC library -tokio = "tokio" -async = "async" -await = "await" \ No newline at end of file diff --git a/op-alt-da/damgr.go b/op-alt-da/damgr.go index 1e7b0fa493d..df0380a309c 100644 --- a/op-alt-da/damgr.go +++ b/op-alt-da/damgr.go @@ -117,6 +117,14 @@ func (d *DA) 
OnFinalizedHeadSignal(f HeadSignalFn) { // It is called by the Finalize function, as it has an L1 finalized head to use. func (d *DA) updateFinalizedHead(l1Finalized eth.L1BlockRef) { d.l1FinalizedHead = l1Finalized + + // If there are no commitments or challenges being tracked, finalizedHead is managed + // by updateFinalizedFromL1 (called from AdvanceL1Origin) which calculates it based + // on l1FinalizedHead - challengeWindow. Preserve that value. + if d.state.NoCommitments() { + return + } + // Prune the state to the finalized head d.state.Prune(l1Finalized.ID()) d.finalizedHead = d.state.lastPrunedCommitment diff --git a/op-alt-da/damgr_test.go b/op-alt-da/damgr_test.go index b487fc85c98..a4fec8d83c4 100644 --- a/op-alt-da/damgr_test.go +++ b/op-alt-da/damgr_test.go @@ -227,6 +227,148 @@ func (m *mockL1Fetcher) ExpectL1BlockRefByNumber(num uint64, ref eth.L1BlockRef, m.Mock.On("L1BlockRefByNumber", num).Once().Return(ref, err) } +// TestUpdateFinalizedHead tests the updateFinalizedHead behavior with and without commitments. +// When there are no commitments, updateFinalizedHead preserves the existing finalizedHead, +// which is managed by updateFinalizedFromL1 called from AdvanceL1Origin. 
+func TestUpdateFinalizedHead(t *testing.T) { + logger := testlog.Logger(t, log.LevelInfo) + cfg := Config{ + ResolveWindow: 6, + ChallengeWindow: 6, + } + + t.Run("no commitments preserves existing finalizedHead unchanged", func(t *testing.T) { + state := NewState(logger, &NoopMetrics{}, cfg) + storage := NewMockDAClient(logger) + da := NewAltDAWithState(logger, cfg, storage, &NoopMetrics{}, state) + + // Verify state has no commitments + require.True(t, state.NoCommitments()) + + // Set an initial finalizedHead (simulating what updateFinalizedFromL1 would do) + initialFinalizedHead := l1Ref(50) + da.finalizedHead = initialFinalizedHead + + // Call Finalize with l1Finalized + l1Finalized := l1Ref(100) + da.Finalize(l1Finalized) + + // finalizedHead should be preserved (not overwritten) since there are no commitments + require.Equal(t, initialFinalizedHead, da.finalizedHead) + // l1FinalizedHead should be updated + require.Equal(t, l1Finalized, da.l1FinalizedHead) + }) + + t.Run("no commitments after all pruned preserves existing finalizedHead", func(t *testing.T) { + rng := rand.New(rand.NewSource(1234)) + state := NewState(logger, &NoopMetrics{}, cfg) + storage := NewMockDAClient(logger) + da := NewAltDAWithState(logger, cfg, storage, &NoopMetrics{}, state) + + // Track and expire a commitment to set lastPrunedCommitment + c1 := RandomCommitment(rng) + bn1 := uint64(10) + state.TrackCommitment(c1, l1Ref(bn1)) + require.NoError(t, state.ExpireCommitments(bID(bn1+cfg.ChallengeWindow))) + state.Prune(bID(bn1 + cfg.ChallengeWindow)) + + // Verify lastPrunedCommitment is set and no more commitments + require.Equal(t, l1Ref(bn1), state.lastPrunedCommitment) + require.True(t, state.NoCommitments()) + + // Simulate updateFinalizedFromL1 having set the finalizedHead + initialFinalizedHead := l1Ref(80) + da.finalizedHead = initialFinalizedHead + + // Call Finalize with l1Finalized + l1Finalized := l1Ref(100) + da.Finalize(l1Finalized) + + // finalizedHead should be 
preserved since there are no commitments + require.Equal(t, initialFinalizedHead, da.finalizedHead) + }) + + t.Run("with pending commitments prunes and uses lastPrunedCommitment", func(t *testing.T) { + rng := rand.New(rand.NewSource(1234)) + state := NewState(logger, &NoopMetrics{}, cfg) + storage := NewMockDAClient(logger) + da := NewAltDAWithState(logger, cfg, storage, &NoopMetrics{}, state) + + // Track a commitment that will be pruned + c1 := RandomCommitment(rng) + bn1 := uint64(10) + state.TrackCommitment(c1, l1Ref(bn1)) + require.NoError(t, state.ExpireCommitments(bID(bn1+cfg.ChallengeWindow))) + state.Prune(bID(bn1 + cfg.ChallengeWindow)) + + // Track another commitment that won't be expired/pruned + c2 := RandomCommitment(rng) + bn2 := uint64(50) + state.TrackCommitment(c2, l1Ref(bn2)) + + // Verify state has pending commitments + require.False(t, state.NoCommitments()) + require.Equal(t, l1Ref(bn1), state.lastPrunedCommitment) + + // Call Finalize with l1Finalized higher than lastPrunedCommitment + l1Finalized := l1Ref(100) + da.Finalize(l1Finalized) + + // finalizedHead should be lastPrunedCommitment because there are pending commitments + require.Equal(t, l1Ref(bn1), da.finalizedHead) + }) + + t.Run("with commitments prunes up to l1Finalized and updates finalizedHead", func(t *testing.T) { + rng := rand.New(rand.NewSource(1234)) + state := NewState(logger, &NoopMetrics{}, cfg) + storage := NewMockDAClient(logger) + da := NewAltDAWithState(logger, cfg, storage, &NoopMetrics{}, state) + + // Track and expire multiple commitments + c1 := RandomCommitment(rng) + bn1 := uint64(10) + state.TrackCommitment(c1, l1Ref(bn1)) + + c2 := RandomCommitment(rng) + bn2 := uint64(20) + state.TrackCommitment(c2, l1Ref(bn2)) + + // Expire both commitments + require.NoError(t, state.ExpireCommitments(bID(bn2+cfg.ChallengeWindow))) + + // Verify we have expired commitments ready to prune + require.False(t, state.NoCommitments()) + + // Call Finalize - this should prune up 
to l1Finalized + l1Finalized := l1Ref(bn2 + cfg.ChallengeWindow) + da.Finalize(l1Finalized) + + // Both commitments should be pruned, finalizedHead should be the last pruned one + require.Equal(t, l1Ref(bn2), da.finalizedHead) + }) + + t.Run("finalized head signal handler is called with correct value", func(t *testing.T) { + state := NewState(logger, &NoopMetrics{}, cfg) + storage := NewMockDAClient(logger) + da := NewAltDAWithState(logger, cfg, storage, &NoopMetrics{}, state) + + // Set initial finalizedHead (simulating updateFinalizedFromL1) + initialFinalizedHead := l1Ref(50) + da.finalizedHead = initialFinalizedHead + + var receivedHead eth.L1BlockRef + da.OnFinalizedHeadSignal(func(ref eth.L1BlockRef) { + receivedHead = ref + }) + + l1Finalized := l1Ref(100) + da.Finalize(l1Finalized) + + // Handler should receive the preserved finalizedHead (since no commitments) + require.Equal(t, initialFinalizedHead, receivedHead) + }) +} + func TestAdvanceChallengeOrigin(t *testing.T) { logger := testlog.Logger(t, log.LevelWarn) ctx := context.Background() diff --git a/op-batcher/batcher/driver.go b/op-batcher/batcher/driver.go index db35c3bff0b..da21d10183a 100644 --- a/op-batcher/batcher/driver.go +++ b/op-batcher/batcher/driver.go @@ -423,16 +423,15 @@ func (l *BatchSubmitter) unsafeDABytes() int64 { } // sendToThrottlingLoop sends the current unsafe bytes to the throttling loop. -// It is not blocking, no signal will be sent if the channel is full. +// It is not blocking, no signal will be sent if the channel is full or if the throttling loop is not running. 
func (l *BatchSubmitter) sendToThrottlingLoop(unsafeBytesUpdated chan int64) { - if l.Config.ThrottleParams.LowerThreshold == 0 { - return - } - + unsafeDABytes := l.unsafeDABytes() + l.Metr.RecordUnsafeDABytes(unsafeDABytes) // notify the throttling loop it may be time to initiate throttling without blocking select { - case unsafeBytesUpdated <- l.unsafeDABytes(): + case unsafeBytesUpdated <- unsafeDABytes: default: + // drop the update if there is no ready reader for the channel } } @@ -703,7 +702,6 @@ func (l *BatchSubmitter) throttlingLoop(wg *sync.WaitGroup, unsafeBytesUpdated c } for unsafeBytes := range unsafeBytesUpdated { - l.Metr.RecordUnsafeDABytes(unsafeBytes) newParams := l.throttleController.Update(uint64(unsafeBytes)) controllerType := l.throttleController.GetType() diff --git a/op-batcher/batcher/driver_test.go b/op-batcher/batcher/driver_test.go index a58521b377a..964d604ce0e 100644 --- a/op-batcher/batcher/driver_test.go +++ b/op-batcher/batcher/driver_test.go @@ -13,11 +13,11 @@ import ( "testing" "time" - "github.com/ethereum-optimism/optimism/op-batcher/batcher/throttler" "github.com/ethereum-optimism/optimism/op-batcher/config" "github.com/ethereum-optimism/optimism/op-batcher/metrics" "github.com/ethereum-optimism/optimism/op-service/dial" "github.com/ethereum-optimism/optimism/op-service/eth" + opmetrics "github.com/ethereum-optimism/optimism/op-service/metrics" "github.com/ethereum-optimism/optimism/op-service/testlog" "github.com/ethereum-optimism/optimism/op-service/testutils" "github.com/ethereum-optimism/optimism/op-service/txmgr" @@ -233,7 +233,7 @@ func createHTTPHandler(t *testing.T, cb func(), failureMode handlerFailureMode) func TestBatchSubmitter_ThrottlingEndpoints(t *testing.T) { // Set a very long timeout to avoid flakiness timeout := time.Second * 120 - testThrottlingEndpoints := func(numHealthyServers, numUnhealthyServers int) func(t *testing.T) { + testThrottlingEndpoints := func(numHealthyServers, numUnhealthyServers int, 
throttlingEnabled bool) func(t *testing.T) { return func(t *testing.T) { healthyCalls := make([]int, numHealthyServers) @@ -258,38 +258,72 @@ func TestBatchSubmitter_ThrottlingEndpoints(t *testing.T) { // Setup test context ctx, cancel := context.WithCancel(context.Background()) - // Add in an endpoint with no server at all, representing an "always down" endpoint - urls = append(urls, "http://invalid/") + // Add in an endpoint with no server at all, representing an "always down" endpoint (only when throttling enabled) + if throttlingEnabled { + urls = append(urls, "http://invalid/") + } t.Log("Throttling endpoints:", urls) + t.Logf("Throttling enabled: %v", throttlingEnabled) var batcherShutdownError error - // Create test BatchSubmitter using the setup function - bs, _ := setup(t, func(cause error) { - batcherShutdownError = cause + // Create real metrics instead of NoopMetrics so we can verify metric recording + metr := metrics.NewMetrics("test") + + ep := newEndpointProvider() + cfg := defaultTestRollupConfig + cfg.Genesis.L1.Number = genesisL1Origin + + // Set threshold values based on whether throttling is enabled + lowerThreshold := uint64(0) + upperThreshold := uint64(0) + if throttlingEnabled { + lowerThreshold = 10000 + upperThreshold = 20000 + } + + // Create test BatchSubmitter + bs := NewBatchSubmitter(DriverSetup{ + closeApp: func(cause error) { batcherShutdownError = cause }, + Log: testlog.Logger(t, log.LevelDebug), + Metr: metr, // Use real metrics + RollupConfig: cfg, + Config: BatcherConfig{ + ThrottleParams: config.ThrottleParams{ + ControllerType: config.StepControllerType, + LowerThreshold: lowerThreshold, + UpperThreshold: upperThreshold, + TxSizeLowerLimit: 5000, + TxSizeUpperLimit: 10000, + BlockSizeLowerLimit: 20000, + BlockSizeUpperLimit: 30000, + Endpoints: urls, + }, + NetworkTimeout: time.Second, + }, + ChannelConfig: defaultTestChannelConfig(), + EndpointProvider: ep, }) + bs.shutdownCtx = ctx - bs.Config.NetworkTimeout = time.Second - 
bs.Config.ThrottleParams.Endpoints = urls - bs.throttleController = throttler.NewThrottleController( - throttler.NewStepStrategy(10000), - throttler.ThrottleConfig{ - TxSizeLowerLimit: 5000, - TxSizeUpperLimit: 10000, - BlockSizeLowerLimit: 20000, - BlockSizeUpperLimit: 30000, - }) // Test the throttling loop pendingBytesUpdated := make(chan int64, 1) wg1 := sync.WaitGroup{} - wg1.Add(1) - // Start throttling loop in a goroutine - go bs.throttlingLoop(&wg1, pendingBytesUpdated) + // Start throttling loop in a goroutine only if throttling is enabled + if throttlingEnabled { + wg1.Add(1) + go bs.throttlingLoop(&wg1, pendingBytesUpdated) + } + + // Add a block to the channel manager so unsafeDABytes() returns > 0 + testBlock := newMiniL2Block(5) // Create a block with 5 transactions + err := bs.channelMgr.AddL2Block(testBlock) + require.NoError(t, err, "Should be able to add block to channel manager") - // Simulate block loading by sending periodically on pendingBytesUpdated + // Simulate block loading by calling sendToThrottlingLoop periodically wg2 := sync.WaitGroup{} blockLoadingCtx, cancelBlockLoading := context.WithCancel(context.Background()) go func() { @@ -300,8 +334,9 @@ func TestBatchSubmitter_ThrottlingEndpoints(t *testing.T) { case <-blockLoadingCtx.Done(): return default: - // Simulate block loading - pendingBytesUpdated <- 20000 // the value doesn't actually matter for this test + // Simulate block loading - use sendToThrottlingLoop which records metrics + // and sends to the channel (this is what the real block loading loop does) + bs.sendToThrottlingLoop(pendingBytesUpdated) } } @@ -316,69 +351,95 @@ func TestBatchSubmitter_ThrottlingEndpoints(t *testing.T) { cancel() }) - require.Eventually(t, - func() bool { - // Check that all endpoints were called - if slices.Contains(healthyCalls, 0) || slices.Contains(unHealthyCalls, 0) { - return false + // Verify metrics: unsafe_da_bytes metric should be recorded in all cases + time.Sleep(200 * 
time.Millisecond) // Wait for metric updates + c := opmetrics.NewMetricChecker(t, metr.Registry()) + prefix := "op_batcher_test_" + unsafeDABytesFamily := c.FindByName(prefix + "unsafe_da_bytes") + require.NotNil(t, unsafeDABytesFamily, "unsafe_da_bytes metric should exist") + unsafeDABytesMetric := unsafeDABytesFamily.FindByLabels(map[string]string{}) + require.NotNil(t, unsafeDABytesMetric, "unsafe_da_bytes metric should be queryable") + metricValue := unsafeDABytesMetric.Gauge.GetValue() + require.Greater(t, metricValue, 0.0, "unsafe_da_bytes should be > 0 after adding blocks") + t.Logf("unsafe_da_bytes metric value: %.0f", metricValue) + + if throttlingEnabled { + // Only check endpoint calls when throttling is enabled + require.Eventually(t, + func() bool { + // Check that all endpoints were called + if slices.Contains(healthyCalls, 0) || slices.Contains(unHealthyCalls, 0) { + return false + } + return true + }, time.Second*10, time.Millisecond*10, "All endpoints should have been called within 10s") + + startTestServerAtAddr := func(addr string, handler http.HandlerFunc) *httptest.Server { + ln, err := net.Listen("tcp", addr) + require.NoError(t, err, "Failed to create new listener for test server") + + s := &httptest.Server{ + Listener: ln, + Config: &http.Server{Handler: handler}, } - return true - }, time.Second*10, time.Millisecond*10, "All endpoints should have been called within 10s") - - startTestServerAtAddr := func(addr string, handler http.HandlerFunc) *httptest.Server { - ln, err := net.Listen("tcp", addr) - require.NoError(t, err, "Failed to create new listener for test server") - - s := &httptest.Server{ - Listener: ln, - Config: &http.Server{Handler: handler}, + s.Start() + return s } - s.Start() - return s - } - // Take one of the healthy servers down, wait 2s and restart. Check it is called again. - if len(healthyServers) > 0 { - restartedServerCalled := false + // Take one of the healthy servers down, wait 2s and restart. 
Check it is called again. + if len(healthyServers) > 0 { + restartedServerCalled := false - addr := healthyServers[0].Listener.Addr().String() - healthyServers[0].Close() - time.Sleep(time.Second * 2) - startTestServerAtAddr(addr, createHTTPHandler(t, func() { restartedServerCalled = true }, noFailure)) - defer healthyServers[0].Close() - t.Log("restarted server at", addr) + addr := healthyServers[0].Listener.Addr().String() + healthyServers[0].Close() + time.Sleep(time.Second * 2) + startTestServerAtAddr(addr, createHTTPHandler(t, func() { restartedServerCalled = true }, noFailure)) + defer healthyServers[0].Close() + t.Log("restarted server at", addr) - require.Eventually(t, func() bool { - return restartedServerCalled - }, timeout, time.Millisecond*10, "Restarted server should have been called within 2s") - } + require.Eventually(t, func() bool { + return restartedServerCalled + }, timeout, time.Millisecond*10, "Restarted server should have been called within 2s") + } - // Take an unhealthy server down, wait 2s and bring it back up with misconfiguration. Check the batcher exits. - if len(unhealthyServers) > 0 { - restartedServerCalled := false + // Take an unhealthy server down, wait 2s and bring it back up with misconfiguration. Check the batcher exits. 
+ if len(unhealthyServers) > 0 { + restartedServerCalled := false - addr := unhealthyServers[0].Listener.Addr().String() - unhealthyServers[0].Close() - time.Sleep(time.Second * 2) - startTestServerAtAddr(addr, createHTTPHandler(t, func() { restartedServerCalled = true }, methodNotFound)) - defer unhealthyServers[0].Close() - t.Log("restarted server at", addr) + addr := unhealthyServers[0].Listener.Addr().String() + unhealthyServers[0].Close() + time.Sleep(time.Second * 2) + startTestServerAtAddr(addr, createHTTPHandler(t, func() { restartedServerCalled = true }, methodNotFound)) + defer unhealthyServers[0].Close() + t.Log("restarted server at", addr) - require.Eventually(t, func() bool { - return restartedServerCalled - }, timeout, time.Millisecond*10, "Restarted server should have been called within 2s") + require.Eventually(t, func() bool { + return restartedServerCalled + }, timeout, time.Millisecond*10, "Restarted server should have been called within 2s") - require.Eventually(t, func() bool { - return batcherShutdownError != nil - }, timeout, time.Millisecond*10, "Batcher should have triggered self shutdown within 2s") + require.Eventually(t, func() bool { + return batcherShutdownError != nil + }, timeout, time.Millisecond*10, "Batcher should have triggered self shutdown within 2s") - require.Equal(t, batcherShutdownError.Error(), ErrSetMaxDASizeRPCMethodUnavailable("http://"+addr, errors.New("method not found")).Error(), "Batcher shutdown error should be the same as the expected error") + require.Equal(t, batcherShutdownError.Error(), ErrSetMaxDASizeRPCMethodUnavailable("http://"+addr, errors.New("method not found")).Error(), "Batcher shutdown error should be the same as the expected error") + } + } else { + // When throttling is disabled, verify endpoints were NOT called + time.Sleep(time.Second * 2) // Wait to ensure no calls are made + for i := range healthyCalls { + require.Equal(t, 0, healthyCalls[i], "No endpoint calls should be made when throttling is 
disabled") + } + for i := range unHealthyCalls { + require.Equal(t, 0, unHealthyCalls[i], "No endpoint calls should be made when throttling is disabled") + } + t.Log("Verified: no endpoint calls when throttling disabled") } } } - t.Run("two normal endpoints", testThrottlingEndpoints(2, 0)) - t.Run("two failing endpoints", testThrottlingEndpoints(0, 2)) - t.Run("one normal endpoint, one failing endpoint", testThrottlingEndpoints(1, 1)) + t.Run("two normal endpoints", testThrottlingEndpoints(2, 0, true)) + t.Run("two failing endpoints", testThrottlingEndpoints(0, 2, true)) + t.Run("one normal endpoint, one failing endpoint", testThrottlingEndpoints(1, 1, true)) + t.Run("throttling disabled", testThrottlingEndpoints(1, 0, false)) } func TestBatchSubmitter_CriticalError(t *testing.T) { diff --git a/op-chain-ops/interopgen/deploy.go b/op-chain-ops/interopgen/deploy.go index 9400474af96..634177b022e 100644 --- a/op-chain-ops/interopgen/deploy.go +++ b/op-chain-ops/interopgen/deploy.go @@ -355,6 +355,7 @@ func GenesisL2(l2Host *script.Host, cfg *L2Config, deployment *L2Deployment, mul GasPayingTokenSymbol: cfg.GasPayingTokenSymbol, NativeAssetLiquidityAmount: cfg.NativeAssetLiquidityAmount.ToInt(), LiquidityControllerOwner: cfg.LiquidityControllerOwner, + UseL2CM: false, // TODO(#19102): add support for L2CM }); err != nil { return fmt.Errorf("failed L2 genesis: %w", err) } diff --git a/op-challenger/game/fault/preimages/split.go b/op-challenger/game/fault/preimages/split.go index 84541c7feb6..e49ffc41f8f 100644 --- a/op-challenger/game/fault/preimages/split.go +++ b/op-challenger/game/fault/preimages/split.go @@ -4,6 +4,7 @@ import ( "context" "github.com/ethereum-optimism/optimism/op-challenger/game/fault/types" + preimage "github.com/ethereum-optimism/optimism/op-preimage" ) var _ PreimageUploader = (*SplitPreimageUploader)(nil) @@ -25,7 +26,9 @@ func (s *SplitPreimageUploader) UploadPreimage(ctx context.Context, parent uint6 return ErrNilPreimageData } // Always route 
local preimage uploads to the direct uploader. - if data.IsLocal || uint64(len(data.GetPreimageWithoutSize())) < s.largePreimageSizeThreshold { + // Large-preimage proposals are keccak-only on-chain, so non-keccak preimages must go through the direct path. + isKeccak := len(data.OracleKey) > 0 && preimage.KeyType(data.OracleKey[0]) == preimage.Keccak256KeyType + if data.IsLocal || !isKeccak || uint64(len(data.GetPreimageWithoutSize())) < s.largePreimageSizeThreshold { return s.directUploader.UploadPreimage(ctx, parent, data) } else { return s.largeUploader.UploadPreimage(ctx, parent, data) diff --git a/op-challenger/game/fault/preimages/split_test.go b/op-challenger/game/fault/preimages/split_test.go index fbecdc06467..506fa49e83e 100644 --- a/op-challenger/game/fault/preimages/split_test.go +++ b/op-challenger/game/fault/preimages/split_test.go @@ -2,9 +2,12 @@ package preimages import ( "context" + "crypto/sha256" + "encoding/binary" "testing" "github.com/ethereum-optimism/optimism/op-challenger/game/fault/types" + preimage "github.com/ethereum-optimism/optimism/op-preimage" "github.com/stretchr/testify/require" ) @@ -43,6 +46,14 @@ func TestSplitPreimageUploader_UploadPreimage(t *testing.T) { require.Equal(t, 0, direct.updates) }) + t.Run("NonKeccakAlwaysUsesDirect", func(t *testing.T) { + oracle, direct, large := newTestSplitPreimageUploader(t, mockLargePreimageSizeThreshold) + err := oracle.UploadPreimage(context.Background(), 0, makeSha256PreimageData(make([]byte, mockLargePreimageSizeThreshold), 0)) + require.NoError(t, err) + require.Equal(t, 1, direct.updates) + require.Equal(t, 0, large.updates) + }) + t.Run("NilPreimageOracleData", func(t *testing.T) { oracle, _, _ := newTestSplitPreimageUploader(t, mockLargePreimageSizeThreshold) err := oracle.UploadPreimage(context.Background(), 0, nil) @@ -68,3 +79,13 @@ func newTestSplitPreimageUploader(t *testing.T, threshold uint64) (*SplitPreimag large := &mockPreimageUploader{} return 
NewSplitPreimageUploader(direct, large, threshold), direct, large } + +func makeSha256PreimageData(pre []byte, offset uint32) *types.PreimageOracleData { + sum := sha256.Sum256(pre) + key := preimage.Sha256Key(sum).PreimageKey() + // Add the length prefix to match how Cannon formats oracle data. + dataWithLength := make([]byte, 0, 8+len(pre)) + dataWithLength = binary.BigEndian.AppendUint64(dataWithLength, uint64(len(pre))) + dataWithLength = append(dataWithLength, pre...) + return types.NewPreimageOracleData(key[:], dataWithLength, offset) +} diff --git a/op-challenger/game/scheduler/worker.go b/op-challenger/game/scheduler/worker.go index 5184889c526..05d78fd2ad2 100644 --- a/op-challenger/game/scheduler/worker.go +++ b/op-challenger/game/scheduler/worker.go @@ -17,7 +17,12 @@ func progressGames(ctx context.Context, in <-chan job, out chan<- job, wg *sync. case j := <-in: threadActive() j.status = j.player.ProgressGame(ctx) - out <- j + select { + case <-ctx.Done(): + // Context cancelled, shut down. Avoids blocking forever if the consumer has already stopped. 
+ case out <- j: + // Successfully published + } threadIdle() } } diff --git a/op-challenger/game/scheduler/worker_test.go b/op-challenger/game/scheduler/worker_test.go index 97689367143..4bb3e317752 100644 --- a/op-challenger/game/scheduler/worker_test.go +++ b/op-challenger/game/scheduler/worker_test.go @@ -56,6 +56,33 @@ func TestWorkerShouldProcessJobsUntilContextDone(t *testing.T) { wg.Wait() } +func TestWorkerShouldShutdownWhenResultChannelBlocked(t *testing.T) { + in := make(chan job, 2) + out := make(chan job) // No buffer, will block on any publish + + ms := &metricSink{} + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + var wg sync.WaitGroup + wg.Add(1) + go progressGames(ctx, in, out, &wg, ms.ThreadActive, ms.ThreadIdle) + + in <- job{ + player: &test.StubGamePlayer{StatusValue: types.GameStatusInProgress}, + } + waitErr := wait.For(context.Background(), 100*time.Millisecond, func() (bool, error) { + return ms.activeCalls.Load() >= 1, nil + }) + require.NoError(t, waitErr) + require.EqualValues(t, ms.activeCalls.Load(), 1) + require.EqualValues(t, ms.idleCalls.Load(), 0) // Can't publish idle because it blocks on publishing the result + + // Cancel the context which should exit the worker + cancel() + wg.Wait() + +} + type metricSink struct { activeCalls atomic.Int32 idleCalls atomic.Int32 diff --git a/op-challenger/game/service.go b/op-challenger/game/service.go index 0fb36afb112..5028588a12e 100644 --- a/op-challenger/game/service.go +++ b/op-challenger/game/service.go @@ -268,17 +268,17 @@ func (s *Service) Stopped() bool { } func (s *Service) Stop(ctx context.Context) error { - s.logger.Info("stopping challenger game service") + s.logger.Info("Stopping challenger game service") var result error + if s.monitor != nil { + s.monitor.StopMonitoring() + } if s.sched != nil { if err := s.sched.Close(); err != nil { result = errors.Join(result, fmt.Errorf("failed to close scheduler: %w", err)) } } - if s.monitor != nil { - 
s.monitor.StopMonitoring() - } if s.claimer != nil { if err := s.claimer.Close(); err != nil { result = errors.Join(result, fmt.Errorf("failed to close claimer: %w", err)) @@ -314,6 +314,6 @@ func (s *Service) Stop(ctx context.Context) error { } } s.stopped.Store(true) - s.logger.Info("stopped challenger game service", "err", result) + s.logger.Info("Stopped challenger game service", "err", result) return result } diff --git a/op-deployer/pkg/deployer/devfeatures.go b/op-deployer/pkg/deployer/devfeatures.go index 88993d916ef..68d867366fa 100644 --- a/op-deployer/pkg/deployer/devfeatures.go +++ b/op-deployer/pkg/deployer/devfeatures.go @@ -20,6 +20,9 @@ var ( // OPCMV2DevFlag enables the OPContractsManagerV2 contract. OPCMV2DevFlag = common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000010000") + + // L2CMDevFlag enables L2CM. + L2CMDevFlag = common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000100000") ) // IsDevFeatureEnabled checks if a specific development feature is enabled in a feature bitmap. 
diff --git a/op-deployer/pkg/deployer/integration_test/apply_test.go b/op-deployer/pkg/deployer/integration_test/apply_test.go index b24b661beac..cae2024ae68 100644 --- a/op-deployer/pkg/deployer/integration_test/apply_test.go +++ b/op-deployer/pkg/deployer/integration_test/apply_test.go @@ -5,14 +5,12 @@ import ( "context" "encoding/hex" "encoding/json" - "fmt" "log/slog" "math/big" "strings" "testing" "time" - "github.com/Masterminds/semver/v3" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/bootstrap" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/broadcaster" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/inspect" @@ -36,7 +34,6 @@ import ( "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/standard" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/state" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/testutil" - opbindings "github.com/ethereum-optimism/optimism/op-e2e/bindings" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/rpc" @@ -51,7 +48,6 @@ import ( "github.com/ethereum-optimism/optimism/op-chain-ops/genesis" "github.com/ethereum-optimism/optimism/op-core/predeploys" "github.com/ethereum-optimism/optimism/op-service/testlog" - "github.com/ethereum/go-ethereum/accounts/abi/bind" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/ethclient" @@ -363,6 +359,32 @@ func TestEndToEndApply(t *testing.T) { require.Equal(t, amount, account.Balance, "Native asset liquidity predeploy should have the configured balance") }) + t.Run("with L2CM", func(t *testing.T) { + intent, st := shared.NewIntent(t, l1ChainID, dk, l2ChainID1, loc, loc, testCustomGasLimit) + + intent.GlobalDeployOverrides = map[string]any{ + "devFeatureBitmap": deployer.L2CMDevFlag, + } + + require.NoError(t, deployer.ApplyPipeline(ctx, deployer.ApplyPipelineOpts{ + 
DeploymentTarget: deployer.DeploymentTargetLive, + L1RPCUrl: l1RPC, + DeployerPrivateKey: pk, + Intent: intent, + State: st, + Logger: lgr, + StateWriter: pipeline.NoopStateWriter(), + CacheDir: testCacheDir, + })) + + // Check that the conditional deployer predeploy is deployed in L2 genesis + conditionalDeployerAddr := common.HexToAddress("0x420000000000000000000000000000000000002C") + l2Genesis := st.Chains[0].Allocs.Data.Accounts + account, exists := l2Genesis[conditionalDeployerAddr] + require.True(t, exists, "Conditional deployer should exist in L2 genesis") + require.NotEmpty(t, account.Code, "Conditional deployer should have code deployed") + }) + t.Run("OPCMV2 deployment", func(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() @@ -400,6 +422,7 @@ func TestEndToEndApply(t *testing.T) { require.NotEmpty(t, st.ImplementationsDeployment.OpcmV2Impl, "OPCMV2 implementation should be deployed") require.NotEmpty(t, st.ImplementationsDeployment.OpcmContainerImpl, "OPCM container implementation should be deployed") require.NotEmpty(t, st.ImplementationsDeployment.OpcmStandardValidatorImpl, "OPCM standard validator implementation should be deployed") + require.NotEmpty(t, st.ImplementationsDeployment.OpcmInteropMigratorImpl, "OPCM interop migrator implementation should be deployed") // Verify that implementations are deployed on L1 cg := ethClientCodeGetter(ctx, l1Client) @@ -416,7 +439,6 @@ func TestEndToEndApply(t *testing.T) { require.Equal(t, common.Address{}, st.ImplementationsDeployment.OpcmGameTypeAdderImpl, "OPCM game type adder implementation should be zero") require.Equal(t, common.Address{}, st.ImplementationsDeployment.OpcmDeployerImpl, "OPCM deployer implementation should be zero") require.Equal(t, common.Address{}, st.ImplementationsDeployment.OpcmUpgraderImpl, "OPCM upgrader implementation should be zero") - require.Equal(t, common.Address{}, st.ImplementationsDeployment.OpcmInteropMigratorImpl, "OPCM interop 
migrator implementation should be zero") }) } @@ -818,7 +840,7 @@ func runEndToEndBootstrapAndApplyUpgradeTest(t *testing.T, afactsFS foundry.Stat require.NoError(t, err) defer versionClient.Close() - shouldUpgradeSuperchainConfig, err := needsSuperchainConfigUpgrade( + shouldUpgradeSuperchainConfig, err := testutil.NeedsSuperchainConfigUpgrade( ctx, versionClient, implementationsConfig.SuperchainConfigProxy, @@ -842,13 +864,18 @@ func runEndToEndBootstrapAndApplyUpgradeTest(t *testing.T, afactsFS foundry.Stat ) require.NoError(t, err) + opcmAddress := impls.Opcm + if deployer.IsDevFeatureEnabled(implementationsConfig.DevFeatureBitmap, deployer.OPCMV2DevFlag) { + opcmAddress = impls.OpcmV2 + } + // Only run the superchain config upgrade if the live superchain config is behind the freshly deployed // implementation. Running the script when versions match will revert and panic the test harness. if shouldUpgradeSuperchainConfig { t.Run("upgrade superchain config", func(t *testing.T) { upgradeConfig := embedded.UpgradeSuperchainConfigInput{ Prank: superchainProxyAdminOwner, - Opcm: impls.Opcm, + Opcm: opcmAddress, SuperchainConfig: implementationsConfig.SuperchainConfigProxy, } @@ -1051,44 +1078,6 @@ func runEndToEndBootstrapAndApplyUpgradeTest(t *testing.T, afactsFS foundry.Stat }) } -func needsSuperchainConfigUpgrade( - ctx context.Context, - client *ethclient.Client, - currentProxy, targetImpl common.Address, -) (bool, error) { - currentVersion, err := superchainConfigVersion(ctx, client, currentProxy) - if err != nil { - return false, fmt.Errorf("failed to fetch proxy superchain config version: %w", err) - } - - targetVersion, err := superchainConfigVersion(ctx, client, targetImpl) - if err != nil { - return false, fmt.Errorf("failed to fetch implementation superchain config version: %w", err) - } - - return currentVersion.LessThan(targetVersion), nil -} - -func superchainConfigVersion( - ctx context.Context, - client *ethclient.Client, - addr common.Address, -) 
(*semver.Version, error) { - contract, err := opbindings.NewSuperchainConfig(addr, client) - if err != nil { - return nil, fmt.Errorf("failed to bind superchain config at %s: %w", addr.Hex(), err) - } - versionStr, err := contract.Version(&bind.CallOpts{Context: ctx}) - if err != nil { - return nil, fmt.Errorf("failed to read version from %s: %w", addr.Hex(), err) - } - version, err := semver.NewVersion(versionStr) - if err != nil { - return nil, fmt.Errorf("failed to parse version %q from %s: %w", versionStr, addr.Hex(), err) - } - return version, nil -} - func setupGenesisChain(t *testing.T, l1ChainID uint64) (deployer.ApplyPipelineOpts, *state.Intent, *state.State) { lgr := testlog.Logger(t, slog.LevelDebug) diff --git a/op-deployer/pkg/deployer/integration_test/cli/manage_add_game_type_v2_test.go b/op-deployer/pkg/deployer/integration_test/cli/manage_add_game_type_v2_test.go index ae2ca691f2f..67e530443e7 100644 --- a/op-deployer/pkg/deployer/integration_test/cli/manage_add_game_type_v2_test.go +++ b/op-deployer/pkg/deployer/integration_test/cli/manage_add_game_type_v2_test.go @@ -12,19 +12,20 @@ import ( "time" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/artifacts" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/bootstrap" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/broadcaster" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/integration_test/shared" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/standard" + "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/pipeline" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/testutil" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/upgrade/embedded" "github.com/ethereum-optimism/optimism/op-service/testlog" "github.com/ethereum-optimism/optimism/op-service/testutils" 
"github.com/ethereum-optimism/optimism/op-service/testutils/devnet" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/params" + "github.com/ethereum/go-ethereum/crypto" + "github.com/holiman/uint256" "github.com/stretchr/testify/require" + + "github.com/ethereum-optimism/optimism/op-chain-ops/devkeys" ) func TestManageAddGameTypeV2_CLI(t *testing.T) { @@ -109,8 +110,6 @@ func TestManageAddGameTypeV2_CLI(t *testing.T) { // Tests the manage add-game-type-v2 command, from the CLI to the actual contract execution through the Solidity scripts. func TestManageAddGameTypeV2_Integration(t *testing.T) { - // TODO(#18718): Update this to use an actual deployed OPCM V2 contract once we have one. - // For now, we manually deploy the OPCM V2 contract using bootstrap.Implementations. lgr := testlog.Logger(t, slog.LevelDebug) l1Rpc, stopL1, err := devnet.NewForkedSepolia(lgr) @@ -121,17 +120,12 @@ func TestManageAddGameTypeV2_Integration(t *testing.T) { runner := NewCLITestRunnerWithNetwork(t, WithL1RPC(l1Rpc.RPCUrl())) workDir := runner.GetWorkDir() - // Test values - using arbitrary addresses for testing - l1ProxyAdminOwner := deployer.DefaultL1ProxyAdminOwnerSepolia - systemConfigProxy := deployer.DefaultSystemConfigProxySepolia - - // Deploy the OPCM V2 contract. - opcmV2 := deployDependencies(t, runner) + // We deploy superchain, OPCM V2, and a fresh OP chain. + deployed := deployDependencies(t, runner) - // Run past upgrades before testing the V2 command. - // This is necessary when forking a network at a block before certain upgrades were executed. 
- _, afactsFS := testutil.LocalArtifacts(t) - shared.RunPastUpgradesWithRPC(t, runner.l1RPC, afactsFS, lgr, 11155111, l1ProxyAdminOwner, systemConfigProxy) + l1ProxyAdminOwner := deployed.proxyAdminOwner + systemConfigProxy := deployed.systemConfigProxy + opcmV2 := deployed.opcmV2 // FaultDisputeGameConfig just needs absolutePrestate (bytes32) testPrestate := common.Hash{'P', 'R', 'E', 'S', 'T', 'A', 'T', 'E'} @@ -247,64 +241,68 @@ func TestManageAddGameTypeV2_Integration(t *testing.T) { require.Equal(t, l1ProxyAdminOwner.Hex(), dump[0].To.Hex(), "calldata should be sent to prank address") } -// TODO(#18718): Remove this once we have a deployed OPCM V2 contract. -// deployDependencies deploys the superchain contracts and OPCM V2 implementation -// using the DeployImplementations script, and returns the OPCM V2 address -func deployDependencies(t *testing.T, runner *CLITestRunner) common.Address { +// deployedChain holds the addresses returned from deploying a fresh OP chain +type deployedChain struct { + opcmV2 common.Address + systemConfigProxy common.Address + proxyAdminOwner common.Address +} + +// deployDependencies deploys superchain, OPCM V2, and a fresh OP chain using ApplyPipeline. +// Returns addresses needed for testing the add-game-type-v2 command. 
+func deployDependencies(t *testing.T, runner *CLITestRunner) deployedChain { ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() testCacheDir := testutils.IsolatedTestDirWithAutoCleanup(t) - // First, deploy superchain contracts (required for OPCM deployment) - superchainProxyAdminOwner := common.Address{'S'} - superchainOut, err := bootstrap.Superchain(ctx, bootstrap.SuperchainConfig{ - L1RPCUrl: runner.l1RPC, - PrivateKey: runner.privateKeyHex, - ArtifactsLocator: artifacts.EmbeddedLocator, - Logger: runner.lgr, - SuperchainProxyAdminOwner: superchainProxyAdminOwner, - ProtocolVersionsOwner: common.Address{'P'}, - Guardian: common.Address{'G'}, - Paused: false, - RequiredProtocolVersion: params.ProtocolVersionV0{Major: 1}.Encode(), - RecommendedProtocolVersion: params.ProtocolVersionV0{Major: 2}.Encode(), - CacheDir: testCacheDir, - }) - require.NoError(t, err, "Failed to deploy superchain contracts") - - // Deploy implementations with OPCM V2 enabled - implOut, err := bootstrap.Implementations(ctx, bootstrap.ImplementationsConfig{ - L1RPCUrl: runner.l1RPC, - PrivateKey: runner.privateKeyHex, - ArtifactsLocator: artifacts.EmbeddedLocator, - Logger: runner.lgr, - WithdrawalDelaySeconds: standard.WithdrawalDelaySeconds, - MinProposalSizeBytes: standard.MinProposalSizeBytes, - ChallengePeriodSeconds: standard.ChallengePeriodSeconds, - ProofMaturityDelaySeconds: standard.ProofMaturityDelaySeconds, - DisputeGameFinalityDelaySeconds: standard.DisputeGameFinalityDelaySeconds, - MIPSVersion: int(standard.MIPSVersion), - DevFeatureBitmap: deployer.OPCMV2DevFlag, // Enable OPCM V2 - SuperchainConfigProxy: superchainOut.SuperchainConfigProxy, - ProtocolVersionsProxy: superchainOut.ProtocolVersionsProxy, - SuperchainProxyAdmin: superchainOut.SuperchainProxyAdmin, - L1ProxyAdminOwner: superchainProxyAdminOwner, - Challenger: common.Address{'C'}, - CacheDir: testCacheDir, - FaultGameMaxGameDepth: standard.DisputeMaxGameDepth, - 
FaultGameSplitDepth: standard.DisputeSplitDepth, - FaultGameClockExtension: standard.DisputeClockExtension, - FaultGameMaxClockDuration: standard.DisputeMaxClockDuration, + // Get the private key and devkeys + pk, err := crypto.HexToECDSA(runner.privateKeyHex) + require.NoError(t, err) + + dk, err := devkeys.NewMnemonicDevKeys(devkeys.TestMnemonic) + require.NoError(t, err) + + l1ChainID := big.NewInt(11155111) + + // We use the shared helper to create an intent and state + loc, _ := testutil.LocalArtifacts(t) + l2ChainID := uint256.NewInt(12345) // Test L2 chain ID + + intent, st := shared.NewIntent(t, l1ChainID, dk, l2ChainID, loc, loc, 30_000_000) + + // Ensure we are using OPCM V2 + intent.GlobalDeployOverrides = map[string]any{ + "devFeatureBitmap": deployer.OPCMV2DevFlag, + } + + // Deploy using ApplyPipeline with live target + err = deployer.ApplyPipeline(ctx, deployer.ApplyPipelineOpts{ + DeploymentTarget: deployer.DeploymentTargetLive, + L1RPCUrl: runner.l1RPC, + DeployerPrivateKey: pk, + Intent: intent, + State: st, + Logger: runner.lgr, + StateWriter: pipeline.NoopStateWriter(), + CacheDir: testCacheDir, }) - require.NoError(t, err, "Failed to deploy implementations") + require.NoError(t, err, "Failed to deploy OP chain") // Verify OPCM V2 was deployed - require.NotEqual(t, common.Address{}, implOut.OpcmV2, "OPCM V2 address should be set") - require.Equal(t, common.Address{}, implOut.Opcm, "OPCM V1 address should be zero when V2 is deployed") + require.NotEqual(t, common.Address{}, st.ImplementationsDeployment.OpcmV2Impl, "OPCM V2 address should be set") - t.Logf("Deployed OPCM V2 at address: %s", implOut.OpcmV2.Hex()) - t.Logf("SuperchainConfigProxy: %s", superchainOut.SuperchainConfigProxy.Hex()) + // Get the chain state + require.Len(t, st.Chains, 1, "Expected one chain to be deployed") + chainState := st.Chains[0] - return implOut.OpcmV2 + t.Logf("Deployed OPCM V2 at address: %s", st.ImplementationsDeployment.OpcmV2Impl.Hex()) + t.Logf("Deployed 
SystemConfigProxy at address: %s", chainState.SystemConfigProxy.Hex()) + t.Logf("ProxyAdminOwner: %s", intent.Chains[0].Roles.L1ProxyAdminOwner.Hex()) + + return deployedChain{ + opcmV2: st.ImplementationsDeployment.OpcmV2Impl, + systemConfigProxy: chainState.SystemConfigProxy, + proxyAdminOwner: intent.Chains[0].Roles.L1ProxyAdminOwner, + } } diff --git a/op-deployer/pkg/deployer/integration_test/shared/shared.go b/op-deployer/pkg/deployer/integration_test/shared/shared.go index 0987dc34107..1c3803ea951 100644 --- a/op-deployer/pkg/deployer/integration_test/shared/shared.go +++ b/op-deployer/pkg/deployer/integration_test/shared/shared.go @@ -299,8 +299,8 @@ func buildV2OPCMUpgradeConfig(t *testing.T, prank, opcmAddr, systemConfigProxy c } } -// deployDummyCaller deploys DummyCaller at the prank address with the given OPCM address. -func deployDummyCaller(t *testing.T, rpcClient *rpc.Client, afactsFS foundry.StatDirFs, prank, opcmAddr common.Address) { +// DeployDummyCaller deploys DummyCaller at the prank address with the given OPCM address. 
+func DeployDummyCaller(t *testing.T, rpcClient *rpc.Client, afactsFS foundry.StatDirFs, prank, opcmAddr common.Address) { t.Helper() artifacts := &foundry.ArtifactsFS{FS: afactsFS} @@ -429,7 +429,7 @@ func RunPastUpgradesWithRPC(t *testing.T, l1RPCUrl string, afactsFS foundry.Stat // Process each OPCM upgrade: deploy DummyCaller with correct OPCM, run upgrade, broadcast for _, opcm := range toApply { // Deploy DummyCaller with this OPCM's address - deployDummyCaller(t, rpcClient, afactsFS, prank, opcm.Address) + DeployDummyCaller(t, rpcClient, afactsFS, prank, opcm.Address) // Create fresh broadcaster and host for this upgrade bcaster := NewImpersonationBroadcaster(lgr, ethClient, rpcClient, prank, networkChainID) diff --git a/op-deployer/pkg/deployer/manage/migrate_test.go b/op-deployer/pkg/deployer/manage/migrate_test.go index b6fab04f5fe..c540e6f2feb 100644 --- a/op-deployer/pkg/deployer/manage/migrate_test.go +++ b/op-deployer/pkg/deployer/manage/migrate_test.go @@ -3,7 +3,6 @@ package manage import ( "context" "encoding/hex" - "encoding/json" "flag" "fmt" "log/slog" @@ -11,23 +10,20 @@ import ( "testing" "time" - "github.com/ethereum-optimism/optimism/op-chain-ops/script" + "github.com/ethereum-optimism/optimism/op-chain-ops/devkeys" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/artifacts" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/bootstrap" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/broadcaster" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/integration_test/shared" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/standard" + "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/pipeline" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/testutil" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/upgrade/embedded" 
"github.com/ethereum-optimism/optimism/op-deployer/pkg/env" "github.com/ethereum-optimism/optimism/op-service/testlog" "github.com/ethereum-optimism/optimism/op-service/testutils" "github.com/ethereum-optimism/optimism/op-service/testutils/devnet" "github.com/ethereum/go-ethereum/accounts/abi" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/params" "github.com/ethereum/go-ethereum/rpc" + "github.com/holiman/uint256" "github.com/stretchr/testify/require" "github.com/urfave/cli/v2" ) @@ -42,58 +38,16 @@ func TestInteropMigration(t *testing.T) { }) l1RPC := forkedL1.RPCUrl() - _, afactsFS := testutil.LocalArtifacts(t) + loc, afactsFS := testutil.LocalArtifacts(t) testCacheDir := testutils.IsolatedTestDirWithAutoCleanup(t) ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute) defer cancel() - pkHex, _, _ := shared.DefaultPrivkey(t) - - // Deploy superchain contracts first (required for OPCM deployment) - superchainProxyAdminOwner := common.Address{'S'} - superchainOut, err := bootstrap.Superchain(ctx, bootstrap.SuperchainConfig{ - L1RPCUrl: l1RPC, - PrivateKey: pkHex, - ArtifactsLocator: artifacts.EmbeddedLocator, - Logger: lgr, - SuperchainProxyAdminOwner: superchainProxyAdminOwner, - ProtocolVersionsOwner: common.Address{'P'}, - Guardian: common.Address{'G'}, - Paused: false, - RequiredProtocolVersion: params.ProtocolVersionV0{Major: 1}.Encode(), - RecommendedProtocolVersion: params.ProtocolVersionV0{Major: 2}.Encode(), - CacheDir: testCacheDir, - }) - require.NoError(t, err, "Failed to deploy superchain contracts") - - // Use a test SystemConfigProxy address - systemConfigProxy := common.HexToAddress("0x034edD2A225f7f429A63E0f1D2084B9E0A93b538") - l1ProxyAdminOwner := common.HexToAddress("0x1Eb2fFc903729a0F03966B917003800b145F56E2") - - cfg := bootstrap.ImplementationsConfig{ - L1RPCUrl: l1RPC, - PrivateKey: pkHex, - ArtifactsLocator: artifacts.EmbeddedLocator, - Logger: lgr, - MIPSVersion: int(standard.MIPSVersion), - 
WithdrawalDelaySeconds: standard.WithdrawalDelaySeconds, - MinProposalSizeBytes: standard.MinProposalSizeBytes, - ChallengePeriodSeconds: standard.ChallengePeriodSeconds, - ProofMaturityDelaySeconds: standard.ProofMaturityDelaySeconds, - DisputeGameFinalityDelaySeconds: standard.DisputeGameFinalityDelaySeconds, - DevFeatureBitmap: common.Hash{}, - SuperchainConfigProxy: superchainOut.SuperchainConfigProxy, - ProtocolVersionsProxy: superchainOut.ProtocolVersionsProxy, - SuperchainProxyAdmin: superchainOut.SuperchainProxyAdmin, - L1ProxyAdminOwner: superchainProxyAdminOwner, - Challenger: common.Address{'C'}, - CacheDir: testCacheDir, - FaultGameMaxGameDepth: standard.DisputeMaxGameDepth, - FaultGameSplitDepth: standard.DisputeSplitDepth, - FaultGameClockExtension: standard.DisputeClockExtension, - FaultGameMaxClockDuration: standard.DisputeMaxClockDuration, - } + _, pk, dk := shared.DefaultPrivkey(t) + + l1ChainID := big.NewInt(11155111) // Sepolia + l2ChainID := uint256.NewInt(12345) tests := []struct { name string @@ -105,41 +59,71 @@ func TestInteropMigration(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - // Deploy implementations with the specified dev feature - if tt.devFeature == (common.Hash{}) { - cfg.DevFeatureBitmap = deployer.EnableDevFeature(common.Hash{}, deployer.OptimismPortalInteropDevFlag) - } else { - cfg.DevFeatureBitmap = deployer.EnableDevFeature(tt.devFeature, deployer.OptimismPortalInteropDevFlag) + // Deploy a complete chain using ApplyPipeline - this ensures all addresses are properly connected + intent, st := shared.NewIntent(t, l1ChainID, dk, l2ChainID, loc, loc, 30_000_000) + + // Set dev features for this test + devBitmap := deployer.EnableDevFeature(tt.devFeature, deployer.OptimismPortalInteropDevFlag) + intent.GlobalDeployOverrides = map[string]any{ + "devFeatureBitmap": devBitmap, } - impls, err := bootstrap.Implementations(ctx, cfg) - require.NoError(t, err, "Failed to deploy implementations") + err 
:= deployer.ApplyPipeline(ctx, deployer.ApplyPipelineOpts{ + DeploymentTarget: deployer.DeploymentTargetLive, + L1RPCUrl: l1RPC, + DeployerPrivateKey: pk, + Intent: intent, + State: st, + Logger: lgr, + StateWriter: pipeline.NoopStateWriter(), + CacheDir: testCacheDir, + }) + require.NoError(t, err, "Failed to deploy chain") + + // Get addresses from the deployed state + require.Len(t, st.Chains, 1, "Expected one chain to be deployed") + chainState := st.Chains[0] + systemConfigProxy := chainState.SystemConfigProxy + + // Get the L1ProxyAdminOwner from the intent + l1ProxyAdminOwner := intent.Chains[0].Roles.L1ProxyAdminOwner + + t.Logf("SystemConfigProxy: %s", systemConfigProxy.Hex()) + t.Logf("L1ProxyAdminOwner: %s", l1ProxyAdminOwner.Hex()) rpcClient, err := rpc.Dial(l1RPC) require.NoError(t, err) + var opcmAddr common.Address + if deployer.IsDevFeatureEnabled(tt.devFeature, deployer.OPCMV2DevFlag) { + require.NotEqual(t, common.Address{}, st.ImplementationsDeployment.OpcmV2Impl, "OPCM V2 address should be set") + opcmAddr = st.ImplementationsDeployment.OpcmV2Impl + t.Logf("OPCM V2: %s", opcmAddr.Hex()) + } else { + require.NotEqual(t, common.Address{}, st.ImplementationsDeployment.OpcmImpl, "OPCM V1 address should be set") + opcmAddr = st.ImplementationsDeployment.OpcmImpl + t.Logf("OPCM V1: %s", opcmAddr.Hex()) + } + + // Deploy DummyCaller at l1ProxyAdminOwner for the OPCM + shared.DeployDummyCaller(t, rpcClient, afactsFS, l1ProxyAdminOwner, opcmAddr) + bcast := new(broadcaster.CalldataBroadcaster) host, err := env.DefaultForkedScriptHost( ctx, bcast, lgr, - superchainProxyAdminOwner, + l1ProxyAdminOwner, afactsFS, rpcClient, ) require.NoError(t, err) var input InteropMigrationInput - var opcmAddr common.Address if deployer.IsDevFeatureEnabled(tt.devFeature, deployer.OPCMV2DevFlag) { // OPCM V2 path - require.NotEqual(t, common.Address{}, impls.OpcmV2, "OPCM V2 address should be set") - require.Equal(t, common.Address{}, impls.Opcm, "OPCM V1 address should be 
zero when V2 is deployed") - opcmAddr = impls.OpcmV2 - - // Upgrade the portal to OptimismPortalInterop - upgradeChainV2(t, host, l1ProxyAdminOwner, systemConfigProxy, impls.OpcmV2) + // Note: No need to call upgradeChainV2 since ApplyPipeline already deploys a fully initialized chain // Prepare game args for V2 - ABI encode the prestate bytes32Type, err := abi.NewType("bytes32", "", nil) @@ -178,12 +162,13 @@ func TestInteropMigration(t *testing.T) { } } else { // OPCM V1 path - require.NotEqual(t, common.Address{}, impls.Opcm, "OPCM V1 address should be set") - require.Equal(t, common.Address{}, impls.OpcmV2, "OPCM V2 address should be zero when V1 is deployed") - opcmAddr = impls.Opcm + // Note: No need to call upgradeChainV1 since ApplyPipeline already deploys a fully initialized chain - // Upgrade the portal to OptimismPortalInterop - upgradeChainV1(t, host, l1ProxyAdminOwner, systemConfigProxy, impls.Opcm) + // Get proposer and challenger from devkeys + proposer, err := dk.Address(devkeys.ProposerRole.Key(l1ChainID)) + require.NoError(t, err) + challenger, err := dk.Address(devkeys.ChallengerRole.Key(l1ChainID)) + require.NoError(t, err) input = InteropMigrationInput{ Prank: l1ProxyAdminOwner, @@ -195,8 +180,8 @@ func TestInteropMigration(t *testing.T) { L2SequenceNumber: big.NewInt(1), }, GameParameters: GameParameters{ - Proposer: common.Address{'A'}, - Challenger: common.Address{'B'}, + Proposer: proposer, + Challenger: challenger, MaxGameDepth: 73, SplitDepth: 30, InitBond: big.NewInt(1000000000000000000), // 1 ETH @@ -221,9 +206,9 @@ func TestInteropMigration(t *testing.T) { dump, err := bcast.Dump() require.NoError(t, err) - require.Len(t, dump, 2, "Should have two transactions") - require.True(t, dump[1].Value.ToInt().Cmp(common.Big0) == 0, "Transaction value should be zero") - require.Equal(t, l1ProxyAdminOwner, *dump[1].To, "Transaction should be sent to prank address") + require.Len(t, dump, 1, "Should have one transaction (migration)") + 
require.True(t, dump[0].Value.ToInt().Cmp(common.Big0) == 0, "Transaction value should be zero") + require.Equal(t, l1ProxyAdminOwner, *dump[0].To, "Transaction should be sent to prank address") }) } } @@ -535,83 +520,3 @@ func TestEncodedMigrateInputV2(t *testing.T) { require.Equal(t, expected, hex.EncodeToString(data)) } - -// upgradeChainV1 upgrades a chain via OPCM V1 using ChainConfigs array. -func upgradeChainV1(t *testing.T, host *script.Host, proxyAdminOwner common.Address, systemConfigProxy common.Address, opcm common.Address) { - testPrestate := common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000abc") - testKonaPrestate := common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000000fed") - - upgradeConfig := embedded.UpgradeOPChainInput{ - Prank: proxyAdminOwner, - Opcm: opcm, - ChainConfigs: []embedded.OPChainConfig{ - { - SystemConfigProxy: systemConfigProxy, - CannonPrestate: testPrestate, - CannonKonaPrestate: testKonaPrestate, - }, - }, - } - - upgradeConfigBytes, err := json.Marshal(upgradeConfig) - require.NoError(t, err, "UpgradeOPChainInput should marshal to JSON") - err = embedded.DefaultUpgrader.Upgrade(host, upgradeConfigBytes) - require.NoError(t, err, "OPCM V1 chain upgrade should succeed") -} - -// Upgrades a chain via OPCM V2 to ensure the OptimismPortal is upgraded to OptimismPortalInterop. 
-func upgradeChainV2(t *testing.T, host *script.Host, proxyAdminOwner common.Address, systemConfigProxy common.Address, opcm common.Address) { - // ABI-encode game args for FaultDisputeGameConfig{absolutePrestate} - - // FaultDisputeGameConfig just needs absolutePrestate (bytes32) - testPrestate := common.Hash{'P', 'R', 'E', 'S', 'T', 'A', 'T', 'E'} - - // PermissionedDisputeGameConfig needs absolutePrestate, proposer, challenger - testProposer := common.Address{'P'} - testChallenger := common.Address{'C'} - - upgradeConfig := embedded.UpgradeOPChainInput{ - Prank: proxyAdminOwner, - Opcm: opcm, - UpgradeInputV2: &embedded.UpgradeInputV2{ - SystemConfig: systemConfigProxy, - DisputeGameConfigs: []embedded.DisputeGameConfig{ - { - Enabled: true, - InitBond: big.NewInt(1000000000000000000), - GameType: embedded.GameTypeCannon, - FaultDisputeGameConfig: &embedded.FaultDisputeGameConfig{ - AbsolutePrestate: testPrestate, - }, - }, - { - Enabled: true, - InitBond: big.NewInt(1000000000000000000), - GameType: embedded.GameTypePermissionedCannon, - PermissionedDisputeGameConfig: &embedded.PermissionedDisputeGameConfig{ - AbsolutePrestate: testPrestate, - Proposer: testProposer, - Challenger: testChallenger, - }, - }, - { - Enabled: false, - InitBond: big.NewInt(0), - GameType: embedded.GameTypeCannonKona, - // Disabled games don't need args - }, - }, - ExtraInstructions: []embedded.ExtraInstruction{ - { - Key: "PermittedProxyDeployment", - Data: []byte("DelayedWETH"), - }, - }, - }, - } - - upgradeConfigBytes, err := json.Marshal(upgradeConfig) - require.NoError(t, err, "UpgradeOPChainV2Input should marshal to JSON") - err = embedded.DefaultUpgrader.Upgrade(host, upgradeConfigBytes) - require.NoError(t, err, "OPCM V2 chain upgrade should succeed") -} diff --git a/op-deployer/pkg/deployer/opcm/l2genesis.go b/op-deployer/pkg/deployer/opcm/l2genesis.go index 6a0c71cba1e..29deabfd740 100644 --- a/op-deployer/pkg/deployer/opcm/l2genesis.go +++ 
b/op-deployer/pkg/deployer/opcm/l2genesis.go @@ -39,6 +39,7 @@ type L2GenesisInput struct { GasPayingTokenSymbol string NativeAssetLiquidityAmount *big.Int LiquidityControllerOwner common.Address + UseL2CM bool } type L2GenesisScript script.DeployScriptWithoutOutput[L2GenesisInput] diff --git a/op-deployer/pkg/deployer/pipeline/l2genesis.go b/op-deployer/pkg/deployer/pipeline/l2genesis.go index b173f0e6f31..08e3f10b26c 100644 --- a/op-deployer/pkg/deployer/pipeline/l2genesis.go +++ b/op-deployer/pkg/deployer/pipeline/l2genesis.go @@ -7,15 +7,13 @@ import ( "github.com/ethereum-optimism/optimism/op-chain-ops/genesis" "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/ethereum-optimism/optimism/op-service/jsonutil" - - "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/standard" - "github.com/ethereum-optimism/optimism/op-deployer/pkg/env" + "github.com/ethereum-optimism/optimism/op-service/jsonutil" "github.com/ethereum-optimism/optimism/op-chain-ops/foundry" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/broadcaster" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/opcm" + "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/standard" "github.com/ethereum-optimism/optimism/op-deployer/pkg/deployer/state" "github.com/ethereum/go-ethereum/common" @@ -85,6 +83,16 @@ func GenerateL2Genesis(pEnv *Env, intent *state.Intent, bundle ArtifactsBundle, cgt := buildCGTConfig(thisIntent) + // Check if L2CM feature is enabled + var useL2CM bool + if devFeatureBitmap, ok := intent.GlobalDeployOverrides["devFeatureBitmap"].(common.Hash); ok { + // TODO(#19151): Replace this with the L2CMDevFlag constant when we fix import cycles. 
+ l2CMFlag := common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000100000") + if isDevFeatureEnabled(devFeatureBitmap, l2CMFlag) { + useL2CM = true + } + } + if err := script.Run(opcm.L2GenesisInput{ L1ChainID: new(big.Int).SetUint64(intent.L1ChainID), L2ChainID: chainID.Big(), @@ -118,6 +126,7 @@ func GenerateL2Genesis(pEnv *Env, intent *state.Intent, bundle ArtifactsBundle, GasPayingTokenSymbol: cgt.GasPayingTokenSymbol, NativeAssetLiquidityAmount: cgt.NativeAssetLiquidityAmount, LiquidityControllerOwner: cgt.LiquidityControllerOwner, + UseL2CM: useL2CM, }); err != nil { return fmt.Errorf("failed to call L2Genesis script: %w", err) } diff --git a/op-deployer/pkg/deployer/pipeline/opchain.go b/op-deployer/pkg/deployer/pipeline/opchain.go index 302a2c33547..c923bc54aeb 100644 --- a/op-deployer/pkg/deployer/pipeline/opchain.go +++ b/op-deployer/pkg/deployer/pipeline/opchain.go @@ -134,6 +134,7 @@ func makeDCI(intent *state.Intent, thisIntent *state.ChainIntent, chainID common // Select which OPCM to use based on dev feature flag opcmAddr := st.ImplementationsDeployment.OpcmImpl if devFeatureBitmap, ok := intent.GlobalDeployOverrides["devFeatureBitmap"].(common.Hash); ok { + // TODO(#19151): Replace this with the OPCMV2DevFlag constant when we fix import cycles. opcmV2Flag := common.HexToHash("0x0000000000000000000000000000000000000000000000000000000000010000") if isDevFeatureEnabled(devFeatureBitmap, opcmV2Flag) { opcmAddr = st.ImplementationsDeployment.OpcmV2Impl @@ -211,6 +212,7 @@ func shouldDeployOPChain(st *state.State, chainID common.Hash) bool { return true } +// TODO(#19151): Remove this function when we fix import cycles. // isDevFeatureEnabled checks if a specific development feature is enabled in a feature bitmap. // This mirrors the function in devfeatures.go to avoid import cycles. 
func isDevFeatureEnabled(bitmap, flag common.Hash) bool { diff --git a/op-deployer/pkg/deployer/testutil/superchain_config.go b/op-deployer/pkg/deployer/testutil/superchain_config.go new file mode 100644 index 00000000000..7e979ba649e --- /dev/null +++ b/op-deployer/pkg/deployer/testutil/superchain_config.go @@ -0,0 +1,50 @@ +package testutil + +import ( + "context" + "fmt" + + "github.com/Masterminds/semver/v3" + opbindings "github.com/ethereum-optimism/optimism/op-e2e/bindings" + "github.com/ethereum/go-ethereum/accounts/abi/bind" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/ethclient" +) + +func NeedsSuperchainConfigUpgrade( + ctx context.Context, + client *ethclient.Client, + currentProxy, targetImpl common.Address, +) (bool, error) { + currentVersion, err := superchainConfigVersion(ctx, client, currentProxy) + if err != nil { + return false, fmt.Errorf("failed to fetch proxy superchain config version: %w", err) + } + + targetVersion, err := superchainConfigVersion(ctx, client, targetImpl) + if err != nil { + return false, fmt.Errorf("failed to fetch implementation superchain config version: %w", err) + } + + return currentVersion.LessThan(targetVersion), nil +} + +func superchainConfigVersion( + ctx context.Context, + client *ethclient.Client, + addr common.Address, +) (*semver.Version, error) { + contract, err := opbindings.NewSuperchainConfig(addr, client) + if err != nil { + return nil, fmt.Errorf("failed to bind superchain config at %s: %w", addr.Hex(), err) + } + versionStr, err := contract.Version(&bind.CallOpts{Context: ctx}) + if err != nil { + return nil, fmt.Errorf("failed to read version from %s: %w", addr.Hex(), err) + } + version, err := semver.NewVersion(versionStr) + if err != nil { + return nil, fmt.Errorf("failed to parse version %q from %s: %w", versionStr, addr.Hex(), err) + } + return version, nil +} diff --git a/op-devstack/dsl/eoa.go b/op-devstack/dsl/eoa.go index 49ce2063c48..1cfbd859fac 100644 --- 
a/op-devstack/dsl/eoa.go +++ b/op-devstack/dsl/eoa.go @@ -1,6 +1,7 @@ package dsl import ( + "context" "fmt" "math/big" "math/rand" @@ -12,6 +13,7 @@ import ( e2eBindings "github.com/ethereum-optimism/optimism/op-e2e/bindings" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/retry" + "github.com/ethereum-optimism/optimism/op-service/testutils" "github.com/ethereum-optimism/optimism/op-service/txintent" txIntentBindings "github.com/ethereum-optimism/optimism/op-service/txintent/bindings" "github.com/ethereum-optimism/optimism/op-service/txintent/contractio" @@ -186,6 +188,35 @@ func (u *EOA) SendInitMessage(trigger *txintent.InitTrigger) (*txintent.IntentTx return tx, receipt } +// SendRandomInitMessage creates and sends a random initiating message using the given event logger. +// topicCount specifies the number of topics (clamped to 1-4), dataLen specifies the opaque data length (minimum 1). +func (u *EOA) SendRandomInitMessage(rng *rand.Rand, eventLoggerAddress common.Address, topicCount, dataLen int) (*txintent.IntentTx[*txintent.InitTrigger, *txintent.InteropOutput], *types.Receipt) { + // Clamp topicCount to valid range [1, 4] + if topicCount > 4 { + topicCount = 4 + } + if topicCount < 1 { + topicCount = 1 + } + // Ensure at least 1 byte of data + if dataLen < 1 { + dataLen = 1 + } + + // Generate random topics + topics := make([][32]byte, topicCount) + for i := range topics { + copy(topics[i][:], testutils.RandomData(rng, 32)) + } + + trigger := &txintent.InitTrigger{ + Emitter: eventLoggerAddress, + Topics: topics, + OpaqueData: testutils.RandomData(rng, dataLen), + } + return u.SendInitMessage(trigger) +} + func (u *EOA) SendExecMessage(initIntent *txintent.IntentTx[*txintent.InitTrigger, *txintent.InteropOutput], eventIdx int) (*txintent.IntentTx[*txintent.ExecTrigger, *txintent.InteropOutput], *types.Receipt) { tx := txintent.NewIntent[*txintent.ExecTrigger, *txintent.InteropOutput](u.Plan()) 
tx.Content.DependOn(&initIntent.Result) @@ -198,6 +229,39 @@ func (u *EOA) SendExecMessage(initIntent *txintent.IntentTx[*txintent.InitTrigge return tx, receipt } +// SendInvalidExecMessage sends an executing message with an invalid identifier. +// The log index is incremented to reference a non-existent log. +func (u *EOA) SendInvalidExecMessage( + initIntent *txintent.IntentTx[*txintent.InitTrigger, *txintent.InteropOutput], + eventIdx int, +) (*txintent.IntentTx[*txintent.ExecTrigger, *txintent.InteropOutput], *types.Receipt) { + result, err := initIntent.Result.Eval(u.ctx) + u.t.Require().NoError(err, "failed to evaluate init result") + u.t.Require().Greater(len(result.Entries), eventIdx, "event index out of range") + + // Get the message and modify it to be invalid by incrementing the log index + msg := result.Entries[eventIdx] + msg.Identifier.LogIndex++ + + // Create the exec trigger with the invalid message + execTrigger := &txintent.ExecTrigger{ + Executor: constants.CrossL2Inbox, + Msg: msg, + } + + // The Fn just returns the pre-built trigger + tx := txintent.NewIntent[*txintent.ExecTrigger, *txintent.InteropOutput](u.Plan()) + tx.Content.DependOn(&initIntent.Result) + tx.Content.Fn(func(ctx context.Context) (*txintent.ExecTrigger, error) { + return execTrigger, nil + }) + + receipt, err := tx.PlannedTx.Included.Eval(u.ctx) + u.t.Require().NoError(err, "invalid exec msg receipt not found") + u.log.Info("invalid exec message included", "chain", u.ChainID(), "block", receipt.BlockNumber) + return tx, receipt +} + // SendPackedRandomInitMessages batches random messages and initiates them via a single multicall func (u *EOA) SendPackedRandomInitMessages(rng *rand.Rand, eventLoggerAddress common.Address) (*txintent.IntentTx[*txintent.MultiTrigger, *txintent.InteropOutput], *types.Receipt, error) { // Intent to initiate messages diff --git a/op-devstack/dsl/l2_el.go b/op-devstack/dsl/l2_el.go index d1d80286640..cf9d866c216 100644 --- a/op-devstack/dsl/l2_el.go 
+++ b/op-devstack/dsl/l2_el.go @@ -397,6 +397,23 @@ func (el *L2ELNode) FinalizedHead() *BlockRefResult { return &BlockRefResult{T: el.t, BlockRef: el.BlockRefByLabel(eth.Finalized)} } +// AssertTxNotInBlock asserts that a transaction with the given hash does not exist in the block at the given number. +func (el *L2ELNode) AssertTxNotInBlock(blockNumber uint64, txHash common.Hash) { + ctx, cancel := context.WithTimeout(el.ctx, DefaultTimeout) + defer cancel() + + _, txs, err := el.inner.EthClient().InfoAndTxsByNumber(ctx, blockNumber) + el.require.NoError(err, "failed to fetch block %d", blockNumber) + + for _, tx := range txs { + if tx.Hash() == txHash { + el.require.Failf("transaction should not exist in block", + "tx_hash=%s found in block %d", txHash, blockNumber) + } + } + el.log.Info("confirmed transaction not in block", "blockNumber", blockNumber, "txHash", txHash) +} + type BlockRefResult struct { T devtest.T BlockRef eth.L2BlockRef diff --git a/op-devstack/dsl/l2_network.go b/op-devstack/dsl/l2_network.go index d2ae3f694a0..9a80d2bfe2b 100644 --- a/op-devstack/dsl/l2_network.go +++ b/op-devstack/dsl/l2_network.go @@ -45,6 +45,11 @@ func (n *L2Network) ChainID() eth.ChainID { return n.inner.ChainID() } +// TimestampForBlockNum returns the timestamp for the given L2 block number. 
+func (n *L2Network) TimestampForBlockNum(blockNum uint64) uint64 { + return n.inner.RollupConfig().TimestampForBlock(blockNum) +} + // Escape returns the underlying stack.L2Network func (n *L2Network) Escape() stack.L2Network { return n.inner diff --git a/op-devstack/dsl/proofs/dispute_game_factory.go b/op-devstack/dsl/proofs/dispute_game_factory.go index be6b04dbfad..99fab302134 100644 --- a/op-devstack/dsl/proofs/dispute_game_factory.go +++ b/op-devstack/dsl/proofs/dispute_game_factory.go @@ -5,7 +5,10 @@ import ( "encoding/binary" "math/big" "net/url" + "os" + "os/exec" "path" + "path/filepath" "time" challengerConfig "github.com/ethereum-optimism/optimism/op-challenger/config" @@ -13,6 +16,7 @@ import ( "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/outputs" "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/prestates" "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/super" + "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/utils" "github.com/ethereum-optimism/optimism/op-challenger/game/fault/trace/vm" gameTypes "github.com/ethereum-optimism/optimism/op-challenger/game/types" "github.com/ethereum-optimism/optimism/op-challenger/metrics" @@ -186,13 +190,13 @@ func (f *DisputeGameFactory) WaitForGame() *FaultDisputeGame { return f.GameAtIndex(initialCount) } -func (f *DisputeGameFactory) StartSuperCannonGame(eoa *dsl.EOA, opts ...GameOpt) *SuperFaultDisputeGame { +func (f *DisputeGameFactory) StartSuperCannonKonaGame(eoa *dsl.EOA, opts ...GameOpt) *SuperFaultDisputeGame { f.require.NotNil(f.superNode, "super node is required to start super games") - return f.startSuperCannonGameOfType(eoa, gameTypes.SuperCannonGameType, opts...) + return f.startSuperGameOfType(eoa, gameTypes.SuperCannonKonaGameType, opts...) 
} -func (f *DisputeGameFactory) startSuperCannonGameOfType(eoa *dsl.EOA, gameType gameTypes.GameType, opts ...GameOpt) *SuperFaultDisputeGame { +func (f *DisputeGameFactory) startSuperGameOfType(eoa *dsl.EOA, gameType gameTypes.GameType, opts ...GameOpt) *SuperFaultDisputeGame { cfg := NewGameCfg(opts...) if len(cfg.superOutputRoots) != 0 && cfg.rootClaimSet { f.t.Error("cannot set both super output roots and root claim in super game") @@ -270,13 +274,13 @@ func (f *DisputeGameFactory) honestTraceForGame(game *FaultDisputeGame) challeng f.challengerCfg.CannonKona, vm.NewKonaExecutor(), ) - case gameTypes.SuperCannonGameType: + case gameTypes.SuperCannonKonaGameType: return f.honestSuperCannonTrace( game, - f.challengerCfg.CannonAbsolutePreStateBaseURL, - f.challengerCfg.CannonAbsolutePreState, - f.challengerCfg.Cannon, - vm.NewOpProgramServerExecutor(f.log), + f.challengerCfg.CannonKonaAbsolutePreStateBaseURL, + f.challengerCfg.CannonKonaAbsolutePreState, + f.challengerCfg.CannonKona, + vm.NewKonaSuperExecutor(), ) default: f.require.Truef(false, "Honest trace not supported for game type %v", game.GameType()) @@ -450,6 +454,80 @@ func (f *DisputeGameFactory) safeTimestamp() uint64 { return resp.CurrentSafeTimestamp } +// RunFPP runs the fault proof program between the two supplied timestamps. Currently only supports kona-interop. +func (f *DisputeGameFactory) RunFPP(startTimestamp uint64, endTimestamp uint64) { + f.require.NotNil(f.superNode, "super node is required to run FPP") + f.require.NotNil(f.challengerCfg, "challenger config is required to run FPP") + + splitDepth := f.GameImpl(gameTypes.SuperCannonKonaGameType).SplitDepth() + + // Use the current L1 head that the super node has processed. Otherwise the trace provider will fail because the node is not sufficiently up to date. 
+ superRootResp, err := f.superNode.QueryAPI().SuperRootAtTimestamp(f.t.Ctx(), endTimestamp) + f.require.NoError(err, "Failed to fetch super root at timestamp") + l1Head := superRootResp.CurrentL1 + + prestateProvider := super.NewSuperNodePrestateProvider(f.superNode.QueryAPI(), startTimestamp) + traceProvider := super.NewSuperNodeTraceProvider( + f.log.New("role", "fpp-trace"), + prestateProvider, + f.superNode.QueryAPI(), + eth.BlockID{Hash: l1Head.Hash, Number: l1Head.Number}, + splitDepth, + startTimestamp, + endTimestamp, + ) + + tmpDir := f.t.TempDir() + + // Starting prestate is the aboslutePrestate + absolutePrestate, err := prestateProvider.AbsolutePreState(f.t.Ctx()) + f.require.NoError(err, "Failed to get absolute prestate") + agreedPrestate := absolutePrestate.Marshal() + + // Iterate through valid claims at splitDepth (the leaves of the top game) to get a few steps past the endTimestamp + for i := uint64(0); i < (endTimestamp-startTimestamp)*super.StepsPerTimestamp+3; i++ { + pos := challengerTypes.NewPosition(splitDepth, new(big.Int).SetUint64(i)) + + // Create LocalGameInputs using the previous claim (or anchor state) as agreed and current as disputed + claimedPreimage, err := traceProvider.GetPreimageBytes(f.t.Ctx(), pos) + f.require.NoError(err, "Failed to get claim at position %v", pos) + inputs := utils.LocalGameInputs{ + L1Head: l1Head.Hash, + AgreedPreState: agreedPrestate, + L2Claim: crypto.Keccak256Hash(claimedPreimage), + L2SequenceNumber: new(big.Int).SetUint64(endTimestamp), + } + + f.log.Info("Created LocalGameInputs for FPP", + "index", pos.IndexAtDepth(), + "l1Head", inputs.L1Head, + "l2Claim", inputs.L2Claim, + ) + + runFPPForStep(f, tmpDir, inputs) + + // This claim becomes the agreed prestate for the next iteration + agreedPrestate = claimedPreimage + } +} + +// runFPPForStep executes the native kona interop client using the LocalGameInputs and requires the claim to be successfully validated. 
+func runFPPForStep(f *DisputeGameFactory, tmpDir string, inputs utils.LocalGameInputs) { + executor := vm.NewNativeKonaSuperExecutor() + oracleCommand, err := executor.OracleCommand(f.challengerCfg.CannonKona, tmpDir, inputs) + f.require.NoError(err, "Failed to create command") + f.log.Info("Executing FPP", "command", oracleCommand) + exePath, err := filepath.Abs(oracleCommand[0]) + f.require.NoError(err, "Failed to get absolute path to executable") + cmd := exec.Command(exePath, oracleCommand[1:]...) + cmd.Dir = tmpDir + cmd.Stdout = os.Stdout + cmd.Stderr = os.Stderr + cmd.Env = append(append(cmd.Env, os.Environ()...), "NO_COLOR=1") + err = cmd.Run() + f.require.NoError(err, "Failed to execute game") +} + type GameHelperEOA struct { helper *GameHelper EOA *dsl.EOA diff --git a/op-devstack/dsl/supernode.go b/op-devstack/dsl/supernode.go index 74a0ec94f84..9dfeb0bbb6e 100644 --- a/op-devstack/dsl/supernode.go +++ b/op-devstack/dsl/supernode.go @@ -2,8 +2,10 @@ package dsl import ( "context" + "time" "github.com/ethereum-optimism/optimism/op-devstack/stack" + "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait" "github.com/ethereum-optimism/optimism/op-service/apis" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum/go-ethereum/common" @@ -12,7 +14,8 @@ import ( // Supernode wraps a stack.Supernode interface for DSL operations type Supernode struct { commonImpl - inner stack.Supernode + inner stack.Supernode + testControl stack.InteropTestControl } // NewSupernode creates a new Supernode DSL wrapper @@ -23,6 +26,16 @@ func NewSupernode(inner stack.Supernode) *Supernode { } } +// NewSupernodeWithTestControl creates a new Supernode DSL wrapper with test control support. +// The testControl parameter can be nil if no test control is needed. 
+func NewSupernodeWithTestControl(inner stack.Supernode, testControl stack.InteropTestControl) *Supernode { + return &Supernode{ + commonImpl: commonFromT(inner.T()), + inner: inner, + testControl: testControl, + } +} + func (s *Supernode) ID() stack.SupernodeID { return s.inner.ID() } @@ -57,3 +70,98 @@ func (s *Supernode) AssertSuperRootAtTimestamp(l2SequenceNumber uint64, rootClai superRoot := eth.SuperRoot(resp.Data.Super) s.require.Equal(superRoot[:], rootClaim[:]) } + +// AwaitValidatedTimestamp waits for the super-root at the given timestamp to be fully validated +func (s *Supernode) AwaitValidatedTimestamp(timestamp uint64) { + ctx, cancel := context.WithTimeout(s.ctx, DefaultTimeout) + defer cancel() + err := wait.For(ctx, 1*time.Second, func() (bool, error) { + resp, err := s.inner.QueryAPI().SuperRootAtTimestamp(ctx, timestamp) + if err != nil { + return false, nil // Ignore transient errors. + } + return resp.Data != nil, nil + }) + s.require.NoError(err, "super-root at timestamp %d was not validated in time", timestamp) +} + +// PauseInterop pauses the interop activity at the given timestamp. +// When the interop activity attempts to process this timestamp, it returns early. +// This function is for integration test control only. +// Requires the Supernode to be created with NewSupernodeWithTestControl. +func (s *Supernode) PauseInterop(ts uint64) { + s.require.NotNil(s.testControl, "PauseInterop requires test control; use NewSupernodeWithTestControl") + s.testControl.PauseInteropActivity(ts) +} + +// ResumeInterop clears any pause on the interop activity, allowing normal processing. +// This function is for integration test control only. +// Requires the Supernode to be created with NewSupernodeWithTestControl. 
+func (s *Supernode) ResumeInterop() { + s.require.NotNil(s.testControl, "ResumeInterop requires test control; use NewSupernodeWithTestControl") + s.testControl.ResumeInteropActivity() +} + +// EnsureInteropPaused pauses the interop activity and verifies it has stopped. +// It takes the local safe timestamps from two CL nodes, uses the maximum, then: +// 1. Pauses interop at localSafeTimestamp + pauseOffset +// 2. Awaits validation of localSafeTimestamp + pauseOffset - 1 +// 3. Finds the first timestamp that is NOT verified (the actual pause point) +// Returns the first unverified timestamp (adjusted if pause came in late). +// Requires the Supernode to be created with NewSupernodeWithTestControl. +func (s *Supernode) EnsureInteropPaused(clA, clB *L2CLNode, pauseOffset uint64) uint64 { + s.require.NotNil(s.testControl, "EnsureInteropPaused requires test control; use NewSupernodeWithTestControl") + + // Get the local safe of both chains from sync status + statusA := clA.SyncStatus() + statusB := clB.SyncStatus() + + // Use the maximum local safe timestamp between both chains + localSafeTimestamp := max(statusA.LocalSafeL2.Time, statusB.LocalSafeL2.Time) + + s.log.Info("EnsureInteropPaused: initial sync status", + "chainA_local_safe_num", statusA.LocalSafeL2.Number, + "chainA_local_safe_ts", statusA.LocalSafeL2.Time, + "chainB_local_safe_num", statusB.LocalSafeL2.Number, + "chainB_local_safe_ts", statusB.LocalSafeL2.Time, + "localSafeTimestamp", localSafeTimestamp, + ) + + pauseTimestamp := localSafeTimestamp + pauseOffset + awaitTimestamp := pauseTimestamp - 1 + + // Pause interop activity at the pause timestamp + s.testControl.PauseInteropActivity(pauseTimestamp) + + // Await interop validation of the timestamp before the pause + s.AwaitValidatedTimestamp(awaitTimestamp) + + s.log.Info("EnsureInteropPaused: validation confirmed before pause", "timestamp", awaitTimestamp) + + // Find the first timestamp that is NOT verified. 
+ // If the pause came in late, some timestamps past pauseTimestamp may already be verified. + // We scan forward to find where interop actually stopped. + ctx, cancel := context.WithTimeout(s.ctx, DefaultTimeout) + defer cancel() + + for ts := pauseTimestamp; ts < pauseTimestamp+100; ts++ { + resp, err := s.inner.QueryAPI().SuperRootAtTimestamp(ctx, ts) + if err != nil || resp.Data == nil { + // Found the first unverified timestamp + s.log.Info("EnsureInteropPaused: confirmed interop is paused", + "intendedPauseTimestamp", pauseTimestamp, + "actualPauseTimestamp", ts, + ) + return ts + } + // This timestamp is verified, continue scanning + s.log.Warn("EnsureInteropPaused: pause came in late, timestamp already verified", + "timestamp", ts, + "intendedPause", pauseTimestamp, + ) + } + + s.t.Error("EnsureInteropPaused: failed to find unverified timestamp within 100 timestamps") + s.t.FailNow() + return pauseTimestamp +} diff --git a/op-devstack/presets/interop.go b/op-devstack/presets/interop.go index 5ef379934a5..3ddd26647f3 100644 --- a/op-devstack/presets/interop.go +++ b/op-devstack/presets/interop.go @@ -6,6 +6,7 @@ import ( "github.com/ethereum/go-ethereum/log" "github.com/ethereum-optimism/optimism/op-chain-ops/devkeys" + challengerConfig "github.com/ethereum-optimism/optimism/op-challenger/config" "github.com/ethereum-optimism/optimism/op-core/forks" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" @@ -41,6 +42,9 @@ type SingleChainInterop struct { FaucetL1 *dsl.Faucet FunderL1 *dsl.Funder FunderA *dsl.Funder + + // May be nil if not using sysgo + challengerConfig *challengerConfig.Config } func NewSingleChainInterop(t devtest.T) *SingleChainInterop { @@ -68,22 +72,29 @@ func NewSingleChainInterop(t devtest.T) *SingleChainInterop { default: t.Gate().True(false, "expected at least one supervisor or supernode") } + + var challengerCfg *challengerConfig.Config + if len(l2A.L2Challengers()) > 0 { 
+ challengerCfg = l2A.L2Challengers()[0].Config() + } + out := &SingleChainInterop{ - Log: t.Logger(), - T: t, - system: system, - TestSequencer: dsl.NewTestSequencer(system.TestSequencer(match.Assume(t, match.FirstTestSequencer))), - Supervisor: supervisor, - SuperRoots: superRoots, - ControlPlane: orch.ControlPlane(), - L1Network: dsl.NewL1Network(l1Net), - L1EL: dsl.NewL1ELNode(l1Net.L1ELNode(match.Assume(t, match.FirstL1EL))), - L2ChainA: dsl.NewL2Network(l2A, orch.ControlPlane()), - L2ELA: dsl.NewL2ELNode(l2A.L2ELNode(match.Assume(t, match.FirstL2EL)), orch.ControlPlane()), - L2CLA: dsl.NewL2CLNode(l2A.L2CLNode(match.Assume(t, match.FirstL2CL)), orch.ControlPlane()), - Wallet: dsl.NewRandomHDWallet(t, 30), // Random for test isolation - FaucetA: dsl.NewFaucet(l2A.Faucet(match.Assume(t, match.FirstFaucet))), - L2BatcherA: dsl.NewL2Batcher(l2A.L2Batcher(match.Assume(t, match.FirstL2Batcher))), + Log: t.Logger(), + T: t, + system: system, + TestSequencer: dsl.NewTestSequencer(system.TestSequencer(match.Assume(t, match.FirstTestSequencer))), + Supervisor: supervisor, + SuperRoots: superRoots, + ControlPlane: orch.ControlPlane(), + L1Network: dsl.NewL1Network(l1Net), + L1EL: dsl.NewL1ELNode(l1Net.L1ELNode(match.Assume(t, match.FirstL1EL))), + L2ChainA: dsl.NewL2Network(l2A, orch.ControlPlane()), + L2ELA: dsl.NewL2ELNode(l2A.L2ELNode(match.Assume(t, match.FirstL2EL)), orch.ControlPlane()), + L2CLA: dsl.NewL2CLNode(l2A.L2CLNode(match.Assume(t, match.FirstL2CL)), orch.ControlPlane()), + Wallet: dsl.NewRandomHDWallet(t, 30), // Random for test isolation + FaucetA: dsl.NewFaucet(l2A.Faucet(match.Assume(t, match.FirstFaucet))), + L2BatcherA: dsl.NewL2Batcher(l2A.L2Batcher(match.Assume(t, match.FirstL2Batcher))), + challengerConfig: challengerCfg, } out.FaucetL1 = dsl.NewFaucet(out.L1Network.Escape().Faucet(match.Assume(t, match.FirstFaucet))) out.FunderL1 = dsl.NewFunder(out.Wallet, out.FaucetL1, out.L1EL) @@ -128,7 +139,7 @@ func (s *SimpleInterop) L2Networks() 
[]*dsl.L2Network { func (s *SimpleInterop) DisputeGameFactory() *proofs.DisputeGameFactory { supernode := s.system.Supernode(match.Assume(s.T, match.FirstSupernode)) - return proofs.NewDisputeGameFactory(s.T, s.L1Network, s.L1EL.EthClient(), s.L2ChainA.DisputeGameFactoryProxyAddr(), nil, nil, supernode, nil) + return proofs.NewDisputeGameFactory(s.T, s.L1Network, s.L1EL.EthClient(), s.L2ChainA.DisputeGameFactoryProxyAddr(), nil, nil, supernode, s.challengerConfig) } func (s *SingleChainInterop) StandardBridge(l2Chain *dsl.L2Network) *dsl.StandardBridge { diff --git a/op-devstack/presets/twol2.go b/op-devstack/presets/twol2.go index abdb9babb65..1584d91c5fc 100644 --- a/op-devstack/presets/twol2.go +++ b/op-devstack/presets/twol2.go @@ -146,6 +146,13 @@ func NewTwoL2SupernodeInterop(t devtest.T, delaySeconds uint64) *TwoL2SupernodeI l2aNet := dsl.NewL2Network(l2a, orch.ControlPlane()) genesisTime := l2aNet.Escape().RollupConfig().Genesis.L2Time + // Get the supernode and its test control + stackSupernode := system.Supernode(match.Assume(t, match.FirstSupernode)) + var testControl stack.InteropTestControl + if sysgoOrch, ok := orch.(*sysgo.Orchestrator); ok { + testControl = sysgoOrch.InteropTestControl(stackSupernode.ID()) + } + out := &TwoL2SupernodeInterop{ TwoL2: TwoL2{ Log: t.Logger(), @@ -158,7 +165,7 @@ func NewTwoL2SupernodeInterop(t devtest.T, delaySeconds uint64) *TwoL2SupernodeI L2ACL: dsl.NewL2CLNode(l2aCL, orch.ControlPlane()), L2BCL: dsl.NewL2CLNode(l2bCL, orch.ControlPlane()), }, - Supernode: dsl.NewSupernode(system.Supernode(match.Assume(t, match.FirstSupernode))), + Supernode: dsl.NewSupernodeWithTestControl(stackSupernode, testControl), L2ELA: dsl.NewL2ELNode(l2a.L2ELNode(match.Assume(t, match.FirstL2EL)), orch.ControlPlane()), L2ELB: dsl.NewL2ELNode(l2b.L2ELNode(match.Assume(t, match.FirstL2EL)), orch.ControlPlane()), L2BatcherA: dsl.NewL2Batcher(l2a.L2Batcher(match.Assume(t, match.FirstL2Batcher))), diff --git 
a/op-devstack/shared/challenger/challenger.go b/op-devstack/shared/challenger/challenger.go index 44e7719d60f..71abb5017c3 100644 --- a/op-devstack/shared/challenger/challenger.go +++ b/op-devstack/shared/challenger/challenger.go @@ -43,6 +43,7 @@ func WithDepset(ds *depset.StaticConfigDependencySet) Option { return fmt.Errorf("failed to write dependency set config: %w", err) } c.Cannon.DepsetConfigPath = path + c.CannonKona.DepsetConfigPath = path return nil } } @@ -71,7 +72,7 @@ func applyCannonConfig(c *config.Config, rollupCfgs []*rollup.Config, l1Genesis return nil } -func applyCannonKonaConfig(c *config.Config, rollupCfgs []*rollup.Config, l1Genesis *core.Genesis, l2Geneses []*core.Genesis) error { +func applyCannonKonaConfig(c *config.Config, rollupCfgs []*rollup.Config, l1Genesis *core.Genesis, l2Geneses []*core.Genesis, interop bool) error { root, err := findMonorepoRoot() if err != nil { return err @@ -79,14 +80,21 @@ func applyCannonKonaConfig(c *config.Config, rollupCfgs []*rollup.Config, l1Gene if err := applyVmConfig(root, &c.CannonKona, c.Datadir, rollupCfgs, l1Genesis, l2Geneses); err != nil { return err } - c.CannonKona.Server = root + "kona/target/release/kona-host" + c.CannonKona.Server = root + "rust/target/release/kona-host" absRoot, err := filepath.Abs(root) if err != nil { return fmt.Errorf("failed to get absolute path to prestate dir: %w", err) } - c.CannonKonaAbsolutePreStateBaseURL, err = url.Parse("file:" + absRoot + "/kona/prestate-artifacts-cannon") - if err != nil { - return fmt.Errorf("failed to create kona prestates url: %w", err) + if interop { + c.CannonKonaAbsolutePreStateBaseURL, err = url.Parse("file:" + absRoot + "/rust/kona/prestate-artifacts-cannon-interop") + if err != nil { + return err + } + } else { + c.CannonKonaAbsolutePreStateBaseURL, err = url.Parse("file:" + absRoot + "/rust/kona/prestate-artifacts-cannon") + if err != nil { + return err + } } return nil } @@ -149,7 +157,13 @@ func WithCannonConfig(rollupCfgs 
[]*rollup.Config, l1Genesis *core.Genesis, l2Ge func WithCannonKonaConfig(rollupCfgs []*rollup.Config, l1Genesis *core.Genesis, l2Geneses []*core.Genesis) Option { return func(c *config.Config) error { - return applyCannonKonaConfig(c, rollupCfgs, l1Genesis, l2Geneses) + return applyCannonKonaConfig(c, rollupCfgs, l1Genesis, l2Geneses, false) + } +} + +func WithCannonKonaInteropConfig(rollupCfgs []*rollup.Config, l1Genesis *core.Genesis, l2Geneses []*core.Genesis) Option { + return func(c *config.Config) error { + return applyCannonKonaConfig(c, rollupCfgs, l1Genesis, l2Geneses, true) } } @@ -181,6 +195,13 @@ func WithSuperCannonGameType() Option { } } +func WithSuperCannonKonaGameType() Option { + return func(c *config.Config) error { + c.GameTypes = append(c.GameTypes, gameTypes.SuperCannonKonaGameType) + return nil + } +} + func WithSuperPermissionedGameType() Option { return func(c *config.Config) error { c.GameTypes = append(c.GameTypes, gameTypes.SuperPermissionedGameType) diff --git a/op-devstack/stack/supernode.go b/op-devstack/stack/supernode.go index a88b07ef627..c7b1fa6080f 100644 --- a/op-devstack/stack/supernode.go +++ b/op-devstack/stack/supernode.go @@ -62,3 +62,16 @@ type Supernode interface { ID() SupernodeID QueryAPI() apis.SupernodeQueryAPI } + +// InteropTestControl provides integration test control methods for the interop activity. +// This interface is for integration test control only. +type InteropTestControl interface { + // PauseInteropActivity pauses the interop activity at the given timestamp. + // When the interop activity attempts to process this timestamp, it returns early. + // This function is for integration test control only. + PauseInteropActivity(ts uint64) + + // ResumeInteropActivity clears any pause on the interop activity, allowing normal processing. + // This function is for integration test control only. 
+ ResumeInteropActivity() +} diff --git a/op-devstack/sysgo/l2_challenger.go b/op-devstack/sysgo/l2_challenger.go index 9964320766b..1a29e80b55f 100644 --- a/op-devstack/sysgo/l2_challenger.go +++ b/op-devstack/sysgo/l2_challenger.go @@ -2,6 +2,8 @@ package sysgo import ( "context" + "runtime" + "time" "github.com/ethereum-optimism/optimism/op-chain-ops/devkeys" opchallenger "github.com/ethereum-optimism/optimism/op-challenger" @@ -168,8 +170,8 @@ func WithL2ChallengerPostDeploy(orch *Orchestrator, challengerID stack.L2Challen } if orch.l2ChallengerOpts.useCannonKonaConfig { options = append(options, - shared.WithCannonKonaConfig(rollupCfgs, l1Genesis, l2Geneses), - shared.WithCannonKonaGameType(), + shared.WithCannonKonaInteropConfig(rollupCfgs, l1Genesis, l2Geneses), + shared.WithSuperCannonKonaGameType(), ) } cfg, err = shared.NewInteropChallengerConfig(dir, l1EL.UserRPC(), l1CL.beaconHTTPAddr, superRPC, l2ELRPCs, options...) @@ -217,7 +219,18 @@ func WithL2ChallengerPostDeploy(orch *Orchestrator, challengerID stack.L2Challen ctx, cancel := context.WithCancel(ctx) cancel() // force-quit logger.Info("Closing challenger") + // Start a separate goroutine to print a stack trace if the challenger fails to stop in a timely manner. + timer := time.AfterFunc(1*time.Minute, func() { + if svc.Stopped() { + return + } + // Print stack trace of all goroutines + buf := make([]byte, 1<<20) // 1MB buffer + stacklen := runtime.Stack(buf, true) + logger.Error("Challenger failed to stop; printing all goroutine stacks:\n%v", string(buf[:stacklen])) + }) _ = svc.Stop(ctx) + timer.Stop() logger.Info("Closed challenger") }) diff --git a/op-devstack/sysgo/l2_cl_supernode.go b/op-devstack/sysgo/l2_cl_supernode.go index 75906923813..2da664fa20d 100644 --- a/op-devstack/sysgo/l2_cl_supernode.go +++ b/op-devstack/sysgo/l2_cl_supernode.go @@ -143,6 +143,26 @@ func (n *SuperNode) Stop() { n.sn = nil } +// PauseInteropActivity pauses the interop activity at the given timestamp. 
+// This function is for integration test control only. +func (n *SuperNode) PauseInteropActivity(ts uint64) { + n.mu.Lock() + defer n.mu.Unlock() + if n.sn != nil { + n.sn.PauseInteropActivity(ts) + } +} + +// ResumeInteropActivity clears any pause on the interop activity. +// This function is for integration test control only. +func (n *SuperNode) ResumeInteropActivity() { + n.mu.Lock() + defer n.mu.Unlock() + if n.sn != nil { + n.sn.ResumeInteropActivity() + } +} + // WithSupernode constructs a Supernode-based L2 CL node func WithSupernode(supernodeID stack.SupernodeID, l2CLID stack.L2CLNodeID, l1CLID stack.L1CLNodeID, l1ELID stack.L1ELNodeID, l2ELID stack.L2ELNodeID, opts ...L2CLOption) stack.Option[*Orchestrator] { args := []L2CLs{{CLID: l2CLID, ELID: l2ELID}} diff --git a/op-devstack/sysgo/orchestrator.go b/op-devstack/sysgo/orchestrator.go index db8d4c1ced9..9243fda3408 100644 --- a/op-devstack/sysgo/orchestrator.go +++ b/op-devstack/sysgo/orchestrator.go @@ -179,6 +179,17 @@ func (o *Orchestrator) RegisterL2MetricsTargets(id stack.IDWithChain, endpoints } } +// InteropTestControl returns the InteropTestControl for a given SupernodeID. +// Returns nil if the supernode doesn't exist or doesn't implement the interface. +// This function is for integration test control only. 
+func (o *Orchestrator) InteropTestControl(id stack.SupernodeID) stack.InteropTestControl { + sn, ok := o.supernodes.Get(id) + if !ok { + return nil + } + return sn +} + type hydrator interface { hydrate(system stack.ExtensibleSystem) } diff --git a/op-devstack/sysgo/superroot.go b/op-devstack/sysgo/superroot.go index 942cc3f065b..2ed4708e285 100644 --- a/op-devstack/sysgo/superroot.go +++ b/op-devstack/sysgo/superroot.go @@ -339,11 +339,11 @@ func getInteropCannonAbsolutePrestate(t devtest.CommonT) common.Hash { } func getInteropCannonKonaAbsolutePrestate(t devtest.CommonT) common.Hash { - return getAbsolutePrestate(t, "kona/prestate-artifacts-cannon-interop/prestate-proof.json") + return getAbsolutePrestate(t, "rust/kona/prestate-artifacts-cannon-interop/prestate-proof.json") } func getCannonKonaAbsolutePrestate(t devtest.CommonT) common.Hash { - return getAbsolutePrestate(t, "kona/prestate-artifacts-cannon/prestate-proof.json") + return getAbsolutePrestate(t, "rust/kona/prestate-artifacts-cannon/prestate-proof.json") } func getAbsolutePrestate(t devtest.CommonT, prestatePath string) common.Hash { diff --git a/op-e2e/actions/helpers/l2_verifier.go b/op-e2e/actions/helpers/l2_verifier.go index aeab9fcb99f..08a4ee69e95 100644 --- a/op-e2e/actions/helpers/l2_verifier.go +++ b/op-e2e/actions/helpers/l2_verifier.go @@ -150,7 +150,7 @@ func NewL2Verifier(t Testing, log log.Logger, l1 derive.L1Fetcher, supervisorEnabled := interopSys != nil metrics := &testutils.TestDerivationMetrics{} - ec := engine.NewEngineController(ctx, eng, log, opnodemetrics.NoopMetrics, cfg, syncCfg, supervisorEnabled, l1, sys.Register("engine-controller", nil, opts)) + ec := engine.NewEngineController(ctx, eng, log, opnodemetrics.NoopMetrics, cfg, syncCfg, supervisorEnabled, l1, sys.Register("engine-controller", nil, opts), nil) if mm, ok := interopSys.(*indexing.IndexingMode); ok { mm.SetEngineController(ec) diff --git a/op-e2e/actions/supernode/rewind_test.go 
b/op-e2e/actions/supernode/rewind_test.go index bc4ff2550fc..f8282a2f71c 100644 --- a/op-e2e/actions/supernode/rewind_test.go +++ b/op-e2e/actions/supernode/rewind_test.go @@ -195,7 +195,6 @@ func RewindSafeHeadBackward(gt *testing.T) { } // RewindFinalizedHeadBackward tests rewinding to a target behind the current finalized head. -// All heads (unsafe, safe, finalized) should move backward. func RewindFinalizedHeadBackward(gt *testing.T) { env := setupRewindTest(gt) @@ -265,12 +264,7 @@ func RewindFinalizedHeadBackward(gt *testing.T) { rewindTarget := finalizedBefore.Number / 2 require.Greater(gt, rewindTarget, uint64(0), "rewind target should be past genesis") require.Less(gt, rewindTarget, finalizedBefore.Number, "rewind target should be behind finalized") - unsafeAfter, safeAfter, finalizedAfter := env.rewindToBlock(rewindTarget) - // Verify: all heads moved backward to target - require.Equal(gt, rewindTarget, unsafeAfter.Number, "unsafe should be at rewind target") - require.Equal(gt, rewindTarget, safeAfter.Number, "safe should have moved backward to target") - require.Equal(gt, rewindTarget, finalizedAfter.Number, "finalized should have moved backward to target") - require.Less(gt, safeAfter.Number, safeBefore.Number, "safe should have decreased") - require.Less(gt, finalizedAfter.Number, finalizedBefore.Number, "finalized should have decreased") + err := env.ec.RewindToTimestamp(context.Background(), env.timestampForBlock(rewindTarget)) + require.ErrorIs(gt, err, engine_controller.ErrRewindOverFinalizedHead) } diff --git a/op-e2e/e2eutils/blobstore/blobs.go b/op-e2e/e2eutils/blobstore/blobs.go index 45fcc5dfddd..05e5ac31691 100644 --- a/op-e2e/e2eutils/blobstore/blobs.go +++ b/op-e2e/e2eutils/blobstore/blobs.go @@ -3,9 +3,10 @@ package blobstore import ( "context" "fmt" + "slices" "github.com/ethereum/go-ethereum" - "github.com/ethereum/go-ethereum/crypto/kzg4844" + "github.com/ethereum/go-ethereum/common" 
"github.com/ethereum-optimism/optimism/op-node/rollup/derive" "github.com/ethereum-optimism/optimism/op-service/eth" @@ -30,82 +31,57 @@ func (store *Store) StoreBlob(blockTime uint64, indexedHash eth.IndexedBlobHash, m[indexedHash] = blob } -func (store *Store) GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { - out := make([]*eth.Blob, 0, len(hashes)) - m, ok := store.blobs[ref.Time] +// GetBlobsByHash returns a slice of blobs in the slot at the given timestamp, +// corresponding to the supplied versioned hashes. +// If the provided hashes is empty, all blobs in the store at the supplied timestamp are returned. +// Blobs are ordered by their index in the block. +func (store *Store) GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) { + blobMap, ok := store.blobs[time] if !ok { return nil, fmt.Errorf("no blobs known with given time: %w", ethereum.NotFound) } - for _, h := range hashes { - b, ok := m[h] - if !ok { - return nil, fmt.Errorf("blob %d %s is not in store: %w", h.Index, h.Hash, ethereum.NotFound) + + // Case of empty hashes + if len(hashes) == 0 { + out := make([]*eth.Blob, len(blobMap)) + for k, v := range blobMap { + out[k.Index] = v } - out = append(out, b) + return out, nil } - return out, nil -} -func (store *Store) GetBlobSidecars(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.BlobSidecar, error) { - out := make([]*eth.BlobSidecar, 0, len(hashes)) - m, ok := store.blobs[ref.Time] - if !ok { - return nil, fmt.Errorf("no blobs known with given time: %w", ethereum.NotFound) + // When hashes is not empty, + type indexedBlob struct { + Index uint64 + Blob *eth.Blob } - for _, h := range hashes { - b, ok := m[h] - if !ok { - return nil, fmt.Errorf("blob %d %s is not in store: %w", h.Index, h.Hash, ethereum.NotFound) - } - if b == nil { - return nil, fmt.Errorf("blob %d %s is nil, cannot copy: %w", h.Index, h.Hash, 
ethereum.NotFound) - } - commitment, err := kzg4844.BlobToCommitment(b.KZGBlob()) - if err != nil { - return nil, fmt.Errorf("failed to convert blob to commitment: %w", err) - } - proof, err := kzg4844.ComputeBlobProof(b.KZGBlob(), commitment) - if err != nil { - return nil, fmt.Errorf("failed to compute blob proof: %w", err) + // find the blob for each hash + indexedBlobSlice := make([]indexedBlob, 0, len(hashes)) + for _, h := range hashes { + for k, v := range blobMap { + if h == k.Hash { + indexedBlobSlice = append(indexedBlobSlice, indexedBlob{Index: k.Index, Blob: v}) + } } - out = append(out, ð.BlobSidecar{ - Index: eth.Uint64String(h.Index), - Blob: *b, - KZGCommitment: eth.Bytes48(commitment), - KZGProof: eth.Bytes48(proof), - }) } - return out, nil -} -func (store *Store) GetAllSidecars(ctx context.Context, l1Timestamp uint64) ([]*eth.BlobSidecar, error) { - m, ok := store.blobs[l1Timestamp] - if !ok { - return nil, fmt.Errorf("no blobs known with given time: %w", ethereum.NotFound) + if len(indexedBlobSlice) != len(hashes) { + return nil, fmt.Errorf("not all blobs found") } - out := make([]*eth.BlobSidecar, len(m)) - for h, b := range m { - if b == nil { - return nil, fmt.Errorf("blob %d %s is nil, cannot copy: %w", h.Index, h.Hash, ethereum.NotFound) - } - commitment, err := kzg4844.BlobToCommitment(b.KZGBlob()) - if err != nil { - return nil, fmt.Errorf("failed to convert blob to commitment: %w", err) - } - proof, err := kzg4844.ComputeBlobProof(b.KZGBlob(), commitment) - if err != nil { - return nil, fmt.Errorf("failed to compute blob proof: %w", err) - } - out[h.Index] = ð.BlobSidecar{ - Index: eth.Uint64String(h.Index), - Blob: *b, - KZGCommitment: eth.Bytes48(commitment), - KZGProof: eth.Bytes48(proof), - } + // sort by index + slices.SortFunc(indexedBlobSlice, func(a, b indexedBlob) int { + return int(a.Index) - int(b.Index) + }) + + // extract blobs + blobSlice := make([]*eth.Blob, len(indexedBlobSlice)) + for i, blob := range indexedBlobSlice { 
+ blobSlice[i] = blob.Blob } - return out, nil + + return blobSlice, nil } var _ derive.L1BlobsFetcher = (*Store)(nil) diff --git a/op-e2e/e2eutils/fakebeacon/blobs.go b/op-e2e/e2eutils/fakebeacon/blobs.go index 85cef77e4f4..7872767c25d 100644 --- a/op-e2e/e2eutils/fakebeacon/blobs.go +++ b/op-e2e/e2eutils/fakebeacon/blobs.go @@ -2,13 +2,11 @@ package fakebeacon import ( "context" - "encoding/binary" "encoding/json" "errors" "fmt" "net" "net/http" - "slices" "strconv" "strings" "sync" @@ -19,7 +17,6 @@ import ( "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/beacon/engine" "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/crypto/kzg4844" "github.com/ethereum/go-ethereum/log" ) @@ -70,89 +67,25 @@ func (f *FakeBeacon) Start(addr string) error { f.log.Error("config handler err", "err", err) } }) - mux.HandleFunc("/eth/v1/beacon/blob_sidecars/", func(w http.ResponseWriter, r *http.Request) { - blockID := strings.TrimPrefix(r.URL.Path, "/eth/v1/beacon/blob_sidecars/") + mux.HandleFunc("/eth/v1/beacon/blobs/", func(w http.ResponseWriter, r *http.Request) { + blockID := strings.TrimPrefix(r.URL.Path, "/eth/v1/beacon/blobs/") slot, err := strconv.ParseUint(blockID, 10, 64) if err != nil { f.log.Error("could not parse block id from request", "url", r.URL.Path, "err", err) w.WriteHeader(http.StatusBadRequest) return } - bundle, err := f.LoadBlobsBundle(slot) - if errors.Is(err, ethereum.NotFound) { - f.log.Error("failed to load blobs bundle - not found", "slot", slot, "err", err) - w.WriteHeader(http.StatusNotFound) - return - } else if err != nil { - f.log.Error("failed to load blobs bundle", "slot", slot, "err", err) - w.WriteHeader(http.StatusInternalServerError) - return - } query := r.URL.Query() - rawIndices := query["indices"] - indices := make([]uint64, 0, len(bundle.Blobs)) - if len(rawIndices) == 0 { - // request is for all blobs - for i := range bundle.Blobs { - indices 
= append(indices, uint64(i)) - } - } else { - for _, raw := range rawIndices { - ix, err := strconv.ParseUint(raw, 10, 64) - if err != nil { - f.log.Error("could not parse index from request", "url", r.URL) - w.WriteHeader(http.StatusBadRequest) - return - } - indices = append(indices, ix) - } + versionedHashesFromQueryHex := query["versioned_hashes"] + versionedHashesFromQuery := make([]common.Hash, 0, len(versionedHashesFromQueryHex)) + for _, h := range versionedHashesFromQueryHex { + versionedHashesFromQuery = append(versionedHashesFromQuery, common.HexToHash(h)) } - var mockBeaconBlockRoot [32]byte - mockBeaconBlockRoot[0] = 42 - binary.LittleEndian.PutUint64(mockBeaconBlockRoot[32-8:], slot) - sidecars := make([]*eth.APIBlobSidecar, len(indices)) - for i, ix := range indices { - if ix >= uint64(len(bundle.Blobs)) { - f.log.Error("blob index from request is out of range", "url", r.URL) - w.WriteHeader(http.StatusBadRequest) - return - } - - // From Fulu onwards, a blob proof is not provided. - // Derivation should not rely on a valid proof here. 
- proof := eth.Bytes48(kzg4844.Proof(hexutil.MustDecode("0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"))) - - sidecars[i] = ð.APIBlobSidecar{ - Index: eth.Uint64String(ix), - KZGCommitment: eth.Bytes48(bundle.Commitments[ix]), - KZGProof: proof, - SignedBlockHeader: eth.SignedBeaconBlockHeader{ - Message: eth.BeaconBlockHeader{ - StateRoot: mockBeaconBlockRoot, - Slot: eth.Uint64String(slot), - }, - }, - InclusionProof: make([]eth.Bytes32, 0), - } - copy(sidecars[i].Blob[:], bundle.Blobs[ix]) - } - if err := json.NewEncoder(w).Encode(ð.APIGetBlobSidecarsResponse{Data: sidecars}); err != nil { - f.log.Error("blobs handler err", "err", err) - } - }) - mux.HandleFunc("/eth/v1/beacon/blobs/", func(w http.ResponseWriter, r *http.Request) { - blockID := strings.TrimPrefix(r.URL.Path, "/eth/v1/beacon/blobs/") - slot, err := strconv.ParseUint(blockID, 10, 64) + blobs, err := f.LoadBlobsByHash(slot, versionedHashesFromQuery) if err != nil { - f.log.Error("could not parse block id from request", "url", r.URL.Path, "err", err) - w.WriteHeader(http.StatusBadRequest) - return - } - bundle, err := f.LoadBlobsBundle(slot) - if err != nil { - f.log.Error("failed to load blobs bundle", "slot", slot, "err", err) + f.log.Error("failed to load blobs", "slot", slot, "err", err) if errors.Is(err, ethereum.NotFound) { w.WriteHeader(http.StatusNotFound) } else { @@ -161,21 +94,6 @@ func (f *FakeBeacon) Start(addr string) error { return } - query := r.URL.Query() - versionedHashes := make([]common.Hash, 0, len(bundle.Blobs)) - for _, raw := range query["versioned_hashes"] { - versionedHashes = append(versionedHashes, common.HexToHash(raw)) - } - blobs := make([]*eth.Blob, 0) - for i := range bundle.Blobs { - blob := eth.Blob(bundle.Blobs[i]) - versionedHash := eth.KZGToVersionedHash(kzg4844.Commitment(bundle.Commitments[i])) - if len(versionedHashes) > 0 && !slices.Contains(versionedHashes, versionedHash) { - continue - } - blobs = 
append(blobs, &blob) - } - if err := json.NewEncoder(w).Encode(ð.APIBeaconBlobsResponse{Data: blobs}); err != nil { f.log.Error("blobs handler err", "err", err) } @@ -224,7 +142,10 @@ func (f *FakeBeacon) StoreBlobsBundle(slot uint64, bundle *engine.BlobsBundle) e return nil } -func (f *FakeBeacon) LoadBlobsBundle(slot uint64) (*engine.BlobsBundle, error) { +// LoadBlobsByHashreturns a slice of blobs in the given slot, corresponding to the supplied versioned hashes. +// If the provided hashes is empty, all blobs in the store at the supplied timestamp are returned. +// Blobs are ordered by their index in the block. +func (f *FakeBeacon) LoadBlobsByHash(slot uint64, hashes []common.Hash) ([]*eth.Blob, error) { f.blobsLock.Lock() defer f.blobsLock.Unlock() @@ -233,25 +154,7 @@ func (f *FakeBeacon) LoadBlobsBundle(slot uint64) (*engine.BlobsBundle, error) { // timestamp = slot * slot_time + genesis slotTimestamp := slot*f.blockTime + f.genesisTime - // Load blobs from the store - blobs, err := f.blobStore.GetAllSidecars(context.Background(), slotTimestamp) - if err != nil { - return nil, fmt.Errorf("failed to load blobs from store: %w", err) - } - - // Convert blobs to the bundle - out := engine.BlobsBundle{ - Commitments: make([]hexutil.Bytes, len(blobs)), - Proofs: make([]hexutil.Bytes, len(blobs)), - Blobs: make([]hexutil.Bytes, len(blobs)), - } - for _, b := range blobs { - out.Commitments[b.Index] = hexutil.Bytes(b.KZGCommitment[:]) - out.Proofs[b.Index] = hexutil.Bytes(b.KZGProof[:]) - out.Blobs[b.Index] = hexutil.Bytes(b.Blob[:]) - } - - return &out, nil + return f.blobStore.GetBlobsByHash(context.Background(), slotTimestamp, hashes) } func (f *FakeBeacon) Close() error { diff --git a/op-e2e/e2eutils/fakebeacon/blobs_test.go b/op-e2e/e2eutils/fakebeacon/blobs_test.go new file mode 100644 index 00000000000..36b5deb605e --- /dev/null +++ b/op-e2e/e2eutils/fakebeacon/blobs_test.go @@ -0,0 +1,156 @@ +package fakebeacon + +import ( + "encoding/json" + "fmt" + 
"net/http" + "testing" + + "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/blobstore" + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/beacon/engine" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/crypto/kzg4844" + "github.com/ethereum/go-ethereum/log" + "github.com/stretchr/testify/require" + + "github.com/ethereum-optimism/optimism/op-service/testlog" +) + +// TestBlobsEndpoints consolidates multiple checks of the /eth/v1/beacon/blobs/ endpoint +// into subtests that share a single setup. The subtests cover: +// - returning all blobs for a slot (no query params) +// - returning blobs filtered by a single versioned_hash +// - returning blobs filtered by multiple versioned_hashes (both should be returned) +func TestBlobsEndpoints(t *testing.T) { + t.Parallel() + + l := testlog.Logger(t, log.LevelInfo) + + // shared setup: in-memory blob store + blobStore := blobstore.New() + zero := uint64(0) + beaconApi := NewBeacon(l, blobStore, zero, uint64(12)) + t.Cleanup(func() { _ = beaconApi.Close() }) + require.NoError(t, beaconApi.Start("127.0.0.1:0")) + + blobToCommitmentProofAndBundle := func(blob eth.Blob) (kzg4844.Commitment, kzg4844.Proof, engine.BlobsBundle) { + kzgBlob := kzg4844.Blob(blob) + commitment, err := kzg4844.BlobToCommitment(&kzgBlob) + require.NoError(t, err) + proof, err := kzg4844.ComputeBlobProof(&kzgBlob, commitment) + require.NoError(t, err) + bundle := engine.BlobsBundle{ + Commitments: []hexutil.Bytes{hexutil.Bytes(commitment[:])}, + Proofs: []hexutil.Bytes{hexutil.Bytes(proof[:])}, + Blobs: []hexutil.Bytes{hexutil.Bytes(blob[:])}, + } + return commitment, proof, bundle + } + + // Prepare bundles for different slots used in subtests. 
+ + // Slot 10: single blob (pattern used by first subtest) + var blobSlot10 eth.Blob + for i := range blobSlot10 { + blobSlot10[i] = 0x01 + } + _, _, bundle10 := blobToCommitmentProofAndBundle(blobSlot10) + slot10 := uint64(10) + require.NoError(t, beaconApi.StoreBlobsBundle(slot10, &bundle10)) + + // Slot 20: single blob, we'll query by its versioned hash + var blobSlot20 eth.Blob + blobSlot20[0] = 0x42 + commit20, _, bundle20 := blobToCommitmentProofAndBundle(blobSlot20) + slot20 := uint64(20) + require.NoError(t, beaconApi.StoreBlobsBundle(slot20, &bundle20)) + + // Slot 15: four blobs; used to test multiple versioned_hashes query + var blobA, blobB, blobC, blobD eth.Blob + blobA[0] = 0x11 + blobB[0] = 0x22 + blobC[0] = 0x33 + blobD[0] = 0x44 + commitA, proofA, _ := blobToCommitmentProofAndBundle(blobA) + commitB, proofB, _ := blobToCommitmentProofAndBundle(blobB) + commitC, proofC, _ := blobToCommitmentProofAndBundle(blobC) + commitD, proofD, _ := blobToCommitmentProofAndBundle(blobD) + bundle15 := engine.BlobsBundle{ + Commitments: []hexutil.Bytes{hexutil.Bytes(commitA[:]), hexutil.Bytes(commitB[:]), hexutil.Bytes(commitC[:]), hexutil.Bytes(commitD[:])}, + Proofs: []hexutil.Bytes{hexutil.Bytes(proofA[:]), hexutil.Bytes(proofB[:]), hexutil.Bytes(proofC[:]), hexutil.Bytes(proofD[:])}, + Blobs: []hexutil.Bytes{hexutil.Bytes(blobA[:]), hexutil.Bytes(blobB[:]), hexutil.Bytes(blobC[:]), hexutil.Bytes(blobD[:])}, + } + slot15 := uint64(15) + require.NoError(t, beaconApi.StoreBlobsBundle(slot15, &bundle15)) + + // Helper to perform GET and decode response + getBlobs := func(url string) (eth.APIBeaconBlobsResponse, error) { + var resp eth.APIBeaconBlobsResponse + r, err := http.Get(url) + if err != nil { + return resp, err + } + defer r.Body.Close() + if r.StatusCode != http.StatusOK { + return resp, fmt.Errorf("unexpected status: %d", r.StatusCode) + } + return resp, json.NewDecoder(r.Body).Decode(&resp) + } + + t.Run("GetAllBlobsForSlot", func(t *testing.T) { + 
t.Parallel() + url := fmt.Sprintf("%s/eth/v1/beacon/blobs/%d", beaconApi.BeaconAddr(), slot10) + apiResp, err := getBlobs(url) + require.NoError(t, err) + require.Len(t, apiResp.Data, 1) + require.Equal(t, blobSlot10, *apiResp.Data[0]) + }) + + t.Run("GetBlobsBySingleVersionedHash", func(t *testing.T) { + t.Parallel() + // compute versioned hash for slot20's commitment + vh := eth.KZGToVersionedHash(commit20) + + url := fmt.Sprintf("%s/eth/v1/beacon/blobs/%d?versioned_hashes=%s", beaconApi.BeaconAddr(), slot20, vh.Hex()) + apiResp, err := getBlobs(url) + require.NoError(t, err) + require.Len(t, apiResp.Data, 1) + require.Equal(t, blobSlot20, *apiResp.Data[0]) + }) + + t.Run("GetBlobsByMultipleVersionedHashesProperSubset", func(t *testing.T) { + t.Parallel() + vhA := eth.KZGToVersionedHash(commitA) + vhC := eth.KZGToVersionedHash(commitC) + + // Provide two versioned_hashes params; + // Let's reverse the order in the query params for a stronger test + // And remember we stored 4 blobs in this slot, so the query is for a proper subset + url := fmt.Sprintf("%s/eth/v1/beacon/blobs/%d?versioned_hashes=%s&versioned_hashes=%s", beaconApi.BeaconAddr(), slot15, vhC.Hex(), vhA.Hex()) + apiResp, err := getBlobs(url) + require.NoError(t, err) + // Both blobs should be returned (order is not strictly specified by the endpoint), + // so assert we have exactly two and that both expected blobs are present. 
+ require.Len(t, apiResp.Data, 2) + + require.Condition(t, func() bool { + for _, b := range apiResp.Data { + if *b == blobA { + return true + } + } + return false + }, "blobA not returned") + + require.Condition(t, func() bool { + for _, b := range apiResp.Data { + if *b == blobC { + return true + } + } + return false + }, "blobC not returned") + + }) +} diff --git a/op-e2e/system/da/eip4844_test.go b/op-e2e/system/da/eip4844_test.go index 2b0ca172f28..ef24940a917 100644 --- a/op-e2e/system/da/eip4844_test.go +++ b/op-e2e/system/da/eip4844_test.go @@ -23,7 +23,6 @@ import ( "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/wait" "github.com/ethereum-optimism/optimism/op-node/rollup/derive" "github.com/ethereum-optimism/optimism/op-service/bigs" - "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/testlog" "github.com/ethereum-optimism/optimism/op-service/testutils" "github.com/ethereum-optimism/optimism/op-service/txmgr" @@ -222,30 +221,21 @@ func testSystem4844E2E(t *testing.T, multiBlob bool, daType batcherFlags.DataAva require.Equal(t, maxBlobsPerBlock, numBlobs, fmt.Sprintf("multi-blob: expected to find L1 blob tx with %d blobs", maxBlobsPerBlock)) // blob tx should have filled up all but last blob bcl := sys.L1BeaconHTTPClient() - hashes := toIndexedBlobHashes(blobTx.BlobHashes()...) 
- sidecars, err := bcl.BeaconBlobSideCars(context.Background(), false, sys.L1Slot(blobBlock.Time()), hashes) + blobs, err := bcl.BeaconBlobs(context.Background(), sys.L1Slot(blobBlock.Time()), blobTx.BlobHashes()) require.NoError(t, err) - require.Len(t, sidecars.Data, maxBlobsPerBlock) + require.Len(t, blobs.Data, maxBlobsPerBlock) for i := 0; i < maxBlobsPerBlock-1; i++ { - data, err := sidecars.Data[i].Blob.ToData() + data, err := blobs.Data[i].ToData() require.NoError(t, err) - require.Len(t, data, maxL1TxSize) + assert.Len(t, data, maxL1TxSize, "blob %d should be full", i) } // last blob should only be partially filled - data, err := sidecars.Data[maxBlobsPerBlock-1].Blob.ToData() + data, err := blobs.Data[maxBlobsPerBlock-1].ToData() require.NoError(t, err) require.Less(t, len(data), maxL1TxSize) } } -func toIndexedBlobHashes(hs ...common.Hash) []eth.IndexedBlobHash { - hashes := make([]eth.IndexedBlobHash, 0, len(hs)) - for i, hash := range hs { - hashes = append(hashes, eth.IndexedBlobHash{Index: uint64(i), Hash: hash}) - } - return hashes -} - // TestBatcherAutoDA tests that the batcher with Auto data availability type // correctly chooses the cheaper Ethereum-DA type (calldata or blobs). 
// The L1 chain is set up with a genesis block that has an excess blob gas that leads diff --git a/op-e2e/system/da/l1_beacon_client_test.go b/op-e2e/system/da/l1_beacon_client_test.go index 6b7b186b034..11770e6f4ef 100644 --- a/op-e2e/system/da/l1_beacon_client_test.go +++ b/op-e2e/system/da/l1_beacon_client_test.go @@ -9,10 +9,10 @@ import ( "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/blobstore" "github.com/ethereum-optimism/optimism/op-e2e/e2eutils/fakebeacon" "github.com/ethereum-optimism/optimism/op-service/client" - "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/sources" "github.com/ethereum-optimism/optimism/op-service/testlog" "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/log" "github.com/stretchr/testify/require" ) @@ -52,7 +52,7 @@ func Test404NotFound(t *testing.T) { beaconCfg := sources.L1BeaconClientConfig{FetchAllSidecars: false} cl := sources.NewL1BeaconClient(sources.NewBeaconHTTPClient(client.NewBasicHTTPClient(beaconApi.BeaconAddr(), l)), beaconCfg) - hashes := []eth.IndexedBlobHash{{Index: 1}} - _, err := cl.GetBlobs(context.Background(), eth.L1BlockRef{Number: 10, Time: 120}, hashes) + hashes := []common.Hash{{}} + _, err := cl.GetBlobsByHash(context.Background(), uint64(0), hashes) require.ErrorIs(t, err, ethereum.NotFound) } diff --git a/op-node/cmd/batch_decoder/fetch/fetch.go b/op-node/cmd/batch_decoder/fetch/fetch.go index b34879b47e9..dcb5bac3f68 100644 --- a/op-node/cmd/batch_decoder/fetch/fetch.go +++ b/op-node/cmd/batch_decoder/fetch/fetch.go @@ -13,7 +13,6 @@ import ( "github.com/ethereum-optimism/optimism/op-node/rollup/derive" "github.com/ethereum-optimism/optimism/op-service/bigs" - "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/sources" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" @@ -114,21 +113,8 
@@ func fetchBatchesPerBlock(ctx context.Context, client *ethclient.Client, beacon blobIndex += len(tx.BlobHashes()) continue } - var hashes []eth.IndexedBlobHash - for _, h := range tx.BlobHashes() { - idh := eth.IndexedBlobHash{ - Index: uint64(blobIndex), - Hash: h, - } - hashes = append(hashes, idh) - blobIndex += 1 - } - blobs, err := beacon.GetBlobs(ctx, eth.L1BlockRef{ - Hash: block.Hash(), - Number: block.NumberU64(), - ParentHash: block.ParentHash(), - Time: block.Time(), - }, hashes) + hashes := tx.BlobHashes() + blobs, err := beacon.GetBlobsByHash(ctx, block.Time(), hashes) if err != nil { log.Fatal(fmt.Errorf("failed to fetch blobs: %w", err)) } diff --git a/op-node/config/beacon.go b/op-node/config/beacon.go index 2de46e217aa..c819da044a8 100644 --- a/op-node/config/beacon.go +++ b/op-node/config/beacon.go @@ -15,7 +15,7 @@ import ( ) type L1BeaconEndpointSetup interface { - Setup(ctx context.Context, log log.Logger) (cl apis.BeaconClient, fb []apis.BlobSideCarsClient, err error) + Setup(ctx context.Context, log log.Logger) (cl apis.BeaconClient, fb []apis.BeaconClient, err error) // ShouldIgnoreBeaconCheck returns true if the Beacon-node version check should not halt startup. 
ShouldIgnoreBeaconCheck() bool ShouldFetchAllSidecars() bool @@ -32,7 +32,7 @@ type L1BeaconEndpointConfig struct { var _ L1BeaconEndpointSetup = (*L1BeaconEndpointConfig)(nil) -func (cfg *L1BeaconEndpointConfig) Setup(ctx context.Context, log log.Logger) (cl apis.BeaconClient, fb []apis.BlobSideCarsClient, err error) { +func (cfg *L1BeaconEndpointConfig) Setup(ctx context.Context, log log.Logger) (cl apis.BeaconClient, fb []apis.BeaconClient, err error) { var opts []client.BasicHTTPClientOption if cfg.BeaconHeader != "" { hdr, err := parseHTTPHeader(cfg.BeaconHeader) diff --git a/op-node/node/node.go b/op-node/node/node.go index b4a8df27031..3cbec3404fc 100644 --- a/op-node/node/node.go +++ b/op-node/node/node.go @@ -97,7 +97,7 @@ type L1Source interface { // L1Beacon provides access to L1 beacon chain data, specifically for blob data retrieval. type L1Beacon interface { - GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) + GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) } type OpNode struct { @@ -108,6 +108,8 @@ type OpNode struct { appVersion string metrics *metrics.Metrics + superAuthority rollup.SuperAuthority // Supernode authority for payload validation (may be nil) + l1HeadsSub ethereum.Subscription // Subscription to get L1 heads (automatically re-subscribes on error) l1SafeSub ethereum.Subscription // Subscription to get L1 safe blocks, a.k.a. justified data (polling) l1FinalizedSub ethereum.Subscription // Subscription to get L1 safe blocks, a.k.a. 
justified data (polling) @@ -200,6 +202,7 @@ type InitializationOverrides struct { Beacon L1Beacon RPCHandler *oprpc.Handler MetricsRegistry func(*prometheus.Registry) + SuperAuthority rollup.SuperAuthority // Supernode authority for payload validation } // init progressively creates and sets up all the components of the OpNode @@ -225,6 +228,9 @@ func (n *OpNode) init(ctx context.Context, cfg *config.Config, overrides Initial return fmt.Errorf("failed to init event system: %w", err) } + // Store the supernode authority for payload validation + n.superAuthority = overrides.SuperAuthority + if overrides.Beacon == nil { beacon, err := initL1BeaconAPI(ctx, cfg, n) if err != nil { @@ -607,7 +613,7 @@ func initL2(ctx context.Context, cfg *config.Config, node *OpNode) (*sources.Eng } l2Driver := driver.NewDriver(node.eventSys, node.eventDrain, &cfg.Driver, &cfg.Rollup, cfg.L1ChainConfig, cfg.DependencySet, l2Source, node.l1Source, upstreamFollowSource, - node.beacon, node, node, node.log, node.metrics, cfg.ConfigPersistence, safeDB, &cfg.Sync, sequencerConductor, altDA, indexingMode) + node.beacon, node, node, node.log, node.metrics, cfg.ConfigPersistence, safeDB, &cfg.Sync, sequencerConductor, altDA, indexingMode, node.superAuthority) // Wire up IndexingMode to engine controller for direct procedure call if sys != nil { diff --git a/op-node/rollup/derive/blob_data_source.go b/op-node/rollup/derive/blob_data_source.go index 2c4626941b8..d4ec55fa1b2 100644 --- a/op-node/rollup/derive/blob_data_source.go +++ b/op-node/rollup/derive/blob_data_source.go @@ -93,8 +93,8 @@ func (ds *BlobDataSource) open(ctx context.Context) ([]blobOrCalldata, error) { return data, nil } - // download the actual blob bodies corresponding to the indexed blob hashes - blobs, err := ds.blobsFetcher.GetBlobs(ctx, ds.ref, hashes) + // download the actual blob bodies corresponding to the versioned hashes + blobs, err := ds.blobsFetcher.GetBlobsByHash(ctx, ds.ref.Time, hashes) if errors.Is(err, 
ethereum.NotFound) { // If the L1 block was available, then the blobs should be available too. The only // exception is if the blob retention window has expired, which we will ultimately handle @@ -115,14 +115,12 @@ func (ds *BlobDataSource) open(ctx context.Context) ([]blobOrCalldata, error) { // dataAndHashesFromTxs extracts calldata and datahashes from the input transactions and returns them. It // creates a placeholder blobOrCalldata element for each returned blob hash that must be populated // by fillBlobPointers after blob bodies are retrieved. -func dataAndHashesFromTxs(txs types.Transactions, config *DataSourceConfig, batcherAddr common.Address, logger log.Logger) ([]blobOrCalldata, []eth.IndexedBlobHash) { +func dataAndHashesFromTxs(txs types.Transactions, config *DataSourceConfig, batcherAddr common.Address, logger log.Logger) ([]blobOrCalldata, []common.Hash) { data := []blobOrCalldata{} - var hashes []eth.IndexedBlobHash - blobIndex := 0 // index of each blob in the block's blob sidecar + var hashes []common.Hash for _, tx := range txs { // skip any non-batcher transactions if !isValidBatchTx(tx, config.l1Signer, config.batchInboxAddress, batcherAddr, logger) { - blobIndex += len(tx.BlobHashes()) continue } // handle non-blob batcher transactions by extracting their calldata @@ -136,13 +134,8 @@ func dataAndHashesFromTxs(txs types.Transactions, config *DataSourceConfig, batc log.Warn("blob tx has calldata, which will be ignored", "txhash", tx.Hash()) } for _, h := range tx.BlobHashes() { - idh := eth.IndexedBlobHash{ - Index: uint64(blobIndex), - Hash: h, - } - hashes = append(hashes, idh) + hashes = append(hashes, h) data = append(data, blobOrCalldata{nil, nil}) // will fill in blob pointers after we download them below - blobIndex += 1 } } return data, hashes diff --git a/op-node/rollup/derive/data_source.go b/op-node/rollup/derive/data_source.go index dfeda599501..964dea20aaf 100644 --- a/op-node/rollup/derive/data_source.go +++ 
b/op-node/rollup/derive/data_source.go @@ -22,8 +22,8 @@ type L1TransactionFetcher interface { } type L1BlobsFetcher interface { - // GetBlobs fetches blobs that were confirmed in the given L1 block with the given indexed hashes. - GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) + // GetBlobsByHash fetches blobs that were confirmed at the given timestamp with the given versioned hashes. + GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) } type AltDAInputFetcher interface { diff --git a/op-node/rollup/derive/parse_upgrade_transactions.go b/op-node/rollup/derive/parse_upgrade_transactions.go new file mode 100644 index 00000000000..4793aadc35e --- /dev/null +++ b/op-node/rollup/derive/parse_upgrade_transactions.go @@ -0,0 +1,78 @@ +package derive + +import ( + "encoding/json" + "fmt" + "io" + "math/big" + + "github.com/ethereum-optimism/optimism/op-core/forks" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core/types" +) + +// Network Upgrade Transactions (NUTs) are read from a JSON file and +// converted into deposit transactions. + +// NUTMetadata contains version information for the NUT bundle format. +type NUTMetadata struct { + Version string `json:"version"` +} + +// NetworkUpgradeTransaction defines a single deposit transaction within a NUT bundle. +type NetworkUpgradeTransaction struct { + Intent string `json:"intent"` + From common.Address `json:"from"` + To *common.Address `json:"to"` + Data hexutil.Bytes `json:"data"` + GasLimit uint64 `json:"gasLimit"` +} + +// NUTBundle is the top-level structure of a NUT file. +type NUTBundle struct { + ForkName forks.Name `json:"-"` + Metadata NUTMetadata `json:"metadata"` + Transactions []NetworkUpgradeTransaction `json:"transactions"` +} + +// ReadNUTBundle reads and parses a NUT bundle from an io.Reader. 
The fork name +// is used to namespace each transaction's intent when deriving source hashes. +func ReadNUTBundle(fork forks.Name, r io.Reader) (*NUTBundle, error) { + var bundle NUTBundle + if err := json.NewDecoder(r).Decode(&bundle); err != nil { + return nil, fmt.Errorf("failed to parse NUT bundle: %w", err) + } + bundle.ForkName = fork + return &bundle, nil +} + +// ToDepositTransactions converts the bundle's transactions into serialized deposit transactions. +func (b *NUTBundle) ToDepositTransactions() ([]hexutil.Bytes, error) { + txs := make([]hexutil.Bytes, 0, len(b.Transactions)) + for i, nutTx := range b.Transactions { + if nutTx.Intent == "" { + return nil, fmt.Errorf("tx %d: missing intent", i) + } + + qualifiedIntent := fmt.Sprintf("%s %d: %s", b.ForkName, i, nutTx.Intent) + source := UpgradeDepositSource{Intent: qualifiedIntent} + depTx := &types.DepositTx{ + SourceHash: source.SourceHash(), + From: nutTx.From, + To: nutTx.To, + Mint: big.NewInt(0), + Value: big.NewInt(0), + Gas: nutTx.GasLimit, + IsSystemTransaction: false, + Data: nutTx.Data, + } + + encoded, err := types.NewTx(depTx).MarshalBinary() + if err != nil { + return nil, fmt.Errorf("tx %d: failed to marshal deposit tx: %w", i, err) + } + txs = append(txs, encoded) + } + return txs, nil +} diff --git a/op-node/rollup/derive/parse_upgrade_transactions_test.go b/op-node/rollup/derive/parse_upgrade_transactions_test.go new file mode 100644 index 00000000000..cc8d70a97af --- /dev/null +++ b/op-node/rollup/derive/parse_upgrade_transactions_test.go @@ -0,0 +1,126 @@ +package derive + +import ( + "bytes" + "math/big" + "os" + "testing" + + "github.com/ethereum-optimism/optimism/op-core/forks" + "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/require" +) + +func TestReadNUTBundle(t *testing.T) { + f, err := os.Open("testdata/test-nut.json") + require.NoError(t, err) + defer f.Close() + + bundle, err := ReadNUTBundle("Test", f) + require.NoError(t, err) + + require.Equal(t, 
forks.Name("Test"), bundle.ForkName) + require.Equal(t, "1.0.0", bundle.Metadata.Version) + require.Len(t, bundle.Transactions, 2) + + // First tx: no value field, zero address from + tx0 := bundle.Transactions[0] + require.Equal(t, "First Transaction", tx0.Intent) + require.Equal(t, common.Address{}, tx0.From) + require.NotNil(t, tx0.To) + require.Equal(t, common.HexToAddress("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"), *tx0.To) + require.Equal(t, common.FromHex("0xabcdef"), []byte(tx0.Data)) + require.Equal(t, uint64(1000000), tx0.GasLimit) + + // Second tx: non-zero from + tx1 := bundle.Transactions[1] + require.Equal(t, "Second Transaction", tx1.Intent) + require.Equal(t, common.HexToAddress("0x000000000000000000000000000000000000abba"), tx1.From) + require.NotNil(t, tx1.To) + require.Equal(t, uint64(5000000), tx1.GasLimit) +} + +func TestNUTBundleToDepositTransactions(t *testing.T) { + f, err := os.Open("testdata/test-nut.json") + require.NoError(t, err) + defer f.Close() + + bundle, err := ReadNUTBundle("Test", f) + require.NoError(t, err) + + txs, err := bundle.ToDepositTransactions() + require.NoError(t, err) + require.Len(t, txs, 2) + + // Verify first tx: qualified intent is "Test 0: First Transaction" + expectedSource0 := UpgradeDepositSource{Intent: "Test 0: First Transaction"} + from0, dep0 := toDepositTxn(t, txs[0]) + require.Equal(t, common.Address{}, from0) + require.Equal(t, expectedSource0.SourceHash(), dep0.SourceHash()) + require.NotNil(t, dep0.To()) + require.Equal(t, common.HexToAddress("0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"), *dep0.To()) + require.Equal(t, uint64(1000000), dep0.Gas()) + require.Equal(t, common.FromHex("0xabcdef"), dep0.Data()) + require.Equal(t, big.NewInt(0), dep0.Value()) + + // Verify second tx: qualified intent is "Test 1: Second Transaction" + expectedSource1 := UpgradeDepositSource{Intent: "Test 1: Second Transaction"} + from1, dep1 := toDepositTxn(t, txs[1]) + require.Equal(t, 
common.HexToAddress("0x000000000000000000000000000000000000abba"), from1) + require.Equal(t, expectedSource1.SourceHash(), dep1.SourceHash()) + require.Equal(t, uint64(5000000), dep1.Gas()) + require.Equal(t, big.NewInt(0), dep1.Value()) + // Source hashes must be unique + require.NotEqual(t, dep0.SourceHash(), dep1.SourceHash()) +} + +func TestReadNUTBundleInvalidJSON(t *testing.T) { + _, err := ReadNUTBundle("Test", bytes.NewReader([]byte(`{invalid`))) + require.Error(t, err) + require.Contains(t, err.Error(), "failed to parse NUT bundle") +} + +func TestNUTBundleMissingIntent(t *testing.T) { + jsonData := []byte(`{ + "metadata": {"version": "1.0.0"}, + "transactions": [{ + "from": "0x0000000000000000000000000000000000000000", + "to": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "data": "0xabcdef", + "gasLimit": 1000000 + }] + }`) + + bundle, err := ReadNUTBundle("Test", bytes.NewReader(jsonData)) + require.NoError(t, err) + + _, err = bundle.ToDepositTransactions() + require.Error(t, err) + require.Contains(t, err.Error(), "missing intent") +} + +// TestNUTBundleNullTo verifies that "to": null in JSON produces a contract creation (deploy) transaction. +// Although NUTs are expected to use Arachnid's deterministic deployer, this sending to null +// is how previous deployments have been handled and is useful to maintain going forward. 
+func TestNUTBundleNullTo(t *testing.T) { + jsonData := []byte(`{ + "metadata": {"version": "1.0.0"}, + "transactions": [{ + "intent": "Deploy Contract", + "from": "0x4210000000000000000000000000000000000006", + "to": null, + "data": "0xdeadbeef", + "gasLimit": 500000 + }] + }`) + + bundle, err := ReadNUTBundle("Test", bytes.NewReader(jsonData)) + require.NoError(t, err) + require.Nil(t, bundle.Transactions[0].To) + + txs, err := bundle.ToDepositTransactions() + require.NoError(t, err) + + _, dep := toDepositTxn(t, txs[0]) + require.Nil(t, dep.To()) +} diff --git a/op-node/rollup/derive/testdata/test-nut.json b/op-node/rollup/derive/testdata/test-nut.json new file mode 100644 index 00000000000..292c19ad91f --- /dev/null +++ b/op-node/rollup/derive/testdata/test-nut.json @@ -0,0 +1,21 @@ +{ + "metadata": { + "version": "1.0.0" + }, + "transactions": [ + { + "intent": "First Transaction", + "to": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "data": "0xabcdef", + "gasLimit": 1000000, + "from": "0x0000000000000000000000000000000000000000" + }, + { + "intent": "Second Transaction", + "to": "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266", + "data": "0xabcdef", + "gasLimit": 5000000, + "from": "0x000000000000000000000000000000000000abba" + } + ] +} diff --git a/op-node/rollup/driver/driver.go b/op-node/rollup/driver/driver.go index 2b23d62e5dc..4705b9385b9 100644 --- a/op-node/rollup/driver/driver.go +++ b/op-node/rollup/driver/driver.go @@ -49,6 +49,7 @@ func NewDriver( sequencerConductor conductor.SequencerConductor, altDA AltDAIface, indexingMode bool, + superAuthority rollup.SuperAuthority, ) *Driver { driverCtx, driverCancel := context.WithCancel(context.Background()) @@ -60,7 +61,7 @@ func NewDriver( l1 = metered.NewMeteredL1Fetcher(l1Tracker, metrics) verifConfDepth := confdepth.NewConfDepth(driverCfg.VerifierConfDepth, statusTracker.L1Head, l1) - ec := engine.NewEngineController(driverCtx, l2, log, metrics, cfg, syncCfg, indexingMode, l1, 
sys.Register("engine-controller", nil)) + ec := engine.NewEngineController(driverCtx, l2, log, metrics, cfg, syncCfg, indexingMode, l1, sys.Register("engine-controller", nil), superAuthority) // TODO(#17115): Refactor dependency cycles ec.SetCrossUpdateHandler(statusTracker) diff --git a/op-node/rollup/engine/engine_controller.go b/op-node/rollup/engine/engine_controller.go index 7e89edd3f4a..5e6836b53f3 100644 --- a/op-node/rollup/engine/engine_controller.go +++ b/op-node/rollup/engine/engine_controller.go @@ -158,6 +158,9 @@ type EngineController struct { // Handler for cross-unsafe and cross-safe updates crossUpdateHandler CrossUpdateHandler + // SuperAuthority for payload validation (may be nil when not in supernode context) + superAuthority rollup.SuperAuthority + unsafePayloads *PayloadsQueue // queue of unsafe payloads, ordered by ascending block number, may have gaps and duplicates } @@ -165,6 +168,7 @@ var _ event.Deriver = (*EngineController)(nil) func NewEngineController(ctx context.Context, engine ExecEngine, log log.Logger, m opmetrics.Metricer, rollupCfg *rollup.Config, syncCfg *sync.Config, supervisorEnabled bool, l1 sync.L1Chain, emitter event.Emitter, + superAuthority rollup.SuperAuthority, ) *EngineController { syncStatus := syncStatusCL if syncCfg.SyncMode == sync.ELSync { @@ -184,6 +188,7 @@ func NewEngineController(ctx context.Context, engine ExecEngine, log log.Logger, l1: l1, ctx: ctx, emitter: emitter, + superAuthority: superAuthority, unsafePayloads: NewPayloadsQueue(log, maxUnsafePayloadsMemory, payloadMemSize), } } diff --git a/op-node/rollup/engine/engine_controller_test.go b/op-node/rollup/engine/engine_controller_test.go index d0a0e4d646b..dd865974af3 100644 --- a/op-node/rollup/engine/engine_controller_test.go +++ b/op-node/rollup/engine/engine_controller_test.go @@ -22,7 +22,7 @@ import ( func TestInvalidPayloadDropsHead(t *testing.T) { emitter := &testutils.MockEmitter{} - ec := NewEngineController(context.Background(), nil, 
testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{}, false, &testutils.MockL1Source{}, emitter) + ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{}, false, &testutils.MockL1Source{}, emitter, nil) payload := ð.ExecutionPayloadEnvelope{ExecutionPayload: ð.ExecutionPayload{ BlockHash: common.Hash{0x01}, @@ -110,7 +110,7 @@ func TestOnUnsafePayload_EnqueueEmit(t *testing.T) { cfg, _, _, payloadA1 := buildSimpleCfgAndPayload(t) emitter := &testutils.MockEmitter{} - ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, cfg, &sync.Config{}, false, &testutils.MockL1Source{}, emitter) + ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, cfg, &sync.Config{}, false, &testutils.MockL1Source{}, emitter, nil) emitter.ExpectOnce(PayloadInvalidEvent{}) emitter.ExpectOnce(ForkchoiceUpdateEvent{}) @@ -127,7 +127,7 @@ func TestOnForkchoiceUpdate_ProcessRetryAndPop(t *testing.T) { emitter := &testutils.MockEmitter{} mockEngine := &testutils.MockEngine{} - cl := NewEngineController(context.Background(), mockEngine, testlog.Logger(t, 0), metrics.NoopMetrics, cfg, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter) + cl := NewEngineController(context.Background(), mockEngine, testlog.Logger(t, 0), metrics.NoopMetrics, cfg, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter, nil) // queue payload A1 emitter.ExpectOnceType("UnsafeUpdateEvent") @@ -156,7 +156,7 @@ func TestPeekUnsafePayload(t *testing.T) { cfg, _, _, payloadA1 := buildSimpleCfgAndPayload(t) emitter := &testutils.MockEmitter{} - ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, cfg, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter) + ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), 
metrics.NoopMetrics, cfg, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter, nil) // empty -> zero _, ref := ec.PeekUnsafePayload() @@ -174,7 +174,7 @@ func TestPeekUnsafePayload(t *testing.T) { func TestPeekUnsafePayload_OnDeriveErrorReturnsZero(t *testing.T) { // missing L1-info in txs will cause derive error emitter := &testutils.MockEmitter{} - ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter) + ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter, nil) bad := ð.ExecutionPayloadEnvelope{ExecutionPayload: ð.ExecutionPayload{BlockNumber: 1, BlockHash: common.Hash{0xaa}}} _ = ec.unsafePayloads.Push(bad) @@ -184,7 +184,7 @@ func TestPeekUnsafePayload_OnDeriveErrorReturnsZero(t *testing.T) { func TestInvalidPayloadForNonHead_NoDrop(t *testing.T) { emitter := &testutils.MockEmitter{} - ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter) + ec := NewEngineController(context.Background(), nil, testlog.Logger(t, 0), metrics.NoopMetrics, &rollup.Config{}, &sync.Config{SyncMode: sync.CLSync}, false, &testutils.MockL1Source{}, emitter, nil) // Head payload (lower block number) head := ð.ExecutionPayloadEnvelope{ExecutionPayload: ð.ExecutionPayload{ @@ -212,3 +212,4 @@ func TestInvalidPayloadForNonHead_NoDrop(t *testing.T) { } // note: nil-envelope behavior is not tested to match current implementation +// SuperAuthority tests are in super_authority_deny_test.go diff --git a/op-node/rollup/engine/payload_process.go b/op-node/rollup/engine/payload_process.go index b80968167d7..d741741182c 100644 --- 
a/op-node/rollup/engine/payload_process.go +++ b/op-node/rollup/engine/payload_process.go @@ -28,6 +28,26 @@ func (e *EngineController) onPayloadProcess(ctx context.Context, ev PayloadProce rpcCtx, cancel := context.WithTimeout(e.ctx, payloadProcessTimeout) defer cancel() + // Check SuperAuthority denylist before inserting the payload + if e.superAuthority != nil && ev.Envelope != nil && ev.Envelope.ExecutionPayload != nil { + payload := ev.Envelope.ExecutionPayload + denied, err := e.superAuthority.IsDenied(uint64(payload.BlockNumber), payload.BlockHash) + if err != nil { + e.log.Error("Failed to check SuperAuthority denylist, proceeding with payload", + "blockNumber", payload.BlockNumber, + "blockHash", payload.BlockHash, + "err", err, + ) + } else if denied { + e.log.Warn("Payload denied by SuperAuthority", + "blockNumber", payload.BlockNumber, + "blockHash", payload.BlockHash, + ) + e.emitDepositsOnlyPayloadAttributesRequest(ctx, ev.Ref.ParentID(), ev.DerivedFrom) + return + } + } + insertStart := time.Now() status, err := e.engine.NewPayload(rpcCtx, ev.Envelope.ExecutionPayload, ev.Envelope.ParentBeaconBlockRoot) diff --git a/op-node/rollup/engine/super_authority_deny_test.go b/op-node/rollup/engine/super_authority_deny_test.go new file mode 100644 index 00000000000..fa030f1d396 --- /dev/null +++ b/op-node/rollup/engine/super_authority_deny_test.go @@ -0,0 +1,157 @@ +package engine + +import ( + "context" + "fmt" + "testing" + + "github.com/ethereum/go-ethereum/common" + + "github.com/ethereum-optimism/optimism/op-node/metrics" + "github.com/ethereum-optimism/optimism/op-node/rollup" + "github.com/ethereum-optimism/optimism/op-node/rollup/derive" + "github.com/ethereum-optimism/optimism/op-node/rollup/sync" + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-service/testlog" + "github.com/ethereum-optimism/optimism/op-service/testutils" +) + +// mockSuperAuthority implements SuperAuthority for testing. 
+type mockSuperAuthority struct { + deniedBlocks map[uint64]common.Hash + shouldError bool +} + +func newMockSuperAuthority() *mockSuperAuthority { + return &mockSuperAuthority{ + deniedBlocks: make(map[uint64]common.Hash), + } +} + +func (m *mockSuperAuthority) denyBlock(blockNumber uint64, hash common.Hash) { + m.deniedBlocks[blockNumber] = hash +} + +func (m *mockSuperAuthority) IsDenied(blockNumber uint64, payloadHash common.Hash) (bool, error) { + if m.shouldError { + return false, fmt.Errorf("superauthority check failed") + } + deniedHash, exists := m.deniedBlocks[blockNumber] + if exists && deniedHash == payloadHash { + return true, nil + } + return false, nil +} + +// superAuthorityTestCase defines a test scenario for SuperAuthority behavior +type superAuthorityTestCase struct { + name string + // setup is called to configure the test scenario + // Returns: engine (nil if not needed), superAuthority (nil if testing nil case), derivedFrom + setup func(payload *eth.ExecutionPayloadEnvelope) (*testutils.MockEngine, rollup.SuperAuthority, eth.L1BlockRef) + // expectations sets up expected calls on the emitter and engine + expectations func(emitter *testutils.MockEmitter, engine *testutils.MockEngine, payload *eth.ExecutionPayloadEnvelope) +} + +func TestSuperAuthority(t *testing.T) { + tests := []superAuthorityTestCase{ + { + name: "DeniedPayload_EmitsDepositsOnlyRequest", + setup: func(payload *eth.ExecutionPayloadEnvelope) (*testutils.MockEngine, rollup.SuperAuthority, eth.L1BlockRef) { + sa := newMockSuperAuthority() + sa.denyBlock(uint64(payload.ExecutionPayload.BlockNumber), payload.ExecutionPayload.BlockHash) + // Need DerivedFrom for Holocene path + return nil, sa, eth.L1BlockRef{Number: 1} + }, + expectations: func(emitter *testutils.MockEmitter, engine *testutils.MockEngine, payload *eth.ExecutionPayloadEnvelope) { + emitter.ExpectOnceType("DepositsOnlyPayloadAttributesRequestEvent") + }, + }, + { + name: "AllowedPayload_Proceeds", + setup: 
func(payload *eth.ExecutionPayloadEnvelope) (*testutils.MockEngine, rollup.SuperAuthority, eth.L1BlockRef) { + sa := newMockSuperAuthority() + // Do NOT deny the payload + return &testutils.MockEngine{}, sa, eth.L1BlockRef{} + }, + expectations: func(emitter *testutils.MockEmitter, engine *testutils.MockEngine, payload *eth.ExecutionPayloadEnvelope) { + engine.ExpectNewPayload(payload.ExecutionPayload, nil, ð.PayloadStatusV1{Status: eth.ExecutionValid}, nil) + emitter.ExpectOnceType("PayloadSuccessEvent") + }, + }, + { + name: "Error_ProceedsWithPayload", + setup: func(payload *eth.ExecutionPayloadEnvelope) (*testutils.MockEngine, rollup.SuperAuthority, eth.L1BlockRef) { + sa := newMockSuperAuthority() + sa.shouldError = true + return &testutils.MockEngine{}, sa, eth.L1BlockRef{} + }, + expectations: func(emitter *testutils.MockEmitter, engine *testutils.MockEngine, payload *eth.ExecutionPayloadEnvelope) { + // Despite error, expect NewPayload (graceful degradation) + engine.ExpectNewPayload(payload.ExecutionPayload, nil, ð.PayloadStatusV1{Status: eth.ExecutionValid}, nil) + emitter.ExpectOnceType("PayloadSuccessEvent") + }, + }, + { + name: "NilAuthority_Proceeds", + setup: func(payload *eth.ExecutionPayloadEnvelope) (*testutils.MockEngine, rollup.SuperAuthority, eth.L1BlockRef) { + return &testutils.MockEngine{}, nil, eth.L1BlockRef{} + }, + expectations: func(emitter *testutils.MockEmitter, engine *testutils.MockEngine, payload *eth.ExecutionPayloadEnvelope) { + engine.ExpectNewPayload(payload.ExecutionPayload, nil, ð.PayloadStatusV1{Status: eth.ExecutionValid}, nil) + emitter.ExpectOnceType("PayloadSuccessEvent") + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + runSuperAuthorityTest(t, tc) + }) + } +} + +func runSuperAuthorityTest(t *testing.T, tc superAuthorityTestCase) { + cfg, _, _, payload := buildSimpleCfgAndPayload(t) + emitter := &testutils.MockEmitter{} + + engine, sa, derivedFrom := tc.setup(payload) + 
tc.expectations(emitter, engine, payload) + + ec := NewEngineController( + context.Background(), + engine, + testlog.Logger(t, 0), + metrics.NoopMetrics, + cfg, + &sync.Config{}, + false, + &testutils.MockL1Source{}, + emitter, + sa, + ) + + blockRef := eth.L2BlockRef{ + Hash: payload.ExecutionPayload.BlockHash, + Number: uint64(payload.ExecutionPayload.BlockNumber), + ParentHash: payload.ExecutionPayload.ParentHash, + Time: uint64(payload.ExecutionPayload.Timestamp), + } + + ec.onPayloadProcess(context.Background(), PayloadProcessEvent{ + Envelope: payload, + Ref: blockRef, + DerivedFrom: derivedFrom, + }) + + if engine != nil { + engine.AssertExpectations(t) + } + emitter.AssertExpectations(t) +} + +// Ensure derive.DepositsOnlyPayloadAttributesRequestEvent is referenced to verify import +var _ = derive.DepositsOnlyPayloadAttributesRequestEvent{} + +// Ensure rollup is imported (used by buildSimpleCfgAndPayload) +var _ *rollup.Config diff --git a/op-node/rollup/iface.go b/op-node/rollup/iface.go index f6cf0882de2..957a5c3d320 100644 --- a/op-node/rollup/iface.go +++ b/op-node/rollup/iface.go @@ -1,6 +1,20 @@ package rollup -import "github.com/ethereum-optimism/optimism/op-service/eth" +import ( + "github.com/ethereum/go-ethereum/common" + + "github.com/ethereum-optimism/optimism/op-service/eth" +) + +// SuperAuthority provides payload validation functionality from a supernode. +// When running inside a supernode, this allows the engine controller to check +// if payloads are denied before applying them, enabling coordinated block invalidation. +type SuperAuthority interface { + // IsDenied checks if a payload hash is denied at the given block number. + // Returns true if the payload should not be applied. + // The error indicates if the check could not be performed (should be logged but not fatal). + IsDenied(blockNumber uint64, payloadHash common.Hash) (bool, error) +} // SafeHeadListener is called when the safe head is updated. 
// The safe head may advance by more than one block in a single update diff --git a/op-program/client/driver/driver.go b/op-program/client/driver/driver.go index c2ba9372fc6..1810bf318c7 100644 --- a/op-program/client/driver/driver.go +++ b/op-program/client/driver/driver.go @@ -46,7 +46,7 @@ func NewDriver(logger log.Logger, cfg *rollup.Config, depSet derive.DependencySe pipelineDeriver.AttachEmitter(d) syncCfg := &sync.Config{SyncMode: sync.CLSync} - ec := engine.NewEngineController(context.Background(), l2Source, logger, metrics.NoopMetrics, cfg, syncCfg, false, l1Source, d) + ec := engine.NewEngineController(context.Background(), l2Source, logger, metrics.NoopMetrics, cfg, syncCfg, false, l1Source, d, nil) attrHandler := attributes.NewAttributesHandler(logger, cfg, context.Background(), l2Source, ec) ec.SetAttributesResetter(attrHandler) diff --git a/op-program/client/l1/blob_fetcher.go b/op-program/client/l1/blob_fetcher.go index dd704bf8dbd..bee80f84413 100644 --- a/op-program/client/l1/blob_fetcher.go +++ b/op-program/client/l1/blob_fetcher.go @@ -6,6 +6,7 @@ import ( "github.com/ethereum-optimism/optimism/op-node/rollup/derive" "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/log" ) @@ -25,11 +26,12 @@ func NewBlobFetcher(logger log.Logger, oracle Oracle) *BlobFetcher { } } -// GetBlobs fetches blobs that were confirmed in the given L1 block with the given indexed blob hashes. -func (b *BlobFetcher) GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { +// GetBlobsByHash fetches blobs that were confirmed at the given timestamp with the given versioned hashes. 
+func (b *BlobFetcher) GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) { blobs := make([]*eth.Blob, len(hashes)) + ref := eth.L1BlockRef{Time: time} for i := 0; i < len(hashes); i++ { - b.logger.Info("Fetching blob", "l1_ref", ref.Hash, "blob_versioned_hash", hashes[i].Hash, "index", hashes[i].Index) + b.logger.Info("Fetching blob", "time", time, "blob_versioned_hash", hashes[i]) blobs[i] = b.oracle.GetBlob(ref, hashes[i]) } return blobs, nil diff --git a/op-program/client/l1/cache.go b/op-program/client/l1/cache.go index ab8154195ec..23d3b2d51ad 100644 --- a/op-program/client/l1/cache.go +++ b/op-program/client/l1/cache.go @@ -78,12 +78,11 @@ func (o *CachingOracle) ReceiptsByBlockHash(blockHash common.Hash) (eth.BlockInf return block, rcpts } -func (o *CachingOracle) GetBlob(ref eth.L1BlockRef, blobHash eth.IndexedBlobHash) *eth.Blob { - // Create a 32 byte hash key by hashing `blobHash.Hash ++ ref.Time ++ blobHash.Index` - hashBuf := make([]byte, 48) - copy(hashBuf[0:32], blobHash.Hash[:]) +func (o *CachingOracle) GetBlob(ref eth.L1BlockRef, blobHash common.Hash) *eth.Blob { + // Create a 32 byte hash key by hashing `blobHash ++ ref.Time` + hashBuf := make([]byte, 40) + copy(hashBuf[0:32], blobHash[:]) binary.BigEndian.PutUint64(hashBuf[32:], ref.Time) - binary.BigEndian.PutUint64(hashBuf[40:], blobHash.Index) cacheKey := crypto.Keccak256Hash(hashBuf) blob, ok := o.blobs.Get(cacheKey) diff --git a/op-program/client/l1/cache_test.go b/op-program/client/l1/cache_test.go index f47481b47a2..6ad54706b83 100644 --- a/op-program/client/l1/cache_test.go +++ b/op-program/client/l1/cache_test.go @@ -80,18 +80,18 @@ func TestCachingOracle_GetBlobs(t *testing.T) { oracle := NewCachingOracle(stub) l1BlockRef := eth.L1BlockRef{Time: 0} - indexedBlobHash := eth.IndexedBlobHash{Hash: [32]byte{0xFA, 0xCA, 0xDE}, Index: 0} + blobHash := common.Hash{0xFA, 0xCA, 0xDE} blob := eth.Blob{0xFF} // Initial call retrieves from the stub - 
stub.Blobs[l1BlockRef] = make(map[eth.IndexedBlobHash]*eth.Blob) - stub.Blobs[l1BlockRef][indexedBlobHash] = &blob - actualBlob := oracle.GetBlob(l1BlockRef, indexedBlobHash) + stub.Blobs[l1BlockRef] = make(map[common.Hash]*eth.Blob) + stub.Blobs[l1BlockRef][blobHash] = &blob + actualBlob := oracle.GetBlob(l1BlockRef, blobHash) require.Equal(t, &blob, actualBlob) // Later calls should retrieve from cache - delete(stub.Blobs[l1BlockRef], indexedBlobHash) - actualBlob = oracle.GetBlob(l1BlockRef, indexedBlobHash) + delete(stub.Blobs[l1BlockRef], blobHash) + actualBlob = oracle.GetBlob(l1BlockRef, blobHash) require.Equal(t, &blob, actualBlob) } diff --git a/op-program/client/l1/hints.go b/op-program/client/l1/hints.go index 1fcb77d761e..4f358747104 100644 --- a/op-program/client/l1/hints.go +++ b/op-program/client/l1/hints.go @@ -1,6 +1,8 @@ package l1 import ( + "encoding/binary" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" @@ -48,6 +50,21 @@ func (l BlobHint) Hint() string { return HintL1Blob + " " + hexutil.Encode(l) } +// NewBlobHint constructs a 40 byte blob hint with timestamp. +func NewBlobHint(blobHash common.Hash, timeStamp uint64) BlobHint { + metaData := make([]byte, 8) + binary.BigEndian.PutUint64(metaData[:], timeStamp) + return BlobHint(append(blobHash[:], metaData[:]...)) +} + +// NewLegacyBlobHint is deprecated, do not use. Constructs a 48 byte blob hint with timestamp and index. 
+func NewLegacyBlobHint(blobHash common.Hash, index uint64, timeStamp uint64) BlobHint { + metaData := make([]byte, 16) + binary.BigEndian.PutUint64(metaData[0:8], index) + binary.BigEndian.PutUint64(metaData[8:16], timeStamp) + return BlobHint(append(blobHash[:], metaData[:]...)) +} + type PrecompileHint []byte var _ preimage.Hint = PrecompileHint{} diff --git a/op-program/client/l1/oracle.go b/op-program/client/l1/oracle.go index f8caa68e4ea..0a7400ce3fc 100644 --- a/op-program/client/l1/oracle.go +++ b/op-program/client/l1/oracle.go @@ -29,7 +29,7 @@ type Oracle interface { ReceiptsByBlockHash(blockHash common.Hash) (eth.BlockInfo, types.Receipts) // GetBlob retrieves the blob with the given hash. - GetBlob(ref eth.L1BlockRef, blobHash eth.IndexedBlobHash) *eth.Blob + GetBlob(ref eth.L1BlockRef, blobHash common.Hash) *eth.Blob // Precompile retrieves the result and success indicator of a precompile call for the given input. Precompile(precompileAddress common.Address, input []byte, requiredGas uint64) ([]byte, bool) @@ -99,14 +99,10 @@ func (p *PreimageOracle) ReceiptsByBlockHash(blockHash common.Hash) (eth.BlockIn return info, receipts } -func (p *PreimageOracle) GetBlob(ref eth.L1BlockRef, blobHash eth.IndexedBlobHash) *eth.Blob { +func (p *PreimageOracle) GetBlob(ref eth.L1BlockRef, blobHash common.Hash) *eth.Blob { // Send a hint for the blob commitment & blob field elements. - blobReqMeta := make([]byte, 16) - binary.BigEndian.PutUint64(blobReqMeta[0:8], blobHash.Index) - binary.BigEndian.PutUint64(blobReqMeta[8:16], ref.Time) - p.hint.Hint(BlobHint(append(blobHash.Hash[:], blobReqMeta...))) - - commitment := p.oracle.Get(preimage.Sha256Key(blobHash.Hash)) + p.hint.Hint(NewBlobHint(blobHash, ref.Time)) + commitment := p.oracle.Get(preimage.Sha256Key(blobHash)) // Reconstruct the full blob from the 4096 field elements. 
blob := eth.Blob{} @@ -116,7 +112,6 @@ func (p *PreimageOracle) GetBlob(ref eth.L1BlockRef, blobHash eth.IndexedBlobHas rootOfUnity := RootsOfUnity[i].Bytes() copy(fieldElemKey[48:], rootOfUnity[:]) fieldElement := p.oracle.Get(preimage.BlobKey(crypto.Keccak256(fieldElemKey))) - copy(blob[i<<5:(i+1)<<5], fieldElement[:]) } diff --git a/op-program/client/l1/oracle_test.go b/op-program/client/l1/oracle_test.go index f0174df43cd..c4a5c7b8563 100644 --- a/op-program/client/l1/oracle_test.go +++ b/op-program/client/l1/oracle_test.go @@ -127,16 +127,15 @@ func TestGetBlob(t *testing.T) { } // Setup expected hint - blobReqMeta := make([]byte, 16) - binary.BigEndian.PutUint64(blobReqMeta[0:8], indexedBlobHash.Index) - binary.BigEndian.PutUint64(blobReqMeta[8:16], blockRef.Time) + blobReqMeta := make([]byte, 8) + binary.BigEndian.PutUint64(blobReqMeta[0:8], blockRef.Time) expectedBlobHint := BlobHint(append(indexedBlobHash.Hash[:], blobReqMeta...)).Hint() po, hints := createTestPreimageOracle(t, preimages) // Get Blob and verify expectations hints.On("hint", expectedBlobHint).Once().Return() - actualBlob := po.GetBlob(blockRef, indexedBlobHash) + actualBlob := po.GetBlob(blockRef, indexedBlobHash.Hash) hints.AssertExpectations(t) require.Equal(t, blob[:], actualBlob[:]) }) diff --git a/op-program/client/l1/test/stub_oracle.go b/op-program/client/l1/test/stub_oracle.go index c4579fbe559..256bf4ba2fd 100644 --- a/op-program/client/l1/test/stub_oracle.go +++ b/op-program/client/l1/test/stub_oracle.go @@ -22,8 +22,8 @@ type StubOracle struct { // Rcpts maps Block hash to receipts Rcpts map[common.Hash]types.Receipts - // Blobs maps indexed blob hash to l1 block ref to blob - Blobs map[eth.L1BlockRef]map[eth.IndexedBlobHash]*eth.Blob + // Blobs maps l1 block ref to blob hash to blob + Blobs map[eth.L1BlockRef]map[common.Hash]*eth.Blob // PcmpResults maps hashed input to the results of precompile calls PcmpResults map[common.Hash][]byte @@ -35,7 +35,7 @@ func NewStubOracle(t 
*testing.T) *StubOracle { Blocks: make(map[common.Hash]eth.BlockInfo), Txs: make(map[common.Hash]types.Transactions), Rcpts: make(map[common.Hash]types.Receipts), - Blobs: make(map[eth.L1BlockRef]map[eth.IndexedBlobHash]*eth.Blob), + Blobs: make(map[eth.L1BlockRef]map[common.Hash]*eth.Blob), PcmpResults: make(map[common.Hash][]byte), } } @@ -64,14 +64,14 @@ func (o StubOracle) ReceiptsByBlockHash(blockHash common.Hash) (eth.BlockInfo, t return o.HeaderByBlockHash(blockHash), rcpts } -func (o StubOracle) GetBlob(ref eth.L1BlockRef, blobHash eth.IndexedBlobHash) *eth.Blob { +func (o StubOracle) GetBlob(ref eth.L1BlockRef, blobHash common.Hash) *eth.Blob { blobMap, ok := o.Blobs[ref] if !ok { o.t.Fatalf("unknown blob ref %s", ref) } blob, ok := blobMap[blobHash] if !ok { - o.t.Fatalf("unknown blob hash %s %d", blobHash.Hash, blobHash.Index) + o.t.Fatalf("unknown blob hash %s", blobHash) } return blob } diff --git a/op-program/host/prefetcher/prefetcher.go b/op-program/host/prefetcher/prefetcher.go index ece6fa7ad1b..5a3dac242a0 100644 --- a/op-program/host/prefetcher/prefetcher.go +++ b/op-program/host/prefetcher/prefetcher.go @@ -54,7 +54,7 @@ type L1Source interface { } type L1BlobSource interface { - GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) + GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) } type Prefetcher struct { @@ -316,23 +316,14 @@ func (p *Prefetcher) prefetch(ctx context.Context, hint string) error { } return p.storeReceipts(receipts) case l1.HintL1Blob: - if len(hintBytes) != 48 { - return fmt.Errorf("invalid blob hint: %x", hint) - } - - blobVersionHash := common.Hash(hintBytes[:32]) - blobHashIndex := binary.BigEndian.Uint64(hintBytes[32:40]) - refTimestamp := binary.BigEndian.Uint64(hintBytes[40:48]) - - // Fetch the blob for the indexed blob hash passed in the hint. 
- indexedBlobHash := eth.IndexedBlobHash{ - Hash: blobVersionHash, - Index: blobHashIndex, + blobVersionHash, refTimestamp, err := parseBlobHint(hintBytes) + if err != nil { + return err } - // We pass an `eth.L1BlockRef`, but `GetBlobs` only uses the timestamp, which we received in the hint. - blobs, err := p.l1BlobFetcher.GetBlobs(ctx, eth.L1BlockRef{Time: refTimestamp}, []eth.IndexedBlobHash{indexedBlobHash}) + // Fetch the blob for the versioned hash passed in the hint. + blobs, err := p.l1BlobFetcher.GetBlobsByHash(ctx, refTimestamp, []common.Hash{blobVersionHash}) if err != nil || len(blobs) != 1 { - return fmt.Errorf("failed to fetch blobs for %s %d: %w", blobVersionHash, blobHashIndex, err) + return fmt.Errorf("failed to fetch blob for %s: %w", blobVersionHash, err) } blob := blobs[0] kzgCommitment, err := blob.ComputeKZGCommitment() @@ -591,3 +582,24 @@ func parseHint(hint string) (string, []byte, error) { func getPrecompiledContract(address common.Address) vm.PrecompiledContract { return vm.PrecompiledContractsPrague[address] } + +// parseBlobHint parses a blob hint string in wire protocol. +// Returns the blob version hash, reference timestamp and error (if any). +// It can parse legacy blob hints which contain a legacy blobHashIndex, no longer used. +// It uses the length of the hintBytes to determine the format. 
+func parseBlobHint(hintBytes []byte) (common.Hash, uint64, error) { + var blobVersionHash common.Hash + var refTimestamp uint64 + switch len(hintBytes) { + case 48: + blobVersionHash = common.Hash(hintBytes[:32]) + _ = binary.BigEndian.Uint64(hintBytes[32:40]) // contains legacy blobHashIndex, no longer used + refTimestamp = binary.BigEndian.Uint64(hintBytes[40:48]) + case 40: + blobVersionHash = common.Hash(hintBytes[:32]) + refTimestamp = binary.BigEndian.Uint64(hintBytes[32:40]) + default: + return common.Hash{}, 0, fmt.Errorf("invalid blob hint: %x", hintBytes) + } + return blobVersionHash, refTimestamp, nil +} diff --git a/op-program/host/prefetcher/prefetcher_test.go b/op-program/host/prefetcher/prefetcher_test.go index 68a03e81cfa..07940360903 100644 --- a/op-program/host/prefetcher/prefetcher_test.go +++ b/op-program/host/prefetcher/prefetcher_test.go @@ -1,6 +1,7 @@ package prefetcher import ( + "bytes" "context" "crypto/sha256" "encoding/binary" @@ -186,7 +187,6 @@ func TestFetchL1Blob(t *testing.T) { commitment, err := blob.ComputeKZGCommitment() require.NoError(t, err) versionedHash := eth.KZGToVersionedHash(commitment) - blobHash := eth.IndexedBlobHash{Hash: versionedHash, Index: 0xFACADE} l1Ref := eth.L1BlockRef{Time: 0} t.Run("AlreadyKnown", func(t *testing.T) { @@ -196,7 +196,7 @@ func TestFetchL1Blob(t *testing.T) { oracle := l1.NewPreimageOracle(asOracleFn(t, prefetcher), asHinter(t, prefetcher)) defer blobFetcher.AssertExpectations(t) - blobs := oracle.GetBlob(l1Ref, blobHash) + blobs := oracle.GetBlob(l1Ref, versionedHash) require.EqualValues(t, blobs[:], blob[:]) }) @@ -204,16 +204,16 @@ func TestFetchL1Blob(t *testing.T) { prefetcher, _, blobFetcher, _, _ := createPrefetcher(t) oracle := l1.NewPreimageOracle(asOracleFn(t, prefetcher), asHinter(t, prefetcher)) - blobFetcher.ExpectOnGetBlobs( + blobFetcher.ExpectOnGetBlobsByHash( context.Background(), - l1Ref, - []eth.IndexedBlobHash{blobHash}, + l1Ref.Time, + []common.Hash{versionedHash}, 
[]*eth.Blob{&blob}, nil, ) defer blobFetcher.AssertExpectations(t) - blobs := oracle.GetBlob(l1Ref, blobHash) + blobs := oracle.GetBlob(l1Ref, versionedHash) require.EqualValues(t, blobs[:], blob[:]) // Check that the preimages of field element keys are also stored @@ -778,6 +778,51 @@ func TestBadHints(t *testing.T) { }) } +func TestBlobHints(t *testing.T) { + // Static test data + blob := eth.Blob(bytes.Repeat([]byte{0x01}, 131072)) + kzgCommitment, err := blob.ComputeKZGCommitment() + blobHash := eth.KZGToVersionedHash(kzgCommitment) + key := preimage.Sha256Key(blobHash).PreimageKey() + require.NoError(t, err) + timeStamp := uint64(1234567890) + index := uint64(3) + + type testCase struct { + name string + hint l1.BlobHint + } + + testsCases := []testCase{ + { + name: "LegacyBlobHint", + hint: l1.NewLegacyBlobHint(blobHash, index, timeStamp), + }, + { + name: "BlobHint", + hint: l1.NewBlobHint(blobHash, timeStamp), + }, + } + + for _, testCase := range testsCases { + t.Run(testCase.name, func(t *testing.T) { + prefetcher, _, blobsSource, _, _ := createPrefetcher(t) + blobsSource.ExpectOnGetBlobsByHash( + context.Background(), timeStamp, []common.Hash{blobHash}, []*eth.Blob{&blob}, nil, + ) + + // Check that the prefetcher accepts the hint + require.NoError(t, prefetcher.Hint(testCase.hint.Hint())) + + // Check that the prefetcher can retrieve the blob from the mock source + // using the stored hint. 
+ pre, err := prefetcher.GetPreimage(context.Background(), key) + require.NoError(t, err) + require.Equal(t, []byte(kzgCommitment[:]), pre) + }) + } +} + func TestFallbackWhenExperimentalFails(t *testing.T) { rng := rand.New(rand.NewSource(123)) node := testutils.RandomData(rng, 30) diff --git a/op-program/host/prefetcher/retry.go b/op-program/host/prefetcher/retry.go index a5405e0223b..2e5c284d95b 100644 --- a/op-program/host/prefetcher/retry.go +++ b/op-program/host/prefetcher/retry.go @@ -75,11 +75,11 @@ func NewRetryingL1BlobSource(logger log.Logger, source L1BlobSource) *RetryingL1 } } -func (s *RetryingL1BlobSource) GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { +func (s *RetryingL1BlobSource) GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) { return retry.Do(ctx, maxAttempts, s.strategy, func() ([]*eth.Blob, error) { - blobs, err := s.source.GetBlobs(ctx, ref, hashes) + blobs, err := s.source.GetBlobsByHash(ctx, time, hashes) if err != nil { - s.logger.Warn("Failed to retrieve blobs", "ref", ref, "err", err) + s.logger.Warn("Failed to retrieve blobs", "time", time, "err", err) } return blobs, err }) diff --git a/op-program/host/prefetcher/retry_test.go b/op-program/host/prefetcher/retry_test.go index 90d5c153219..83048114bcb 100644 --- a/op-program/host/prefetcher/retry_test.go +++ b/op-program/host/prefetcher/retry_test.go @@ -119,46 +119,45 @@ func TestRetryingL1BlobSource(t *testing.T) { commitment, err := blob.ComputeKZGCommitment() require.NoError(t, err) versionedHash := eth.KZGToVersionedHash(commitment) - blobHash := eth.IndexedBlobHash{Hash: versionedHash, Index: 0xFACADE} - l1BlockRef := eth.L1BlockRef{Time: 0} + l1Time := uint64(0) - t.Run("GetBlobs Success", func(t *testing.T) { + t.Run("GetBlobsByHash Success", func(t *testing.T) { source, mock := createL1BlobSource(t) defer mock.AssertExpectations(t) - mock.ExpectOnGetBlobs( + 
mock.ExpectOnGetBlobsByHash( ctx, - l1BlockRef, - []eth.IndexedBlobHash{blobHash}, + l1Time, + []common.Hash{versionedHash}, []*eth.Blob{(*eth.Blob)(&blob)}, nil, ) - result, err := source.GetBlobs(ctx, l1BlockRef, []eth.IndexedBlobHash{blobHash}) + result, err := source.GetBlobsByHash(ctx, l1Time, []common.Hash{versionedHash}) require.NoError(t, err) require.Equal(t, len(result), 1) require.Equal(t, blob[:], result[0][:]) }) - t.Run("GetBlobs Error", func(t *testing.T) { + t.Run("GetBlobsByHash Error", func(t *testing.T) { source, mock := createL1BlobSource(t) defer mock.AssertExpectations(t) expectedErr := errors.New("boom") - mock.ExpectOnGetBlobs( + mock.ExpectOnGetBlobsByHash( ctx, - l1BlockRef, - []eth.IndexedBlobHash{blobHash}, + l1Time, + []common.Hash{versionedHash}, nil, expectedErr, ) - mock.ExpectOnGetBlobs( + mock.ExpectOnGetBlobsByHash( ctx, - l1BlockRef, - []eth.IndexedBlobHash{blobHash}, + l1Time, + []common.Hash{versionedHash}, []*eth.Blob{(*eth.Blob)(&blob)}, nil, ) - result, err := source.GetBlobs(ctx, l1BlockRef, []eth.IndexedBlobHash{blobHash}) + result, err := source.GetBlobsByHash(ctx, l1Time, []common.Hash{versionedHash}) require.NoError(t, err) require.Equal(t, len(result), 1) require.Equal(t, blob[:], result[0][:]) diff --git a/op-program/scripts/build-prestates.sh b/op-program/scripts/build-prestates.sh index d10be689a78..85072374332 100755 --- a/op-program/scripts/build-prestates.sh +++ b/op-program/scripts/build-prestates.sh @@ -105,7 +105,7 @@ EOF fi rm -rf "${BIN_DIR}" - rm -rf kona/prestate-artifacts-* + rm -rf rust/kona/prestate-artifacts-* make reproducible-prestate >> "${log_file}" 2>&1 if [[ "${version}" =~ ^op-program/v ]]; then @@ -139,18 +139,18 @@ EOF fi if [[ "${version}" =~ ^kona-client/v ]]; then - if [ -f "kona/prestate-artifacts-cannon/prestate-proof.json" ]; then + if [ -f "rust/kona/prestate-artifacts-cannon/prestate-proof.json" ]; then local hash - hash=$(jq -r .pre kona/prestate-artifacts-cannon/prestate-proof.json) 
- cp kona/prestate-artifacts-cannon/prestate.bin.gz "${STATES_DIR}/${hash}.bin.gz" + hash=$(jq -r .pre rust/kona/prestate-artifacts-cannon/prestate-proof.json) + cp rust/kona/prestate-artifacts-cannon/prestate.bin.gz "${STATES_DIR}/${hash}.bin.gz" VERSIONS_JSON=$(echo "${VERSIONS_JSON}" | jq ". += [{\"version\": \"${short_version}\", \"hash\": \"${hash}\", \"type\": \"cannon64-kona\"}]") echo "Built cannon64-kona ${version}: ${hash}" fi - if [ -f "kona/prestate-artifacts-cannon-interop/prestate-proof.json" ]; then + if [ -f "rust/kona/prestate-artifacts-cannon-interop/prestate-proof.json" ]; then local hash - hash=$(jq -r .pre kona/prestate-artifacts-cannon-interop/prestate-proof.json) - cp kona/prestate-artifacts-cannon-interop/prestate.bin.gz "${STATES_DIR}/${hash}.bin.gz" + hash=$(jq -r .pre rust/kona/prestate-artifacts-cannon-interop/prestate-proof.json) + cp rust/kona/prestate-artifacts-cannon-interop/prestate.bin.gz "${STATES_DIR}/${hash}.bin.gz" VERSIONS_JSON=$(echo "${VERSIONS_JSON}" | jq ". += [{\"version\": \"${short_version}\", \"hash\": \"${hash}\", \"type\": \"cannon64-kona-interop\"}]") echo "Built cannon64-kona-interop ${version}: ${hash}" fi diff --git a/op-reth/.gitignore b/op-reth/.gitignore deleted file mode 100644 index 2f7896d1d13..00000000000 --- a/op-reth/.gitignore +++ /dev/null @@ -1 +0,0 @@ -target/ diff --git a/op-reth/Cargo.lock b/op-reth/Cargo.lock deleted file mode 100644 index 21bee41a842..00000000000 --- a/op-reth/Cargo.lock +++ /dev/null @@ -1,12700 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-version = 4 - -[[package]] -name = "addr2line" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" -dependencies = [ - "gimli", -] - -[[package]] -name = "adler2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" - -[[package]] -name = "aead" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" -dependencies = [ - "crypto-common", - "generic-array", -] - -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher", - "cpufeatures", -] - -[[package]] -name = "aes-gcm" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" -dependencies = [ - "aead", - "aes", - "cipher", - "ctr", - "ghash", - "subtle", -] - -[[package]] -name = "ahash" -version = "0.8.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" -dependencies = [ - "cfg-if", - "getrandom 0.3.4", - "once_cell", - "version_check", - "zerocopy", -] - -[[package]] -name = "aho-corasick" -version = "1.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" -dependencies = [ - "memchr", -] - -[[package]] -name = "aligned-vec" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" -dependencies = [ - "equator", -] 
- -[[package]] -name = "alloc-no-stdlib" -version = "2.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" - -[[package]] -name = "alloc-stdlib" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" -dependencies = [ - "alloc-no-stdlib", -] - -[[package]] -name = "allocator-api2" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" - -[[package]] -name = "alloy-chains" -version = "0.2.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f374d3c6d729268bbe2d0e0ff992bb97898b2df756691a62ee1d5f0506bc39" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "num_enum", - "proptest", - "serde", - "strum 0.27.2", -] - -[[package]] -name = "alloy-consensus" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1958f0294ecc05ebe7b3c9a8662a3e221c2523b7f2bcd94c7a651efbd510bf" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-trie", - "alloy-tx-macros", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - "k256", - "once_cell", - "rand 0.8.5", - "secp256k1 0.30.0", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-consensus-any" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f752e99497ddc39e22d547d7dfe516af10c979405a034ed90e69b914b7dddeae" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "serde", -] - -[[package]] -name = "alloy-dyn-abi" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "14ff5ee5f27aa305bda825c735f686ad71bb65508158f059f513895abe69b8c3" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "alloy-sol-type-parser", - "alloy-sol-types", - "derive_more", - "itoa", - "serde", - "serde_json", - "winnow", -] - -[[package]] -name = "alloy-eip2124" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "crc", - "rand 0.8.5", - "serde", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-eip2930" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "rand 0.8.5", - "serde", -] - -[[package]] -name = "alloy-eip7702" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "k256", - "rand 0.8.5", - "serde", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-eip7928" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3231de68d5d6e75332b7489cfcc7f4dfabeba94d990a10e4b923af0e6623540" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "borsh", - "serde", -] - -[[package]] -name = "alloy-eips" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "813a67f87e56b38554d18b182616ee5006e8e2bf9df96a0df8bf29dff1d52e3f" -dependencies = [ - "alloy-eip2124", - "alloy-eip2930", - "alloy-eip7702", - "alloy-eip7928", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "arbitrary", - "auto_impl", - "borsh", - "c-kzg", - "derive_more", - "either", - 
"ethereum_ssz", - "ethereum_ssz_derive", - "serde", - "serde_with", - "sha2", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-evm" -version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1582933a9fc27c0953220eb4f18f6492ff577822e9a8d848890ff59f6b4f5beb" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-sol-types", - "auto_impl", - "derive_more", - "op-alloy", - "op-revm", - "revm", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-genesis" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05864eef929c4d28895ae4b4d8ac9c6753c4df66e873b9c8fafc8089b59c1502" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "alloy-trie", - "borsh", - "serde", - "serde_with", -] - -[[package]] -name = "alloy-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83ba208044232d14d4adbfa77e57d6329f51bc1acc21f5667bb7db72d88a0831" -dependencies = [ - "alloy-chains", - "alloy-eip2124", - "alloy-primitives", - "auto_impl", - "dyn-clone", - "serde", -] - -[[package]] -name = "alloy-json-abi" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8708475665cc00e081c085886e68eada2f64cfa08fc668213a9231655093d4de" -dependencies = [ - "alloy-primitives", - "alloy-sol-type-parser", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-json-rpc" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2dd146b3de349a6ffaa4e4e319ab3a90371fb159fb0bddeb1c7bbe8b1792eff" -dependencies = [ - "alloy-primitives", - "alloy-sol-types", - "http", - "serde", - "serde_json", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "alloy-network" -version = "1.5.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c12278ffbb8872dfba3b2f17d8ea5e8503c2df5155d9bc5ee342794bde505c3" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-json-rpc", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-any", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-signer", - "alloy-sol-types", - "async-trait", - "auto_impl", - "derive_more", - "futures-utils-wasm", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-network-primitives" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "833037c04917bc2031541a60e8249e4ab5500e24c637c1c62e95e963a655d66f" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-op-evm" -version = "0.27.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f19214adae08ea95600c3ede76bcbf0c40b36a263534a8f441a4c732f60e868" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-op-hardforks", - "alloy-primitives", - "auto_impl", - "op-alloy", - "op-revm", - "revm", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-op-hardforks" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6472c610150c4c4c15be9e1b964c9b78068f933bda25fb9cdf09b9ac2bb66f36" -dependencies = [ - "alloy-chains", - "alloy-hardforks", - "alloy-primitives", - "auto_impl", - "serde", -] - -[[package]] -name = "alloy-primitives" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b88cf92ed20685979ed1d8472422f0c6c2d010cec77caf63aaa7669cc1a7bc2" -dependencies = [ - "alloy-rlp", - "arbitrary", - "bytes", - "cfg-if", - "const-hex", - "derive_more", - "fixed-cache", - "foldhash 0.2.0", - "getrandom 0.3.4", - "hashbrown 0.16.1", - "indexmap 2.13.0", - "itoa", - "k256", - "keccak-asm", - 
"paste", - "proptest", - "proptest-derive 0.6.0", - "rand 0.9.2", - "rapidhash", - "ruint", - "rustc-hash", - "serde", - "sha3", -] - -[[package]] -name = "alloy-provider" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eafa840b0afe01c889a3012bb2fde770a544f74eab2e2870303eb0a5fb869c48" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-network", - "alloy-network-primitives", - "alloy-primitives", - "alloy-pubsub", - "alloy-rpc-client", - "alloy-rpc-types-debug", - "alloy-rpc-types-eth", - "alloy-rpc-types-trace", - "alloy-signer", - "alloy-sol-types", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", - "async-stream", - "async-trait", - "auto_impl", - "dashmap", - "either", - "futures", - "futures-utils-wasm", - "lru 0.16.3", - "parking_lot", - "pin-project", - "reqwest", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-pubsub" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57b3a3b3e4efc9f4d30e3326b6bd6811231d16ef94837e18a802b44ca55119e6" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-transport", - "auto_impl", - "bimap", - "futures", - "parking_lot", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower", - "tracing", - "wasmtimer", -] - -[[package]] -name = "alloy-rlp" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f70d83b765fdc080dbcd4f4db70d8d23fe4761f2f02ebfa9146b833900634b4" -dependencies = [ - "alloy-rlp-derive", - "arrayvec", - "bytes", -] - -[[package]] -name = "alloy-rlp-derive" -version = "0.3.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64b728d511962dda67c1bc7ea7c03736ec275ed2cf4c35d9585298ac9ccf3b73" -dependencies = [ - "proc-macro2", - "quote", - "syn 
2.0.114", -] - -[[package]] -name = "alloy-rpc-client" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12768ae6303ec764905a8a7cd472aea9072f9f9c980d18151e26913da8ae0123" -dependencies = [ - "alloy-json-rpc", - "alloy-primitives", - "alloy-pubsub", - "alloy-transport", - "alloy-transport-http", - "alloy-transport-ipc", - "alloy-transport-ws", - "futures", - "pin-project", - "reqwest", - "serde", - "serde_json", - "tokio", - "tokio-stream", - "tower", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-rpc-types" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0622d8bcac2f16727590aa33f4c3f05ea98130e7e4b4924bce8be85da5ad0dae" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-rpc-types-admin" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c38c5ac70457ecc74e87fe1a5a19f936419224ded0eb0636241452412ca92733" -dependencies = [ - "alloy-genesis", - "alloy-primitives", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-rpc-types-anvil" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae8eb0e5d6c48941b61ab76fabab4af66f7d88309a98aa14ad3dec7911c1eba3" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-rpc-types-any" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1cf5a093e437dfd62df48e480f24e1a3807632358aad6816d7a52875f1c04aa" -dependencies = [ - "alloy-consensus-any", - "alloy-rpc-types-eth", - "alloy-serde", -] - -[[package]] -name = "alloy-rpc-types-beacon" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e07949e912479ef3b848e1cf8db54b534bdd7bc58e6c23f28ea9488960990c8c" 
-dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", - "tree_hash", - "tree_hash_derive", -] - -[[package]] -name = "alloy-rpc-types-debug" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "925ff0f48c2169c050f0ae7a82769bdf3f45723d6742ebb6a5efb4ed2f491b26" -dependencies = [ - "alloy-primitives", - "derive_more", - "serde", - "serde_with", -] - -[[package]] -name = "alloy-rpc-types-engine" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "336ef381c7409f23c69f6e79bddc1917b6e832cff23e7a5cf84b9381d53582e6" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "jsonwebtoken", - "rand 0.8.5", - "serde", - "strum 0.27.2", -] - -[[package]] -name = "alloy-rpc-types-eth" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28e97603095020543a019ab133e0e3dc38cd0819f19f19bdd70c642404a54751" -dependencies = [ - "alloy-consensus", - "alloy-consensus-any", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rlp", - "alloy-serde", - "alloy-sol-types", - "arbitrary", - "itertools 0.14.0", - "serde", - "serde_json", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-rpc-types-mev" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2805153975e25d38e37ee100880e642d5b24e421ed3014a7d2dae1d9be77562e" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-rpc-types-trace" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f1aec4e1c66505d067933ea1a949a4fb60a19c4cfc2f109aa65873ea99e62ea8" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-rpc-types-txpool" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25b73c1d6e4f1737a20d246dad5a0abd6c1b76ec4c3d153684ef8c6f1b6bb4f4" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "serde", -] - -[[package]] -name = "alloy-serde" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "946a0d413dbb5cd9adba0de5f8a1a34d5b77deda9b69c1d7feed8fc875a1aa26" -dependencies = [ - "alloy-primitives", - "arbitrary", - "serde", - "serde_json", -] - -[[package]] -name = "alloy-signer" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7481dc8316768f042495eaf305d450c32defbc9bce09d8bf28afcd956895bb" -dependencies = [ - "alloy-primitives", - "async-trait", - "auto_impl", - "either", - "elliptic-curve", - "k256", - "thiserror 2.0.18", -] - -[[package]] -name = "alloy-signer-local" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1259dac1f534a4c66c1d65237c89915d0010a2a91d6c3b0bada24dc5ee0fb917" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-signer", - "async-trait", - "coins-bip32", - "coins-bip39", - "k256", - "rand 0.8.5", - "thiserror 2.0.18", - "zeroize", -] - -[[package]] -name = "alloy-sol-macro" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fa1ca7e617c634d2bd9fa71f9ec8e47c07106e248b9fcbd3eaddc13cabd625" -dependencies = [ - "alloy-sol-macro-expander", - "alloy-sol-macro-input", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "alloy-sol-macro-expander" -version = "1.5.4" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "27c00c0c3a75150a9dc7c8c679ca21853a137888b4e1c5569f92d7e2b15b5102" -dependencies = [ - "alloy-sol-macro-input", - "const-hex", - "heck", - "indexmap 2.13.0", - "proc-macro-error2", - "proc-macro2", - "quote", - "sha3", - "syn 2.0.114", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-macro-input" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "297db260eb4d67c105f68d6ba11b8874eec681caec5505eab8fbebee97f790bc" -dependencies = [ - "const-hex", - "dunce", - "heck", - "macro-string", - "proc-macro2", - "quote", - "syn 2.0.114", - "syn-solidity", -] - -[[package]] -name = "alloy-sol-type-parser" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b91b13181d3bcd23680fd29d7bc861d1f33fbe90fdd0af67162434aeba902d" -dependencies = [ - "serde", - "winnow", -] - -[[package]] -name = "alloy-sol-types" -version = "1.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc442cc2a75207b708d481314098a0f8b6f7b58e3148dd8d8cc7407b0d6f9385" -dependencies = [ - "alloy-json-abi", - "alloy-primitives", - "alloy-sol-macro", - "serde", -] - -[[package]] -name = "alloy-transport" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78f169b85eb9334871db986e7eaf59c58a03d86a30cc68b846573d47ed0656bb" -dependencies = [ - "alloy-json-rpc", - "auto_impl", - "base64 0.22.1", - "derive_more", - "futures", - "futures-utils-wasm", - "parking_lot", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tower", - "tracing", - "url", - "wasmtimer", -] - -[[package]] -name = "alloy-transport-http" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "019821102e70603e2c141954418255bec539ef64ac4117f8e84fb493769acf73" -dependencies = [ - "alloy-json-rpc", - "alloy-transport", - "reqwest", - "serde_json", - "tower", - 
"tracing", - "url", -] - -[[package]] -name = "alloy-transport-ipc" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e574ca2f490fb5961d2cdd78188897392c46615cd88b35c202d34bbc31571a81" -dependencies = [ - "alloy-json-rpc", - "alloy-pubsub", - "alloy-transport", - "bytes", - "futures", - "interprocess", - "pin-project", - "serde", - "serde_json", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "alloy-transport-ws" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b92dea6996269769f74ae56475570e3586910661e037b7b52d50c9641f76c68f" -dependencies = [ - "alloy-pubsub", - "alloy-transport", - "futures", - "http", - "serde_json", - "tokio", - "tokio-tungstenite", - "tracing", - "ws_stream_wasm", -] - -[[package]] -name = "alloy-trie" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "arrayvec", - "derive_arbitrary", - "derive_more", - "nybbles", - "proptest", - "proptest-derive 0.5.1", - "serde", - "smallvec", - "tracing", -] - -[[package]] -name = "alloy-tx-macros" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ceac797eb8a56bdf5ab1fab353072c17d472eab87645ca847afe720db3246d" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "android_system_properties" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" -dependencies = [ - "libc", -] - -[[package]] -name = "anstream" -version = "0.6.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" -dependencies = [ - "anstyle", - 
"anstyle-parse", - "anstyle-query", - "anstyle-wincon", - "colorchoice", - "is_terminal_polyfill", - "utf8parse", -] - -[[package]] -name = "anstyle" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" - -[[package]] -name = "anstyle-parse" -version = "0.2.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" -dependencies = [ - "utf8parse", -] - -[[package]] -name = "anstyle-query" -version = "1.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "anstyle-wincon" -version = "3.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" -dependencies = [ - "anstyle", - "once_cell_polyfill", - "windows-sys 0.61.2", -] - -[[package]] -name = "anyhow" -version = "1.0.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" - -[[package]] -name = "aquamarine" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f50776554130342de4836ba542aa85a4ddb361690d7e8df13774d7284c3d5c2" -dependencies = [ - "include_dir", - "itertools 0.10.5", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" -dependencies = [ - "derive_arbitrary", -] - -[[package]] -name = "ark-bls12-381" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3df4dcc01ff89867cd86b0da835f23c3f02738353aaee7dde7495af71363b8d5" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-bn254" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d69eab57e8d2663efa5c63135b2af4f396d66424f88954c21104125ab6b3e6bc" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-r1cs-std", - "ark-std 0.5.0", -] - -[[package]] -name = "ark-ec" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce" -dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-poly", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", - "itertools 0.13.0", - "num-bigint", - "num-integer", - "num-traits", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" -dependencies = [ - "ark-ff-asm 0.3.0", - "ark-ff-macros 0.3.0", - "ark-serialize 0.3.0", - "ark-std 0.3.0", - "derivative", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.3.3", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" -dependencies = [ - "ark-ff-asm 0.4.2", - "ark-ff-macros 0.4.2", - "ark-serialize 0.4.2", - "ark-std 0.4.0", - "derivative", - "digest 0.10.7", - "itertools 0.10.5", - "num-bigint", - "num-traits", - "paste", - "rustc_version 0.4.1", - "zeroize", -] - -[[package]] -name = "ark-ff" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" -dependencies = [ - "ark-ff-asm 0.5.0", - "ark-ff-macros 0.5.0", - 
"ark-serialize 0.5.0", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "educe", - "itertools 0.13.0", - "num-bigint", - "num-traits", - "paste", - "zeroize", -] - -[[package]] -name = "ark-ff-asm" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" -dependencies = [ - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-asm" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" -dependencies = [ - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-ff-macros" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" -dependencies = [ - "num-bigint", - "num-traits", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "ark-ff-macros" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-poly" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27" 
-dependencies = [ - "ahash", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "ark-std 0.5.0", - "educe", - "fnv", - "hashbrown 0.15.5", -] - -[[package]] -name = "ark-r1cs-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "941551ef1df4c7a401de7068758db6503598e6f01850bdb2cfdb614a1f9dbea1" -dependencies = [ - "ark-ec", - "ark-ff 0.5.0", - "ark-relations", - "ark-std 0.5.0", - "educe", - "num-bigint", - "num-integer", - "num-traits", - "tracing", -] - -[[package]] -name = "ark-relations" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec46ddc93e7af44bcab5230937635b06fb5744464dd6a7e7b083e80ebd274384" -dependencies = [ - "ark-ff 0.5.0", - "ark-std 0.5.0", - "tracing", - "tracing-subscriber 0.2.25", -] - -[[package]] -name = "ark-serialize" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" -dependencies = [ - "ark-std 0.3.0", - "digest 0.9.0", -] - -[[package]] -name = "ark-serialize" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" -dependencies = [ - "ark-std 0.4.0", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" -dependencies = [ - "ark-serialize-derive", - "ark-std 0.5.0", - "arrayvec", - "digest 0.10.7", - "num-bigint", -] - -[[package]] -name = "ark-serialize-derive" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ark-std" -version = "0.3.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "ark-std" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" -dependencies = [ - "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "arrayref" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" - -[[package]] -name = "arrayvec" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -dependencies = [ - "serde", -] - -[[package]] -name = "asn1_der" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "155a5a185e42c6b77ac7b88a15143d930a9e9727a5b7b77eed417404ab15c247" - -[[package]] -name = "async-compression" -version = "0.4.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d10e4f991a553474232bc0a31799f6d24b034a84c0971d80d2e2f78b2e576e40" -dependencies = [ - "compression-codecs", - "compression-core", - "pin-project-lite", - "tokio", -] - -[[package]] -name = "async-stream" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" -dependencies = [ - "async-stream-impl", - "futures-core", - "pin-project-lite", -] - -[[package]] -name = "async-stream-impl" -version = "0.3.6" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "async-trait" -version = "0.1.89" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "async_io_stream" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" -dependencies = [ - "futures", - "pharos", - "rustc_version 0.4.1", -] - -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - -[[package]] -name = "aurora-engine-modexp" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "518bc5745a6264b5fd7b09dffb9667e400ee9e2bbe18555fac75e1fe9afa0df9" -dependencies = [ - "hex", - "num", -] - -[[package]] -name = "auto_impl" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "autocfg" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" - -[[package]] -name = "backon" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" -dependencies = [ - "fastrand", - "tokio", -] - -[[package]] -name = "backtrace" -version = "0.3.76" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-link", -] - -[[package]] -name = "base-x" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" - -[[package]] -name = "base16ct" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" - -[[package]] -name = "base256emoji" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" -dependencies = [ - "const-str", - "match-lookup", -] - -[[package]] -name = "base64" -version = "0.21.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - -[[package]] -name = "base64ct" -version = "1.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" - -[[package]] -name = "bech32" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" - -[[package]] -name = "bimap" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" - -[[package]] -name = "bincode" -version = "1.3.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" -dependencies = [ - "serde", -] - -[[package]] -name = "bindgen" -version = "0.71.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3" -dependencies = [ - "bitflags 2.10.0", - "cexpr", - "clang-sys", - "itertools 0.13.0", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "syn 2.0.114", -] - -[[package]] -name = "bindgen" -version = "0.72.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" -dependencies = [ - "bitflags 2.10.0", - "cexpr", - "clang-sys", - "itertools 0.13.0", - "proc-macro2", - "quote", - "regex", - "rustc-hash", - "shlex", - "syn 2.0.114", -] - -[[package]] -name = "bit-set" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" -dependencies = [ - "bit-vec", -] - -[[package]] -name = "bit-vec" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" - -[[package]] -name = "bitcoin-io" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" - -[[package]] -name = "bitcoin_hashes" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" -dependencies = [ - "bitcoin-io", - "hex-conservative", -] - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" 
- -[[package]] -name = "bitflags" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" -dependencies = [ - "arbitrary", - "serde_core", -] - -[[package]] -name = "bitvec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" -dependencies = [ - "funty", - "radium", - "serde", - "tap", - "wyz", -] - -[[package]] -name = "block-buffer" -version = "0.10.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" -dependencies = [ - "generic-array", -] - -[[package]] -name = "block-padding" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" -dependencies = [ - "generic-array", -] - -[[package]] -name = "blst" -version = "0.3.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45" -dependencies = [ - "cc", - "glob", - "threadpool", - "zeroize", -] - -[[package]] -name = "boa_ast" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc119a5ad34c3f459062a96907f53358989b173d104258891bb74f95d93747e8" -dependencies = [ - "bitflags 2.10.0", - "boa_interner", - "boa_macros", - "boa_string", - "indexmap 2.13.0", - "num-bigint", - "rustc-hash", -] - -[[package]] -name = "boa_engine" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e637ec52ea66d76b0ca86180c259d6c7bb6e6a6e14b2f36b85099306d8b00cc3" -dependencies = [ - "aligned-vec", - "arrayvec", - "bitflags 2.10.0", - "boa_ast", - "boa_gc", - "boa_interner", - "boa_macros", - "boa_parser", - "boa_string", - "bytemuck", - "cfg-if", 
- "cow-utils", - "dashmap", - "dynify", - "fast-float2", - "float16", - "futures-channel", - "futures-concurrency", - "futures-lite", - "hashbrown 0.16.1", - "icu_normalizer", - "indexmap 2.13.0", - "intrusive-collections", - "itertools 0.14.0", - "num-bigint", - "num-integer", - "num-traits", - "num_enum", - "paste", - "portable-atomic", - "rand 0.9.2", - "regress", - "rustc-hash", - "ryu-js", - "serde", - "serde_json", - "small_btree", - "static_assertions", - "tag_ptr", - "tap", - "thin-vec", - "thiserror 2.0.18", - "time", - "xsum", -] - -[[package]] -name = "boa_gc" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1179f690cbfcbe5364cceee5f1cb577265bb6f07b0be6f210aabe270adcf9da" -dependencies = [ - "boa_macros", - "boa_string", - "hashbrown 0.16.1", - "thin-vec", -] - -[[package]] -name = "boa_interner" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9626505d33dc63d349662437297df1d3afd9d5fc4a2b3ad34e5e1ce879a78848" -dependencies = [ - "boa_gc", - "boa_macros", - "hashbrown 0.16.1", - "indexmap 2.13.0", - "once_cell", - "phf", - "rustc-hash", - "static_assertions", -] - -[[package]] -name = "boa_macros" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f36418a46544b152632c141b0a0b7a453cd69ca150caeef83aee9e2f4b48b7d" -dependencies = [ - "cfg-if", - "cow-utils", - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "boa_parser" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02f99bf5b684f0de946378fcfe5f38c3a0fbd51cbf83a0f39ff773a0e218541f" -dependencies = [ - "bitflags 2.10.0", - "boa_ast", - "boa_interner", - "boa_macros", - "fast-float2", - "icu_properties", - "num-bigint", - "num-traits", - "regress", - "rustc-hash", -] - -[[package]] -name = "boa_string" -version = "0.21.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "45ce9d7aa5563a2e14eab111e2ae1a06a69a812f6c0c3d843196c9d03fbef440" -dependencies = [ - "fast-float2", - "itoa", - "paste", - "rustc-hash", - "ryu-js", - "static_assertions", -] - -[[package]] -name = "borsh" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" -dependencies = [ - "borsh-derive", - "cfg_aliases", -] - -[[package]] -name = "borsh-derive" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" -dependencies = [ - "once_cell", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "boyer-moore-magiclen" -version = "0.2.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7441b4796eb8a7107d4cd99d829810be75f5573e1081c37faa0e8094169ea0d6" -dependencies = [ - "debug-helper", -] - -[[package]] -name = "brotli" -version = "8.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", - "brotli-decompressor", -] - -[[package]] -name = "brotli-decompressor" -version = "5.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" -dependencies = [ - "alloc-no-stdlib", - "alloc-stdlib", -] - -[[package]] -name = "bs58" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" -dependencies = [ - "sha2", - "tinyvec", -] - -[[package]] -name = "bumpalo" -version = "3.19.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" - -[[package]] -name = "byte-slice-cast" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" - -[[package]] -name = "bytemuck" -version = "1.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" -dependencies = [ - "bytemuck_derive", -] - -[[package]] -name = "bytemuck_derive" -version = "1.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "byteorder" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" - -[[package]] -name = "bytes" -version = "1.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" -dependencies = [ - "serde", -] - -[[package]] -name = "bzip2-sys" -version = "0.1.13+1.0.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" -dependencies = [ - "cc", - "pkg-config", -] - -[[package]] -name = "c-kzg" -version = "2.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" -dependencies = [ - "arbitrary", - "blst", - "cc", - "glob", - "hex", - "libc", - "once_cell", - "serde", -] - -[[package]] -name = "camino" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" -dependencies = [ - "serde_core", -] 
- -[[package]] -name = "cargo-platform" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" -dependencies = [ - "serde", - "serde_core", -] - -[[package]] -name = "cargo_metadata" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9" -dependencies = [ - "camino", - "cargo-platform", - "semver 1.0.27", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "cassowary" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" - -[[package]] -name = "castaway" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" -dependencies = [ - "rustversion", -] - -[[package]] -name = "cc" -version = "1.2.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29" -dependencies = [ - "find-msvc-tools", - "jobserver", - "libc", - "shlex", -] - -[[package]] -name = "cesu8" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" - -[[package]] -name = "cexpr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" -dependencies = [ - "nom", -] - -[[package]] -name = "cfg-if" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" - -[[package]] -name = "cfg_aliases" -version = "0.2.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" - -[[package]] -name = "chrono" -version = "0.4.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" -dependencies = [ - "iana-time-zone", - "js-sys", - "num-traits", - "serde", - "wasm-bindgen", - "windows-link", -] - -[[package]] -name = "cipher" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" -dependencies = [ - "crypto-common", - "inout", -] - -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - -[[package]] -name = "clap" -version = "4.5.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75ca66430e33a14957acc24c5077b503e7d374151b2b4b3a10c83b4ceb4be0e" -dependencies = [ - "clap_builder", - "clap_derive", -] - -[[package]] -name = "clap_builder" -version = "4.5.56" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793207c7fa6300a0608d1080b858e5fdbe713cdc1c8db9fb17777d8a13e63df0" -dependencies = [ - "anstream", - "anstyle", - "clap_lex", - "strsim", -] - -[[package]] -name = "clap_derive" -version = "4.5.55" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "clap_lex" -version = "0.7.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" - -[[package]] -name = "coins-bip32" 
-version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2073678591747aed4000dd468b97b14d7007f7936851d3f2f01846899f5ebf08" -dependencies = [ - "bs58", - "coins-core", - "digest 0.10.7", - "hmac", - "k256", - "serde", - "sha2", - "thiserror 1.0.69", -] - -[[package]] -name = "coins-bip39" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74b169b26623ff17e9db37a539fe4f15342080df39f129ef7631df7683d6d9d4" -dependencies = [ - "bitvec", - "coins-bip32", - "hmac", - "once_cell", - "pbkdf2", - "rand 0.8.5", - "sha2", - "thiserror 1.0.69", -] - -[[package]] -name = "coins-core" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b962ad8545e43a28e14e87377812ba9ae748dd4fd963f4c10e9fcc6d13475b" -dependencies = [ - "base64 0.21.7", - "bech32", - "bs58", - "const-hex", - "digest 0.10.7", - "generic-array", - "ripemd", - "serde", - "sha2", - "sha3", - "thiserror 1.0.69", -] - -[[package]] -name = "colorchoice" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" - -[[package]] -name = "combine" -version = "4.6.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" -dependencies = [ - "bytes", - "memchr", -] - -[[package]] -name = "comfy-table" -version = "7.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" -dependencies = [ - "crossterm 0.29.0", - "unicode-segmentation", - "unicode-width 0.2.0", -] - -[[package]] -name = "compact_str" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" -dependencies = [ - "castaway", - "cfg-if", - 
"itoa", - "rustversion", - "ryu", - "static_assertions", -] - -[[package]] -name = "compression-codecs" -version = "0.4.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00828ba6fd27b45a448e57dbfe84f1029d4c9f26b368157e9a448a5f49a2ec2a" -dependencies = [ - "brotli", - "compression-core", - "flate2", - "memchr", - "zstd", - "zstd-safe", -] - -[[package]] -name = "compression-core" -version = "0.4.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" - -[[package]] -name = "concat-kdf" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d72c1252426a83be2092dd5884a5f6e3b8e7180f6891b6263d2c21b92ec8816" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "const-hex" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" -dependencies = [ - "cfg-if", - "cpufeatures", - "proptest", - "serde_core", -] - -[[package]] -name = "const-oid" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" - -[[package]] -name = "const-str" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" - -[[package]] -name = "const_format" -version = "0.2.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" -dependencies = [ - "const_format_proc_macros", -] - -[[package]] -name = "const_format_proc_macros" -version = "0.2.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" -dependencies = [ - 
"proc-macro2", - "quote", - "unicode-xid", -] - -[[package]] -name = "convert_case" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" -dependencies = [ - "unicode-segmentation", -] - -[[package]] -name = "core-foundation" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "core-foundation-sys" -version = "0.8.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" - -[[package]] -name = "core2" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" -dependencies = [ - "memchr", -] - -[[package]] -name = "cow-utils" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "417bef24afe1460300965a25ff4a24b8b45ad011948302ec221e8a0a81eb2c79" - -[[package]] -name = "cpufeatures" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" -dependencies = [ - "libc", -] - -[[package]] -name = "crc" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" -dependencies = [ - "crc-catalog", -] - -[[package]] -name = "crc-catalog" -version = "2.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" - -[[package]] -name = "crc32fast" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "critical-section" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" - -[[package]] -name = "crossbeam-channel" -version = "0.5.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" -dependencies = [ - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" -dependencies = [ - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-utils" -version = "0.8.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" - -[[package]] -name = "crossterm" -version = "0.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" -dependencies = [ - "bitflags 2.10.0", - "crossterm_winapi", - "mio", - "parking_lot", - "rustix 0.38.44", - "signal-hook", - "signal-hook-mio", - "winapi", -] - -[[package]] -name = "crossterm" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" -dependencies = [ - "bitflags 2.10.0", - "crossterm_winapi", - "document-features", - "parking_lot", - "rustix 1.1.3", - "winapi", -] - -[[package]] -name = 
"crossterm_winapi" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" -dependencies = [ - "winapi", -] - -[[package]] -name = "crunchy" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" - -[[package]] -name = "crypto-bigint" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "subtle", - "zeroize", -] - -[[package]] -name = "crypto-common" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" -dependencies = [ - "generic-array", - "rand_core 0.6.4", - "typenum", -] - -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher", -] - -[[package]] -name = "curve25519-dalek" -version = "4.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest 0.10.7", - "fiat-crypto", - "rustc_version 0.4.1", - "subtle", - "zeroize", -] - -[[package]] -name = "curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" -dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" -dependencies = [ - "darling_core 0.23.0", - "darling_macro 0.23.0", -] - -[[package]] -name = "darling_core" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "serde", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_core" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" -dependencies = [ - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" -dependencies = [ - "darling_core 0.20.11", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.23.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" -dependencies = [ - "darling_core 0.23.0", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", -] - -[[package]] -name = "data-encoding" -version = "2.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" - -[[package]] -name = "data-encoding-macro" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8142a83c17aa9461d637e649271eae18bf2edd00e91f2e105df36c3c16355bdb" -dependencies = [ - "data-encoding", - "data-encoding-macro-internal", -] - -[[package]] -name = "data-encoding-macro-internal" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab67060fc6b8ef687992d439ca0fa36e7ed17e9a0b16b25b601e8757df720de" -dependencies = [ - "data-encoding", - "syn 1.0.109", -] - -[[package]] -name = "debug-helper" -version = "0.3.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f578e8e2c440e7297e008bb5486a3a8a194775224bbc23729b0dbdfaeebf162e" - -[[package]] -name = "delay_map" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" -dependencies = [ - 
"futures", - "tokio", - "tokio-util", -] - -[[package]] -name = "der" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" -dependencies = [ - "const-oid", - "zeroize", -] - -[[package]] -name = "deranged" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" -dependencies = [ - "powerfmt", - "serde_core", -] - -[[package]] -name = "derivative" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive-where" -version = "1.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_builder" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" -dependencies = [ - "derive_builder_macro", -] - -[[package]] -name = "derive_builder_core" -version = "0.20.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_builder_macro" -version = "0.20.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" -dependencies = [ - "derive_builder_core", - "syn 2.0.114", -] - -[[package]] -name = "derive_more" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" -dependencies = [ - "derive_more-impl", -] - -[[package]] -name = "derive_more-impl" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" -dependencies = [ - "convert_case", - "proc-macro2", - "quote", - "rustc_version 0.4.1", - "syn 2.0.114", - "unicode-xid", -] - -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array", -] - -[[package]] -name = "digest" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" -dependencies = [ - "block-buffer", - "const-oid", - "crypto-common", - "subtle", -] - -[[package]] -name = "dirs" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs-next" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" -dependencies = [ - "cfg-if", - "dirs-sys-next", -] - -[[package]] -name = 
"dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users 0.5.2", - "windows-sys 0.61.2", -] - -[[package]] -name = "dirs-sys-next" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" -dependencies = [ - "libc", - "redox_users 0.4.6", - "winapi", -] - -[[package]] -name = "discv5" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f170f4f6ed0e1df52bf43b403899f0081917ecf1500bfe312505cc3b515a8899" -dependencies = [ - "aes", - "aes-gcm", - "alloy-rlp", - "arrayvec", - "ctr", - "delay_map", - "enr", - "fnv", - "futures", - "hashlink", - "hex", - "hkdf", - "lazy_static", - "libp2p-identity", - "lru 0.12.5", - "more-asserts", - "multiaddr", - "parking_lot", - "rand 0.8.5", - "smallvec", - "socket2 0.5.10", - "tokio", - "tracing", - "uint 0.10.0", - "zeroize", -] - -[[package]] -name = "displaydoc" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "doctest-file" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" - -[[package]] -name = "document-features" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" -dependencies = [ - "litrs", -] - -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - -[[package]] -name = "dyn-clone" -version = "1.0.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" - -[[package]] -name = "dynify" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81acb15628a3e22358bf73de5e7e62360b8a777dbcb5fc9ac7dfa9ae73723747" -dependencies = [ - "dynify-macros", -] - -[[package]] -name = "dynify-macros" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec431cd708430d5029356535259c5d645d60edd3d39c54e5eea9782d46caa7d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "ecdsa" -version = "0.16.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" -dependencies = [ - "der", - "digest 0.10.7", - "elliptic-curve", - "rfc6979", - "serdect", - "signature", - "spki", -] - -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "pkcs8", - "signature", -] - -[[package]] -name = "ed25519-dalek" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" -dependencies = [ - "curve25519-dalek", - "ed25519", - "rand_core 0.6.4", - "serde", - "sha2", - "subtle", - "zeroize", -] - -[[package]] -name = "educe" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "either" -version = "1.15.0" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -dependencies = [ - "serde", -] - -[[package]] -name = "elliptic-curve" -version = "0.13.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" -dependencies = [ - "base16ct", - "crypto-bigint", - "digest 0.10.7", - "ff", - "generic-array", - "group", - "pkcs8", - "rand_core 0.6.4", - "sec1", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "enr" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "851bd664a3d3a3c175cff92b2f0df02df3c541b4895d0ae307611827aae46152" -dependencies = [ - "alloy-rlp", - "base64 0.22.1", - "bytes", - "ed25519-dalek", - "hex", - "k256", - "log", - "rand 0.8.5", - "secp256k1 0.30.0", - "serde", - "sha3", - "zeroize", -] - -[[package]] -name = "enum-as-inner" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "enum-ordinalize" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" -dependencies = [ - "enum-ordinalize-derive", -] - -[[package]] -name = "enum-ordinalize-derive" -version = "4.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "equator" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" -dependencies = [ - "equator-macro", -] - 
-[[package]] -name = "equator-macro" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "equivalent" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" - -[[package]] -name = "errno" -version = "0.3.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" -dependencies = [ - "libc", - "windows-sys 0.61.2", -] - -[[package]] -name = "ethereum_hashing" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" -dependencies = [ - "cpufeatures", - "ring", - "sha2", -] - -[[package]] -name = "ethereum_serde_utils" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" -dependencies = [ - "alloy-primitives", - "hex", - "serde", - "serde_derive", - "serde_json", -] - -[[package]] -name = "ethereum_ssz" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" -dependencies = [ - "alloy-primitives", - "ethereum_serde_utils", - "itertools 0.13.0", - "serde", - "serde_derive", - "smallvec", - "typenum", -] - -[[package]] -name = "ethereum_ssz_derive" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "eyre" -version = "0.6.12" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" -dependencies = [ - "indenter", - "once_cell", -] - -[[package]] -name = "fast-float2" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" - -[[package]] -name = "fastrand" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" - -[[package]] -name = "fastrlp" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "fastrlp" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", -] - -[[package]] -name = "fdlimit" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e182f7dbc2ef73d9ef67351c5fbbea084729c48362d3ce9dd44c28e32e277fe5" -dependencies = [ - "libc", - "thiserror 1.0.69", -] - -[[package]] -name = "ff" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" -dependencies = [ - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "fiat-crypto" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" - -[[package]] -name = "filetime" -version = "0.2.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" 
-dependencies = [ - "cfg-if", - "libc", - "libredox", -] - -[[package]] -name = "find-msvc-tools" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" - -[[package]] -name = "fixed-cache" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0aaafa7294e9617eb29e5c684a3af33324ef512a1bf596af2d1938a03798da29" -dependencies = [ - "equivalent", - "typeid", -] - -[[package]] -name = "fixed-hash" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" -dependencies = [ - "byteorder", - "rand 0.8.5", - "rustc-hex", - "static_assertions", -] - -[[package]] -name = "fixed-map" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ed19add84e8cb9e8cc5f7074de0324247149ffef0b851e215fb0edc50c229b" -dependencies = [ - "fixed-map-derive", - "serde", -] - -[[package]] -name = "fixed-map-derive" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dc7a9cb3326bafb80642c5ce99b39a2c0702d4bfa8ee8a3e773791a6cbe2407" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "fixedbitset" -version = "0.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" - -[[package]] -name = "flate2" -version = "1.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" -dependencies = [ - "crc32fast", - "miniz_oxide", -] - -[[package]] -name = "float16" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bffafbd079d520191c7c2779ae9cf757601266cf4167d3f659ff09617ff8483" -dependencies = [ - 
"cfg-if", - "rustc_version 0.2.3", -] - -[[package]] -name = "fnv" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" - -[[package]] -name = "foldhash" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" - -[[package]] -name = "foldhash" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" - -[[package]] -name = "form_urlencoded" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" -dependencies = [ - "percent-encoding", -] - -[[package]] -name = "fsevent-sys" -version = "4.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" -dependencies = [ - "libc", -] - -[[package]] -name = "funty" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" - -[[package]] -name = "futures" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" -dependencies = [ - "futures-channel", - "futures-core", - "futures-executor", - "futures-io", - "futures-sink", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-channel" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" -dependencies = [ - "futures-core", - "futures-sink", -] - -[[package]] -name = "futures-concurrency" -version = "7.7.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "175cd8cca9e1d45b87f18ffa75088f2099e3c4fe5e2f83e42de112560bea8ea6" -dependencies = [ - "fixedbitset", - "futures-core", - "futures-lite", - "pin-project", - "smallvec", -] - -[[package]] -name = "futures-core" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" - -[[package]] -name = "futures-executor" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" -dependencies = [ - "futures-core", - "futures-task", - "futures-util", -] - -[[package]] -name = "futures-io" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" - -[[package]] -name = "futures-lite" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" -dependencies = [ - "fastrand", - "futures-core", - "futures-io", - "parking", - "pin-project-lite", -] - -[[package]] -name = "futures-macro" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "futures-sink" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" - -[[package]] -name = "futures-task" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" - -[[package]] -name = "futures-timer" -version = "3.0.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" -dependencies = [ - "gloo-timers", - "send_wrapper 0.4.0", -] - -[[package]] -name = "futures-util" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" -dependencies = [ - "futures-channel", - "futures-core", - "futures-io", - "futures-macro", - "futures-sink", - "futures-task", - "memchr", - "pin-project-lite", - "pin-utils", - "slab", -] - -[[package]] -name = "futures-utils-wasm" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" - -[[package]] -name = "generator" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52f04ae4152da20c76fe800fa48659201d5cf627c5149ca0b707b69d7eef6cf9" -dependencies = [ - "cc", - "cfg-if", - "libc", - "log", - "rustversion", - "windows-link", - "windows-result 0.4.1", -] - -[[package]] -name = "generic-array" -version = "0.14.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" -dependencies = [ - "typenum", - "version_check", - "zeroize", -] - -[[package]] -name = "getrandom" -version = "0.2.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "wasi", - "wasm-bindgen", -] - -[[package]] -name = "getrandom" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" -dependencies = [ - "cfg-if", - "js-sys", - "libc", - "r-efi", - "wasip2", - "wasm-bindgen", -] - -[[package]] -name = "ghash" 
-version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" -dependencies = [ - "opaque-debug", - "polyval", -] - -[[package]] -name = "gimli" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" - -[[package]] -name = "git2" -version = "0.20.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e2b37e2f62729cdada11f0e6b3b6fe383c69c29fc619e391223e12856af308c" -dependencies = [ - "bitflags 2.10.0", - "libc", - "libgit2-sys", - "log", - "url", -] - -[[package]] -name = "glob" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" - -[[package]] -name = "gloo-net" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" -dependencies = [ - "futures-channel", - "futures-core", - "futures-sink", - "gloo-utils", - "http", - "js-sys", - "pin-project", - "serde", - "serde_json", - "thiserror 1.0.69", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "gloo-timers" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "gloo-utils" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" -dependencies = [ - "js-sys", - "serde", - "serde_json", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "group" -version = "0.13.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" -dependencies = [ - "ff", - "rand_core 0.6.4", - "subtle", -] - -[[package]] -name = "h2" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http", - "indexmap 2.13.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "hash-db" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" - -[[package]] -name = "hashbrown" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" - -[[package]] -name = "hashbrown" -version = "0.14.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", -] - -[[package]] -name = "hashbrown" -version = "0.15.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash 0.1.5", -] - -[[package]] -name = "hashbrown" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" -dependencies = [ - "allocator-api2", - "equivalent", - "foldhash 0.2.0", - "serde", - "serde_core", -] - -[[package]] -name = 
"hashlink" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" -dependencies = [ - "hashbrown 0.14.5", -] - -[[package]] -name = "hdrhistogram" -version = "7.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" -dependencies = [ - "byteorder", - "num-traits", -] - -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - -[[package]] -name = "hermit-abi" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" - -[[package]] -name = "hex" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" - -[[package]] -name = "hex-conservative" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" -dependencies = [ - "arrayvec", -] - -[[package]] -name = "hickory-proto" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" -dependencies = [ - "async-trait", - "cfg-if", - "data-encoding", - "enum-as-inner", - "futures-channel", - "futures-io", - "futures-util", - "idna", - "ipnet", - "once_cell", - "rand 0.9.2", - "ring", - "serde", - "thiserror 2.0.18", - "tinyvec", - "tokio", - "tracing", - "url", -] - -[[package]] -name = "hickory-resolver" -version = "0.25.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a" 
-dependencies = [ - "cfg-if", - "futures-util", - "hickory-proto", - "ipconfig", - "moka", - "once_cell", - "parking_lot", - "rand 0.9.2", - "resolv-conf", - "serde", - "smallvec", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "hkdf" -version = "0.12.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" -dependencies = [ - "hmac", -] - -[[package]] -name = "hmac" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" -dependencies = [ - "digest 0.10.7", -] - -[[package]] -name = "http" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" -dependencies = [ - "bytes", - "itoa", -] - -[[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http", -] - -[[package]] -name = "http-body-util" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" -dependencies = [ - "bytes", - "futures-core", - "http", - "http-body", - "pin-project-lite", -] - -[[package]] -name = "http-range-header" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" - -[[package]] -name = "httparse" -version = "1.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" - -[[package]] -name = "httpdate" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" - -[[package]] -name = "human_bytes" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e" - -[[package]] -name = "humantime" -version = "2.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" - -[[package]] -name = "humantime-serde" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" -dependencies = [ - "humantime", - "serde", -] - -[[package]] -name = "hyper" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" -dependencies = [ - "atomic-waker", - "bytes", - "futures-channel", - "futures-core", - "h2", - "http", - "http-body", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "pin-utils", - "smallvec", - "tokio", - "want", -] - -[[package]] -name = "hyper-rustls" -version = "0.27.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" -dependencies = [ - "http", - "hyper", - "hyper-util", - "log", - "rustls", - "rustls-native-certs", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tower-service", - "webpki-roots 1.0.5", -] - -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "hyper", - "ipnet", - "libc", - "percent-encoding", - "pin-project-lite", - "socket2 0.6.2", - "tokio", - "tower-service", - "tracing", -] - -[[package]] -name = "iana-time-zone" -version = "0.1.65" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" -dependencies = [ - "android_system_properties", - "core-foundation-sys", - "iana-time-zone-haiku", - "js-sys", - "log", - "wasm-bindgen", - "windows-core 0.62.2", -] - -[[package]] -name = "iana-time-zone-haiku" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" -dependencies = [ - "cc", -] - -[[package]] -name = "icu_collections" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" -dependencies = [ - "displaydoc", - "potential_utf", - "yoke", - "zerofrom", - "zerovec", -] - -[[package]] -name = "icu_locale_core" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" -dependencies = [ - "displaydoc", - "litemap", - "serde", - "tinystr", - "writeable", - "zerovec", -] - -[[package]] -name = "icu_normalizer" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b24a59706036ba941c9476a55cd57b82b77f38a3c667d637ee7cabbc85eaedc" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_normalizer_data", - "icu_properties", - "icu_provider", - "smallvec", - "utf16_iter", - "write16", - "zerovec", -] - -[[package]] -name = "icu_normalizer_data" -version = "2.0.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" - -[[package]] -name = "icu_properties" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5a97b8ac6235e69506e8dacfb2adf38461d2ce6d3e9bd9c94c4cbc3cd4400a4" -dependencies = [ - "displaydoc", - "icu_collections", - "icu_locale_core", - "icu_properties_data", - "icu_provider", - "potential_utf", - "zerotrie", - "zerovec", -] - -[[package]] -name = "icu_properties_data" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" - -[[package]] -name = "icu_provider" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" -dependencies = [ - "displaydoc", - "icu_locale_core", - "serde", - "stable_deref_trait", - "writeable", - "yoke", - "zerofrom", - "zerotrie", - "zerovec", -] - -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - -[[package]] -name = "idna" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" -dependencies = [ - "idna_adapter", - "smallvec", - "utf8_iter", -] - -[[package]] -name = "idna_adapter" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" -dependencies = [ - "icu_normalizer", - "icu_properties", -] - -[[package]] -name = "if-addrs" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bf39cc0423ee66021dc5eccface85580e4a001e0c5288bae8bea7ecb69225e90" -dependencies = [ - "libc", - "windows-sys 0.59.0", -] - -[[package]] -name = "impl-codec" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" -dependencies = [ - "parity-scale-codec", -] - -[[package]] -name = "impl-trait-for-tuples" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "include_dir" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" -dependencies = [ - "include_dir_macros", -] - -[[package]] -name = "include_dir_macros" -version = "0.7.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "indenter" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" - -[[package]] -name = "indexmap" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" -dependencies = [ - "autocfg", - "hashbrown 0.12.3", - "serde", -] - -[[package]] -name = "indexmap" -version = "2.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" -dependencies = [ - "arbitrary", - "equivalent", - "hashbrown 0.16.1", - "serde", - "serde_core", -] - -[[package]] -name = "indoc" -version = "2.0.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" -dependencies = [ - "rustversion", -] - -[[package]] -name = "inotify" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" -dependencies = [ - "bitflags 2.10.0", - "inotify-sys", - "libc", -] - -[[package]] -name = "inotify-sys" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" -dependencies = [ - "libc", -] - -[[package]] -name = "inout" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" -dependencies = [ - "block-padding", - "generic-array", -] - -[[package]] -name = "instability" -version = "0.3.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357b7205c6cd18dd2c86ed312d1e70add149aea98e7ef72b9fdf0270e555c11d" -dependencies = [ - "darling 0.23.0", - "indoc", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "interprocess" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" -dependencies = [ - "doctest-file", - "futures-core", - "libc", - "recvmsg", - "tokio", - "widestring", - "windows-sys 0.52.0", -] - -[[package]] -name = "intrusive-collections" -version = "0.9.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86" -dependencies = [ - "memoffset", -] - -[[package]] -name = "ipconfig" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" -dependencies = [ - "socket2 0.5.10", - "widestring", - "windows-sys 0.48.0", - "winreg", -] - -[[package]] -name = "ipnet" -version = "2.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" - -[[package]] -name = "iri-string" -version = "0.7.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" -dependencies = [ - "memchr", - "serde", -] - -[[package]] -name = "is_terminal_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" - -[[package]] -name = "itertools" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - -[[package]] -name = "itertools" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" -dependencies = [ - "either", -] - -[[package]] -name = "itoa" -version = "1.0.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" - -[[package]] -name = "jemalloc_pprof" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74ff642505c7ce8d31c0d43ec0e235c6fd4585d9b8172d8f9dd04d36590200b5" -dependencies = [ - "anyhow", - "libc", - "mappings", - "once_cell", - "pprof_util", - 
"tempfile", - "tikv-jemalloc-ctl", - "tokio", - "tracing", -] - -[[package]] -name = "jni" -version = "0.21.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" -dependencies = [ - "cesu8", - "cfg-if", - "combine", - "jni-sys", - "log", - "thiserror 1.0.69", - "walkdir", - "windows-sys 0.45.0", -] - -[[package]] -name = "jni-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" - -[[package]] -name = "jobserver" -version = "0.1.34" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" -dependencies = [ - "getrandom 0.3.4", - "libc", -] - -[[package]] -name = "js-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" -dependencies = [ - "once_cell", - "wasm-bindgen", -] - -[[package]] -name = "jsonrpsee" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-http-client", - "jsonrpsee-proc-macros", - "jsonrpsee-server", - "jsonrpsee-types", - "jsonrpsee-wasm-client", - "jsonrpsee-ws-client", - "tokio", - "tracing", -] - -[[package]] -name = "jsonrpsee-client-transport" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" -dependencies = [ - "base64 0.22.1", - "futures-channel", - "futures-util", - "gloo-net", - "http", - "jsonrpsee-core", - "pin-project", - "rustls", - "rustls-pki-types", - "rustls-platform-verifier", - "soketto", - "thiserror 2.0.18", - 
"tokio", - "tokio-rustls", - "tokio-util", - "tracing", - "url", -] - -[[package]] -name = "jsonrpsee-core" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" -dependencies = [ - "async-trait", - "bytes", - "futures-timer", - "futures-util", - "http", - "http-body", - "http-body-util", - "jsonrpsee-types", - "parking_lot", - "pin-project", - "rand 0.9.2", - "rustc-hash", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tower", - "tracing", - "wasm-bindgen-futures", -] - -[[package]] -name = "jsonrpsee-http-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" -dependencies = [ - "base64 0.22.1", - "http-body", - "hyper", - "hyper-rustls", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "rustls", - "rustls-platform-verifier", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tower", - "url", -] - -[[package]] -name = "jsonrpsee-proc-macros" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da3f8ab5ce1bb124b6d082e62dffe997578ceaf0aeb9f3174a214589dc00f07" -dependencies = [ - "heck", - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "jsonrpsee-server" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" -dependencies = [ - "futures-util", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-util", - "jsonrpsee-core", - "jsonrpsee-types", - "pin-project", - "route-recognizer", - "serde", - "serde_json", - "soketto", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tower", - "tracing", -] - -[[package]] -name = "jsonrpsee-types" -version = "0.26.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" -dependencies = [ - "http", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "jsonrpsee-wasm-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" -dependencies = [ - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower", -] - -[[package]] -name = "jsonrpsee-ws-client" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" -dependencies = [ - "http", - "jsonrpsee-client-transport", - "jsonrpsee-core", - "jsonrpsee-types", - "tower", - "url", -] - -[[package]] -name = "jsonwebtoken" -version = "9.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" -dependencies = [ - "base64 0.22.1", - "js-sys", - "pem", - "ring", - "serde", - "serde_json", - "simple_asn1", -] - -[[package]] -name = "k256" -version = "0.13.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" -dependencies = [ - "cfg-if", - "ecdsa", - "elliptic-curve", - "once_cell", - "serdect", - "sha2", - "signature", -] - -[[package]] -name = "keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "keccak-asm" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b646a74e746cd25045aa0fd42f4f7f78aa6d119380182c7e63a5593c4ab8df6f" -dependencies = [ - "digest 0.10.7", - "sha3-asm", -] - 
-[[package]] -name = "kqueue" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" -dependencies = [ - "kqueue-sys", - "libc", -] - -[[package]] -name = "kqueue-sys" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" -dependencies = [ - "bitflags 1.3.2", - "libc", -] - -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - -[[package]] -name = "libc" -version = "0.2.180" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" - -[[package]] -name = "libgit2-sys" -version = "0.18.3+1.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" -dependencies = [ - "cc", - "libc", - "libz-sys", - "pkg-config", -] - -[[package]] -name = "libloading" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" -dependencies = [ - "cfg-if", - "windows-link", -] - -[[package]] -name = "libm" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" - -[[package]] -name = "libp2p-identity" -version = "0.2.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c7892c221730ba55f7196e98b0b8ba5e04b4155651736036628e9f73ed6fc3" -dependencies = [ - "asn1_der", - "bs58", - "ed25519-dalek", - "hkdf", - "k256", - "multihash", - "quick-protobuf", - "sha2", - "thiserror 2.0.18", - "tracing", - "zeroize", -] - 
-[[package]] -name = "libproc" -version = "0.14.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a54ad7278b8bc5301d5ffd2a94251c004feb971feba96c971ea4063645990757" -dependencies = [ - "bindgen 0.72.1", - "errno", - "libc", -] - -[[package]] -name = "libredox" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" -dependencies = [ - "bitflags 2.10.0", - "libc", - "redox_syscall 0.7.0", -] - -[[package]] -name = "librocksdb-sys" -version = "0.17.3+10.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cef2a00ee60fe526157c9023edab23943fae1ce2ab6f4abb2a807c1746835de9" -dependencies = [ - "bindgen 0.72.1", - "bzip2-sys", - "cc", - "libc", - "libz-sys", - "lz4-sys", - "tikv-jemalloc-sys", - "zstd-sys", -] - -[[package]] -name = "libz-sys" -version = "1.1.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - -[[package]] -name = "linked_hash_set" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "984fb35d06508d1e69fc91050cceba9c0b748f983e6739fa2c7a9237154c52c8" -dependencies = [ - "linked-hash-map", - "serde_core", -] - -[[package]] -name = "linux-raw-sys" -version = "0.4.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" - -[[package]] -name = "linux-raw-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" - -[[package]] -name = "litemap" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" - -[[package]] -name = "litrs" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" - -[[package]] -name = "lock_api" -version = "0.4.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" -dependencies = [ - "scopeguard", - "serde", -] - -[[package]] -name = "log" -version = "0.4.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" - -[[package]] -name = "loom" -version = "0.7.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" -dependencies = [ - "cfg-if", - "generator", - "scoped-tls", - "tracing", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "lru" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" -dependencies = [ - "hashbrown 0.15.5", -] - -[[package]] -name = "lru" -version = "0.16.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" -dependencies = [ - "hashbrown 0.16.1", -] - -[[package]] -name = "lru-slab" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" - -[[package]] -name = "lz4" -version = "1.28.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" -dependencies = [ - "lz4-sys", -] - -[[package]] -name = "lz4-sys" -version = "1.11.1+lz4-1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "lz4_flex" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a" - -[[package]] -name = "mach2" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea" -dependencies = [ - "libc", -] - -[[package]] -name = "macro-string" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "mappings" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d277bb50d4508057e7bddd7fcd19ef4a4cc38051b6a5a36868d75ae2cbeb9" -dependencies = [ - "anyhow", - "libc", - "once_cell", - "pprof_util", - "tracing", -] - -[[package]] -name = "match-lookup" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "757aee279b8bdbb9f9e676796fd459e4207a1f986e87886700abf589f5abf771" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "matchers" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" -dependencies = [ - "regex-automata", -] - -[[package]] -name = "memchr" -version = "2.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" - -[[package]] -name = "memmap2" -version = "0.9.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" -dependencies = [ - "libc", -] - -[[package]] -name = "memoffset" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" -dependencies = [ - "autocfg", -] - -[[package]] -name = "metrics" -version = "0.24.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d5312e9ba3771cfa961b585728215e3d972c950a3eed9252aa093d6301277e8" -dependencies = [ - "ahash", - "portable-atomic", -] - -[[package]] -name = "metrics-derive" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3dbdd96ed57d565ec744cba02862d707acf373c5772d152abae6ec5c4e24f6c" -dependencies = [ - "proc-macro2", - "quote", - "regex", - "syn 2.0.114", -] - -[[package]] -name = "metrics-exporter-prometheus" -version = "0.18.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3589659543c04c7dc5526ec858591015b87cd8746583b51b48ef4353f99dbcda" -dependencies = [ - "base64 0.22.1", - "indexmap 2.13.0", - "metrics", - "metrics-util", - "quanta", - "thiserror 2.0.18", -] - -[[package]] -name = "metrics-process" -version = "2.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f615e08e049bd14a44c4425415782efb9bcd479fc1e19ddeb971509074c060d0" -dependencies = [ - "libc", - "libproc", - "mach2", - "metrics", - "once_cell", - "procfs 0.18.0", - "rlimit", - "windows 0.62.2", -] - -[[package]] -name = "metrics-util" -version = "0.20.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdfb1365fea27e6dd9dc1dbc19f570198bc86914533ad639dae939635f096be4" -dependencies = [ - "crossbeam-epoch", - 
"crossbeam-utils", - "hashbrown 0.16.1", - "metrics", - "quanta", - "rand 0.9.2", - "rand_xoshiro", - "sketches-ddsketch", -] - -[[package]] -name = "mime" -version = "0.3.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" - -[[package]] -name = "mime_guess" -version = "2.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" -dependencies = [ - "mime", - "unicase", -] - -[[package]] -name = "minimal-lexical" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" - -[[package]] -name = "miniz_oxide" -version = "0.8.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" -dependencies = [ - "adler2", - "serde", - "simd-adler32", -] - -[[package]] -name = "mio" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" -dependencies = [ - "libc", - "log", - "wasi", - "windows-sys 0.61.2", -] - -[[package]] -name = "modular-bitfield" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a53d79ba8304ac1c4f9eb3b9d281f21f7be9d4626f72ce7df4ad8fbde4f38a74" -dependencies = [ - "modular-bitfield-impl", - "static_assertions", -] - -[[package]] -name = "modular-bitfield-impl" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "moka" -version = "0.12.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e" -dependencies = [ - "crossbeam-channel", - "crossbeam-epoch", - "crossbeam-utils", - "equivalent", - "parking_lot", - "portable-atomic", - "smallvec", - "tagptr", - "uuid", -] - -[[package]] -name = "more-asserts" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" - -[[package]] -name = "multiaddr" -version = "0.18.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" -dependencies = [ - "arrayref", - "byteorder", - "data-encoding", - "libp2p-identity", - "multibase", - "multihash", - "percent-encoding", - "serde", - "static_assertions", - "unsigned-varint", - "url", -] - -[[package]] -name = "multibase" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" -dependencies = [ - "base-x", - "base256emoji", - "data-encoding", - "data-encoding-macro", -] - -[[package]] -name = "multihash" -version = "0.19.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b430e7953c29dd6a09afc29ff0bb69c6e306329ee6794700aee27b76a1aea8d" -dependencies = [ - "core2", - "unsigned-varint", -] - -[[package]] -name = "nom" -version = "7.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" -dependencies = [ - "memchr", - "minimal-lexical", -] - -[[package]] -name = "notify" -version = "8.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" -dependencies = [ - "bitflags 2.10.0", - "fsevent-sys", - "inotify", - "kqueue", - "libc", - "log", - "mio", - "notify-types", - "walkdir", - "windows-sys 
0.60.2", -] - -[[package]] -name = "notify-types" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "ntapi" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c70f219e21142367c70c0b30c6a9e3a14d55b4d12a204d897fbec83a0363f081" -dependencies = [ - "winapi", -] - -[[package]] -name = "nu-ansi-term" -version = "0.50.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "num" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" -dependencies = [ - "num-bigint", - "num-complex", - "num-integer", - "num-iter", - "num-rational", - "num-traits", -] - -[[package]] -name = "num-bigint" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" -dependencies = [ - "num-integer", - "num-traits", - "serde", -] - -[[package]] -name = "num-complex" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" -dependencies = [ - "num-traits", -] - -[[package]] -name = "num-conv" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" - -[[package]] -name = "num-integer" -version = "0.1.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" -dependencies = [ - "num-traits", -] - -[[package]] 
-name = "num-iter" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" -dependencies = [ - "autocfg", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-rational" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" -dependencies = [ - "num-bigint", - "num-integer", - "num-traits", -] - -[[package]] -name = "num-traits" -version = "0.2.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" -dependencies = [ - "autocfg", - "libm", -] - -[[package]] -name = "num_cpus" -version = "1.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" -dependencies = [ - "hermit-abi", - "libc", -] - -[[package]] -name = "num_enum" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" -dependencies = [ - "num_enum_derive", - "rustversion", -] - -[[package]] -name = "num_enum_derive" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "num_threads" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" -dependencies = [ - "libc", -] - -[[package]] -name = "nybbles" -version = "0.4.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5676b5c379cf5b03da1df2b3061c4a4e2aa691086a56ac923e08c143f53f59" 
-dependencies = [ - "alloy-rlp", - "arbitrary", - "cfg-if", - "proptest", - "ruint", - "serde", - "smallvec", -] - -[[package]] -name = "object" -version = "0.37.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" -dependencies = [ - "memchr", -] - -[[package]] -name = "once_cell" -version = "1.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -dependencies = [ - "critical-section", - "portable-atomic", -] - -[[package]] -name = "once_cell_polyfill" -version = "1.70.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" - -[[package]] -name = "op-alloy" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9b8fee21003dd4f076563de9b9d26f8c97840157ef78593cd7f262c5ca99848" -dependencies = [ - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-provider", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-consensus" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "736381a95471d23e267263cfcee9e1d96d30b9754a94a2819148f83379de8a86" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "arbitrary", - "derive_more", - "serde", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "op-alloy-flz" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a79f352fc3893dcd670172e615afef993a41798a1d3fc0db88a3e60ef2e70ecc" - -[[package]] -name = "op-alloy-network" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4034183dca6bff6632e7c24c92e75ff5f0eabb58144edb4d8241814851334d47" -dependencies = [ - "alloy-consensus", - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-eth", - "alloy-signer", - "op-alloy-consensus", - "op-alloy-rpc-types", -] - -[[package]] -name = "op-alloy-provider" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6753d90efbaa8ea8bcb89c1737408ca85fa60d7adb875049d3f382c063666f86" -dependencies = [ - "alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-transport", - "async-trait", - "op-alloy-rpc-types-engine", -] - -[[package]] -name = "op-alloy-rpc-jsonrpsee" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1c820ef9c802ebc732281a940bfb6ac2345af4d9fff041cbb64b4b546676686" -dependencies = [ - "alloy-primitives", - "jsonrpsee", -] - -[[package]] -name = "op-alloy-rpc-types" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddd87c6b9e5b6eee8d6b76f41b04368dca0e9f38d83338e5b00e730c282098a4" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-network-primitives", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-serde", - "derive_more", - "op-alloy-consensus", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "op-alloy-rpc-types-engine" -version = "0.23.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77727699310a18cdeed32da3928c709e2704043b6584ed416397d5da65694efc" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-serde", - "derive_more", - "ethereum_ssz", - "ethereum_ssz_derive", - "op-alloy-consensus", - "serde", - "sha2", - "snap", - "thiserror 2.0.18", -] - -[[package]] -name = "op-reth" -version = "1.10.2" -dependencies = [ - "clap", - "reth-cli-util", - "reth-optimism-chainspec", - 
"reth-optimism-cli", - "reth-optimism-consensus", - "reth-optimism-evm", - "reth-optimism-forks", - "reth-optimism-node", - "reth-optimism-payload-builder", - "reth-optimism-primitives", - "reth-optimism-rpc", - "tracing", -] - -[[package]] -name = "op-revm" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79c92b75162c2ed1661849fa51683b11254a5b661798360a2c24be918edafd40" -dependencies = [ - "auto_impl", - "revm", - "serde", -] - -[[package]] -name = "opaque-debug" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" - -[[package]] -name = "openssl-probe" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" - -[[package]] -name = "opentelemetry" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0" -dependencies = [ - "futures-core", - "futures-sink", - "js-sys", - "pin-project-lite", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "opentelemetry-appender-tracing" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef6a1ac5ca3accf562b8c306fa8483c85f4390f768185ab775f242f7fe8fdcc2" -dependencies = [ - "opentelemetry", - "tracing", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "opentelemetry-http" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" -dependencies = [ - "async-trait", - "bytes", - "http", - "opentelemetry", - "reqwest", -] - -[[package]] -name = "opentelemetry-otlp" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf" -dependencies = [ - "http", - "opentelemetry", - "opentelemetry-http", - "opentelemetry-proto", - "opentelemetry_sdk", - "prost 0.14.3", - "reqwest", - "thiserror 2.0.18", - "tokio", - "tonic", - "tracing", -] - -[[package]] -name = "opentelemetry-proto" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f" -dependencies = [ - "opentelemetry", - "opentelemetry_sdk", - "prost 0.14.3", - "tonic", - "tonic-prost", -] - -[[package]] -name = "opentelemetry-semantic-conventions" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e62e29dfe041afb8ed2a6c9737ab57db4907285d999ef8ad3a59092a36bdc846" - -[[package]] -name = "opentelemetry_sdk" -version = "0.31.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd" -dependencies = [ - "futures-channel", - "futures-executor", - "futures-util", - "opentelemetry", - "percent-encoding", - "rand 0.9.2", - "thiserror 2.0.18", -] - -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "p256" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" -dependencies = [ - "ecdsa", - "elliptic-curve", - "primeorder", - "sha2", -] - -[[package]] -name = "page_size" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "parity-scale-codec" -version = "3.7.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" -dependencies = [ - "arbitrary", - "arrayvec", - "bitvec", - "byte-slice-cast", - "bytes", - "const_format", - "impl-trait-for-tuples", - "parity-scale-codec-derive", - "rustversion", - "serde", -] - -[[package]] -name = "parity-scale-codec-derive" -version = "3.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" -dependencies = [ - "proc-macro-crate", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "parking" -version = "2.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" - -[[package]] -name = "parking_lot" -version = "0.12.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" -dependencies = [ - "lock_api", - "parking_lot_core", -] - -[[package]] -name = "parking_lot_core" -version = "0.9.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" -dependencies = [ - "cfg-if", - "libc", - "redox_syscall 0.5.18", - "smallvec", - "windows-link", -] - -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - -[[package]] -name = "pbkdf2" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" -dependencies = [ - "digest 0.10.7", - "hmac", -] - -[[package]] -name = "pem" -version = "3.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" -dependencies = [ - "base64 0.22.1", - "serde_core", -] - -[[package]] -name = "percent-encoding" -version = "2.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" - -[[package]] -name = "pest" -version = "2.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" -dependencies = [ - "memchr", - "ucd-trie", -] - -[[package]] -name = "pharos" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" -dependencies = [ - "futures", - "rustc_version 0.4.1", -] - -[[package]] -name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros", - "phf_shared", - "serde", -] - -[[package]] -name = "phf_generator" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator", - "phf_shared", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] - -[[package]] -name = "pin-project" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" -dependencies = [ - "pin-project-internal", -] - -[[package]] -name = "pin-project-internal" -version = "1.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "pin-project-lite" -version = "0.2.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" - -[[package]] -name = "pin-utils" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" - -[[package]] -name = "pkcs8" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" -dependencies = [ - "der", - "spki", -] - -[[package]] -name = "pkg-config" -version = "0.3.32" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" - -[[package]] -name = "plain_hasher" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e19e6491bdde87c2c43d70f4c194bc8a758f2eb732df00f61e43f7362e3b4cc" -dependencies = [ - "crunchy", -] - -[[package]] -name = "polyval" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" -dependencies = [ - "cfg-if", - "cpufeatures", - "opaque-debug", - "universal-hash", -] - -[[package]] -name = "portable-atomic" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" - -[[package]] -name = 
"potential_utf" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" -dependencies = [ - "zerovec", -] - -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "pprof_util" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4429d44e5e2c8a69399fc0070379201eed018e3df61e04eb7432811df073c224" -dependencies = [ - "anyhow", - "backtrace", - "flate2", - "num", - "paste", - "prost 0.13.5", -] - -[[package]] -name = "ppv-lite86" -version = "0.2.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" -dependencies = [ - "zerocopy", -] - -[[package]] -name = "pretty_assertions" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" -dependencies = [ - "diff", - "yansi", -] - -[[package]] -name = "primeorder" -version = "0.13.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" -dependencies = [ - "elliptic-curve", -] - -[[package]] -name = "primitive-types" -version = "0.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" -dependencies = [ - "fixed-hash", - "impl-codec", - "uint 0.9.5", -] - -[[package]] -name = "proc-macro-crate" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" -dependencies = [ - "toml_edit 0.23.10+spec-1.0.0", -] - -[[package]] -name = 
"proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "proc-macro2" -version = "1.0.106" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "procfs" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" -dependencies = [ - "bitflags 2.10.0", - "chrono", - "flate2", - "hex", - "procfs-core 0.17.0", - "rustix 0.38.44", -] - -[[package]] -name = "procfs" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25485360a54d6861439d60facef26de713b1e126bf015ec8f98239467a2b82f7" -dependencies = [ - "bitflags 2.10.0", - "procfs-core 0.18.0", - "rustix 1.1.3", -] - -[[package]] -name = "procfs-core" -version = "0.17.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" -dependencies = [ - "bitflags 2.10.0", - "chrono", - "hex", -] - -[[package]] -name = "procfs-core" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6401bf7b6af22f78b563665d15a22e9aef27775b79b149a66ca022468a4e405" -dependencies = [ - "bitflags 2.10.0", - "hex", -] - -[[package]] -name = "proptest" -version = "1.9.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" -dependencies = [ - "bit-set", - "bit-vec", - "bitflags 2.10.0", - "num-traits", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_xorshift", - "regex-syntax", - "rusty-fork", - "tempfile", - "unarray", -] - -[[package]] -name = "proptest-arbitrary-interop" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1981e49bd2432249da8b0e11e5557099a8e74690d6b94e721f7dc0bb7f3555f" -dependencies = [ - "arbitrary", - "proptest", -] - -[[package]] -name = "proptest-derive" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "proptest-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "prost" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" -dependencies = [ - "bytes", - "prost-derive 0.13.5", -] - -[[package]] -name = "prost" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" -dependencies = [ - "bytes", - "prost-derive 0.14.3", -] - -[[package]] -name = "prost-derive" -version = "0.13.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "prost-derive" 
-version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b" -dependencies = [ - "anyhow", - "itertools 0.14.0", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "quanta" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" -dependencies = [ - "crossbeam-utils", - "libc", - "once_cell", - "raw-cpuid", - "wasi", - "web-sys", - "winapi", -] - -[[package]] -name = "quick-error" -version = "1.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" - -[[package]] -name = "quick-protobuf" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" -dependencies = [ - "byteorder", -] - -[[package]] -name = "quinn" -version = "0.11.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" -dependencies = [ - "bytes", - "cfg_aliases", - "pin-project-lite", - "quinn-proto", - "quinn-udp", - "rustc-hash", - "rustls", - "socket2 0.6.2", - "thiserror 2.0.18", - "tokio", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-proto" -version = "0.11.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" -dependencies = [ - "bytes", - "getrandom 0.3.4", - "lru-slab", - "rand 0.9.2", - "ring", - "rustc-hash", - "rustls", - "rustls-pki-types", - "slab", - "thiserror 2.0.18", - "tinyvec", - "tracing", - "web-time", -] - -[[package]] -name = "quinn-udp" -version = "0.5.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" -dependencies = [ - "cfg_aliases", - "libc", - "once_cell", - "socket2 0.6.2", - "tracing", - "windows-sys 0.60.2", -] - -[[package]] -name = "quote" -version = "1.0.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "r-efi" -version = "5.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" - -[[package]] -name = "radium" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" - -[[package]] -name = "rand" -version = "0.8.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" -dependencies = [ - "libc", - "rand_chacha 0.3.1", - "rand_core 0.6.4", - "serde", -] - -[[package]] -name = "rand" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" -dependencies = [ - "rand_chacha 0.9.0", - "rand_core 0.9.5", - "serde", -] - -[[package]] -name = "rand_chacha" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" -dependencies = [ - "ppv-lite86", - "rand_core 0.6.4", -] - -[[package]] -name = "rand_chacha" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" -dependencies = [ - "ppv-lite86", - "rand_core 0.9.5", -] - -[[package]] -name = "rand_core" -version = "0.6.4" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" -dependencies = [ - "getrandom 0.2.17", -] - -[[package]] -name = "rand_core" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" -dependencies = [ - "getrandom 0.3.4", - "serde", -] - -[[package]] -name = "rand_xorshift" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" -dependencies = [ - "rand_core 0.9.5", -] - -[[package]] -name = "rand_xoshiro" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f703f4665700daf5512dcca5f43afa6af89f09db47fb56be587f80636bda2d41" -dependencies = [ - "rand_core 0.9.5", -] - -[[package]] -name = "rapidhash" -version = "4.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" -dependencies = [ - "rand 0.9.2", - "rustversion", -] - -[[package]] -name = "ratatui" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" -dependencies = [ - "bitflags 2.10.0", - "cassowary", - "compact_str", - "crossterm 0.28.1", - "indoc", - "instability", - "itertools 0.13.0", - "lru 0.12.5", - "paste", - "strum 0.26.3", - "unicode-segmentation", - "unicode-truncate", - "unicode-width 0.2.0", -] - -[[package]] -name = "raw-cpuid" -version = "11.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "rayon" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" -dependencies = [ - "either", - "rayon-core", -] - -[[package]] -name = "rayon-core" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" -dependencies = [ - "crossbeam-deque", - "crossbeam-utils", -] - -[[package]] -name = "recvmsg" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" - -[[package]] -name = "redox_syscall" -version = "0.5.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "redox_syscall" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "redox_users" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" -dependencies = [ - "getrandom 0.2.17", - "libredox", - "thiserror 1.0.69", -] - -[[package]] -name = "redox_users" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" -dependencies = [ - "getrandom 0.2.17", - "libredox", - "thiserror 2.0.18", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "regex" -version = "1.12.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" -dependencies = [ - "aho-corasick", - "memchr", - "regex-automata", - "regex-syntax", -] - -[[package]] -name = "regex-automata" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" -dependencies = [ - "aho-corasick", - "memchr", - "regex-syntax", -] - -[[package]] -name = "regex-syntax" -version = "0.8.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" - -[[package]] -name = "regress" -version = "0.10.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2057b2325e68a893284d1538021ab90279adac1139957ca2a74426c6f118fb48" -dependencies = [ - "hashbrown 0.16.1", - "memchr", -] - -[[package]] -name = "relative-path" -version = "1.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" - -[[package]] -name = "reqwest" -version = "0.12.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures-channel", - "futures-core", - "futures-util", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-rustls", - "hyper-util", - "js-sys", - "log", - "percent-encoding", - "pin-project-lite", - "quinn", - "rustls", - "rustls-native-certs", - "rustls-pki-types", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper", - "tokio", - "tokio-rustls", - 
"tokio-util", - "tower", - "tower-http", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "wasm-streams", - "web-sys", - "webpki-roots 1.0.5", -] - -[[package]] -name = "resolv-conf" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" - -[[package]] -name = "reth-basic-payload-builder" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "futures-core", - "futures-util", - "metrics", - "reth-chain-state", - "reth-metrics", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-revm", - "reth-storage-api", - "reth-tasks", - "tokio", - "tracing", -] - -[[package]] -name = "reth-chain-state" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-signer", - "alloy-signer-local", - "derive_more", - "metrics", - "parking_lot", - "pin-project", - "rand 0.9.2", - "rayon", - "reth-chainspec", - "reth-errors", - "reth-ethereum-primitives", - "reth-execution-types", - "reth-metrics", - "reth-primitives-traits", - "reth-storage-api", - "reth-trie", - "revm-database", - "revm-state", - "serde", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-chainspec" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-genesis", - "alloy-primitives", - "alloy-trie", - "auto_impl", - "derive_more", 
- "reth-ethereum-forks", - "reth-network-peers", - "reth-primitives-traits", - "serde_json", -] - -[[package]] -name = "reth-cli" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-genesis", - "clap", - "eyre", - "reth-cli-runner", - "reth-db", - "serde_json", - "shellexpand", -] - -[[package]] -name = "reth-cli-commands" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "backon", - "clap", - "comfy-table", - "crossterm 0.28.1", - "eyre", - "fdlimit", - "futures", - "human_bytes", - "humantime", - "itertools 0.14.0", - "lz4", - "metrics", - "proptest", - "proptest-arbitrary-interop", - "ratatui", - "reqwest", - "reth-chainspec", - "reth-cli", - "reth-cli-runner", - "reth-cli-util", - "reth-codecs", - "reth-config", - "reth-consensus", - "reth-db", - "reth-db-api", - "reth-db-common", - "reth-discv4", - "reth-discv5", - "reth-downloaders", - "reth-ecies", - "reth-era", - "reth-era-downloader", - "reth-era-utils", - "reth-eth-wire", - "reth-ethereum-primitives", - "reth-etl", - "reth-evm", - "reth-exex", - "reth-fs-util", - "reth-net-nat", - "reth-network", - "reth-network-p2p", - "reth-network-peers", - "reth-node-api", - "reth-node-builder", - "reth-node-core", - "reth-node-events", - "reth-node-metrics", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-prune-types", - "reth-revm", - "reth-stages", - "reth-stages-types", - "reth-static-file", - "reth-static-file-types", - "reth-storage-api", - "reth-tasks", - "reth-trie", - "reth-trie-common", - "reth-trie-db", - "secp256k1 0.30.0", - "serde", - "serde_json", - "tar", - "tokio", - "tokio-stream", - "toml", - "tracing", - "url", - 
"zstd", -] - -[[package]] -name = "reth-cli-runner" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "reth-tasks", - "tokio", - "tracing", -] - -[[package]] -name = "reth-cli-util" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "cfg-if", - "eyre", - "libc", - "rand 0.8.5", - "reth-fs-util", - "reth-tracing", - "secp256k1 0.30.0", - "serde", - "thiserror 2.0.18", - "tikv-jemallocator", - "tracy-client", -] - -[[package]] -name = "reth-codecs" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "alloy-trie", - "arbitrary", - "bytes", - "modular-bitfield", - "op-alloy-consensus", - "reth-codecs-derive", - "reth-zstd-compressors", - "serde", - "visibility", -] - -[[package]] -name = "reth-codecs-derive" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "reth-config" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "eyre", - "humantime-serde", - "reth-network-types", - "reth-prune-types", - "reth-stages-types", - "reth-static-file-types", - "serde", - "toml", - "url", -] - -[[package]] -name = "reth-consensus" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "auto_impl", - "reth-execution-types", - "reth-primitives-traits", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-consensus-common" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "reth-chainspec", - "reth-consensus", - "reth-primitives-traits", -] - -[[package]] -name = "reth-consensus-debug-client" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types-engine", - "alloy-transport", - "auto_impl", - "derive_more", - "eyre", - "futures", - "reqwest", - "reth-node-api", - "reth-primitives-traits", - "reth-tracing", - "ringbuffer", - "serde", - "serde_json", - "tokio", -] - -[[package]] -name = "reth-db" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "derive_more", - "eyre", - "metrics", - "page_size", - "parking_lot", - "reth-db-api", - "reth-fs-util", - "reth-libmdbx", - "reth-metrics", - "reth-nippy-jar", - "reth-static-file-types", - "reth-storage-errors", - "reth-tracing", - "rustc-hash", - "strum 0.27.2", - "sysinfo", - "tempfile", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-db-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-genesis", - 
"alloy-primitives", - "arbitrary", - "arrayvec", - "bytes", - "derive_more", - "metrics", - "modular-bitfield", - "op-alloy-consensus", - "parity-scale-codec", - "proptest", - "reth-codecs", - "reth-db-models", - "reth-ethereum-primitives", - "reth-primitives-traits", - "reth-prune-types", - "reth-stages-types", - "reth-storage-errors", - "reth-trie-common", - "roaring", - "serde", -] - -[[package]] -name = "reth-db-common" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-genesis", - "alloy-primitives", - "boyer-moore-magiclen", - "eyre", - "reth-chainspec", - "reth-codecs", - "reth-config", - "reth-db-api", - "reth-etl", - "reth-execution-errors", - "reth-fs-util", - "reth-node-types", - "reth-primitives-traits", - "reth-provider", - "reth-stages-types", - "reth-static-file-types", - "reth-trie", - "reth-trie-db", - "serde", - "serde_json", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "reth-db-models" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "arbitrary", - "bytes", - "modular-bitfield", - "reth-codecs", - "reth-primitives-traits", - "serde", -] - -[[package]] -name = "reth-discv4" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "discv5", - "enr", - "itertools 0.14.0", - "parking_lot", - "rand 0.8.5", - "reth-ethereum-forks", - "reth-net-banlist", - "reth-net-nat", - "reth-network-peers", - "schnellru", - "secp256k1 0.30.0", - "serde", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-discv5" -version = "1.10.2" 
-source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "derive_more", - "discv5", - "enr", - "futures", - "itertools 0.14.0", - "metrics", - "rand 0.9.2", - "reth-chainspec", - "reth-ethereum-forks", - "reth-metrics", - "reth-network-peers", - "secp256k1 0.30.0", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-dns-discovery" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "data-encoding", - "enr", - "hickory-resolver", - "linked_hash_set", - "parking_lot", - "reth-ethereum-forks", - "reth-network-peers", - "reth-tokio-util", - "schnellru", - "secp256k1 0.30.0", - "serde", - "serde_with", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-downloaders" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "async-compression", - "futures", - "futures-util", - "itertools 0.14.0", - "metrics", - "pin-project", - "rayon", - "reth-config", - "reth-consensus", - "reth-ethereum-primitives", - "reth-metrics", - "reth-network-p2p", - "reth-network-peers", - "reth-primitives-traits", - "reth-provider", - "reth-storage-api", - "reth-tasks", - "reth-testing-utils", - "tempfile", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-e2e-test-utils" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - 
"alloy-network", - "alloy-primitives", - "alloy-provider", - "alloy-rlp", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-signer", - "alloy-signer-local", - "derive_more", - "eyre", - "futures-util", - "jsonrpsee", - "reth-chainspec", - "reth-cli-commands", - "reth-config", - "reth-consensus", - "reth-db", - "reth-db-common", - "reth-engine-local", - "reth-engine-primitives", - "reth-ethereum-primitives", - "reth-network-api", - "reth-network-p2p", - "reth-network-peers", - "reth-node-api", - "reth-node-builder", - "reth-node-core", - "reth-node-ethereum", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-rpc-api", - "reth-rpc-builder", - "reth-rpc-eth-api", - "reth-rpc-server-types", - "reth-stages-types", - "reth-tasks", - "reth-tokio-util", - "reth-tracing", - "revm", - "serde_json", - "tempfile", - "tokio", - "tokio-stream", - "tracing", - "url", -] - -[[package]] -name = "reth-ecies" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "aes", - "alloy-primitives", - "alloy-rlp", - "block-padding", - "byteorder", - "cipher", - "concat-kdf", - "ctr", - "digest 0.10.7", - "futures", - "hmac", - "pin-project", - "rand 0.8.5", - "reth-network-peers", - "secp256k1 0.30.0", - "sha2", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-engine-local" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rpc-types-engine", - "eyre", - "futures-util", - "op-alloy-rpc-types-engine", - "reth-chainspec", - "reth-engine-primitives", - "reth-ethereum-engine-primitives", - 
"reth-payload-builder", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-storage-api", - "reth-transaction-pool", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-engine-primitives" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "auto_impl", - "futures", - "reth-chain-state", - "reth-errors", - "reth-ethereum-primitives", - "reth-evm", - "reth-execution-types", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-trie-common", - "serde", - "thiserror 2.0.18", - "tokio", -] - -[[package]] -name = "reth-engine-service" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "futures", - "pin-project", - "reth-chainspec", - "reth-consensus", - "reth-engine-primitives", - "reth-engine-tree", - "reth-ethereum-primitives", - "reth-evm", - "reth-network-p2p", - "reth-node-types", - "reth-payload-builder", - "reth-provider", - "reth-prune", - "reth-stages-api", - "reth-tasks", - "reth-trie-db", -] - -[[package]] -name = "reth-engine-tree" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eip7928", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "crossbeam-channel", - "dashmap", - "derive_more", - "fixed-cache", - "futures", - "metrics", - "moka", - "parking_lot", - "rayon", - "reth-chain-state", - "reth-chainspec", - "reth-consensus", - "reth-db", - "reth-engine-primitives", - "reth-errors", - "reth-ethereum-primitives", - "reth-evm", - 
"reth-execution-types", - "reth-metrics", - "reth-network-p2p", - "reth-payload-builder", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-prune-types", - "reth-revm", - "reth-stages", - "reth-stages-api", - "reth-static-file", - "reth-tasks", - "reth-tracing", - "reth-trie", - "reth-trie-common", - "reth-trie-db", - "reth-trie-parallel", - "reth-trie-sparse", - "reth-trie-sparse-parallel", - "revm", - "revm-primitives", - "schnellru", - "smallvec", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-engine-util" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-rpc-types-engine", - "eyre", - "futures", - "itertools 0.14.0", - "pin-project", - "reth-chainspec", - "reth-engine-primitives", - "reth-engine-tree", - "reth-errors", - "reth-evm", - "reth-fs-util", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-revm", - "reth-storage-api", - "serde", - "serde_json", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-era" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "ethereum_ssz", - "ethereum_ssz_derive", - "snap", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-era-downloader" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "bytes", - "eyre", - "futures-util", - "reqwest", - "reth-era", - "reth-fs-util", - "sha2", - "tokio", -] - -[[package]] -name = "reth-era-utils" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "eyre", - "futures-util", - "reth-db-api", - "reth-era", - "reth-era-downloader", - "reth-etl", - "reth-fs-util", - "reth-primitives-traits", - "reth-provider", - "reth-stages-types", - "reth-storage-api", - "tokio", - "tracing", -] - -[[package]] -name = "reth-errors" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "reth-consensus", - "reth-execution-errors", - "reth-storage-errors", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-eth-wire" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-chains", - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "bytes", - "derive_more", - "futures", - "pin-project", - "reth-codecs", - "reth-ecies", - "reth-eth-wire-types", - "reth-ethereum-forks", - "reth-metrics", - "reth-network-peers", - "reth-primitives-traits", - "serde", - "snap", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-eth-wire-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-hardforks", - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "bytes", - "derive_more", - "proptest", - "proptest-arbitrary-interop", - "reth-chainspec", - "reth-codecs-derive", - "reth-ethereum-primitives", - "reth-primitives-traits", - "serde", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-ethereum-consensus" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "reth-chainspec", - "reth-consensus", - "reth-consensus-common", - "reth-execution-types", - "reth-primitives-traits", - "tracing", -] - -[[package]] -name = "reth-ethereum-engine-primitives" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "reth-engine-primitives", - "reth-ethereum-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "serde", - "sha2", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-ethereum-forks" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eip2124", - "alloy-hardforks", - "alloy-primitives", - "arbitrary", - "auto_impl", - "once_cell", - "rustc-hash", -] - -[[package]] -name = "reth-ethereum-payload-builder" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-engine", - "reth-basic-payload-builder", - "reth-chainspec", - "reth-consensus-common", - "reth-errors", - "reth-ethereum-primitives", - "reth-evm", - "reth-evm-ethereum", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-payload-validator", - "reth-primitives-traits", - "reth-revm", - "reth-storage-api", - "reth-transaction-pool", - "revm", - "tracing", -] - -[[package]] -name = "reth-ethereum-primitives" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "arbitrary", - "modular-bitfield", - "reth-codecs", - "reth-primitives-traits", - "reth-zstd-compressors", - "serde", - "serde_with", -] - -[[package]] -name = "reth-etl" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "rayon", - "reth-db-api", - "tempfile", -] - -[[package]] -name = "reth-evm" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "auto_impl", - "derive_more", - "futures-util", - "metrics", - "rayon", - "reth-execution-errors", - "reth-execution-types", - "reth-metrics", - "reth-primitives-traits", - "reth-storage-api", - "reth-storage-errors", - "reth-trie-common", - "revm", -] - -[[package]] -name = "reth-evm-ethereum" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "alloy-rpc-types-engine", - "derive_more", - "parking_lot", - "reth-chainspec", - "reth-ethereum-forks", - "reth-ethereum-primitives", - "reth-evm", - "reth-execution-types", - "reth-primitives-traits", - "reth-storage-errors", - "revm", -] - -[[package]] -name = "reth-execution-errors" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-evm", - "alloy-primitives", - 
"alloy-rlp", - "nybbles", - "reth-storage-errors", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-execution-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "derive_more", - "reth-ethereum-primitives", - "reth-primitives-traits", - "reth-trie-common", - "revm", - "serde", - "serde_with", -] - -[[package]] -name = "reth-exex" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "eyre", - "futures", - "itertools 0.14.0", - "metrics", - "parking_lot", - "reth-chain-state", - "reth-chainspec", - "reth-config", - "reth-ethereum-primitives", - "reth-evm", - "reth-exex-types", - "reth-fs-util", - "reth-metrics", - "reth-node-api", - "reth-node-core", - "reth-payload-builder", - "reth-primitives-traits", - "reth-provider", - "reth-prune-types", - "reth-revm", - "reth-stages-api", - "reth-tasks", - "reth-tracing", - "rmp-serde", - "thiserror 2.0.18", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-exex-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "reth-chain-state", - "reth-execution-types", - "reth-primitives-traits", - "serde", - "serde_with", -] - -[[package]] -name = "reth-fs-util" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-invalid-block-hooks" -version = "1.10.2" 
-source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-debug", - "eyre", - "futures", - "jsonrpsee", - "pretty_assertions", - "reth-engine-primitives", - "reth-evm", - "reth-primitives-traits", - "reth-provider", - "reth-revm", - "reth-rpc-api", - "reth-tracing", - "reth-trie", - "revm", - "revm-bytecode", - "revm-database", - "serde", - "serde_json", -] - -[[package]] -name = "reth-ipc" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "bytes", - "futures", - "futures-util", - "interprocess", - "jsonrpsee", - "pin-project", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tower", - "tracing", -] - -[[package]] -name = "reth-libmdbx" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "bitflags 2.10.0", - "byteorder", - "dashmap", - "derive_more", - "parking_lot", - "reth-mdbx-sys", - "smallvec", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "reth-mdbx-sys" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "bindgen 0.71.1", - "cc", -] - -[[package]] -name = "reth-metrics" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "futures", - "metrics", - "metrics-derive", - "tokio", - "tokio-util", -] - -[[package]] -name = "reth-net-banlist" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "ipnet", -] - -[[package]] -name = "reth-net-nat" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "futures-util", - "if-addrs", - "reqwest", - "serde_with", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-network" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "aquamarine", - "auto_impl", - "derive_more", - "discv5", - "enr", - "futures", - "itertools 0.14.0", - "metrics", - "parking_lot", - "pin-project", - "rand 0.8.5", - "rand 0.9.2", - "rayon", - "reth-chainspec", - "reth-consensus", - "reth-discv4", - "reth-discv5", - "reth-dns-discovery", - "reth-ecies", - "reth-eth-wire", - "reth-eth-wire-types", - "reth-ethereum-forks", - "reth-ethereum-primitives", - "reth-evm-ethereum", - "reth-fs-util", - "reth-metrics", - "reth-net-banlist", - "reth-network-api", - "reth-network-p2p", - "reth-network-peers", - "reth-network-types", - "reth-primitives-traits", - "reth-storage-api", - "reth-tasks", - "reth-tokio-util", - "reth-transaction-pool", - "rustc-hash", - "schnellru", - "secp256k1 0.30.0", - "serde", - "smallvec", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-network-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rpc-types-admin", - "alloy-rpc-types-eth", - "auto_impl", - "derive_more", - 
"enr", - "futures", - "reth-eth-wire-types", - "reth-ethereum-forks", - "reth-network-p2p", - "reth-network-peers", - "reth-network-types", - "reth-tokio-util", - "serde", - "thiserror 2.0.18", - "tokio", - "tokio-stream", -] - -[[package]] -name = "reth-network-p2p" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "auto_impl", - "derive_more", - "futures", - "parking_lot", - "reth-consensus", - "reth-eth-wire-types", - "reth-ethereum-primitives", - "reth-network-peers", - "reth-network-types", - "reth-primitives-traits", - "reth-storage-errors", - "tokio", - "tracing", -] - -[[package]] -name = "reth-network-peers" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "enr", - "secp256k1 0.30.0", - "serde_with", - "thiserror 2.0.18", - "tokio", - "url", -] - -[[package]] -name = "reth-network-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eip2124", - "humantime-serde", - "reth-net-banlist", - "reth-network-peers", - "serde", - "serde_json", - "tracing", -] - -[[package]] -name = "reth-nippy-jar" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "anyhow", - "bincode", - "derive_more", - "lz4_flex", - "memmap2", - "reth-fs-util", - "serde", - "thiserror 2.0.18", - "tracing", - "zstd", -] - -[[package]] -name = "reth-node-api" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-rpc-types-engine", - "eyre", - "reth-basic-payload-builder", - "reth-consensus", - "reth-db-api", - "reth-engine-primitives", - "reth-evm", - "reth-network-api", - "reth-node-core", - "reth-node-types", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-provider", - "reth-tasks", - "reth-tokio-util", - "reth-transaction-pool", -] - -[[package]] -name = "reth-node-builder" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-provider", - "alloy-rpc-types", - "alloy-rpc-types-engine", - "aquamarine", - "eyre", - "fdlimit", - "futures", - "jsonrpsee", - "parking_lot", - "rayon", - "reth-basic-payload-builder", - "reth-chain-state", - "reth-chainspec", - "reth-config", - "reth-consensus", - "reth-consensus-debug-client", - "reth-db", - "reth-db-api", - "reth-db-common", - "reth-downloaders", - "reth-engine-local", - "reth-engine-primitives", - "reth-engine-service", - "reth-engine-tree", - "reth-engine-util", - "reth-evm", - "reth-exex", - "reth-fs-util", - "reth-invalid-block-hooks", - "reth-network", - "reth-network-api", - "reth-network-p2p", - "reth-node-api", - "reth-node-core", - "reth-node-ethstats", - "reth-node-events", - "reth-node-metrics", - "reth-payload-builder", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-rpc", - "reth-rpc-api", - "reth-rpc-builder", - "reth-rpc-engine-api", - "reth-rpc-eth-types", - "reth-rpc-layer", - "reth-stages", - "reth-static-file", - "reth-tasks", - "reth-tokio-util", - "reth-tracing", - "reth-transaction-pool", - "reth-trie-db", - "secp256k1 0.30.0", - "serde_json", - "tokio", - "tokio-stream", - "tracing", -] - 
-[[package]] -name = "reth-node-core" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "clap", - "derive_more", - "dirs-next", - "eyre", - "futures", - "humantime", - "ipnet", - "rand 0.9.2", - "reth-chainspec", - "reth-cli-util", - "reth-config", - "reth-consensus", - "reth-db", - "reth-discv4", - "reth-discv5", - "reth-engine-local", - "reth-engine-primitives", - "reth-ethereum-forks", - "reth-net-banlist", - "reth-net-nat", - "reth-network", - "reth-network-p2p", - "reth-network-peers", - "reth-primitives-traits", - "reth-prune-types", - "reth-rpc-convert", - "reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-stages-types", - "reth-storage-api", - "reth-storage-errors", - "reth-tracing", - "reth-tracing-otlp", - "reth-transaction-pool", - "secp256k1 0.30.0", - "serde", - "shellexpand", - "strum 0.27.2", - "thiserror 2.0.18", - "toml", - "tracing", - "url", - "vergen", - "vergen-git2", -] - -[[package]] -name = "reth-node-ethereum" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-network", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "eyre", - "reth-chainspec", - "reth-engine-local", - "reth-engine-primitives", - "reth-ethereum-consensus", - "reth-ethereum-engine-primitives", - "reth-ethereum-payload-builder", - "reth-ethereum-primitives", - "reth-evm", - "reth-evm-ethereum", - "reth-network", - "reth-node-api", - "reth-node-builder", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-revm", - "reth-rpc", - "reth-rpc-api", - "reth-rpc-builder", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-tracing", - 
"reth-transaction-pool", - "revm", - "tokio", -] - -[[package]] -name = "reth-node-ethstats" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "chrono", - "futures-util", - "reth-chain-state", - "reth-network-api", - "reth-primitives-traits", - "reth-storage-api", - "reth-transaction-pool", - "serde", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tokio-tungstenite", - "tracing", - "url", -] - -[[package]] -name = "reth-node-events" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "derive_more", - "futures", - "humantime", - "pin-project", - "reth-engine-primitives", - "reth-network-api", - "reth-primitives-traits", - "reth-prune-types", - "reth-stages", - "reth-static-file-types", - "reth-storage-api", - "tokio", - "tracing", -] - -[[package]] -name = "reth-node-metrics" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "bytes", - "eyre", - "http", - "http-body-util", - "jemalloc_pprof", - "jsonrpsee-server", - "mappings", - "metrics", - "metrics-exporter-prometheus", - "metrics-process", - "metrics-util", - "pprof_util", - "procfs 0.17.0", - "reqwest", - "reth-fs-util", - "reth-metrics", - "reth-tasks", - "tempfile", - "tikv-jemalloc-ctl", - "tokio", - "tower", - "tracing", -] - -[[package]] -name = "reth-node-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "reth-chainspec", - "reth-db-api", - 
"reth-engine-primitives", - "reth-payload-primitives", - "reth-primitives-traits", -] - -[[package]] -name = "reth-op" -version = "1.10.2" -dependencies = [ - "reth-chainspec", - "reth-cli-util", - "reth-codecs", - "reth-consensus", - "reth-consensus-common", - "reth-db", - "reth-engine-local", - "reth-eth-wire", - "reth-evm", - "reth-exex", - "reth-network", - "reth-network-api", - "reth-node-api", - "reth-node-builder", - "reth-node-core", - "reth-optimism-chainspec", - "reth-optimism-cli", - "reth-optimism-consensus", - "reth-optimism-evm", - "reth-optimism-node", - "reth-optimism-primitives", - "reth-optimism-rpc", - "reth-primitives-traits", - "reth-provider", - "reth-revm", - "reth-rpc", - "reth-rpc-api", - "reth-rpc-builder", - "reth-rpc-eth-types", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "reth-trie", - "reth-trie-db", -] - -[[package]] -name = "reth-optimism-chainspec" -version = "1.10.2" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-hardforks", - "alloy-op-hardforks", - "alloy-primitives", - "derive_more", - "miniz_oxide", - "op-alloy-consensus", - "op-alloy-rpc-types", - "paste", - "reth-chainspec", - "reth-ethereum-forks", - "reth-network-peers", - "reth-optimism-forks", - "reth-optimism-primitives", - "reth-primitives-traits", - "serde", - "serde_json", - "tar-no-std", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-optimism-cli" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "clap", - "derive_more", - "eyre", - "futures-util", - "op-alloy-consensus", - "proptest", - "reth-chainspec", - "reth-cli", - "reth-cli-commands", - "reth-cli-runner", - "reth-consensus", - "reth-db", - "reth-db-api", - "reth-db-common", - "reth-downloaders", - "reth-execution-types", - "reth-fs-util", - "reth-node-builder", - "reth-node-core", - "reth-node-events", - "reth-node-metrics", - "reth-optimism-chainspec", - 
"reth-optimism-consensus", - "reth-optimism-evm", - "reth-optimism-node", - "reth-optimism-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-rpc-server-types", - "reth-stages", - "reth-static-file", - "reth-static-file-types", - "reth-tracing", - "serde", - "tempfile", - "tokio", - "tokio-util", - "tracing", -] - -[[package]] -name = "reth-optimism-consensus" -version = "1.10.2" -dependencies = [ - "alloy-chains", - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-trie", - "op-alloy-consensus", - "reth-chainspec", - "reth-consensus", - "reth-consensus-common", - "reth-db-common", - "reth-execution-types", - "reth-optimism-chainspec", - "reth-optimism-forks", - "reth-optimism-node", - "reth-optimism-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-revm", - "reth-storage-api", - "reth-storage-errors", - "reth-trie", - "reth-trie-common", - "revm", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "reth-optimism-evm" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-genesis", - "alloy-op-evm", - "alloy-primitives", - "op-alloy-consensus", - "op-alloy-rpc-types-engine", - "op-revm", - "reth-chainspec", - "reth-evm", - "reth-execution-errors", - "reth-execution-types", - "reth-optimism-chainspec", - "reth-optimism-consensus", - "reth-optimism-forks", - "reth-optimism-primitives", - "reth-primitives-traits", - "reth-revm", - "reth-rpc-eth-api", - "reth-storage-errors", - "revm", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-optimism-flashblocks" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "brotli", - "derive_more", - "eyre", - "futures-util", - "metrics", - "op-alloy-consensus", - "op-alloy-rpc-types-engine", - "reth-chain-state", - "reth-engine-primitives", - "reth-errors", - "reth-evm", - "reth-execution-types", - "reth-metrics", - 
"reth-optimism-payload-builder", - "reth-optimism-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-revm", - "reth-rpc-eth-types", - "reth-storage-api", - "reth-tasks", - "ringbuffer", - "serde_json", - "test-case", - "tokio", - "tokio-tungstenite", - "tracing", - "url", -] - -[[package]] -name = "reth-optimism-forks" -version = "1.10.2" -dependencies = [ - "alloy-op-hardforks", - "alloy-primitives", - "once_cell", - "reth-ethereum-forks", -] - -[[package]] -name = "reth-optimism-node" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-genesis", - "alloy-network", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "clap", - "eyre", - "futures", - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-rpc-types-engine", - "op-revm", - "reth-chainspec", - "reth-consensus", - "reth-db", - "reth-e2e-test-utils", - "reth-engine-local", - "reth-evm", - "reth-network", - "reth-node-api", - "reth-node-builder", - "reth-node-core", - "reth-optimism-chainspec", - "reth-optimism-consensus", - "reth-optimism-evm", - "reth-optimism-forks", - "reth-optimism-node", - "reth-optimism-payload-builder", - "reth-optimism-primitives", - "reth-optimism-rpc", - "reth-optimism-storage", - "reth-optimism-txpool", - "reth-payload-builder", - "reth-payload-util", - "reth-primitives-traits", - "reth-provider", - "reth-revm", - "reth-rpc", - "reth-rpc-api", - "reth-rpc-engine-api", - "reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-stages-types", - "reth-tasks", - "reth-tracing", - "reth-transaction-pool", - "reth-trie-common", - "reth-trie-db", - "revm", - "serde", - "serde_json", - "tokio", - "url", -] - -[[package]] -name = "reth-optimism-payload-builder" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-debug", - "alloy-rpc-types-engine", - "derive_more", - "either", - "op-alloy-consensus", - 
"op-alloy-rpc-types-engine", - "reth-basic-payload-builder", - "reth-chainspec", - "reth-evm", - "reth-execution-types", - "reth-optimism-evm", - "reth-optimism-forks", - "reth-optimism-primitives", - "reth-optimism-txpool", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-payload-util", - "reth-payload-validator", - "reth-primitives-traits", - "reth-revm", - "reth-storage-api", - "reth-transaction-pool", - "revm", - "serde", - "sha2", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "reth-optimism-primitives" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "arbitrary", - "bincode", - "bytes", - "modular-bitfield", - "op-alloy-consensus", - "proptest", - "proptest-arbitrary-interop", - "rand 0.8.5", - "rand 0.9.2", - "reth-codecs", - "reth-primitives-traits", - "reth-zstd-compressors", - "rstest", - "secp256k1 0.30.0", - "serde", - "serde_json", - "serde_with", -] - -[[package]] -name = "reth-optimism-rpc" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-op-hardforks", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-debug", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-transport", - "alloy-transport-http", - "async-trait", - "derive_more", - "eyre", - "futures", - "jsonrpsee", - "jsonrpsee-core", - "jsonrpsee-types", - "metrics", - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-rpc-jsonrpsee", - "op-alloy-rpc-types", - "op-alloy-rpc-types-engine", - "op-revm", - "reqwest", - "reth-chain-state", - "reth-chainspec", - "reth-evm", - "reth-metrics", - "reth-node-api", - "reth-node-builder", - "reth-optimism-chainspec", - "reth-optimism-evm", - "reth-optimism-flashblocks", - "reth-optimism-forks", - "reth-optimism-payload-builder", - "reth-optimism-primitives", - "reth-optimism-txpool", - "reth-primitives-traits", - "reth-rpc", - "reth-rpc-api", - 
"reth-rpc-engine-api", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "revm", - "serde_json", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tower", - "tracing", -] - -[[package]] -name = "reth-optimism-storage" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "reth-codecs", - "reth-optimism-primitives", - "reth-prune-types", - "reth-stages-types", - "reth-storage-api", -] - -[[package]] -name = "reth-optimism-txpool" -version = "1.10.2" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-json-rpc", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-eth", - "alloy-serde", - "c-kzg", - "derive_more", - "futures-util", - "metrics", - "op-alloy-consensus", - "op-alloy-flz", - "op-alloy-rpc-types", - "op-revm", - "parking_lot", - "reth-chain-state", - "reth-chainspec", - "reth-evm", - "reth-metrics", - "reth-optimism-chainspec", - "reth-optimism-evm", - "reth-optimism-forks", - "reth-optimism-primitives", - "reth-primitives-traits", - "reth-provider", - "reth-storage-api", - "reth-transaction-pool", - "serde", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-payload-builder" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rpc-types", - "futures-util", - "metrics", - "reth-chain-state", - "reth-ethereum-engine-primitives", - "reth-metrics", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-payload-builder-primitives" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "pin-project", - 
"reth-payload-primitives", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-payload-primitives" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "auto_impl", - "either", - "op-alloy-rpc-types-engine", - "reth-chain-state", - "reth-chainspec", - "reth-errors", - "reth-execution-types", - "reth-primitives-traits", - "reth-trie-common", - "serde", - "thiserror 2.0.18", - "tokio", -] - -[[package]] -name = "reth-payload-util" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "reth-transaction-pool", -] - -[[package]] -name = "reth-payload-validator" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-rpc-types-engine", - "reth-primitives-traits", -] - -[[package]] -name = "reth-primitives" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "once_cell", - "reth-ethereum-forks", - "reth-ethereum-primitives", - "reth-primitives-traits", - "reth-static-file-types", -] - -[[package]] -name = "reth-primitives-traits" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-trie", - "arbitrary", - "auto_impl", - "byteorder", - 
"bytes", - "derive_more", - "modular-bitfield", - "once_cell", - "op-alloy-consensus", - "proptest", - "proptest-arbitrary-interop", - "rayon", - "reth-codecs", - "revm-bytecode", - "revm-primitives", - "revm-state", - "secp256k1 0.30.0", - "serde", - "serde_with", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-provider" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "dashmap", - "eyre", - "itertools 0.14.0", - "metrics", - "notify", - "parking_lot", - "rayon", - "reth-chain-state", - "reth-chainspec", - "reth-codecs", - "reth-db", - "reth-db-api", - "reth-errors", - "reth-ethereum-engine-primitives", - "reth-ethereum-primitives", - "reth-execution-types", - "reth-metrics", - "reth-nippy-jar", - "reth-node-types", - "reth-primitives-traits", - "reth-prune-types", - "reth-stages-types", - "reth-static-file-types", - "reth-storage-api", - "reth-storage-errors", - "reth-trie", - "reth-trie-db", - "revm-database", - "revm-state", - "rocksdb", - "strum 0.27.2", - "tokio", - "tracing", -] - -[[package]] -name = "reth-prune" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "itertools 0.14.0", - "metrics", - "rayon", - "reth-config", - "reth-db-api", - "reth-errors", - "reth-exex-types", - "reth-metrics", - "reth-primitives-traits", - "reth-provider", - "reth-prune-types", - "reth-stages-types", - "reth-static-file-types", - "reth-storage-api", - "reth-tokio-util", - "rustc-hash", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-prune-types" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "arbitrary", - "derive_more", - "modular-bitfield", - "reth-codecs", - "serde", - "strum 0.27.2", - "thiserror 2.0.18", - "tracing", -] - -[[package]] -name = "reth-revm" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "reth-primitives-traits", - "reth-storage-api", - "reth-storage-errors", - "reth-trie", - "revm", -] - -[[package]] -name = "reth-rpc" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-dyn-abi", - "alloy-eip7928", - "alloy-eips", - "alloy-evm", - "alloy-genesis", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-client", - "alloy-rpc-types", - "alloy-rpc-types-admin", - "alloy-rpc-types-beacon", - "alloy-rpc-types-debug", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-rpc-types-mev", - "alloy-rpc-types-trace", - "alloy-rpc-types-txpool", - "alloy-serde", - "alloy-signer", - "alloy-signer-local", - "async-trait", - "derive_more", - "dyn-clone", - "futures", - "http", - "http-body", - "hyper", - "itertools 0.14.0", - "jsonrpsee", - "jsonrpsee-types", - "jsonwebtoken", - "parking_lot", - "pin-project", - "reth-chain-state", - "reth-chainspec", - "reth-consensus", - "reth-consensus-common", - "reth-engine-primitives", - "reth-errors", - "reth-ethereum-engine-primitives", - "reth-ethereum-primitives", - "reth-evm", - "reth-evm-ethereum", - "reth-execution-types", - "reth-metrics", - "reth-network-api", - "reth-network-peers", - "reth-network-types", - "reth-node-api", - "reth-primitives-traits", - "reth-revm", - "reth-rpc-api", - "reth-rpc-convert", - 
"reth-rpc-engine-api", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "reth-trie-common", - "revm", - "revm-inspectors", - "revm-primitives", - "serde", - "serde_json", - "sha2", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tower", - "tracing", - "tracing-futures", -] - -[[package]] -name = "reth-rpc-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eip7928", - "alloy-eips", - "alloy-genesis", - "alloy-json-rpc", - "alloy-primitives", - "alloy-rpc-types", - "alloy-rpc-types-admin", - "alloy-rpc-types-anvil", - "alloy-rpc-types-beacon", - "alloy-rpc-types-debug", - "alloy-rpc-types-engine", - "alloy-rpc-types-eth", - "alloy-rpc-types-mev", - "alloy-rpc-types-trace", - "alloy-rpc-types-txpool", - "alloy-serde", - "jsonrpsee", - "reth-chain-state", - "reth-engine-primitives", - "reth-network-peers", - "reth-rpc-eth-api", - "reth-trie-common", - "serde_json", -] - -[[package]] -name = "reth-rpc-builder" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-network", - "alloy-provider", - "dyn-clone", - "http", - "jsonrpsee", - "metrics", - "pin-project", - "reth-chain-state", - "reth-chainspec", - "reth-consensus", - "reth-engine-primitives", - "reth-evm", - "reth-ipc", - "reth-metrics", - "reth-network-api", - "reth-node-core", - "reth-primitives-traits", - "reth-rpc", - "reth-rpc-api", - "reth-rpc-eth-api", - "reth-rpc-eth-types", - "reth-rpc-layer", - "reth-rpc-server-types", - "reth-storage-api", - "reth-tasks", - "reth-tokio-util", - "reth-transaction-pool", - "serde", - "thiserror 2.0.18", - "tokio", - "tokio-util", - "tower", - "tower-http", - "tracing", -] - -[[package]] -name = 
"reth-rpc-convert" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-evm", - "alloy-json-rpc", - "alloy-network", - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-signer", - "auto_impl", - "dyn-clone", - "jsonrpsee-types", - "op-alloy-consensus", - "op-alloy-network", - "op-alloy-rpc-types", - "reth-ethereum-primitives", - "reth-evm", - "reth-primitives-traits", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-rpc-engine-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "async-trait", - "jsonrpsee-core", - "jsonrpsee-types", - "metrics", - "reth-chainspec", - "reth-engine-primitives", - "reth-metrics", - "reth-network-api", - "reth-payload-builder", - "reth-payload-builder-primitives", - "reth-payload-primitives", - "reth-primitives-traits", - "reth-rpc-api", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "serde", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-rpc-eth-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-dyn-abi", - "alloy-eips", - "alloy-evm", - "alloy-json-rpc", - "alloy-network", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-rpc-types-mev", - "alloy-serde", - "async-trait", - "auto_impl", - "dyn-clone", - "futures", - "jsonrpsee", - "jsonrpsee-types", - "parking_lot", - "reth-chain-state", - "reth-chainspec", - "reth-errors", - "reth-evm", - "reth-network-api", - "reth-node-api", - "reth-primitives-traits", - "reth-revm", - "reth-rpc-convert", - 
"reth-rpc-eth-types", - "reth-rpc-server-types", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "reth-trie-common", - "revm", - "revm-inspectors", - "tokio", - "tracing", -] - -[[package]] -name = "reth-rpc-eth-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-evm", - "alloy-network", - "alloy-primitives", - "alloy-rpc-client", - "alloy-rpc-types-eth", - "alloy-sol-types", - "alloy-transport", - "derive_more", - "futures", - "itertools 0.14.0", - "jsonrpsee-core", - "jsonrpsee-types", - "metrics", - "rand 0.9.2", - "reqwest", - "reth-chain-state", - "reth-chainspec", - "reth-errors", - "reth-ethereum-primitives", - "reth-evm", - "reth-execution-types", - "reth-metrics", - "reth-primitives-traits", - "reth-revm", - "reth-rpc-convert", - "reth-rpc-server-types", - "reth-storage-api", - "reth-tasks", - "reth-transaction-pool", - "reth-trie", - "revm", - "revm-inspectors", - "schnellru", - "serde", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tracing", - "url", -] - -[[package]] -name = "reth-rpc-layer" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-rpc-types-engine", - "http", - "jsonrpsee-http-client", - "pin-project", - "tower", - "tower-http", - "tracing", -] - -[[package]] -name = "reth-rpc-server-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "jsonrpsee-core", - "jsonrpsee-types", - "reth-errors", - "reth-network-api", - "serde", - "strum 0.27.2", -] - -[[package]] -name = "reth-stages" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "bincode", - "eyre", - "futures-util", - "itertools 0.14.0", - "num-traits", - "rayon", - "reqwest", - "reth-chainspec", - "reth-codecs", - "reth-config", - "reth-consensus", - "reth-db", - "reth-db-api", - "reth-era", - "reth-era-downloader", - "reth-era-utils", - "reth-ethereum-primitives", - "reth-etl", - "reth-evm", - "reth-execution-types", - "reth-exex", - "reth-fs-util", - "reth-network-p2p", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-prune-types", - "reth-revm", - "reth-stages-api", - "reth-static-file-types", - "reth-storage-api", - "reth-storage-errors", - "reth-testing-utils", - "reth-trie", - "reth-trie-db", - "tempfile", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-stages-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "aquamarine", - "auto_impl", - "futures-util", - "metrics", - "reth-consensus", - "reth-errors", - "reth-metrics", - "reth-network-p2p", - "reth-primitives-traits", - "reth-provider", - "reth-prune", - "reth-stages-types", - "reth-static-file", - "reth-static-file-types", - "reth-tokio-util", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-stages-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "arbitrary", - "bytes", - "modular-bitfield", - "reth-codecs", - "reth-trie-common", - "serde", -] - -[[package]] -name = "reth-static-file" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "parking_lot", - "rayon", - "reth-codecs", - "reth-db-api", - "reth-primitives-traits", - "reth-provider", - "reth-prune-types", - "reth-stages-types", - "reth-static-file-types", - "reth-storage-errors", - "reth-tokio-util", - "tracing", -] - -[[package]] -name = "reth-static-file-types" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "clap", - "derive_more", - "fixed-map", - "serde", - "strum 0.27.2", -] - -[[package]] -name = "reth-storage-api" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rpc-types-engine", - "auto_impl", - "reth-chainspec", - "reth-db-api", - "reth-db-models", - "reth-ethereum-primitives", - "reth-execution-types", - "reth-primitives-traits", - "reth-prune-types", - "reth-stages-types", - "reth-storage-errors", - "reth-trie-common", - "revm-database", - "serde_json", -] - -[[package]] -name = "reth-storage-errors" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "derive_more", - "reth-primitives-traits", - "reth-prune-types", - "reth-static-file-types", - "revm-database-interface", - "revm-state", - "thiserror 2.0.18", -] - -[[package]] -name = "reth-tasks" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "auto_impl", - 
"dyn-clone", - "futures-util", - "metrics", - "pin-project", - "rayon", - "reth-metrics", - "thiserror 2.0.18", - "tokio", - "tracing", - "tracing-futures", -] - -[[package]] -name = "reth-testing-utils" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-genesis", - "alloy-primitives", - "rand 0.8.5", - "rand 0.9.2", - "reth-ethereum-primitives", - "reth-primitives-traits", - "secp256k1 0.30.0", -] - -[[package]] -name = "reth-tokio-util" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-tracing" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "clap", - "eyre", - "reth-tracing-otlp", - "rolling-file", - "tracing", - "tracing-appender", - "tracing-journald", - "tracing-logfmt", - "tracing-samply", - "tracing-subscriber 0.3.22", - "tracing-tracy", - "tracy-client", -] - -[[package]] -name = "reth-tracing-otlp" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "clap", - "eyre", - "opentelemetry", - "opentelemetry-appender-tracing", - "opentelemetry-otlp", - "opentelemetry-semantic-conventions", - "opentelemetry_sdk", - "tracing", - "tracing-opentelemetry", - "tracing-subscriber 0.3.22", - "url", -] - -[[package]] -name = "reth-transaction-pool" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - 
"alloy-eips", - "alloy-primitives", - "alloy-rlp", - "aquamarine", - "auto_impl", - "bitflags 2.10.0", - "futures-util", - "metrics", - "parking_lot", - "paste", - "pin-project", - "proptest", - "proptest-arbitrary-interop", - "rand 0.9.2", - "reth-chain-state", - "reth-chainspec", - "reth-eth-wire-types", - "reth-ethereum-primitives", - "reth-evm", - "reth-evm-ethereum", - "reth-execution-types", - "reth-fs-util", - "reth-metrics", - "reth-primitives-traits", - "reth-storage-api", - "reth-tasks", - "revm", - "revm-interpreter", - "revm-primitives", - "rustc-hash", - "schnellru", - "serde", - "serde_json", - "smallvec", - "thiserror 2.0.18", - "tokio", - "tokio-stream", - "tracing", -] - -[[package]] -name = "reth-trie" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-eips", - "alloy-primitives", - "alloy-rlp", - "alloy-trie", - "auto_impl", - "itertools 0.14.0", - "metrics", - "parking_lot", - "reth-execution-errors", - "reth-metrics", - "reth-primitives-traits", - "reth-stages-types", - "reth-storage-errors", - "reth-trie-common", - "reth-trie-sparse", - "revm-database", - "tracing", - "triehash", -] - -[[package]] -name = "reth-trie-common" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-consensus", - "alloy-primitives", - "alloy-rlp", - "alloy-rpc-types-eth", - "alloy-serde", - "alloy-trie", - "arbitrary", - "arrayvec", - "bytes", - "derive_more", - "hash-db", - "itertools 0.14.0", - "nybbles", - "plain_hasher", - "rayon", - "reth-codecs", - "reth-primitives-traits", - "revm-database", - "serde", - "serde_with", -] - -[[package]] -name = "reth-trie-db" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "metrics", - "parking_lot", - "reth-db-api", - "reth-execution-errors", - "reth-metrics", - "reth-primitives-traits", - "reth-stages-types", - "reth-storage-api", - "reth-storage-errors", - "reth-trie", - "reth-trie-common", - "tracing", -] - -[[package]] -name = "reth-trie-parallel" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "crossbeam-channel", - "dashmap", - "derive_more", - "itertools 0.14.0", - "metrics", - "rayon", - "reth-execution-errors", - "reth-metrics", - "reth-primitives-traits", - "reth-provider", - "reth-storage-errors", - "reth-trie", - "reth-trie-common", - "reth-trie-sparse", - "thiserror 2.0.18", - "tokio", - "tracing", -] - -[[package]] -name = "reth-trie-sparse" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "alloy-trie", - "auto_impl", - "metrics", - "rayon", - "reth-execution-errors", - "reth-metrics", - "reth-primitives-traits", - "reth-trie-common", - "smallvec", - "tracing", -] - -[[package]] -name = "reth-trie-sparse-parallel" -version = "1.10.2" -source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "alloy-primitives", - "alloy-rlp", - "alloy-trie", - "metrics", - "rayon", - "reth-execution-errors", - "reth-metrics", - "reth-trie-common", - "reth-trie-sparse", - "smallvec", - "tracing", -] - -[[package]] -name = "reth-zstd-compressors" -version = "1.10.2" -source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce9d09b925ebbce7bdf213d04c402e124c#b3d532ce9d09b925ebbce7bdf213d04c402e124c" -dependencies = [ - "zstd", -] - -[[package]] -name = "revm" -version = "34.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2aabdebaa535b3575231a88d72b642897ae8106cf6b0d12eafc6bfdf50abfc7" -dependencies = [ - "revm-bytecode", - "revm-context", - "revm-context-interface", - "revm-database", - "revm-database-interface", - "revm-handler", - "revm-inspector", - "revm-interpreter", - "revm-precompile", - "revm-primitives", - "revm-state", -] - -[[package]] -name = "revm-bytecode" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d1e5c1eaa44d39d537f668bc5c3409dc01e5c8be954da6c83370bbdf006457" -dependencies = [ - "bitvec", - "phf", - "revm-primitives", - "serde", -] - -[[package]] -name = "revm-context" -version = "13.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "892ff3e6a566cf8d72ffb627fdced3becebbd9ba64089c25975b9b028af326a5" -dependencies = [ - "bitvec", - "cfg-if", - "derive-where", - "revm-bytecode", - "revm-context-interface", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-context-interface" -version = "14.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57f61cc6d23678c4840af895b19f8acfbbd546142ec8028b6526c53cc1c16c98" -dependencies = [ - "alloy-eip2930", - "alloy-eip7702", - "auto_impl", - "either", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-database" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "529528d0b05fe646be86223032c3e77aa8b05caa2a35447d538c55965956a511" -dependencies = [ - "alloy-eips", - "revm-bytecode", - "revm-database-interface", - "revm-primitives", - "revm-state", - "serde", -] - 
-[[package]] -name = "revm-database-interface" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7bf93ac5b91347c057610c0d96e923db8c62807e03f036762d03e981feddc1d" -dependencies = [ - "auto_impl", - "either", - "revm-primitives", - "revm-state", - "serde", - "thiserror 2.0.18", -] - -[[package]] -name = "revm-handler" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cd0e43e815a85eded249df886c4badec869195e70cdd808a13cfca2794622d2" -dependencies = [ - "auto_impl", - "derive-where", - "revm-bytecode", - "revm-context", - "revm-context-interface", - "revm-database-interface", - "revm-interpreter", - "revm-precompile", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-inspector" -version = "15.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3ccad59db91ef93696536a0dbaf2f6f17cfe20d4d8843ae118edb7e97947ef" -dependencies = [ - "auto_impl", - "either", - "revm-context", - "revm-database-interface", - "revm-handler", - "revm-interpreter", - "revm-primitives", - "revm-state", - "serde", - "serde_json", -] - -[[package]] -name = "revm-inspectors" -version = "0.34.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e435414e9de50a1b930da602067c76365fea2fea11e80ceb50783c94ddd127f" -dependencies = [ - "alloy-primitives", - "alloy-rpc-types-eth", - "alloy-rpc-types-trace", - "alloy-sol-types", - "anstyle", - "boa_engine", - "boa_gc", - "colorchoice", - "revm", - "serde", - "serde_json", - "thiserror 2.0.18", -] - -[[package]] -name = "revm-interpreter" -version = "32.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11406408597bc249392d39295831c4b641b3a6f5c471a7c41104a7a1e3564c07" -dependencies = [ - "revm-bytecode", - "revm-context-interface", - "revm-primitives", - "revm-state", - "serde", -] - -[[package]] -name = "revm-precompile" -version = "32.0.0" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c1285c848d240678bf69cb0f6179ff5a4aee6fc8e921d89708087197a0aff3" -dependencies = [ - "ark-bls12-381", - "ark-bn254", - "ark-ec", - "ark-ff 0.5.0", - "ark-serialize 0.5.0", - "arrayref", - "aurora-engine-modexp", - "blst", - "c-kzg", - "cfg-if", - "k256", - "p256", - "revm-primitives", - "ripemd", - "secp256k1 0.31.1", - "sha2", -] - -[[package]] -name = "revm-primitives" -version = "22.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba580c56a8ec824a64f8a1683577876c2e1dbe5247044199e9b881421ad5dcf9" -dependencies = [ - "alloy-primitives", - "num_enum", - "once_cell", - "serde", -] - -[[package]] -name = "revm-state" -version = "9.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "311720d4f0f239b041375e7ddafdbd20032a33b7bae718562ea188e188ed9fd3" -dependencies = [ - "alloy-eip7928", - "bitflags 2.10.0", - "revm-bytecode", - "revm-primitives", - "serde", -] - -[[package]] -name = "rfc6979" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" -dependencies = [ - "hmac", - "subtle", -] - -[[package]] -name = "ring" -version = "0.17.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" -dependencies = [ - "cc", - "cfg-if", - "getrandom 0.2.17", - "libc", - "untrusted", - "windows-sys 0.52.0", -] - -[[package]] -name = "ringbuffer" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3df6368f71f205ff9c33c076d170dd56ebf68e8161c733c0caa07a7a5509ed53" - -[[package]] -name = "ripemd" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" -dependencies = [ - "digest 0.10.7", -] 
- -[[package]] -name = "rlimit" -version = "0.10.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7043b63bd0cd1aaa628e476b80e6d4023a3b50eb32789f2728908107bd0c793a" -dependencies = [ - "libc", -] - -[[package]] -name = "rlp" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" -dependencies = [ - "bytes", - "rustc-hex", -] - -[[package]] -name = "rmp" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ba8be72d372b2c9b35542551678538b562e7cf86c3315773cae48dfbfe7790c" -dependencies = [ - "num-traits", -] - -[[package]] -name = "rmp-serde" -version = "1.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72f81bee8c8ef9b577d1681a70ebbc962c232461e397b22c208c43c04b67a155" -dependencies = [ - "rmp", - "serde", -] - -[[package]] -name = "roaring" -version = "0.10.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19e8d2cfa184d94d0726d650a9f4a1be7f9b76ac9fdb954219878dc00c1c1e7b" -dependencies = [ - "bytemuck", - "byteorder", -] - -[[package]] -name = "rocksdb" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddb7af00d2b17dbd07d82c0063e25411959748ff03e8d4f96134c2ff41fce34f" -dependencies = [ - "libc", - "librocksdb-sys", -] - -[[package]] -name = "rolling-file" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8395b4f860856b740f20a296ea2cd4d823e81a2658cf05ef61be22916026a906" -dependencies = [ - "chrono", -] - -[[package]] -name = "route-recognizer" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" - -[[package]] -name = "rstest" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "03e905296805ab93e13c1ec3a03f4b6c4f35e9498a3d5fa96dc626d22c03cd89" -dependencies = [ - "futures-timer", - "futures-util", - "rstest_macros", - "rustc_version 0.4.1", -] - -[[package]] -name = "rstest_macros" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef0053bbffce09062bee4bcc499b0fbe7a57b879f1efe088d6d8d4c7adcdef9b" -dependencies = [ - "cfg-if", - "glob", - "proc-macro-crate", - "proc-macro2", - "quote", - "regex", - "relative-path", - "rustc_version 0.4.1", - "syn 2.0.114", - "unicode-ident", -] - -[[package]] -name = "ruint" -version = "1.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" -dependencies = [ - "alloy-rlp", - "arbitrary", - "ark-ff 0.3.0", - "ark-ff 0.4.2", - "ark-ff 0.5.0", - "bytes", - "fastrlp 0.3.1", - "fastrlp 0.4.0", - "num-bigint", - "num-integer", - "num-traits", - "parity-scale-codec", - "primitive-types", - "proptest", - "rand 0.8.5", - "rand 0.9.2", - "rlp", - "ruint-macro", - "serde_core", - "valuable", - "zeroize", -] - -[[package]] -name = "ruint-macro" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" - -[[package]] -name = "rustc-demangle" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" - -[[package]] -name = "rustc-hash" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" -dependencies = [ - "rand 0.8.5", -] - -[[package]] -name = "rustc-hex" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" - -[[package]] -name = 
"rustc_version" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" -dependencies = [ - "semver 0.9.0", -] - -[[package]] -name = "rustc_version" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" -dependencies = [ - "semver 0.11.0", -] - -[[package]] -name = "rustc_version" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" -dependencies = [ - "semver 1.0.27", -] - -[[package]] -name = "rustix" -version = "0.38.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.4.15", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustix" -version = "1.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" -dependencies = [ - "bitflags 2.10.0", - "errno", - "libc", - "linux-raw-sys 0.11.0", - "windows-sys 0.61.2", -] - -[[package]] -name = "rustls" -version = "0.23.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" -dependencies = [ - "log", - "once_cell", - "ring", - "rustls-pki-types", - "rustls-webpki", - "subtle", - "zeroize", -] - -[[package]] -name = "rustls-native-certs" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" -dependencies = [ - "openssl-probe", - "rustls-pki-types", - "schannel", - "security-framework", -] - -[[package]] -name = "rustls-pki-types" 
-version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" -dependencies = [ - "web-time", - "zeroize", -] - -[[package]] -name = "rustls-platform-verifier" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" -dependencies = [ - "core-foundation", - "core-foundation-sys", - "jni", - "log", - "once_cell", - "rustls", - "rustls-native-certs", - "rustls-platform-verifier-android", - "rustls-webpki", - "security-framework", - "security-framework-sys", - "webpki-root-certs 0.26.11", - "windows-sys 0.59.0", -] - -[[package]] -name = "rustls-platform-verifier-android" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" - -[[package]] -name = "rustls-webpki" -version = "0.103.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" -dependencies = [ - "ring", - "rustls-pki-types", - "untrusted", -] - -[[package]] -name = "rustversion" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" - -[[package]] -name = "rusty-fork" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" -dependencies = [ - "fnv", - "quick-error", - "tempfile", - "wait-timeout", -] - -[[package]] -name = "ryu" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" - -[[package]] -name = "ryu-js" -version = "1.0.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd29631678d6fb0903b69223673e122c32e9ae559d0960a38d574695ebc0ea15" - -[[package]] -name = "same-file" -version = "1.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" -dependencies = [ - "winapi-util", -] - -[[package]] -name = "schannel" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schnellru" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "356285bbf17bea63d9e52e96bd18f039672ac92b55b8cb997d6162a2a37d1649" -dependencies = [ - "ahash", - "cfg-if", - "hashbrown 0.13.2", -] - -[[package]] -name = "scoped-tls" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" - -[[package]] -name = "scopeguard" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" - -[[package]] -name = "sec1" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" -dependencies = [ - "base16ct", - "der", - "generic-array", - "pkcs8", - "serdect", - "subtle", - "zeroize", -] - -[[package]] -name = "secp256k1" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" -dependencies = [ - "bitcoin_hashes", - "rand 0.8.5", - "secp256k1-sys 0.10.1", - "serde", -] - -[[package]] -name = "secp256k1" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c3c81b43dc2d8877c216a3fccf76677ee1ebccd429566d3e67447290d0c42b2" -dependencies = [ - "bitcoin_hashes", - "rand 0.9.2", - "secp256k1-sys 0.11.0", -] - -[[package]] -name = "secp256k1-sys" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" -dependencies = [ - "cc", -] - -[[package]] -name = "secp256k1-sys" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb913707158fadaf0d8702c2db0e857de66eb003ccfdda5924b5f5ac98efb38" -dependencies = [ - "cc", -] - -[[package]] -name = "security-framework" -version = "3.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" -dependencies = [ - "bitflags 2.10.0", - "core-foundation", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - -[[package]] -name = "security-framework-sys" -version = "2.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" -dependencies = [ - "core-foundation-sys", - "libc", -] - -[[package]] -name = "semver" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" -dependencies = [ - "semver-parser 0.7.0", -] - -[[package]] -name = "semver" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" -dependencies = [ - "semver-parser 0.10.3", -] - -[[package]] -name = "semver" -version = "1.0.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" -dependencies = [ - "serde", - "serde_core", -] - -[[package]] -name = "semver-parser" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" - -[[package]] -name = "semver-parser" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" -dependencies = [ - "pest", -] - -[[package]] -name = "send_wrapper" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" - -[[package]] -name = "send_wrapper" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" - -[[package]] -name = "serde" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" -dependencies = [ - "serde_core", - "serde_derive", -] - -[[package]] -name = "serde_core" -version = "1.0.228" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" -dependencies = [ - "serde_derive", -] - -[[package]] -name = "serde_derive" -version = "1.0.228" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serde_json" -version = "1.0.149" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" -dependencies = [ - "indexmap 2.13.0", - "itoa", - "memchr", - "serde", - "serde_core", - "zmij", -] - -[[package]] -name = "serde_spanned" -version = "0.6.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" -dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "serde_with" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" -dependencies = [ - "base64 0.22.1", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.13.0", - "schemars 0.9.0", - "schemars 1.2.0", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "serdect" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" -dependencies = [ - "base16ct", - "serde", -] - -[[package]] -name = 
"sha1" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha2" -version = "0.10.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - -[[package]] -name = "sha3-asm" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b31139435f327c93c6038ed350ae4588e2c70a13d50599509fee6349967ba35a" -dependencies = [ - "cc", - "cfg-if", -] - -[[package]] -name = "sharded-slab" -version = "0.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" -dependencies = [ - "lazy_static", -] - -[[package]] -name = "shellexpand" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1fdf65dd6331831494dd616b30351c38e96e45921a27745cf98490458b90bb" -dependencies = [ - "dirs", -] - -[[package]] -name = "shlex" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" - -[[package]] -name = "signal-hook" -version = "0.3.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" -dependencies = [ - "libc", - "signal-hook-registry", -] - -[[package]] -name = "signal-hook-mio" -version = "0.2.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc" -dependencies = [ - "libc", - "mio", - "signal-hook", -] - -[[package]] -name = "signal-hook-registry" -version = "1.4.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" -dependencies = [ - "errno", - "libc", -] - -[[package]] -name = "signature" -version = "2.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" -dependencies = [ - "digest 0.10.7", - "rand_core 0.6.4", -] - -[[package]] -name = "simd-adler32" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" - -[[package]] -name = "simple_asn1" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" -dependencies = [ - "num-bigint", - "num-traits", - "thiserror 2.0.18", - "time", -] - -[[package]] -name = "siphasher" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" - -[[package]] -name = "sketches-ddsketch" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" - -[[package]] -name = "slab" -version = "0.4.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" - -[[package]] -name = "small_btree" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ba60d2df92ba73864714808ca68c059734853e6ab722b40e1cf543ebb3a057a" 
-dependencies = [ - "arrayvec", -] - -[[package]] -name = "smallvec" -version = "1.15.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" -dependencies = [ - "arbitrary", - "serde", -] - -[[package]] -name = "snap" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" - -[[package]] -name = "socket2" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] - -[[package]] -name = "socket2" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" -dependencies = [ - "libc", - "windows-sys 0.60.2", -] - -[[package]] -name = "soketto" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" -dependencies = [ - "base64 0.22.1", - "bytes", - "futures", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha1", -] - -[[package]] -name = "spki" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" -dependencies = [ - "base64ct", - "der", -] - -[[package]] -name = "stable_deref_trait" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" - -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - -[[package]] -name = "strsim" -version = 
"0.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" - -[[package]] -name = "strum" -version = "0.26.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" -dependencies = [ - "strum_macros 0.26.4", -] - -[[package]] -name = "strum" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros 0.27.2", -] - -[[package]] -name = "strum_macros" -version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.114", -] - -[[package]] -name = "strum_macros" -version = "0.27.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" -dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "subtle" -version = "2.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" - -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn" -version = "2.0.114" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "syn-solidity" -version = "1.5.4" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2379beea9476b89d0237078be761cf8e012d92d5ae4ae0c9a329f974838870fc" -dependencies = [ - "paste", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -dependencies = [ - "futures-core", -] - -[[package]] -name = "synstructure" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "sysinfo" -version = "0.33.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" -dependencies = [ - "core-foundation-sys", - "libc", - "memchr", - "ntapi", - "windows 0.57.0", -] - -[[package]] -name = "tag_ptr" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0e973b34477b7823833469eb0f5a3a60370fef7a453e02d751b59180d0a5a05" - -[[package]] -name = "tagptr" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" - -[[package]] -name = "tap" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" - -[[package]] -name = "tar" -version = "0.4.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" -dependencies = [ - "filetime", - "libc", - "xattr", -] - -[[package]] -name = "tar-no-std" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ac9ee8b664c9f1740cd813fea422116f8ba29997bb7c878d1940424889802897" -dependencies = [ - "bitflags 2.10.0", - "log", - "num-traits", -] - -[[package]] -name = "tempfile" -version = "3.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" -dependencies = [ - "fastrand", - "getrandom 0.3.4", - "once_cell", - "rustix 1.1.3", - "windows-sys 0.61.2", -] - -[[package]] -name = "test-case" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb2550dd13afcd286853192af8601920d959b14c401fcece38071d53bf0768a8" -dependencies = [ - "test-case-macros", -] - -[[package]] -name = "test-case-core" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adcb7fd841cd518e279be3d5a3eb0636409487998a4aff22f3de87b81e88384f" -dependencies = [ - "cfg-if", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "test-case-macros" -version = "3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "test-case-core", -] - -[[package]] -name = "thin-vec" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" - -[[package]] -name = "thiserror" -version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" -dependencies = [ - "thiserror-impl 1.0.69", -] - -[[package]] -name = "thiserror" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" -dependencies = [ - "thiserror-impl 2.0.18", -] - -[[package]] -name = "thiserror-impl" 
-version = "1.0.69" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "thread_local" -version = "1.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "threadpool" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" -dependencies = [ - "num_cpus", -] - -[[package]] -name = "tikv-jemalloc-ctl" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "661f1f6a57b3a36dc9174a2c10f19513b4866816e13425d3e418b11cc37bc24c" -dependencies = [ - "libc", - "paste", - "tikv-jemalloc-sys", -] - -[[package]] -name = "tikv-jemalloc-sys" -version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" -dependencies = [ - "cc", - "libc", -] - -[[package]] -name = "tikv-jemallocator" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" -dependencies = [ - "libc", - "tikv-jemalloc-sys", -] - -[[package]] -name = "time" -version = "0.3.46" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9da98b7d9b7dad93488a84b8248efc35352b0b2657397d4167e7ad67e5d535e5" 
-dependencies = [ - "deranged", - "itoa", - "js-sys", - "libc", - "num-conv", - "num_threads", - "powerfmt", - "serde_core", - "time-core", - "time-macros", -] - -[[package]] -name = "time-core" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" - -[[package]] -name = "time-macros" -version = "0.2.26" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc610bac2dcee56805c99642447d4c5dbde4d01f752ffea0199aee1f601dc4" -dependencies = [ - "num-conv", - "time-core", -] - -[[package]] -name = "tinystr" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" -dependencies = [ - "displaydoc", - "serde_core", - "zerovec", -] - -[[package]] -name = "tinyvec" -version = "1.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" -dependencies = [ - "tinyvec_macros", -] - -[[package]] -name = "tinyvec_macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" - -[[package]] -name = "tokio" -version = "1.49.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" -dependencies = [ - "bytes", - "libc", - "mio", - "parking_lot", - "pin-project-lite", - "signal-hook-registry", - "socket2 0.6.2", - "tokio-macros", - "windows-sys 0.61.2", -] - -[[package]] -name = "tokio-macros" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tokio-rustls" 
-version = "0.26.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" -dependencies = [ - "rustls", - "tokio", -] - -[[package]] -name = "tokio-stream" -version = "0.1.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" -dependencies = [ - "futures-core", - "pin-project-lite", - "tokio", - "tokio-util", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" -dependencies = [ - "futures-util", - "log", - "rustls", - "rustls-native-certs", - "rustls-pki-types", - "tokio", - "tokio-rustls", - "tungstenite", - "webpki-roots 0.26.11", -] - -[[package]] -name = "tokio-util" -version = "0.7.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" -dependencies = [ - "bytes", - "futures-core", - "futures-io", - "futures-sink", - "pin-project-lite", - "slab", - "tokio", -] - -[[package]] -name = "toml" -version = "0.8.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime 0.6.11", - "toml_edit 0.22.27", -] - -[[package]] -name = "toml_datetime" -version = "0.6.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" -dependencies = [ - "serde", -] - -[[package]] -name = "toml_datetime" -version = "0.7.5+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" -dependencies = [ - "serde_core", -] - 
-[[package]] -name = "toml_edit" -version = "0.22.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" -dependencies = [ - "indexmap 2.13.0", - "serde", - "serde_spanned", - "toml_datetime 0.6.11", - "toml_write", - "winnow", -] - -[[package]] -name = "toml_edit" -version = "0.23.10+spec-1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" -dependencies = [ - "indexmap 2.13.0", - "toml_datetime 0.7.5+spec-1.1.0", - "toml_parser", - "winnow", -] - -[[package]] -name = "toml_parser" -version = "1.0.6+spec-1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" -dependencies = [ - "winnow", -] - -[[package]] -name = "toml_write" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" - -[[package]] -name = "tonic" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a" -dependencies = [ - "async-trait", - "base64 0.22.1", - "bytes", - "http", - "http-body", - "http-body-util", - "hyper", - "hyper-timeout", - "hyper-util", - "percent-encoding", - "pin-project", - "sync_wrapper", - "tokio", - "tokio-stream", - "tower", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tonic-prost" -version = "0.14.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0" -dependencies = [ - "bytes", - "prost 0.14.3", - "tonic", -] - -[[package]] -name = "tower" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" -dependencies = [ - "futures-core", - "futures-util", - "hdrhistogram", - "indexmap 2.13.0", - "pin-project-lite", - "slab", - "sync_wrapper", - "tokio", - "tokio-util", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "tower-http" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" -dependencies = [ - "async-compression", - "base64 0.22.1", - "bitflags 2.10.0", - "bytes", - "futures-core", - "futures-util", - "http", - "http-body", - "http-body-util", - "http-range-header", - "httpdate", - "iri-string", - "mime", - "mime_guess", - "percent-encoding", - "pin-project-lite", - "tokio", - "tokio-util", - "tower", - "tower-layer", - "tower-service", - "tracing", - "uuid", -] - -[[package]] -name = "tower-layer" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" - -[[package]] -name = "tower-service" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" - -[[package]] -name = "tracing" -version = "0.1.44" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" -dependencies = [ - "log", - "pin-project-lite", - "tracing-attributes", - "tracing-core", -] - -[[package]] -name = "tracing-appender" -version = "0.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" -dependencies = [ - "crossbeam-channel", - "thiserror 2.0.18", - "time", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-attributes" -version = "0.1.31" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "tracing-core" -version = "0.1.36" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" -dependencies = [ - "once_cell", - "valuable", -] - -[[package]] -name = "tracing-futures" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" -dependencies = [ - "pin-project", - "tracing", -] - -[[package]] -name = "tracing-journald" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3a81ed245bfb62592b1e2bc153e77656d94ee6a0497683a65a12ccaf2438d0" -dependencies = [ - "libc", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-logfmt" -version = "0.3.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b1f47d22deb79c3f59fcf2a1f00f60cbdc05462bf17d1cd356c1fefa3f444bd" -dependencies = [ - "time", - "tracing", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-opentelemetry" -version = "0.32.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ac28f2d093c6c477eaa76b23525478f38de514fa9aeb1285738d4b97a9552fc" -dependencies = [ - "js-sys", - "opentelemetry", - "smallvec", - "tracing", - "tracing-core", - "tracing-log", - "tracing-subscriber 0.3.22", - "web-time", -] - -[[package]] -name = "tracing-samply" -version = "0.1.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c175f7ecc002b6ef04776a39f440503e4e788790ddbdbfac8259b7a069526334" -dependencies = [ - "cfg-if", - "itoa", - "libc", - "mach2", - "memmap2", - "smallvec", - "tracing-core", - "tracing-subscriber 0.3.22", -] - -[[package]] -name = "tracing-serde" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" -dependencies = [ - "serde", - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.2.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" -dependencies = [ - "tracing-core", -] - -[[package]] -name = "tracing-subscriber" -version = "0.3.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" -dependencies = [ - "matchers", - "nu-ansi-term", - "once_cell", - "regex-automata", - "serde", - "serde_json", - "sharded-slab", - "smallvec", - "thread_local", - "tracing", - "tracing-core", - "tracing-log", - "tracing-serde", -] - -[[package]] -name = "tracing-tracy" -version = "0.11.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eaa1852afa96e0fe9e44caa53dc0bd2d9d05e0f2611ce09f97f8677af56e4ba" -dependencies = [ - "tracing-core", - "tracing-subscriber 0.3.22", - "tracy-client", -] - -[[package]] -name = "tracy-client" -version = "0.18.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4f6fc3baeac5d86ab90c772e9e30620fc653bf1864295029921a15ef478e6a5" -dependencies = [ - "loom", - "once_cell", - "rustc-demangle", - "tracy-client-sys", -] - -[[package]] -name = "tracy-client-sys" -version = "0.28.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c5f7c95348f20c1c913d72157b3c6dee6ea3e30b3d19502c5a7f6d3f160dacbf" -dependencies = [ - "cc", - "windows-targets 0.52.6", -] - -[[package]] -name = "tree_hash" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee44f4cef85f88b4dea21c0b1f58320bdf35715cf56d840969487cff00613321" -dependencies = [ - "alloy-primitives", - "ethereum_hashing", - "ethereum_ssz", - "smallvec", - "typenum", -] - -[[package]] -name = "tree_hash_derive" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" -dependencies = [ - "darling 0.20.11", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "triehash" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1631b201eb031b563d2e85ca18ec8092508e262a3196ce9bd10a67ec87b9f5c" -dependencies = [ - "hash-db", - "rlp", -] - -[[package]] -name = "try-lock" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" - -[[package]] -name = "tungstenite" -version = "0.26.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" -dependencies = [ - "bytes", - "data-encoding", - "http", - "httparse", - "log", - "rand 0.9.2", - "rustls", - "rustls-pki-types", - "sha1", - "thiserror 2.0.18", - "utf-8", -] - -[[package]] -name = "typeid" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" - -[[package]] -name = "typenum" -version = "1.19.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" - -[[package]] -name = "ucd-trie" -version = "0.1.7" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" - -[[package]] -name = "uint" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "uint" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" -dependencies = [ - "byteorder", - "crunchy", - "hex", - "static_assertions", -] - -[[package]] -name = "unarray" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" - -[[package]] -name = "unicase" -version = "2.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" - -[[package]] -name = "unicode-ident" -version = "1.0.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" - -[[package]] -name = "unicode-segmentation" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" - -[[package]] -name = "unicode-truncate" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" -dependencies = [ - "itertools 0.13.0", - "unicode-segmentation", - "unicode-width 0.1.14", -] - -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - -[[package]] -name = "unicode-width" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" - -[[package]] -name = "unicode-xid" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" - -[[package]] -name = "universal-hash" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" -dependencies = [ - "crypto-common", - "subtle", -] - -[[package]] -name = "unsigned-varint" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" - -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - -[[package]] -name = "url" -version = "2.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" -dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", - "serde_derive", -] - -[[package]] -name = "utf-8" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" - -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - -[[package]] -name = "utf8_iter" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" - -[[package]] -name = "utf8parse" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" - -[[package]] -name = "uuid" -version = "1.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f" -dependencies = [ - "getrandom 0.3.4", - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "valuable" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "vergen" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b849a1f6d8639e8de261e81ee0fc881e3e3620db1af9f2e0da015d4382ceaf75" -dependencies = [ - "anyhow", - "cargo_metadata", - "derive_builder", - "regex", - "rustversion", - "time", - "vergen-lib", -] - -[[package]] -name = "vergen-git2" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d51ab55ddf1188c8d679f349775362b0fa9e90bd7a4ac69838b2a087623f0d57" -dependencies = [ - "anyhow", - "derive_builder", - "git2", - "rustversion", - "time", - "vergen", - "vergen-lib", -] - -[[package]] -name = "vergen-lib" -version = "9.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b34a29ba7e9c59e62f229ae1932fb1b8fb8a6fdcc99215a641913f5f5a59a569" -dependencies = [ - "anyhow", - "derive_builder", - "rustversion", -] - -[[package]] -name = "version_check" -version = "0.9.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" - -[[package]] -name = "visibility" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "wait-timeout" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" -dependencies = [ - "libc", -] - -[[package]] -name = "walkdir" -version = "2.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" -dependencies = [ - "same-file", - "winapi-util", -] - -[[package]] -name = "want" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" -dependencies = [ - "try-lock", -] - -[[package]] -name = "wasi" -version = "0.11.1+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" - -[[package]] -name = "wasip2" -version = "1.0.2+wasi-0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" -dependencies = [ - "wit-bindgen", -] - -[[package]] -name = "wasm-bindgen" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" -dependencies = [ - "cfg-if", - "once_cell", - "rustversion", - "wasm-bindgen-macro", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-futures" -version = "0.4.58" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" -dependencies = [ - "cfg-if", - "futures-util", - "js-sys", - "once_cell", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" -dependencies = [ - "bumpalo", - "proc-macro2", - "quote", - "syn 2.0.114", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.108" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "wasm-streams" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" -dependencies = [ - "futures-util", - "js-sys", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "wasmtimer" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" -dependencies = [ - "futures", - "js-sys", - "parking_lot", - "pin-utils", - "slab", - "wasm-bindgen", -] - -[[package]] -name = "web-sys" -version = "0.3.85" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "web-time" -version = "1.1.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki-root-certs" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" -dependencies = [ - "webpki-root-certs 1.0.5", -] - -[[package]] -name = "webpki-root-certs" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "webpki-roots" -version = "0.26.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" -dependencies = [ - "webpki-roots 1.0.5", -] - -[[package]] -name = "webpki-roots" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" -dependencies = [ - "rustls-pki-types", -] - -[[package]] -name = "widestring" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-util" -version = "0.1.11" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" -dependencies = [ - "windows-sys 0.61.2", -] - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "windows" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" -dependencies = [ - "windows-core 0.57.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" -dependencies = [ - "windows-collections", - "windows-core 0.62.2", - "windows-future", - "windows-numerics", -] - -[[package]] -name = "windows-collections" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" -dependencies = [ - "windows-core 0.62.2", -] - -[[package]] -name = "windows-core" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" -dependencies = [ - "windows-implement 0.57.0", - "windows-interface 0.57.0", - "windows-result 0.1.2", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-core" -version = "0.62.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" -dependencies = [ - "windows-implement 0.60.2", - "windows-interface 0.59.3", - "windows-link", - "windows-result 0.4.1", - "windows-strings", -] - -[[package]] -name = "windows-future" -version = "0.3.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" -dependencies = [ - "windows-core 0.62.2", - "windows-link", - "windows-threading", -] - -[[package]] -name = "windows-implement" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-implement" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-interface" -version = "0.57.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-interface" -version = "0.59.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "windows-link" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" - -[[package]] -name = "windows-numerics" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" -dependencies = [ - "windows-core 0.62.2", - "windows-link", -] - -[[package]] -name = "windows-result" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" 
-dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - -[[package]] -name = "windows-sys" -version = "0.48.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" -dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.60.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" -dependencies = [ - "windows-targets 0.53.5", -] - -[[package]] -name = "windows-sys" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" 
-dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - -[[package]] -name = "windows-targets" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" -dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", -] - -[[package]] -name = "windows-targets" -version = "0.53.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" -dependencies = [ - "windows-link", - "windows_aarch64_gnullvm 0.53.1", - "windows_aarch64_msvc 0.53.1", - "windows_i686_gnu 0.53.1", - "windows_i686_gnullvm 0.53.1", - "windows_i686_msvc 0.53.1", - "windows_x86_64_gnu 0.53.1", - "windows_x86_64_gnullvm 0.53.1", - "windows_x86_64_msvc 0.53.1", -] - -[[package]] -name = "windows-threading" -version = "0.2.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - -[[package]] -name = "windows_i686_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" - -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - -[[package]] -name = "windows_i686_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" - -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - -[[package]] -name = "windows_i686_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - -[[package]] -name = 
"windows_x86_64_msvc" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" - -[[package]] -name = "winnow" -version = "0.7.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" -dependencies = [ - "memchr", -] - -[[package]] -name = "winreg" -version = "0.50.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" -dependencies = [ - "cfg-if", - "windows-sys 0.48.0", -] - -[[package]] -name = "wit-bindgen" -version = "0.51.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" - -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - -[[package]] -name = "writeable" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" - -[[package]] -name = "ws_stream_wasm" -version = "0.7.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" -dependencies = [ - "async_io_stream", - "futures", - "js-sys", - "log", - "pharos", - "rustc_version 
0.4.1", - "send_wrapper 0.6.0", - "thiserror 2.0.18", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", -] - -[[package]] -name = "wyz" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" -dependencies = [ - "tap", -] - -[[package]] -name = "xattr" -version = "1.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" -dependencies = [ - "libc", - "rustix 1.1.3", -] - -[[package]] -name = "xsum" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0637d3a5566a82fa5214bae89087bc8c9fb94cd8e8a3c07feb691bb8d9c632db" - -[[package]] -name = "yansi" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" - -[[package]] -name = "yoke" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" -dependencies = [ - "stable_deref_trait", - "yoke-derive", - "zerofrom", -] - -[[package]] -name = "yoke-derive" -version = "0.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "zerocopy" -version = "0.8.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7456cf00f0685ad319c5b1693f291a650eaf345e941d082fc4e03df8a03996ac" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.8.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1328722bbf2115db7e19d69ebcc15e795719e2d66b60827c6a69a117365e37a0" -dependencies = [ - 
"proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerofrom" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" -dependencies = [ - "zerofrom-derive", -] - -[[package]] -name = "zerofrom-derive" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "zeroize" -version = "1.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" -dependencies = [ - "zeroize_derive", -] - -[[package]] -name = "zeroize_derive" -version = "1.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zerotrie" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" -dependencies = [ - "displaydoc", - "yoke", - "zerofrom", -] - -[[package]] -name = "zerovec" -version = "0.11.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" -dependencies = [ - "serde", - "yoke", - "zerofrom", - "zerovec-derive", -] - -[[package]] -name = "zerovec-derive" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "zmij" -version = "1.0.17" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "02aae0f83f69aafc94776e879363e9771d7ecbffe2c7fbb6c14c5e00dfe88439" - -[[package]] -name = "zstd" -version = "0.13.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" -dependencies = [ - "zstd-safe", -] - -[[package]] -name = "zstd-safe" -version = "7.2.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" -dependencies = [ - "zstd-sys", -] - -[[package]] -name = "zstd-sys" -version = "2.0.16+zstd.1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" -dependencies = [ - "cc", - "pkg-config", -] diff --git a/op-reth/Cargo.toml b/op-reth/Cargo.toml deleted file mode 100644 index 15a63a91c86..00000000000 --- a/op-reth/Cargo.toml +++ /dev/null @@ -1,451 +0,0 @@ -[workspace.package] -version = "1.10.2" -edition = "2024" -rust-version = "1.88" -license = "MIT OR Apache-2.0" -homepage = "https://paradigmxyz.github.io/reth" -repository = "https://github.com/paradigmxyz/reth" -exclude = [".github/"] - -[workspace] -members = [ - "bin/", - "crates/chainspec/", - "crates/cli/", - "crates/consensus/", - "crates/evm/", - "crates/flashblocks/", - "crates/hardforks/", - "crates/node/", - "crates/payload/", - "crates/primitives/", - "crates/reth/", - "crates/rpc/", - "crates/storage/", - "crates/txpool/", -] -default-members = ["bin/"] -resolver = "2" - -[workspace.lints] -rust.missing_debug_implementations = "warn" -rust.missing_docs = "warn" -rust.rust_2018_idioms = { level = "deny", priority = -1 } -rust.unreachable_pub = "warn" -rust.unused_must_use = "deny" -rust.rust_2024_incompatible_pat = "warn" -rustdoc.all = "warn" - -[workspace.lints.clippy] -borrow_as_ptr = "warn" -branches_sharing_code = "warn" -clear_with_drain = 
"warn" -cloned_instead_of_copied = "warn" -collection_is_never_read = "warn" -dbg_macro = "warn" -derive_partial_eq_without_eq = "warn" -doc_markdown = "warn" -empty_line_after_doc_comments = "warn" -empty_line_after_outer_attr = "warn" -enum_glob_use = "warn" -equatable_if_let = "warn" -explicit_into_iter_loop = "warn" -explicit_iter_loop = "warn" -flat_map_option = "warn" -from_iter_instead_of_collect = "warn" -if_not_else = "warn" -if_then_some_else_none = "warn" -implicit_clone = "warn" -imprecise_flops = "warn" -iter_on_empty_collections = "warn" -iter_on_single_items = "warn" -iter_with_drain = "warn" -iter_without_into_iter = "warn" -large_stack_frames = "warn" -manual_assert = "warn" -manual_clamp = "warn" -manual_is_variant_and = "warn" -manual_string_new = "warn" -match_same_arms = "warn" -missing-const-for-fn = "warn" -mutex_integer = "warn" -naive_bytecount = "warn" -needless_bitwise_bool = "warn" -needless_continue = "warn" -needless_for_each = "warn" -needless_pass_by_ref_mut = "warn" -nonstandard_macro_braces = "warn" -option_as_ref_cloned = "warn" -or_fun_call = "warn" -path_buf_push_overwrite = "warn" -read_zero_byte_vec = "warn" -result_large_err = "allow" -redundant_clone = "warn" -redundant_else = "warn" -single_char_pattern = "warn" -string_lit_as_bytes = "warn" -string_lit_chars_any = "warn" -suboptimal_flops = "warn" -suspicious_operation_groupings = "warn" -trailing_empty_array = "warn" -trait_duplication_in_bounds = "warn" -transmute_undefined_repr = "warn" -trivial_regex = "warn" -tuple_array_conversions = "warn" -type_repetition_in_bounds = "warn" -uninhabited_references = "warn" -unnecessary_self_imports = "warn" -unnecessary_struct_initialization = "warn" -unnested_or_patterns = "warn" -unused_peekable = "warn" -unused_rounding = "warn" -use_self = "warn" -useless_let_if_seq = "warn" -while_float = "warn" -zero_sized_map_values = "warn" - -as_ptr_cast_mut = "allow" -cognitive_complexity = "allow" -debug_assert_with_mut_call = "allow" 
-fallible_impl_from = "allow" -future_not_send = "allow" -needless_collect = "allow" -non_send_fields_in_send_ty = "allow" -redundant_pub_crate = "allow" -significant_drop_in_scrutinee = "allow" -significant_drop_tightening = "allow" -too_long_first_doc_paragraph = "allow" - -[profile.dev] -debug = "line-tables-only" -split-debuginfo = "unpacked" - -[profile.dev.package] -proptest.opt-level = 3 -rand_chacha.opt-level = 3 -rand_xorshift.opt-level = 3 -unarray.opt-level = 3 - -[profile.hivetests] -inherits = "test" -opt-level = 3 -lto = "thin" - -[profile.release] -opt-level = 3 -lto = "thin" -debug = "none" -strip = "symbols" -panic = "unwind" -codegen-units = 16 - -[profile.profiling] -inherits = "release" -debug = "full" -strip = "none" - -[profile.bench] -inherits = "profiling" - -[profile.maxperf] -inherits = "release" -lto = "fat" -codegen-units = 1 - -[profile.reproducible] -inherits = "release" -panic = "abort" -codegen-units = 1 -incremental = false - -[workspace.dependencies] -# ==================== OP-RETH INTERNAL CRATES ==================== -op-reth = { path = "bin/" } -reth-optimism-chainspec = { path = "crates/chainspec/", default-features = false } -reth-optimism-cli = { path = "crates/cli/", default-features = false } -reth-optimism-consensus = { path = "crates/consensus/", default-features = false } -reth-optimism-evm = { path = "crates/evm/", default-features = false } -reth-optimism-flashblocks = { path = "crates/flashblocks/" } -reth-optimism-forks = { path = "crates/hardforks/", default-features = false } -reth-optimism-node = { path = "crates/node/" } -reth-optimism-payload-builder = { path = "crates/payload/" } -reth-optimism-primitives = { path = "crates/primitives/", default-features = false } -reth-op = { path = "crates/reth/", default-features = false } -reth-optimism-rpc = { path = "crates/rpc/" } -reth-optimism-storage = { path = "crates/storage/" } -reth-optimism-txpool = { path = "crates/txpool/" } - -# ==================== RETH CRATES 
(from git revision b3d532ce9d09b925ebbce7bdf213d04c402e124c) ==================== -reth = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-basic-payload-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-chain-state = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-chainspec = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-cli = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-cli-commands = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-cli-runner = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-cli-util = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-codecs = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-consensus = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-consensus-common = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-db = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-db-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-db-common = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-downloaders = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-e2e-test-utils = { git = "https://github.com/paradigmxyz/reth", rev = 
"b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-engine-local = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-engine-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-eth-wire = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-ethereum-forks = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-evm = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-exex = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-execution-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-execution-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-fs-util = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-metrics = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-network = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-network-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-network-peers = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-node-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-node-builder = { git = 
"https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-node-core = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-node-events = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-node-metrics = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-payload-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-payload-builder-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-payload-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-payload-util = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-payload-validator = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-primitives-traits = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-provider = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-prune = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-prune-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-revm = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-rpc = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-rpc-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-rpc-builder = { git = 
"https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-rpc-engine-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-rpc-eth-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-rpc-eth-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-rpc-server-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-stages = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-stages-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-static-file = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-static-file-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-storage-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-storage-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-tasks = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-tracing = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-transaction-pool = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-trie = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-trie-common = { git = "https://github.com/paradigmxyz/reth", rev = 
"b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } -reth-trie-db = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c" } -reth-zstd-compressors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce9d09b925ebbce7bdf213d04c402e124c", default-features = false } - -# ==================== REVM ==================== -revm = { version = "34.0.0", default-features = false } -revm-bytecode = { version = "8.0.0", default-features = false } -revm-database = { version = "10.0.0", default-features = false } -revm-state = { version = "9.0.0", default-features = false } -revm-primitives = { version = "22.0.0", default-features = false } -revm-interpreter = { version = "32.0.0", default-features = false } -revm-database-interface = { version = "9.0.0", default-features = false } -op-revm = { version = "15.0.0", default-features = false } -revm-inspectors = "0.34.1" - -# ==================== ALLOY ==================== -alloy-chains = { version = "0.2.5", default-features = false } -alloy-dyn-abi = "1.5.4" -alloy-eip2124 = { version = "0.2.0", default-features = false } -alloy-eip7928 = { version = "0.3.0", default-features = false } -alloy-evm = { version = "0.27.0", default-features = false } -alloy-primitives = { version = "1.5.4", default-features = false, features = ["map-foldhash"] } -alloy-rlp = { version = "0.3.10", default-features = false, features = ["core-net"] } -alloy-sol-macro = "1.5.4" -alloy-sol-types = { version = "1.5.4", default-features = false } -alloy-trie = { version = "0.9.1", default-features = false } - -alloy-hardforks = "0.4.5" - -alloy-consensus = { version = "1.5.2", default-features = false } -alloy-contract = { version = "1.5.2", default-features = false } -alloy-eips = { version = "1.5.2", default-features = false } -alloy-genesis = { version = "1.5.2", default-features = false } -alloy-json-rpc = { version = "1.5.2", default-features = false } -alloy-network = { version = 
"1.5.2", default-features = false } -alloy-network-primitives = { version = "1.5.2", default-features = false } -alloy-provider = { version = "1.5.2", features = ["reqwest", "debug-api"], default-features = false } -alloy-pubsub = { version = "1.5.2", default-features = false } -alloy-rpc-client = { version = "1.5.2", default-features = false } -alloy-rpc-types = { version = "1.5.2", features = ["eth"], default-features = false } -alloy-rpc-types-admin = { version = "1.5.2", default-features = false } -alloy-rpc-types-anvil = { version = "1.5.2", default-features = false } -alloy-rpc-types-beacon = { version = "1.5.2", default-features = false } -alloy-rpc-types-debug = { version = "1.5.2", default-features = false } -alloy-rpc-types-engine = { version = "1.5.2", default-features = false } -alloy-rpc-types-eth = { version = "1.5.2", default-features = false } -alloy-rpc-types-mev = { version = "1.5.2", default-features = false } -alloy-rpc-types-trace = { version = "1.5.2", default-features = false } -alloy-rpc-types-txpool = { version = "1.5.2", default-features = false } -alloy-serde = { version = "1.5.2", default-features = false } -alloy-signer = { version = "1.5.2", default-features = false } -alloy-signer-local = { version = "1.5.2", default-features = false } -alloy-transport = { version = "1.5.2" } -alloy-transport-http = { version = "1.5.2", features = ["reqwest-rustls-tls"], default-features = false } -alloy-transport-ipc = { version = "1.5.2", default-features = false } -alloy-transport-ws = { version = "1.5.2", default-features = false } - -# ==================== OP-ALLOY ==================== -alloy-op-evm = { version = "0.27.0", default-features = false } -alloy-op-hardforks = "0.4.4" -op-alloy-rpc-types = { version = "0.23.1", default-features = false } -op-alloy-rpc-types-engine = { version = "0.23.1", default-features = false } -op-alloy-network = { version = "0.23.1", default-features = false } -op-alloy-consensus = { version = "0.23.1", 
default-features = false } -op-alloy-rpc-jsonrpsee = { version = "0.23.1", default-features = false } -op-alloy-flz = { version = "0.13.1", default-features = false } - -# ==================== ASYNC ==================== -async-compression = { version = "0.4", default-features = false } -async-stream = "0.3" -async-trait = "0.1.68" -futures = "0.3" -futures-core = "0.3" -futures-util = { version = "0.3", default-features = false } -hyper = "1.3" -hyper-util = "0.1.5" -pin-project = "1.0.12" -reqwest = { version = "0.12", default-features = false } -tracing-futures = "0.2" -tower = "0.5" -tower-http = "0.6" - -# ==================== TOKIO ==================== -tokio = { version = "1.44.2", default-features = false } -tokio-stream = "0.1.11" -tokio-tungstenite = "0.26.2" -tokio-util = { version = "0.7.4", features = ["codec"] } - -# ==================== RPC ==================== -jsonrpsee = "0.26.0" -jsonrpsee-core = "0.26.0" -jsonrpsee-server = "0.26.0" -jsonrpsee-http-client = "0.26.0" -jsonrpsee-types = "0.26.0" - -# ==================== CRYPTO ==================== -c-kzg = "2.1.5" -enr = { version = "0.13", default-features = false } -k256 = { version = "0.13", default-features = false, features = ["ecdsa"] } -secp256k1 = { version = "0.30", default-features = false, features = ["global-context", "recovery"] } -sha2 = { version = "0.10", default-features = false } - -# ==================== SERIALIZATION ==================== -bincode = "1.3" -serde = { version = "1.0", default-features = false } -serde_json = { version = "1.0", default-features = false, features = ["alloc"] } -serde_with = { version = "3", default-features = false, features = ["macros"] } -toml = "0.8" - -# ==================== METRICS ==================== -metrics = "0.24.0" -metrics-derive = "0.1" -metrics-exporter-prometheus = { version = "0.18.0", default-features = false } -metrics-process = "2.1.0" -metrics-util = { default-features = false, version = "0.20.0" } - -# ==================== 
TRACING ==================== -tracing = { version = "0.1.0", default-features = false } -tracing-appender = "0.2" -tracing-subscriber = { version = "0.3", default-features = false } - -# ==================== TESTING ==================== -arbitrary = "1.3" -assert_matches = "1.5.0" -criterion = { package = "codspeed-criterion-compat", version = "4.3" } -insta = "1.41" -proptest = "1.7" -proptest-derive = "0.5" -proptest-arbitrary-interop = "0.1.0" -similar-asserts = { version = "1.5.0", features = ["serde"] } -tempfile = "3.20" -test-fuzz = "7" -rstest = "0.24.0" -test-case = "3" - -# ==================== COMPRESSION ==================== -brotli = "8" -lz4 = "1.28.1" -lz4_flex = { version = "0.11", default-features = false } -miniz_oxide = { version = "0.8.4", default-features = false } -zstd = "0.13" -tar-no-std = { version = "0.3.2", default-features = false } - -# ==================== MISC ==================== -either = { version = "1.15.0", default-features = false } -arrayvec = { version = "0.7.6", default-features = false } -aquamarine = "0.6" -auto_impl = "1" -backon = { version = "1.2", default-features = false, features = ["std-blocking-sleep", "tokio-sleep"] } -bitflags = "2.4" -boyer-moore-magiclen = "0.2.16" -bytes = { version = "1.5", default-features = false } -cfg-if = "1.0" -clap = "4" -color-eyre = "0.6" -dashmap = "6.0" -derive_more = { version = "2", default-features = false, features = ["full"] } -dirs-next = "2.0.0" -dyn-clone = "1.0.17" -eyre = "0.6" -fdlimit = "0.3.0" -fixed-map = { version = "0.9", default-features = false } -humantime = "2.1" -humantime-serde = "1.1" -itertools = { version = "0.14", default-features = false } -linked_hash_set = "0.1" -modular-bitfield = "0.11.2" -notify = { version = "8.0.0", default-features = false, features = ["macos_fsevent"] } -nybbles = { version = "0.4.2", default-features = false } -once_cell = { version = "1.19", default-features = false, features = ["critical-section"] } -parking_lot = "0.12" 
-paste = "1.0" -rand = "0.9" -rand_08 = { package = "rand", version = "0.8" } -rayon = "1.7" -ringbuffer = "0.15.0" -rustc-hash = { version = "2.0", default-features = false } -schnellru = "0.2" -shellexpand = "3.0.0" -shlex = "1.3" -smallvec = "1" -strum = { version = "0.27", default-features = false } -strum_macros = "0.27" -syn = "2.0" -thiserror = { version = "2.0.0", default-features = false } -tar = "0.4.44" -url = { version = "2.3", default-features = false } -byteorder = "1" -mini-moka = "0.10" -moka = "0.12" -chrono = "0.4.41" - -# proc-macros -proc-macro2 = "1.0" -quote = "1.0" - -# http -http = "1.0" -http-body = "1.0" -http-body-util = "0.1.2" -jsonwebtoken = "9" - -# networking -ipnet = "2.11" - -# allocators -jemalloc_pprof = { version = "0.8", default-features = false } -tikv-jemalloc-ctl = "0.6" -tikv-jemallocator = "0.6" -tracy-client = "0.18.0" -snmalloc-rs = { version = "0.3.7", features = ["build_cc"] } diff --git a/op-reth/README.md b/op-reth/README.md deleted file mode 100644 index caa9d136cb5..00000000000 --- a/op-reth/README.md +++ /dev/null @@ -1,149 +0,0 @@ -# reth - -[![bench status](https://github.com/paradigmxyz/reth/actions/workflows/bench.yml/badge.svg)](https://github.com/paradigmxyz/reth/actions/workflows/bench.yml) -[![CI status](https://github.com/paradigmxyz/reth/workflows/unit/badge.svg)][gh-ci] -[![cargo-lint status](https://github.com/paradigmxyz/reth/actions/workflows/lint.yml/badge.svg)][gh-lint] -[![Telegram Chat][tg-badge]][tg-url] - -**Modular, contributor-friendly and blazing-fast implementation of the Ethereum protocol** - -![](./assets/reth-prod.png) - -**[Install](https://paradigmxyz.github.io/reth/installation/installation.html)** -| [User Docs](https://reth.rs) -| [Developer Docs](./docs) -| [Crate Docs](https://reth.rs/docs) - -[gh-ci]: https://github.com/paradigmxyz/reth/actions/workflows/unit.yml -[gh-lint]: https://github.com/paradigmxyz/reth/actions/workflows/lint.yml -[tg-badge]: 
https://img.shields.io/endpoint?color=neon&logo=telegram&label=chat&url=https%3A%2F%2Ftg.sumanjay.workers.dev%2Fparadigm%5Freth - -## What is Reth? - -Reth (short for Rust Ethereum, [pronunciation](https://x.com/kelvinfichter/status/1597653609411268608)) is a new Ethereum full node implementation that is focused on being user-friendly, highly modular, as well as being fast and efficient. Reth is an Execution Layer (EL) and is compatible with all Ethereum Consensus Layer (CL) implementations that support the [Engine API](https://github.com/ethereum/execution-apis/tree/a0d03086564ab1838b462befbc083f873dcf0c0f/src/engine). It is originally built and driven forward by [Paradigm](https://paradigm.xyz/), and is licensed under the Apache and MIT licenses. - -## Goals - -As a full Ethereum node, Reth allows users to connect to the Ethereum network and interact with the Ethereum blockchain. This includes sending and receiving transactions/logs/traces, as well as accessing and interacting with smart contracts. Building a successful Ethereum node requires creating a high-quality implementation that is both secure and efficient, as well as being easy to use on consumer hardware. It also requires building a strong community of contributors who can help support and improve the software. - -More concretely, our goals are: - -1. **Modularity**: Every component of Reth is built to be used as a library: well-tested, heavily documented and benchmarked. We envision that developers will import the node's crates, mix and match, and innovate on top of them. Examples of such usage include but are not limited to spinning up standalone P2P networks, talking directly to a node's database, or "unbundling" the node into the components you need. To achieve that, we are licensing Reth under the Apache/MIT permissive license. You can learn more about the project's components [here](./docs/repo/layout.md). -2. 
**Performance**: Reth aims to be fast, so we use Rust and the [Erigon staged-sync](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) node architecture. We also use our Ethereum libraries (including [Alloy](https://github.com/alloy-rs/alloy/) and [revm](https://github.com/bluealloy/revm/)) which we've battle-tested and optimized via [Foundry](https://github.com/foundry-rs/foundry/). -3. **Free for anyone to use any way they want**: Reth is free open source software, built for the community, by the community. By licensing the software under the Apache/MIT license, we want developers to use it without being bound by business licenses, or having to think about the implications of GPL-like licenses. -4. **Client Diversity**: The Ethereum protocol becomes more antifragile when no node implementation dominates. This ensures that if there's a software bug, the network does not finalize a bad block. By building a new client, we hope to contribute to Ethereum's antifragility. -5. **Support as many EVM chains as possible**: We aspire that Reth can full-sync not only Ethereum, but also other chains like Optimism, Polygon, BNB Smart Chain, and more. If you're working on any of these projects, please reach out. -6. **Configurability**: We want to solve for node operators that care about fast historical queries, but also for hobbyists who cannot operate on large hardware. We also want to support teams and individuals who want both sync from genesis and via "fast sync". We envision that Reth will be configurable enough and provide configurable "profiles" for the tradeoffs that each team faces. - -## Status - -Reth is production ready, and suitable for usage in mission-critical environments such as staking or high-uptime services. 
We also actively recommend professional node operators to switch to Reth in production for performance and cost reasons in use cases where high performance with great margins is required such as RPC, MEV, Indexing, Simulations, and P2P activities. - -More historical context below: - -- We released 1.0 "production-ready" stable Reth in June 2024. - - Reth completed an audit with [Sigma Prime](https://sigmaprime.io/), the developers of [Lighthouse](https://github.com/sigp/lighthouse), the Rust Consensus Layer implementation. Find it [here](./audit/sigma_prime_audit_v2.pdf). - - Revm (the EVM used in Reth) underwent an audit with [Guido Vranken](https://x.com/guidovranken) (#1 [Ethereum Bug Bounty](https://ethereum.org/en/bug-bounty)). We will publish the results soon. -- We released multiple iterative beta versions, up to [beta.9](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.9) on Monday June 3, 2024, the last beta release. -- We released [beta](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) on Monday March 4, 2024, our first breaking change to the database model, providing faster query speed, smaller database footprint, and allowing "history" to be mounted on separate drives. -- We shipped iterative improvements until the last alpha release on February 28, 2024, [0.1.0-alpha.21](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.21). -- We [initially announced](https://www.paradigm.xyz/2023/06/reth-alpha) [0.1.0-alpha.1](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.1) on June 20, 2023. - -### Database compatibility - -We do not have any breaking database changes since beta.1, and we do not plan any in the near future. - -Reth [v0.2.0-beta.1](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) includes -a [set of breaking database changes](https://github.com/paradigmxyz/reth/pull/5191) that makes it impossible to use database files produced by earlier versions. 
- -If you had a database produced by alpha versions of Reth, you need to drop it with `reth db drop` -(using the same arguments such as `--config` or `--datadir` that you passed to `reth node`), and resync using the same `reth node` command you've used before. - -## For Users - -See the [Reth documentation](https://reth.rs/) for instructions on how to install and run Reth. - -## For Developers - -### Using reth as a library - -You can use individual crates of reth in your project. - -The crate docs can be found [here](https://reth.rs/docs/). - -For a general overview of the crates, see [Project Layout](./docs/repo/layout.md). - -### Contributing - -If you want to contribute, or follow along with contributor discussion, you can use our [main telegram](https://t.me/paradigm_reth) to chat with us about the development of Reth! - -- Our contributor guidelines can be found in [`CONTRIBUTING.md`](./CONTRIBUTING.md). -- See our [contributor docs](./docs) for more information on the project. A good starting point is [Project Layout](./docs/repo/layout.md). - -### Building and testing - - - -The Minimum Supported Rust Version (MSRV) of this project is [1.88.0](https://blog.rust-lang.org/2025/06/26/Rust-1.88.0/). - -See the docs for detailed instructions on how to [build from source](https://reth.rs/installation/source/). - -To fully test Reth, you will need to have [Geth installed](https://geth.ethereum.org/docs/getting-started/installing-geth), but it is possible to run a subset of tests without Geth. - -First, clone the repository: - -```sh -git clone https://github.com/paradigmxyz/reth -cd reth -``` - -Next, run the tests: - -```sh -cargo nextest run --workspace - -# Run the Ethereum Foundation tests -make ef-tests -``` - -We highly recommend using [`cargo nextest`](https://nexte.st/) to speed up testing. -Using `cargo test` to run tests may work fine, but this is not tested and does not support more advanced features like retries for spurious failures. 
- -> **Note** -> -> Some tests use random number generators to generate test data. If you want to use a deterministic seed, you can set the `SEED` environment variable. - -## Getting Help - -If you have any questions, first see if the answer to your question can be found in the [docs][book]. - -If the answer is not there: - -- Join the [Telegram][tg-url] to get help, or -- Open a [discussion](https://github.com/paradigmxyz/reth/discussions/new) with your question, or -- Open an issue with [the bug](https://github.com/paradigmxyz/reth/issues/new?assignees=&labels=C-bug%2CS-needs-triage&projects=&template=bug.yml) - -## Security - -See [`SECURITY.md`](./SECURITY.md). - -## Acknowledgements - -Reth is a new implementation of the Ethereum protocol. In the process of developing the node we investigated the design decisions other nodes have made to understand what is done well, what is not, and where we can improve the status quo. - -None of this would have been possible without them, so big shoutout to the teams below: - -- [Geth](https://github.com/ethereum/go-ethereum/): We would like to express our heartfelt gratitude to the go-ethereum team for their outstanding contributions to Ethereum over the years. Their tireless efforts and dedication have helped to shape the Ethereum ecosystem and make it the vibrant and innovative community it is today. Thank you for your hard work and commitment to the project. -- [Erigon](https://github.com/ledgerwatch/erigon) (fka Turbo-Geth): Erigon pioneered the ["Staged Sync" architecture](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) that Reth is using, as well as [introduced MDBX](https://github.com/ledgerwatch/erigon/wiki/Choice-of-storage-engine) as the database of choice. We thank Erigon for pushing the state of the art research on the performance limits of Ethereum nodes. 
-- [Akula](https://github.com/akula-bft/akula/): Reth uses forks of the Apache versions of Akula's [MDBX Bindings](https://github.com/paradigmxyz/reth/pull/132), [FastRLP](https://github.com/paradigmxyz/reth/pull/63) and [ECIES](https://github.com/paradigmxyz/reth/pull/80). Given that these packages were already released under the Apache License, and they implement standardized solutions, we decided not to reimplement them to iterate faster. We thank the Akula team for their contributions to the Rust Ethereum ecosystem and for publishing these packages. - -## Warning - -The `NippyJar` and `Compact` encoding formats and their implementations are designed for storing and retrieving data internally. They are not hardened to safely read potentially malicious data. - -[book]: https://reth.rs/ -[tg-url]: https://t.me/paradigm_reth diff --git a/op-reth/bin/Cargo.toml b/op-reth/bin/Cargo.toml deleted file mode 100644 index 4049ee1ecad..00000000000 --- a/op-reth/bin/Cargo.toml +++ /dev/null @@ -1,64 +0,0 @@ -[package] -name = "op-reth" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[dependencies] -reth-cli-util.workspace = true -reth-optimism-cli.workspace = true -reth-optimism-rpc.workspace = true -reth-optimism-node.workspace = true -reth-optimism-chainspec.workspace = true -reth-optimism-consensus.workspace = true -reth-optimism-evm.workspace = true -reth-optimism-payload-builder.workspace = true -reth-optimism-primitives.workspace = true -reth-optimism-forks.workspace = true - -clap = { workspace = true, features = ["derive", "env"] } -tracing.workspace = true - -[lints] -workspace = true - -[features] -default = ["jemalloc", "otlp", "reth-optimism-evm/portable", "js-tracer", "keccak-cache-global", "asm-keccak"] - -otlp = ["reth-optimism-cli/otlp"] - -js-tracer = [ - "reth-optimism-node/js-tracer", -] - -jemalloc = 
["reth-cli-util/jemalloc", "reth-optimism-cli/jemalloc"] -jemalloc-prof = ["jemalloc", "reth-cli-util/jemalloc-prof", "reth-optimism-cli/jemalloc-prof"] -jemalloc-symbols = ["jemalloc-prof", "reth-optimism-cli/jemalloc-symbols"] -tracy-allocator = ["reth-cli-util/tracy-allocator", "tracy"] -tracy = ["reth-optimism-cli/tracy"] - -asm-keccak = ["reth-optimism-cli/asm-keccak", "reth-optimism-node/asm-keccak"] -keccak-cache-global = [ - "reth-optimism-cli/keccak-cache-global", - "reth-optimism-node/keccak-cache-global", -] -dev = [ - "reth-optimism-cli/dev", - "reth-optimism-primitives/arbitrary", -] - -min-error-logs = ["tracing/release_max_level_error"] -min-warn-logs = ["tracing/release_max_level_warn"] -min-info-logs = ["tracing/release_max_level_info"] -min-debug-logs = ["tracing/release_max_level_debug"] -min-trace-logs = ["tracing/release_max_level_trace"] - -edge = ["reth-optimism-cli/edge"] - -[[bin]] -name = "op-reth" -path = "src/main.rs" diff --git a/op-reth/bin/src/main.rs b/op-reth/bin/src/main.rs deleted file mode 100644 index 48452326d8b..00000000000 --- a/op-reth/bin/src/main.rs +++ /dev/null @@ -1,36 +0,0 @@ -#![allow(missing_docs, rustdoc::missing_crate_level_docs)] - -use clap::Parser; -use reth_optimism_cli::{chainspec::OpChainSpecParser, Cli}; -use reth_optimism_node::{args::RollupArgs, OpNode}; -use tracing::info; - -#[global_allocator] -static ALLOC: reth_cli_util::allocator::Allocator = reth_cli_util::allocator::new_allocator(); - -#[cfg(all(feature = "jemalloc-prof", unix))] -#[unsafe(export_name = "_rjem_malloc_conf")] -static MALLOC_CONF: &[u8] = b"prof:true,prof_active:true,lg_prof_sample:19\0"; - -fn main() { - reth_cli_util::sigsegv_handler::install(); - - // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided. 
- if std::env::var_os("RUST_BACKTRACE").is_none() { - unsafe { - std::env::set_var("RUST_BACKTRACE", "1"); - } - } - - if let Err(err) = - Cli::::parse().run(async move |builder, rollup_args| { - info!(target: "reth::cli", "Launching node"); - let handle = - builder.node(OpNode::new(rollup_args)).launch_with_debug_capabilities().await?; - handle.node_exit_future.await - }) - { - eprintln!("Error: {err:?}"); - std::process::exit(1); - } -} diff --git a/op-reth/clippy.toml b/op-reth/clippy.toml deleted file mode 100644 index 9ddf1014802..00000000000 --- a/op-reth/clippy.toml +++ /dev/null @@ -1,17 +0,0 @@ -too-large-for-stack = 128 -doc-valid-idents = [ - "P2P", - "ExEx", - "ExExes", - "IPv4", - "IPv6", - "KiB", - "MiB", - "GiB", - "TiB", - "PiB", - "EiB", - "WAL", - "MessagePack", -] -allow-dbg-in-tests = true diff --git a/op-reth/crates/chainspec/Cargo.toml b/op-reth/crates/chainspec/Cargo.toml deleted file mode 100644 index a4ef9263b1c..00000000000 --- a/op-reth/crates/chainspec/Cargo.toml +++ /dev/null @@ -1,90 +0,0 @@ -[package] -name = "reth-optimism-chainspec" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "EVM chain spec implementation for optimism." 
- -[lints] -workspace = true - -[dependencies] -# reth -reth-chainspec.workspace = true -reth-ethereum-forks.workspace = true -reth-primitives-traits.workspace = true -reth-network-peers.workspace = true - -# op-reth -reth-optimism-forks.workspace = true -reth-optimism-primitives.workspace = true - -# ethereum -alloy-chains.workspace = true -alloy-genesis.workspace = true -alloy-primitives.workspace = true -alloy-consensus.workspace = true -alloy-eips.workspace = true -alloy-hardforks.workspace = true - -# op -op-alloy-rpc-types.workspace = true - -serde = { workspace = true, optional = true } -serde_json.workspace = true - -# io -tar-no-std = { workspace = true, optional = true } -miniz_oxide = { workspace = true, features = ["with-alloc"], optional = true } - -# misc -derive_more.workspace = true -paste = { workspace = true, optional = true } -thiserror = { workspace = true, optional = true } -op-alloy-consensus.workspace = true - -[dev-dependencies] -reth-chainspec = { workspace = true, features = ["test-utils"] } -alloy-op-hardforks.workspace = true - -[features] -default = ["std"] -superchain-configs = ["miniz_oxide", "paste", "tar-no-std", "thiserror", "thiserror", "dep:serde"] -std = [ - "alloy-chains/std", - "alloy-genesis/std", - "alloy-primitives/std", - "alloy-eips/std", - "op-alloy-rpc-types/std", - "reth-chainspec/std", - "reth-ethereum-forks/std", - "reth-primitives-traits/std", - "reth-optimism-forks/std", - "reth-optimism-primitives/std", - "alloy-consensus/std", - "derive_more/std", - "reth-network-peers/std", - "serde_json/std", - "serde?/std", - "miniz_oxide?/std", - "thiserror?/std", - "op-alloy-consensus/std", -] -serde = [ - "alloy-chains/serde", - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-hardforks/serde", - "alloy-primitives/serde", - "miniz_oxide?/serde", - "op-alloy-rpc-types/serde", - "reth-ethereum-forks/serde", - "reth-optimism-forks/serde", - "reth-optimism-primitives/serde", - "reth-primitives-traits/serde", - 
"op-alloy-consensus/serde", - "alloy-op-hardforks/serde", -] diff --git a/op-reth/crates/chainspec/src/base_sepolia.rs b/op-reth/crates/chainspec/src/base_sepolia.rs deleted file mode 100644 index 2734bef3fdb..00000000000 --- a/op-reth/crates/chainspec/src/base_sepolia.rs +++ /dev/null @@ -1,41 +0,0 @@ -//! Chain specification for the Base Sepolia testnet network. - -use alloc::{sync::Arc, vec}; - -use alloy_chains::Chain; -use alloy_primitives::{b256, U256}; -use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec, Hardfork}; -use reth_ethereum_forks::EthereumHardfork; -use reth_optimism_forks::{OpHardfork, BASE_SEPOLIA_HARDFORKS}; -use reth_primitives_traits::SealedHeader; - -use crate::{make_op_genesis_header, LazyLock, OpChainSpec}; - -/// The Base Sepolia spec -pub static BASE_SEPOLIA: LazyLock> = LazyLock::new(|| { - let genesis = serde_json::from_str(include_str!("../res/genesis/sepolia_base.json")) - .expect("Can't deserialize Base Sepolia genesis json"); - let hardforks = BASE_SEPOLIA_HARDFORKS.clone(); - OpChainSpec { - inner: ChainSpec { - chain: Chain::base_sepolia(), - genesis_header: SealedHeader::new( - make_op_genesis_header(&genesis, &hardforks), - b256!("0x0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4"), - ), - genesis, - paris_block_and_final_difficulty: Some((0, U256::from(0))), - hardforks, - base_fee_params: BaseFeeParamsKind::Variable( - vec![ - (EthereumHardfork::London.boxed(), BaseFeeParams::base_sepolia()), - (OpHardfork::Canyon.boxed(), BaseFeeParams::base_sepolia_canyon()), - ] - .into(), - ), - prune_delete_limit: 10000, - ..Default::default() - }, - } - .into() -}); diff --git a/op-reth/crates/chainspec/src/lib.rs b/op-reth/crates/chainspec/src/lib.rs deleted file mode 100644 index a91102c4f89..00000000000 --- a/op-reth/crates/chainspec/src/lib.rs +++ /dev/null @@ -1,1348 +0,0 @@ -//! OP-Reth chain specs. 
- -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(not(feature = "std"), no_std)] - -// About the provided chain specs from `res/superchain-configs.tar`: -// The provided `OpChainSpec` structs are built from config files read from -// `superchain-configs.tar`. This `superchain-configs.tar` file contains the chain configs and -// genesis files for all chains. It is created by the `fetch_superchain_config.sh` script in -// the `res` directory. Where all configs are where initial loaded from -// . See the script for more details. -// -// The file is a tar archive containing the following files: -// - `genesis//.json.zz`: The genesis file compressed with deflate. It -// contains the initial accounts, etc. -// - `configs//.json`: The chain metadata file containing the chain id, -// hard forks, etc. -// -// For example, for `UNICHAIN_MAINNET`, the `genesis/mainnet/unichain.json.zz` and -// `configs/mainnet/base.json` is loaded and combined into the `OpChainSpec` struct. -// See `read_superchain_genesis` in `configs.rs` for more details. -// -// To update the chain specs, run the `fetch_superchain_config.sh` script in the `res` directory. -// This will fetch the latest chain configs from the superchain registry and create a new -// `superchain-configs.tar` file. See the script for more details. 
- -extern crate alloc; - -mod base; -mod base_sepolia; -mod basefee; - -pub mod constants; -mod dev; -mod op; -mod op_sepolia; - -#[cfg(feature = "superchain-configs")] -mod superchain; -#[cfg(feature = "superchain-configs")] -pub use superchain::*; - -pub use base::BASE_MAINNET; -pub use base_sepolia::BASE_SEPOLIA; -pub use basefee::*; -pub use dev::OP_DEV; -pub use op::OP_MAINNET; -pub use op_sepolia::OP_SEPOLIA; - -/// Re-export for convenience -pub use reth_optimism_forks::*; - -use alloc::{boxed::Box, vec, vec::Vec}; -use alloy_chains::Chain; -use alloy_consensus::{proofs::storage_root_unhashed, BlockHeader, Header}; -use alloy_eips::eip7840::BlobParams; -use alloy_genesis::Genesis; -use alloy_hardforks::Hardfork; -use alloy_primitives::{B256, U256}; -use derive_more::{Constructor, Deref, From, Into}; -use reth_chainspec::{ - BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, - DisplayHardforks, EthChainSpec, EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, -}; -use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition}; -use reth_network_peers::NodeRecord; -use reth_optimism_primitives::L2_TO_L1_MESSAGE_PASSER_ADDRESS; -use reth_primitives_traits::{sync::LazyLock, SealedHeader}; - -/// Chain spec builder for a OP stack chain. -#[derive(Debug, Default, From)] -pub struct OpChainSpecBuilder { - /// [`ChainSpecBuilder`] - inner: ChainSpecBuilder, -} - -impl OpChainSpecBuilder { - /// Construct a new builder from the base mainnet chain spec. - pub fn base_mainnet() -> Self { - let mut inner = ChainSpecBuilder::default() - .chain(BASE_MAINNET.chain) - .genesis(BASE_MAINNET.genesis.clone()); - let forks = BASE_MAINNET.hardforks.clone(); - inner = inner.with_forks(forks); - - Self { inner } - } - - /// Construct a new builder from the optimism mainnet chain spec. 
- pub fn optimism_mainnet() -> Self { - let mut inner = - ChainSpecBuilder::default().chain(OP_MAINNET.chain).genesis(OP_MAINNET.genesis.clone()); - let forks = OP_MAINNET.hardforks.clone(); - inner = inner.with_forks(forks); - - Self { inner } - } -} - -impl OpChainSpecBuilder { - /// Set the chain ID - pub fn chain(mut self, chain: Chain) -> Self { - self.inner = self.inner.chain(chain); - self - } - - /// Set the genesis block. - pub fn genesis(mut self, genesis: Genesis) -> Self { - self.inner = self.inner.genesis(genesis); - self - } - - /// Add the given fork with the given activation condition to the spec. - pub fn with_fork(mut self, fork: H, condition: ForkCondition) -> Self { - self.inner = self.inner.with_fork(fork, condition); - self - } - - /// Add the given forks with the given activation condition to the spec. - pub fn with_forks(mut self, forks: ChainHardforks) -> Self { - self.inner = self.inner.with_forks(forks); - self - } - - /// Remove the given fork from the spec. - pub fn without_fork(mut self, fork: OpHardfork) -> Self { - self.inner = self.inner.without_fork(fork); - self - } - - /// Enable Bedrock at genesis - pub fn bedrock_activated(mut self) -> Self { - self.inner = self.inner.paris_activated(); - self.inner = self.inner.with_fork(OpHardfork::Bedrock, ForkCondition::Block(0)); - self - } - - /// Enable Regolith at genesis - pub fn regolith_activated(mut self) -> Self { - self = self.bedrock_activated(); - self.inner = self.inner.with_fork(OpHardfork::Regolith, ForkCondition::Timestamp(0)); - self - } - - /// Enable Canyon at genesis - pub fn canyon_activated(mut self) -> Self { - self = self.regolith_activated(); - // Canyon also activates changes from L1's Shanghai hardfork - self.inner = self.inner.with_fork(EthereumHardfork::Shanghai, ForkCondition::Timestamp(0)); - self.inner = self.inner.with_fork(OpHardfork::Canyon, ForkCondition::Timestamp(0)); - self - } - - /// Enable Ecotone at genesis - pub fn ecotone_activated(mut self) -> 
Self { - self = self.canyon_activated(); - self.inner = self.inner.with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)); - self.inner = self.inner.with_fork(OpHardfork::Ecotone, ForkCondition::Timestamp(0)); - self - } - - /// Enable Fjord at genesis - pub fn fjord_activated(mut self) -> Self { - self = self.ecotone_activated(); - self.inner = self.inner.with_fork(OpHardfork::Fjord, ForkCondition::Timestamp(0)); - self - } - - /// Enable Granite at genesis - pub fn granite_activated(mut self) -> Self { - self = self.fjord_activated(); - self.inner = self.inner.with_fork(OpHardfork::Granite, ForkCondition::Timestamp(0)); - self - } - - /// Enable Holocene at genesis - pub fn holocene_activated(mut self) -> Self { - self = self.granite_activated(); - self.inner = self.inner.with_fork(OpHardfork::Holocene, ForkCondition::Timestamp(0)); - self - } - - /// Enable Isthmus at genesis - pub fn isthmus_activated(mut self) -> Self { - self = self.holocene_activated(); - self.inner = self.inner.with_fork(OpHardfork::Isthmus, ForkCondition::Timestamp(0)); - self - } - - /// Enable Jovian at genesis - pub fn jovian_activated(mut self) -> Self { - self = self.isthmus_activated(); - self.inner = self.inner.with_fork(OpHardfork::Jovian, ForkCondition::Timestamp(0)); - self - } - - /// Enable Interop at genesis - pub fn interop_activated(mut self) -> Self { - self = self.jovian_activated(); - self.inner = self.inner.with_fork(OpHardfork::Interop, ForkCondition::Timestamp(0)); - self - } - - /// Build the resulting [`OpChainSpec`]. - /// - /// # Panics - /// - /// This function panics if the chain ID and genesis is not set ([`Self::chain`] and - /// [`Self::genesis`]) - pub fn build(self) -> OpChainSpec { - let mut inner = self.inner.build(); - inner.genesis_header = - SealedHeader::seal_slow(make_op_genesis_header(&inner.genesis, &inner.hardforks)); - - OpChainSpec { inner } - } -} - -/// OP stack chain spec type. 
-#[derive(Debug, Clone, Deref, Into, Constructor, PartialEq, Eq)] -pub struct OpChainSpec { - /// [`ChainSpec`]. - pub inner: ChainSpec, -} - -impl OpChainSpec { - /// Converts the given [`Genesis`] into a [`OpChainSpec`]. - pub fn from_genesis(genesis: Genesis) -> Self { - genesis.into() - } -} - -impl EthChainSpec for OpChainSpec { - type Header = Header; - - fn chain(&self) -> Chain { - self.inner.chain() - } - - fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams { - self.inner.base_fee_params_at_timestamp(timestamp) - } - - fn blob_params_at_timestamp(&self, timestamp: u64) -> Option { - self.inner.blob_params_at_timestamp(timestamp) - } - - fn deposit_contract(&self) -> Option<&DepositContract> { - self.inner.deposit_contract() - } - - fn genesis_hash(&self) -> B256 { - self.inner.genesis_hash() - } - - fn prune_delete_limit(&self) -> usize { - self.inner.prune_delete_limit() - } - - fn display_hardforks(&self) -> Box { - // filter only op hardforks - let op_forks = self.inner.hardforks.forks_iter().filter(|(fork, _)| { - !EthereumHardfork::VARIANTS.iter().any(|h| h.name() == (*fork).name()) - }); - - Box::new(DisplayHardforks::new(op_forks)) - } - - fn genesis_header(&self) -> &Self::Header { - self.inner.genesis_header() - } - - fn genesis(&self) -> &Genesis { - self.inner.genesis() - } - - fn bootnodes(&self) -> Option> { - self.inner.bootnodes() - } - - fn is_optimism(&self) -> bool { - true - } - - fn final_paris_total_difficulty(&self) -> Option { - self.inner.final_paris_total_difficulty() - } - - fn next_block_base_fee(&self, parent: &Header, target_timestamp: u64) -> Option { - if self.is_jovian_active_at_timestamp(parent.timestamp()) { - compute_jovian_base_fee(self, parent, target_timestamp).ok() - } else if self.is_holocene_active_at_timestamp(parent.timestamp()) { - decode_holocene_base_fee(self, parent, target_timestamp).ok() - } else { - self.inner.next_block_base_fee(parent, target_timestamp) - } - } -} - -impl Hardforks 
for OpChainSpec { - fn fork(&self, fork: H) -> ForkCondition { - self.inner.fork(fork) - } - - fn forks_iter(&self) -> impl Iterator { - self.inner.forks_iter() - } - - fn fork_id(&self, head: &Head) -> ForkId { - self.inner.fork_id(head) - } - - fn latest_fork_id(&self) -> ForkId { - self.inner.latest_fork_id() - } - - fn fork_filter(&self, head: Head) -> ForkFilter { - self.inner.fork_filter(head) - } -} - -impl EthereumHardforks for OpChainSpec { - fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { - self.fork(fork) - } -} - -impl OpHardforks for OpChainSpec { - fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { - self.fork(fork) - } -} - -impl From for OpChainSpec { - fn from(genesis: Genesis) -> Self { - use reth_optimism_forks::OpHardfork; - let optimism_genesis_info = OpGenesisInfo::extract_from(&genesis); - let genesis_info = - optimism_genesis_info.optimism_chain_info.genesis_info.unwrap_or_default(); - - // Block-based hardforks - let hardfork_opts = [ - (EthereumHardfork::Frontier.boxed(), Some(0)), - (EthereumHardfork::Homestead.boxed(), genesis.config.homestead_block), - (EthereumHardfork::Tangerine.boxed(), genesis.config.eip150_block), - (EthereumHardfork::SpuriousDragon.boxed(), genesis.config.eip155_block), - (EthereumHardfork::Byzantium.boxed(), genesis.config.byzantium_block), - (EthereumHardfork::Constantinople.boxed(), genesis.config.constantinople_block), - (EthereumHardfork::Petersburg.boxed(), genesis.config.petersburg_block), - (EthereumHardfork::Istanbul.boxed(), genesis.config.istanbul_block), - (EthereumHardfork::MuirGlacier.boxed(), genesis.config.muir_glacier_block), - (EthereumHardfork::Berlin.boxed(), genesis.config.berlin_block), - (EthereumHardfork::London.boxed(), genesis.config.london_block), - (EthereumHardfork::ArrowGlacier.boxed(), genesis.config.arrow_glacier_block), - (EthereumHardfork::GrayGlacier.boxed(), genesis.config.gray_glacier_block), - (OpHardfork::Bedrock.boxed(), 
genesis_info.bedrock_block), - ]; - let mut block_hardforks = hardfork_opts - .into_iter() - .filter_map(|(hardfork, opt)| opt.map(|block| (hardfork, ForkCondition::Block(block)))) - .collect::>(); - - // We set the paris hardfork for OP networks to zero - block_hardforks.push(( - EthereumHardfork::Paris.boxed(), - ForkCondition::TTD { - activation_block_number: 0, - total_difficulty: U256::ZERO, - fork_block: genesis.config.merge_netsplit_block, - }, - )); - - // Time-based hardforks - let time_hardfork_opts = [ - // L1 - // we need to map the L1 hardforks to the activation timestamps of the correspondong op - // hardforks - (EthereumHardfork::Shanghai.boxed(), genesis_info.canyon_time), - (EthereumHardfork::Cancun.boxed(), genesis_info.ecotone_time), - (EthereumHardfork::Prague.boxed(), genesis_info.isthmus_time), - // OP - (OpHardfork::Regolith.boxed(), genesis_info.regolith_time), - (OpHardfork::Canyon.boxed(), genesis_info.canyon_time), - (OpHardfork::Ecotone.boxed(), genesis_info.ecotone_time), - (OpHardfork::Fjord.boxed(), genesis_info.fjord_time), - (OpHardfork::Granite.boxed(), genesis_info.granite_time), - (OpHardfork::Holocene.boxed(), genesis_info.holocene_time), - (OpHardfork::Isthmus.boxed(), genesis_info.isthmus_time), - (OpHardfork::Jovian.boxed(), genesis_info.jovian_time), - (OpHardfork::Interop.boxed(), genesis_info.interop_time), - ]; - - let mut time_hardforks = time_hardfork_opts - .into_iter() - .filter_map(|(hardfork, opt)| { - opt.map(|time| (hardfork, ForkCondition::Timestamp(time))) - }) - .collect::>(); - - block_hardforks.append(&mut time_hardforks); - - // Ordered Hardforks - let mainnet_hardforks = OP_MAINNET_HARDFORKS.clone(); - let mainnet_order = mainnet_hardforks.forks_iter(); - - let mut ordered_hardforks = Vec::with_capacity(block_hardforks.len()); - for (hardfork, _) in mainnet_order { - if let Some(pos) = block_hardforks.iter().position(|(e, _)| **e == *hardfork) { - ordered_hardforks.push(block_hardforks.remove(pos)); - } - } 
- - // append the remaining unknown hardforks to ensure we don't filter any out - ordered_hardforks.append(&mut block_hardforks); - - let hardforks = ChainHardforks::new(ordered_hardforks); - let genesis_header = SealedHeader::seal_slow(make_op_genesis_header(&genesis, &hardforks)); - - Self { - inner: ChainSpec { - chain: genesis.config.chain_id.into(), - genesis_header, - genesis, - hardforks, - // We assume no OP network merges, and set the paris block and total difficulty to - // zero - paris_block_and_final_difficulty: Some((0, U256::ZERO)), - base_fee_params: optimism_genesis_info.base_fee_params, - ..Default::default() - }, - } - } -} - -impl From for OpChainSpec { - fn from(value: ChainSpec) -> Self { - Self { inner: value } - } -} - -#[derive(Default, Debug)] -struct OpGenesisInfo { - optimism_chain_info: op_alloy_rpc_types::OpChainInfo, - base_fee_params: BaseFeeParamsKind, -} - -impl OpGenesisInfo { - fn extract_from(genesis: &Genesis) -> Self { - let mut info = Self { - optimism_chain_info: op_alloy_rpc_types::OpChainInfo::extract_from( - &genesis.config.extra_fields, - ) - .unwrap_or_default(), - ..Default::default() - }; - if let Some(optimism_base_fee_info) = &info.optimism_chain_info.base_fee_info && - let (Some(elasticity), Some(denominator)) = ( - optimism_base_fee_info.eip1559_elasticity, - optimism_base_fee_info.eip1559_denominator, - ) - { - let base_fee_params = if let Some(canyon_denominator) = - optimism_base_fee_info.eip1559_denominator_canyon - { - BaseFeeParamsKind::Variable( - vec![ - ( - EthereumHardfork::London.boxed(), - BaseFeeParams::new(denominator as u128, elasticity as u128), - ), - ( - OpHardfork::Canyon.boxed(), - BaseFeeParams::new(canyon_denominator as u128, elasticity as u128), - ), - ] - .into(), - ) - } else { - BaseFeeParams::new(denominator as u128, elasticity as u128).into() - }; - - info.base_fee_params = base_fee_params; - } - - info - } -} - -/// Helper method building a [`Header`] given [`Genesis`] and 
[`ChainHardforks`]. -pub fn make_op_genesis_header(genesis: &Genesis, hardforks: &ChainHardforks) -> Header { - let mut header = reth_chainspec::make_genesis_header(genesis, hardforks); - - // If Isthmus is active, overwrite the withdrawals root with the storage root of predeploy - // `L2ToL1MessagePasser.sol` - if hardforks.fork(OpHardfork::Isthmus).active_at_timestamp(header.timestamp) && - let Some(predeploy) = genesis.alloc.get(&L2_TO_L1_MESSAGE_PASSER_ADDRESS) && - let Some(storage) = &predeploy.storage - { - header.withdrawals_root = - Some(storage_root_unhashed(storage.iter().filter_map(|(k, v)| { - if v.is_zero() { - None - } else { - Some((*k, (*v).into())) - } - }))); - } - - header -} - -#[cfg(test)] -mod tests { - use alloc::string::{String, ToString}; - use alloy_genesis::{ChainConfig, Genesis}; - use alloy_op_hardforks::{ - BASE_MAINNET_JOVIAN_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, - OP_SEPOLIA_JOVIAN_TIMESTAMP, - }; - use alloy_primitives::{b256, hex}; - use reth_chainspec::{test_fork_ids, BaseFeeParams, BaseFeeParamsKind}; - use reth_ethereum_forks::{EthereumHardfork, ForkCondition, ForkHash, ForkId, Head}; - use reth_optimism_forks::{OpHardfork, OpHardforks}; - - use crate::*; - - #[test] - fn test_storage_root_consistency() { - use alloy_primitives::{B256, U256}; - use core::str::FromStr; - - let k1 = - B256::from_str("0x0000000000000000000000000000000000000000000000000000000000000001") - .unwrap(); - let v1 = - U256::from_str("0x0000000000000000000000000000000000000000000000000000000000000000") - .unwrap(); - let k2 = - B256::from_str("0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc") - .unwrap(); - let v2 = - U256::from_str("0x000000000000000000000000c0d3c0d3c0d3c0d3c0d3c0d3c0d3c0d3c0d30016") - .unwrap(); - let k3 = - B256::from_str("0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103") - .unwrap(); - let v3 = - 
U256::from_str("0x0000000000000000000000004200000000000000000000000000000000000018") - .unwrap(); - let origin_root = - B256::from_str("0x5d5ba3a8093ede3901ad7a569edfb7b9aecafa54730ba0bf069147cbcc00e345") - .unwrap(); - let expected_root = - B256::from_str("0x8ed4baae3a927be3dea54996b4d5899f8c01e7594bf50b17dc1e741388ce3d12") - .unwrap(); - - let storage_origin = vec![(k1, v1), (k2, v2), (k3, v3)]; - let storage_fix = vec![(k2, v2), (k3, v3)]; - let root_origin = storage_root_unhashed(storage_origin); - let root_fix = storage_root_unhashed(storage_fix); - assert_ne!(root_origin, root_fix); - assert_eq!(root_origin, origin_root); - assert_eq!(root_fix, expected_root); - } - - #[test] - fn base_mainnet_forkids() { - let mut base_mainnet = OpChainSpecBuilder::base_mainnet().build(); - base_mainnet.inner.genesis_header.set_hash(BASE_MAINNET.genesis_hash()); - test_fork_ids( - &BASE_MAINNET, - &[ - ( - Head { number: 0, ..Default::default() }, - ForkId { hash: ForkHash([0x67, 0xda, 0x02, 0x60]), next: 1704992401 }, - ), - ( - Head { number: 0, timestamp: 1704992400, ..Default::default() }, - ForkId { hash: ForkHash([0x67, 0xda, 0x02, 0x60]), next: 1704992401 }, - ), - ( - Head { number: 0, timestamp: 1704992401, ..Default::default() }, - ForkId { hash: ForkHash([0x3c, 0x28, 0x3c, 0xb3]), next: 1710374401 }, - ), - ( - Head { number: 0, timestamp: 1710374400, ..Default::default() }, - ForkId { hash: ForkHash([0x3c, 0x28, 0x3c, 0xb3]), next: 1710374401 }, - ), - ( - Head { number: 0, timestamp: 1710374401, ..Default::default() }, - ForkId { hash: ForkHash([0x51, 0xcc, 0x98, 0xb3]), next: 1720627201 }, - ), - ( - Head { number: 0, timestamp: 1720627200, ..Default::default() }, - ForkId { hash: ForkHash([0x51, 0xcc, 0x98, 0xb3]), next: 1720627201 }, - ), - ( - Head { number: 0, timestamp: 1720627201, ..Default::default() }, - ForkId { hash: ForkHash([0xe4, 0x01, 0x0e, 0xb9]), next: 1726070401 }, - ), - ( - Head { number: 0, timestamp: 1726070401, ..Default::default() }, - 
ForkId { hash: ForkHash([0xbc, 0x38, 0xf9, 0xca]), next: 1736445601 }, - ), - ( - Head { number: 0, timestamp: 1736445601, ..Default::default() }, - ForkId { hash: ForkHash([0x3a, 0x2a, 0xf1, 0x83]), next: 1746806401 }, - ), - // Isthmus - ( - Head { number: 0, timestamp: 1746806401, ..Default::default() }, - ForkId { - hash: ForkHash([0x86, 0x72, 0x8b, 0x4e]), - next: BASE_MAINNET_JOVIAN_TIMESTAMP, - }, - ), - // Jovian - ( - Head { - number: 0, - timestamp: BASE_MAINNET_JOVIAN_TIMESTAMP, - ..Default::default() - }, - BASE_MAINNET.hardfork_fork_id(OpHardfork::Jovian).unwrap(), - ), - ], - ); - } - - #[test] - fn op_sepolia_forkids() { - test_fork_ids( - &OP_SEPOLIA, - &[ - ( - Head { number: 0, ..Default::default() }, - ForkId { hash: ForkHash([0x67, 0xa4, 0x03, 0x28]), next: 1699981200 }, - ), - ( - Head { number: 0, timestamp: 1699981199, ..Default::default() }, - ForkId { hash: ForkHash([0x67, 0xa4, 0x03, 0x28]), next: 1699981200 }, - ), - ( - Head { number: 0, timestamp: 1699981200, ..Default::default() }, - ForkId { hash: ForkHash([0xa4, 0x8d, 0x6a, 0x00]), next: 1708534800 }, - ), - ( - Head { number: 0, timestamp: 1708534799, ..Default::default() }, - ForkId { hash: ForkHash([0xa4, 0x8d, 0x6a, 0x00]), next: 1708534800 }, - ), - ( - Head { number: 0, timestamp: 1708534800, ..Default::default() }, - ForkId { hash: ForkHash([0xcc, 0x17, 0xc7, 0xeb]), next: 1716998400 }, - ), - ( - Head { number: 0, timestamp: 1716998399, ..Default::default() }, - ForkId { hash: ForkHash([0xcc, 0x17, 0xc7, 0xeb]), next: 1716998400 }, - ), - ( - Head { number: 0, timestamp: 1716998400, ..Default::default() }, - ForkId { hash: ForkHash([0x54, 0x0a, 0x8c, 0x5d]), next: 1723478400 }, - ), - ( - Head { number: 0, timestamp: 1723478399, ..Default::default() }, - ForkId { hash: ForkHash([0x54, 0x0a, 0x8c, 0x5d]), next: 1723478400 }, - ), - ( - Head { number: 0, timestamp: 1723478400, ..Default::default() }, - ForkId { hash: ForkHash([0x75, 0xde, 0xa4, 0x1e]), next: 1732633200 }, - ), 
- ( - Head { number: 0, timestamp: 1732633200, ..Default::default() }, - ForkId { hash: ForkHash([0x4a, 0x1c, 0x79, 0x2e]), next: 1744905600 }, - ), - // Isthmus - ( - Head { number: 0, timestamp: 1744905600, ..Default::default() }, - ForkId { - hash: ForkHash([0x6c, 0x62, 0x5e, 0xe1]), - next: OP_SEPOLIA_JOVIAN_TIMESTAMP, - }, - ), - // Jovian - ( - Head { - number: 0, - timestamp: OP_SEPOLIA_JOVIAN_TIMESTAMP, - ..Default::default() - }, - OP_SEPOLIA.hardfork_fork_id(OpHardfork::Jovian).unwrap(), - ), - ], - ); - } - - #[test] - fn op_mainnet_forkids() { - let mut op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); - // for OP mainnet we have to do this because the genesis header can't be properly computed - // from the genesis.json file - op_mainnet.inner.genesis_header.set_hash(OP_MAINNET.genesis_hash()); - test_fork_ids( - &op_mainnet, - &[ - ( - Head { number: 0, ..Default::default() }, - ForkId { hash: ForkHash([0xca, 0xf5, 0x17, 0xed]), next: 3950000 }, - ), - // London - ( - Head { number: 105235063, ..Default::default() }, - ForkId { hash: ForkHash([0xe3, 0x39, 0x8d, 0x7c]), next: 1704992401 }, - ), - // Bedrock - ( - Head { number: 105235063, ..Default::default() }, - ForkId { hash: ForkHash([0xe3, 0x39, 0x8d, 0x7c]), next: 1704992401 }, - ), - // Shanghai - ( - Head { number: 105235063, timestamp: 1704992401, ..Default::default() }, - ForkId { hash: ForkHash([0xbd, 0xd4, 0xfd, 0xb2]), next: 1710374401 }, - ), - // OP activation timestamps - // https://specs.optimism.io/protocol/superchain-upgrades.html#activation-timestamps - // Canyon - ( - Head { number: 105235063, timestamp: 1704992401, ..Default::default() }, - ForkId { hash: ForkHash([0xbd, 0xd4, 0xfd, 0xb2]), next: 1710374401 }, - ), - // Ecotone - ( - Head { number: 105235063, timestamp: 1710374401, ..Default::default() }, - ForkId { hash: ForkHash([0x19, 0xda, 0x4c, 0x52]), next: 1720627201 }, - ), - // Fjord - ( - Head { number: 105235063, timestamp: 1720627201, ..Default::default() }, 
- ForkId { hash: ForkHash([0x49, 0xfb, 0xfe, 0x1e]), next: 1726070401 }, - ), - // Granite - ( - Head { number: 105235063, timestamp: 1726070401, ..Default::default() }, - ForkId { hash: ForkHash([0x44, 0x70, 0x4c, 0xde]), next: 1736445601 }, - ), - // Holocene - ( - Head { number: 105235063, timestamp: 1736445601, ..Default::default() }, - ForkId { hash: ForkHash([0x2b, 0xd9, 0x3d, 0xc8]), next: 1746806401 }, - ), - // Isthmus - ( - Head { number: 105235063, timestamp: 1746806401, ..Default::default() }, - ForkId { - hash: ForkHash([0x37, 0xbe, 0x75, 0x8f]), - next: OP_MAINNET_JOVIAN_TIMESTAMP, - }, - ), - // Jovian - ( - Head { - number: 105235063, - timestamp: OP_MAINNET_JOVIAN_TIMESTAMP, - ..Default::default() - }, - OP_MAINNET.hardfork_fork_id(OpHardfork::Jovian).unwrap(), - ), - ], - ); - } - - #[test] - fn base_sepolia_forkids() { - test_fork_ids( - &BASE_SEPOLIA, - &[ - ( - Head { number: 0, ..Default::default() }, - ForkId { hash: ForkHash([0xb9, 0x59, 0xb9, 0xf7]), next: 1699981200 }, - ), - ( - Head { number: 0, timestamp: 1699981199, ..Default::default() }, - ForkId { hash: ForkHash([0xb9, 0x59, 0xb9, 0xf7]), next: 1699981200 }, - ), - ( - Head { number: 0, timestamp: 1699981200, ..Default::default() }, - ForkId { hash: ForkHash([0x60, 0x7c, 0xd5, 0xa1]), next: 1708534800 }, - ), - ( - Head { number: 0, timestamp: 1708534799, ..Default::default() }, - ForkId { hash: ForkHash([0x60, 0x7c, 0xd5, 0xa1]), next: 1708534800 }, - ), - ( - Head { number: 0, timestamp: 1708534800, ..Default::default() }, - ForkId { hash: ForkHash([0xbe, 0x96, 0x9b, 0x17]), next: 1716998400 }, - ), - ( - Head { number: 0, timestamp: 1716998399, ..Default::default() }, - ForkId { hash: ForkHash([0xbe, 0x96, 0x9b, 0x17]), next: 1716998400 }, - ), - ( - Head { number: 0, timestamp: 1716998400, ..Default::default() }, - ForkId { hash: ForkHash([0x4e, 0x45, 0x7a, 0x49]), next: 1723478400 }, - ), - ( - Head { number: 0, timestamp: 1723478399, ..Default::default() }, - ForkId { hash: 
ForkHash([0x4e, 0x45, 0x7a, 0x49]), next: 1723478400 }, - ), - ( - Head { number: 0, timestamp: 1723478400, ..Default::default() }, - ForkId { hash: ForkHash([0x5e, 0xdf, 0xa3, 0xb6]), next: 1732633200 }, - ), - ( - Head { number: 0, timestamp: 1732633200, ..Default::default() }, - ForkId { hash: ForkHash([0x8b, 0x5e, 0x76, 0x29]), next: 1744905600 }, - ), - // Isthmus - ( - Head { number: 0, timestamp: 1744905600, ..Default::default() }, - ForkId { - hash: ForkHash([0x06, 0x0a, 0x4d, 0x1d]), - next: BASE_SEPOLIA_JOVIAN_TIMESTAMP, - }, - ), - // Jovian - ( - Head { - number: 0, - timestamp: BASE_SEPOLIA_JOVIAN_TIMESTAMP, - ..Default::default() - }, - BASE_SEPOLIA.hardfork_fork_id(OpHardfork::Jovian).unwrap(), - ), - ], - ); - } - - #[test] - fn base_mainnet_genesis() { - let genesis = BASE_MAINNET.genesis_header(); - assert_eq!( - genesis.hash_slow(), - b256!("0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd") - ); - let base_fee = BASE_MAINNET.next_block_base_fee(genesis, genesis.timestamp).unwrap(); - // - assert_eq!(base_fee, 980000000); - } - - #[test] - fn base_sepolia_genesis() { - let genesis = BASE_SEPOLIA.genesis_header(); - assert_eq!( - genesis.hash_slow(), - b256!("0x0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4") - ); - let base_fee = BASE_SEPOLIA.next_block_base_fee(genesis, genesis.timestamp).unwrap(); - // - assert_eq!(base_fee, 980000000); - } - - #[test] - fn op_sepolia_genesis() { - let genesis = OP_SEPOLIA.genesis_header(); - assert_eq!( - genesis.hash_slow(), - b256!("0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d") - ); - let base_fee = OP_SEPOLIA.next_block_base_fee(genesis, genesis.timestamp).unwrap(); - // - assert_eq!(base_fee, 980000000); - } - - #[test] - fn latest_base_mainnet_fork_id() { - assert_eq!( - ForkId { hash: ForkHash(hex!("1cfeafc9")), next: 0 }, - BASE_MAINNET.latest_fork_id() - ) - } - - #[test] - fn latest_base_mainnet_fork_id_with_builder() { - let base_mainnet = 
OpChainSpecBuilder::base_mainnet().build(); - assert_eq!( - ForkId { hash: ForkHash(hex!("1cfeafc9")), next: 0 }, - base_mainnet.latest_fork_id() - ) - } - - #[test] - fn is_bedrock_active() { - let op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); - assert!(!op_mainnet.is_bedrock_active_at_block(1)) - } - - #[test] - fn parse_optimism_hardforks() { - let geth_genesis = r#" - { - "config": { - "bedrockBlock": 10, - "regolithTime": 20, - "canyonTime": 30, - "ecotoneTime": 40, - "fjordTime": 50, - "graniteTime": 51, - "holoceneTime": 52, - "isthmusTime": 53, - "optimism": { - "eip1559Elasticity": 60, - "eip1559Denominator": 70 - } - } - } - "#; - let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); - - let actual_bedrock_block = genesis.config.extra_fields.get("bedrockBlock"); - assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(10)).as_ref()); - let actual_regolith_timestamp = genesis.config.extra_fields.get("regolithTime"); - assert_eq!(actual_regolith_timestamp, Some(serde_json::Value::from(20)).as_ref()); - let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); - assert_eq!(actual_canyon_timestamp, Some(serde_json::Value::from(30)).as_ref()); - let actual_ecotone_timestamp = genesis.config.extra_fields.get("ecotoneTime"); - assert_eq!(actual_ecotone_timestamp, Some(serde_json::Value::from(40)).as_ref()); - let actual_fjord_timestamp = genesis.config.extra_fields.get("fjordTime"); - assert_eq!(actual_fjord_timestamp, Some(serde_json::Value::from(50)).as_ref()); - let actual_granite_timestamp = genesis.config.extra_fields.get("graniteTime"); - assert_eq!(actual_granite_timestamp, Some(serde_json::Value::from(51)).as_ref()); - let actual_holocene_timestamp = genesis.config.extra_fields.get("holoceneTime"); - assert_eq!(actual_holocene_timestamp, Some(serde_json::Value::from(52)).as_ref()); - let actual_isthmus_timestamp = genesis.config.extra_fields.get("isthmusTime"); - assert_eq!(actual_isthmus_timestamp, 
Some(serde_json::Value::from(53)).as_ref()); - - let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); - assert_eq!( - optimism_object, - &serde_json::json!({ - "eip1559Elasticity": 60, - "eip1559Denominator": 70, - }) - ); - - let chain_spec: OpChainSpec = genesis.into(); - - assert_eq!( - chain_spec.base_fee_params, - BaseFeeParamsKind::Constant(BaseFeeParams::new(70, 60)) - ); - - assert!(!chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 0)); - - assert!(chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 10)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 30)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 40)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 50)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 51)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 52)); - } - - #[test] - fn parse_optimism_hardforks_variable_base_fee_params() { - let geth_genesis = r#" - { - "config": { - "bedrockBlock": 10, - "regolithTime": 20, - "canyonTime": 30, - "ecotoneTime": 40, - "fjordTime": 50, - "graniteTime": 51, - "holoceneTime": 52, - "isthmusTime": 53, - "optimism": { - "eip1559Elasticity": 60, - "eip1559Denominator": 70, - "eip1559DenominatorCanyon": 80 - } - } - } - "#; - let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); - - let actual_bedrock_block = 
genesis.config.extra_fields.get("bedrockBlock"); - assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(10)).as_ref()); - let actual_regolith_timestamp = genesis.config.extra_fields.get("regolithTime"); - assert_eq!(actual_regolith_timestamp, Some(serde_json::Value::from(20)).as_ref()); - let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); - assert_eq!(actual_canyon_timestamp, Some(serde_json::Value::from(30)).as_ref()); - let actual_ecotone_timestamp = genesis.config.extra_fields.get("ecotoneTime"); - assert_eq!(actual_ecotone_timestamp, Some(serde_json::Value::from(40)).as_ref()); - let actual_fjord_timestamp = genesis.config.extra_fields.get("fjordTime"); - assert_eq!(actual_fjord_timestamp, Some(serde_json::Value::from(50)).as_ref()); - let actual_granite_timestamp = genesis.config.extra_fields.get("graniteTime"); - assert_eq!(actual_granite_timestamp, Some(serde_json::Value::from(51)).as_ref()); - let actual_holocene_timestamp = genesis.config.extra_fields.get("holoceneTime"); - assert_eq!(actual_holocene_timestamp, Some(serde_json::Value::from(52)).as_ref()); - let actual_isthmus_timestamp = genesis.config.extra_fields.get("isthmusTime"); - assert_eq!(actual_isthmus_timestamp, Some(serde_json::Value::from(53)).as_ref()); - - let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); - assert_eq!( - optimism_object, - &serde_json::json!({ - "eip1559Elasticity": 60, - "eip1559Denominator": 70, - "eip1559DenominatorCanyon": 80 - }) - ); - - let chain_spec: OpChainSpec = genesis.into(); - - assert_eq!( - chain_spec.base_fee_params, - BaseFeeParamsKind::Variable( - vec![ - (EthereumHardfork::London.boxed(), BaseFeeParams::new(70, 60)), - (OpHardfork::Canyon.boxed(), BaseFeeParams::new(80, 60)), - ] - .into() - ) - ); - - assert!(!chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 0)); - 
assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 0)); - assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 0)); - - assert!(chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 10)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 30)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 40)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 50)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 51)); - assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 52)); - } - - #[test] - fn parse_genesis_optimism_with_variable_base_fee_params() { - use op_alloy_rpc_types::OpBaseFeeInfo; - - let geth_genesis = r#" - { - "config": { - "chainId": 8453, - "homesteadBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "muirGlacierBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "arrowGlacierBlock": 0, - "grayGlacierBlock": 0, - "mergeNetsplitBlock": 0, - "bedrockBlock": 0, - "regolithTime": 15, - "terminalTotalDifficulty": 0, - "terminalTotalDifficultyPassed": true, - "optimism": { - "eip1559Elasticity": 6, - "eip1559Denominator": 50 - } - } - } - "#; - let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); - let chainspec = OpChainSpec::from(genesis.clone()); - - let actual_chain_id = genesis.config.chain_id; - assert_eq!(actual_chain_id, 8453); - - assert_eq!( - chainspec.hardforks.get(EthereumHardfork::Istanbul), - Some(ForkCondition::Block(0)) - ); - - let actual_bedrock_block = 
genesis.config.extra_fields.get("bedrockBlock"); - assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(0)).as_ref()); - let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); - assert_eq!(actual_canyon_timestamp, None); - - assert!(genesis.config.terminal_total_difficulty_passed); - - let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); - let optimism_base_fee_info = - serde_json::from_value::(optimism_object.clone()).unwrap(); - - assert_eq!( - optimism_base_fee_info, - OpBaseFeeInfo { - eip1559_elasticity: Some(6), - eip1559_denominator: Some(50), - eip1559_denominator_canyon: None, - } - ); - assert_eq!( - chainspec.base_fee_params, - BaseFeeParamsKind::Constant(BaseFeeParams { - max_change_denominator: 50, - elasticity_multiplier: 6, - }) - ); - - assert!(chainspec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); - - assert!(chainspec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); - } - - #[test] - fn test_fork_order_optimism_mainnet() { - use reth_optimism_forks::OpHardfork; - - let genesis = Genesis { - config: ChainConfig { - chain_id: 0, - homestead_block: Some(0), - dao_fork_block: Some(0), - dao_fork_support: false, - eip150_block: Some(0), - eip155_block: Some(0), - eip158_block: Some(0), - byzantium_block: Some(0), - constantinople_block: Some(0), - petersburg_block: Some(0), - istanbul_block: Some(0), - muir_glacier_block: Some(0), - berlin_block: Some(0), - london_block: Some(0), - arrow_glacier_block: Some(0), - gray_glacier_block: Some(0), - merge_netsplit_block: Some(0), - shanghai_time: Some(0), - cancun_time: Some(0), - prague_time: Some(0), - terminal_total_difficulty: Some(U256::ZERO), - extra_fields: [ - (String::from("bedrockBlock"), 0.into()), - (String::from("regolithTime"), 0.into()), - (String::from("canyonTime"), 0.into()), - (String::from("ecotoneTime"), 0.into()), - (String::from("fjordTime"), 0.into()), - (String::from("graniteTime"), 0.into()), - 
(String::from("holoceneTime"), 0.into()), - (String::from("isthmusTime"), 0.into()), - (String::from("jovianTime"), 0.into()), - ] - .into_iter() - .collect(), - ..Default::default() - }, - ..Default::default() - }; - - let chain_spec: OpChainSpec = genesis.into(); - - let hardforks: Vec<_> = chain_spec.hardforks.forks_iter().map(|(h, _)| h).collect(); - let expected_hardforks = vec![ - EthereumHardfork::Frontier.boxed(), - EthereumHardfork::Homestead.boxed(), - EthereumHardfork::Tangerine.boxed(), - EthereumHardfork::SpuriousDragon.boxed(), - EthereumHardfork::Byzantium.boxed(), - EthereumHardfork::Constantinople.boxed(), - EthereumHardfork::Petersburg.boxed(), - EthereumHardfork::Istanbul.boxed(), - EthereumHardfork::MuirGlacier.boxed(), - EthereumHardfork::Berlin.boxed(), - EthereumHardfork::London.boxed(), - EthereumHardfork::ArrowGlacier.boxed(), - EthereumHardfork::GrayGlacier.boxed(), - EthereumHardfork::Paris.boxed(), - OpHardfork::Bedrock.boxed(), - OpHardfork::Regolith.boxed(), - EthereumHardfork::Shanghai.boxed(), - OpHardfork::Canyon.boxed(), - EthereumHardfork::Cancun.boxed(), - OpHardfork::Ecotone.boxed(), - OpHardfork::Fjord.boxed(), - OpHardfork::Granite.boxed(), - OpHardfork::Holocene.boxed(), - EthereumHardfork::Prague.boxed(), - OpHardfork::Isthmus.boxed(), - OpHardfork::Jovian.boxed(), - // OpHardfork::Interop.boxed(), - ]; - - for (expected, actual) in expected_hardforks.iter().zip(hardforks.iter()) { - assert_eq!(&**expected, &**actual); - } - assert_eq!(expected_hardforks.len(), hardforks.len()); - } - - #[test] - fn json_genesis() { - let geth_genesis = r#" -{ - "config": { - "chainId": 1301, - "homesteadBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "muirGlacierBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "arrowGlacierBlock": 0, - "grayGlacierBlock": 0, - "mergeNetsplitBlock": 0, - "shanghaiTime": 0, - 
"cancunTime": 0, - "bedrockBlock": 0, - "regolithTime": 0, - "canyonTime": 0, - "ecotoneTime": 0, - "fjordTime": 0, - "graniteTime": 0, - "holoceneTime": 1732633200, - "terminalTotalDifficulty": 0, - "terminalTotalDifficultyPassed": true, - "optimism": { - "eip1559Elasticity": 6, - "eip1559Denominator": 50, - "eip1559DenominatorCanyon": 250 - } - }, - "nonce": "0x0", - "timestamp": "0x66edad4c", - "extraData": "0x424544524f434b", - "gasLimit": "0x1c9c380", - "difficulty": "0x0", - "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "coinbase": "0x4200000000000000000000000000000000000011", - "alloc": {}, - "number": "0x0", - "gasUsed": "0x0", - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "baseFeePerGas": "0x3b9aca00", - "excessBlobGas": "0x0", - "blobGasUsed": "0x0" -} - "#; - - let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); - let chainspec = OpChainSpec::from_genesis(genesis); - assert!(chainspec.is_holocene_active_at_timestamp(1732633200)); - } - - #[test] - fn json_genesis_mapped_l1_timestamps() { - let geth_genesis = r#" -{ - "config": { - "chainId": 1301, - "homesteadBlock": 0, - "eip150Block": 0, - "eip155Block": 0, - "eip158Block": 0, - "byzantiumBlock": 0, - "constantinopleBlock": 0, - "petersburgBlock": 0, - "istanbulBlock": 0, - "muirGlacierBlock": 0, - "berlinBlock": 0, - "londonBlock": 0, - "arrowGlacierBlock": 0, - "grayGlacierBlock": 0, - "mergeNetsplitBlock": 0, - "bedrockBlock": 0, - "regolithTime": 0, - "canyonTime": 0, - "ecotoneTime": 1712633200, - "fjordTime": 0, - "graniteTime": 0, - "holoceneTime": 1732633200, - "isthmusTime": 1742633200, - "terminalTotalDifficulty": 0, - "terminalTotalDifficultyPassed": true, - "optimism": { - "eip1559Elasticity": 6, - "eip1559Denominator": 50, - "eip1559DenominatorCanyon": 250 - } - }, - "nonce": "0x0", - "timestamp": "0x66edad4c", - "extraData": "0x424544524f434b", - "gasLimit": "0x1c9c380", - "difficulty": "0x0", - 
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "coinbase": "0x4200000000000000000000000000000000000011", - "alloc": {}, - "number": "0x0", - "gasUsed": "0x0", - "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", - "baseFeePerGas": "0x3b9aca00", - "excessBlobGas": "0x0", - "blobGasUsed": "0x0" -} - "#; - - let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); - let chainspec = OpChainSpec::from_genesis(genesis); - assert!(chainspec.is_holocene_active_at_timestamp(1732633200)); - - assert!(chainspec.is_shanghai_active_at_timestamp(0)); - assert!(chainspec.is_canyon_active_at_timestamp(0)); - - assert!(chainspec.is_ecotone_active_at_timestamp(1712633200)); - assert!(chainspec.is_cancun_active_at_timestamp(1712633200)); - - assert!(chainspec.is_prague_active_at_timestamp(1742633200)); - assert!(chainspec.is_isthmus_active_at_timestamp(1742633200)); - } - - #[test] - fn display_hardorks() { - let content = BASE_MAINNET.display_hardforks().to_string(); - for eth_hf in EthereumHardfork::VARIANTS { - assert!(!content.contains(eth_hf.name())); - } - } -} diff --git a/op-reth/crates/chainspec/src/op_sepolia.rs b/op-reth/crates/chainspec/src/op_sepolia.rs deleted file mode 100644 index 8f80e6e2094..00000000000 --- a/op-reth/crates/chainspec/src/op_sepolia.rs +++ /dev/null @@ -1,39 +0,0 @@ -//! Chain specification for the Optimism Sepolia testnet network. 
- -use crate::{make_op_genesis_header, LazyLock, OpChainSpec}; -use alloc::{sync::Arc, vec}; -use alloy_chains::{Chain, NamedChain}; -use alloy_primitives::{b256, U256}; -use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec, Hardfork}; -use reth_ethereum_forks::EthereumHardfork; -use reth_optimism_forks::{OpHardfork, OP_SEPOLIA_HARDFORKS}; -use reth_primitives_traits::SealedHeader; - -/// The OP Sepolia spec -pub static OP_SEPOLIA: LazyLock> = LazyLock::new(|| { - let genesis = serde_json::from_str(include_str!("../res/genesis/sepolia_op.json")) - .expect("Can't deserialize OP Sepolia genesis json"); - let hardforks = OP_SEPOLIA_HARDFORKS.clone(); - OpChainSpec { - inner: ChainSpec { - chain: Chain::from_named(NamedChain::OptimismSepolia), - genesis_header: SealedHeader::new( - make_op_genesis_header(&genesis, &hardforks), - b256!("0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d"), - ), - genesis, - paris_block_and_final_difficulty: Some((0, U256::from(0))), - hardforks, - base_fee_params: BaseFeeParamsKind::Variable( - vec![ - (EthereumHardfork::London.boxed(), BaseFeeParams::optimism_sepolia()), - (OpHardfork::Canyon.boxed(), BaseFeeParams::optimism_sepolia_canyon()), - ] - .into(), - ), - prune_delete_limit: 10000, - ..Default::default() - }, - } - .into() -}); diff --git a/op-reth/crates/cli/Cargo.toml b/op-reth/crates/cli/Cargo.toml deleted file mode 100644 index a109c2fc8b0..00000000000 --- a/op-reth/crates/cli/Cargo.toml +++ /dev/null @@ -1,127 +0,0 @@ -[package] -name = "reth-optimism-cli" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -reth-static-file-types = { workspace = true, features = ["clap"] } -reth-cli.workspace = true -reth-cli-commands.workspace = true -reth-consensus.workspace = true -reth-rpc-server-types.workspace = true 
-reth-primitives-traits.workspace = true -reth-db = { workspace = true, features = ["mdbx", "op"] } -reth-db-api.workspace = true -reth-db-common.workspace = true -reth-downloaders.workspace = true -reth-provider.workspace = true -reth-prune.workspace = true -reth-stages.workspace = true -reth-static-file.workspace = true -reth-execution-types.workspace = true -reth-node-core.workspace = true -reth-optimism-node.workspace = true -reth-fs-util.workspace = true - -# so jemalloc metrics can be included -reth-node-metrics.workspace = true - -## optimism -reth-optimism-primitives.workspace = true -reth-optimism-chainspec = { workspace = true, features = ["superchain-configs"] } -reth-optimism-consensus.workspace = true - -reth-chainspec.workspace = true -reth-node-events.workspace = true -reth-optimism-evm.workspace = true -reth-cli-runner.workspace = true -reth-node-builder = { workspace = true, features = ["op"] } -reth-tracing.workspace = true - -# eth -alloy-eips.workspace = true -alloy-consensus.workspace = true -alloy-primitives.workspace = true -alloy-rlp.workspace = true - -# misc -futures-util.workspace = true -derive_more.workspace = true -serde.workspace = true -clap = { workspace = true, features = ["derive", "env"] } - -tokio = { workspace = true, features = ["sync", "macros", "time", "rt-multi-thread"] } -tokio-util = { workspace = true, features = ["codec"] } -tracing.workspace = true -eyre.workspace = true - -# reth test-vectors -proptest = { workspace = true, optional = true } -op-alloy-consensus.workspace = true - -[dev-dependencies] -tempfile.workspace = true -reth-stages = { workspace = true, features = ["test-utils"] } - -[build-dependencies] -reth-optimism-chainspec = { workspace = true, features = ["std", "superchain-configs"] } - -[features] -default = [] - -# Opentelemetry feature to activate tracing and logs export -otlp = ["reth-tracing/otlp", "reth-node-core/otlp"] -otlp-logs = ["reth-tracing/otlp-logs", "reth-node-core/otlp-logs"] - 
-asm-keccak = [ - "alloy-primitives/asm-keccak", - "reth-node-core/asm-keccak", - "reth-optimism-node/asm-keccak", -] - -keccak-cache-global = [ - "alloy-primitives/keccak-cache-global", - "reth-node-core/keccak-cache-global", - "reth-optimism-node/keccak-cache-global", -] - -# Jemalloc feature for vergen to generate correct env vars -jemalloc = [ - "reth-node-core/jemalloc", - "reth-node-metrics/jemalloc", -] -jemalloc-prof = [ - "jemalloc", - "reth-node-metrics/jemalloc-prof", -] -jemalloc-symbols = [ - "jemalloc-prof", - "reth-node-metrics/jemalloc-symbols", -] - -tracy = ["reth-tracing/tracy", "reth-node-core/tracy"] - -dev = [ - "dep:proptest", - "reth-cli-commands/arbitrary", -] - -serde = [ - "alloy-consensus/serde", - "alloy-eips/serde", - "alloy-primitives/serde", - "op-alloy-consensus/serde", - "reth-execution-types/serde", - "reth-optimism-primitives/serde", - "reth-primitives-traits/serde", - "reth-optimism-chainspec/serde", -] - -edge = ["reth-cli-commands/edge", "reth-node-core/edge"] diff --git a/op-reth/crates/cli/src/commands/mod.rs b/op-reth/crates/cli/src/commands/mod.rs deleted file mode 100644 index 5edd55b0ccb..00000000000 --- a/op-reth/crates/cli/src/commands/mod.rs +++ /dev/null @@ -1,90 +0,0 @@ -use crate::chainspec::OpChainSpecParser; -use clap::Subcommand; -use import::ImportOpCommand; -use import_receipts::ImportReceiptsOpCommand; -use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks}; -use reth_cli::chainspec::ChainSpecParser; -use reth_cli_commands::{ - config_cmd, db, dump_genesis, init_cmd, - node::{self, NoArgs}, - p2p, prune, re_execute, stage, -}; -use std::{fmt, sync::Arc}; - -pub mod import; -pub mod import_receipts; -pub mod init_state; - -#[cfg(feature = "dev")] -pub mod test_vectors; - -/// Commands to be executed -#[derive(Debug, Subcommand)] -pub enum Commands -{ - /// Start the node - #[command(name = "node")] - Node(Box>), - /// Initialize the database from a genesis file. 
- #[command(name = "init")] - Init(init_cmd::InitCommand), - /// Initialize the database from a state dump file. - #[command(name = "init-state")] - InitState(init_state::InitStateCommandOp), - /// This syncs RLP encoded OP blocks below Bedrock from a file, without executing. - #[command(name = "import-op")] - ImportOp(ImportOpCommand), - /// This imports RLP encoded receipts from a file. - #[command(name = "import-receipts-op")] - ImportReceiptsOp(ImportReceiptsOpCommand), - /// Dumps genesis block JSON configuration to stdout. - DumpGenesis(dump_genesis::DumpGenesisCommand), - /// Database debugging utilities - #[command(name = "db")] - Db(db::Command), - /// Manipulate individual stages. - #[command(name = "stage")] - Stage(Box>), - /// P2P Debugging utilities - #[command(name = "p2p")] - P2P(Box>), - /// Write config to stdout - #[command(name = "config")] - Config(config_cmd::Command), - /// Prune according to the configuration without any limits - #[command(name = "prune")] - Prune(prune::PruneCommand), - /// Generate Test Vectors - #[cfg(feature = "dev")] - #[command(name = "test-vectors")] - TestVectors(test_vectors::Command), - /// Re-execute blocks in parallel to verify historical sync correctness. 
- #[command(name = "re-execute")] - ReExecute(re_execute::Command), -} - -impl< - C: ChainSpecParser, - Ext: clap::Args + fmt::Debug, - > Commands -{ - /// Returns the underlying chain being used for commands - pub fn chain_spec(&self) -> Option<&Arc> { - match self { - Self::Node(cmd) => cmd.chain_spec(), - Self::Init(cmd) => cmd.chain_spec(), - Self::InitState(cmd) => cmd.chain_spec(), - Self::DumpGenesis(cmd) => cmd.chain_spec(), - Self::Db(cmd) => cmd.chain_spec(), - Self::Stage(cmd) => cmd.chain_spec(), - Self::P2P(cmd) => cmd.chain_spec(), - Self::Config(_) => None, - Self::Prune(cmd) => cmd.chain_spec(), - Self::ImportOp(cmd) => cmd.chain_spec(), - Self::ImportReceiptsOp(cmd) => cmd.chain_spec(), - #[cfg(feature = "dev")] - Self::TestVectors(_) => None, - Self::ReExecute(cmd) => cmd.chain_spec(), - } - } -} diff --git a/op-reth/crates/cli/src/lib.rs b/op-reth/crates/cli/src/lib.rs deleted file mode 100644 index 52fdcc2ddd5..00000000000 --- a/op-reth/crates/cli/src/lib.rs +++ /dev/null @@ -1,215 +0,0 @@ -//! OP-Reth CLI implementation. - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg))] - -/// A configurable App on top of the cli parser. -pub mod app; -/// Optimism chain specification parser. -pub mod chainspec; -/// Optimism CLI commands. -pub mod commands; -/// Module with a codec for reading and encoding receipts in files. -/// -/// Enables decoding and encoding `OpGethReceipt` type. See . -/// -/// Currently configured to use codec [`OpGethReceipt`](receipt_file_codec::OpGethReceipt) based on -/// export of below Bedrock data using . Codec can -/// be replaced with regular encoding of receipts for export. 
-/// -/// NOTE: receipts can be exported using regular op-geth encoding for `Receipt` type, to fit -/// reth's needs for importing. However, this would require patching the diff in to export the `Receipt` and not `OpGethReceipt` type (originally -/// made for op-erigon's import needs). -pub mod receipt_file_codec; - -/// OVM block, same as EVM block at bedrock, except for signature of deposit transaction -/// not having a signature back then. -/// Enables decoding and encoding `Block` types within file contexts. -pub mod ovm_file_codec; - -pub use app::CliApp; -pub use commands::{import::ImportOpCommand, import_receipts::ImportReceiptsOpCommand}; -use reth_optimism_chainspec::OpChainSpec; -use reth_rpc_server_types::{DefaultRpcModuleValidator, RpcModuleValidator}; - -use std::{ffi::OsString, fmt, marker::PhantomData, sync::Arc}; - -use chainspec::OpChainSpecParser; -use clap::Parser; -use commands::Commands; -use futures_util::Future; -use reth_cli::chainspec::ChainSpecParser; -use reth_cli_commands::launcher::FnLauncher; -use reth_cli_runner::CliRunner; -use reth_db::DatabaseEnv; -use reth_node_builder::{NodeBuilder, WithLaunchContext}; -use reth_node_core::{ - args::{LogArgs, TraceArgs}, - version::version_metadata, -}; -use reth_optimism_node::args::RollupArgs; - -// This allows us to manually enable node metrics features, required for proper jemalloc metric -// reporting -use reth_node_metrics as _; - -/// The main op-reth cli interface. -/// -/// This is the entrypoint to the executable. 
-#[derive(Debug, Parser)] -#[command(author, name = version_metadata().name_client.as_ref(), version = version_metadata().short_version.as_ref(), long_version = version_metadata().long_version.as_ref(), about = "Reth", long_about = None)] -pub struct Cli< - Spec: ChainSpecParser = OpChainSpecParser, - Ext: clap::Args + fmt::Debug = RollupArgs, - Rpc: RpcModuleValidator = DefaultRpcModuleValidator, -> { - /// The command to run - #[command(subcommand)] - pub command: Commands, - - /// The logging configuration for the CLI. - #[command(flatten)] - pub logs: LogArgs, - - /// The metrics configuration for the CLI. - #[command(flatten)] - pub traces: TraceArgs, - - /// Type marker for the RPC module validator - #[arg(skip)] - _phantom: PhantomData, -} - -impl Cli { - /// Parsers only the default CLI arguments - pub fn parse_args() -> Self { - Self::parse() - } - - /// Parsers only the default CLI arguments from the given iterator - pub fn try_parse_args_from(itr: I) -> Result - where - I: IntoIterator, - T: Into + Clone, - { - Self::try_parse_from(itr) - } -} - -impl Cli -where - C: ChainSpecParser, - Ext: clap::Args + fmt::Debug, - Rpc: RpcModuleValidator, -{ - /// Configures the CLI and returns a [`CliApp`] instance. - /// - /// This method is used to prepare the CLI for execution by wrapping it in a - /// [`CliApp`] that can be further configured before running. - pub fn configure(self) -> CliApp { - CliApp::new(self) - } - - /// Execute the configured cli command. - /// - /// This accepts a closure that is used to launch the node via the - /// [`NodeCommand`](reth_cli_commands::node::NodeCommand). - pub fn run(self, launcher: L) -> eyre::Result<()> - where - L: FnOnce(WithLaunchContext, C::ChainSpec>>, Ext) -> Fut, - Fut: Future>, - { - self.with_runner(CliRunner::try_default_runtime()?, launcher) - } - - /// Execute the configured cli command with the provided [`CliRunner`]. 
- pub fn with_runner(self, runner: CliRunner, launcher: L) -> eyre::Result<()> - where - L: FnOnce(WithLaunchContext, C::ChainSpec>>, Ext) -> Fut, - Fut: Future>, - { - let mut this = self.configure(); - this.set_runner(runner); - this.run(FnLauncher::new::(async move |builder, chain_spec| { - launcher(builder, chain_spec).await - })) - } -} - -#[cfg(test)] -mod test { - use crate::{chainspec::OpChainSpecParser, commands::Commands, Cli}; - use clap::Parser; - use reth_cli_commands::{node::NoArgs, NodeCommand}; - use reth_optimism_chainspec::{BASE_MAINNET, OP_DEV}; - use reth_optimism_node::args::RollupArgs; - - #[test] - fn parse_dev() { - let cmd = NodeCommand::::parse_from(["op-reth", "--dev"]); - let chain = OP_DEV.clone(); - assert_eq!(cmd.chain.chain, chain.chain); - assert_eq!(cmd.chain.genesis_hash(), chain.genesis_hash()); - assert_eq!( - cmd.chain.paris_block_and_final_difficulty, - chain.paris_block_and_final_difficulty - ); - assert_eq!(cmd.chain.hardforks, chain.hardforks); - - assert!(cmd.rpc.http); - assert!(cmd.network.discovery.disable_discovery); - - assert!(cmd.dev.dev); - } - - #[test] - fn parse_node() { - let cmd = Cli::::parse_from([ - "op-reth", - "node", - "--chain", - "base", - "--datadir", - "/mnt/datadirs/base", - "--instance", - "2", - "--http", - "--http.addr", - "0.0.0.0", - "--ws", - "--ws.addr", - "0.0.0.0", - "--http.api", - "admin,debug,eth,net,trace,txpool,web3,rpc,reth,ots", - "--rollup.sequencer-http", - "https://mainnet-sequencer.base.org", - "--rpc-max-tracing-requests", - "1000000", - "--rpc.gascap", - "18446744073709551615", - "--rpc.max-connections", - "429496729", - "--rpc.max-logs-per-response", - "0", - "--rpc.max-subscriptions-per-connection", - "10000", - "--metrics", - "9003", - "--tracing-otlp=http://localhost:4318/v1/traces", - "--log.file.max-size", - "100", - ]); - - match cmd.command { - Commands::Node(command) => { - assert_eq!(command.chain.as_ref(), BASE_MAINNET.as_ref()); - } - _ => panic!("unexpected 
command"), - } - } -} diff --git a/op-reth/crates/consensus/Cargo.toml b/op-reth/crates/consensus/Cargo.toml deleted file mode 100644 index 54df0af80d2..00000000000 --- a/op-reth/crates/consensus/Cargo.toml +++ /dev/null @@ -1,76 +0,0 @@ -[package] -name = "reth-optimism-consensus" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -exclude.workspace = true - -[lints] -workspace = true - -[dependencies] -# reth -reth-execution-types.workspace = true -reth-chainspec.workspace = true -reth-consensus-common.workspace = true -reth-consensus.workspace = true -reth-primitives-traits.workspace = true -reth-storage-api.workspace = true -reth-storage-errors.workspace = true -reth-trie-common.workspace = true - -# op-reth -reth-optimism-forks.workspace = true -reth-optimism-primitives.workspace = true - -# ethereum -alloy-eips.workspace = true -alloy-primitives.workspace = true -alloy-consensus.workspace = true -alloy-trie.workspace = true -revm.workspace = true - -# misc -tracing.workspace = true -thiserror.workspace = true -reth-optimism-chainspec.workspace = true - -[dev-dependencies] -reth-provider = { workspace = true, features = ["test-utils"] } -reth-db-common.workspace = true -reth-revm.workspace = true -reth-trie.workspace = true -reth-optimism-node.workspace = true - -alloy-chains.workspace = true - -op-alloy-consensus.workspace = true - -[features] -default = ["std"] -std = [ - "reth-chainspec/std", - "reth-consensus/std", - "reth-consensus-common/std", - "reth-primitives-traits/std", - "reth-optimism-forks/std", - "reth-optimism-chainspec/std", - "reth-optimism-primitives/std", - "reth-storage-api/std", - "reth-storage-errors/std", - "reth-trie-common/std", - "alloy-chains/std", - "alloy-eips/std", - "alloy-primitives/std", - "alloy-consensus/std", - "alloy-trie/std", - "reth-revm/std", - "revm/std", - "tracing/std", - "thiserror/std", - 
"reth-execution-types/std", - "op-alloy-consensus/std", -] diff --git a/op-reth/crates/consensus/src/lib.rs b/op-reth/crates/consensus/src/lib.rs deleted file mode 100644 index 1d3cb421c45..00000000000 --- a/op-reth/crates/consensus/src/lib.rs +++ /dev/null @@ -1,789 +0,0 @@ -//! Optimism Consensus implementation. - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -extern crate alloc; - -use alloc::{format, sync::Arc}; -use alloy_consensus::{ - constants::MAXIMUM_EXTRA_DATA_SIZE, BlockHeader as _, EMPTY_OMMER_ROOT_HASH, -}; -use alloy_primitives::B64; -use core::fmt::Debug; -use reth_chainspec::EthChainSpec; -use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator, ReceiptRootBloom}; -use reth_consensus_common::validation::{ - validate_against_parent_eip1559_base_fee, validate_against_parent_hash_number, - validate_against_parent_timestamp, validate_cancun_gas, validate_header_base_fee, - validate_header_extra_data, validate_header_gas, -}; -use reth_execution_types::BlockExecutionResult; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::DepositReceipt; -use reth_primitives_traits::{ - Block, BlockBody, BlockHeader, GotExpected, NodePrimitives, RecoveredBlock, SealedBlock, - SealedHeader, -}; - -mod proof; -pub use proof::calculate_receipt_root_no_memo_optimism; - -pub mod validation; -pub use validation::{canyon, isthmus, validate_block_post_execution}; - -pub mod error; -pub use error::OpConsensusError; - -/// Optimism consensus implementation. -/// -/// Provides basic checks as outlined in the execution specs. 
-#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OpBeaconConsensus { - /// Configuration - chain_spec: Arc, - /// Maximum allowed extra data size in bytes - max_extra_data_size: usize, -} - -impl OpBeaconConsensus { - /// Create a new instance of [`OpBeaconConsensus`] - pub const fn new(chain_spec: Arc) -> Self { - Self { chain_spec, max_extra_data_size: MAXIMUM_EXTRA_DATA_SIZE } - } - - /// Returns the maximum allowed extra data size. - pub const fn max_extra_data_size(&self) -> usize { - self.max_extra_data_size - } - - /// Sets the maximum allowed extra data size and returns the updated instance. - pub const fn with_max_extra_data_size(mut self, size: usize) -> Self { - self.max_extra_data_size = size; - self - } -} - -impl FullConsensus for OpBeaconConsensus -where - N: NodePrimitives, - ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, -{ - fn validate_block_post_execution( - &self, - block: &RecoveredBlock, - result: &BlockExecutionResult, - receipt_root_bloom: Option, - ) -> Result<(), ConsensusError> { - validate_block_post_execution(block.header(), &self.chain_spec, result, receipt_root_bloom) - } -} - -impl Consensus for OpBeaconConsensus -where - B: Block, - ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, -{ - fn validate_body_against_header( - &self, - body: &B::Body, - header: &SealedHeader, - ) -> Result<(), ConsensusError> { - validation::validate_body_against_header_op(&self.chain_spec, body, header.header()) - } - - fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { - // Check ommers hash - let ommers_hash = block.body().calculate_ommers_root(); - if Some(block.ommers_hash()) != ommers_hash { - return Err(ConsensusError::BodyOmmersHashDiff( - GotExpected { - got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH), - expected: block.ommers_hash(), - } - .into(), - )) - } - - // Check transaction root - if let Err(error) = block.ensure_transaction_root_valid() { - return Err(ConsensusError::BodyTransactionRootDiff(error.into())) - } - - // Check empty shanghai-withdrawals - if self.chain_spec.is_canyon_active_at_timestamp(block.timestamp()) { - canyon::ensure_empty_shanghai_withdrawals(block.body()).map_err(|err| { - ConsensusError::Other(format!("failed to verify block {}: {err}", block.number())) - })? - } else { - return Ok(()) - } - - // Blob gas used validation - // In Jovian, the blob gas used computation has changed. We are moving the blob base fee - // validation to post-execution since the DA footprint calculation is stateful. - // Pre-execution we only validate that the blob gas used is present in the header. 
- if self.chain_spec.is_jovian_active_at_timestamp(block.timestamp()) { - block.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; - } else if self.chain_spec.is_ecotone_active_at_timestamp(block.timestamp()) { - validate_cancun_gas(block)?; - } - - // Check withdrawals root field in header - if self.chain_spec.is_isthmus_active_at_timestamp(block.timestamp()) { - // storage root of withdrawals pre-deploy is verified post-execution - isthmus::ensure_withdrawals_storage_root_is_some(block.header()).map_err(|err| { - ConsensusError::Other(format!("failed to verify block {}: {err}", block.number())) - })? - } else { - // canyon is active, else would have returned already - canyon::ensure_empty_withdrawals_root(block.header())? - } - - Ok(()) - } -} - -impl HeaderValidator for OpBeaconConsensus -where - H: BlockHeader, - ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, -{ - fn validate_header(&self, header: &SealedHeader) -> Result<(), ConsensusError> { - let header = header.header(); - // with OP-stack Bedrock activation number determines when TTD (eth Merge) has been reached. - debug_assert!( - self.chain_spec.is_bedrock_active_at_block(header.number()), - "manually import OVM blocks" - ); - - if header.nonce() != Some(B64::ZERO) { - return Err(ConsensusError::TheMergeNonceIsNotZero) - } - - if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH { - return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty) - } - - // Post-merge, the consensus layer is expected to perform checks such that the block - // timestamp is a function of the slot. This is different from pre-merge, where blocks - // are only allowed to be in the future (compared to the system's clock) by a certain - // threshold. - // - // Block validation with respect to the parent should ensure that the block timestamp - // is greater than its parent timestamp. - - // validate header extra data for all networks post merge - validate_header_extra_data(header, self.max_extra_data_size)?; - validate_header_gas(header)?; - validate_header_base_fee(header, &self.chain_spec) - } - - fn validate_header_against_parent( - &self, - header: &SealedHeader, - parent: &SealedHeader, - ) -> Result<(), ConsensusError> { - validate_against_parent_hash_number(header.header(), parent)?; - - if self.chain_spec.is_bedrock_active_at_block(header.number()) { - validate_against_parent_timestamp(header.header(), parent.header())?; - } - - validate_against_parent_eip1559_base_fee( - header.header(), - parent.header(), - &self.chain_spec, - )?; - - // Ensure that the blob gas fields for this block are correctly set. - // In the op-stack, the excess blob gas is always 0 for all blocks after ecotone. - // The blob gas used and the excess blob gas should both be set after ecotone. - // After Jovian, the blob gas used contains the current DA footprint. 
- if self.chain_spec.is_ecotone_active_at_timestamp(header.timestamp()) { - let blob_gas_used = header.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; - - // Before Jovian and after ecotone, the blob gas used should be 0. - if !self.chain_spec.is_jovian_active_at_timestamp(header.timestamp()) && - blob_gas_used != 0 - { - return Err(ConsensusError::BlobGasUsedDiff(GotExpected { - got: blob_gas_used, - expected: 0, - })); - } - - let excess_blob_gas = - header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; - if excess_blob_gas != 0 { - return Err(ConsensusError::ExcessBlobGasDiff { - diff: GotExpected { got: excess_blob_gas, expected: 0 }, - parent_excess_blob_gas: parent.excess_blob_gas().unwrap_or(0), - parent_blob_gas_used: parent.blob_gas_used().unwrap_or(0), - }) - } - } - - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use std::sync::Arc; - - use alloy_consensus::{BlockBody, Eip658Value, Header, Receipt, TxEip7702, TxReceipt}; - use alloy_eips::{eip4895::Withdrawals, eip7685::Requests}; - use alloy_primitives::{Address, Bytes, Log, Signature, U256}; - use op_alloy_consensus::{ - encode_holocene_extra_data, encode_jovian_extra_data, OpTypedTransaction, - }; - use reth_chainspec::BaseFeeParams; - use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator}; - use reth_optimism_chainspec::{OpChainSpec, OpChainSpecBuilder, OP_MAINNET}; - use reth_optimism_primitives::{OpPrimitives, OpReceipt, OpTransactionSigned}; - use reth_primitives_traits::{proofs, RecoveredBlock, SealedBlock, SealedHeader}; - use reth_provider::BlockExecutionResult; - - use crate::OpBeaconConsensus; - - fn mock_tx(nonce: u64) -> OpTransactionSigned { - let tx = TxEip7702 { - chain_id: 1u64, - nonce, - max_fee_per_gas: 0x28f000fff, - max_priority_fee_per_gas: 0x28f000fff, - gas_limit: 10, - to: Address::default(), - value: U256::from(3_u64), - input: Bytes::from(vec![1, 2]), - access_list: Default::default(), - authorization_list: 
Default::default(), - }; - - let signature = Signature::new(U256::default(), U256::default(), true); - - OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature) - } - - #[test] - fn test_block_blob_gas_used_validation_isthmus() { - let chain_spec = OpChainSpecBuilder::default() - .isthmus_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let header = Header { - base_fee_per_gas: Some(1337), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - timestamp: u64::MAX, - ..Default::default() - }; - let body = BlockBody { - transactions: vec![transaction], - ommers: vec![], - withdrawals: Some(Withdrawals::default()), - }; - - let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); - - // validate blob, it should pass blob gas used validation - let pre_execution = beacon_consensus.validate_block_pre_execution(&block); - - assert!(pre_execution.is_ok()); - } - - #[test] - fn test_block_blob_gas_used_validation_failure_isthmus() { - let chain_spec = OpChainSpecBuilder::default() - .isthmus_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let header = Header { - base_fee_per_gas: Some(1337), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(10), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - timestamp: u64::MAX, - ..Default::default() - }; - let body = BlockBody { - transactions: vec![transaction], - ommers: vec![], - withdrawals: Some(Withdrawals::default()), - }; - - 
let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); - - // validate blob, it should fail blob gas used validation - let pre_execution = beacon_consensus.validate_block_pre_execution(&block); - - assert!(matches!( - pre_execution.unwrap_err(), - ConsensusError::BlobGasUsedDiff(diff) if diff.got == 10 && diff.expected == 0 - )); - } - - #[test] - fn test_block_blob_gas_used_validation_jovian() { - const BLOB_GAS_USED: u64 = 1000; - const GAS_USED: u64 = 10; - - let chain_spec = OpChainSpecBuilder::default() - .jovian_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: GAS_USED, - logs: vec![], - }); - - let header = Header { - base_fee_per_gas: Some(1337), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(BLOB_GAS_USED), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - timestamp: u64::MAX, - gas_used: GAS_USED, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - ..Default::default() - }; - let body = BlockBody { - transactions: vec![transaction], - ommers: vec![], - withdrawals: Some(Withdrawals::default()), - }; - - let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); - - let result = BlockExecutionResult:: { - blob_gas_used: BLOB_GAS_USED, - receipts: vec![receipt], - requests: Requests::default(), - gas_used: GAS_USED, - }; - - // validate blob, it should pass blob gas used validation - let pre_execution = beacon_consensus.validate_block_pre_execution(&block); - - assert!(pre_execution.is_ok()); - - let block = RecoveredBlock::new_sealed(block, vec![Address::default()]); - - let 
post_execution = as FullConsensus>::validate_block_post_execution( - &beacon_consensus, - &block, - &result, - None, - ); - - // validate blob, it should pass blob gas used validation - assert!(post_execution.is_ok()); - } - - #[test] - fn test_block_blob_gas_used_validation_failure_jovian() { - const BLOB_GAS_USED: u64 = 1000; - const GAS_USED: u64 = 10; - - let chain_spec = OpChainSpecBuilder::default() - .jovian_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: GAS_USED, - logs: vec![], - }); - - let header = Header { - base_fee_per_gas: Some(1337), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(BLOB_GAS_USED), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: GAS_USED, - timestamp: u64::MAX, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - ..Default::default() - }; - let body = BlockBody { - transactions: vec![transaction], - ommers: vec![], - withdrawals: Some(Withdrawals::default()), - }; - - let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); - - let result = BlockExecutionResult:: { - blob_gas_used: BLOB_GAS_USED + 1, - receipts: vec![receipt], - requests: Requests::default(), - gas_used: GAS_USED, - }; - - // validate blob, it should pass blob gas used validation - let pre_execution = beacon_consensus.validate_block_pre_execution(&block); - - assert!(pre_execution.is_ok()); - - let block = RecoveredBlock::new_sealed(block, vec![Address::default()]); - - let post_execution = as FullConsensus>::validate_block_post_execution( - &beacon_consensus, - &block, - &result, - None, - ); 
- - // validate blob, it should fail blob gas used validation post execution. - assert!(matches!( - post_execution.unwrap_err(), - ConsensusError::BlobGasUsedDiff(diff) - if diff.got == BLOB_GAS_USED + 1 && diff.expected == BLOB_GAS_USED - )); - } - - #[test] - fn test_header_min_base_fee_validation() { - const MIN_BASE_FEE: u64 = 1000; - - let chain_spec = OpChainSpecBuilder::default() - .jovian_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: 0, - logs: vec![], - }); - - let parent = Header { - number: 0, - base_fee_per_gas: Some(MIN_BASE_FEE / 10), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(0), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX - 1, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - extra_data: encode_jovian_extra_data( - Default::default(), - BaseFeeParams::optimism(), - MIN_BASE_FEE, - ) - .unwrap(), - ..Default::default() - }; - let parent = SealedHeader::seal_slow(parent); - - let header = Header { - number: 1, - base_fee_per_gas: Some(MIN_BASE_FEE), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(0), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - parent_hash: parent.hash(), - ..Default::default() - }; - let header = 
SealedHeader::seal_slow(header); - - let result = beacon_consensus.validate_header_against_parent(&header, &parent); - - assert!(result.is_ok()); - } - - #[test] - fn test_header_min_base_fee_validation_failure() { - const MIN_BASE_FEE: u64 = 1000; - - let chain_spec = OpChainSpecBuilder::default() - .jovian_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: 0, - logs: vec![], - }); - - let parent = Header { - number: 0, - base_fee_per_gas: Some(MIN_BASE_FEE / 10), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(0), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX - 1, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - extra_data: encode_jovian_extra_data( - Default::default(), - BaseFeeParams::optimism(), - MIN_BASE_FEE, - ) - .unwrap(), - ..Default::default() - }; - let parent = SealedHeader::seal_slow(parent); - - let header = Header { - number: 1, - base_fee_per_gas: Some(MIN_BASE_FEE - 1), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(0), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - parent_hash: parent.hash(), - ..Default::default() - }; - let header = SealedHeader::seal_slow(header); - - let result = beacon_consensus.validate_header_against_parent(&header, 
&parent); - - assert!(matches!( - result.unwrap_err(), - ConsensusError::BaseFeeDiff(diff) - if diff.got == MIN_BASE_FEE - 1 && diff.expected == MIN_BASE_FEE - )); - } - - #[test] - fn test_header_da_footprint_validation() { - const MIN_BASE_FEE: u64 = 100_000; - const DA_FOOTPRINT: u64 = GAS_LIMIT - 1; - const GAS_LIMIT: u64 = 100_000_000; - - let chain_spec = OpChainSpecBuilder::default() - .jovian_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: 0, - logs: vec![], - }); - - let parent = Header { - number: 0, - base_fee_per_gas: Some(MIN_BASE_FEE), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(DA_FOOTPRINT), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX - 1, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - extra_data: encode_jovian_extra_data( - Default::default(), - BaseFeeParams::optimism(), - MIN_BASE_FEE, - ) - .unwrap(), - gas_limit: GAS_LIMIT, - ..Default::default() - }; - let parent = SealedHeader::seal_slow(parent); - - let header = Header { - number: 1, - base_fee_per_gas: Some(MIN_BASE_FEE + MIN_BASE_FEE / 10), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(DA_FOOTPRINT), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - parent_hash: parent.hash(), - 
..Default::default() - }; - let header = SealedHeader::seal_slow(header); - - let result = beacon_consensus.validate_header_against_parent(&header, &parent); - - assert!(result.is_ok()); - } - - #[test] - fn test_header_isthmus_validation() { - const MIN_BASE_FEE: u64 = 100_000; - const DA_FOOTPRINT: u64 = GAS_LIMIT - 1; - const GAS_LIMIT: u64 = 100_000_000; - - let chain_spec = OpChainSpecBuilder::default() - .isthmus_activated() - .genesis(OP_MAINNET.genesis.clone()) - .chain(OP_MAINNET.chain) - .build(); - - // create a tx - let transaction = mock_tx(0); - - let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); - - let receipt = OpReceipt::Eip7702(Receipt:: { - status: Eip658Value::success(), - cumulative_gas_used: 0, - logs: vec![], - }); - - let parent = Header { - number: 0, - base_fee_per_gas: Some(MIN_BASE_FEE), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(DA_FOOTPRINT), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX - 1, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - extra_data: encode_holocene_extra_data(Default::default(), BaseFeeParams::optimism()) - .unwrap(), - gas_limit: GAS_LIMIT, - ..Default::default() - }; - let parent = SealedHeader::seal_slow(parent); - - let header = Header { - number: 1, - base_fee_per_gas: Some(MIN_BASE_FEE - 2 * MIN_BASE_FEE / 100), - withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), - blob_gas_used: Some(DA_FOOTPRINT), - excess_blob_gas: Some(0), - transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( - &transaction, - )), - gas_used: 0, - timestamp: u64::MAX, - receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( - &receipt.with_bloom_ref(), - )), - logs_bloom: receipt.bloom(), - parent_hash: parent.hash(), - 
..Default::default() - }; - let header = SealedHeader::seal_slow(header); - - let result = beacon_consensus.validate_header_against_parent(&header, &parent); - - assert!(matches!( - result.unwrap_err(), - ConsensusError::BlobGasUsedDiff(diff) - if diff.got == DA_FOOTPRINT && diff.expected == 0 - )); - } -} diff --git a/op-reth/crates/consensus/src/proof.rs b/op-reth/crates/consensus/src/proof.rs deleted file mode 100644 index 8c601942ece..00000000000 --- a/op-reth/crates/consensus/src/proof.rs +++ /dev/null @@ -1,338 +0,0 @@ -//! Helper function for Receipt root calculation for Optimism hardforks. - -use alloc::vec::Vec; -use alloy_consensus::ReceiptWithBloom; -use alloy_eips::eip2718::Encodable2718; -use alloy_primitives::B256; -use alloy_trie::root::ordered_trie_root_with_encoder; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::DepositReceipt; - -/// Calculates the receipt root for a header. -pub(crate) fn calculate_receipt_root_optimism( - receipts: &[ReceiptWithBloom<&R>], - chain_spec: impl OpHardforks, - timestamp: u64, -) -> B256 { - // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork, - // the receipt root calculation does not include the deposit nonce in the receipt - // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the - // receipts before calculating the receipt root. This was corrected in the Canyon - // hardfork. 
- if chain_spec.is_regolith_active_at_timestamp(timestamp) && - !chain_spec.is_canyon_active_at_timestamp(timestamp) - { - let receipts = receipts - .iter() - .map(|receipt| { - let mut receipt = receipt.clone().map_receipt(|r| r.clone()); - if let Some(receipt) = receipt.receipt.as_deposit_receipt_mut() { - receipt.deposit_nonce = None; - } - receipt - }) - .collect::>(); - - return ordered_trie_root_with_encoder(receipts.as_slice(), |r, buf| r.encode_2718(buf)) - } - - ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_2718(buf)) -} - -/// Calculates the receipt root for a header for the reference type of an OP receipt. -/// -/// NOTE: Prefer calculate receipt root optimism if you have log blooms memoized. -pub fn calculate_receipt_root_no_memo_optimism( - receipts: &[R], - chain_spec: impl OpHardforks, - timestamp: u64, -) -> B256 { - // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork, - // the receipt root calculation does not include the deposit nonce in the receipt - // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the - // receipts before calculating the receipt root. This was corrected in the Canyon - // hardfork. 
- if chain_spec.is_regolith_active_at_timestamp(timestamp) && - !chain_spec.is_canyon_active_at_timestamp(timestamp) - { - let receipts = receipts - .iter() - .map(|r| { - let mut r = (*r).clone(); - if let Some(receipt) = r.as_deposit_receipt_mut() { - receipt.deposit_nonce = None; - } - r - }) - .collect::>(); - - return ordered_trie_root_with_encoder(&receipts, |r, buf| { - r.with_bloom_ref().encode_2718(buf); - }) - } - - ordered_trie_root_with_encoder(receipts, |r, buf| { - r.with_bloom_ref().encode_2718(buf); - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::{Receipt, ReceiptWithBloom, TxReceipt}; - use alloy_primitives::{b256, bloom, hex, Address, Bytes, Log, LogData}; - use op_alloy_consensus::OpDepositReceipt; - use reth_optimism_chainspec::BASE_SEPOLIA; - use reth_optimism_primitives::OpReceipt; - - /// Tests that the receipt root is computed correctly for the regolith block. - /// This was implemented due to a minor bug in op-geth and op-erigon where in - /// the Regolith hardfork, the receipt root calculation does not include the - /// deposit nonce in the receipt encoding. - /// To fix this an op-reth patch was applied to the receipt root calculation - /// to strip the deposit nonce from each receipt before calculating the root. - #[test] - fn check_optimism_receipt_root() { - let cases = [ - // Deposit nonces didn't exist in Bedrock; No need to strip. For the purposes of this - // test, we do have them, so we should get the same root as Canyon. - ( - "bedrock", - 1679079599, - b256!("0xe255fed45eae7ede0556fe4fabc77b0d294d18781a5a581cab09127bc4cd9ffb"), - ), - // Deposit nonces introduced in Regolith. They weren't included in the receipt RLP, - // so we need to strip them - the receipt root will differ. - ( - "regolith", - 1679079600, - b256!("0xe255fed45eae7ede0556fe4fabc77b0d294d18781a5a581cab09127bc4cd9ffb"), - ), - // Receipt root hashing bug fixed in Canyon. 
Back to including the deposit nonce - // in the receipt RLP when computing the receipt root. - ( - "canyon", - 1699981200, - b256!("0x6eefbb5efb95235476654a8bfbf8cb64a4f5f0b0c80b700b0c5964550beee6d7"), - ), - ]; - - for case in cases { - let receipts = [ - // 0xb0d6ee650637911394396d81172bd1c637d568ed1fbddab0daddfca399c58b53 - OpReceipt::Deposit(OpDepositReceipt { - inner: Receipt { - status: true.into(), - cumulative_gas_used: 46913, - logs: vec![], - }, - deposit_nonce: Some(4012991u64), - deposit_receipt_version: None, - }), - // 0x2f433586bae30573c393adfa02bc81d2a1888a3d6c9869f473fb57245166bd9a - OpReceipt::Eip1559(Receipt { - status: true.into(), - cumulative_gas_used: 118083, - logs: vec![ - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - data: LogData::new_unchecked( - vec![ - b256!("0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - b256!("0x0000000000000000000000000000000000000000000000000000000000000000"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001")) - ) - }, - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - data: LogData::new_unchecked( - vec![ - b256!("0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - b256!("0x0000000000000000000000000000000000000000000000000000000000000000"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001")) - ) - }, - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - 
data: LogData::new_unchecked( - vec![ - b256!("0x0eb774bb9698a73583fe07b6972cf2dcc08d1d97581a22861f45feb86b395820"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - b256!("0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9"), - ], Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000003"))) - }, - ]}), - // 0x6c33676e8f6077f46a62eabab70bc6d1b1b18a624b0739086d77093a1ecf8266 - OpReceipt::Eip1559(Receipt { - status: true.into(), - cumulative_gas_used: 189253, - logs: vec![ - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - data: LogData::new_unchecked(vec![ - b256!("0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62"), - b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - b256!("0x0000000000000000000000000000000000000000000000000000000000000000"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001"))) - }, - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - data: LogData::new_unchecked(vec![ - b256!("0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62"), - b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - b256!("0x0000000000000000000000000000000000000000000000000000000000000000"), - b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001"))) - }, - Log { - address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), - data: LogData::new_unchecked(vec![ - b256!("0x0eb774bb9698a73583fe07b6972cf2dcc08d1d97581a22861f45feb86b395820"), - 
b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - b256!("0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec"), - ], - Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000003"))) - }, - ], - }), - // 0x4d3ecbef04ba7ce7f5ab55be0c61978ca97c117d7da448ed9771d4ff0c720a3f - OpReceipt::Eip1559(Receipt { - status: true.into(), - cumulative_gas_used: 346969, - logs: vec![ - Log { - address: hex!("4200000000000000000000000000000000000006").into(), - data: LogData::new_unchecked( vec![ - b256!("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - b256!("0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8"), - b256!("0x0000000000000000000000002992607c1614484fe6d865088e5c048f0650afd4"), - ], - Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000018de76816d8000"))) - }, - Log { - address: hex!("cf8e7e6b26f407dee615fc4db18bf829e7aa8c09").into(), - data: LogData::new_unchecked( vec![ - b256!("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - b256!("0x0000000000000000000000002992607c1614484fe6d865088e5c048f0650afd4"), - b256!("0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09"), - ], - Bytes::from_static(&hex!("000000000000000000000000000000000000000000000002d24d8e9ac1aa79e2"))) - }, - Log { - address: hex!("2992607c1614484fe6d865088e5c048f0650afd4").into(), - data: LogData::new_unchecked( vec![ - b256!("0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1"), - ], - Bytes::from_static(&hex!("000000000000000000000000000000000000000000000009bd50642785c15736000000000000000000000000000000000000000000011bb7ac324f724a29bbbf"))) - }, - Log { - address: hex!("2992607c1614484fe6d865088e5c048f0650afd4").into(), - data: LogData::new_unchecked( vec![ - b256!("0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822"), - b256!("0x00000000000000000000000029843613c7211d014f5dd5718cf32bcd314914cb"), - 
b256!("0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09"), - ], - Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000018de76816d800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002d24d8e9ac1aa79e2"))) - }, - Log { - address: hex!("6d0f8d488b669aa9ba2d0f0b7b75a88bf5051cd3").into(), - data: LogData::new_unchecked( vec![ - b256!("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - b256!("0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09"), - b256!("0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000014bc73062aea8093"))) - }, - Log { - address: hex!("8dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09").into(), - data: LogData::new_unchecked( vec![ - b256!("0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000002f122cfadc1ca82a35000000000000000000000000000000000000000000000665879dc0609945d6d1"))) - }, - Log { - address: hex!("8dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09").into(), - data: LogData::new_unchecked( vec![ - b256!("0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822"), - b256!("0x00000000000000000000000029843613c7211d014f5dd5718cf32bcd314914cb"), - b256!("0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8"), - ], - Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002d24d8e9ac1aa79e200000000000000000000000000000000000000000000000014bc73062aea80930000000000000000000000000000000000000000000000000000000000000000"))) - }, - ], - }), - // 0xf738af5eb00ba23dbc1be2dbce41dbc0180f0085b7fb46646e90bf737af90351 - OpReceipt::Eip1559(Receipt { - status: true.into(), - 
cumulative_gas_used: 623249, - logs: vec![ - Log { - address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), - data: LogData::new_unchecked( vec![ - b256!("0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"), - b256!("0x0000000000000000000000000000000000000000000000000000000000000000"), - b256!("0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e"), - b256!("0x000000000000000000000000000000000000000000000000000000000011a1d3"), - ], - Default::default()) - }, - Log { - address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), - data: LogData::new_unchecked( vec![ - b256!("0x9d89e36eadf856db0ad9ffb5a569e07f95634dddd9501141ecf04820484ad0dc"), - b256!("0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e"), - b256!("0x000000000000000000000000000000000000000000000000000000000011a1d3"), - ], - Bytes::from_static(&hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d515141646b33736538396b47716577395256567a316b68643548375562476d4d4a485a62566f386a6d346f4a2f30000000000000000000"))) - }, - Log { - address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), - data: LogData::new_unchecked( vec![ - b256!("0x110d160a1bedeea919a88fbc4b2a9fb61b7e664084391b6ca2740db66fef80fe"), - b256!("0x00000000000000000000000084d47f6eea8f8d87910448325519d1bb45c2972a"), - b256!("0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e"), - b256!("0x000000000000000000000000000000000000000000000000000000000011a1d3"), - ], - 
Bytes::from_static(&hex!("0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007717500762343034303661353035646234633961386163316433306335633332303265370000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d515141646b33736538396b47716577395256567a316b68643548375562476d4d4a485a62566f386a6d346f4a2f30000000000000000000"))) - }, - ], - }), - ]; - let root = calculate_receipt_root_optimism( - &receipts.iter().map(TxReceipt::with_bloom_ref).collect::>(), - BASE_SEPOLIA.as_ref(), - case.1, - ); - assert_eq!(root, case.2); - } - } - - #[test] - fn check_receipt_root_optimism() { - let logs = vec![Log { - address: Address::ZERO, - data: LogData::new_unchecked(vec![], Default::default()), - }]; - let logs_bloom = bloom!( - "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001" - ); - let inner = - OpReceipt::Eip2930(Receipt { status: true.into(), cumulative_gas_used: 102068, logs }); - let receipt = ReceiptWithBloom { receipt: 
&inner, logs_bloom }; - let receipt = vec![receipt]; - let root = calculate_receipt_root_optimism(&receipt, BASE_SEPOLIA.as_ref(), 0); - assert_eq!( - root, - b256!("0xfe70ae4a136d98944951b2123859698d59ad251a381abc9960fa81cae3d0d4a0") - ); - } -} diff --git a/op-reth/crates/consensus/src/validation/canyon.rs b/op-reth/crates/consensus/src/validation/canyon.rs deleted file mode 100644 index 886f53bb20b..00000000000 --- a/op-reth/crates/consensus/src/validation/canyon.rs +++ /dev/null @@ -1,41 +0,0 @@ -//! Canyon consensus rule checks. - -use alloy_consensus::BlockHeader; -use alloy_trie::EMPTY_ROOT_HASH; -use reth_consensus::ConsensusError; -use reth_primitives_traits::{BlockBody, GotExpected}; - -use crate::OpConsensusError; - -/// Verifies that withdrawals root in block header (Shanghai) is always [`EMPTY_ROOT_HASH`] in -/// Canyon. -#[inline] -pub fn ensure_empty_withdrawals_root(header: &H) -> Result<(), ConsensusError> { - // Shanghai rule - let header_withdrawals_root = - &header.withdrawals_root().ok_or(ConsensusError::WithdrawalsRootMissing)?; - - // Canyon rules - if *header_withdrawals_root != EMPTY_ROOT_HASH { - return Err(ConsensusError::BodyWithdrawalsRootDiff( - GotExpected { got: *header_withdrawals_root, expected: EMPTY_ROOT_HASH }.into(), - )); - } - - Ok(()) -} - -/// Verifies that withdrawals in block body (Shanghai) is always empty in Canyon. -/// -#[inline] -pub fn ensure_empty_shanghai_withdrawals(body: &T) -> Result<(), OpConsensusError> { - // Shanghai rule - let withdrawals = body.withdrawals().ok_or(ConsensusError::BodyWithdrawalsMissing)?; - - // Canyon rule - if !withdrawals.as_ref().is_empty() { - return Err(OpConsensusError::WithdrawalsNonEmpty) - } - - Ok(()) -} diff --git a/op-reth/crates/consensus/src/validation/isthmus.rs b/op-reth/crates/consensus/src/validation/isthmus.rs deleted file mode 100644 index f35f4ea69a7..00000000000 --- a/op-reth/crates/consensus/src/validation/isthmus.rs +++ /dev/null @@ -1,195 +0,0 @@ -//! 
Block verification w.r.t. consensus rules new in Isthmus hardfork. - -use crate::OpConsensusError; -use alloy_consensus::BlockHeader; -use alloy_primitives::B256; -use alloy_trie::EMPTY_ROOT_HASH; -use reth_optimism_primitives::L2_TO_L1_MESSAGE_PASSER_ADDRESS; -use reth_storage_api::{errors::ProviderResult, StorageRootProvider}; -use reth_trie_common::HashedStorage; -use revm::database::BundleState; -use tracing::warn; - -/// Verifies that `withdrawals_root` (i.e. `l2tol1-msg-passer` storage root since Isthmus) field is -/// set in block header. -pub fn ensure_withdrawals_storage_root_is_some( - header: H, -) -> Result<(), OpConsensusError> { - header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; - - Ok(()) -} - -/// Computes the storage root of predeploy `L2ToL1MessagePasser.sol`. -/// -/// Uses state updates from block execution. See also [`withdrawals_root_prehashed`]. -pub fn withdrawals_root( - state_updates: &BundleState, - state: DB, -) -> ProviderResult { - // if l2 withdrawals transactions were executed there will be storage updates for - // `L2ToL1MessagePasser.sol` predeploy - withdrawals_root_prehashed( - state_updates - .state() - .get(&L2_TO_L1_MESSAGE_PASSER_ADDRESS) - .map(|acc| { - HashedStorage::from_plain_storage( - acc.status, - acc.storage.iter().map(|(slot, value)| (slot, &value.present_value)), - ) - }) - .unwrap_or_default(), - state, - ) -} - -/// Computes the storage root of predeploy `L2ToL1MessagePasser.sol`. -/// -/// Uses pre-hashed storage updates of `L2ToL1MessagePasser.sol` predeploy, resulting from -/// execution of L2 withdrawals transactions. If none, takes empty [`HashedStorage::default`]. 
-pub fn withdrawals_root_prehashed( - hashed_storage_updates: HashedStorage, - state: DB, -) -> ProviderResult { - state.storage_root(L2_TO_L1_MESSAGE_PASSER_ADDRESS, hashed_storage_updates) -} - -/// Verifies block header field `withdrawals_root` against storage root of -/// `L2ToL1MessagePasser.sol` predeploy post block execution. -/// -/// Takes state updates resulting from execution of block. -/// -/// See . -pub fn verify_withdrawals_root( - state_updates: &BundleState, - state: DB, - header: H, -) -> Result<(), OpConsensusError> -where - DB: StorageRootProvider, - H: BlockHeader, -{ - let header_storage_root = - header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; - - let storage_root = withdrawals_root(state_updates, state) - .map_err(OpConsensusError::L2WithdrawalsRootCalculationFail)?; - - if storage_root == EMPTY_ROOT_HASH { - // if there was no MessagePasser contract storage, something is wrong - // (it should at least store an implementation address and owner address) - warn!("isthmus: no storage root for L2ToL1MessagePasser contract"); - } - - if header_storage_root != storage_root { - return Err(OpConsensusError::L2WithdrawalsRootMismatch { - header: header_storage_root, - exec_res: storage_root, - }) - } - - Ok(()) -} - -/// Verifies block header field `withdrawals_root` against storage root of -/// `L2ToL1MessagePasser.sol` predeploy post block execution. -/// -/// Takes pre-hashed storage updates of `L2ToL1MessagePasser.sol` predeploy, resulting from -/// execution of block, if any. Otherwise takes empty [`HashedStorage::default`]. -/// -/// See . 
-pub fn verify_withdrawals_root_prehashed( - hashed_storage_updates: HashedStorage, - state: DB, - header: H, -) -> Result<(), OpConsensusError> -where - DB: StorageRootProvider, - H: BlockHeader, -{ - let header_storage_root = - header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; - - let storage_root = withdrawals_root_prehashed(hashed_storage_updates, state) - .map_err(OpConsensusError::L2WithdrawalsRootCalculationFail)?; - - if header_storage_root != storage_root { - return Err(OpConsensusError::L2WithdrawalsRootMismatch { - header: header_storage_root, - exec_res: storage_root, - }) - } - - Ok(()) -} - -#[cfg(test)] -mod test { - use super::*; - use alloc::sync::Arc; - use alloy_chains::Chain; - use alloy_consensus::Header; - use alloy_primitives::{keccak256, B256, U256}; - use core::str::FromStr; - use reth_db_common::init::init_genesis; - use reth_optimism_chainspec::OpChainSpecBuilder; - use reth_optimism_node::OpNode; - use reth_provider::{ - providers::BlockchainProvider, test_utils::create_test_provider_factory_with_node_types, - StateWriter, - }; - use reth_revm::db::BundleState; - use reth_storage_api::StateProviderFactory; - use reth_trie::{test_utils::storage_root_prehashed, HashedStorage}; - use reth_trie_common::HashedPostState; - - #[test] - fn l2tol1_message_passer_no_withdrawals() { - let hashed_address = keccak256(L2_TO_L1_MESSAGE_PASSER_ADDRESS); - - // create account storage - let init_storage = HashedStorage::from_iter( - false, - [ - "50000000000000000000000000000004253371b55351a08cb3267d4d265530b6", - "512428ed685fff57294d1a9cbb147b18ae5db9cf6ae4b312fa1946ba0561882e", - "51e6784c736ef8548f856909870b38e49ef7a4e3e77e5e945e0d5e6fcaa3037f", - ] - .into_iter() - .map(|str| (B256::from_str(str).unwrap(), U256::from(1))), - ); - let mut state = HashedPostState::default(); - state.storages.insert(hashed_address, init_storage.clone()); - - // init test db - // note: must be empty (default) chain spec to ensure storage is 
empty after init genesis, - // otherwise can't use `storage_root_prehashed` to determine storage root later - let provider_factory = create_test_provider_factory_with_node_types::(Arc::new( - OpChainSpecBuilder::default().chain(Chain::dev()).genesis(Default::default()).build(), - )); - let _ = init_genesis(&provider_factory).unwrap(); - - // write account storage to database - let provider_rw = provider_factory.provider_rw().unwrap(); - provider_rw.write_hashed_state(&state.clone().into_sorted()).unwrap(); - provider_rw.commit().unwrap(); - - // create block header with withdrawals root set to storage root of l2tol1-msg-passer - let header = Header { - withdrawals_root: Some(storage_root_prehashed(init_storage.storage)), - ..Default::default() - }; - - // create state provider factory - let state_provider_factory = BlockchainProvider::new(provider_factory).unwrap(); - - // validate block against existing state by passing empty state updates - verify_withdrawals_root( - &BundleState::default(), - state_provider_factory.latest().expect("load state"), - &header, - ) - .unwrap(); - } -} diff --git a/op-reth/crates/consensus/src/validation/mod.rs b/op-reth/crates/consensus/src/validation/mod.rs deleted file mode 100644 index 21685486088..00000000000 --- a/op-reth/crates/consensus/src/validation/mod.rs +++ /dev/null @@ -1,588 +0,0 @@ -//! Verification of blocks w.r.t. Optimism hardforks. 
- -pub mod canyon; -pub mod isthmus; - -// Re-export the decode_holocene_base_fee function for compatibility -use reth_execution_types::BlockExecutionResult; -pub use reth_optimism_chainspec::decode_holocene_base_fee; - -use crate::proof::calculate_receipt_root_optimism; -use alloc::vec::Vec; -use alloy_consensus::{BlockHeader, TxReceipt, EMPTY_OMMER_ROOT_HASH}; -use alloy_eips::Encodable2718; -use alloy_primitives::{Bloom, Bytes, B256}; -use alloy_trie::EMPTY_ROOT_HASH; -use reth_consensus::ConsensusError; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::DepositReceipt; -use reth_primitives_traits::{receipt::gas_spent_by_transactions, BlockBody, GotExpected}; - -/// Ensures the block response data matches the header. -/// -/// This ensures the body response items match the header's hashes: -/// - ommer hash -/// - transaction root -/// - withdrawals root: the body's withdrawals root must only match the header's before isthmus -pub fn validate_body_against_header_op( - chain_spec: impl OpHardforks, - body: &B, - header: &H, -) -> Result<(), ConsensusError> -where - B: BlockBody, - H: reth_primitives_traits::BlockHeader, -{ - let ommers_hash = body.calculate_ommers_root(); - if Some(header.ommers_hash()) != ommers_hash { - return Err(ConsensusError::BodyOmmersHashDiff( - GotExpected { - got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH), - expected: header.ommers_hash(), - } - .into(), - )) - } - - let tx_root = body.calculate_tx_root(); - if header.transactions_root() != tx_root { - return Err(ConsensusError::BodyTransactionRootDiff( - GotExpected { got: tx_root, expected: header.transactions_root() }.into(), - )) - } - - match (header.withdrawals_root(), body.calculate_withdrawals_root()) { - (Some(header_withdrawals_root), Some(withdrawals_root)) => { - // after isthmus, the withdrawals root field is repurposed and no longer mirrors the - // withdrawals root computed from the body - if 
chain_spec.is_isthmus_active_at_timestamp(header.timestamp()) { - // After isthmus we only ensure that the body has empty withdrawals - if withdrawals_root != EMPTY_ROOT_HASH { - return Err(ConsensusError::BodyWithdrawalsRootDiff( - GotExpected { got: withdrawals_root, expected: EMPTY_ROOT_HASH }.into(), - )) - } - } else { - // before isthmus we ensure that the header root matches the body - if withdrawals_root != header_withdrawals_root { - return Err(ConsensusError::BodyWithdrawalsRootDiff( - GotExpected { got: withdrawals_root, expected: header_withdrawals_root } - .into(), - )) - } - } - } - (None, None) => { - // this is ok because we assume the fork is not active in this case - } - _ => return Err(ConsensusError::WithdrawalsRootUnexpected), - } - - Ok(()) -} - -/// Validate a block with regard to execution results: -/// -/// - Compares the receipts root in the block header to the block body -/// - Compares the gas used in the block header to the actual gas usage after execution -/// -/// If `receipt_root_bloom` is provided, the pre-computed receipt root and logs bloom are used -/// instead of computing them from the receipts. -pub fn validate_block_post_execution( - header: impl BlockHeader, - chain_spec: impl OpHardforks, - result: &BlockExecutionResult, - receipt_root_bloom: Option<(B256, Bloom)>, -) -> Result<(), ConsensusError> { - // Validate that the blob gas used is present and correctly computed if Jovian is active. 
- if chain_spec.is_jovian_active_at_timestamp(header.timestamp()) { - let computed_blob_gas_used = result.blob_gas_used; - let header_blob_gas_used = - header.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; - - if computed_blob_gas_used != header_blob_gas_used { - return Err(ConsensusError::BlobGasUsedDiff(GotExpected { - got: computed_blob_gas_used, - expected: header_blob_gas_used, - })); - } - } - - let receipts = &result.receipts; - - // Before Byzantium, receipts contained state root that would mean that expensive - // operation as hashing that is required for state root got calculated in every - // transaction This was replaced with is_success flag. - // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 - if chain_spec.is_byzantium_active_at_block(header.number()) { - let result = if let Some((receipts_root, logs_bloom)) = receipt_root_bloom { - compare_receipts_root_and_logs_bloom( - receipts_root, - logs_bloom, - header.receipts_root(), - header.logs_bloom(), - ) - } else { - verify_receipts_optimism( - header.receipts_root(), - header.logs_bloom(), - receipts, - chain_spec, - header.timestamp(), - ) - }; - - if let Err(error) = result { - let receipts = receipts - .iter() - .map(|r| Bytes::from(r.with_bloom_ref().encoded_2718())) - .collect::>(); - tracing::debug!(%error, ?receipts, "receipts verification failed"); - return Err(error) - } - } - - // Check if gas used matches the value set in header. - let cumulative_gas_used = - receipts.last().map(|receipt| receipt.cumulative_gas_used()).unwrap_or(0); - if header.gas_used() != cumulative_gas_used { - return Err(ConsensusError::BlockGasUsed { - gas: GotExpected { got: cumulative_gas_used, expected: header.gas_used() }, - gas_spent_by_tx: gas_spent_by_transactions(receipts), - }) - } - - Ok(()) -} - -/// Verify the calculated receipts root against the expected receipts root. 
-fn verify_receipts_optimism( - expected_receipts_root: B256, - expected_logs_bloom: Bloom, - receipts: &[R], - chain_spec: impl OpHardforks, - timestamp: u64, -) -> Result<(), ConsensusError> { - // Calculate receipts root. - let receipts_with_bloom = receipts.iter().map(TxReceipt::with_bloom_ref).collect::>(); - let receipts_root = - calculate_receipt_root_optimism(&receipts_with_bloom, chain_spec, timestamp); - - // Calculate header logs bloom. - let logs_bloom = receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref()); - - compare_receipts_root_and_logs_bloom( - receipts_root, - logs_bloom, - expected_receipts_root, - expected_logs_bloom, - )?; - - Ok(()) -} - -/// Compare the calculated receipts root with the expected receipts root, also compare -/// the calculated logs bloom with the expected logs bloom. -fn compare_receipts_root_and_logs_bloom( - calculated_receipts_root: B256, - calculated_logs_bloom: Bloom, - expected_receipts_root: B256, - expected_logs_bloom: Bloom, -) -> Result<(), ConsensusError> { - if calculated_receipts_root != expected_receipts_root { - return Err(ConsensusError::BodyReceiptRootDiff( - GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(), - )) - } - - if calculated_logs_bloom != expected_logs_bloom { - return Err(ConsensusError::BodyBloomLogDiff( - GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(), - )) - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_consensus::Header; - use alloy_eips::eip7685::Requests; - use alloy_primitives::{b256, hex, Bytes, U256}; - use op_alloy_consensus::OpTxEnvelope; - use reth_chainspec::{BaseFeeParams, ChainSpec, EthChainSpec, ForkCondition, Hardfork}; - use reth_optimism_chainspec::{OpChainSpec, BASE_SEPOLIA}; - use reth_optimism_forks::{OpHardfork, BASE_SEPOLIA_HARDFORKS}; - use reth_optimism_primitives::OpReceipt; - use std::sync::Arc; - - const HOLOCENE_TIMESTAMP: u64 = 1700000000; - 
const ISTHMUS_TIMESTAMP: u64 = 1750000000; - const JOVIAN_TIMESTAMP: u64 = 1800000000; - const BLOCK_TIME_SECONDS: u64 = 2; - - fn holocene_chainspec() -> Arc { - let mut hardforks = BASE_SEPOLIA_HARDFORKS.clone(); - hardforks - .insert(OpHardfork::Holocene.boxed(), ForkCondition::Timestamp(HOLOCENE_TIMESTAMP)); - Arc::new(OpChainSpec { - inner: ChainSpec { - chain: BASE_SEPOLIA.inner.chain, - genesis: BASE_SEPOLIA.inner.genesis.clone(), - genesis_header: BASE_SEPOLIA.inner.genesis_header.clone(), - paris_block_and_final_difficulty: Some((0, U256::from(0))), - hardforks, - base_fee_params: BASE_SEPOLIA.inner.base_fee_params.clone(), - prune_delete_limit: 10000, - ..Default::default() - }, - }) - } - - fn isthmus_chainspec() -> OpChainSpec { - let mut chainspec = BASE_SEPOLIA.as_ref().clone(); - chainspec - .inner - .hardforks - .insert(OpHardfork::Isthmus.boxed(), ForkCondition::Timestamp(ISTHMUS_TIMESTAMP)); - chainspec - } - - fn jovian_chainspec() -> OpChainSpec { - let mut chainspec = BASE_SEPOLIA.as_ref().clone(); - chainspec - .inner - .hardforks - .insert(OpHardfork::Jovian.boxed(), ForkCondition::Timestamp(JOVIAN_TIMESTAMP)); - chainspec - } - - #[test] - fn test_get_base_fee_pre_holocene() { - let op_chain_spec = BASE_SEPOLIA.clone(); - let parent = Header { - base_fee_per_gas: Some(1), - gas_used: 15763614, - gas_limit: 144000000, - ..Default::default() - }; - let base_fee = - reth_optimism_chainspec::OpChainSpec::next_block_base_fee(&op_chain_spec, &parent, 0); - assert_eq!( - base_fee.unwrap(), - op_chain_spec.next_block_base_fee(&parent, 0).unwrap_or_default() - ); - } - - #[test] - fn test_get_base_fee_holocene_extra_data_not_set() { - let op_chain_spec = holocene_chainspec(); - let parent = Header { - base_fee_per_gas: Some(1), - gas_used: 15763614, - gas_limit: 144000000, - timestamp: HOLOCENE_TIMESTAMP + 3, - extra_data: Bytes::from_static(&[0, 0, 0, 0, 0, 0, 0, 0, 0]), - ..Default::default() - }; - let base_fee = 
reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - HOLOCENE_TIMESTAMP + 5, - ); - assert_eq!( - base_fee.unwrap(), - op_chain_spec.next_block_base_fee(&parent, 0).unwrap_or_default() - ); - } - - #[test] - fn test_get_base_fee_holocene_extra_data_set() { - let parent = Header { - base_fee_per_gas: Some(1), - gas_used: 15763614, - gas_limit: 144000000, - extra_data: Bytes::from_static(&[0, 0, 0, 0, 8, 0, 0, 0, 8]), - timestamp: HOLOCENE_TIMESTAMP + 3, - ..Default::default() - }; - - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &holocene_chainspec(), - &parent, - HOLOCENE_TIMESTAMP + 5, - ); - assert_eq!( - base_fee.unwrap(), - parent - .next_block_base_fee(BaseFeeParams::new(0x00000008, 0x00000008)) - .unwrap_or_default() - ); - } - - // - #[test] - fn test_get_base_fee_holocene_extra_data_set_base_sepolia() { - let parent = Header { - base_fee_per_gas: Some(507), - gas_used: 4847634, - gas_limit: 60000000, - extra_data: hex!("00000000fa0000000a").into(), - timestamp: 1735315544, - ..Default::default() - }; - - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &*BASE_SEPOLIA, - &parent, - 1735315546, - ) - .unwrap(); - assert_eq!(base_fee, 507); - } - - #[test] - fn test_get_base_fee_holocene_extra_data_set_and_min_base_fee_set() { - const MIN_BASE_FEE: u64 = 10; - - let mut extra_data = Vec::new(); - // eip1559 params - extra_data.append(&mut hex!("00000000fa0000000a").to_vec()); - // min base fee - extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); - let extra_data = Bytes::from(extra_data); - - let parent = Header { - base_fee_per_gas: Some(507), - gas_used: 4847634, - gas_limit: 60000000, - extra_data, - timestamp: 1735315544, - ..Default::default() - }; - - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &*BASE_SEPOLIA, - &parent, - 1735315546, - ); - assert_eq!(base_fee, None); - } - - /// The version byte for Jovian is 1. 
- const JOVIAN_EXTRA_DATA_VERSION_BYTE: u8 = 1; - - #[test] - fn test_get_base_fee_jovian_extra_data_and_min_base_fee_not_set() { - let op_chain_spec = jovian_chainspec(); - - let mut extra_data = Vec::new(); - extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); - // eip1559 params - extra_data.append(&mut [0_u8; 8].to_vec()); - let extra_data = Bytes::from(extra_data); - - let parent = Header { - base_fee_per_gas: Some(1), - gas_used: 15763614, - gas_limit: 144000000, - timestamp: JOVIAN_TIMESTAMP, - extra_data, - ..Default::default() - }; - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, - ); - assert_eq!(base_fee, None); - } - - /// After Jovian, the next block base fee cannot be less than the minimum base fee. - #[test] - fn test_get_base_fee_jovian_default_extra_data_and_min_base_fee() { - const CURR_BASE_FEE: u64 = 1; - const MIN_BASE_FEE: u64 = 10; - - let mut extra_data = Vec::new(); - extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); - // eip1559 params - extra_data.append(&mut [0_u8; 8].to_vec()); - // min base fee - extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); - let extra_data = Bytes::from(extra_data); - - let op_chain_spec = jovian_chainspec(); - let parent = Header { - base_fee_per_gas: Some(CURR_BASE_FEE), - gas_used: 15763614, - gas_limit: 144000000, - timestamp: JOVIAN_TIMESTAMP, - extra_data, - ..Default::default() - }; - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, - ); - assert_eq!(base_fee, Some(MIN_BASE_FEE)); - } - - /// After Jovian, the next block base fee cannot be less than the minimum base fee. 
- #[test] - fn test_jovian_min_base_fee_cannot_decrease() { - const MIN_BASE_FEE: u64 = 10; - - let mut extra_data = Vec::new(); - extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); - // eip1559 params - extra_data.append(&mut [0_u8; 8].to_vec()); - // min base fee - extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); - let extra_data = Bytes::from(extra_data); - - let op_chain_spec = jovian_chainspec(); - - // If we're currently at the minimum base fee, the next block base fee cannot decrease. - let parent = Header { - base_fee_per_gas: Some(MIN_BASE_FEE), - gas_used: 10, - gas_limit: 144000000, - timestamp: JOVIAN_TIMESTAMP, - extra_data: extra_data.clone(), - ..Default::default() - }; - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, - ); - assert_eq!(base_fee, Some(MIN_BASE_FEE)); - - // The next block can increase the base fee - let parent = Header { - base_fee_per_gas: Some(MIN_BASE_FEE), - gas_used: 144000000, - gas_limit: 144000000, - timestamp: JOVIAN_TIMESTAMP, - extra_data, - ..Default::default() - }; - let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - JOVIAN_TIMESTAMP + 2 * BLOCK_TIME_SECONDS, - ); - assert_eq!(base_fee, Some(MIN_BASE_FEE + 1)); - } - - #[test] - fn test_jovian_base_fee_can_decrease_if_above_min_base_fee() { - const MIN_BASE_FEE: u64 = 10; - - let mut extra_data = Vec::new(); - extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); - // eip1559 params - extra_data.append(&mut [0_u8; 8].to_vec()); - // min base fee - extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); - let extra_data = Bytes::from(extra_data); - - let op_chain_spec = jovian_chainspec(); - - let parent = Header { - base_fee_per_gas: Some(100 * MIN_BASE_FEE), - gas_used: 10, - gas_limit: 144000000, - timestamp: JOVIAN_TIMESTAMP, - extra_data, - ..Default::default() - }; - let base_fee = 
reth_optimism_chainspec::OpChainSpec::next_block_base_fee( - &op_chain_spec, - &parent, - JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, - ) - .unwrap(); - assert_eq!( - base_fee, - op_chain_spec - .inner - .next_block_base_fee(&parent, JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS) - .unwrap() - ); - } - - #[test] - fn body_against_header_isthmus() { - let chainspec = isthmus_chainspec(); - let header = Header { - base_fee_per_gas: Some(507), - gas_used: 4847634, - gas_limit: 60000000, - extra_data: hex!("00000000fa0000000a").into(), - timestamp: 1800000000, - withdrawals_root: Some(b256!( - "0x611e1d75cbb77fa782d79485a8384e853bc92e56883c313a51e3f9feef9a9a71" - )), - ..Default::default() - }; - let mut body = alloy_consensus::BlockBody:: { - transactions: vec![], - ommers: vec![], - withdrawals: Some(Default::default()), - }; - validate_body_against_header_op(&chainspec, &body, &header).unwrap(); - - body.withdrawals.take(); - validate_body_against_header_op(&chainspec, &body, &header).unwrap_err(); - } - - #[test] - fn test_jovian_blob_gas_used_validation() { - const BLOB_GAS_USED: u64 = 1000; - const GAS_USED: u64 = 5000; - - let chainspec = jovian_chainspec(); - let header = Header { - timestamp: JOVIAN_TIMESTAMP, - blob_gas_used: Some(BLOB_GAS_USED), - ..Default::default() - }; - - let result = BlockExecutionResult:: { - blob_gas_used: BLOB_GAS_USED, - receipts: vec![], - requests: Requests::default(), - gas_used: GAS_USED, - }; - validate_block_post_execution(&header, &chainspec, &result, None).unwrap(); - } - - #[test] - fn test_jovian_blob_gas_used_validation_mismatched() { - const BLOB_GAS_USED: u64 = 1000; - const GAS_USED: u64 = 5000; - - let chainspec = jovian_chainspec(); - let header = Header { - timestamp: JOVIAN_TIMESTAMP, - blob_gas_used: Some(BLOB_GAS_USED + 1), - ..Default::default() - }; - - let result = BlockExecutionResult:: { - blob_gas_used: BLOB_GAS_USED, - receipts: vec![], - requests: Requests::default(), - gas_used: GAS_USED, - }; - assert!(matches!( - 
validate_block_post_execution(&header, &chainspec, &result, None).unwrap_err(), - ConsensusError::BlobGasUsedDiff(diff) - if diff.got == BLOB_GAS_USED && diff.expected == BLOB_GAS_USED + 1 - )); - } -} diff --git a/op-reth/crates/evm/Cargo.toml b/op-reth/crates/evm/Cargo.toml deleted file mode 100644 index 724f8555e09..00000000000 --- a/op-reth/crates/evm/Cargo.toml +++ /dev/null @@ -1,83 +0,0 @@ -[package] -name = "reth-optimism-evm" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# Reth -reth-chainspec.workspace = true -reth-evm = { workspace = true, features = ["op"] } -reth-primitives-traits.workspace = true -reth-execution-errors.workspace = true -reth-execution-types.workspace = true -reth-storage-errors.workspace = true - -reth-rpc-eth-api = { workspace = true, optional = true } - -# ethereum -alloy-eips.workspace = true -alloy-evm.workspace = true -alloy-primitives.workspace = true -alloy-op-evm.workspace = true -op-alloy-consensus.workspace = true -op-alloy-rpc-types-engine.workspace = true -alloy-consensus.workspace = true - -# Optimism -reth-optimism-chainspec.workspace = true -reth-optimism-consensus.workspace = true -reth-optimism-forks.workspace = true -reth-optimism-primitives.workspace = true - -# revm -revm.workspace = true -op-revm.workspace = true - -# misc -thiserror.workspace = true - -[dev-dependencies] -reth-evm = { workspace = true, features = ["test-utils"] } -reth-revm = { workspace = true, features = ["test-utils"] } -alloy-genesis.workspace = true -reth-optimism-primitives = { workspace = true, features = ["arbitrary"] } - -[features] -default = ["std"] -std = [ - "reth-revm/std", - "alloy-consensus/std", - "alloy-eips/std", - "alloy-genesis/std", - "alloy-primitives/std", - "reth-primitives-traits/std", - "revm/std", - "reth-optimism-primitives/std", - 
"reth-optimism-forks/std", - "thiserror/std", - "op-alloy-consensus/std", - "reth-chainspec/std", - "reth-optimism-consensus/std", - "reth-optimism-chainspec/std", - "reth-execution-errors/std", - "reth-execution-types/std", - "alloy-evm/std", - "alloy-op-evm/std", - "op-revm/std", - "reth-evm/std", - "op-alloy-rpc-types-engine/std", - "reth-storage-errors/std", -] -portable = [ - "reth-revm/portable", - "op-revm/portable", - "revm/portable", -] -rpc = ["reth-rpc-eth-api", "reth-optimism-primitives/serde", "reth-optimism-primitives/reth-codec", "alloy-evm/rpc"] diff --git a/op-reth/crates/evm/src/build.rs b/op-reth/crates/evm/src/build.rs deleted file mode 100644 index b8fab18833c..00000000000 --- a/op-reth/crates/evm/src/build.rs +++ /dev/null @@ -1,154 +0,0 @@ -use alloc::sync::Arc; -use alloy_consensus::{ - constants::EMPTY_WITHDRAWALS, proofs, Block, BlockBody, Header, TxReceipt, - EMPTY_OMMER_ROOT_HASH, -}; -use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE}; -use alloy_evm::block::BlockExecutorFactory; -use alloy_op_evm::OpBlockExecutionCtx; -use alloy_primitives::logs_bloom; -use reth_evm::execute::{BlockAssembler, BlockAssemblerInput}; -use reth_execution_errors::BlockExecutionError; -use reth_execution_types::BlockExecutionResult; -use reth_optimism_consensus::{calculate_receipt_root_no_memo_optimism, isthmus}; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::DepositReceipt; -use reth_primitives_traits::{Receipt, SignedTransaction}; -use revm::context::Block as _; - -/// Block builder for Optimism. -#[derive(Debug)] -pub struct OpBlockAssembler { - chain_spec: Arc, -} - -impl OpBlockAssembler { - /// Creates a new [`OpBlockAssembler`]. - pub const fn new(chain_spec: Arc) -> Self { - Self { chain_spec } - } -} - -impl OpBlockAssembler { - /// Builds a block for `input` without any bounds on header `H`. 
- pub fn assemble_block< - F: for<'a> BlockExecutorFactory< - ExecutionCtx<'a>: Into, - Transaction: SignedTransaction, - Receipt: Receipt + DepositReceipt, - >, - H, - >( - &self, - input: BlockAssemblerInput<'_, '_, F, H>, - ) -> Result, BlockExecutionError> { - let BlockAssemblerInput { - evm_env, - execution_ctx: ctx, - transactions, - output: BlockExecutionResult { receipts, gas_used, blob_gas_used, requests: _ }, - bundle_state, - state_root, - state_provider, - .. - } = input; - let ctx = ctx.into(); - - let timestamp = evm_env.block_env.timestamp().saturating_to(); - - let transactions_root = proofs::calculate_transaction_root(&transactions); - let receipts_root = - calculate_receipt_root_no_memo_optimism(receipts, &self.chain_spec, timestamp); - let logs_bloom = logs_bloom(receipts.iter().flat_map(|r| r.logs())); - - let mut requests_hash = None; - - let withdrawals_root = if self.chain_spec.is_isthmus_active_at_timestamp(timestamp) { - // always empty requests hash post isthmus - requests_hash = Some(EMPTY_REQUESTS_HASH); - - // withdrawals root field in block header is used for storage root of L2 predeploy - // `l2tol1-message-passer` - Some( - isthmus::withdrawals_root(bundle_state, state_provider) - .map_err(BlockExecutionError::other)?, - ) - } else if self.chain_spec.is_canyon_active_at_timestamp(timestamp) { - Some(EMPTY_WITHDRAWALS) - } else { - None - }; - - let (excess_blob_gas, blob_gas_used) = - if self.chain_spec.is_jovian_active_at_timestamp(timestamp) { - // In jovian, we're using the blob gas used field to store the current da - // footprint's value. 
- (Some(0), Some(*blob_gas_used)) - } else if self.chain_spec.is_ecotone_active_at_timestamp(timestamp) { - (Some(0), Some(0)) - } else { - (None, None) - }; - - let header = Header { - parent_hash: ctx.parent_hash, - ommers_hash: EMPTY_OMMER_ROOT_HASH, - beneficiary: evm_env.block_env.beneficiary(), - state_root, - transactions_root, - receipts_root, - withdrawals_root, - logs_bloom, - timestamp, - mix_hash: evm_env.block_env.prevrandao().unwrap_or_default(), - nonce: BEACON_NONCE.into(), - base_fee_per_gas: Some(evm_env.block_env.basefee()), - number: evm_env.block_env.number().saturating_to(), - gas_limit: evm_env.block_env.gas_limit(), - difficulty: evm_env.block_env.difficulty(), - gas_used: *gas_used, - extra_data: ctx.extra_data, - parent_beacon_block_root: ctx.parent_beacon_block_root, - blob_gas_used, - excess_blob_gas, - requests_hash, - }; - - Ok(Block::new( - header, - BlockBody { - transactions, - ommers: Default::default(), - withdrawals: self - .chain_spec - .is_canyon_active_at_timestamp(timestamp) - .then(Default::default), - }, - )) - } -} - -impl Clone for OpBlockAssembler { - fn clone(&self) -> Self { - Self { chain_spec: self.chain_spec.clone() } - } -} - -impl BlockAssembler for OpBlockAssembler -where - ChainSpec: OpHardforks, - F: for<'a> BlockExecutorFactory< - ExecutionCtx<'a> = OpBlockExecutionCtx, - Transaction: SignedTransaction, - Receipt: Receipt + DepositReceipt, - >, -{ - type Block = Block; - - fn assemble_block( - &self, - input: BlockAssemblerInput<'_, '_, F>, - ) -> Result { - self.assemble_block(input) - } -} diff --git a/op-reth/crates/evm/src/config.rs b/op-reth/crates/evm/src/config.rs deleted file mode 100644 index 1f1068c40d9..00000000000 --- a/op-reth/crates/evm/src/config.rs +++ /dev/null @@ -1,51 +0,0 @@ -pub use alloy_op_evm::{ - spec as revm_spec, spec_by_timestamp_after_bedrock as revm_spec_by_timestamp_after_bedrock, -}; -use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; -use revm::primitives::{Address, Bytes, 
B256}; - -/// Context relevant for execution of a next block w.r.t OP. -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OpNextBlockEnvAttributes { - /// The timestamp of the next block. - pub timestamp: u64, - /// The suggested fee recipient for the next block. - pub suggested_fee_recipient: Address, - /// The randomness value for the next block. - pub prev_randao: B256, - /// Block gas limit. - pub gas_limit: u64, - /// The parent beacon block root. - pub parent_beacon_block_root: Option, - /// Encoded EIP-1559 parameters to include into block's `extra_data` field. - pub extra_data: Bytes, -} - -#[cfg(feature = "rpc")] -impl reth_rpc_eth_api::helpers::pending_block::BuildPendingEnv - for OpNextBlockEnvAttributes -{ - fn build_pending_env(parent: &crate::SealedHeader) -> Self { - Self { - timestamp: parent.timestamp().saturating_add(12), - suggested_fee_recipient: parent.beneficiary(), - prev_randao: B256::random(), - gas_limit: parent.gas_limit(), - parent_beacon_block_root: parent.parent_beacon_block_root(), - extra_data: parent.extra_data().clone(), - } - } -} - -impl From for OpNextBlockEnvAttributes { - fn from(base: OpFlashblockPayloadBase) -> Self { - Self { - timestamp: base.timestamp, - suggested_fee_recipient: base.fee_recipient, - prev_randao: base.prev_randao, - gas_limit: base.gas_limit, - parent_beacon_block_root: Some(base.parent_beacon_block_root), - extra_data: base.extra_data, - } - } -} diff --git a/op-reth/crates/evm/src/lib.rs b/op-reth/crates/evm/src/lib.rs deleted file mode 100644 index d7985b8b1c5..00000000000 --- a/op-reth/crates/evm/src/lib.rs +++ /dev/null @@ -1,902 +0,0 @@ -//! EVM config for vanilla optimism. 
- -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(feature = "std"), no_std)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -extern crate alloc; - -use alloc::sync::Arc; -use alloy_consensus::{BlockHeader, Header}; -use alloy_evm::{EvmFactory, FromRecoveredTx, FromTxWithEncoded}; -use alloy_op_evm::block::{receipt_builder::OpReceiptBuilder, OpTxEnv}; -use core::fmt::Debug; -use op_alloy_consensus::EIP1559ParamError; -use op_revm::{OpSpecId, OpTransaction}; -use reth_chainspec::EthChainSpec; -use reth_evm::{ - eth::NextEvmEnvAttributes, precompiles::PrecompilesMap, ConfigureEvm, EvmEnv, TransactionEnv, -}; -use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::{DepositReceipt, OpPrimitives}; -use reth_primitives_traits::{NodePrimitives, SealedBlock, SealedHeader, SignedTransaction}; -use revm::context::{BlockEnv, TxEnv}; - -#[allow(unused_imports)] -use { - alloy_eips::Decodable2718, - alloy_primitives::{Bytes, U256}, - op_alloy_rpc_types_engine::OpExecutionData, - reth_evm::{EvmEnvFor, ExecutionCtxFor}, - reth_primitives_traits::{TxTy, WithEncoded}, - reth_storage_errors::any::AnyError, - revm::{ - context::CfgEnv, context_interface::block::BlobExcessGasAndPrice, - primitives::hardfork::SpecId, - }, -}; - -#[cfg(feature = "std")] -use reth_evm::{ConfigureEngineEvm, ExecutableTxIterator}; - -mod config; -pub use config::{revm_spec, revm_spec_by_timestamp_after_bedrock, OpNextBlockEnvAttributes}; -mod execute; -pub use execute::*; -pub mod l1; -pub use l1::*; -mod receipts; -pub use receipts::*; -mod build; -pub use build::OpBlockAssembler; - -mod error; -pub use error::OpBlockExecutionError; - -pub use 
alloy_op_evm::{OpBlockExecutionCtx, OpBlockExecutorFactory, OpEvm, OpEvmFactory}; - -/// Optimism-related EVM configuration. -#[derive(Debug)] -pub struct OpEvmConfig< - ChainSpec = OpChainSpec, - N: NodePrimitives = OpPrimitives, - R = OpRethReceiptBuilder, - EvmFactory = OpEvmFactory, -> { - /// Inner [`OpBlockExecutorFactory`]. - pub executor_factory: OpBlockExecutorFactory, EvmFactory>, - /// Optimism block assembler. - pub block_assembler: OpBlockAssembler, - #[doc(hidden)] - pub _pd: core::marker::PhantomData, -} - -impl Clone - for OpEvmConfig -{ - fn clone(&self) -> Self { - Self { - executor_factory: self.executor_factory.clone(), - block_assembler: self.block_assembler.clone(), - _pd: self._pd, - } - } -} - -impl OpEvmConfig { - /// Creates a new [`OpEvmConfig`] with the given chain spec for OP chains. - pub fn optimism(chain_spec: Arc) -> Self { - Self::new(chain_spec, OpRethReceiptBuilder::default()) - } -} - -impl OpEvmConfig { - /// Creates a new [`OpEvmConfig`] with the given chain spec. - pub fn new(chain_spec: Arc, receipt_builder: R) -> Self { - Self { - block_assembler: OpBlockAssembler::new(chain_spec.clone()), - executor_factory: OpBlockExecutorFactory::new( - receipt_builder, - chain_spec, - OpEvmFactory::default(), - ), - _pd: core::marker::PhantomData, - } - } -} - -impl OpEvmConfig -where - ChainSpec: OpHardforks, - N: NodePrimitives, -{ - /// Returns the chain spec associated with this configuration. - pub const fn chain_spec(&self) -> &Arc { - self.executor_factory.spec() - } -} - -impl ConfigureEvm for OpEvmConfig -where - ChainSpec: EthChainSpec
+ OpHardforks, - N: NodePrimitives< - Receipt = R::Receipt, - SignedTx = R::Transaction, - BlockHeader = Header, - BlockBody = alloy_consensus::BlockBody, - Block = alloy_consensus::Block, - >, - OpTransaction: FromRecoveredTx + FromTxWithEncoded, - R: OpReceiptBuilder, - EvmF: EvmFactory< - Tx: FromRecoveredTx - + FromTxWithEncoded - + TransactionEnv - + OpTxEnv, - Precompiles = PrecompilesMap, - Spec = OpSpecId, - BlockEnv = BlockEnv, - > + Debug, - Self: Send + Sync + Unpin + Clone + 'static, -{ - type Primitives = N; - type Error = EIP1559ParamError; - type NextBlockEnvCtx = OpNextBlockEnvAttributes; - type BlockExecutorFactory = OpBlockExecutorFactory, EvmF>; - type BlockAssembler = OpBlockAssembler; - - fn block_executor_factory(&self) -> &Self::BlockExecutorFactory { - &self.executor_factory - } - - fn block_assembler(&self) -> &Self::BlockAssembler { - &self.block_assembler - } - - fn evm_env(&self, header: &Header) -> Result, Self::Error> { - Ok(EvmEnv::for_op_block(header, self.chain_spec(), self.chain_spec().chain().id())) - } - - fn next_evm_env( - &self, - parent: &Header, - attributes: &Self::NextBlockEnvCtx, - ) -> Result, Self::Error> { - Ok(EvmEnv::for_op_next_block( - parent, - NextEvmEnvAttributes { - timestamp: attributes.timestamp, - suggested_fee_recipient: attributes.suggested_fee_recipient, - prev_randao: attributes.prev_randao, - gas_limit: attributes.gas_limit, - }, - self.chain_spec().next_block_base_fee(parent, attributes.timestamp).unwrap_or_default(), - self.chain_spec(), - self.chain_spec().chain().id(), - )) - } - - fn context_for_block( - &self, - block: &'_ SealedBlock, - ) -> Result { - Ok(OpBlockExecutionCtx { - parent_hash: block.header().parent_hash(), - parent_beacon_block_root: block.header().parent_beacon_block_root(), - extra_data: block.header().extra_data().clone(), - }) - } - - fn context_for_next_block( - &self, - parent: &SealedHeader, - attributes: Self::NextBlockEnvCtx, - ) -> Result { - Ok(OpBlockExecutionCtx { - 
parent_hash: parent.hash(), - parent_beacon_block_root: attributes.parent_beacon_block_root, - extra_data: attributes.extra_data, - }) - } -} - -#[cfg(feature = "std")] -impl ConfigureEngineEvm for OpEvmConfig -where - ChainSpec: EthChainSpec
+ OpHardforks, - N: NodePrimitives< - Receipt = R::Receipt, - SignedTx = R::Transaction, - BlockHeader = Header, - BlockBody = alloy_consensus::BlockBody, - Block = alloy_consensus::Block, - >, - OpTransaction: FromRecoveredTx + FromTxWithEncoded, - R: OpReceiptBuilder, - Self: Send + Sync + Unpin + Clone + 'static, -{ - fn evm_env_for_payload( - &self, - payload: &OpExecutionData, - ) -> Result, Self::Error> { - let timestamp = payload.payload.timestamp(); - let block_number = payload.payload.block_number(); - - let spec = revm_spec_by_timestamp_after_bedrock(self.chain_spec(), timestamp); - - let cfg_env = CfgEnv::new() - .with_chain_id(self.chain_spec().chain().id()) - .with_spec_and_mainnet_gas_params(spec); - - let blob_excess_gas_and_price = spec - .into_eth_spec() - .is_enabled_in(SpecId::CANCUN) - .then_some(BlobExcessGasAndPrice { excess_blob_gas: 0, blob_gasprice: 1 }); - - let block_env = BlockEnv { - number: U256::from(block_number), - beneficiary: payload.payload.as_v1().fee_recipient, - timestamp: U256::from(timestamp), - difficulty: if spec.into_eth_spec() >= SpecId::MERGE { - U256::ZERO - } else { - payload.payload.as_v1().prev_randao.into() - }, - prevrandao: (spec.into_eth_spec() >= SpecId::MERGE) - .then(|| payload.payload.as_v1().prev_randao), - gas_limit: payload.payload.as_v1().gas_limit, - basefee: payload.payload.as_v1().base_fee_per_gas.to(), - // EIP-4844 excess blob gas of this block, introduced in Cancun - blob_excess_gas_and_price, - }; - - Ok(EvmEnv { cfg_env, block_env }) - } - - fn context_for_payload<'a>( - &self, - payload: &'a OpExecutionData, - ) -> Result, Self::Error> { - Ok(OpBlockExecutionCtx { - parent_hash: payload.parent_hash(), - parent_beacon_block_root: payload.sidecar.parent_beacon_block_root(), - extra_data: payload.payload.as_v1().extra_data.clone(), - }) - } - - fn tx_iterator_for_payload( - &self, - payload: &OpExecutionData, - ) -> Result, Self::Error> { - let transactions = payload.payload.transactions().clone(); 
- let convert = |encoded: Bytes| { - let tx = TxTy::::decode_2718_exact(encoded.as_ref()) - .map_err(AnyError::new)?; - let signer = tx.try_recover().map_err(AnyError::new)?; - Ok::<_, AnyError>(WithEncoded::new(encoded, tx.with_signer(signer))) - }; - - Ok((transactions, convert)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloc::collections::BTreeMap; - use alloy_consensus::{Header, Receipt}; - use alloy_eips::eip7685::Requests; - use alloy_genesis::Genesis; - use alloy_primitives::{bytes, map::HashMap, Address, LogData, B256}; - use op_revm::OpSpecId; - use reth_chainspec::ChainSpec; - use reth_evm::execute::ProviderError; - use reth_execution_types::{ - AccountRevertInit, BundleStateInit, Chain, ExecutionOutcome, RevertsInit, - }; - use reth_optimism_chainspec::{OpChainSpec, BASE_MAINNET}; - use reth_optimism_primitives::{OpBlock, OpPrimitives, OpReceipt}; - use reth_primitives_traits::{Account, RecoveredBlock}; - use revm::{ - database::{BundleState, CacheDB}, - database_interface::EmptyDBTyped, - inspector::NoOpInspector, - primitives::Log, - state::AccountInfo, - }; - use std::sync::Arc; - - fn test_evm_config() -> OpEvmConfig { - OpEvmConfig::optimism(BASE_MAINNET.clone()) - } - - #[test] - fn test_fill_cfg_and_block_env() { - // Create a default header - let header = Header::default(); - - // Build the ChainSpec for Ethereum mainnet, activating London, Paris, and Shanghai - // hardforks - let chain_spec = ChainSpec::builder() - .chain(0.into()) - .genesis(Genesis::default()) - .london_activated() - .paris_activated() - .shanghai_activated() - .build(); - - // Use the `OpEvmConfig` to create the `cfg_env` and `block_env` based on the ChainSpec, - // Header, and total difficulty - let EvmEnv { cfg_env, .. 
} = - OpEvmConfig::optimism(Arc::new(OpChainSpec { inner: chain_spec.clone() })) - .evm_env(&header) - .unwrap(); - - // Assert that the chain ID in the `cfg_env` is correctly set to the chain ID of the - // ChainSpec - assert_eq!(cfg_env.chain_id, chain_spec.chain().id()); - } - - #[test] - fn test_evm_with_env_default_spec() { - let evm_config = test_evm_config(); - - let db = CacheDB::>::default(); - - let evm_env = EvmEnv::default(); - - let evm = evm_config.evm_with_env(db, evm_env.clone()); - - // Check that the EVM environment - assert_eq!(evm.cfg, evm_env.cfg_env); - } - - #[test] - fn test_evm_with_env_custom_cfg() { - let evm_config = test_evm_config(); - - let db = CacheDB::>::default(); - - // Create a custom configuration environment with a chain ID of 111 - let cfg = - CfgEnv::new().with_chain_id(111).with_spec_and_mainnet_gas_params(OpSpecId::default()); - - let evm_env = EvmEnv { cfg_env: cfg.clone(), ..Default::default() }; - - let evm = evm_config.evm_with_env(db, evm_env); - - // Check that the EVM environment is initialized with the custom environment - assert_eq!(evm.cfg, cfg); - } - - #[test] - fn test_evm_with_env_custom_block_and_tx() { - let evm_config = test_evm_config(); - - let db = CacheDB::>::default(); - - // Create customs block and tx env - let block = BlockEnv { - basefee: 1000, - gas_limit: 10_000_000, - number: U256::from(42), - ..Default::default() - }; - - let evm_env = EvmEnv { block_env: block, ..Default::default() }; - - let evm = evm_config.evm_with_env(db, evm_env.clone()); - - // Verify that the block and transaction environments are set correctly - assert_eq!(evm.block, evm_env.block_env); - } - - #[test] - fn test_evm_with_spec_id() { - let evm_config = test_evm_config(); - - let db = CacheDB::>::default(); - - let evm_env = EvmEnv { - cfg_env: CfgEnv::new().with_spec_and_mainnet_gas_params(OpSpecId::ECOTONE), - ..Default::default() - }; - - let evm = evm_config.evm_with_env(db, evm_env.clone()); - - assert_eq!(evm.cfg, 
evm_env.cfg_env); - } - - #[test] - fn test_evm_with_env_and_default_inspector() { - let evm_config = test_evm_config(); - let db = CacheDB::>::default(); - - let evm_env = EvmEnv { cfg_env: Default::default(), ..Default::default() }; - - let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); - - // Check that the EVM environment is set to default values - assert_eq!(evm.block, evm_env.block_env); - assert_eq!(evm.cfg, evm_env.cfg_env); - } - - #[test] - fn test_evm_with_env_inspector_and_custom_cfg() { - let evm_config = test_evm_config(); - let db = CacheDB::>::default(); - - let cfg = - CfgEnv::new().with_chain_id(111).with_spec_and_mainnet_gas_params(OpSpecId::default()); - let block = BlockEnv::default(); - let evm_env = EvmEnv { block_env: block, cfg_env: cfg.clone() }; - - let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); - - // Check that the EVM environment is set with custom configuration - assert_eq!(evm.cfg, cfg); - assert_eq!(evm.block, evm_env.block_env); - } - - #[test] - fn test_evm_with_env_inspector_and_custom_block_tx() { - let evm_config = test_evm_config(); - let db = CacheDB::>::default(); - - // Create custom block and tx environment - let block = BlockEnv { - basefee: 1000, - gas_limit: 10_000_000, - number: U256::from(42), - ..Default::default() - }; - let evm_env = EvmEnv { block_env: block, ..Default::default() }; - - let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); - - // Verify that the block and transaction environments are set correctly - assert_eq!(evm.block, evm_env.block_env); - } - - #[test] - fn test_evm_with_env_inspector_and_spec_id() { - let evm_config = test_evm_config(); - let db = CacheDB::>::default(); - - let evm_env = EvmEnv { - cfg_env: CfgEnv::new().with_spec_and_mainnet_gas_params(OpSpecId::ECOTONE), - ..Default::default() - }; - - let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), 
NoOpInspector {}); - - // Check that the spec ID is set properly - assert_eq!(evm.cfg, evm_env.cfg_env); - assert_eq!(evm.block, evm_env.block_env); - } - - #[test] - fn receipts_by_block_hash() { - // Create a default recovered block - let block: RecoveredBlock = Default::default(); - - // Define block hashes for block1 and block2 - let block1_hash = B256::new([0x01; 32]); - let block2_hash = B256::new([0x02; 32]); - - // Clone the default block into block1 and block2 - let mut block1 = block.clone(); - let mut block2 = block; - - // Set the hashes of block1 and block2 - block1.set_block_number(10); - block1.set_hash(block1_hash); - - block2.set_block_number(11); - block2.set_hash(block2_hash); - - // Create a random receipt object, receipt1 - let receipt1 = OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }); - - // Create another random receipt object, receipt2 - let receipt2 = OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 1325345, - logs: vec![], - status: true.into(), - }); - - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![receipt1.clone()], vec![receipt2]]; - - // Create an ExecutionOutcome object with the created bundle, receipts, an empty requests - // vector, and first_block set to 10 - let execution_outcome = ExecutionOutcome:: { - bundle: Default::default(), - receipts, - requests: vec![], - first_block: 10, - }; - - // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, - // including block1_hash and block2_hash, and the execution_outcome - let chain: Chain = - Chain::new([block1, block2], execution_outcome.clone(), BTreeMap::new()); - - // Assert that the proper receipt vector is returned for block1_hash - assert_eq!(chain.receipts_by_block_hash(block1_hash), Some(vec![&receipt1])); - - // Create an ExecutionOutcome object with a single receipt vector containing receipt1 - let execution_outcome1 = ExecutionOutcome { - bundle: 
Default::default(), - receipts: vec![vec![receipt1]], - requests: vec![], - first_block: 10, - }; - - // Assert that the execution outcome at the first block contains only the first receipt - assert_eq!(chain.execution_outcome_at_block(10), Some(execution_outcome1)); - - // Assert that the execution outcome at the tip block contains the whole execution outcome - assert_eq!(chain.execution_outcome_at_block(11), Some(execution_outcome)); - } - - #[test] - fn test_initialization() { - // Create a new BundleState object with initial data - let bundle = BundleState::new( - vec![(Address::new([2; 20]), None, Some(AccountInfo::default()), HashMap::default())], - vec![vec![(Address::new([2; 20]), None, vec![])]], - vec![], - ); - - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }))]]; - - // Create a Requests object with a vector of requests - let requests = vec![Requests::new(vec![bytes!("dead"), bytes!("beef"), bytes!("beebee")])]; - - // Define the first block number - let first_block = 123; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = ExecutionOutcome { - bundle: bundle.clone(), - receipts: receipts.clone(), - requests: requests.clone(), - first_block, - }; - - // Assert that creating a new ExecutionOutcome using the constructor matches exec_res - assert_eq!( - ExecutionOutcome::new(bundle, receipts.clone(), first_block, requests.clone()), - exec_res - ); - - // Create a BundleStateInit object and insert initial data - let mut state_init: BundleStateInit = HashMap::default(); - state_init - .insert(Address::new([2; 20]), (None, Some(Account::default()), HashMap::default())); - - // Create a HashMap for account reverts and insert initial data - let mut revert_inner: HashMap = HashMap::default(); - revert_inner.insert(Address::new([2; 20]), (None, 
vec![])); - - // Create a RevertsInit object and insert the revert_inner data - let mut revert_init: RevertsInit = HashMap::default(); - revert_init.insert(123, revert_inner); - - // Assert that creating a new ExecutionOutcome using the new_init method matches - // exec_res - assert_eq!( - ExecutionOutcome::new_init( - state_init, - revert_init, - vec![], - receipts, - first_block, - requests, - ), - exec_res - ); - } - - #[test] - fn test_block_number_to_index() { - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }))]]; - - // Define the first block number - let first_block = 123; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = ExecutionOutcome { - bundle: Default::default(), - receipts, - requests: vec![], - first_block, - }; - - // Test before the first block - assert_eq!(exec_res.block_number_to_index(12), None); - - // Test after the first block but index larger than receipts length - assert_eq!(exec_res.block_number_to_index(133), None); - - // Test after the first block - assert_eq!(exec_res.block_number_to_index(123), Some(0)); - } - - #[test] - fn test_get_logs() { - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![Log::::default()], - status: true.into(), - })]]; - - // Define the first block number - let first_block = 123; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = ExecutionOutcome { - bundle: Default::default(), - receipts, - requests: vec![], - first_block, - }; - - // Get logs for block number 123 - let logs: Vec<&Log> = exec_res.logs(123).unwrap().collect(); - - // Assert that the logs match the expected logs - assert_eq!(logs, 
vec![&Log::::default()]); - } - - #[test] - fn test_receipts_by_block() { - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![Log::::default()], - status: true.into(), - }))]]; - - // Define the first block number - let first_block = 123; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = ExecutionOutcome { - bundle: Default::default(), // Default value for bundle - receipts, // Include the created receipts - requests: vec![], // Empty vector for requests - first_block, // Set the first block number - }; - - // Get receipts for block number 123 and convert the result into a vector - let receipts_by_block: Vec<_> = exec_res.receipts_by_block(123).iter().collect(); - - // Assert that the receipts for block number 123 match the expected receipts - assert_eq!( - receipts_by_block, - vec![&Some(OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![Log::::default()], - status: true.into(), - }))] - ); - } - - #[test] - fn test_receipts_len() { - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![Log::::default()], - status: true.into(), - }))]]; - - // Create an empty Receipts object - let receipts_empty = vec![]; - - // Define the first block number - let first_block = 123; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = ExecutionOutcome { - bundle: Default::default(), // Default value for bundle - receipts, // Include the created receipts - requests: vec![], // Empty vector for requests - first_block, // Set the first block number - }; - - // Assert that the length of receipts in exec_res is 1 - assert_eq!(exec_res.len(), 1); - - // Assert that exec_res is not empty - 
assert!(!exec_res.is_empty()); - - // Create a ExecutionOutcome object with an empty Receipts object - let exec_res_empty_receipts: ExecutionOutcome = ExecutionOutcome { - bundle: Default::default(), // Default value for bundle - receipts: receipts_empty, // Include the empty receipts - requests: vec![], // Empty vector for requests - first_block, // Set the first block number - }; - - // Assert that the length of receipts in exec_res_empty_receipts is 0 - assert_eq!(exec_res_empty_receipts.len(), 0); - - // Assert that exec_res_empty_receipts is empty - assert!(exec_res_empty_receipts.is_empty()); - } - - #[test] - fn test_revert_to() { - // Create a random receipt object - let receipt = OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }); - - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![vec![Some(receipt.clone())], vec![Some(receipt.clone())]]; - - // Define the first block number - let first_block = 123; - - // Create a request. - let request = bytes!("deadbeef"); - - // Create a vector of Requests containing the request. - let requests = - vec![Requests::new(vec![request.clone()]), Requests::new(vec![request.clone()])]; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let mut exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; - - // Assert that the revert_to method returns true when reverting to the initial block number. - assert!(exec_res.revert_to(123)); - - // Assert that the receipts are properly cut after reverting to the initial block number. - assert_eq!(exec_res.receipts, vec![vec![Some(receipt)]]); - - // Assert that the requests are properly cut after reverting to the initial block number. 
- assert_eq!(exec_res.requests, vec![Requests::new(vec![request])]); - - // Assert that the revert_to method returns false when attempting to revert to a block - // number greater than the initial block number. - assert!(!exec_res.revert_to(133)); - - // Assert that the revert_to method returns false when attempting to revert to a block - // number less than the initial block number. - assert!(!exec_res.revert_to(10)); - } - - #[test] - fn test_extend_execution_outcome() { - // Create a Receipt object with specific attributes. - let receipt = OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }); - - // Create a Receipts object containing the receipt. - let receipts = vec![vec![Some(receipt.clone())]]; - - // Create a request. - let request = bytes!("deadbeef"); - - // Create a vector of Requests containing the request. - let requests = vec![Requests::new(vec![request.clone()])]; - - // Define the initial block number. - let first_block = 123; - - // Create an ExecutionOutcome object. - let mut exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; - - // Extend the ExecutionOutcome object by itself. - exec_res.extend(exec_res.clone()); - - // Assert the extended ExecutionOutcome matches the expected outcome. 
- assert_eq!( - exec_res, - ExecutionOutcome { - bundle: Default::default(), - receipts: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], - requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], - first_block: 123, - } - ); - } - - #[test] - fn test_split_at_execution_outcome() { - // Create a random receipt object - let receipt = OpReceipt::Legacy(Receipt:: { - cumulative_gas_used: 46913, - logs: vec![], - status: true.into(), - }); - - // Create a Receipts object with a vector of receipt vectors - let receipts = vec![ - vec![Some(receipt.clone())], - vec![Some(receipt.clone())], - vec![Some(receipt.clone())], - ]; - - // Define the first block number - let first_block = 123; - - // Create a request. - let request = bytes!("deadbeef"); - - // Create a vector of Requests containing the request. - let requests = vec![ - Requests::new(vec![request.clone()]), - Requests::new(vec![request.clone()]), - Requests::new(vec![request.clone()]), - ]; - - // Create a ExecutionOutcome object with the created bundle, receipts, requests, and - // first_block - let exec_res = - ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; - - // Split the ExecutionOutcome at block number 124 - let result = exec_res.clone().split_at(124); - - // Define the expected lower ExecutionOutcome after splitting - let lower_execution_outcome = ExecutionOutcome { - bundle: Default::default(), - receipts: vec![vec![Some(receipt.clone())]], - requests: vec![Requests::new(vec![request.clone()])], - first_block, - }; - - // Define the expected higher ExecutionOutcome after splitting - let higher_execution_outcome = ExecutionOutcome { - bundle: Default::default(), - receipts: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], - requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], - first_block: 124, - }; - - // Assert that the split result matches the expected lower and higher outcomes - 
assert_eq!(result.0, Some(lower_execution_outcome)); - assert_eq!(result.1, higher_execution_outcome); - - // Assert that splitting at the first block number returns None for the lower outcome - assert_eq!(exec_res.clone().split_at(123), (None, exec_res)); - } -} diff --git a/op-reth/crates/flashblocks/Cargo.toml b/op-reth/crates/flashblocks/Cargo.toml deleted file mode 100644 index e0754aab95e..00000000000 --- a/op-reth/crates/flashblocks/Cargo.toml +++ /dev/null @@ -1,60 +0,0 @@ -[package] -name = "reth-optimism-flashblocks" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# reth -reth-optimism-primitives = { workspace = true, features = ["serde"] } -reth-chain-state = { workspace = true, features = ["serde"] } -reth-primitives-traits = { workspace = true, features = ["serde"] } -reth-engine-primitives = { workspace = true, features = ["std"] } -reth-execution-types = { workspace = true, features = ["serde"] } -reth-evm.workspace = true -reth-revm.workspace = true -reth-optimism-payload-builder.workspace = true -reth-rpc-eth-types.workspace = true -reth-errors.workspace = true -reth-payload-primitives.workspace = true -reth-storage-api.workspace = true -reth-tasks.workspace = true -reth-metrics.workspace = true - -# alloy -alloy-eips = { workspace = true, features = ["serde"] } -alloy-primitives = { workspace = true, features = ["serde"] } -alloy-rpc-types-engine = { workspace = true, features = ["serde"] } -alloy-consensus.workspace = true - -# op-alloy -op-alloy-rpc-types-engine = { workspace = true, features = ["k256"] } - -# io -tokio.workspace = true -tokio-tungstenite = { workspace = true, features = ["rustls-tls-native-roots"] } -serde_json.workspace = true -url.workspace = true -futures-util.workspace = true -brotli.workspace = true - -# debug -tracing.workspace = true -metrics.workspace = true 
- -# errors -eyre.workspace = true - -ringbuffer.workspace = true -derive_more.workspace = true - -[dev-dependencies] -test-case.workspace = true -alloy-consensus.workspace = true -op-alloy-consensus.workspace = true diff --git a/op-reth/crates/flashblocks/src/lib.rs b/op-reth/crates/flashblocks/src/lib.rs deleted file mode 100644 index c283d8e221f..00000000000 --- a/op-reth/crates/flashblocks/src/lib.rs +++ /dev/null @@ -1,85 +0,0 @@ -//! A downstream integration of Flashblocks. - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -use reth_primitives_traits::NodePrimitives; -use std::sync::Arc; - -// Included to enable serde feature for OpReceipt type used transitively -use reth_optimism_primitives as _; - -mod consensus; -pub use consensus::FlashBlockConsensusClient; - -mod payload; -pub use payload::{FlashBlock, PendingFlashBlock}; - -mod sequence; -pub use sequence::{FlashBlockCompleteSequence, FlashBlockPendingSequence}; - -mod service; -pub use service::{FlashBlockBuildInfo, FlashBlockService}; - -mod worker; - -mod cache; - -pub mod validation; - -#[cfg(test)] -mod test_utils; - -mod ws; -pub use ws::{FlashBlockDecoder, WsConnect, WsFlashBlockStream}; - -/// Receiver of the most recent [`PendingFlashBlock`] built out of [`FlashBlock`]s. -/// -/// [`FlashBlock`]: crate::FlashBlock -pub type PendingBlockRx = tokio::sync::watch::Receiver>>; - -/// Receiver of the sequences of [`FlashBlock`]s built. -/// -/// [`FlashBlock`]: crate::FlashBlock -pub type FlashBlockCompleteSequenceRx = - tokio::sync::broadcast::Receiver; - -/// Receiver of received [`FlashBlock`]s from the (websocket) subscription. 
-/// -/// [`FlashBlock`]: crate::FlashBlock -pub type FlashBlockRx = tokio::sync::broadcast::Receiver>; - -/// Receiver that signals whether a [`FlashBlock`] is currently being built. -pub type InProgressFlashBlockRx = tokio::sync::watch::Receiver>; - -/// Container for all flashblocks-related listeners. -/// -/// Groups together the channels for flashblock-related updates. -#[derive(Debug)] -pub struct FlashblocksListeners { - /// Receiver of the most recent executed [`PendingFlashBlock`] built out of [`FlashBlock`]s. - pub pending_block_rx: PendingBlockRx, - /// Subscription channel of the complete sequences of [`FlashBlock`]s built. - pub flashblocks_sequence: tokio::sync::broadcast::Sender, - /// Receiver that signals whether a [`FlashBlock`] is currently being built. - pub in_progress_rx: InProgressFlashBlockRx, - /// Subscription channel for received flashblocks from the (websocket) connection. - pub received_flashblocks: tokio::sync::broadcast::Sender>, -} - -impl FlashblocksListeners { - /// Creates a new [`FlashblocksListeners`] with the given channels. - pub const fn new( - pending_block_rx: PendingBlockRx, - flashblocks_sequence: tokio::sync::broadcast::Sender, - in_progress_rx: InProgressFlashBlockRx, - received_flashblocks: tokio::sync::broadcast::Sender>, - ) -> Self { - Self { pending_block_rx, flashblocks_sequence, in_progress_rx, received_flashblocks } - } -} diff --git a/op-reth/crates/flashblocks/src/test_utils.rs b/op-reth/crates/flashblocks/src/test_utils.rs deleted file mode 100644 index deea2cf52fe..00000000000 --- a/op-reth/crates/flashblocks/src/test_utils.rs +++ /dev/null @@ -1,331 +0,0 @@ -//! Test utilities for flashblocks. -//! -//! Provides a factory for creating test flashblocks with automatic timestamp management. -//! -//! # Examples -//! -//! ## Simple: Create a flashblock sequence for the same block -//! -//! ```ignore -//! let factory = TestFlashBlockFactory::new(); // Default 2 second block time -//! 
let fb0 = factory.flashblock_at(0).build(); -//! let fb1 = factory.flashblock_after(&fb0).build(); -//! let fb2 = factory.flashblock_after(&fb1).build(); -//! ``` -//! -//! ## Create flashblocks with transactions -//! -//! ```ignore -//! let factory = TestFlashBlockFactory::new(); -//! let fb0 = factory.flashblock_at(0).build(); -//! let txs = vec![Bytes::from_static(&[1, 2, 3])]; -//! let fb1 = factory.flashblock_after(&fb0).transactions(txs).build(); -//! ``` -//! -//! ## Test across multiple blocks (timestamps auto-increment) -//! -//! ```ignore -//! let factory = TestFlashBlockFactory::new(); // Default 2 second blocks -//! -//! // Block 100 at timestamp 1000000 -//! let fb0 = factory.flashblock_at(0).build(); -//! let fb1 = factory.flashblock_after(&fb0).build(); -//! -//! // Block 101 at timestamp 1000002 (auto-incremented by block_time) -//! let fb2 = factory.flashblock_for_next_block(&fb1).build(); -//! let fb3 = factory.flashblock_after(&fb2).build(); -//! ``` -//! -//! ## Full control with builder -//! -//! ```ignore -//! let factory = TestFlashBlockFactory::new(); -//! let fb = factory.builder() -//! .block_number(100) -//! .parent_hash(specific_hash) -//! .state_root(computed_root) -//! .transactions(txs) -//! .build(); -//! ``` - -use crate::FlashBlock; -use alloy_primitives::{Address, Bloom, Bytes, B256, U256}; -use alloy_rpc_types_engine::PayloadId; -use op_alloy_rpc_types_engine::{ - OpFlashblockPayloadBase, OpFlashblockPayloadDelta, OpFlashblockPayloadMetadata, -}; - -/// Factory for creating test flashblocks with automatic timestamp management. -/// -/// Tracks `block_time` to automatically increment timestamps when creating new blocks. -/// Returns builders that can be further customized before calling `build()`. 
-/// -/// # Examples -/// -/// ```ignore -/// let factory = TestFlashBlockFactory::new(); // Default 2 second block time -/// let fb0 = factory.flashblock_at(0).build(); -/// let fb1 = factory.flashblock_after(&fb0).build(); -/// let fb2 = factory.flashblock_for_next_block(&fb1).build(); // timestamp auto-increments -/// ``` -#[derive(Debug)] -pub(crate) struct TestFlashBlockFactory { - /// Block time in seconds (used to auto-increment timestamps) - block_time: u64, - /// Starting timestamp for the first block - base_timestamp: u64, - /// Current block number being tracked - current_block_number: u64, -} - -impl TestFlashBlockFactory { - /// Creates a new factory with a default block time of 2 seconds. - /// - /// Use [`with_block_time`](Self::with_block_time) to customize the block time. - pub(crate) fn new() -> Self { - Self { block_time: 2, base_timestamp: 1_000_000, current_block_number: 100 } - } - - pub(crate) fn with_block_time(mut self, block_time: u64) -> Self { - self.block_time = block_time; - self - } - - /// Creates a builder for a flashblock at the specified index (within the current block). - /// - /// Returns a builder with index set, allowing further customization before building. - /// - /// # Examples - /// - /// ```ignore - /// let factory = TestFlashBlockFactory::new(); - /// let fb0 = factory.flashblock_at(0).build(); // Simple usage - /// let fb1 = factory.flashblock_at(1).state_root(specific_root).build(); // Customize - /// ``` - pub(crate) fn flashblock_at(&self, index: u64) -> TestFlashBlockBuilder { - self.builder().index(index).block_number(self.current_block_number) - } - - /// Creates a builder for a flashblock following the previous one in the same sequence. - /// - /// Automatically increments the index and maintains `block_number` and `payload_id`. - /// Returns a builder allowing further customization. 
- /// - /// # Examples - /// - /// ```ignore - /// let factory = TestFlashBlockFactory::new(); - /// let fb0 = factory.flashblock_at(0).build(); - /// let fb1 = factory.flashblock_after(&fb0).build(); // Simple - /// let fb2 = factory.flashblock_after(&fb1).transactions(txs).build(); // With txs - /// ``` - pub(crate) fn flashblock_after(&self, previous: &FlashBlock) -> TestFlashBlockBuilder { - let parent_hash = - previous.base.as_ref().map(|b| b.parent_hash).unwrap_or(previous.diff.block_hash); - - self.builder() - .index(previous.index + 1) - .block_number(previous.metadata.block_number) - .payload_id(previous.payload_id) - .parent_hash(parent_hash) - .timestamp(previous.base.as_ref().map(|b| b.timestamp).unwrap_or(self.base_timestamp)) - } - - /// Creates a builder for a flashblock for the next block, starting a new sequence at index 0. - /// - /// Increments block number, uses previous `block_hash` as `parent_hash`, generates new - /// `payload_id`, and automatically increments the timestamp by `block_time`. - /// Returns a builder allowing further customization. 
- /// - /// # Examples - /// - /// ```ignore - /// let factory = TestFlashBlockFactory::new(); // 2 second blocks - /// let fb0 = factory.flashblock_at(0).build(); // Block 100, timestamp 1000000 - /// let fb1 = factory.flashblock_for_next_block(&fb0).build(); // Block 101, timestamp 1000002 - /// let fb2 = factory.flashblock_for_next_block(&fb1).transactions(txs).build(); // Customize - /// ``` - pub(crate) fn flashblock_for_next_block(&self, previous: &FlashBlock) -> TestFlashBlockBuilder { - let prev_timestamp = - previous.base.as_ref().map(|b| b.timestamp).unwrap_or(self.base_timestamp); - - self.builder() - .index(0) - .block_number(previous.metadata.block_number + 1) - .payload_id(PayloadId::new(B256::random().0[0..8].try_into().unwrap())) - .parent_hash(previous.diff.block_hash) - .timestamp(prev_timestamp + self.block_time) - } - - /// Returns a custom builder for full control over flashblock creation. - /// - /// Use this when the convenience methods don't provide enough control. - /// - /// # Examples - /// - /// ```ignore - /// let factory = TestFlashBlockFactory::new(); - /// let fb = factory.builder() - /// .index(5) - /// .block_number(200) - /// .parent_hash(specific_hash) - /// .state_root(computed_root) - /// .build(); - /// ``` - pub(crate) fn builder(&self) -> TestFlashBlockBuilder { - TestFlashBlockBuilder { - index: 0, - block_number: self.current_block_number, - payload_id: PayloadId::new([1u8; 8]), - parent_hash: B256::random(), - timestamp: self.base_timestamp, - base: None, - block_hash: B256::random(), - state_root: B256::ZERO, - receipts_root: B256::ZERO, - logs_bloom: Bloom::default(), - gas_used: 0, - transactions: vec![], - withdrawals: vec![], - withdrawals_root: B256::ZERO, - blob_gas_used: None, - } - } -} - -/// Custom builder for creating test flashblocks with full control. -/// -/// Created via [`TestFlashBlockFactory::builder()`]. 
-#[derive(Debug)] -pub(crate) struct TestFlashBlockBuilder { - index: u64, - block_number: u64, - payload_id: PayloadId, - parent_hash: B256, - timestamp: u64, - base: Option, - block_hash: B256, - state_root: B256, - receipts_root: B256, - logs_bloom: Bloom, - gas_used: u64, - transactions: Vec, - withdrawals: Vec, - withdrawals_root: B256, - blob_gas_used: Option, -} - -impl TestFlashBlockBuilder { - /// Sets the flashblock index. - pub(crate) fn index(mut self, index: u64) -> Self { - self.index = index; - self - } - - /// Sets the block number. - pub(crate) fn block_number(mut self, block_number: u64) -> Self { - self.block_number = block_number; - self - } - - /// Sets the payload ID. - pub(crate) fn payload_id(mut self, payload_id: PayloadId) -> Self { - self.payload_id = payload_id; - self - } - - /// Sets the parent hash. - pub(crate) fn parent_hash(mut self, parent_hash: B256) -> Self { - self.parent_hash = parent_hash; - self - } - - /// Sets the timestamp. - pub(crate) fn timestamp(mut self, timestamp: u64) -> Self { - self.timestamp = timestamp; - self - } - - /// Sets the base payload. Automatically created for index 0 if not set. - #[allow(dead_code)] - pub(crate) fn base(mut self, base: OpFlashblockPayloadBase) -> Self { - self.base = Some(base); - self - } - - /// Sets the block hash in the diff. - #[allow(dead_code)] - pub(crate) fn block_hash(mut self, block_hash: B256) -> Self { - self.block_hash = block_hash; - self - } - - /// Sets the state root in the diff. - #[allow(dead_code)] - pub(crate) fn state_root(mut self, state_root: B256) -> Self { - self.state_root = state_root; - self - } - - /// Sets the receipts root in the diff. - #[allow(dead_code)] - pub(crate) fn receipts_root(mut self, receipts_root: B256) -> Self { - self.receipts_root = receipts_root; - self - } - - /// Sets the transactions in the diff. 
- pub(crate) fn transactions(mut self, transactions: Vec) -> Self { - self.transactions = transactions; - self - } - - /// Sets the gas used in the diff. - #[allow(dead_code)] - pub(crate) fn gas_used(mut self, gas_used: u64) -> Self { - self.gas_used = gas_used; - self - } - - /// Builds the flashblock. - /// - /// If index is 0 and no base was explicitly set, creates a default base. - pub(crate) fn build(mut self) -> FlashBlock { - // Auto-create base for index 0 if not set - if self.index == 0 && self.base.is_none() { - self.base = Some(OpFlashblockPayloadBase { - parent_hash: self.parent_hash, - parent_beacon_block_root: B256::random(), - fee_recipient: Address::default(), - prev_randao: B256::random(), - block_number: self.block_number, - gas_limit: 30_000_000, - timestamp: self.timestamp, - extra_data: Default::default(), - base_fee_per_gas: U256::from(1_000_000_000u64), - }); - } - - FlashBlock { - index: self.index, - payload_id: self.payload_id, - base: self.base, - diff: OpFlashblockPayloadDelta { - block_hash: self.block_hash, - state_root: self.state_root, - receipts_root: self.receipts_root, - logs_bloom: self.logs_bloom, - gas_used: self.gas_used, - transactions: self.transactions, - withdrawals: self.withdrawals, - withdrawals_root: self.withdrawals_root, - blob_gas_used: self.blob_gas_used, - }, - metadata: OpFlashblockPayloadMetadata { - block_number: self.block_number, - receipts: Default::default(), - new_account_balances: Default::default(), - }, - } - } -} diff --git a/op-reth/crates/flashblocks/src/ws/mod.rs b/op-reth/crates/flashblocks/src/ws/mod.rs deleted file mode 100644 index 651d83c916b..00000000000 --- a/op-reth/crates/flashblocks/src/ws/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -pub use stream::{WsConnect, WsFlashBlockStream}; - -mod decoding; -pub use decoding::FlashBlockDecoder; - -mod stream; diff --git a/op-reth/crates/flashblocks/src/ws/stream.rs b/op-reth/crates/flashblocks/src/ws/stream.rs deleted file mode 100644 index 
e46fd6d747f..00000000000 --- a/op-reth/crates/flashblocks/src/ws/stream.rs +++ /dev/null @@ -1,544 +0,0 @@ -use crate::{ws::FlashBlockDecoder, FlashBlock}; -use futures_util::{ - stream::{SplitSink, SplitStream}, - FutureExt, Sink, Stream, StreamExt, -}; -use std::{ - fmt::{Debug, Formatter}, - future::Future, - pin::Pin, - task::{ready, Context, Poll}, -}; -use tokio::net::TcpStream; -use tokio_tungstenite::{ - connect_async, - tungstenite::{protocol::CloseFrame, Bytes, Error, Message}, - MaybeTlsStream, WebSocketStream, -}; -use tracing::debug; -use url::Url; - -/// An asynchronous stream of [`FlashBlock`] from a websocket connection. -/// -/// The stream attempts to connect to a websocket URL and then decode each received item. -/// -/// If the connection fails, the error is returned and connection retried. The number of retries is -/// unbounded. -pub struct WsFlashBlockStream { - ws_url: Url, - state: State, - connector: Connector, - decoder: Box, - connect: ConnectFuture, - stream: Option, - sink: Option, -} - -impl WsFlashBlockStream { - /// Creates a new websocket stream over `ws_url`. - pub fn new(ws_url: Url) -> Self { - Self { - ws_url, - state: State::default(), - connector: WsConnector, - decoder: Box::new(()), - connect: Box::pin(async move { Err(Error::ConnectionClosed)? }), - stream: None, - sink: None, - } - } - - /// Sets the [`FlashBlock`] decoder for the websocket stream. - pub fn with_decoder(self, decoder: Box) -> Self { - Self { decoder, ..self } - } -} - -impl WsFlashBlockStream { - /// Creates a new websocket stream over `ws_url`. - pub fn with_connector(ws_url: Url, connector: C) -> Self { - Self { - ws_url, - state: State::default(), - decoder: Box::new(()), - connector, - connect: Box::pin(async move { Err(Error::ConnectionClosed)? 
}), - stream: None, - sink: None, - } - } -} - -impl Stream for WsFlashBlockStream -where - Str: Stream> + Unpin, - S: Sink + Send + Unpin, - C: WsConnect + Clone + Send + 'static + Unpin, -{ - type Item = eyre::Result; - - fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - let this = self.get_mut(); - - 'start: loop { - if this.state == State::Initial { - this.connect(); - } - - if this.state == State::Connect { - match ready!(this.connect.poll_unpin(cx)) { - Ok((sink, stream)) => this.stream(sink, stream), - Err(err) => { - this.state = State::Initial; - - return Poll::Ready(Some(Err(err))); - } - } - } - - while let State::Stream(msg) = &mut this.state { - if msg.is_some() { - let mut sink = Pin::new(this.sink.as_mut().unwrap()); - let _ = ready!(sink.as_mut().poll_ready(cx)); - if let Some(pong) = msg.take() { - let _ = sink.as_mut().start_send(pong); - } - let _ = ready!(sink.as_mut().poll_flush(cx)); - } - - let Some(msg) = ready!(this - .stream - .as_mut() - .expect("Stream state should be unreachable without stream") - .poll_next_unpin(cx)) - else { - this.state = State::Initial; - - continue 'start; - }; - - match msg { - Ok(Message::Binary(bytes)) => { - return Poll::Ready(Some(this.decoder.decode(bytes))) - } - Ok(Message::Text(bytes)) => { - return Poll::Ready(Some(this.decoder.decode(bytes.into()))) - } - Ok(Message::Ping(bytes)) => this.ping(bytes), - Ok(Message::Close(frame)) => this.close(frame), - Ok(msg) => { - debug!(target: "flashblocks", "Received unexpected message: {:?}", msg) - } - Err(err) => return Poll::Ready(Some(Err(err.into()))), - } - } - } - } -} - -impl WsFlashBlockStream -where - C: WsConnect + Clone + Send + 'static, -{ - fn connect(&mut self) { - let ws_url = self.ws_url.clone(); - let mut connector = self.connector.clone(); - - Pin::new(&mut self.connect).set(Box::pin(async move { connector.connect(ws_url).await })); - - self.state = State::Connect; - } - - fn stream(&mut self, sink: S, stream: Stream) { - 
self.sink.replace(sink); - self.stream.replace(stream); - - self.state = State::Stream(None); - } - - fn ping(&mut self, pong: Bytes) { - if let State::Stream(current) = &mut self.state { - current.replace(Message::Pong(pong)); - } - } - - fn close(&mut self, frame: Option) { - if let State::Stream(current) = &mut self.state { - current.replace(Message::Close(frame)); - } - } -} - -impl Debug for WsFlashBlockStream { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("FlashBlockStream") - .field("ws_url", &self.ws_url) - .field("state", &self.state) - .field("connector", &self.connector) - .field("connect", &"Pin>>") - .field("stream", &self.stream) - .finish() - } -} - -#[derive(Default, Debug, Eq, PartialEq)] -enum State { - #[default] - Initial, - Connect, - Stream(Option), -} - -type Ws = WebSocketStream>; -type WsStream = SplitStream; -type WsSink = SplitSink; -type ConnectFuture = - Pin> + Send + 'static>>; - -/// The `WsConnect` trait allows for connecting to a websocket. -/// -/// Implementors of the `WsConnect` trait are called 'connectors'. -/// -/// Connectors are defined by one method, [`connect()`]. A call to [`connect()`] attempts to -/// establish a secure websocket connection and return an asynchronous stream of [`Message`]s -/// wrapped in a [`Result`]. -/// -/// [`connect()`]: Self::connect -pub trait WsConnect { - /// An associated `Stream` of [`Message`]s wrapped in a [`Result`] that this connection returns. - type Stream; - - /// An associated `Sink` of [`Message`]s that this connection sends. - type Sink; - - /// Asynchronously connects to a websocket hosted on `ws_url`. - /// - /// See the [`WsConnect`] documentation for details. - fn connect( - &mut self, - ws_url: Url, - ) -> impl Future> + Send; -} - -/// Establishes a secure websocket subscription. -/// -/// See the [`WsConnect`] documentation for details. 
-#[derive(Debug, Clone)] -pub struct WsConnector; - -impl WsConnect for WsConnector { - type Stream = WsStream; - type Sink = WsSink; - - async fn connect(&mut self, ws_url: Url) -> eyre::Result<(WsSink, WsStream)> { - let (stream, _response) = connect_async(ws_url.as_str()).await?; - - Ok(stream.split()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::bytes::Bytes; - use brotli::enc::BrotliEncoderParams; - use std::{future, iter}; - use tokio_tungstenite::tungstenite::{ - protocol::frame::{coding::CloseCode, Frame}, - Error, - }; - - /// A `FakeConnector` creates [`FakeStream`]. - /// - /// It simulates the websocket stream instead of connecting to a real websocket. - #[derive(Clone)] - struct FakeConnector(FakeStream); - - /// A `FakeConnectorWithSink` creates [`FakeStream`] and [`FakeSink`]. - /// - /// It simulates the websocket stream instead of connecting to a real websocket. It also accepts - /// messages into an in-memory buffer. - #[derive(Clone)] - struct FakeConnectorWithSink(FakeStream); - - /// Simulates a websocket stream while using a preprogrammed set of messages instead. 
- #[derive(Default)] - struct FakeStream(Vec>); - - impl FakeStream { - fn new(mut messages: Vec>) -> Self { - messages.reverse(); - - Self(messages) - } - } - - impl Clone for FakeStream { - fn clone(&self) -> Self { - Self( - self.0 - .iter() - .map(|v| match v { - Ok(msg) => Ok(msg.clone()), - Err(err) => Err(match err { - Error::AttackAttempt => Error::AttackAttempt, - err => unimplemented!("Cannot clone this error: {err}"), - }), - }) - .collect(), - ) - } - } - - impl Stream for FakeStream { - type Item = Result; - - fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll> { - let this = self.get_mut(); - - Poll::Ready(this.0.pop()) - } - } - - #[derive(Clone)] - struct NoopSink; - - impl Sink for NoopSink { - type Error = (); - - fn poll_ready( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll> { - unimplemented!() - } - - fn start_send(self: Pin<&mut Self>, _item: T) -> Result<(), Self::Error> { - unimplemented!() - } - - fn poll_flush( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll> { - unimplemented!() - } - - fn poll_close( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll> { - unimplemented!() - } - } - - /// Receives [`Message`]s and stores them. A call to `start_send` first buffers the message - /// to simulate flushing behavior. 
- #[derive(Clone, Default)] - struct FakeSink(Option, Vec); - - impl Sink for FakeSink { - type Error = (); - - fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { - self.poll_flush(cx) - } - - fn start_send(self: Pin<&mut Self>, item: Message) -> Result<(), Self::Error> { - self.get_mut().0.replace(item); - Ok(()) - } - - fn poll_flush( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll> { - let this = self.get_mut(); - if let Some(item) = this.0.take() { - this.1.push(item); - } - Poll::Ready(Ok(())) - } - - fn poll_close( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll> { - Poll::Ready(Ok(())) - } - } - - impl WsConnect for FakeConnector { - type Stream = FakeStream; - type Sink = NoopSink; - - fn connect( - &mut self, - _ws_url: Url, - ) -> impl Future> + Send { - future::ready(Ok((NoopSink, self.0.clone()))) - } - } - - impl>> From for FakeConnector { - fn from(value: T) -> Self { - Self(FakeStream::new(value.into_iter().collect())) - } - } - - impl WsConnect for FakeConnectorWithSink { - type Stream = FakeStream; - type Sink = FakeSink; - - fn connect( - &mut self, - _ws_url: Url, - ) -> impl Future> + Send { - future::ready(Ok((FakeSink::default(), self.0.clone()))) - } - } - - impl>> From for FakeConnectorWithSink { - fn from(value: T) -> Self { - Self(FakeStream::new(value.into_iter().collect())) - } - } - - /// Repeatedly fails to connect with the given error message. 
- #[derive(Clone)] - struct FailingConnector(String); - - impl WsConnect for FailingConnector { - type Stream = FakeStream; - type Sink = NoopSink; - - fn connect( - &mut self, - _ws_url: Url, - ) -> impl Future> + Send { - future::ready(Err(eyre::eyre!("{}", &self.0))) - } - } - - fn to_json_message, F: Fn(B) -> Message>( - wrapper_f: F, - ) -> impl Fn(&FlashBlock) -> Result + use { - move |block| to_json_message_using(block, &wrapper_f) - } - - fn to_json_binary_message(block: &FlashBlock) -> Result { - to_json_message_using(block, Message::Binary) - } - - fn to_json_message_using, F: Fn(B) -> Message>( - block: &FlashBlock, - wrapper_f: F, - ) -> Result { - Ok(wrapper_f(B::try_from(Bytes::from(serde_json::to_vec(block).unwrap())).unwrap())) - } - - fn to_brotli_message(block: &FlashBlock) -> Result { - let json = serde_json::to_vec(block).unwrap(); - let mut compressed = Vec::new(); - brotli::BrotliCompress( - &mut json.as_slice(), - &mut compressed, - &BrotliEncoderParams::default(), - )?; - - Ok(Message::Binary(Bytes::from(compressed))) - } - - fn flashblock() -> FlashBlock { - Default::default() - } - - #[test_case::test_case(to_json_message(Message::Binary); "json binary")] - #[test_case::test_case(to_json_message(Message::Text); "json UTF-8")] - #[test_case::test_case(to_brotli_message; "brotli")] - #[tokio::test] - async fn test_stream_decodes_messages_successfully( - to_message: impl Fn(&FlashBlock) -> Result, - ) { - let flashblocks = [flashblock()]; - let connector = FakeConnector::from(flashblocks.iter().map(to_message)); - let ws_url = "http://localhost".parse().unwrap(); - let stream = WsFlashBlockStream::with_connector(ws_url, connector); - - let actual_messages: Vec<_> = stream.take(1).map(Result::unwrap).collect().await; - let expected_messages = flashblocks.to_vec(); - - assert_eq!(actual_messages, expected_messages); - } - - #[test_case::test_case(Message::Pong(Bytes::from(b"test".as_slice())); "pong")] - 
#[test_case::test_case(Message::Frame(Frame::pong(b"test".as_slice())); "frame")] - #[tokio::test] - async fn test_stream_ignores_unexpected_message(message: Message) { - let flashblock = flashblock(); - let connector = FakeConnector::from([Ok(message), to_json_binary_message(&flashblock)]); - let ws_url = "http://localhost".parse().unwrap(); - let mut stream = WsFlashBlockStream::with_connector(ws_url, connector); - - let expected_message = flashblock; - let actual_message = - stream.next().await.expect("Binary message should not be ignored").unwrap(); - - assert_eq!(actual_message, expected_message) - } - - #[tokio::test] - async fn test_stream_passes_errors_through() { - let connector = FakeConnector::from([Err(Error::AttackAttempt)]); - let ws_url = "http://localhost".parse().unwrap(); - let stream = WsFlashBlockStream::with_connector(ws_url, connector); - - let actual_messages: Vec<_> = - stream.take(1).map(Result::unwrap_err).map(|e| format!("{e}")).collect().await; - let expected_messages = vec!["Attack attempt detected".to_owned()]; - - assert_eq!(actual_messages, expected_messages); - } - - #[tokio::test] - async fn test_connect_error_causes_retries() { - let tries = 3; - let error_msg = "test".to_owned(); - let connector = FailingConnector(error_msg.clone()); - let ws_url = "http://localhost".parse().unwrap(); - let stream = WsFlashBlockStream::with_connector(ws_url, connector); - - let actual_errors: Vec<_> = - stream.take(tries).map(Result::unwrap_err).map(|e| format!("{e}")).collect().await; - let expected_errors: Vec<_> = iter::repeat_n(error_msg, tries).collect(); - - assert_eq!(actual_errors, expected_errors); - } - - #[test_case::test_case( - Message::Close(Some(CloseFrame { code: CloseCode::Normal, reason: "test".into() })), - Message::Close(Some(CloseFrame { code: CloseCode::Normal, reason: "test".into() })); - "close" - )] - #[test_case::test_case( - Message::Ping(Bytes::from_static(&[1u8, 2, 3])), - Message::Pong(Bytes::from_static(&[1u8, 2, 
3])); - "ping" - )] - #[tokio::test] - async fn test_stream_responds_to_messages(msg: Message, expected_response: Message) { - let flashblock = flashblock(); - let messages = [Ok(msg), to_json_binary_message(&flashblock)]; - let connector = FakeConnectorWithSink::from(messages); - let ws_url = "http://localhost".parse().unwrap(); - let mut stream = WsFlashBlockStream::with_connector(ws_url, connector); - - let _ = stream.next().await; - - let expected_response = vec![expected_response]; - let FakeSink(actual_buffer, actual_response) = stream.sink.unwrap(); - - assert!(actual_buffer.is_none(), "buffer not flushed: {actual_buffer:#?}"); - assert_eq!(actual_response, expected_response); - } -} diff --git a/op-reth/crates/flashblocks/tests/it/stream.rs b/op-reth/crates/flashblocks/tests/it/stream.rs deleted file mode 100644 index 99e78fee23a..00000000000 --- a/op-reth/crates/flashblocks/tests/it/stream.rs +++ /dev/null @@ -1,15 +0,0 @@ -use futures_util::stream::StreamExt; -use reth_optimism_flashblocks::WsFlashBlockStream; - -#[tokio::test] -async fn test_streaming_flashblocks_from_remote_source_is_successful() { - let items = 3; - let ws_url = "wss://sepolia.flashblocks.base.org/ws".parse().unwrap(); - let stream = WsFlashBlockStream::new(ws_url); - - let blocks: Vec<_> = stream.take(items).collect().await; - - for block in blocks { - assert!(block.is_ok()); - } -} diff --git a/op-reth/crates/hardforks/Cargo.toml b/op-reth/crates/hardforks/Cargo.toml deleted file mode 100644 index 3de01c4b139..00000000000 --- a/op-reth/crates/hardforks/Cargo.toml +++ /dev/null @@ -1,36 +0,0 @@ -[package] -name = "reth-optimism-forks" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "Optimism hardforks used in op-reth" - -[lints] -workspace = true - -[dependencies] -# reth -reth-ethereum-forks.workspace = true - -# ethereum -alloy-op-hardforks.workspace = 
true -alloy-primitives.workspace = true - -# misc -once_cell.workspace = true - -[features] -default = ["std"] -std = [ - "alloy-primitives/std", - "once_cell/std", - "reth-ethereum-forks/std", -] -serde = [ - "alloy-primitives/serde", - "reth-ethereum-forks/serde", - "alloy-op-hardforks/serde", -] diff --git a/op-reth/crates/node/Cargo.toml b/op-reth/crates/node/Cargo.toml deleted file mode 100644 index 062b9c8c810..00000000000 --- a/op-reth/crates/node/Cargo.toml +++ /dev/null @@ -1,136 +0,0 @@ -[package] -name = "reth-optimism-node" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# reth -reth-chainspec.workspace = true -## ensure secp256k1 recovery with rayon support is activated -reth-primitives-traits = { workspace = true, features = ["secp256k1", "rayon"] } -reth-payload-builder.workspace = true -reth-consensus.workspace = true -reth-node-api.workspace = true -reth-node-builder.workspace = true -reth-tracing.workspace = true -reth-provider.workspace = true -reth-transaction-pool.workspace = true -reth-network.workspace = true -reth-evm.workspace = true -reth-rpc-server-types.workspace = true -reth-tasks = { workspace = true, optional = true } -reth-trie-common.workspace = true -reth-node-core.workspace = true -reth-rpc-engine-api.workspace = true -reth-engine-local = { workspace = true, features = ["op"] } -reth-rpc-api.workspace = true - -# op-reth -reth-optimism-payload-builder.workspace = true -reth-optimism-evm = { workspace = true, features = ["std", "rpc"] } -reth-optimism-rpc.workspace = true -reth-optimism-storage.workspace = true -reth-optimism-txpool.workspace = true -reth-optimism-chainspec.workspace = true -reth-optimism-consensus = { workspace = true, features = ["std"] } -reth-optimism-forks.workspace = true -reth-optimism-primitives = { workspace = true, features = ["serde", 
"serde-bincode-compat", "reth-codec"] } - -# revm with required optimism features -# Note: this must be kept to ensure all features are properly enabled/forwarded -revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg", "memory_limit"] } -op-revm.workspace = true - -# ethereum -alloy-primitives.workspace = true -op-alloy-consensus.workspace = true -op-alloy-rpc-types-engine.workspace = true -alloy-rpc-types-engine.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-consensus.workspace = true - -# async -tokio.workspace = true - -# misc -clap.workspace = true -serde.workspace = true -eyre.workspace = true -url.workspace = true - -# test-utils dependencies -reth-e2e-test-utils = { workspace = true, optional = true } -alloy-genesis = { workspace = true, optional = true } -serde_json = { workspace = true, optional = true } - -[dev-dependencies] -reth-optimism-node = { workspace = true, features = ["test-utils"] } -reth-db = { workspace = true, features = ["op", "test-utils"] } -reth-node-builder = { workspace = true, features = ["test-utils"] } -reth-provider = { workspace = true, features = ["test-utils"] } -reth-tasks.workspace = true -reth-payload-util.workspace = true -reth-revm = { workspace = true, features = ["std"] } -reth-rpc.workspace = true -reth-rpc-eth-types.workspace = true -reth-stages-types.workspace = true -reth-trie-db.workspace = true - -alloy-network.workspace = true -alloy-op-hardforks.workspace = true -futures.workspace = true -op-alloy-network.workspace = true - -[features] -default = ["reth-codec"] -asm-keccak = [ - "alloy-primitives/asm-keccak", - "reth-optimism-node/asm-keccak", - "reth-node-core/asm-keccak", - "revm/asm-keccak", -] -keccak-cache-global = [ - "alloy-primitives/keccak-cache-global", - "reth-node-core/keccak-cache-global", - "reth-optimism-node/keccak-cache-global", -] -js-tracer = [ - "reth-node-builder/js-tracer", - "reth-optimism-node/js-tracer", - "reth-rpc/js-tracer", - "reth-rpc-eth-types/js-tracer", 
-] -test-utils = [ - "reth-tasks", - "reth-e2e-test-utils", - "alloy-genesis", - "serde_json", - "reth-node-builder/test-utils", - "reth-chainspec/test-utils", - "reth-consensus/test-utils", - "reth-evm/test-utils", - "reth-network/test-utils", - "reth-payload-builder/test-utils", - "reth-revm/test-utils", - "reth-db/test-utils", - "reth-provider/test-utils", - "reth-transaction-pool/test-utils", - "reth-optimism-node/test-utils", - "reth-optimism-primitives/arbitrary", - "reth-primitives-traits/test-utils", - "reth-trie-common/test-utils", - "reth-trie-db/test-utils", - "reth-stages-types/test-utils", -] -reth-codec = ["reth-optimism-primitives/reth-codec"] - -[[test]] -name = "e2e_testsuite" -path = "tests/e2e-testsuite/main.rs" diff --git a/op-reth/crates/node/src/engine.rs b/op-reth/crates/node/src/engine.rs deleted file mode 100644 index 652bb44f473..00000000000 --- a/op-reth/crates/node/src/engine.rs +++ /dev/null @@ -1,511 +0,0 @@ -use alloy_consensus::BlockHeader; -use alloy_primitives::B256; -use alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1}; -use op_alloy_rpc_types_engine::{ - OpExecutionData, OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, - OpPayloadAttributes, -}; -use reth_consensus::ConsensusError; -use reth_node_api::{ - payload::{ - validate_parent_beacon_block_root_presence, EngineApiMessageVersion, - EngineObjectValidationError, MessageValidationKind, NewPayloadError, PayloadOrAttributes, - PayloadTypes, VersionSpecificValidationError, - }, - validate_version_specific_fields, BuiltPayload, EngineApiValidator, EngineTypes, - NodePrimitives, PayloadValidator, -}; -use reth_optimism_consensus::isthmus; -use reth_optimism_forks::OpHardforks; -use reth_optimism_payload_builder::{OpExecutionPayloadValidator, OpPayloadTypes}; -use reth_optimism_primitives::{OpBlock, L2_TO_L1_MESSAGE_PASSER_ADDRESS}; -use reth_primitives_traits::{Block, RecoveredBlock, SealedBlock, SignedTransaction}; -use 
reth_provider::StateProviderFactory; -use reth_trie_common::{HashedPostState, KeyHasher}; -use std::{marker::PhantomData, sync::Arc}; - -/// The types used in the optimism beacon consensus engine. -#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] -#[non_exhaustive] -pub struct OpEngineTypes { - _marker: PhantomData, -} - -impl> PayloadTypes for OpEngineTypes { - type ExecutionData = T::ExecutionData; - type BuiltPayload = T::BuiltPayload; - type PayloadAttributes = T::PayloadAttributes; - type PayloadBuilderAttributes = T::PayloadBuilderAttributes; - - fn block_to_payload( - block: SealedBlock< - <::Primitives as NodePrimitives>::Block, - >, - ) -> ::ExecutionData { - OpExecutionData::from_block_unchecked( - block.hash(), - &block.into_block().into_ethereum_block(), - ) - } -} - -impl> EngineTypes for OpEngineTypes -where - T::BuiltPayload: BuiltPayload> - + TryInto - + TryInto - + TryInto - + TryInto, -{ - type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; - type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; - type ExecutionPayloadEnvelopeV3 = OpExecutionPayloadEnvelopeV3; - type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; - type ExecutionPayloadEnvelopeV5 = OpExecutionPayloadEnvelopeV4; - type ExecutionPayloadEnvelopeV6 = OpExecutionPayloadEnvelopeV4; -} - -/// Validator for Optimism engine API. -#[derive(Debug)] -pub struct OpEngineValidator { - inner: OpExecutionPayloadValidator, - provider: P, - hashed_addr_l2tol1_msg_passer: B256, - phantom: PhantomData, -} - -impl OpEngineValidator { - /// Instantiates a new validator. 
- pub fn new(chain_spec: Arc, provider: P) -> Self { - let hashed_addr_l2tol1_msg_passer = KH::hash_key(L2_TO_L1_MESSAGE_PASSER_ADDRESS); - Self { - inner: OpExecutionPayloadValidator::new(chain_spec), - provider, - hashed_addr_l2tol1_msg_passer, - phantom: PhantomData, - } - } -} - -impl Clone for OpEngineValidator -where - P: Clone, - ChainSpec: OpHardforks, -{ - fn clone(&self) -> Self { - Self { - inner: OpExecutionPayloadValidator::new(self.inner.clone()), - provider: self.provider.clone(), - hashed_addr_l2tol1_msg_passer: self.hashed_addr_l2tol1_msg_passer, - phantom: Default::default(), - } - } -} - -impl OpEngineValidator -where - ChainSpec: OpHardforks, -{ - /// Returns the chain spec used by the validator. - #[inline] - pub fn chain_spec(&self) -> &ChainSpec { - self.inner.chain_spec() - } -} - -impl PayloadValidator for OpEngineValidator -where - P: StateProviderFactory + Unpin + 'static, - Tx: SignedTransaction + Unpin + 'static, - ChainSpec: OpHardforks + Send + Sync + 'static, - Types: PayloadTypes, -{ - type Block = alloy_consensus::Block; - - fn validate_block_post_execution_with_hashed_state( - &self, - state_updates: &HashedPostState, - block: &RecoveredBlock, - ) -> Result<(), ConsensusError> { - if self.chain_spec().is_isthmus_active_at_timestamp(block.timestamp()) { - let Ok(state) = self.provider.state_by_block_hash(block.parent_hash()) else { - // FIXME: we don't necessarily have access to the parent block here because the - // parent block isn't necessarily part of the canonical chain yet. Instead this - // function should receive the list of in memory blocks as input - return Ok(()) - }; - let predeploy_storage_updates = state_updates - .storages - .get(&self.hashed_addr_l2tol1_msg_passer) - .cloned() - .unwrap_or_default(); - isthmus::verify_withdrawals_root_prehashed( - predeploy_storage_updates, - state, - block.header(), - ) - .map_err(|err| { - ConsensusError::Other(format!("failed to verify block post-execution: {err}")) - })? 
- } - - Ok(()) - } - - fn convert_payload_to_block( - &self, - payload: OpExecutionData, - ) -> Result, NewPayloadError> { - self.inner.ensure_well_formed_payload(payload).map_err(NewPayloadError::other) - } -} - -impl EngineApiValidator for OpEngineValidator -where - Types: PayloadTypes< - PayloadAttributes = OpPayloadAttributes, - ExecutionData = OpExecutionData, - BuiltPayload: BuiltPayload>, - >, - P: StateProviderFactory + Unpin + 'static, - Tx: SignedTransaction + Unpin + 'static, - ChainSpec: OpHardforks + Send + Sync + 'static, -{ - fn validate_version_specific_fields( - &self, - version: EngineApiMessageVersion, - payload_or_attrs: PayloadOrAttributes< - '_, - Types::ExecutionData, - ::PayloadAttributes, - >, - ) -> Result<(), EngineObjectValidationError> { - validate_withdrawals_presence( - self.chain_spec(), - version, - payload_or_attrs.message_validation_kind(), - payload_or_attrs.timestamp(), - payload_or_attrs.withdrawals().is_some(), - )?; - validate_parent_beacon_block_root_presence( - self.chain_spec(), - version, - payload_or_attrs.message_validation_kind(), - payload_or_attrs.timestamp(), - payload_or_attrs.parent_beacon_block_root().is_some(), - ) - } - - fn ensure_well_formed_attributes( - &self, - version: EngineApiMessageVersion, - attributes: &::PayloadAttributes, - ) -> Result<(), EngineObjectValidationError> { - validate_version_specific_fields( - self.chain_spec(), - version, - PayloadOrAttributes::::PayloadAttributes( - attributes, - ), - )?; - - if attributes.gas_limit.is_none() { - return Err(EngineObjectValidationError::InvalidParams( - "MissingGasLimitInPayloadAttributes".to_string().into(), - )); - } - - if self - .chain_spec() - .is_holocene_active_at_timestamp(attributes.payload_attributes.timestamp) - { - let (elasticity, denominator) = - attributes.decode_eip_1559_params().ok_or_else(|| { - EngineObjectValidationError::InvalidParams( - "MissingEip1559ParamsInPayloadAttributes".to_string().into(), - ) - })?; - - if elasticity != 
0 && denominator == 0 { - return Err(EngineObjectValidationError::InvalidParams( - "Eip1559ParamsDenominatorZero".to_string().into(), - )); - } else if denominator != 0 && elasticity == 0 { - return Err(EngineObjectValidationError::InvalidParams( - "Eip1559ParamsElasticityZero".to_string().into(), - )); - } - } - - if self.chain_spec().is_jovian_active_at_timestamp(attributes.payload_attributes.timestamp) - { - if attributes.min_base_fee.is_none() { - return Err(EngineObjectValidationError::InvalidParams( - "MissingMinBaseFeeInPayloadAttributes".to_string().into(), - )); - } - } else if attributes.min_base_fee.is_some() { - return Err(EngineObjectValidationError::InvalidParams( - "MinBaseFeeNotAllowedBeforeJovian".to_string().into(), - )); - } - - Ok(()) - } -} - -/// Validates the presence of the `withdrawals` field according to the payload timestamp. -/// -/// After Canyon, withdrawals field must be [Some]. -/// Before Canyon, withdrawals field must be [None]; -/// -/// Canyon activates the Shanghai EIPs, see the Canyon specs for more details: -/// -pub fn validate_withdrawals_presence( - chain_spec: impl OpHardforks, - version: EngineApiMessageVersion, - message_validation_kind: MessageValidationKind, - timestamp: u64, - has_withdrawals: bool, -) -> Result<(), EngineObjectValidationError> { - let is_shanghai = chain_spec.is_canyon_active_at_timestamp(timestamp); - - match version { - EngineApiMessageVersion::V1 => { - if has_withdrawals { - return Err(message_validation_kind - .to_error(VersionSpecificValidationError::WithdrawalsNotSupportedInV1)); - } - if is_shanghai { - return Err(message_validation_kind - .to_error(VersionSpecificValidationError::NoWithdrawalsPostShanghai)); - } - } - EngineApiMessageVersion::V2 | - EngineApiMessageVersion::V3 | - EngineApiMessageVersion::V4 | - EngineApiMessageVersion::V5 => { - if is_shanghai && !has_withdrawals { - return Err(message_validation_kind - .to_error(VersionSpecificValidationError::NoWithdrawalsPostShanghai)); 
- } - if !is_shanghai && has_withdrawals { - return Err(message_validation_kind - .to_error(VersionSpecificValidationError::HasWithdrawalsPreShanghai)); - } - } - }; - - Ok(()) -} - -#[cfg(test)] -mod test { - use super::*; - - use crate::engine; - use alloy_op_hardforks::BASE_SEPOLIA_JOVIAN_TIMESTAMP; - use alloy_primitives::{b64, Address, B256, B64}; - use alloy_rpc_types_engine::PayloadAttributes; - use reth_optimism_chainspec::BASE_SEPOLIA; - use reth_provider::noop::NoopProvider; - use reth_trie_common::KeccakKeyHasher; - - macro_rules! assert_invalid_params_error { - ($result:expr, $msg:expr) => {{ - let err = $result.expect_err("expected InvalidParams error"); - match err { - EngineObjectValidationError::InvalidParams(inner) => { - assert_eq!(inner.to_string(), $msg); - } - other => panic!("expected InvalidParams, got {other:?}"), - } - }}; - } - - const fn get_attributes( - eip_1559_params: Option, - min_base_fee: Option, - timestamp: u64, - ) -> OpPayloadAttributes { - OpPayloadAttributes { - gas_limit: Some(1000), - eip_1559_params, - min_base_fee, - transactions: None, - no_tx_pool: None, - payload_attributes: PayloadAttributes { - timestamp, - prev_randao: B256::ZERO, - suggested_fee_recipient: Address::ZERO, - withdrawals: Some(vec![]), - parent_beacon_block_root: Some(B256::ZERO), - }, - } - } - - #[test] - fn test_well_formed_attributes_pre_holocene() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(None, None, 1732633199); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_well_formed_attributes_holocene_no_eip1559_params() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(None, None, 1732633200); - - let result = as 
EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "MissingEip1559ParamsInPayloadAttributes"); - } - - #[test] - fn test_well_formed_attributes_holocene_eip1559_params_zero_denominator() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(Some(b64!("0000000000000008")), None, 1732633200); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "Eip1559ParamsDenominatorZero"); - } - - #[test] - fn test_well_formed_attributes_holocene_eip1559_params_zero_elasticity() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(Some(b64!("0000000800000000")), None, 1732633200); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "Eip1559ParamsElasticityZero"); - } - - #[test] - fn test_well_formed_attributes_holocene_valid() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(Some(b64!("0000000800000008")), None, 1732633200); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_well_formed_attributes_holocene_valid_all_zero() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(Some(b64!("0000000000000000")), None, 1732633200); - - let result = as EngineApiValidator< - OpEngineTypes, - 
>>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert!(result.is_ok()); - } - - #[test] - fn test_well_formed_attributes_jovian_valid() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = - get_attributes(Some(b64!("0000000000000000")), Some(1), BASE_SEPOLIA_JOVIAN_TIMESTAMP); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert!(result.is_ok()); - } - - /// After Jovian (and holocene), eip1559 params must be Some - #[test] - fn test_malformed_attributes_jovian_with_eip_1559_params_none() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(None, Some(1), BASE_SEPOLIA_JOVIAN_TIMESTAMP); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "MissingEip1559ParamsInPayloadAttributes"); - } - - /// Before Jovian, min base fee must be None - #[test] - fn test_malformed_attributes_pre_jovian_with_min_base_fee() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = get_attributes(Some(b64!("0000000000000000")), Some(1), 1732633200); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "MinBaseFeeNotAllowedBeforeJovian"); - } - - /// After Jovian, min base fee must be Some - #[test] - fn test_malformed_attributes_post_jovian_with_min_base_fee_none() { - let validator = OpEngineValidator::new::( - BASE_SEPOLIA.clone(), - NoopProvider::default(), - ); - let attributes = - get_attributes(Some(b64!("0000000000000000")), None, 
BASE_SEPOLIA_JOVIAN_TIMESTAMP); - - let result = as EngineApiValidator< - OpEngineTypes, - >>::ensure_well_formed_attributes( - &validator, EngineApiMessageVersion::V3, &attributes, - ); - assert_invalid_params_error!(result, "MissingMinBaseFeeInPayloadAttributes"); - } -} diff --git a/op-reth/crates/node/src/lib.rs b/op-reth/crates/node/src/lib.rs deleted file mode 100644 index 9fcc8d4e549..00000000000 --- a/op-reth/crates/node/src/lib.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! Standalone crate for Optimism-specific Reth configuration and builder types. -//! -//! # features -//! - `js-tracer`: Enable the `JavaScript` tracer for the `debug_trace` endpoints - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -/// CLI argument parsing for the optimism node. -pub mod args; - -/// Exports optimism-specific implementations of the [`EngineTypes`](reth_node_api::EngineTypes) -/// trait. -pub mod engine; -pub use engine::OpEngineTypes; - -pub mod node; -pub use node::*; - -pub mod rpc; -pub use rpc::OpEngineApiBuilder; - -pub mod version; -pub use version::OP_NAME_CLIENT; - -pub use reth_optimism_txpool as txpool; - -/// Helpers for running test node instances. 
-#[cfg(feature = "test-utils")] -pub mod utils; - -pub use reth_optimism_payload_builder::{ - self as payload, config::OpDAConfig, OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilder, - OpPayloadBuilderAttributes, OpPayloadPrimitives, OpPayloadTypes, -}; - -pub use reth_optimism_evm::*; - -pub use reth_optimism_storage::OpStorage; - -use op_revm as _; -use revm as _; diff --git a/op-reth/crates/node/src/node.rs b/op-reth/crates/node/src/node.rs deleted file mode 100644 index 51d0ff3022d..00000000000 --- a/op-reth/crates/node/src/node.rs +++ /dev/null @@ -1,1304 +0,0 @@ -//! Optimism Node types config. - -use crate::{ - args::RollupArgs, - engine::OpEngineValidator, - txpool::{OpTransactionPool, OpTransactionValidator}, - OpEngineApiBuilder, OpEngineTypes, -}; -use op_alloy_consensus::{interop::SafetyLevel, OpPooledTransaction}; -use op_alloy_rpc_types_engine::OpExecutionData; -use reth_chainspec::{ChainSpecProvider, EthChainSpec, Hardforks}; -use reth_engine_local::LocalPayloadAttributesBuilder; -use reth_evm::ConfigureEvm; -use reth_network::{ - types::BasicNetworkPrimitives, NetworkConfig, NetworkHandle, NetworkManager, NetworkPrimitives, - PeersInfo, -}; -use reth_node_api::{ - AddOnsContext, BuildNextEnv, EngineTypes, FullNodeComponents, HeaderTy, NodeAddOns, - NodePrimitives, PayloadAttributesBuilder, PayloadTypes, PrimitivesTy, TxTy, -}; -use reth_node_builder::{ - components::{ - BasicPayloadServiceBuilder, ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, - NetworkBuilder, PayloadBuilderBuilder, PoolBuilder, PoolBuilderConfigOverrides, - TxPoolBuilder, - }, - node::{FullNodeTypes, NodeTypes}, - rpc::{ - BasicEngineValidatorBuilder, EngineApiBuilder, EngineValidatorAddOn, - EngineValidatorBuilder, EthApiBuilder, Identity, PayloadValidatorBuilder, RethRpcAddOns, - RethRpcMiddleware, RethRpcServerHandles, RpcAddOns, RpcContext, RpcHandle, - }, - BuilderContext, DebugNode, Node, NodeAdapter, NodeComponentsBuilder, -}; -use 
reth_optimism_chainspec::{OpChainSpec, OpHardfork}; -use reth_optimism_consensus::OpBeaconConsensus; -use reth_optimism_evm::{OpEvmConfig, OpRethReceiptBuilder}; -use reth_optimism_forks::OpHardforks; -use reth_optimism_payload_builder::{ - builder::OpPayloadTransactions, - config::{OpBuilderConfig, OpDAConfig, OpGasLimitConfig}, - OpAttributes, OpBuiltPayload, OpPayloadPrimitives, -}; -use reth_optimism_primitives::{DepositReceipt, OpPrimitives}; -use reth_optimism_rpc::{ - eth::{ext::OpEthExtApi, OpEthApiBuilder}, - historical::{HistoricalRpc, HistoricalRpcClient}, - miner::{MinerApiExtServer, OpMinerExtApi}, - witness::{DebugExecutionWitnessApiServer, OpDebugWitnessApi}, - SequencerClient, -}; -use reth_optimism_storage::OpStorage; -use reth_optimism_txpool::{ - supervisor::{SupervisorClient, DEFAULT_SUPERVISOR_URL}, - OpPooledTx, -}; -use reth_provider::{providers::ProviderFactoryBuilder, CanonStateSubscriptions}; -use reth_rpc_api::{eth::RpcTypes, DebugApiServer, L2EthApiExtServer}; -use reth_rpc_server_types::RethRpcModule; -use reth_tracing::tracing::{debug, info}; -use reth_transaction_pool::{ - blobstore::DiskFileBlobStore, EthPoolTransaction, PoolPooledTx, PoolTransaction, - TransactionPool, TransactionValidationTaskExecutor, -}; -use reth_trie_common::KeccakKeyHasher; -use serde::de::DeserializeOwned; -use std::{marker::PhantomData, sync::Arc}; -use url::Url; - -/// Marker trait for Optimism node types with standard engine, chain spec, and primitives. -pub trait OpNodeTypes: - NodeTypes -{ -} -/// Blanket impl for all node types that conform to the Optimism spec. -impl OpNodeTypes for N where - N: NodeTypes< - Payload = OpEngineTypes, - ChainSpec: OpHardforks + Hardforks, - Primitives = OpPrimitives, - > -{ -} - -/// Helper trait for Optimism node types with full configuration including storage and execution -/// data. 
-pub trait OpFullNodeTypes: - NodeTypes< - ChainSpec: OpHardforks, - Primitives: OpPayloadPrimitives, - Storage = OpStorage, - Payload: EngineTypes, -> -{ -} - -impl OpFullNodeTypes for N where - N: NodeTypes< - ChainSpec: OpHardforks, - Primitives: OpPayloadPrimitives, - Storage = OpStorage, - Payload: EngineTypes, - > -{ -} - -/// Type configuration for a regular Optimism node. -#[derive(Debug, Default, Clone)] -#[non_exhaustive] -pub struct OpNode { - /// Additional Optimism args - pub args: RollupArgs, - /// Data availability configuration for the OP builder. - /// - /// Used to throttle the size of the data availability payloads (configured by the batcher via - /// the `miner_` api). - /// - /// By default no throttling is applied. - pub da_config: OpDAConfig, - /// Gas limit configuration for the OP builder. - /// Used to control the gas limit of the blocks produced by the OP builder.(configured by the - /// batcher via the `miner_` api) - pub gas_limit_config: OpGasLimitConfig, -} - -/// A [`ComponentsBuilder`] with its generic arguments set to a stack of Optimism specific builders. -pub type OpNodeComponentBuilder = ComponentsBuilder< - Node, - OpPoolBuilder, - BasicPayloadServiceBuilder, - OpNetworkBuilder, - OpExecutorBuilder, - OpConsensusBuilder, ->; - -impl OpNode { - /// Creates a new instance of the Optimism node type. - pub fn new(args: RollupArgs) -> Self { - Self { - args, - da_config: OpDAConfig::default(), - gas_limit_config: OpGasLimitConfig::default(), - } - } - - /// Configure the data availability configuration for the OP builder. - pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { - self.da_config = da_config; - self - } - - /// Configure the gas limit configuration for the OP builder. - pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { - self.gas_limit_config = gas_limit_config; - self - } - - /// Returns the components for the given [`RollupArgs`]. 
- pub fn components(&self) -> OpNodeComponentBuilder - where - Node: FullNodeTypes, - { - let RollupArgs { disable_txpool_gossip, compute_pending_block, discovery_v4, .. } = - self.args; - ComponentsBuilder::default() - .node_types::() - .executor(OpExecutorBuilder::default()) - .pool( - OpPoolBuilder::default() - .with_enable_tx_conditional(self.args.enable_tx_conditional) - .with_supervisor( - self.args.supervisor_http.clone(), - self.args.supervisor_safety_level, - ), - ) - .payload(BasicPayloadServiceBuilder::new( - OpPayloadBuilder::new(compute_pending_block) - .with_da_config(self.da_config.clone()) - .with_gas_limit_config(self.gas_limit_config.clone()), - )) - .network(OpNetworkBuilder::new(disable_txpool_gossip, !discovery_v4)) - .consensus(OpConsensusBuilder::default()) - } - - /// Returns [`OpAddOnsBuilder`] with configured arguments. - pub fn add_ons_builder(&self) -> OpAddOnsBuilder { - OpAddOnsBuilder::default() - .with_sequencer(self.args.sequencer.clone()) - .with_sequencer_headers(self.args.sequencer_headers.clone()) - .with_da_config(self.da_config.clone()) - .with_gas_limit_config(self.gas_limit_config.clone()) - .with_enable_tx_conditional(self.args.enable_tx_conditional) - .with_min_suggested_priority_fee(self.args.min_suggested_priority_fee) - .with_historical_rpc(self.args.historical_rpc.clone()) - .with_flashblocks(self.args.flashblocks_url.clone()) - .with_flashblock_consensus(self.args.flashblock_consensus) - } - - /// Instantiates the [`ProviderFactoryBuilder`] for an opstack node. - /// - /// # Open a Providerfactory in read-only mode from a datadir - /// - /// See also: [`ProviderFactoryBuilder`] and - /// [`ReadOnlyConfig`](reth_provider::providers::ReadOnlyConfig). 
- /// - /// ```no_run - /// use reth_optimism_chainspec::BASE_MAINNET; - /// use reth_optimism_node::OpNode; - /// - /// let factory = - /// OpNode::provider_factory_builder().open_read_only(BASE_MAINNET.clone(), "datadir").unwrap(); - /// ``` - /// - /// # Open a Providerfactory manually with all required components - /// - /// ```no_run - /// use reth_db::open_db_read_only; - /// use reth_optimism_chainspec::OpChainSpecBuilder; - /// use reth_optimism_node::OpNode; - /// use reth_provider::providers::{RocksDBProvider, StaticFileProvider}; - /// use std::sync::Arc; - /// - /// let factory = OpNode::provider_factory_builder() - /// .db(Arc::new(open_db_read_only("db", Default::default()).unwrap())) - /// .chainspec(OpChainSpecBuilder::base_mainnet().build().into()) - /// .static_file(StaticFileProvider::read_only("db/static_files", false).unwrap()) - /// .rocksdb_provider(RocksDBProvider::builder("db/rocksdb").build().unwrap()) - /// .build_provider_factory(); - /// ``` - pub fn provider_factory_builder() -> ProviderFactoryBuilder { - ProviderFactoryBuilder::default() - } -} - -impl Node for OpNode -where - N: FullNodeTypes, -{ - type ComponentsBuilder = ComponentsBuilder< - N, - OpPoolBuilder, - BasicPayloadServiceBuilder, - OpNetworkBuilder, - OpExecutorBuilder, - OpConsensusBuilder, - >; - - type AddOns = OpAddOns< - NodeAdapter>::Components>, - OpEthApiBuilder, - OpEngineValidatorBuilder, - OpEngineApiBuilder, - BasicEngineValidatorBuilder, - >; - - fn components_builder(&self) -> Self::ComponentsBuilder { - Self::components(self) - } - - fn add_ons(&self) -> Self::AddOns { - self.add_ons_builder().build() - } -} - -impl DebugNode for OpNode -where - N: FullNodeComponents, -{ - type RpcBlock = alloy_rpc_types_eth::Block; - - fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> reth_node_api::BlockTy { - rpc_block.into_consensus() - } - - fn local_payload_attributes_builder( - chain_spec: &Self::ChainSpec, - ) -> impl 
PayloadAttributesBuilder<::PayloadAttributes> { - LocalPayloadAttributesBuilder::new(Arc::new(chain_spec.clone())) - } -} - -impl NodeTypes for OpNode { - type Primitives = OpPrimitives; - type ChainSpec = OpChainSpec; - type Storage = OpStorage; - type Payload = OpEngineTypes; -} - -/// Add-ons w.r.t. optimism. -/// -/// This type provides optimism-specific addons to the node and exposes the RPC server and engine -/// API. -#[derive(Debug)] -pub struct OpAddOns< - N: FullNodeComponents, - EthB: EthApiBuilder, - PVB, - EB = OpEngineApiBuilder, - EVB = BasicEngineValidatorBuilder, - RpcMiddleware = Identity, -> { - /// Rpc add-ons responsible for launching the RPC servers and instantiating the RPC handlers - /// and eth-api. - pub rpc_add_ons: RpcAddOns, - /// Data availability configuration for the OP builder. - pub da_config: OpDAConfig, - /// Gas limit configuration for the OP builder. - pub gas_limit_config: OpGasLimitConfig, - /// Sequencer client, configured to forward submitted transactions to sequencer of given OP - /// network. - pub sequencer_url: Option, - /// Headers to use for the sequencer client requests. - pub sequencer_headers: Vec, - /// RPC endpoint for historical data. - /// - /// This can be used to forward pre-bedrock rpc requests (op-mainnet). - pub historical_rpc: Option, - /// Enable transaction conditionals. - enable_tx_conditional: bool, - min_suggested_priority_fee: u64, -} - -impl OpAddOns -where - N: FullNodeComponents, - EthB: EthApiBuilder, -{ - /// Creates a new instance from components. 
- #[allow(clippy::too_many_arguments)] - pub const fn new( - rpc_add_ons: RpcAddOns, - da_config: OpDAConfig, - gas_limit_config: OpGasLimitConfig, - sequencer_url: Option, - sequencer_headers: Vec, - historical_rpc: Option, - enable_tx_conditional: bool, - min_suggested_priority_fee: u64, - ) -> Self { - Self { - rpc_add_ons, - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - } - } -} - -impl Default for OpAddOns -where - N: FullNodeComponents, - OpEthApiBuilder: EthApiBuilder, -{ - fn default() -> Self { - Self::builder().build() - } -} - -impl - OpAddOns< - N, - OpEthApiBuilder, - OpEngineValidatorBuilder, - OpEngineApiBuilder, - RpcMiddleware, - > -where - N: FullNodeComponents, - OpEthApiBuilder: EthApiBuilder, -{ - /// Build a [`OpAddOns`] using [`OpAddOnsBuilder`]. - pub fn builder() -> OpAddOnsBuilder { - OpAddOnsBuilder::default() - } -} - -impl OpAddOns -where - N: FullNodeComponents, - EthB: EthApiBuilder, -{ - /// Maps the [`reth_node_builder::rpc::EngineApiBuilder`] builder type. - pub fn with_engine_api( - self, - engine_api_builder: T, - ) -> OpAddOns { - let Self { - rpc_add_ons, - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - .. - } = self; - OpAddOns::new( - rpc_add_ons.with_engine_api(engine_api_builder), - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - ) - } - - /// Maps the [`PayloadValidatorBuilder`] builder type. - pub fn with_payload_validator( - self, - payload_validator_builder: T, - ) -> OpAddOns { - let Self { - rpc_add_ons, - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - enable_tx_conditional, - min_suggested_priority_fee, - historical_rpc, - .. 
- } = self; - OpAddOns::new( - rpc_add_ons.with_payload_validator(payload_validator_builder), - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - ) - } - - /// Sets the RPC middleware stack for processing RPC requests. - /// - /// This method configures a custom middleware stack that will be applied to all RPC requests - /// across HTTP, `WebSocket`, and IPC transports. The middleware is applied to the RPC service - /// layer, allowing you to intercept, modify, or enhance RPC request processing. - /// - /// See also [`RpcAddOns::with_rpc_middleware`]. - pub fn with_rpc_middleware(self, rpc_middleware: T) -> OpAddOns { - let Self { - rpc_add_ons, - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - enable_tx_conditional, - min_suggested_priority_fee, - historical_rpc, - .. - } = self; - OpAddOns::new( - rpc_add_ons.with_rpc_middleware(rpc_middleware), - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - ) - } - - /// Sets the hook that is run once the rpc server is started. - pub fn on_rpc_started(mut self, hook: F) -> Self - where - F: FnOnce(RpcContext<'_, N, EthB::EthApi>, RethRpcServerHandles) -> eyre::Result<()> - + Send - + 'static, - { - self.rpc_add_ons = self.rpc_add_ons.on_rpc_started(hook); - self - } - - /// Sets the hook that is run to configure the rpc modules. 
- pub fn extend_rpc_modules(mut self, hook: F) -> Self - where - F: FnOnce(RpcContext<'_, N, EthB::EthApi>) -> eyre::Result<()> + Send + 'static, - { - self.rpc_add_ons = self.rpc_add_ons.extend_rpc_modules(hook); - self - } -} - -impl NodeAddOns - for OpAddOns -where - N: FullNodeComponents< - Types: NodeTypes< - ChainSpec: OpHardforks, - Primitives: OpPayloadPrimitives, - Payload: PayloadTypes, - >, - Evm: ConfigureEvm< - NextBlockEnvCtx: BuildNextEnv< - Attrs, - HeaderTy, - ::ChainSpec, - >, - >, - Pool: TransactionPool, - >, - EthB: EthApiBuilder, - PVB: Send, - EB: EngineApiBuilder, - EVB: EngineValidatorBuilder, - RpcMiddleware: RethRpcMiddleware, - Attrs: OpAttributes, RpcPayloadAttributes: DeserializeOwned>, -{ - type Handle = RpcHandle; - - async fn launch_add_ons( - self, - ctx: reth_node_api::AddOnsContext<'_, N>, - ) -> eyre::Result { - let Self { - rpc_add_ons, - da_config, - gas_limit_config, - sequencer_url, - sequencer_headers, - enable_tx_conditional, - historical_rpc, - .. - } = self; - - let maybe_pre_bedrock_historical_rpc = historical_rpc - .and_then(|historical_rpc| { - ctx.node - .provider() - .chain_spec() - .op_fork_activation(OpHardfork::Bedrock) - .block_number() - .filter(|activation| *activation > 0) - .map(|bedrock_block| (historical_rpc, bedrock_block)) - }) - .map(|(historical_rpc, bedrock_block)| -> eyre::Result<_> { - info!(target: "reth::cli", %bedrock_block, ?historical_rpc, "Using historical RPC endpoint pre bedrock"); - let provider = ctx.node.provider().clone(); - let client = HistoricalRpcClient::new(&historical_rpc)?; - let layer = HistoricalRpc::new(provider, client, bedrock_block); - Ok(layer) - }) - .transpose()? 
- ; - - let rpc_add_ons = rpc_add_ons.option_layer_rpc_middleware(maybe_pre_bedrock_historical_rpc); - - let builder = reth_optimism_payload_builder::OpPayloadBuilder::new( - ctx.node.pool().clone(), - ctx.node.provider().clone(), - ctx.node.evm_config().clone(), - ); - // install additional OP specific rpc methods - let debug_ext = OpDebugWitnessApi::<_, _, _, Attrs>::new( - ctx.node.provider().clone(), - Box::new(ctx.node.task_executor().clone()), - builder, - ); - let miner_ext = OpMinerExtApi::new(da_config, gas_limit_config); - - let sequencer_client = if let Some(url) = sequencer_url { - Some(SequencerClient::new_with_headers(url, sequencer_headers).await?) - } else { - None - }; - - let tx_conditional_ext: OpEthExtApi = OpEthExtApi::new( - sequencer_client, - ctx.node.pool().clone(), - ctx.node.provider().clone(), - ); - - rpc_add_ons - .launch_add_ons_with(ctx, move |container| { - let reth_node_builder::rpc::RpcModuleContainer { modules, auth_module, registry } = - container; - - debug!(target: "reth::cli", "Installing debug payload witness rpc endpoint"); - modules.merge_if_module_configured(RethRpcModule::Debug, debug_ext.into_rpc())?; - - // extend the miner namespace if configured in the regular http server - modules.add_or_replace_if_module_configured( - RethRpcModule::Miner, - miner_ext.clone().into_rpc(), - )?; - - // install the miner extension in the authenticated if configured - if modules.module_config().contains_any(&RethRpcModule::Miner) { - debug!(target: "reth::cli", "Installing miner DA rpc endpoint"); - auth_module.merge_auth_methods(miner_ext.into_rpc())?; - } - - // install the debug namespace in the authenticated if configured - if modules.module_config().contains_any(&RethRpcModule::Debug) { - debug!(target: "reth::cli", "Installing debug rpc endpoint"); - auth_module.merge_auth_methods(registry.debug_api().into_rpc())?; - } - - if enable_tx_conditional { - // extend the eth namespace if configured in the regular http server - 
modules.merge_if_module_configured( - RethRpcModule::Eth, - tx_conditional_ext.into_rpc(), - )?; - } - - Ok(()) - }) - .await - } -} - -impl RethRpcAddOns - for OpAddOns -where - N: FullNodeComponents< - Types: NodeTypes< - ChainSpec: OpHardforks, - Primitives: OpPayloadPrimitives, - Payload: PayloadTypes, - >, - Evm: ConfigureEvm< - NextBlockEnvCtx: BuildNextEnv< - Attrs, - HeaderTy, - ::ChainSpec, - >, - >, - >, - <::Pool as TransactionPool>::Transaction: OpPooledTx, - EthB: EthApiBuilder, - PVB: PayloadValidatorBuilder, - EB: EngineApiBuilder, - EVB: EngineValidatorBuilder, - RpcMiddleware: RethRpcMiddleware, - Attrs: OpAttributes, RpcPayloadAttributes: DeserializeOwned>, -{ - type EthApi = EthB::EthApi; - - fn hooks_mut(&mut self) -> &mut reth_node_builder::rpc::RpcHooks { - self.rpc_add_ons.hooks_mut() - } -} - -impl EngineValidatorAddOn - for OpAddOns -where - N: FullNodeComponents, - EthB: EthApiBuilder, - PVB: Send, - EB: EngineApiBuilder, - EVB: EngineValidatorBuilder, - RpcMiddleware: Send, -{ - type ValidatorBuilder = EVB; - - fn engine_validator_builder(&self) -> Self::ValidatorBuilder { - EngineValidatorAddOn::engine_validator_builder(&self.rpc_add_ons) - } -} - -/// A regular optimism evm and executor builder. -#[derive(Debug, Clone)] -#[non_exhaustive] -pub struct OpAddOnsBuilder { - /// Sequencer client, configured to forward submitted transactions to sequencer of given OP - /// network. - sequencer_url: Option, - /// Headers to use for the sequencer client requests. - sequencer_headers: Vec, - /// RPC endpoint for historical data. - historical_rpc: Option, - /// Data availability configuration for the OP builder. - da_config: Option, - /// Gas limit configuration for the OP builder. - gas_limit_config: Option, - /// Enable transaction conditionals. - enable_tx_conditional: bool, - /// Marker for network types. 
- _nt: PhantomData, - /// Minimum suggested priority fee (tip) - min_suggested_priority_fee: u64, - /// RPC middleware to use - rpc_middleware: RpcMiddleware, - /// Optional tokio runtime to use for the RPC server. - tokio_runtime: Option, - /// A URL pointing to a secure websocket service that streams out flashblocks. - flashblocks_url: Option, - /// Enable flashblock consensus client to drive chain forward. - flashblock_consensus: bool, -} - -impl Default for OpAddOnsBuilder { - fn default() -> Self { - Self { - sequencer_url: None, - sequencer_headers: Vec::new(), - historical_rpc: None, - da_config: None, - gas_limit_config: None, - enable_tx_conditional: false, - min_suggested_priority_fee: 1_000_000, - _nt: PhantomData, - rpc_middleware: Identity::new(), - tokio_runtime: None, - flashblocks_url: None, - flashblock_consensus: false, - } - } -} - -impl OpAddOnsBuilder { - /// With a [`SequencerClient`]. - pub fn with_sequencer(mut self, sequencer_client: Option) -> Self { - self.sequencer_url = sequencer_client; - self - } - - /// With headers to use for the sequencer client requests. - pub fn with_sequencer_headers(mut self, sequencer_headers: Vec) -> Self { - self.sequencer_headers = sequencer_headers; - self - } - - /// Configure the data availability configuration for the OP builder. - pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { - self.da_config = Some(da_config); - self - } - - /// Configure the gas limit configuration for the OP payload builder. - pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { - self.gas_limit_config = Some(gas_limit_config); - self - } - - /// Configure if transaction conditional should be enabled. 
- pub const fn with_enable_tx_conditional(mut self, enable_tx_conditional: bool) -> Self { - self.enable_tx_conditional = enable_tx_conditional; - self - } - - /// Configure the minimum priority fee (tip) - pub const fn with_min_suggested_priority_fee(mut self, min: u64) -> Self { - self.min_suggested_priority_fee = min; - self - } - - /// Configures the endpoint for historical RPC forwarding. - pub fn with_historical_rpc(mut self, historical_rpc: Option) -> Self { - self.historical_rpc = historical_rpc; - self - } - - /// Configures a custom tokio runtime for the RPC server. - /// - /// Caution: This runtime must not be created from within asynchronous context. - pub fn with_tokio_runtime(mut self, tokio_runtime: Option) -> Self { - self.tokio_runtime = tokio_runtime; - self - } - - /// Configure the RPC middleware to use - pub fn with_rpc_middleware(self, rpc_middleware: T) -> OpAddOnsBuilder { - let Self { - sequencer_url, - sequencer_headers, - historical_rpc, - da_config, - gas_limit_config, - enable_tx_conditional, - min_suggested_priority_fee, - tokio_runtime, - _nt, - flashblocks_url, - flashblock_consensus, - .. - } = self; - OpAddOnsBuilder { - sequencer_url, - sequencer_headers, - historical_rpc, - da_config, - gas_limit_config, - enable_tx_conditional, - min_suggested_priority_fee, - _nt, - rpc_middleware, - tokio_runtime, - flashblocks_url, - flashblock_consensus, - } - } - - /// With a URL pointing to a flashblocks secure websocket subscription. - pub fn with_flashblocks(mut self, flashblocks_url: Option) -> Self { - self.flashblocks_url = flashblocks_url; - self - } - - /// With a flashblock consensus client to drive chain forward. - pub const fn with_flashblock_consensus(mut self, flashblock_consensus: bool) -> Self { - self.flashblock_consensus = flashblock_consensus; - self - } -} - -impl OpAddOnsBuilder { - /// Builds an instance of [`OpAddOns`]. 
- pub fn build( - self, - ) -> OpAddOns, PVB, EB, EVB, RpcMiddleware> - where - N: FullNodeComponents, - OpEthApiBuilder: EthApiBuilder, - PVB: PayloadValidatorBuilder + Default, - EB: Default, - EVB: Default, - { - let Self { - sequencer_url, - sequencer_headers, - da_config, - gas_limit_config, - enable_tx_conditional, - min_suggested_priority_fee, - historical_rpc, - rpc_middleware, - tokio_runtime, - flashblocks_url, - flashblock_consensus, - .. - } = self; - - OpAddOns::new( - RpcAddOns::new( - OpEthApiBuilder::default() - .with_sequencer(sequencer_url.clone()) - .with_sequencer_headers(sequencer_headers.clone()) - .with_min_suggested_priority_fee(min_suggested_priority_fee) - .with_flashblocks(flashblocks_url) - .with_flashblock_consensus(flashblock_consensus), - PVB::default(), - EB::default(), - EVB::default(), - rpc_middleware, - ) - .with_tokio_runtime(tokio_runtime), - da_config.unwrap_or_default(), - gas_limit_config.unwrap_or_default(), - sequencer_url, - sequencer_headers, - historical_rpc, - enable_tx_conditional, - min_suggested_priority_fee, - ) - } -} - -/// A regular optimism evm and executor builder. -#[derive(Debug, Copy, Clone, Default)] -#[non_exhaustive] -pub struct OpExecutorBuilder; - -impl ExecutorBuilder for OpExecutorBuilder -where - Node: FullNodeTypes>, -{ - type EVM = - OpEvmConfig<::ChainSpec, ::Primitives>; - - async fn build_evm(self, ctx: &BuilderContext) -> eyre::Result { - let evm_config = OpEvmConfig::new(ctx.chain_spec(), OpRethReceiptBuilder::default()); - - Ok(evm_config) - } -} - -/// A basic optimism transaction pool. -/// -/// This contains various settings that can be configured and take precedence over the node's -/// config. -#[derive(Debug)] -pub struct OpPoolBuilder { - /// Enforced overrides that are applied to the pool config. - pub pool_config_overrides: PoolBuilderConfigOverrides, - /// Enable transaction conditionals. 
- pub enable_tx_conditional: bool, - /// Supervisor client url - pub supervisor_http: String, - /// Supervisor safety level - pub supervisor_safety_level: SafetyLevel, - /// Marker for the pooled transaction type. - _pd: core::marker::PhantomData, -} - -impl Default for OpPoolBuilder { - fn default() -> Self { - Self { - pool_config_overrides: Default::default(), - enable_tx_conditional: false, - supervisor_http: DEFAULT_SUPERVISOR_URL.to_string(), - supervisor_safety_level: SafetyLevel::CrossUnsafe, - _pd: Default::default(), - } - } -} - -impl Clone for OpPoolBuilder { - fn clone(&self) -> Self { - Self { - pool_config_overrides: self.pool_config_overrides.clone(), - enable_tx_conditional: self.enable_tx_conditional, - supervisor_http: self.supervisor_http.clone(), - supervisor_safety_level: self.supervisor_safety_level, - _pd: core::marker::PhantomData, - } - } -} - -impl OpPoolBuilder { - /// Sets the `enable_tx_conditional` flag on the pool builder. - pub const fn with_enable_tx_conditional(mut self, enable_tx_conditional: bool) -> Self { - self.enable_tx_conditional = enable_tx_conditional; - self - } - - /// Sets the [`PoolBuilderConfigOverrides`] on the pool builder. - pub fn with_pool_config_overrides( - mut self, - pool_config_overrides: PoolBuilderConfigOverrides, - ) -> Self { - self.pool_config_overrides = pool_config_overrides; - self - } - - /// Sets the supervisor client - pub fn with_supervisor( - mut self, - supervisor_client: String, - supervisor_safety_level: SafetyLevel, - ) -> Self { - self.supervisor_http = supervisor_client; - self.supervisor_safety_level = supervisor_safety_level; - self - } -} - -impl PoolBuilder for OpPoolBuilder -where - Node: FullNodeTypes>, - T: EthPoolTransaction> + OpPooledTx, - Evm: ConfigureEvm> + Clone + 'static, -{ - type Pool = OpTransactionPool; - - async fn build_pool( - self, - ctx: &BuilderContext, - evm_config: Evm, - ) -> eyre::Result { - let Self { pool_config_overrides, .. 
} = self; - - // supervisor used for interop - if ctx.chain_spec().is_interop_active_at_timestamp(ctx.head().timestamp) && - self.supervisor_http == DEFAULT_SUPERVISOR_URL - { - info!(target: "reth::cli", - url=%DEFAULT_SUPERVISOR_URL, - "Default supervisor url is used, consider changing --rollup.supervisor-http." - ); - } - let supervisor_client = SupervisorClient::builder(self.supervisor_http.clone()) - .minimum_safety(self.supervisor_safety_level) - .build() - .await; - - let blob_store = reth_node_builder::components::create_blob_store(ctx)?; - let validator = - TransactionValidationTaskExecutor::eth_builder(ctx.provider().clone(), evm_config) - .no_eip4844() - .with_max_tx_input_bytes(ctx.config().txpool.max_tx_input_bytes) - .kzg_settings(ctx.kzg_settings()?) - .set_tx_fee_cap(ctx.config().rpc.rpc_tx_fee_cap) - .with_max_tx_gas_limit(ctx.config().txpool.max_tx_gas_limit) - .with_minimum_priority_fee(ctx.config().txpool.minimum_priority_fee) - .with_additional_tasks( - pool_config_overrides - .additional_validation_tasks - .unwrap_or_else(|| ctx.config().txpool.additional_validation_tasks), - ) - .build_with_tasks(ctx.task_executor().clone(), blob_store.clone()) - .map(|validator| { - OpTransactionValidator::new(validator) - // In --dev mode we can't require gas fees because we're unable to decode - // the L1 block info - .require_l1_data_gas_fee(!ctx.config().dev.dev) - .with_supervisor(supervisor_client.clone()) - }); - - let final_pool_config = pool_config_overrides.apply(ctx.pool_config()); - - let transaction_pool = TxPoolBuilder::new(ctx) - .with_validator(validator) - .build_and_spawn_maintenance_task(blob_store, final_pool_config)?; - - info!(target: "reth::cli", "Transaction pool initialized"); - debug!(target: "reth::cli", "Spawned txpool maintenance task"); - - // The Op txpool maintenance task is only spawned when interop is active - if ctx.chain_spec().is_interop_active_at_timestamp(ctx.head().timestamp) { - // spawn the Op txpool maintenance task 
- let chain_events = ctx.provider().canonical_state_stream(); - ctx.task_executor().spawn_critical( - "Op txpool interop maintenance task", - reth_optimism_txpool::maintain::maintain_transaction_pool_interop_future( - transaction_pool.clone(), - chain_events, - supervisor_client, - ), - ); - debug!(target: "reth::cli", "Spawned Op interop txpool maintenance task"); - } - - if self.enable_tx_conditional { - // spawn the Op txpool maintenance task - let chain_events = ctx.provider().canonical_state_stream(); - ctx.task_executor().spawn_critical( - "Op txpool conditional maintenance task", - reth_optimism_txpool::maintain::maintain_transaction_pool_conditional_future( - transaction_pool.clone(), - chain_events, - ), - ); - debug!(target: "reth::cli", "Spawned Op conditional txpool maintenance task"); - } - - Ok(transaction_pool) - } -} - -/// A basic optimism payload service builder -#[derive(Debug, Default, Clone)] -pub struct OpPayloadBuilder { - /// By default the pending block equals the latest block - /// to save resources and not leak txs from the tx-pool, - /// this flag enables computing of the pending block - /// from the tx-pool instead. - /// - /// If `compute_pending_block` is not enabled, the payload builder - /// will use the payload attributes from the latest block. Note - /// that this flag is not yet functional. - pub compute_pending_block: bool, - /// The type responsible for yielding the best transactions for the payload if mempool - /// transactions are allowed. - pub best_transactions: Txs, - /// This data availability configuration specifies constraints for the payload builder - /// when assembling payloads - pub da_config: OpDAConfig, - /// Gas limit configuration for the OP builder. - /// This is used to configure gas limit related constraints for the payload builder. - pub gas_limit_config: OpGasLimitConfig, -} - -impl OpPayloadBuilder { - /// Create a new instance with the given `compute_pending_block` flag and data availability - /// config. 
- pub fn new(compute_pending_block: bool) -> Self { - Self { - compute_pending_block, - best_transactions: (), - da_config: OpDAConfig::default(), - gas_limit_config: OpGasLimitConfig::default(), - } - } - - /// Configure the data availability configuration for the OP payload builder. - pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { - self.da_config = da_config; - self - } - - /// Configure the gas limit configuration for the OP payload builder. - pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { - self.gas_limit_config = gas_limit_config; - self - } -} - -impl OpPayloadBuilder { - /// Configures the type responsible for yielding the transactions that should be included in the - /// payload. - pub fn with_transactions(self, best_transactions: T) -> OpPayloadBuilder { - let Self { compute_pending_block, da_config, gas_limit_config, .. } = self; - OpPayloadBuilder { compute_pending_block, best_transactions, da_config, gas_limit_config } - } -} - -impl PayloadBuilderBuilder for OpPayloadBuilder -where - Node: FullNodeTypes< - Provider: ChainSpecProvider, - Types: NodeTypes< - Primitives: OpPayloadPrimitives, - Payload: PayloadTypes< - BuiltPayload = OpBuiltPayload>, - PayloadBuilderAttributes = Attrs, - >, - >, - >, - Evm: ConfigureEvm< - Primitives = PrimitivesTy, - NextBlockEnvCtx: BuildNextEnv< - Attrs, - HeaderTy, - ::ChainSpec, - >, - > + 'static, - Pool: TransactionPool>> + Unpin + 'static, - Txs: OpPayloadTransactions, - Attrs: OpAttributes>, -{ - type PayloadBuilder = - reth_optimism_payload_builder::OpPayloadBuilder; - - async fn build_payload_builder( - self, - ctx: &BuilderContext, - pool: Pool, - evm_config: Evm, - ) -> eyre::Result { - let payload_builder = reth_optimism_payload_builder::OpPayloadBuilder::with_builder_config( - pool, - ctx.provider().clone(), - evm_config, - OpBuilderConfig { - da_config: self.da_config.clone(), - gas_limit_config: self.gas_limit_config.clone(), - }, - ) - 
.with_transactions(self.best_transactions.clone()) - .set_compute_pending_block(self.compute_pending_block); - Ok(payload_builder) - } -} - -/// A basic optimism network builder. -#[derive(Debug, Default)] -pub struct OpNetworkBuilder { - /// Disable transaction pool gossip - pub disable_txpool_gossip: bool, - /// Disable discovery v4 - pub disable_discovery_v4: bool, -} - -impl Clone for OpNetworkBuilder { - fn clone(&self) -> Self { - Self::new(self.disable_txpool_gossip, self.disable_discovery_v4) - } -} - -impl OpNetworkBuilder { - /// Creates a new `OpNetworkBuilder`. - pub const fn new(disable_txpool_gossip: bool, disable_discovery_v4: bool) -> Self { - Self { disable_txpool_gossip, disable_discovery_v4 } - } -} - -impl OpNetworkBuilder { - /// Returns the [`NetworkConfig`] that contains the settings to launch the p2p network. - /// - /// This applies the configured [`OpNetworkBuilder`] settings. - pub fn network_config( - &self, - ctx: &BuilderContext, - ) -> eyre::Result> - where - Node: FullNodeTypes>, - NetworkP: NetworkPrimitives, - { - let disable_txpool_gossip = self.disable_txpool_gossip; - let disable_discovery_v4 = self.disable_discovery_v4; - let args = &ctx.config().network; - let network_builder = ctx - .network_config_builder()? 
- // apply discovery settings - .apply(|mut builder| { - let rlpx_socket = (args.addr, args.port).into(); - if disable_discovery_v4 || args.discovery.disable_discovery { - builder = builder.disable_discv4_discovery(); - } - if !args.discovery.disable_discovery { - builder = builder.discovery_v5( - args.discovery.discovery_v5_builder( - rlpx_socket, - ctx.config() - .network - .resolved_bootnodes() - .or_else(|| ctx.chain_spec().bootnodes()) - .unwrap_or_default(), - ), - ); - } - - builder - }); - - let mut network_config = ctx.build_network_config(network_builder); - - // When `sequencer_endpoint` is configured, the node will forward all transactions to a - // Sequencer node for execution and inclusion on L1, and disable its own txpool - // gossip to prevent other parties in the network from learning about them. - network_config.tx_gossip_disabled = disable_txpool_gossip; - - Ok(network_config) - } -} - -impl NetworkBuilder for OpNetworkBuilder -where - Node: FullNodeTypes>, - Pool: TransactionPool>> - + Unpin - + 'static, -{ - type Network = - NetworkHandle, PoolPooledTx>>; - - async fn build_network( - self, - ctx: &BuilderContext, - pool: Pool, - ) -> eyre::Result { - let network_config = self.network_config(ctx)?; - let network = NetworkManager::builder(network_config).await?; - let handle = ctx.start_network(network, pool); - info!(target: "reth::cli", enode=%handle.local_node_record(), "P2P networking initialized"); - - Ok(handle) - } -} - -/// A basic optimism consensus builder. -#[derive(Debug, Default, Clone)] -#[non_exhaustive] -pub struct OpConsensusBuilder; - -impl ConsensusBuilder for OpConsensusBuilder -where - Node: FullNodeTypes< - Types: NodeTypes< - ChainSpec: OpHardforks, - Primitives: NodePrimitives, - >, - >, -{ - type Consensus = Arc::ChainSpec>>; - - async fn build_consensus(self, ctx: &BuilderContext) -> eyre::Result { - Ok(Arc::new(OpBeaconConsensus::new(ctx.chain_spec()))) - } -} - -/// Builder for [`OpEngineValidator`]. 
-#[derive(Debug, Default, Clone)] -#[non_exhaustive] -pub struct OpEngineValidatorBuilder; - -impl PayloadValidatorBuilder for OpEngineValidatorBuilder -where - Node: FullNodeComponents< - Types: NodeTypes< - ChainSpec: OpHardforks, - Payload: PayloadTypes, - >, - >, -{ - type Validator = OpEngineValidator< - Node::Provider, - <::Primitives as NodePrimitives>::SignedTx, - ::ChainSpec, - >; - - async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result { - Ok(OpEngineValidator::new::( - ctx.config.chain.clone(), - ctx.node.provider().clone(), - )) - } -} - -/// Network primitive types used by Optimism networks. -pub type OpNetworkPrimitives = BasicNetworkPrimitives; diff --git a/op-reth/crates/node/src/rpc.rs b/op-reth/crates/node/src/rpc.rs deleted file mode 100644 index 33de471753c..00000000000 --- a/op-reth/crates/node/src/rpc.rs +++ /dev/null @@ -1,155 +0,0 @@ -//! RPC component builder -//! -//! # Example -//! -//! Builds offline `TraceApi` with only EVM and database. This can be useful -//! for example when downloading a state snapshot (pre-synced node) from some mirror. -//! -//! ```rust -//! use alloy_rpc_types_eth::BlockId; -//! use op_alloy_network::Optimism; -//! use reth_db::test_utils::create_test_rw_db_with_path; -//! use reth_node_builder::{ -//! components::ComponentsBuilder, -//! hooks::OnComponentInitializedHook, -//! rpc::{EthApiBuilder, EthApiCtx}, -//! ConsensusEngineHandle, LaunchContext, NodeConfig, RethFullAdapter, -//! }; -//! use reth_optimism_chainspec::OP_SEPOLIA; -//! use reth_optimism_evm::OpEvmConfig; -//! use reth_optimism_node::{OpExecutorBuilder, OpNetworkPrimitives, OpNode}; -//! use reth_optimism_rpc::OpEthApiBuilder; -//! use reth_optimism_txpool::OpPooledTransaction; -//! use reth_provider::providers::BlockchainProvider; -//! use reth_rpc::TraceApi; -//! use reth_rpc_eth_types::{EthConfig, EthStateCache}; -//! use reth_tasks::{pool::BlockingTaskGuard, TaskManager}; -//! use reth_trie_db::ChangesetCache; -//! 
use std::sync::Arc; -//! -//! #[tokio::main] -//! async fn main() { -//! // build core node with all components disabled except EVM and state -//! let sepolia = NodeConfig::new(OP_SEPOLIA.clone()); -//! let db = create_test_rw_db_with_path(sepolia.datadir()); -//! let tasks = TaskManager::current(); -//! let launch_ctx = LaunchContext::new(tasks.executor(), sepolia.datadir()); -//! let node = launch_ctx -//! .with_loaded_toml_config(sepolia) -//! .unwrap() -//! .attach(Arc::new(db)) -//! .with_provider_factory::<_, OpEvmConfig>(ChangesetCache::new()) -//! .await -//! .unwrap() -//! .with_genesis() -//! .unwrap() -//! .with_metrics_task() // todo: shouldn't be req to set up blockchain db -//! .with_blockchain_db::, _>(move |provider_factory| { -//! Ok(BlockchainProvider::new(provider_factory).unwrap()) -//! }) -//! .unwrap() -//! .with_components( -//! ComponentsBuilder::default() -//! .node_types::>() -//! .noop_pool::() -//! .executor(OpExecutorBuilder::default()) -//! .noop_consensus() -//! .noop_network::() -//! .noop_payload(), -//! Box::new(()) as Box>, -//! ) -//! .await -//! .unwrap(); -//! -//! // build `eth` namespace API -//! let config = EthConfig::default(); -//! let cache = EthStateCache::spawn_with( -//! node.provider_factory().clone(), -//! config.cache, -//! node.task_executor().clone(), -//! ); -//! // Create a dummy beacon engine handle for offline mode -//! let (tx, _) = tokio::sync::mpsc::unbounded_channel(); -//! let ctx = EthApiCtx { -//! components: node.node_adapter(), -//! config, -//! cache, -//! engine_handle: ConsensusEngineHandle::new(tx), -//! }; -//! let eth_api = OpEthApiBuilder::::default().build_eth_api(ctx).await.unwrap(); -//! -//! // build `trace` namespace API -//! let trace_api = TraceApi::new(eth_api, BlockingTaskGuard::new(10), EthConfig::default()); -//! -//! // fetch traces for latest block -//! let traces = trace_api.trace_block(BlockId::latest()).await.unwrap(); -//! } -//! 
``` - -pub use reth_optimism_rpc::{OpEngineApi, OpEthApi, OpEthApiBuilder}; - -use crate::OP_NAME_CLIENT; -use alloy_rpc_types_engine::ClientVersionV1; -use op_alloy_rpc_types_engine::OpExecutionData; -use reth_chainspec::EthereumHardforks; -use reth_node_api::{ - AddOnsContext, EngineApiValidator, EngineTypes, FullNodeComponents, NodeTypes, -}; -use reth_node_builder::rpc::{EngineApiBuilder, PayloadValidatorBuilder}; -use reth_node_core::version::{version_metadata, CLIENT_CODE}; -use reth_optimism_rpc::engine::OP_ENGINE_CAPABILITIES; -use reth_payload_builder::PayloadStore; -use reth_rpc_engine_api::{EngineApi, EngineCapabilities}; - -/// Builder for basic [`OpEngineApi`] implementation. -#[derive(Debug, Default, Clone)] -pub struct OpEngineApiBuilder { - engine_validator_builder: EV, -} - -impl EngineApiBuilder for OpEngineApiBuilder -where - N: FullNodeComponents< - Types: NodeTypes< - ChainSpec: EthereumHardforks, - Payload: EngineTypes, - >, - >, - EV: PayloadValidatorBuilder, - EV::Validator: EngineApiValidator<::Payload>, -{ - type EngineApi = OpEngineApi< - N::Provider, - ::Payload, - N::Pool, - EV::Validator, - ::ChainSpec, - >; - - async fn build_engine_api(self, ctx: &AddOnsContext<'_, N>) -> eyre::Result { - let Self { engine_validator_builder } = self; - - let engine_validator = engine_validator_builder.build(ctx).await?; - let client = ClientVersionV1 { - code: CLIENT_CODE, - name: OP_NAME_CLIENT.to_string(), - version: version_metadata().cargo_pkg_version.to_string(), - commit: version_metadata().vergen_git_sha.to_string(), - }; - let inner = EngineApi::new( - ctx.node.provider().clone(), - ctx.config.chain.clone(), - ctx.beacon_engine_handle.clone(), - PayloadStore::new(ctx.node.payload_builder_handle().clone()), - ctx.node.pool().clone(), - Box::new(ctx.node.task_executor().clone()), - client, - EngineCapabilities::new(OP_ENGINE_CAPABILITIES.iter().copied()), - engine_validator, - ctx.config.engine.accept_execution_requests_hash, - 
ctx.node.network().clone(), - ); - - Ok(OpEngineApi::new(inner)) - } -} diff --git a/op-reth/crates/node/src/utils.rs b/op-reth/crates/node/src/utils.rs deleted file mode 100644 index 42104c9df73..00000000000 --- a/op-reth/crates/node/src/utils.rs +++ /dev/null @@ -1,74 +0,0 @@ -use crate::{OpBuiltPayload, OpNode as OtherOpNode, OpPayloadBuilderAttributes}; -use alloy_genesis::Genesis; -use alloy_primitives::{Address, B256}; -use alloy_rpc_types_engine::PayloadAttributes; -use reth_e2e_test_utils::{ - transaction::TransactionTestContext, wallet::Wallet, NodeHelperType, TmpDB, -}; -use reth_node_api::NodeTypesWithDBAdapter; -use reth_optimism_chainspec::OpChainSpecBuilder; -use reth_payload_builder::EthPayloadBuilderAttributes; -use reth_provider::providers::BlockchainProvider; -use reth_tasks::TaskManager; -use std::sync::Arc; -use tokio::sync::Mutex; - -/// Optimism Node Helper type -pub(crate) type OpNode = - NodeHelperType>>; - -/// Creates the initial setup with `num_nodes` of the node config, started and connected. -pub async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskManager, Wallet)> { - let genesis: Genesis = - serde_json::from_str(include_str!("../tests/assets/genesis.json")).unwrap(); - reth_e2e_test_utils::setup_engine( - num_nodes, - Arc::new(OpChainSpecBuilder::base_mainnet().genesis(genesis).ecotone_activated().build()), - false, - Default::default(), - optimism_payload_attributes, - ) - .await -} - -/// Advance the chain with sequential payloads returning them in the end. 
-pub async fn advance_chain( - length: usize, - node: &mut OpNode, - wallet: Arc>, -) -> eyre::Result> { - node.advance(length as u64, |_| { - let wallet = wallet.clone(); - Box::pin(async move { - let mut wallet = wallet.lock().await; - let tx_fut = TransactionTestContext::optimism_l1_block_info_tx( - wallet.chain_id, - wallet.inner.clone(), - wallet.inner_nonce, - ); - wallet.inner_nonce += 1; - tx_fut.await - }) - }) - .await -} - -/// Helper function to create a new eth payload attributes -pub fn optimism_payload_attributes(timestamp: u64) -> OpPayloadBuilderAttributes { - let attributes = PayloadAttributes { - timestamp, - prev_randao: B256::ZERO, - suggested_fee_recipient: Address::ZERO, - withdrawals: Some(vec![]), - parent_beacon_block_root: Some(B256::ZERO), - }; - - OpPayloadBuilderAttributes { - payload_attributes: EthPayloadBuilderAttributes::new(B256::ZERO, attributes), - transactions: vec![], - no_tx_pool: false, - gas_limit: Some(30_000_000), - eip_1559_params: None, - min_base_fee: None, - } -} diff --git a/op-reth/crates/node/tests/it/builder.rs b/op-reth/crates/node/tests/it/builder.rs deleted file mode 100644 index 3d4eda33f75..00000000000 --- a/op-reth/crates/node/tests/it/builder.rs +++ /dev/null @@ -1,169 +0,0 @@ -//! Node builder setup tests. 
- -use alloy_primitives::{address, Bytes}; -use core::marker::PhantomData; -use op_revm::{ - precompiles::OpPrecompiles, OpContext, OpHaltReason, OpSpecId, OpTransaction, - OpTransactionError, -}; -use reth_db::test_utils::create_test_rw_db; -use reth_evm::{precompiles::PrecompilesMap, Database, Evm, EvmEnv, EvmFactory}; -use reth_node_api::{FullNodeComponents, NodeTypesWithDBAdapter}; -use reth_node_builder::{ - components::ExecutorBuilder, BuilderContext, FullNodeTypes, Node, NodeBuilder, NodeConfig, - NodeTypes, -}; -use reth_optimism_chainspec::{OpChainSpec, BASE_MAINNET, OP_SEPOLIA}; -use reth_optimism_evm::{OpBlockExecutorFactory, OpEvm, OpEvmFactory, OpRethReceiptBuilder}; -use reth_optimism_node::{args::RollupArgs, OpEvmConfig, OpExecutorBuilder, OpNode}; -use reth_optimism_primitives::OpPrimitives; -use reth_provider::providers::BlockchainProvider; -use revm::{ - context::{BlockEnv, ContextTr, TxEnv}, - context_interface::result::EVMError, - inspector::NoOpInspector, - interpreter::interpreter::EthInterpreter, - precompile::{Precompile, PrecompileId, PrecompileOutput, PrecompileResult, Precompiles}, - Inspector, -}; -use std::sync::OnceLock; - -#[test] -fn test_basic_setup() { - // parse CLI -> config - let config = NodeConfig::new(BASE_MAINNET.clone()); - let db = create_test_rw_db(); - let args = RollupArgs::default(); - let op_node = OpNode::new(args); - let _builder = NodeBuilder::new(config) - .with_database(db) - .with_types_and_provider::>>() - .with_components(op_node.components()) - .with_add_ons(op_node.add_ons()) - .on_component_initialized(move |ctx| { - let _provider = ctx.provider(); - Ok(()) - }) - .on_node_started(|_full_node| Ok(())) - .on_rpc_started(|_ctx, handles| { - let _client = handles.rpc.http_client(); - Ok(()) - }) - .extend_rpc_modules(|ctx| { - let _ = ctx.config(); - let _ = ctx.node().provider(); - - Ok(()) - }) - .check_launch(); -} - -#[test] -fn test_setup_custom_precompiles() { - /// Unichain custom precompiles. 
- struct UniPrecompiles; - - impl UniPrecompiles { - /// Returns map of precompiles for Unichain. - fn precompiles(spec_id: OpSpecId) -> PrecompilesMap { - static INSTANCE: OnceLock = OnceLock::new(); - - PrecompilesMap::from_static(INSTANCE.get_or_init(|| { - let mut precompiles = OpPrecompiles::new_with_spec(spec_id).precompiles().clone(); - // Custom precompile. - let precompile = Precompile::new( - PrecompileId::custom("custom"), - address!("0x0000000000000000000000000000000000756e69"), - |_, _| PrecompileResult::Ok(PrecompileOutput::new(0, Bytes::new())), - ); - precompiles.extend([precompile]); - precompiles - })) - } - } - - /// Builds Unichain EVM configuration. - #[derive(Clone, Debug)] - struct UniEvmFactory; - - impl EvmFactory for UniEvmFactory { - type Evm>> = OpEvm; - type Context = OpContext; - type Tx = OpTransaction; - type Error = - EVMError; - type HaltReason = OpHaltReason; - type Spec = OpSpecId; - type BlockEnv = BlockEnv; - type Precompiles = PrecompilesMap; - - fn create_evm( - &self, - db: DB, - input: EvmEnv, - ) -> Self::Evm { - let mut op_evm = OpEvmFactory::default().create_evm(db, input); - *op_evm.components_mut().2 = UniPrecompiles::precompiles(*op_evm.ctx().cfg().spec()); - - op_evm - } - - fn create_evm_with_inspector< - DB: Database, - I: Inspector, EthInterpreter>, - >( - &self, - db: DB, - input: EvmEnv, - inspector: I, - ) -> Self::Evm { - let mut op_evm = - OpEvmFactory::default().create_evm_with_inspector(db, input, inspector); - *op_evm.components_mut().2 = UniPrecompiles::precompiles(*op_evm.ctx().cfg().spec()); - - op_evm - } - } - - /// Unichain executor builder. 
- struct UniExecutorBuilder; - - impl ExecutorBuilder for UniExecutorBuilder - where - Node: FullNodeTypes>, - { - type EVM = OpEvmConfig< - OpChainSpec, - ::Primitives, - OpRethReceiptBuilder, - UniEvmFactory, - >; - - async fn build_evm(self, ctx: &BuilderContext) -> eyre::Result { - let OpEvmConfig { executor_factory, block_assembler, _pd: _ } = - OpExecutorBuilder::default().build_evm(ctx).await?; - let uni_executor_factory = OpBlockExecutorFactory::new( - *executor_factory.receipt_builder(), - ctx.chain_spec(), - UniEvmFactory, - ); - let uni_evm_config = OpEvmConfig { - executor_factory: uni_executor_factory, - block_assembler, - _pd: PhantomData, - }; - Ok(uni_evm_config) - } - } - - NodeBuilder::new(NodeConfig::new(OP_SEPOLIA.clone())) - .with_database(create_test_rw_db()) - .with_types::() - .with_components( - OpNode::default() - .components() - // Custom EVM configuration - .executor(UniExecutorBuilder), - ) - .check_launch(); -} diff --git a/op-reth/crates/node/tests/it/rpc.rs b/op-reth/crates/node/tests/it/rpc.rs deleted file mode 100644 index 8869975ea9c..00000000000 --- a/op-reth/crates/node/tests/it/rpc.rs +++ /dev/null @@ -1,41 +0,0 @@ -//! RPC integration tests. 
- -use reth_network::types::NatResolver; -use reth_node_builder::{NodeBuilder, NodeHandle}; -use reth_node_core::{ - args::{NetworkArgs, RpcServerArgs}, - node_config::NodeConfig, -}; -use reth_optimism_chainspec::BASE_MAINNET; -use reth_optimism_node::OpNode; -use reth_rpc_api::servers::AdminApiServer; -use reth_tasks::TaskManager; - -// -#[tokio::test] -async fn test_admin_external_ip() -> eyre::Result<()> { - reth_tracing::init_test_tracing(); - - let exec = TaskManager::current(); - let exec = exec.executor(); - - let external_ip = "10.64.128.71".parse().unwrap(); - // Node setup - let node_config = NodeConfig::test() - .map_chain(BASE_MAINNET.clone()) - .with_network( - NetworkArgs::default().with_nat_resolver(NatResolver::ExternalIp(external_ip)), - ) - .with_rpc(RpcServerArgs::default().with_unused_ports().with_http()); - - let NodeHandle { node, node_exit_future: _ } = - NodeBuilder::new(node_config).testing_node(exec).node(OpNode::default()).launch().await?; - - let api = node.add_ons_handle.admin_api(); - - let info = api.node_info().await.unwrap(); - - assert_eq!(info.ip, external_ip); - - Ok(()) -} diff --git a/op-reth/crates/payload/Cargo.toml b/op-reth/crates/payload/Cargo.toml deleted file mode 100644 index 0674ed7cf73..00000000000 --- a/op-reth/crates/payload/Cargo.toml +++ /dev/null @@ -1,54 +0,0 @@ -[package] -name = "reth-optimism-payload-builder" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "A payload builder for op-reth that builds optimistic payloads." 
- -[lints] -workspace = true - -[dependencies] -# reth -reth-chainspec.workspace = true -reth-primitives-traits.workspace = true -reth-revm = { workspace = true, features = ["witness"] } -reth-transaction-pool.workspace = true -reth-storage-api.workspace = true -reth-evm.workspace = true -reth-execution-types.workspace = true -reth-payload-builder.workspace = true -reth-payload-builder-primitives.workspace = true -reth-payload-util.workspace = true -reth-payload-primitives = { workspace = true, features = ["op"] } -reth-basic-payload-builder.workspace = true -reth-payload-validator.workspace = true - -# op-reth -reth-optimism-evm.workspace = true -reth-optimism-forks.workspace = true -reth-optimism-primitives.workspace = true -reth-optimism-txpool.workspace = true - -# ethereum -revm.workspace = true -alloy-eips.workspace = true -alloy-primitives.workspace = true -alloy-rlp.workspace = true -op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } -op-alloy-consensus.workspace = true -alloy-rpc-types-engine.workspace = true -alloy-rpc-types-debug.workspace = true -alloy-consensus.workspace = true -alloy-evm.workspace = true - -# misc -derive_more.workspace = true -tracing.workspace = true -thiserror.workspace = true -sha2.workspace = true -serde.workspace = true -either.workspace = true diff --git a/op-reth/crates/payload/src/builder.rs b/op-reth/crates/payload/src/builder.rs deleted file mode 100644 index 8503e020a1c..00000000000 --- a/op-reth/crates/payload/src/builder.rs +++ /dev/null @@ -1,775 +0,0 @@ -//! Optimism payload builder implementation. 
-use crate::{ - config::OpBuilderConfig, error::OpPayloadBuilderError, payload::OpBuiltPayload, OpAttributes, - OpPayloadBuilderAttributes, OpPayloadPrimitives, -}; -use alloy_consensus::{BlockHeader, Transaction, Typed2718}; -use alloy_evm::Evm as AlloyEvm; -use alloy_primitives::{B256, U256}; -use alloy_rpc_types_debug::ExecutionWitness; -use alloy_rpc_types_engine::PayloadId; -use reth_basic_payload_builder::*; -use reth_chainspec::{ChainSpecProvider, EthChainSpec}; -use reth_evm::{ - block::BlockExecutorFor, - execute::{ - BlockBuilder, BlockBuilderOutcome, BlockExecutionError, BlockExecutor, BlockValidationError, - }, - op_revm::{constants::L1_BLOCK_CONTRACT, L1BlockInfo}, - ConfigureEvm, Database, -}; -use reth_execution_types::BlockExecutionOutput; -use reth_optimism_forks::OpHardforks; -use reth_optimism_primitives::{transaction::OpTransaction, L2_TO_L1_MESSAGE_PASSER_ADDRESS}; -use reth_optimism_txpool::{ - estimated_da_size::DataAvailabilitySized, - interop::{is_valid_interop, MaybeInteropTransaction}, - OpPooledTx, -}; -use reth_payload_builder_primitives::PayloadBuilderError; -use reth_payload_primitives::{BuildNextEnv, BuiltPayloadExecutedBlock, PayloadBuilderAttributes}; -use reth_payload_util::{BestPayloadTransactions, NoopPayloadTransactions, PayloadTransactions}; -use reth_primitives_traits::{ - HeaderTy, NodePrimitives, SealedHeader, SealedHeaderFor, SignedTransaction, TxTy, -}; -use reth_revm::{ - cancelled::CancelOnDrop, database::StateProviderDatabase, db::State, - witness::ExecutionWitnessRecord, -}; -use reth_storage_api::{errors::ProviderError, StateProvider, StateProviderFactory}; -use reth_transaction_pool::{BestTransactionsAttributes, PoolTransaction, TransactionPool}; -use revm::context::{Block, BlockEnv}; -use std::{marker::PhantomData, sync::Arc}; -use tracing::{debug, trace, warn}; - -/// Optimism's payload builder -#[derive(Debug)] -pub struct OpPayloadBuilder< - Pool, - Client, - Evm, - Txs = (), - Attrs = 
OpPayloadBuilderAttributes::Primitives>>, -> { - /// The rollup's compute pending block configuration option. - pub compute_pending_block: bool, - /// The type responsible for creating the evm. - pub evm_config: Evm, - /// Transaction pool. - pub pool: Pool, - /// Node client. - pub client: Client, - /// Settings for the builder, e.g. DA settings. - pub config: OpBuilderConfig, - /// The type responsible for yielding the best transactions for the payload if mempool - /// transactions are allowed. - pub best_transactions: Txs, - /// Marker for the payload attributes type. - _pd: PhantomData, -} - -impl Clone for OpPayloadBuilder -where - Pool: Clone, - Client: Clone, - Evm: ConfigureEvm, - Txs: Clone, -{ - fn clone(&self) -> Self { - Self { - evm_config: self.evm_config.clone(), - pool: self.pool.clone(), - client: self.client.clone(), - config: self.config.clone(), - best_transactions: self.best_transactions.clone(), - compute_pending_block: self.compute_pending_block, - _pd: PhantomData, - } - } -} - -impl OpPayloadBuilder { - /// `OpPayloadBuilder` constructor. - /// - /// Configures the builder with the default settings. - pub fn new(pool: Pool, client: Client, evm_config: Evm) -> Self { - Self::with_builder_config(pool, client, evm_config, Default::default()) - } - - /// Configures the builder with the given [`OpBuilderConfig`]. - pub const fn with_builder_config( - pool: Pool, - client: Client, - evm_config: Evm, - config: OpBuilderConfig, - ) -> Self { - Self { - pool, - client, - compute_pending_block: true, - evm_config, - config, - best_transactions: (), - _pd: PhantomData, - } - } -} - -impl OpPayloadBuilder { - /// Sets the rollup's compute pending block configuration option. - pub const fn set_compute_pending_block(mut self, compute_pending_block: bool) -> Self { - self.compute_pending_block = compute_pending_block; - self - } - - /// Configures the type responsible for yielding the transactions that should be included in the - /// payload. 
- pub fn with_transactions( - self, - best_transactions: T, - ) -> OpPayloadBuilder { - let Self { pool, client, compute_pending_block, evm_config, config, .. } = self; - OpPayloadBuilder { - pool, - client, - compute_pending_block, - evm_config, - best_transactions, - config, - _pd: PhantomData, - } - } - - /// Enables the rollup's compute pending block configuration option. - pub const fn compute_pending_block(self) -> Self { - self.set_compute_pending_block(true) - } - - /// Returns the rollup's compute pending block configuration option. - pub const fn is_compute_pending_block(&self) -> bool { - self.compute_pending_block - } -} - -impl OpPayloadBuilder -where - Pool: TransactionPool>, - Client: StateProviderFactory + ChainSpecProvider, - N: OpPayloadPrimitives, - Evm: ConfigureEvm< - Primitives = N, - NextBlockEnvCtx: BuildNextEnv, - >, - Attrs: OpAttributes>, -{ - /// Constructs an Optimism payload from the transactions sent via the - /// Payload attributes by the sequencer. If the `no_tx_pool` argument is passed in - /// the payload attributes, the transaction pool will be ignored and the only transactions - /// included in the payload will be those sent through the attributes. - /// - /// Given build arguments including an Optimism client, transaction pool, - /// and configuration, this function creates a transaction payload. Returns - /// a result indicating success with the payload or an error in case of failure. 
- fn build_payload<'a, Txs>( - &self, - args: BuildArguments>, - best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a, - ) -> Result>, PayloadBuilderError> - where - Txs: - PayloadTransactions + OpPooledTx>, - { - let BuildArguments { mut cached_reads, config, cancel, best_payload } = args; - - let ctx = OpPayloadBuilderCtx { - evm_config: self.evm_config.clone(), - builder_config: self.config.clone(), - chain_spec: self.client.chain_spec(), - config, - cancel, - best_payload, - }; - - let builder = OpBuilder::new(best); - - let state_provider = self.client.state_by_block_hash(ctx.parent().hash())?; - let state = StateProviderDatabase::new(&state_provider); - - if ctx.attributes().no_tx_pool() { - builder.build(state, &state_provider, ctx) - } else { - // sequencer mode we can reuse cachedreads from previous runs - builder.build(cached_reads.as_db_mut(state), &state_provider, ctx) - } - .map(|out| out.with_cached_reads(cached_reads)) - } - - /// Computes the witness for the payload. - pub fn payload_witness( - &self, - parent: SealedHeader, - attributes: Attrs::RpcPayloadAttributes, - ) -> Result - where - Attrs: PayloadBuilderAttributes, - { - let attributes = - Attrs::try_new(parent.hash(), attributes, 3).map_err(PayloadBuilderError::other)?; - - let config = PayloadConfig { parent_header: Arc::new(parent), attributes }; - let ctx = OpPayloadBuilderCtx { - evm_config: self.evm_config.clone(), - builder_config: self.config.clone(), - chain_spec: self.client.chain_spec(), - config, - cancel: Default::default(), - best_payload: Default::default(), - }; - - let state_provider = self.client.state_by_block_hash(ctx.parent().hash())?; - - let builder = OpBuilder::new(|_| NoopPayloadTransactions::::default()); - builder.witness(state_provider, &ctx) - } -} - -/// Implementation of the [`PayloadBuilder`] trait for [`OpPayloadBuilder`]. 
-impl PayloadBuilder - for OpPayloadBuilder -where - N: OpPayloadPrimitives, - Client: StateProviderFactory + ChainSpecProvider + Clone, - Pool: TransactionPool>, - Evm: ConfigureEvm< - Primitives = N, - NextBlockEnvCtx: BuildNextEnv, - >, - Txs: OpPayloadTransactions, - Attrs: OpAttributes, -{ - type Attributes = Attrs; - type BuiltPayload = OpBuiltPayload; - - fn try_build( - &self, - args: BuildArguments, - ) -> Result, PayloadBuilderError> { - let pool = self.pool.clone(); - self.build_payload(args, |attrs| self.best_transactions.best_transactions(pool, attrs)) - } - - fn on_missing_payload( - &self, - _args: BuildArguments, - ) -> MissingPayloadBehaviour { - // we want to await the job that's already in progress because that should be returned as - // is, there's no benefit in racing another job - MissingPayloadBehaviour::AwaitInProgress - } - - // NOTE: this should only be used for testing purposes because this doesn't have access to L1 - // system txs, hence on_missing_payload we return [MissingPayloadBehaviour::AwaitInProgress]. - fn build_empty_payload( - &self, - config: PayloadConfig, - ) -> Result { - let args = BuildArguments { - config, - cached_reads: Default::default(), - cancel: Default::default(), - best_payload: None, - }; - self.build_payload(args, |_| NoopPayloadTransactions::::default())? - .into_payload() - .ok_or_else(|| PayloadBuilderError::MissingPayload) - } -} - -/// The type that builds the payload. -/// -/// Payload building for optimism is composed of several steps. -/// The first steps are mandatory and defined by the protocol. -/// -/// 1. first all System calls are applied. -/// 2. After canyon the forced deployed `create2deployer` must be loaded -/// 3. all sequencer transactions are executed (part of the payload attributes) -/// -/// Depending on whether the node acts as a sequencer and is allowed to include additional -/// transactions (`no_tx_pool == false`): -/// 4. include additional transactions -/// -/// And finally -/// 5. 
build the block: compute all roots (txs, state) -#[derive(derive_more::Debug)] -pub struct OpBuilder<'a, Txs> { - /// Yields the best transaction to include if transactions from the mempool are allowed. - #[debug(skip)] - best: Box Txs + 'a>, -} - -impl<'a, Txs> OpBuilder<'a, Txs> { - /// Creates a new [`OpBuilder`]. - pub fn new(best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a) -> Self { - Self { best: Box::new(best) } - } -} - -impl OpBuilder<'_, Txs> { - /// Builds the payload on top of the state. - pub fn build( - self, - db: impl Database, - state_provider: impl StateProvider, - ctx: OpPayloadBuilderCtx, - ) -> Result>, PayloadBuilderError> - where - Evm: ConfigureEvm< - Primitives = N, - NextBlockEnvCtx: BuildNextEnv, - >, - ChainSpec: EthChainSpec + OpHardforks, - N: OpPayloadPrimitives, - Txs: - PayloadTransactions + OpPooledTx>, - Attrs: OpAttributes, - { - let Self { best } = self; - debug!(target: "payload_builder", id=%ctx.payload_id(), parent_header = ?ctx.parent().hash(), parent_number = ctx.parent().number(), "building new payload"); - - let mut db = State::builder().with_database(db).with_bundle_update().build(); - - // Load the L1 block contract into the database cache. If the L1 block contract is not - // pre-loaded the database will panic when trying to fetch the DA footprint gas - // scalar. - db.load_cache_account(L1_BLOCK_CONTRACT).map_err(BlockExecutionError::other)?; - - let mut builder = ctx.block_builder(&mut db)?; - - // 1. apply pre-execution changes - builder.apply_pre_execution_changes().map_err(|err| { - warn!(target: "payload_builder", %err, "failed to apply pre-execution changes"); - PayloadBuilderError::Internal(err.into()) - })?; - - // 2. execute sequencer transactions - let mut info = ctx.execute_sequencer_transactions(&mut builder)?; - - // 3. 
if mem pool transactions are requested we execute them - if !ctx.attributes().no_tx_pool() { - let best_txs = best(ctx.best_transaction_attributes(builder.evm_mut().block())); - if ctx.execute_best_transactions(&mut info, &mut builder, best_txs)?.is_some() { - return Ok(BuildOutcomeKind::Cancelled) - } - - // check if the new payload is even more valuable - if !ctx.is_better_payload(info.total_fees) { - // can skip building the block - return Ok(BuildOutcomeKind::Aborted { fees: info.total_fees }) - } - } - - let BlockBuilderOutcome { execution_result, hashed_state, trie_updates, block } = - builder.finish(state_provider)?; - - let sealed_block = Arc::new(block.sealed_block().clone()); - debug!(target: "payload_builder", id=%ctx.attributes().payload_id(), sealed_block_header = ?sealed_block.header(), "sealed built block"); - - let execution_outcome = - BlockExecutionOutput { state: db.take_bundle(), result: execution_result }; - - // create the executed block data - let executed: BuiltPayloadExecutedBlock = BuiltPayloadExecutedBlock { - recovered_block: Arc::new(block), - execution_output: Arc::new(execution_outcome), - // Keep unsorted; conversion to sorted happens when needed downstream - hashed_state: either::Either::Left(Arc::new(hashed_state)), - trie_updates: either::Either::Left(Arc::new(trie_updates)), - }; - - let no_tx_pool = ctx.attributes().no_tx_pool(); - - let payload = - OpBuiltPayload::new(ctx.payload_id(), sealed_block, info.total_fees, Some(executed)); - - if no_tx_pool { - // if `no_tx_pool` is set only transactions from the payload attributes will be included - // in the payload. In other words, the payload is deterministic and we can - // freeze it once we've successfully built it. - Ok(BuildOutcomeKind::Freeze(payload)) - } else { - Ok(BuildOutcomeKind::Better { payload }) - } - } - - /// Builds the payload and returns its [`ExecutionWitness`] based on the state after execution. 
- pub fn witness( - self, - state_provider: impl StateProvider, - ctx: &OpPayloadBuilderCtx, - ) -> Result - where - Evm: ConfigureEvm< - Primitives = N, - NextBlockEnvCtx: BuildNextEnv, - >, - ChainSpec: EthChainSpec + OpHardforks, - N: OpPayloadPrimitives, - Txs: PayloadTransactions>, - Attrs: OpAttributes, - { - let mut db = State::builder() - .with_database(StateProviderDatabase::new(&state_provider)) - .with_bundle_update() - .build(); - let mut builder = ctx.block_builder(&mut db)?; - - builder.apply_pre_execution_changes()?; - ctx.execute_sequencer_transactions(&mut builder)?; - builder.into_executor().apply_post_execution_changes()?; - - if ctx.chain_spec.is_isthmus_active_at_timestamp(ctx.attributes().timestamp()) { - // force load `L2ToL1MessagePasser.sol` so l2 withdrawals root can be computed even if - // no l2 withdrawals in block - _ = db.load_cache_account(L2_TO_L1_MESSAGE_PASSER_ADDRESS)?; - } - - let ExecutionWitnessRecord { hashed_state, codes, keys, lowest_block_number: _ } = - ExecutionWitnessRecord::from_executed_state(&db); - let state = state_provider.witness(Default::default(), hashed_state)?; - Ok(ExecutionWitness { - state: state.into_iter().collect(), - codes, - keys, - ..Default::default() - }) - } -} - -/// A type that returns a the [`PayloadTransactions`] that should be included in the pool. -pub trait OpPayloadTransactions: Clone + Send + Sync + Unpin + 'static { - /// Returns an iterator that yields the transaction in the order they should get included in the - /// new payload. 
- fn best_transactions>( - &self, - pool: Pool, - attr: BestTransactionsAttributes, - ) -> impl PayloadTransactions; -} - -impl OpPayloadTransactions for () { - fn best_transactions>( - &self, - pool: Pool, - attr: BestTransactionsAttributes, - ) -> impl PayloadTransactions { - BestPayloadTransactions::new(pool.best_transactions_with_attributes(attr)) - } -} - -/// Holds the state after execution -#[derive(Debug)] -pub struct ExecutedPayload { - /// Tracked execution info - pub info: ExecutionInfo, - /// Withdrawal hash. - pub withdrawals_root: Option, - /// The transaction receipts. - pub receipts: Vec, - /// The block env used during execution. - pub block_env: BlockEnv, -} - -/// This acts as the container for executed transactions and its byproducts (receipts, gas used) -#[derive(Default, Debug)] -pub struct ExecutionInfo { - /// All gas used so far - pub cumulative_gas_used: u64, - /// Estimated DA size - pub cumulative_da_bytes_used: u64, - /// Tracks fees from executed mempool transactions - pub total_fees: U256, -} - -impl ExecutionInfo { - /// Create a new instance with allocated slots. - pub const fn new() -> Self { - Self { cumulative_gas_used: 0, cumulative_da_bytes_used: 0, total_fees: U256::ZERO } - } - - /// Returns true if the transaction would exceed the block limits: - /// - block gas limit: ensures the transaction still fits into the block. - /// - tx DA limit: if configured, ensures the tx does not exceed the maximum allowed DA limit - /// per tx. - /// - block DA limit: if configured, ensures the transaction's DA size does not exceed the - /// maximum allowed DA limit per block. 
- pub fn is_tx_over_limits( - &self, - tx_da_size: u64, - block_gas_limit: u64, - tx_data_limit: Option, - block_data_limit: Option, - tx_gas_limit: u64, - da_footprint_gas_scalar: Option, - ) -> bool { - if tx_data_limit.is_some_and(|da_limit| tx_da_size > da_limit) { - return true; - } - - let total_da_bytes_used = self.cumulative_da_bytes_used.saturating_add(tx_da_size); - - if block_data_limit.is_some_and(|da_limit| total_da_bytes_used > da_limit) { - return true; - } - - // Post Jovian: the tx DA footprint must be less than the block gas limit - if let Some(da_footprint_gas_scalar) = da_footprint_gas_scalar { - let tx_da_footprint = - total_da_bytes_used.saturating_mul(da_footprint_gas_scalar as u64); - if tx_da_footprint > block_gas_limit { - return true; - } - } - - self.cumulative_gas_used + tx_gas_limit > block_gas_limit - } -} - -/// Container type that holds all necessities to build a new payload. -#[derive(derive_more::Debug)] -pub struct OpPayloadBuilderCtx< - Evm: ConfigureEvm, - ChainSpec, - Attrs = OpPayloadBuilderAttributes::Primitives>>, -> { - /// The type that knows how to perform system calls and configure the evm. - pub evm_config: Evm, - /// Additional config for the builder/sequencer, e.g. DA and gas limit - pub builder_config: OpBuilderConfig, - /// The chainspec - pub chain_spec: Arc, - /// How to build the payload. - pub config: PayloadConfig>, - /// Marker to check whether the job has been cancelled. - pub cancel: CancelOnDrop, - /// The currently best payload. - pub best_payload: Option>, -} - -impl OpPayloadBuilderCtx -where - Evm: ConfigureEvm< - Primitives: OpPayloadPrimitives, - NextBlockEnvCtx: BuildNextEnv, ChainSpec>, - >, - ChainSpec: EthChainSpec + OpHardforks, - Attrs: OpAttributes>, -{ - /// Returns the parent block the payload will be build on. - pub fn parent(&self) -> &SealedHeaderFor { - self.config.parent_header.as_ref() - } - - /// Returns the builder attributes. 
- pub const fn attributes(&self) -> &Attrs { - &self.config.attributes - } - - /// Returns the current fee settings for transactions from the mempool - pub fn best_transaction_attributes(&self, block_env: impl Block) -> BestTransactionsAttributes { - BestTransactionsAttributes::new( - block_env.basefee(), - block_env.blob_gasprice().map(|p| p as u64), - ) - } - - /// Returns the unique id for this payload job. - pub fn payload_id(&self) -> PayloadId { - self.attributes().payload_id() - } - - /// Returns true if the fees are higher than the previous payload. - pub fn is_better_payload(&self, total_fees: U256) -> bool { - is_better_payload(self.best_payload.as_ref(), total_fees) - } - - /// Prepares a [`BlockBuilder`] for the next block. - pub fn block_builder<'a, DB: Database>( - &'a self, - db: &'a mut State, - ) -> Result< - impl BlockBuilder< - Primitives = Evm::Primitives, - Executor: BlockExecutorFor<'a, Evm::BlockExecutorFactory, DB>, - > + 'a, - PayloadBuilderError, - > { - self.evm_config - .builder_for_next_block( - db, - self.parent(), - Evm::NextBlockEnvCtx::build_next_env( - self.attributes(), - self.parent(), - self.chain_spec.as_ref(), - ) - .map_err(PayloadBuilderError::other)?, - ) - .map_err(PayloadBuilderError::other) - } - - /// Executes all sequencer transactions that are included in the payload attributes. - pub fn execute_sequencer_transactions( - &self, - builder: &mut impl BlockBuilder, - ) -> Result { - let mut info = ExecutionInfo::new(); - - for sequencer_tx in self.attributes().sequencer_transactions() { - // A sequencer's block should never contain blob transactions. - if sequencer_tx.value().is_eip4844() { - return Err(PayloadBuilderError::other( - OpPayloadBuilderError::BlobTransactionRejected, - )); - } - - // Convert the transaction to a [RecoveredTx]. This is - // purely for the purposes of utilizing the `evm_config.tx_env`` function. 
- // Deposit transactions do not have signatures, so if the tx is a deposit, this - // will just pull in its `from` address. - let sequencer_tx = sequencer_tx.value().try_clone_into_recovered().map_err(|_| { - PayloadBuilderError::other(OpPayloadBuilderError::TransactionEcRecoverFailed) - })?; - - let gas_used = match builder.execute_transaction(sequencer_tx.clone()) { - Ok(gas_used) => gas_used, - Err(BlockExecutionError::Validation(BlockValidationError::InvalidTx { - error, - .. - })) => { - trace!(target: "payload_builder", %error, ?sequencer_tx, "Error in sequencer transaction, skipping."); - continue - } - Err(err) => { - // this is an error that we should treat as fatal for this attempt - return Err(PayloadBuilderError::EvmExecutionError(Box::new(err))) - } - }; - - // add gas used by the transaction to cumulative gas used, before creating the receipt - info.cumulative_gas_used += gas_used; - } - - Ok(info) - } - - /// Executes the given best transactions and updates the execution info. - /// - /// Returns `Ok(Some(())` if the job was cancelled. - pub fn execute_best_transactions( - &self, - info: &mut ExecutionInfo, - builder: &mut Builder, - mut best_txs: impl PayloadTransactions< - Transaction: PoolTransaction> + OpPooledTx, - >, - ) -> Result, PayloadBuilderError> - where - Builder: BlockBuilder, - <::Evm as AlloyEvm>::DB: Database, - { - let mut block_gas_limit = builder.evm_mut().block().gas_limit(); - if let Some(gas_limit_config) = self.builder_config.gas_limit_config.gas_limit() { - // If a gas limit is configured, use that limit as target if it's smaller, otherwise use - // the block's actual gas limit. 
- block_gas_limit = gas_limit_config.min(block_gas_limit); - }; - let block_da_limit = self.builder_config.da_config.max_da_block_size(); - let tx_da_limit = self.builder_config.da_config.max_da_tx_size(); - let base_fee = builder.evm_mut().block().basefee(); - - while let Some(tx) = best_txs.next(()) { - let interop = tx.interop_deadline(); - let tx_da_size = tx.estimated_da_size(); - let tx = tx.into_consensus(); - - let da_footprint_gas_scalar = self - .chain_spec - .is_jovian_active_at_timestamp(self.attributes().timestamp()) - .then_some( - L1BlockInfo::fetch_da_footprint_gas_scalar(builder.evm_mut().db_mut()).expect( - "DA footprint should always be available from the database post jovian", - ), - ); - - if info.is_tx_over_limits( - tx_da_size, - block_gas_limit, - tx_da_limit, - block_da_limit, - tx.gas_limit(), - da_footprint_gas_scalar, - ) { - // we can't fit this transaction into the block, so we need to mark it as - // invalid which also removes all dependent transaction from - // the iterator before we can continue - best_txs.mark_invalid(tx.signer(), tx.nonce()); - continue - } - - // A sequencer's block should never contain blob or deposit transactions from the pool. - if tx.is_eip4844() || tx.is_deposit() { - best_txs.mark_invalid(tx.signer(), tx.nonce()); - continue - } - - // We skip invalid cross chain txs, they would be removed on the next block update in - // the maintenance job - if let Some(interop) = interop && - !is_valid_interop(interop, self.config.attributes.timestamp()) - { - best_txs.mark_invalid(tx.signer(), tx.nonce()); - continue - } - // check if the job was cancelled, if so we can exit early - if self.cancel.is_cancelled() { - return Ok(Some(())) - } - - let gas_used = match builder.execute_transaction(tx.clone()) { - Ok(gas_used) => gas_used, - Err(BlockExecutionError::Validation(BlockValidationError::InvalidTx { - error, - .. 
- })) => { - if error.is_nonce_too_low() { - // if the nonce is too low, we can skip this transaction - trace!(target: "payload_builder", %error, ?tx, "skipping nonce too low transaction"); - } else { - // if the transaction is invalid, we can skip it and all of its - // descendants - trace!(target: "payload_builder", %error, ?tx, "skipping invalid transaction and its descendants"); - best_txs.mark_invalid(tx.signer(), tx.nonce()); - } - continue - } - Err(err) => { - // this is an error that we should treat as fatal for this attempt - return Err(PayloadBuilderError::EvmExecutionError(Box::new(err))) - } - }; - - // add gas used by the transaction to cumulative gas used, before creating the - // receipt - info.cumulative_gas_used += gas_used; - info.cumulative_da_bytes_used += tx_da_size; - - // update and add to total fees - let miner_fee = tx - .effective_tip_per_gas(base_fee) - .expect("fee is always valid; execution succeeded"); - info.total_fees += U256::from(miner_fee) * U256::from(gas_used); - } - - Ok(None) - } -} diff --git a/op-reth/crates/payload/src/config.rs b/op-reth/crates/payload/src/config.rs deleted file mode 100644 index c79ee0ece4b..00000000000 --- a/op-reth/crates/payload/src/config.rs +++ /dev/null @@ -1,171 +0,0 @@ -//! Additional configuration for the OP builder - -use std::sync::{atomic::AtomicU64, Arc}; - -/// Settings for the OP builder. -#[derive(Debug, Clone, Default)] -pub struct OpBuilderConfig { - /// Data availability configuration for the OP builder. - pub da_config: OpDAConfig, - /// Gas limit configuration for the OP builder. - pub gas_limit_config: OpGasLimitConfig, -} - -impl OpBuilderConfig { - /// Creates a new OP builder configuration with the given data availability configuration. - pub const fn new(da_config: OpDAConfig, gas_limit_config: OpGasLimitConfig) -> Self { - Self { da_config, gas_limit_config } - } - - /// Returns the Data Availability configuration for the OP builder, if it has configured - /// constraints. 
- pub fn constrained_da_config(&self) -> Option<&OpDAConfig> { - if self.da_config.is_empty() { - None - } else { - Some(&self.da_config) - } - } -} - -/// Contains the Data Availability configuration for the OP builder. -/// -/// This type is shareable and can be used to update the DA configuration for the OP payload -/// builder. -#[derive(Debug, Clone, Default)] -pub struct OpDAConfig { - inner: Arc, -} - -impl OpDAConfig { - /// Creates a new Data Availability configuration with the given maximum sizes. - pub fn new(max_da_tx_size: u64, max_da_block_size: u64) -> Self { - let this = Self::default(); - this.set_max_da_size(max_da_tx_size, max_da_block_size); - this - } - - /// Returns whether the configuration is empty. - pub fn is_empty(&self) -> bool { - self.max_da_tx_size().is_none() && self.max_da_block_size().is_none() - } - - /// Returns the max allowed data availability size per transactions, if any. - pub fn max_da_tx_size(&self) -> Option { - let val = self.inner.max_da_tx_size.load(std::sync::atomic::Ordering::Relaxed); - if val == 0 { - None - } else { - Some(val) - } - } - - /// Returns the max allowed data availability size per block, if any. - pub fn max_da_block_size(&self) -> Option { - let val = self.inner.max_da_block_size.load(std::sync::atomic::Ordering::Relaxed); - if val == 0 { - None - } else { - Some(val) - } - } - - /// Sets the maximum data availability size currently allowed for inclusion. 0 means no maximum. - pub fn set_max_da_size(&self, max_da_tx_size: u64, max_da_block_size: u64) { - self.set_max_tx_size(max_da_tx_size); - self.set_max_block_size(max_da_block_size); - } - - /// Sets the maximum data availability size per transaction currently allowed for inclusion. 0 - /// means no maximum. - pub fn set_max_tx_size(&self, max_da_tx_size: u64) { - self.inner.max_da_tx_size.store(max_da_tx_size, std::sync::atomic::Ordering::Relaxed); - } - - /// Sets the maximum data availability size per block currently allowed for inclusion. 
0 means - /// no maximum. - pub fn set_max_block_size(&self, max_da_block_size: u64) { - self.inner.max_da_block_size.store(max_da_block_size, std::sync::atomic::Ordering::Relaxed); - } -} - -#[derive(Debug, Default)] -struct OpDAConfigInner { - /// Don't include any transactions with data availability size larger than this in any built - /// block - /// - /// 0 means no limit. - max_da_tx_size: AtomicU64, - /// Maximum total data availability size for a block - /// - /// 0 means no limit. - max_da_block_size: AtomicU64, -} - -/// Contains the Gas Limit configuration for the OP builder. -/// -/// This type is shareable and can be used to update the Gas Limit configuration for the OP payload -/// builder. -#[derive(Debug, Clone, Default)] -pub struct OpGasLimitConfig { - /// Gas limit for a transaction - /// - /// 0 means use the default gas limit. - gas_limit: Arc, -} - -impl OpGasLimitConfig { - /// Creates a new Gas Limit configuration with the given maximum gas limit. - pub fn new(max_gas_limit: u64) -> Self { - let this = Self::default(); - this.set_gas_limit(max_gas_limit); - this - } - /// Returns the gas limit for a transaction, if any. - pub fn gas_limit(&self) -> Option { - let val = self.gas_limit.load(std::sync::atomic::Ordering::Relaxed); - if val == 0 { - None - } else { - Some(val) - } - } - /// Sets the gas limit for a transaction. 0 means use the default gas limit. 
- pub fn set_gas_limit(&self, gas_limit: u64) { - self.gas_limit.store(gas_limit, std::sync::atomic::Ordering::Relaxed); - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_da() { - let da = OpDAConfig::default(); - assert_eq!(da.max_da_tx_size(), None); - assert_eq!(da.max_da_block_size(), None); - da.set_max_da_size(100, 200); - assert_eq!(da.max_da_tx_size(), Some(100)); - assert_eq!(da.max_da_block_size(), Some(200)); - da.set_max_da_size(0, 0); - assert_eq!(da.max_da_tx_size(), None); - assert_eq!(da.max_da_block_size(), None); - } - - #[test] - fn test_da_constrained() { - let config = OpBuilderConfig::default(); - assert!(config.constrained_da_config().is_none()); - } - - #[test] - fn test_gas_limit() { - let gas_limit = OpGasLimitConfig::default(); - assert_eq!(gas_limit.gas_limit(), None); - gas_limit.set_gas_limit(50000); - assert_eq!(gas_limit.gas_limit(), Some(50000)); - gas_limit.set_gas_limit(0); - assert_eq!(gas_limit.gas_limit(), None); - } -} diff --git a/op-reth/crates/payload/src/lib.rs b/op-reth/crates/payload/src/lib.rs deleted file mode 100644 index 57f21ef967f..00000000000 --- a/op-reth/crates/payload/src/lib.rs +++ /dev/null @@ -1,56 +0,0 @@ -//! Optimism's payload builder implementation. 
- -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg))] -#![allow(clippy::useless_let_if_seq)] - -extern crate alloc; - -pub mod builder; -pub use builder::OpPayloadBuilder; -pub mod error; -pub mod payload; -use op_alloy_rpc_types_engine::OpExecutionData; -pub use payload::{ - payload_id_optimism, OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilderAttributes, -}; -mod traits; -use reth_optimism_primitives::OpPrimitives; -use reth_payload_primitives::{BuiltPayload, PayloadTypes}; -use reth_primitives_traits::{Block, NodePrimitives, SealedBlock}; -pub use traits::*; -pub mod validator; -pub use validator::OpExecutionPayloadValidator; - -pub mod config; - -/// ZST that aggregates Optimism [`PayloadTypes`]. -#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] -#[non_exhaustive] -pub struct OpPayloadTypes(core::marker::PhantomData); - -impl PayloadTypes for OpPayloadTypes -where - OpBuiltPayload: BuiltPayload, -{ - type ExecutionData = OpExecutionData; - type BuiltPayload = OpBuiltPayload; - type PayloadAttributes = OpPayloadAttributes; - type PayloadBuilderAttributes = OpPayloadBuilderAttributes; - - fn block_to_payload( - block: SealedBlock< - <::Primitives as NodePrimitives>::Block, - >, - ) -> Self::ExecutionData { - OpExecutionData::from_block_unchecked( - block.hash(), - &block.into_block().into_ethereum_block(), - ) - } -} diff --git a/op-reth/crates/payload/src/payload.rs b/op-reth/crates/payload/src/payload.rs deleted file mode 100644 index 3f7b3d401ec..00000000000 --- a/op-reth/crates/payload/src/payload.rs +++ /dev/null @@ -1,581 +0,0 @@ -//! 
Payload related types - -use std::{fmt::Debug, sync::Arc}; - -use alloy_consensus::{Block, BlockHeader}; -use alloy_eips::{ - eip1559::BaseFeeParams, eip2718::Decodable2718, eip4895::Withdrawals, eip7685::Requests, -}; -use alloy_primitives::{keccak256, Address, Bytes, B256, B64, U256}; -use alloy_rlp::Encodable; -use alloy_rpc_types_engine::{ - BlobsBundleV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadFieldV2, ExecutionPayloadV1, - ExecutionPayloadV3, PayloadId, -}; -use op_alloy_consensus::{encode_holocene_extra_data, encode_jovian_extra_data, EIP1559ParamError}; -use op_alloy_rpc_types_engine::{ - OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, -}; -use reth_chainspec::EthChainSpec; -use reth_optimism_evm::OpNextBlockEnvAttributes; -use reth_optimism_forks::OpHardforks; -use reth_payload_builder::{EthPayloadBuilderAttributes, PayloadBuilderError}; -use reth_payload_primitives::{ - BuildNextEnv, BuiltPayload, BuiltPayloadExecutedBlock, PayloadBuilderAttributes, -}; -use reth_primitives_traits::{ - NodePrimitives, SealedBlock, SealedHeader, SignedTransaction, WithEncoded, -}; - -/// Re-export for use in downstream arguments. -pub use op_alloy_rpc_types_engine::OpPayloadAttributes; -use reth_optimism_primitives::OpPrimitives; - -/// Optimism Payload Builder Attributes -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct OpPayloadBuilderAttributes { - /// Inner ethereum payload builder attributes - pub payload_attributes: EthPayloadBuilderAttributes, - /// `NoTxPool` option for the generated payload - pub no_tx_pool: bool, - /// Decoded transactions and the original EIP-2718 encoded bytes as received in the payload - /// attributes. 
- pub transactions: Vec>, - /// The gas limit for the generated payload - pub gas_limit: Option, - /// EIP-1559 parameters for the generated payload - pub eip_1559_params: Option, - /// Min base fee for the generated payload (only available post-Jovian) - pub min_base_fee: Option, -} - -impl Default for OpPayloadBuilderAttributes { - fn default() -> Self { - Self { - payload_attributes: Default::default(), - no_tx_pool: Default::default(), - gas_limit: Default::default(), - eip_1559_params: Default::default(), - transactions: Default::default(), - min_base_fee: Default::default(), - } - } -} - -impl OpPayloadBuilderAttributes { - /// Extracts the extra data parameters post-Holocene hardfork. - /// In Holocene, those parameters are the EIP-1559 base fee parameters. - pub fn get_holocene_extra_data( - &self, - default_base_fee_params: BaseFeeParams, - ) -> Result { - self.eip_1559_params - .map(|params| encode_holocene_extra_data(params, default_base_fee_params)) - .ok_or(EIP1559ParamError::NoEIP1559Params)? - } - - /// Extracts the extra data parameters post-Jovian hardfork. - /// Those parameters are the EIP-1559 parameters from Holocene and the minimum base fee. - pub fn get_jovian_extra_data( - &self, - default_base_fee_params: BaseFeeParams, - ) -> Result { - let min_base_fee = self.min_base_fee.ok_or(EIP1559ParamError::MinBaseFeeNotSet)?; - self.eip_1559_params - .map(|params| encode_jovian_extra_data(params, default_base_fee_params, min_base_fee)) - .ok_or(EIP1559ParamError::NoEIP1559Params)? - } -} - -impl PayloadBuilderAttributes - for OpPayloadBuilderAttributes -{ - type RpcPayloadAttributes = OpPayloadAttributes; - type Error = alloy_rlp::Error; - - /// Creates a new payload builder for the given parent block and the attributes. 
- /// - /// Derives the unique [`PayloadId`] for the given parent and attributes - fn try_new( - parent: B256, - attributes: OpPayloadAttributes, - version: u8, - ) -> Result { - let id = payload_id_optimism(&parent, &attributes, version); - - let transactions = attributes - .transactions - .unwrap_or_default() - .into_iter() - .map(|data| { - Decodable2718::decode_2718_exact(data.as_ref()).map(|tx| WithEncoded::new(data, tx)) - }) - .collect::>()?; - - let payload_attributes = EthPayloadBuilderAttributes { - id, - parent, - timestamp: attributes.payload_attributes.timestamp, - suggested_fee_recipient: attributes.payload_attributes.suggested_fee_recipient, - prev_randao: attributes.payload_attributes.prev_randao, - withdrawals: attributes.payload_attributes.withdrawals.unwrap_or_default().into(), - parent_beacon_block_root: attributes.payload_attributes.parent_beacon_block_root, - }; - - Ok(Self { - payload_attributes, - no_tx_pool: attributes.no_tx_pool.unwrap_or_default(), - transactions, - gas_limit: attributes.gas_limit, - eip_1559_params: attributes.eip_1559_params, - min_base_fee: attributes.min_base_fee, - }) - } - - fn payload_id(&self) -> PayloadId { - self.payload_attributes.id - } - - fn parent(&self) -> B256 { - self.payload_attributes.parent - } - - fn timestamp(&self) -> u64 { - self.payload_attributes.timestamp - } - - fn parent_beacon_block_root(&self) -> Option { - self.payload_attributes.parent_beacon_block_root - } - - fn suggested_fee_recipient(&self) -> Address { - self.payload_attributes.suggested_fee_recipient - } - - fn prev_randao(&self) -> B256 { - self.payload_attributes.prev_randao - } - - fn withdrawals(&self) -> &Withdrawals { - &self.payload_attributes.withdrawals - } -} - -impl From - for OpPayloadBuilderAttributes -{ - fn from(value: EthPayloadBuilderAttributes) -> Self { - Self { payload_attributes: value, ..Default::default() } - } -} - -/// Contains the built payload. 
-#[derive(Debug, Clone)] -pub struct OpBuiltPayload { - /// Identifier of the payload - pub(crate) id: PayloadId, - /// Sealed block - pub(crate) block: Arc>, - /// Block execution data for the payload, if any. - pub(crate) executed_block: Option>, - /// The fees of the block - pub(crate) fees: U256, -} - -// === impl BuiltPayload === - -impl OpBuiltPayload { - /// Initializes the payload with the given initial block. - pub const fn new( - id: PayloadId, - block: Arc>, - fees: U256, - executed_block: Option>, - ) -> Self { - Self { id, block, fees, executed_block } - } - - /// Returns the identifier of the payload. - pub const fn id(&self) -> PayloadId { - self.id - } - - /// Returns the built block(sealed) - pub fn block(&self) -> &SealedBlock { - &self.block - } - - /// Fees of the block - pub const fn fees(&self) -> U256 { - self.fees - } - - /// Converts the value into [`SealedBlock`]. - pub fn into_sealed_block(self) -> SealedBlock { - Arc::unwrap_or_clone(self.block) - } -} - -impl BuiltPayload for OpBuiltPayload { - type Primitives = N; - - fn block(&self) -> &SealedBlock { - self.block() - } - - fn fees(&self) -> U256 { - self.fees - } - - fn executed_block(&self) -> Option> { - self.executed_block.clone() - } - - fn requests(&self) -> Option { - None - } -} - -// V1 engine_getPayloadV1 response -impl From> for ExecutionPayloadV1 -where - T: SignedTransaction, - N: NodePrimitives>, -{ - fn from(value: OpBuiltPayload) -> Self { - Self::from_block_unchecked( - value.block().hash(), - &Arc::unwrap_or_clone(value.block).into_block(), - ) - } -} - -// V2 engine_getPayloadV2 response -impl From> for ExecutionPayloadEnvelopeV2 -where - T: SignedTransaction, - N: NodePrimitives>, -{ - fn from(value: OpBuiltPayload) -> Self { - let OpBuiltPayload { block, fees, .. 
} = value; - - Self { - block_value: fees, - execution_payload: ExecutionPayloadFieldV2::from_block_unchecked( - block.hash(), - &Arc::unwrap_or_clone(block).into_block(), - ), - } - } -} - -impl From> for OpExecutionPayloadEnvelopeV3 -where - T: SignedTransaction, - N: NodePrimitives>, -{ - fn from(value: OpBuiltPayload) -> Self { - let OpBuiltPayload { block, fees, .. } = value; - - let parent_beacon_block_root = block.parent_beacon_block_root.unwrap_or_default(); - - Self { - execution_payload: ExecutionPayloadV3::from_block_unchecked( - block.hash(), - &Arc::unwrap_or_clone(block).into_block(), - ), - block_value: fees, - // From the engine API spec: - // - // > Client software **MAY** use any heuristics to decide whether to set - // `shouldOverrideBuilder` flag or not. If client software does not implement any - // heuristic this flag **SHOULD** be set to `false`. - // - // Spec: - // - should_override_builder: false, - // No blobs for OP. - blobs_bundle: BlobsBundleV1 { blobs: vec![], commitments: vec![], proofs: vec![] }, - parent_beacon_block_root, - } - } -} - -impl From> for OpExecutionPayloadEnvelopeV4 -where - T: SignedTransaction, - N: NodePrimitives>, -{ - fn from(value: OpBuiltPayload) -> Self { - let OpBuiltPayload { block, fees, .. } = value; - - let parent_beacon_block_root = block.parent_beacon_block_root.unwrap_or_default(); - - let l2_withdrawals_root = block.withdrawals_root.unwrap_or_default(); - let payload_v3 = ExecutionPayloadV3::from_block_unchecked( - block.hash(), - &Arc::unwrap_or_clone(block).into_block(), - ); - - Self { - execution_payload: OpExecutionPayloadV4::from_v3_with_withdrawals_root( - payload_v3, - l2_withdrawals_root, - ), - block_value: fees, - // From the engine API spec: - // - // > Client software **MAY** use any heuristics to decide whether to set - // `shouldOverrideBuilder` flag or not. If client software does not implement any - // heuristic this flag **SHOULD** be set to `false`. 
- // - // Spec: - // - should_override_builder: false, - // No blobs for OP. - blobs_bundle: BlobsBundleV1 { blobs: vec![], commitments: vec![], proofs: vec![] }, - parent_beacon_block_root, - execution_requests: vec![], - } - } -} - -/// Generates the payload id for the configured payload from the [`OpPayloadAttributes`]. -/// -/// Returns an 8-byte identifier by hashing the payload components with sha256 hash. -/// -/// Note: This must be updated whenever the [`OpPayloadAttributes`] changes for a hardfork. -/// See also -pub fn payload_id_optimism( - parent: &B256, - attributes: &OpPayloadAttributes, - payload_version: u8, -) -> PayloadId { - use sha2::Digest; - let mut hasher = sha2::Sha256::new(); - hasher.update(parent.as_slice()); - hasher.update(&attributes.payload_attributes.timestamp.to_be_bytes()[..]); - hasher.update(attributes.payload_attributes.prev_randao.as_slice()); - hasher.update(attributes.payload_attributes.suggested_fee_recipient.as_slice()); - if let Some(withdrawals) = &attributes.payload_attributes.withdrawals { - let mut buf = Vec::new(); - withdrawals.encode(&mut buf); - hasher.update(buf); - } - - if let Some(parent_beacon_block) = attributes.payload_attributes.parent_beacon_block_root { - hasher.update(parent_beacon_block); - } - - let no_tx_pool = attributes.no_tx_pool.unwrap_or_default(); - if no_tx_pool || attributes.transactions.as_ref().is_some_and(|txs| !txs.is_empty()) { - hasher.update([no_tx_pool as u8]); - let txs_len = attributes.transactions.as_ref().map(|txs| txs.len()).unwrap_or_default(); - hasher.update(&txs_len.to_be_bytes()[..]); - if let Some(txs) = &attributes.transactions { - for tx in txs { - // we have to just hash the bytes here because otherwise we would need to decode - // the transactions here which really isn't ideal - let tx_hash = keccak256(tx); - // maybe we can try just taking the hash and not decoding - hasher.update(tx_hash) - } - } - } - - if let Some(gas_limit) = attributes.gas_limit { - 
hasher.update(gas_limit.to_be_bytes()); - } - - if let Some(eip_1559_params) = attributes.eip_1559_params { - hasher.update(eip_1559_params.as_slice()); - } - - if let Some(min_base_fee) = attributes.min_base_fee { - hasher.update(min_base_fee.to_be_bytes()); - } - - let mut out = hasher.finalize(); - out[0] = payload_version; - - #[allow(deprecated)] // generic-array 0.14 deprecated - PayloadId::new(out.as_slice()[..8].try_into().expect("sufficient length")) -} - -impl BuildNextEnv, H, ChainSpec> - for OpNextBlockEnvAttributes -where - H: BlockHeader, - T: SignedTransaction, - ChainSpec: EthChainSpec + OpHardforks, -{ - fn build_next_env( - attributes: &OpPayloadBuilderAttributes, - parent: &SealedHeader, - chain_spec: &ChainSpec, - ) -> Result { - let extra_data = if chain_spec.is_jovian_active_at_timestamp(attributes.timestamp()) { - attributes - .get_jovian_extra_data( - chain_spec.base_fee_params_at_timestamp(attributes.timestamp()), - ) - .map_err(PayloadBuilderError::other)? - } else if chain_spec.is_holocene_active_at_timestamp(attributes.timestamp()) { - attributes - .get_holocene_extra_data( - chain_spec.base_fee_params_at_timestamp(attributes.timestamp()), - ) - .map_err(PayloadBuilderError::other)? 
- } else { - Default::default() - }; - - Ok(Self { - timestamp: attributes.timestamp(), - suggested_fee_recipient: attributes.suggested_fee_recipient(), - prev_randao: attributes.prev_randao(), - gas_limit: attributes.gas_limit.unwrap_or_else(|| parent.gas_limit()), - parent_beacon_block_root: attributes.parent_beacon_block_root(), - extra_data, - }) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::OpPayloadAttributes; - use alloy_primitives::{address, b256, bytes, FixedBytes}; - use alloy_rpc_types_engine::PayloadAttributes; - use reth_optimism_primitives::OpTransactionSigned; - use reth_payload_primitives::EngineApiMessageVersion; - use std::str::FromStr; - - #[test] - fn test_payload_id_parity_op_geth() { - // INFO rollup_boost::server:received fork_choice_updated_v3 from builder and l2_client - // payload_id_builder="0x6ef26ca02318dcf9" payload_id_l2="0x03d2dae446d2a86a" - let expected = - PayloadId::new(FixedBytes::<8>::from_str("0x03d2dae446d2a86a").unwrap().into()); - let attrs = OpPayloadAttributes { - payload_attributes: PayloadAttributes { - timestamp: 1728933301, - prev_randao: b256!("0x9158595abbdab2c90635087619aa7042bbebe47642dfab3c9bfb934f6b082765"), - suggested_fee_recipient: address!("0x4200000000000000000000000000000000000011"), - withdrawals: Some([].into()), - parent_beacon_block_root: b256!("0x8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into(), - }, - transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), - no_tx_pool: None, - 
gas_limit: Some(30000000), - eip_1559_params: None, - min_base_fee: None, - }; - - // Reth's `PayloadId` should match op-geth's `PayloadId`. This fails - assert_eq!( - expected, - payload_id_optimism( - &b256!("0x3533bf30edaf9505d0810bf475cbe4e5f4b9889904b9845e83efdeab4e92eb1e"), - &attrs, - EngineApiMessageVersion::V3 as u8 - ) - ); - } - - #[test] - fn test_payload_id_parity_op_geth_jovian() { - // - let expected = - PayloadId::new(FixedBytes::<8>::from_str("0x046c65ffc4d659ec").unwrap().into()); - let attrs = OpPayloadAttributes { - payload_attributes: PayloadAttributes { - timestamp: 1728933301, - prev_randao: b256!("0x9158595abbdab2c90635087619aa7042bbebe47642dfab3c9bfb934f6b082765"), - suggested_fee_recipient: address!("0x4200000000000000000000000000000000000011"), - withdrawals: Some([].into()), - parent_beacon_block_root: b256!("0x8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into(), - }, - transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), - no_tx_pool: None, - gas_limit: Some(30000000), - eip_1559_params: None, - min_base_fee: Some(100), - }; - - // Reth's `PayloadId` should match op-geth's `PayloadId`. 
This fails - assert_eq!( - expected, - payload_id_optimism( - &b256!("0x3533bf30edaf9505d0810bf475cbe4e5f4b9889904b9845e83efdeab4e92eb1e"), - &attrs, - EngineApiMessageVersion::V4 as u8 - ) - ); - } - - #[test] - fn test_get_extra_data_post_holocene() { - let attributes: OpPayloadBuilderAttributes = - OpPayloadBuilderAttributes { - eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), - ..Default::default() - }; - let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); - assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 8, 0, 0, 0, 8])); - } - - #[test] - fn test_get_extra_data_post_holocene_default() { - let attributes: OpPayloadBuilderAttributes = - OpPayloadBuilderAttributes { eip_1559_params: Some(B64::ZERO), ..Default::default() }; - let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); - assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 80, 0, 0, 0, 60])); - } - - #[test] - fn test_get_extra_data_post_jovian() { - let attributes: OpPayloadBuilderAttributes = - OpPayloadBuilderAttributes { - eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), - min_base_fee: Some(10), - ..Default::default() - }; - let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); - assert_eq!( - extra_data.unwrap(), - // Version byte is 1 for Jovian, then holocene payload followed by 8 bytes for the - // minimum base fee - Bytes::copy_from_slice(&[1, 0, 0, 0, 8, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 10]) - ); - } - - #[test] - fn test_get_extra_data_post_jovian_default() { - let attributes: OpPayloadBuilderAttributes = - OpPayloadBuilderAttributes { - eip_1559_params: Some(B64::ZERO), - min_base_fee: Some(10), - ..Default::default() - }; - let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); - assert_eq!( - extra_data.unwrap(), - // Version byte is 1 for Jovian, then holocene payload followed by 8 bytes for the - // minimum 
base fee - Bytes::copy_from_slice(&[1, 0, 0, 0, 80, 0, 0, 0, 60, 0, 0, 0, 0, 0, 0, 0, 10]) - ); - } - - #[test] - fn test_get_extra_data_post_jovian_no_base_fee() { - let attributes: OpPayloadBuilderAttributes = - OpPayloadBuilderAttributes { - eip_1559_params: Some(B64::ZERO), - min_base_fee: None, - ..Default::default() - }; - let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); - assert_eq!(extra_data.unwrap_err(), EIP1559ParamError::MinBaseFeeNotSet); - } -} diff --git a/op-reth/crates/payload/src/traits.rs b/op-reth/crates/payload/src/traits.rs deleted file mode 100644 index 485b8d1df9e..00000000000 --- a/op-reth/crates/payload/src/traits.rs +++ /dev/null @@ -1,60 +0,0 @@ -use alloy_consensus::BlockBody; -use reth_optimism_primitives::{transaction::OpTransaction, DepositReceipt}; -use reth_payload_primitives::PayloadBuilderAttributes; -use reth_primitives_traits::{FullBlockHeader, NodePrimitives, SignedTransaction, WithEncoded}; - -use crate::OpPayloadBuilderAttributes; - -/// Helper trait to encapsulate common bounds on [`NodePrimitives`] for OP payload builder. -pub trait OpPayloadPrimitives: - NodePrimitives< - Receipt: DepositReceipt, - SignedTx = Self::_TX, - BlockBody = BlockBody, - BlockHeader = Self::_Header, -> -{ - /// Helper AT to bound [`NodePrimitives::Block`] type without causing bound cycle. - type _TX: SignedTransaction + OpTransaction; - /// Helper AT to bound [`NodePrimitives::Block`] type without causing bound cycle. - type _Header: FullBlockHeader; -} - -impl OpPayloadPrimitives for T -where - Tx: SignedTransaction + OpTransaction, - T: NodePrimitives< - SignedTx = Tx, - Receipt: DepositReceipt, - BlockBody = BlockBody, - BlockHeader = Header, - >, - Header: FullBlockHeader, -{ - type _TX = Tx; - type _Header = Header; -} - -/// Attributes for the OP payload builder. -pub trait OpAttributes: PayloadBuilderAttributes { - /// Primitive transaction type. 
- type Transaction: SignedTransaction; - - /// Whether to use the transaction pool for the payload. - fn no_tx_pool(&self) -> bool; - - /// Sequencer transactions to include in the payload. - fn sequencer_transactions(&self) -> &[WithEncoded]; -} - -impl OpAttributes for OpPayloadBuilderAttributes { - type Transaction = T; - - fn no_tx_pool(&self) -> bool { - self.no_tx_pool - } - - fn sequencer_transactions(&self) -> &[WithEncoded] { - &self.transactions - } -} diff --git a/op-reth/crates/payload/src/validator.rs b/op-reth/crates/payload/src/validator.rs deleted file mode 100644 index fa0d610469c..00000000000 --- a/op-reth/crates/payload/src/validator.rs +++ /dev/null @@ -1,100 +0,0 @@ -//! Validates execution payload wrt Optimism consensus rules - -use alloc::sync::Arc; -use alloy_consensus::Block; -use alloy_rpc_types_engine::PayloadError; -use derive_more::{Constructor, Deref}; -use op_alloy_rpc_types_engine::{OpExecutionData, OpPayloadError}; -use reth_optimism_forks::OpHardforks; -use reth_payload_validator::{cancun, prague, shanghai}; -use reth_primitives_traits::{Block as _, SealedBlock, SignedTransaction}; - -/// Execution payload validator. -#[derive(Clone, Debug, Deref, Constructor)] -pub struct OpExecutionPayloadValidator { - /// Chain spec to validate against. - #[deref] - inner: Arc, -} - -impl OpExecutionPayloadValidator -where - ChainSpec: OpHardforks, -{ - /// Returns reference to chain spec. - pub fn chain_spec(&self) -> &ChainSpec { - &self.inner - } - - /// Ensures that the given payload does not violate any consensus rules that concern the block's - /// layout. - /// - /// See also [`ensure_well_formed_payload`]. 
- pub fn ensure_well_formed_payload( - &self, - payload: OpExecutionData, - ) -> Result>, OpPayloadError> { - ensure_well_formed_payload(self.chain_spec(), payload) - } -} - -/// Ensures that the given payload does not violate any consensus rules that concern the block's -/// layout, like: -/// - missing or invalid base fee -/// - invalid extra data -/// - invalid transactions -/// - incorrect hash -/// - block contains blob transactions or blob versioned hashes -/// - block contains l1 withdrawals -/// -/// The checks are done in the order that conforms with the engine-API specification. -/// -/// This is intended to be invoked after receiving the payload from the CLI. -/// The additional fields, starting with [`MaybeCancunPayloadFields`](alloy_rpc_types_engine::MaybeCancunPayloadFields), are not part of the payload, but are additional fields starting in the `engine_newPayloadV3` RPC call, See also -/// -/// If the cancun fields are provided this also validates that the versioned hashes in the block -/// are empty as well as those passed in the sidecar. If the payload fields are not provided. -/// -/// Validation according to specs . -pub fn ensure_well_formed_payload( - chain_spec: ChainSpec, - payload: OpExecutionData, -) -> Result>, OpPayloadError> -where - ChainSpec: OpHardforks, - T: SignedTransaction, -{ - let OpExecutionData { payload, sidecar } = payload; - - let expected_hash = payload.block_hash(); - - // First parse the block - let sealed_block = payload.try_into_block_with_sidecar(&sidecar)?.seal_slow(); - - // Ensure the hash included in the payload matches the block hash - if expected_hash != sealed_block.hash() { - return Err(PayloadError::BlockHash { - execution: sealed_block.hash(), - consensus: expected_hash, - })? 
- } - - shanghai::ensure_well_formed_fields( - sealed_block.body(), - chain_spec.is_shanghai_active_at_timestamp(sealed_block.timestamp), - )?; - - cancun::ensure_well_formed_header_and_sidecar_fields( - &sealed_block, - sidecar.ecotone(), - chain_spec.is_cancun_active_at_timestamp(sealed_block.timestamp), - )?; - - prague::ensure_well_formed_fields( - sealed_block.body(), - sidecar.isthmus(), - chain_spec.is_prague_active_at_timestamp(sealed_block.timestamp), - )?; - - Ok(sealed_block) -} diff --git a/op-reth/crates/primitives/Cargo.toml b/op-reth/crates/primitives/Cargo.toml deleted file mode 100644 index ef83fe3ddbc..00000000000 --- a/op-reth/crates/primitives/Cargo.toml +++ /dev/null @@ -1,98 +0,0 @@ -[package] -name = "reth-optimism-primitives" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "OP primitive types" - -[lints] -workspace = true - -[dependencies] -# reth -reth-primitives-traits = { workspace = true, features = ["op"] } - -# ethereum -alloy-primitives.workspace = true -alloy-consensus.workspace = true -alloy-eips.workspace = true -alloy-rlp.workspace = true - -# op -op-alloy-consensus.workspace = true - -# codec -serde = { workspace = true, optional = true } -serde_with = { workspace = true, optional = true } - -[dev-dependencies] -reth-codecs = { workspace = true, features = ["test-utils", "op"] } - -bytes.workspace = true -modular-bitfield.workspace = true -reth-zstd-compressors.workspace = true -rand.workspace = true -arbitrary.workspace = true -rstest.workspace = true -serde_json.workspace = true -bincode.workspace = true - -proptest-arbitrary-interop.workspace = true -proptest.workspace = true -rand_08.workspace = true -secp256k1 = { workspace = true, features = ["rand"] } - -[features] -default = ["std"] -std = [ - "reth-primitives-traits/std", - "alloy-consensus/std", - "alloy-primitives/std", - "serde?/std", - 
"alloy-rlp/std", - "op-alloy-consensus/std", - "serde_json/std", - "serde_with?/std", - "alloy-eips/std", - "secp256k1/std", - "bytes/std", - "reth-zstd-compressors/std", -] -alloy-compat = ["op-alloy-consensus/alloy-compat"] -reth-codec = [ - "std", - "reth-primitives-traits/reth-codec", -] -serde = [ - "dep:serde", - "reth-primitives-traits/serde", - "alloy-primitives/serde", - "alloy-consensus/serde", - "op-alloy-consensus/serde", - "alloy-eips/serde", - "rand/serde", - "rand_08/serde", - "secp256k1/serde", - "bytes/serde", - "reth-codecs/serde", -] -serde-bincode-compat = [ - "serde", - "serde_with", - "alloy-consensus/serde-bincode-compat", - "op-alloy-consensus/serde-bincode-compat", - "reth-primitives-traits/serde-bincode-compat", - "alloy-eips/serde-bincode-compat", -] -arbitrary = [ - "std", - "reth-primitives-traits/arbitrary", - "op-alloy-consensus/arbitrary", - "alloy-consensus/arbitrary", - "alloy-primitives/arbitrary", - "alloy-eips/arbitrary", - "reth-codecs/arbitrary", -] diff --git a/op-reth/crates/primitives/src/receipt.rs b/op-reth/crates/primitives/src/receipt.rs deleted file mode 100644 index 1ed7cde2c91..00000000000 --- a/op-reth/crates/primitives/src/receipt.rs +++ /dev/null @@ -1,349 +0,0 @@ -use alloc::vec::Vec; -use alloy_consensus::{ - Eip2718EncodableReceipt, Eip658Value, Receipt, ReceiptWithBloom, RlpDecodableReceipt, - RlpEncodableReceipt, TxReceipt, Typed2718, -}; -use alloy_eips::{ - eip2718::{Eip2718Result, IsTyped2718}, - Decodable2718, Encodable2718, -}; -use alloy_primitives::{Bloom, Log}; -use alloy_rlp::{BufMut, Decodable, Encodable, Header}; -use op_alloy_consensus::{OpDepositReceipt, OpReceipt, OpTxType}; -use reth_primitives_traits::InMemorySize; - -/// Trait for deposit receipt. -pub trait DepositReceipt: reth_primitives_traits::Receipt { - /// Converts a `Receipt` into a mutable Optimism deposit receipt. 
- fn as_deposit_receipt_mut(&mut self) -> Option<&mut OpDepositReceipt>; - - /// Extracts an Optimism deposit receipt from `Receipt`. - fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt>; -} - -impl DepositReceipt for OpReceipt { - fn as_deposit_receipt_mut(&mut self) -> Option<&mut OpDepositReceipt> { - match self { - Self::Deposit(receipt) => Some(receipt), - _ => None, - } - } - - fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt> { - match self { - Self::Deposit(receipt) => Some(receipt), - _ => None, - } - } -} - -#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] -pub(super) mod serde_bincode_compat { - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use serde_with::{DeserializeAs, SerializeAs}; - - /// Bincode-compatible [`super::OpReceipt`] serde implementation. - /// - /// Intended to use with the [`serde_with::serde_as`] macro in the following way: - /// ```rust - /// use reth_optimism_primitives::OpReceipt; - /// use reth_primitives_traits::serde_bincode_compat::SerdeBincodeCompat; - /// use serde::{de::DeserializeOwned, Deserialize, Serialize}; - /// use serde_with::serde_as; - /// - /// #[serde_as] - /// #[derive(Serialize, Deserialize)] - /// struct Data { - /// #[serde_as( - /// as = "reth_primitives_traits::serde_bincode_compat::BincodeReprFor<'_, OpReceipt>" - /// )] - /// receipt: OpReceipt, - /// } - /// ``` - #[allow(rustdoc::private_doc_tests)] - #[derive(Debug, Serialize, Deserialize)] - pub enum OpReceipt<'a> { - /// Legacy receipt - Legacy(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-2930 receipt - Eip2930(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-1559 receipt - Eip1559(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// EIP-7702 receipt - Eip7702(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), - /// Deposit receipt - Deposit( - 
op_alloy_consensus::serde_bincode_compat::OpDepositReceipt<'a, alloy_primitives::Log>, - ), - } - - impl<'a> From<&'a super::OpReceipt> for OpReceipt<'a> { - fn from(value: &'a super::OpReceipt) -> Self { - match value { - super::OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), - super::OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), - super::OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), - super::OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), - super::OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), - } - } - } - - impl<'a> From> for super::OpReceipt { - fn from(value: OpReceipt<'a>) -> Self { - match value { - OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), - OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), - OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), - OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), - OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), - } - } - } - - impl SerializeAs for OpReceipt<'_> { - fn serialize_as(source: &super::OpReceipt, serializer: S) -> Result - where - S: Serializer, - { - OpReceipt::<'_>::from(source).serialize(serializer) - } - } - - impl<'de> DeserializeAs<'de, super::OpReceipt> for OpReceipt<'de> { - fn deserialize_as(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - OpReceipt::<'_>::deserialize(deserializer).map(Into::into) - } - } - - #[cfg(test)] - mod tests { - use crate::{receipt::serde_bincode_compat, OpReceipt}; - use arbitrary::Arbitrary; - use rand::Rng; - use serde::{Deserialize, Serialize}; - use serde_with::serde_as; - - #[test] - fn test_tx_bincode_roundtrip() { - #[serde_as] - #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] - struct Data { - #[serde_as(as = "serde_bincode_compat::OpReceipt<'_>")] - receipt: OpReceipt, - } - - let mut bytes = [0u8; 1024]; - rand::rng().fill(bytes.as_mut_slice()); - let mut data = Data { - receipt: OpReceipt::arbitrary(&mut 
arbitrary::Unstructured::new(&bytes)).unwrap(), - }; - let success = data.receipt.as_receipt_mut().status.coerce_status(); - // // ensure we don't have an invalid poststate variant - data.receipt.as_receipt_mut().status = success.into(); - - let encoded = bincode::serialize(&data).unwrap(); - let decoded: Data = bincode::deserialize(&encoded).unwrap(); - assert_eq!(decoded, data); - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_eips::eip2718::Encodable2718; - use alloy_primitives::{address, b256, bytes, hex_literal::hex, Bytes}; - use alloy_rlp::Encodable; - use reth_codecs::Compact; - - #[test] - fn test_decode_receipt() { - reth_codecs::test_utils::test_decode::(&hex!( - "c30328b52ffd23fc426961a00105007eb0042307705a97e503562eacf2b95060cce9de6de68386b6c155b73a9650021a49e2f8baad17f30faff5899d785c4c0873e45bc268bcf07560106424570d11f9a59e8f3db1efa4ceec680123712275f10d92c3411e1caaa11c7c5d591bc11487168e09934a9986848136da1b583babf3a7188e3aed007a1520f1cf4c1ca7d3482c6c28d37c298613c70a76940008816c4c95644579fd08471dc34732fd0f24" - )); - } - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn encode_legacy_receipt() { - let expected = hex!( - "f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - let mut data = Vec::with_capacity(expected.length()); - let receipt = 
ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt:: { - status: Eip658Value::Eip658(false), - cumulative_gas_used: 0x1, - logs: vec![Log::new_unchecked( - address!("0x0000000000000000000000000000000000000011"), - vec![ - b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), - b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), - ], - bytes!("0100ff"), - )], - }), - logs_bloom: [0; 256].into(), - }; - - receipt.encode(&mut data); - - // check that the rlp length equals the length of the expected rlp - assert_eq!(receipt.length(), expected.len()); - assert_eq!(data, expected); - } - - // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 - #[test] - fn decode_legacy_receipt() { - let data = hex!( - "f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" - ); - - // EIP658Receipt - let expected = ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt:: { - status: Eip658Value::Eip658(false), - cumulative_gas_used: 0x1, - logs: vec![Log::new_unchecked( - address!("0x0000000000000000000000000000000000000011"), - vec![ - b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), - b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), - ], - bytes!("0100ff"), - )], - }), - logs_bloom: [0; 256].into(), - }; - - let 
receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - } - - #[test] - fn decode_deposit_receipt_regolith_roundtrip() { - let data = hex!( - "b901107ef9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" - ); - - // Deposit Receipt (post-regolith) - let expected = ReceiptWithBloom { - receipt: OpReceipt::Deposit(OpDepositReceipt { - inner: Receipt:: { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 46913, - logs: vec![], - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: None, - }), - logs_bloom: [0; 256].into(), - }; - - let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::with_capacity(data.len()); - receipt.encode(&mut buf); - assert_eq!(buf, &data[..]); - } - - #[test] - fn decode_deposit_receipt_canyon_roundtrip() { - let data = hex!( - "b901117ef9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" - ); - - // Deposit Receipt (post-canyon) - let expected = 
ReceiptWithBloom { - receipt: OpReceipt::Deposit(OpDepositReceipt { - inner: Receipt:: { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 46913, - logs: vec![], - }, - deposit_nonce: Some(4012991), - deposit_receipt_version: Some(1), - }), - logs_bloom: [0; 256].into(), - }; - - let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); - assert_eq!(receipt, expected); - - let mut buf = Vec::with_capacity(data.len()); - expected.encode(&mut buf); - assert_eq!(buf, &data[..]); - } - - #[test] - fn gigantic_receipt() { - let receipt = OpReceipt::Legacy(Receipt:: { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 16747627, - logs: vec![ - Log::new_unchecked( - address!("0x4bf56695415f725e43c3e04354b604bcfb6dfb6e"), - vec![b256!( - "0xc69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" - )], - Bytes::from(vec![1; 0xffffff]), - ), - Log::new_unchecked( - address!("0xfaca325c86bf9c2d5b413cd7b90b209be92229c2"), - vec![b256!( - "0x8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" - )], - Bytes::from(vec![1; 0xffffff]), - ), - ], - }); - - let mut data = vec![]; - receipt.to_compact(&mut data); - let (decoded, _) = OpReceipt::from_compact(&data[..], data.len()); - assert_eq!(decoded, receipt); - } - - #[test] - fn test_encode_2718_length() { - let receipt = ReceiptWithBloom { - receipt: OpReceipt::Eip1559(Receipt:: { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 21000, - logs: vec![], - }), - logs_bloom: Bloom::default(), - }; - - let encoded = receipt.encoded_2718(); - assert_eq!( - encoded.len(), - receipt.encode_2718_len(), - "Encoded length should match the actual encoded data length" - ); - - // Test for legacy receipt as well - let legacy_receipt = ReceiptWithBloom { - receipt: OpReceipt::Legacy(Receipt:: { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 21000, - logs: vec![], - }), - logs_bloom: Bloom::default(), - }; - - let legacy_encoded = legacy_receipt.encoded_2718(); - 
assert_eq!( - legacy_encoded.len(), - legacy_receipt.encode_2718_len(), - "Encoded length for legacy receipt should match the actual encoded data length" - ); - } -} diff --git a/op-reth/crates/primitives/src/transaction/tx_type.rs b/op-reth/crates/primitives/src/transaction/tx_type.rs deleted file mode 100644 index deaaebc0682..00000000000 --- a/op-reth/crates/primitives/src/transaction/tx_type.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! Optimism transaction type. - -#[cfg(test)] -mod tests { - use alloy_consensus::constants::EIP7702_TX_TYPE_ID; - use op_alloy_consensus::{OpTxType, DEPOSIT_TX_TYPE_ID}; - use reth_codecs::{txtype::*, Compact}; - use rstest::rstest; - - #[rstest] - #[case(OpTxType::Legacy, COMPACT_IDENTIFIER_LEGACY, vec![])] - #[case(OpTxType::Eip2930, COMPACT_IDENTIFIER_EIP2930, vec![])] - #[case(OpTxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] - #[case(OpTxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] - #[case(OpTxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID])] - fn test_txtype_to_compact( - #[case] tx_type: OpTxType, - #[case] expected_identifier: usize, - #[case] expected_buf: Vec, - ) { - let mut buf = vec![]; - let identifier = tx_type.to_compact(&mut buf); - - assert_eq!( - identifier, expected_identifier, - "Unexpected identifier for OpTxType {tx_type:?}", - ); - assert_eq!(buf, expected_buf, "Unexpected buffer for OpTxType {tx_type:?}",); - } - - #[rstest] - #[case(OpTxType::Legacy, COMPACT_IDENTIFIER_LEGACY, vec![])] - #[case(OpTxType::Eip2930, COMPACT_IDENTIFIER_EIP2930, vec![])] - #[case(OpTxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] - #[case(OpTxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] - #[case(OpTxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID])] - fn test_txtype_from_compact( - #[case] expected_type: OpTxType, - #[case] identifier: usize, - #[case] buf: Vec, - ) { - let (actual_type, remaining_buf) = 
OpTxType::from_compact(&buf, identifier); - - assert_eq!(actual_type, expected_type, "Unexpected TxType for identifier {identifier}"); - assert!(remaining_buf.is_empty(), "Buffer not fully consumed for identifier {identifier}"); - } -} diff --git a/op-reth/crates/reth/Cargo.toml b/op-reth/crates/reth/Cargo.toml deleted file mode 100644 index cd39a0bcf81..00000000000 --- a/op-reth/crates/reth/Cargo.toml +++ /dev/null @@ -1,158 +0,0 @@ -[package] -name = "reth-op" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# reth -reth-primitives-traits = { workspace = true, features = ["op"] } -reth-chainspec.workspace = true -reth-network = { workspace = true, optional = true } -reth-network-api = { workspace = true, optional = true } -reth-eth-wire = { workspace = true, optional = true } -reth-provider = { workspace = true, optional = true } -reth-db = { workspace = true, optional = true, features = ["mdbx", "op"] } -reth-codecs = { workspace = true, optional = true } -reth-storage-api = { workspace = true, optional = true } -reth-node-api = { workspace = true, optional = true } -reth-node-core = { workspace = true, optional = true } -reth-consensus = { workspace = true, optional = true } -reth-consensus-common = { workspace = true, optional = true } -reth-evm = { workspace = true, optional = true } -reth-revm = { workspace = true, optional = true } -reth-rpc = { workspace = true, optional = true } -reth-rpc-api = { workspace = true, optional = true } -reth-rpc-eth-types = { workspace = true, optional = true } -reth-rpc-builder = { workspace = true, optional = true } -reth-exex = { workspace = true, optional = true } -reth-transaction-pool = { workspace = true, optional = true } -reth-trie = { workspace = true, optional = true } -reth-trie-db = { workspace = true, optional = true } -reth-node-builder = { workspace 
= true, optional = true } -reth-tasks = { workspace = true, optional = true } -reth-cli-util = { workspace = true, optional = true } -reth-engine-local = { workspace = true, optional = true } - -# reth-op -reth-optimism-primitives.workspace = true -reth-optimism-chainspec.workspace = true -reth-optimism-consensus = { workspace = true, optional = true } -reth-optimism-evm = { workspace = true, optional = true } -reth-optimism-node = { workspace = true, optional = true } -reth-optimism-rpc = { workspace = true, optional = true } -reth-optimism-cli = { workspace = true, optional = true } - -[features] -default = ["std"] -std = [ - "reth-chainspec/std", - "reth-consensus?/std", - "reth-consensus-common?/std", - "reth-optimism-chainspec/std", - "reth-optimism-consensus?/std", - "reth-optimism-evm?/std", - "reth-optimism-primitives/std", - "reth-primitives-traits/std", - "reth-storage-api?/std", - "reth-evm?/std", - "reth-revm?/std", -] -arbitrary = [ - "std", - "reth-chainspec/arbitrary", - "reth-optimism-primitives/arbitrary", - "reth-primitives-traits/arbitrary", - "reth-db?/arbitrary", - "reth-transaction-pool?/arbitrary", - "reth-eth-wire?/arbitrary", - "reth-codecs?/arbitrary", -] -keccak-cache-global = [ - "reth-optimism-node?/keccak-cache-global", - "reth-node-core?/keccak-cache-global", - "reth-optimism-cli?/keccak-cache-global", -] -test-utils = [ - "reth-chainspec/test-utils", - "reth-consensus?/test-utils", - "reth-db?/test-utils", - "reth-evm?/test-utils", - "reth-revm?/test-utils", - "reth-network?/test-utils", - "reth-optimism-node?/test-utils", - "reth-primitives-traits/test-utils", - "reth-provider?/test-utils", - "reth-trie?/test-utils", - "reth-transaction-pool?/test-utils", - "reth-node-builder?/test-utils", - "reth-trie-db?/test-utils", - "reth-codecs?/test-utils", -] - -full = ["consensus", "evm", "node", "provider", "rpc", "trie", "pool", "network"] - -alloy-compat = ["reth-optimism-primitives/alloy-compat"] -cli = ["dep:reth-optimism-cli", 
"dep:reth-cli-util"] -consensus = [ - "dep:reth-consensus", - "dep:reth-consensus-common", - "dep:reth-optimism-consensus", -] -evm = ["dep:reth-evm", "dep:reth-optimism-evm", "dep:reth-revm"] -exex = ["provider", "dep:reth-exex"] -node-api = ["dep:reth-node-api", "dep:reth-node-core"] -node = [ - "provider", - "consensus", - "evm", - "network", - "node-api", - "dep:reth-optimism-node", - "dep:reth-node-builder", - "dep:reth-engine-local", - "rpc", - "trie-db", - "pool", -] -rpc = [ - "tasks", - "dep:reth-rpc", - "dep:reth-rpc-builder", - "dep:reth-rpc-api", - "dep:reth-rpc-eth-types", - "dep:reth-optimism-rpc", -] -tasks = ["dep:reth-tasks"] -jemalloc = [ - "reth-cli-util?/jemalloc", - "reth-node-core?/jemalloc", - "reth-optimism-cli?/jemalloc", -] -js-tracer = [ - "rpc", - "reth-rpc/js-tracer", - "reth-node-builder?/js-tracer", - "reth-optimism-node?/js-tracer", - "reth-rpc-eth-types?/js-tracer", -] -network = ["dep:reth-network", "tasks", "dep:reth-network-api", "dep:reth-eth-wire"] -otlp = [ - "reth-node-core?/otlp", - "reth-optimism-cli?/otlp", -] -portable = [ - "reth-optimism-evm?/portable", - "reth-revm?/portable", -] -provider = ["storage-api", "tasks", "dep:reth-provider", "dep:reth-db", "dep:reth-codecs"] -pool = ["dep:reth-transaction-pool"] -storage-api = ["dep:reth-storage-api"] -trie = ["dep:reth-trie"] -trie-db = ["trie", "dep:reth-trie-db"] diff --git a/op-reth/crates/rpc/Cargo.toml b/op-reth/crates/rpc/Cargo.toml deleted file mode 100644 index 5d926caf159..00000000000 --- a/op-reth/crates/rpc/Cargo.toml +++ /dev/null @@ -1,93 +0,0 @@ -[package] -name = "reth-optimism-rpc" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "Ethereum RPC implementation for optimism." 
- -[lints] -workspace = true - -[dependencies] -# reth -reth-evm.workspace = true -reth-primitives-traits = { workspace = true, features = ["op"] } -reth-storage-api.workspace = true -reth-rpc-eth-api = { workspace = true, features = ["op"] } -reth-rpc-eth-types.workspace = true -reth-rpc-server-types.workspace = true -reth-tasks = { workspace = true, features = ["rayon"] } -reth-transaction-pool.workspace = true -reth-rpc.workspace = true -reth-rpc-api.workspace = true -reth-node-api.workspace = true -reth-node-builder.workspace = true -reth-chainspec.workspace = true -reth-chain-state.workspace = true -reth-rpc-engine-api.workspace = true - -# op-reth -reth-optimism-evm.workspace = true -reth-optimism-flashblocks.workspace = true -reth-optimism-payload-builder.workspace = true -reth-optimism-txpool.workspace = true -# TODO remove node-builder import -reth-optimism-primitives = { workspace = true, features = ["reth-codec", "serde-bincode-compat", "serde"] } -reth-optimism-forks.workspace = true - -# ethereum -alloy-eips.workspace = true -alloy-json-rpc.workspace = true -alloy-primitives.workspace = true -alloy-rpc-client.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-rpc-types-debug.workspace = true -alloy-transport.workspace = true -alloy-transport-http.workspace = true -alloy-consensus.workspace = true -alloy-rpc-types-engine.workspace = true -op-alloy-network.workspace = true -op-alloy-rpc-types.workspace = true -op-alloy-rpc-types-engine.workspace = true -op-alloy-rpc-jsonrpsee.workspace = true -op-alloy-consensus.workspace = true -revm.workspace = true -op-revm.workspace = true - -# async -tokio.workspace = true -futures.workspace = true -tokio-stream.workspace = true -reqwest = { workspace = true, features = ["rustls-tls-native-roots"] } -async-trait.workspace = true -tower.workspace = true - -# rpc -jsonrpsee-core.workspace = true -jsonrpsee-types.workspace = true -jsonrpsee.workspace = true -serde_json.workspace = true - -# misc 
-eyre.workspace = true -thiserror.workspace = true -tracing.workspace = true -derive_more = { workspace = true, features = ["constructor"] } - -# metrics -reth-metrics.workspace = true -metrics.workspace = true - -[dev-dependencies] -reth-optimism-chainspec.workspace = true -alloy-op-hardforks.workspace = true - -[features] -client = [ - "jsonrpsee/client", - "jsonrpsee/async-client", - "reth-rpc-eth-api/client", -] diff --git a/op-reth/crates/rpc/src/engine.rs b/op-reth/crates/rpc/src/engine.rs deleted file mode 100644 index a31a64daca9..00000000000 --- a/op-reth/crates/rpc/src/engine.rs +++ /dev/null @@ -1,412 +0,0 @@ -//! Implements the Optimism engine API RPC methods. - -use alloy_eips::eip7685::Requests; -use alloy_primitives::{BlockHash, B256, B64, U64}; -use alloy_rpc_types_engine::{ - ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadInputV2, ExecutionPayloadV3, - ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, -}; -use derive_more::Constructor; -use jsonrpsee::proc_macros::rpc; -use jsonrpsee_core::{server::RpcModule, RpcResult}; -use op_alloy_rpc_types_engine::{ - OpExecutionData, OpExecutionPayloadV4, ProtocolVersion, ProtocolVersionFormatV0, - SuperchainSignal, -}; -use reth_chainspec::EthereumHardforks; -use reth_node_api::{EngineApiValidator, EngineTypes}; -use reth_rpc_api::IntoEngineApiRpcModule; -use reth_rpc_engine_api::EngineApi; -use reth_storage_api::{BlockReader, HeaderProvider, StateProviderFactory}; -use reth_transaction_pool::TransactionPool; -use tracing::{debug, info, trace}; - -/// The list of all supported Engine capabilities available over the engine endpoint. 
-/// -/// Spec: -pub const OP_ENGINE_CAPABILITIES: &[&str] = &[ - "engine_forkchoiceUpdatedV1", - "engine_forkchoiceUpdatedV2", - "engine_forkchoiceUpdatedV3", - "engine_getClientVersionV1", - "engine_getPayloadV2", - "engine_getPayloadV3", - "engine_getPayloadV4", - "engine_newPayloadV2", - "engine_newPayloadV3", - "engine_newPayloadV4", - "engine_getPayloadBodiesByHashV1", - "engine_getPayloadBodiesByRangeV1", - "engine_signalSuperchainV1", -]; - -/// OP Stack protocol version -/// See also: -pub const OP_STACK_SUPPORT: ProtocolVersion = ProtocolVersion::V0(ProtocolVersionFormatV0 { - build: B64::ZERO, - major: 9, - minor: 0, - patch: 0, - pre_release: 0, -}); - -/// Extension trait that gives access to Optimism engine API RPC methods. -/// -/// Note: -/// > The provider should use a JWT authentication layer. -/// -/// This follows the Optimism specs that can be found at: -/// -#[cfg_attr(not(feature = "client"), rpc(server, namespace = "engine"), server_bounds(Engine::PayloadAttributes: jsonrpsee::core::DeserializeOwned))] -#[cfg_attr(feature = "client", rpc(server, client, namespace = "engine", client_bounds(Engine::PayloadAttributes: jsonrpsee::core::Serialize + Clone), server_bounds(Engine::PayloadAttributes: jsonrpsee::core::DeserializeOwned)))] -pub trait OpEngineApi { - /// Sends the given payload to the execution layer client, as specified for the Shanghai fork. - /// - /// See also - /// - /// No modifications needed for OP compatibility. - #[method(name = "newPayloadV2")] - async fn new_payload_v2(&self, payload: ExecutionPayloadInputV2) -> RpcResult; - - /// Sends the given payload to the execution layer client, as specified for the Cancun fork. - /// - /// See also - /// - /// OP modifications: - /// - expected versioned hashes MUST be an empty array: therefore the `versioned_hashes` - /// parameter is removed. - /// - parent beacon block root MUST be the parent beacon block root from the L1 origin block of - /// the L2 block. 
- /// - blob versioned hashes MUST be empty list. - #[method(name = "newPayloadV3")] - async fn new_payload_v3( - &self, - payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - ) -> RpcResult; - - /// Sends the given payload to the execution layer client, as specified for the Prague fork. - /// - /// See also - /// - /// - blob versioned hashes MUST be empty list. - /// - execution layer requests MUST be empty list. - #[method(name = "newPayloadV4")] - async fn new_payload_v4( - &self, - payload: OpExecutionPayloadV4, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - execution_requests: Requests, - ) -> RpcResult; - - /// See also - /// - /// This exists because it is used by op-node: - /// - /// Caution: This should not accept the `withdrawals` field in the payload attributes. - #[method(name = "forkchoiceUpdatedV1")] - async fn fork_choice_updated_v1( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult; - - /// Updates the execution layer client with the given fork choice, as specified for the Shanghai - /// fork. - /// - /// Caution: This should not accept the `parentBeaconBlockRoot` field in the payload attributes. - /// - /// See also - /// - /// OP modifications: - /// - The `payload_attributes` parameter is extended with the [`EngineTypes::PayloadAttributes`](EngineTypes) type as described in - #[method(name = "forkchoiceUpdatedV2")] - async fn fork_choice_updated_v2( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult; - - /// Updates the execution layer client with the given fork choice, as specified for the Cancun - /// fork. 
- /// - /// See also - /// - /// OP modifications: - /// - Must be called with an Ecotone payload - /// - Attributes must contain the parent beacon block root field - /// - The `payload_attributes` parameter is extended with the [`EngineTypes::PayloadAttributes`](EngineTypes) type as described in - #[method(name = "forkchoiceUpdatedV3")] - async fn fork_choice_updated_v3( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult; - - /// Retrieves an execution payload from a previously started build process, as specified for the - /// Shanghai fork. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// No modifications needed for OP compatibility. - #[method(name = "getPayloadV2")] - async fn get_payload_v2( - &self, - payload_id: PayloadId, - ) -> RpcResult; - - /// Retrieves an execution payload from a previously started build process, as specified for the - /// Cancun fork. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// OP modifications: - /// - the response type is extended to [`EngineTypes::ExecutionPayloadEnvelopeV3`]. - #[method(name = "getPayloadV3")] - async fn get_payload_v3( - &self, - payload_id: PayloadId, - ) -> RpcResult; - - /// Returns the most recent version of the payload that is available in the corresponding - /// payload build process at the time of receiving this call. - /// - /// See also - /// - /// Note: - /// > Provider software MAY stop the corresponding build process after serving this call. - /// - /// OP modifications: - /// - the response type is extended to [`EngineTypes::ExecutionPayloadEnvelopeV4`]. - #[method(name = "getPayloadV4")] - async fn get_payload_v4( - &self, - payload_id: PayloadId, - ) -> RpcResult; - - /// Returns the execution payload bodies by the given hash. 
- /// - /// See also - #[method(name = "getPayloadBodiesByHashV1")] - async fn get_payload_bodies_by_hash_v1( - &self, - block_hashes: Vec, - ) -> RpcResult; - - /// Returns the execution payload bodies by the range starting at `start`, containing `count` - /// blocks. - /// - /// WARNING: This method is associated with the `BeaconBlocksByRange` message in the consensus - /// layer p2p specification, meaning the input should be treated as untrusted or potentially - /// adversarial. - /// - /// Implementers should take care when acting on the input to this method, specifically - /// ensuring that the range is limited properly, and that the range boundaries are computed - /// correctly and without panics. - /// - /// See also - #[method(name = "getPayloadBodiesByRangeV1")] - async fn get_payload_bodies_by_range_v1( - &self, - start: U64, - count: U64, - ) -> RpcResult; - - /// Signals superchain information to the Engine. - /// Returns the latest supported OP-Stack protocol version of the execution engine. - /// See also - #[method(name = "engine_signalSuperchainV1")] - async fn signal_superchain_v1(&self, _signal: SuperchainSignal) -> RpcResult; - - /// Returns the execution client version information. - /// - /// Note: - /// > The `client_version` parameter identifies the consensus client. - /// - /// See also - #[method(name = "getClientVersionV1")] - async fn get_client_version_v1( - &self, - client_version: ClientVersionV1, - ) -> RpcResult>; - - /// Returns the list of Engine API methods supported by the execution layer client software. - /// - /// See also - #[method(name = "exchangeCapabilities")] - async fn exchange_capabilities(&self, capabilities: Vec) -> RpcResult>; -} - -/// The Engine API implementation that grants the Consensus layer access to data and -/// functions in the Execution layer that are crucial for the consensus process. 
-#[derive(Debug, Constructor)] -pub struct OpEngineApi { - inner: EngineApi, -} - -impl Clone - for OpEngineApi -where - PayloadT: EngineTypes, -{ - fn clone(&self) -> Self { - Self { inner: self.inner.clone() } - } -} - -#[async_trait::async_trait] -impl OpEngineApiServer - for OpEngineApi -where - Provider: HeaderProvider + BlockReader + StateProviderFactory + 'static, - EngineT: EngineTypes, - Pool: TransactionPool + 'static, - Validator: EngineApiValidator, - ChainSpec: EthereumHardforks + Send + Sync + 'static, -{ - async fn new_payload_v2(&self, payload: ExecutionPayloadInputV2) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_newPayloadV2"); - let payload = OpExecutionData::v2(payload); - Ok(self.inner.new_payload_v2_metered(payload).await?) - } - - async fn new_payload_v3( - &self, - payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_newPayloadV3"); - let payload = OpExecutionData::v3(payload, versioned_hashes, parent_beacon_block_root); - - Ok(self.inner.new_payload_v3_metered(payload).await?) - } - - async fn new_payload_v4( - &self, - payload: OpExecutionPayloadV4, - versioned_hashes: Vec, - parent_beacon_block_root: B256, - execution_requests: Requests, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_newPayloadV4"); - let payload = OpExecutionData::v4( - payload, - versioned_hashes, - parent_beacon_block_root, - execution_requests, - ); - - Ok(self.inner.new_payload_v4_metered(payload).await?) - } - - async fn fork_choice_updated_v1( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult { - Ok(self.inner.fork_choice_updated_v1_metered(fork_choice_state, payload_attributes).await?) 
- } - - async fn fork_choice_updated_v2( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_forkchoiceUpdatedV2"); - Ok(self.inner.fork_choice_updated_v2_metered(fork_choice_state, payload_attributes).await?) - } - - async fn fork_choice_updated_v3( - &self, - fork_choice_state: ForkchoiceState, - payload_attributes: Option, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_forkchoiceUpdatedV3"); - Ok(self.inner.fork_choice_updated_v3_metered(fork_choice_state, payload_attributes).await?) - } - - async fn get_payload_v2( - &self, - payload_id: PayloadId, - ) -> RpcResult { - debug!(target: "rpc::engine", id = %payload_id, "Serving engine_getPayloadV2"); - Ok(self.inner.get_payload_v2_metered(payload_id).await?) - } - - async fn get_payload_v3( - &self, - payload_id: PayloadId, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadV3"); - Ok(self.inner.get_payload_v3_metered(payload_id).await?) - } - - async fn get_payload_v4( - &self, - payload_id: PayloadId, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadV4"); - Ok(self.inner.get_payload_v4_metered(payload_id).await?) - } - - async fn get_payload_bodies_by_hash_v1( - &self, - block_hashes: Vec, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByHashV1"); - Ok(self.inner.get_payload_bodies_by_hash_v1_metered(block_hashes).await?) - } - - async fn get_payload_bodies_by_range_v1( - &self, - start: U64, - count: U64, - ) -> RpcResult { - trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByRangeV1"); - Ok(self.inner.get_payload_bodies_by_range_v1_metered(start.to(), count.to()).await?) 
- } - - async fn signal_superchain_v1(&self, signal: SuperchainSignal) -> RpcResult { - trace!(target: "rpc::engine", "Serving signal_superchain_v1"); - info!( - target: "rpc::engine", - "Received superchain version signal local={:?} required={:?} recommended={:?}", - OP_STACK_SUPPORT, - signal.required, - signal.recommended - ); - Ok(OP_STACK_SUPPORT) - } - - async fn get_client_version_v1( - &self, - client: ClientVersionV1, - ) -> RpcResult> { - trace!(target: "rpc::engine", "Serving engine_getClientVersionV1"); - Ok(self.inner.get_client_version_v1(client)?) - } - - async fn exchange_capabilities(&self, _capabilities: Vec) -> RpcResult> { - Ok(self.inner.capabilities().list()) - } -} - -impl IntoEngineApiRpcModule - for OpEngineApi -where - EngineT: EngineTypes, - Self: OpEngineApiServer, -{ - fn into_rpc_module(self) -> RpcModule<()> { - self.into_rpc().remove_context() - } -} diff --git a/op-reth/crates/rpc/src/error.rs b/op-reth/crates/rpc/src/error.rs deleted file mode 100644 index b457ce9d9c1..00000000000 --- a/op-reth/crates/rpc/src/error.rs +++ /dev/null @@ -1,235 +0,0 @@ -//! RPC errors specific to OP. - -use alloy_json_rpc::ErrorPayload; -use alloy_primitives::Bytes; -use alloy_rpc_types_eth::{error::EthRpcErrorCode, BlockError}; -use alloy_transport::{RpcError, TransportErrorKind}; -use jsonrpsee_types::error::{INTERNAL_ERROR_CODE, INVALID_PARAMS_CODE}; -use op_revm::{OpHaltReason, OpTransactionError}; -use reth_evm::execute::ProviderError; -use reth_optimism_evm::OpBlockExecutionError; -use reth_rpc_eth_api::{AsEthApiError, EthTxEnvError, TransactionConversionError}; -use reth_rpc_eth_types::{ - error::api::{FromEvmHalt, FromRevert}, - EthApiError, -}; -use reth_rpc_server_types::result::{internal_rpc_err, rpc_err}; -use revm::context_interface::result::{EVMError, InvalidTransaction}; -use std::{convert::Infallible, fmt::Display}; - -/// Optimism specific errors, that extend [`EthApiError`]. 
-#[derive(Debug, thiserror::Error)] -pub enum OpEthApiError { - /// L1 ethereum error. - #[error(transparent)] - Eth(#[from] EthApiError), - /// EVM error originating from invalid optimism data. - #[error(transparent)] - Evm(#[from] OpBlockExecutionError), - /// Thrown when calculating L1 gas fee. - #[error("failed to calculate l1 gas fee")] - L1BlockFeeError, - /// Thrown when calculating L1 gas used - #[error("failed to calculate l1 gas used")] - L1BlockGasError, - /// Wrapper for [`revm_primitives::InvalidTransaction`](InvalidTransaction). - #[error(transparent)] - InvalidTransaction(#[from] OpInvalidTransactionError), - /// Sequencer client error. - #[error(transparent)] - Sequencer(#[from] SequencerClientError), -} - -impl AsEthApiError for OpEthApiError { - fn as_err(&self) -> Option<&EthApiError> { - match self { - Self::Eth(err) => Some(err), - _ => None, - } - } -} - -impl From for jsonrpsee_types::error::ErrorObject<'static> { - fn from(err: OpEthApiError) -> Self { - match err { - OpEthApiError::Eth(err) => err.into(), - OpEthApiError::InvalidTransaction(err) => err.into(), - OpEthApiError::Evm(_) | - OpEthApiError::L1BlockFeeError | - OpEthApiError::L1BlockGasError => internal_rpc_err(err.to_string()), - OpEthApiError::Sequencer(err) => err.into(), - } - } -} - -/// Optimism specific invalid transaction errors -#[derive(thiserror::Error, Debug)] -pub enum OpInvalidTransactionError { - /// A deposit transaction was submitted as a system transaction post-regolith. - #[error("no system transactions allowed after regolith")] - DepositSystemTxPostRegolith, - /// A deposit transaction halted post-regolith - #[error("deposit transaction halted after regolith")] - HaltedDepositPostRegolith, - /// The encoded transaction was missing during evm execution. - #[error("missing enveloped transaction bytes")] - MissingEnvelopedTx, - /// Transaction conditional errors. 
- #[error(transparent)] - TxConditionalErr(#[from] TxConditionalErr), -} - -impl From for jsonrpsee_types::error::ErrorObject<'static> { - fn from(err: OpInvalidTransactionError) -> Self { - match err { - OpInvalidTransactionError::DepositSystemTxPostRegolith | - OpInvalidTransactionError::HaltedDepositPostRegolith | - OpInvalidTransactionError::MissingEnvelopedTx => { - rpc_err(EthRpcErrorCode::TransactionRejected.code(), err.to_string(), None) - } - OpInvalidTransactionError::TxConditionalErr(_) => err.into(), - } - } -} - -impl TryFrom for OpInvalidTransactionError { - type Error = InvalidTransaction; - - fn try_from(err: OpTransactionError) -> Result { - match err { - OpTransactionError::DepositSystemTxPostRegolith => { - Ok(Self::DepositSystemTxPostRegolith) - } - OpTransactionError::HaltedDepositPostRegolith => Ok(Self::HaltedDepositPostRegolith), - OpTransactionError::MissingEnvelopedTx => Ok(Self::MissingEnvelopedTx), - OpTransactionError::Base(err) => Err(err), - } - } -} - -/// Transaction conditional related errors. -#[derive(Debug, thiserror::Error)] -pub enum TxConditionalErr { - /// Transaction conditional cost exceeded maximum allowed - #[error("conditional cost exceeded maximum allowed")] - ConditionalCostExceeded, - /// Invalid conditional parameters - #[error("invalid conditional parameters")] - InvalidCondition, - /// Internal error - #[error("internal error: {0}")] - Internal(String), - /// Thrown if the conditional's storage value doesn't match the latest state's. - #[error("storage value mismatch")] - StorageValueMismatch, - /// Thrown when the conditional's storage root doesn't match the latest state's root. 
- #[error("storage root mismatch")] - StorageRootMismatch, -} - -impl TxConditionalErr { - /// Creates an internal error variant - pub fn internal(err: E) -> Self { - Self::Internal(err.to_string()) - } -} - -impl From for jsonrpsee_types::error::ErrorObject<'static> { - fn from(err: TxConditionalErr) -> Self { - let code = match &err { - TxConditionalErr::Internal(_) => INTERNAL_ERROR_CODE, - _ => INVALID_PARAMS_CODE, - }; - - jsonrpsee_types::error::ErrorObject::owned(code, err.to_string(), None::) - } -} - -/// Error type when interacting with the Sequencer -#[derive(Debug, thiserror::Error)] -pub enum SequencerClientError { - /// Wrapper around an [`RpcError`]. - #[error(transparent)] - HttpError(#[from] RpcError), -} - -impl From for jsonrpsee_types::error::ErrorObject<'static> { - fn from(err: SequencerClientError) -> Self { - match err { - SequencerClientError::HttpError(RpcError::ErrorResp(ErrorPayload { - code, - message, - data, - })) => jsonrpsee_types::error::ErrorObject::owned(code as i32, message, data), - err => jsonrpsee_types::error::ErrorObject::owned( - INTERNAL_ERROR_CODE, - err.to_string(), - None::, - ), - } - } -} - -impl From> for OpEthApiError -where - T: Into, -{ - fn from(error: EVMError) -> Self { - match error { - EVMError::Transaction(err) => match err.try_into() { - Ok(err) => Self::InvalidTransaction(err), - Err(err) => Self::Eth(EthApiError::InvalidTransaction(err.into())), - }, - EVMError::Database(err) => Self::Eth(err.into()), - EVMError::Header(err) => Self::Eth(err.into()), - EVMError::Custom(err) => Self::Eth(EthApiError::EvmCustom(err)), - } - } -} - -impl FromEvmHalt for OpEthApiError { - fn from_evm_halt(halt: OpHaltReason, gas_limit: u64) -> Self { - match halt { - OpHaltReason::FailedDeposit => { - OpInvalidTransactionError::HaltedDepositPostRegolith.into() - } - OpHaltReason::Base(halt) => EthApiError::from_evm_halt(halt, gas_limit).into(), - } - } -} - -impl FromRevert for OpEthApiError { - fn from_revert(output: Bytes) 
-> Self { - Self::Eth(EthApiError::from_revert(output)) - } -} - -impl From for OpEthApiError { - fn from(value: TransactionConversionError) -> Self { - Self::Eth(EthApiError::from(value)) - } -} - -impl From for OpEthApiError { - fn from(value: EthTxEnvError) -> Self { - Self::Eth(EthApiError::from(value)) - } -} - -impl From for OpEthApiError { - fn from(value: ProviderError) -> Self { - Self::Eth(EthApiError::from(value)) - } -} - -impl From for OpEthApiError { - fn from(value: BlockError) -> Self { - Self::Eth(EthApiError::from(value)) - } -} - -impl From for OpEthApiError { - fn from(value: Infallible) -> Self { - match value {} - } -} diff --git a/op-reth/crates/rpc/src/eth/block.rs b/op-reth/crates/rpc/src/eth/block.rs deleted file mode 100644 index 0efd9aea988..00000000000 --- a/op-reth/crates/rpc/src/eth/block.rs +++ /dev/null @@ -1,23 +0,0 @@ -//! Loads and formats OP block RPC response. - -use crate::{eth::RpcNodeCore, OpEthApi, OpEthApiError}; -use reth_rpc_eth_api::{ - helpers::{EthBlocks, LoadBlock}, - FromEvmError, RpcConvert, -}; - -impl EthBlocks for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ -} - -impl LoadBlock for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ -} diff --git a/op-reth/crates/rpc/src/eth/mod.rs b/op-reth/crates/rpc/src/eth/mod.rs deleted file mode 100644 index f4b9fcb08fd..00000000000 --- a/op-reth/crates/rpc/src/eth/mod.rs +++ /dev/null @@ -1,623 +0,0 @@ -//! OP-Reth `eth_` endpoint implementation. 
- -pub mod ext; -pub mod receipt; -pub mod transaction; - -mod block; -mod call; -mod pending_block; - -use crate::{ - eth::{receipt::OpReceiptConverter, transaction::OpTxInfoMapper}, - OpEthApiError, SequencerClient, -}; -use alloy_consensus::BlockHeader; -use alloy_eips::BlockNumHash; -use alloy_primitives::{B256, U256}; -use alloy_rpc_types_eth::{Filter, Log}; -use eyre::WrapErr; -use futures::StreamExt; -use op_alloy_network::Optimism; -use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; -pub use receipt::{OpReceiptBuilder, OpReceiptFieldsBuilder}; -use reqwest::Url; -use reth_chainspec::{EthereumHardforks, Hardforks}; -use reth_evm::ConfigureEvm; -use reth_node_api::{FullNodeComponents, FullNodeTypes, HeaderTy, NodeTypes}; -use reth_node_builder::rpc::{EthApiBuilder, EthApiCtx}; -use reth_optimism_flashblocks::{ - FlashBlockBuildInfo, FlashBlockCompleteSequence, FlashBlockCompleteSequenceRx, - FlashBlockConsensusClient, FlashBlockRx, FlashBlockService, FlashblocksListeners, - PendingBlockRx, PendingFlashBlock, WsFlashBlockStream, -}; -use reth_rpc::eth::core::EthApiInner; -use reth_rpc_eth_api::{ - helpers::{ - pending_block::BuildPendingEnv, EthApiSpec, EthFees, EthState, LoadFee, LoadPendingBlock, - LoadState, SpawnBlocking, Trace, - }, - EthApiTypes, FromEvmError, FullEthApiServer, RpcConvert, RpcConverter, RpcNodeCore, - RpcNodeCoreExt, RpcTypes, -}; -use reth_rpc_eth_types::{ - logs_utils::matching_block_logs_with_tx_hashes, EthStateCache, FeeHistoryCache, GasPriceOracle, - PendingBlock, -}; -use reth_storage_api::{BlockReaderIdExt, ProviderHeader}; -use reth_tasks::{ - pool::{BlockingTaskGuard, BlockingTaskPool}, - TaskSpawner, -}; -use std::{ - fmt::{self, Formatter}, - marker::PhantomData, - sync::Arc, - time::Duration, -}; -use tokio::{sync::watch, time}; -use tokio_stream::{wrappers::BroadcastStream, Stream}; -use tracing::info; - -/// Maximum duration to wait for a fresh flashblock when one is being built. 
-const MAX_FLASHBLOCK_WAIT_DURATION: Duration = Duration::from_millis(50); - -/// Adapter for [`EthApiInner`], which holds all the data required to serve core `eth_` API. -pub type EthApiNodeBackend = EthApiInner; - -/// OP-Reth `Eth` API implementation. -/// -/// This type provides the functionality for handling `eth_` related requests. -/// -/// This wraps a default `Eth` implementation, and provides additional functionality where the -/// optimism spec deviates from the default (ethereum) spec, e.g. transaction forwarding to the -/// sequencer, receipts, additional RPC fields for transaction receipts. -/// -/// This type implements the [`FullEthApi`](reth_rpc_eth_api::helpers::FullEthApi) by implemented -/// all the `Eth` helper traits and prerequisite traits. -pub struct OpEthApi { - /// Gateway to node's core components. - inner: Arc>, -} - -impl Clone for OpEthApi { - fn clone(&self) -> Self { - Self { inner: self.inner.clone() } - } -} - -impl OpEthApi { - /// Creates a new `OpEthApi`. - pub fn new( - eth_api: EthApiNodeBackend, - sequencer_client: Option, - min_suggested_priority_fee: U256, - flashblocks: Option>, - ) -> Self { - let inner = Arc::new(OpEthApiInner { - eth_api, - sequencer_client, - min_suggested_priority_fee, - flashblocks, - }); - Self { inner } - } - - /// Build a [`OpEthApi`] using [`OpEthApiBuilder`]. - pub const fn builder() -> OpEthApiBuilder { - OpEthApiBuilder::new() - } - - /// Returns a reference to the [`EthApiNodeBackend`]. - pub fn eth_api(&self) -> &EthApiNodeBackend { - self.inner.eth_api() - } - /// Returns the configured sequencer client, if any. - pub fn sequencer_client(&self) -> Option<&SequencerClient> { - self.inner.sequencer_client() - } - - /// Returns a cloned pending block receiver, if any. - pub fn pending_block_rx(&self) -> Option> { - self.inner.flashblocks.as_ref().map(|f| f.pending_block_rx.clone()) - } - - /// Returns a new subscription to received flashblocks. 
- pub fn subscribe_received_flashblocks(&self) -> Option { - self.inner.flashblocks.as_ref().map(|f| f.received_flashblocks.subscribe()) - } - - /// Returns a new subscription to flashblock sequences. - pub fn subscribe_flashblock_sequence(&self) -> Option { - self.inner.flashblocks.as_ref().map(|f| f.flashblocks_sequence.subscribe()) - } - - /// Returns a stream of matching flashblock receipts, if any. - /// - /// This will yield all new matching receipts received from _new_ flashblocks. - pub fn flashblock_receipts_stream( - &self, - filter: Filter, - ) -> Option + Send + Unpin> { - self.subscribe_received_flashblocks().map(|rx| { - BroadcastStream::new(rx) - .scan( - None::<(u64, u64)>, // state buffers base block number and timestamp - move |state, result| { - let fb = match result.ok() { - Some(fb) => fb, - None => return futures::future::ready(None), - }; - - // Update state from base flashblock for block level meta data. - if let Some(base) = &fb.base { - *state = Some((base.block_number, base.timestamp)); - } - - let Some((block_number, timestamp)) = *state else { - // we haven't received a new flashblock sequence yet, so we can skip - // until we receive the first index 0 (base) - return futures::future::ready(Some(Vec::new())) - }; - - let receipts = - fb.metadata.receipts.iter().map(|(tx, receipt)| (*tx, receipt)); - - let all_logs = matching_block_logs_with_tx_hashes( - &filter, - BlockNumHash::new(block_number, fb.diff.block_hash), - timestamp, - receipts, - false, - ); - - futures::future::ready(Some(all_logs)) - }, - ) - .flat_map(futures::stream::iter) - }) - } - - /// Returns information about the flashblock currently being built, if any. - fn flashblock_build_info(&self) -> Option { - self.inner.flashblocks.as_ref().and_then(|f| *f.in_progress_rx.borrow()) - } - - /// Extracts pending block if it matches the expected parent hash. 
- fn extract_matching_block( - &self, - block: Option<&PendingFlashBlock>, - parent_hash: B256, - ) -> Option> { - block.filter(|b| b.block().parent_hash() == parent_hash).map(|b| b.pending.clone()) - } - - /// Awaits a fresh flashblock if one is being built, otherwise returns current. - async fn flashblock( - &self, - parent_hash: B256, - ) -> eyre::Result>> { - let Some(rx) = self.inner.flashblocks.as_ref().map(|f| &f.pending_block_rx) else { - return Ok(None) - }; - - // Check if a flashblock is being built - if let Some(build_info) = self.flashblock_build_info() { - let current_index = rx.borrow().as_ref().map(|b| b.last_flashblock_index); - - // Check if this is the first flashblock or the next consecutive index - let is_next_index = current_index.is_none_or(|idx| build_info.index == idx + 1); - - // Wait only for relevant flashblocks: matching parent and next in sequence - if build_info.parent_hash == parent_hash && is_next_index { - let mut rx_clone = rx.clone(); - // Wait up to MAX_FLASHBLOCK_WAIT_DURATION for a new flashblock to arrive - let _ = time::timeout(MAX_FLASHBLOCK_WAIT_DURATION, rx_clone.changed()).await; - } - } - - // Fall back to current block - Ok(self.extract_matching_block(rx.borrow().as_ref(), parent_hash)) - } - - /// Returns a [`PendingBlock`] that is built out of flashblocks. - /// - /// If flashblocks receiver is not set, then it always returns `None`. - /// - /// It may wait up to 50ms for a fresh flashblock if one is currently being built. - pub async fn pending_flashblock(&self) -> eyre::Result>> - where - OpEthApiError: FromEvmError, - Rpc: RpcConvert, - { - let Some(latest) = self.provider().latest_header()? 
else { - return Ok(None); - }; - - self.flashblock(latest.hash()).await - } -} - -impl EthApiTypes for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - type Error = OpEthApiError; - type NetworkTypes = Rpc::Network; - type RpcConvert = Rpc; - - fn converter(&self) -> &Self::RpcConvert { - self.inner.eth_api.converter() - } -} - -impl RpcNodeCore for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - type Primitives = N::Primitives; - type Provider = N::Provider; - type Pool = N::Pool; - type Evm = N::Evm; - type Network = N::Network; - - #[inline] - fn pool(&self) -> &Self::Pool { - self.inner.eth_api.pool() - } - - #[inline] - fn evm_config(&self) -> &Self::Evm { - self.inner.eth_api.evm_config() - } - - #[inline] - fn network(&self) -> &Self::Network { - self.inner.eth_api.network() - } - - #[inline] - fn provider(&self) -> &Self::Provider { - self.inner.eth_api.provider() - } -} - -impl RpcNodeCoreExt for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - #[inline] - fn cache(&self) -> &EthStateCache { - self.inner.eth_api.cache() - } -} - -impl EthApiSpec for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - #[inline] - fn starting_block(&self) -> U256 { - self.inner.eth_api.starting_block() - } -} - -impl SpawnBlocking for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - #[inline] - fn io_task_spawner(&self) -> impl TaskSpawner { - self.inner.eth_api.task_spawner() - } - - #[inline] - fn tracing_task_pool(&self) -> &BlockingTaskPool { - self.inner.eth_api.blocking_task_pool() - } - - #[inline] - fn tracing_task_guard(&self) -> &BlockingTaskGuard { - self.inner.eth_api.blocking_task_guard() - } - - #[inline] - fn blocking_io_task_guard(&self) -> &Arc { - self.inner.eth_api.blocking_io_request_semaphore() - } -} - -impl LoadFee for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ - #[inline] - fn gas_oracle(&self) -> &GasPriceOracle { - self.inner.eth_api.gas_oracle() - } - - #[inline] 
- fn fee_history_cache(&self) -> &FeeHistoryCache> { - self.inner.eth_api.fee_history_cache() - } - - async fn suggested_priority_fee(&self) -> Result { - self.inner - .eth_api - .gas_oracle() - .op_suggest_tip_cap(self.inner.min_suggested_priority_fee) - .await - .map_err(Into::into) - } -} - -impl LoadState for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, - Self: LoadPendingBlock, -{ -} - -impl EthState for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, - Self: LoadPendingBlock, -{ - #[inline] - fn max_proof_window(&self) -> u64 { - self.inner.eth_api.eth_proof_window() - } -} - -impl EthFees for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ -} - -impl Trace for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ -} - -impl fmt::Debug for OpEthApi { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("OpEthApi").finish_non_exhaustive() - } -} - -/// Container type `OpEthApi` -pub struct OpEthApiInner { - /// Gateway to node's core components. - eth_api: EthApiNodeBackend, - /// Sequencer client, configured to forward submitted transactions to sequencer of given OP - /// network. - sequencer_client: Option, - /// Minimum priority fee enforced by OP-specific logic. - /// - /// See also - min_suggested_priority_fee: U256, - /// Flashblocks listeners. - /// - /// If set, provides receivers for pending blocks, flashblock sequences, and build status. - flashblocks: Option>, -} - -impl fmt::Debug for OpEthApiInner { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - f.debug_struct("OpEthApiInner").finish() - } -} - -impl OpEthApiInner { - /// Returns a reference to the [`EthApiNodeBackend`]. - const fn eth_api(&self) -> &EthApiNodeBackend { - &self.eth_api - } - - /// Returns the configured sequencer client, if any. 
- const fn sequencer_client(&self) -> Option<&SequencerClient> { - self.sequencer_client.as_ref() - } -} - -/// Converter for OP RPC types. -pub type OpRpcConvert = RpcConverter< - NetworkT, - ::Evm, - OpReceiptConverter<::Provider>, - (), - OpTxInfoMapper<::Provider>, ->; - -/// Builds [`OpEthApi`] for Optimism. -#[derive(Debug)] -pub struct OpEthApiBuilder { - /// Sequencer client, configured to forward submitted transactions to sequencer of given OP - /// network. - sequencer_url: Option, - /// Headers to use for the sequencer client requests. - sequencer_headers: Vec, - /// Minimum suggested priority fee (tip) - min_suggested_priority_fee: u64, - /// A URL pointing to a secure websocket connection (wss) that streams out [flashblocks]. - /// - /// [flashblocks]: reth_optimism_flashblocks - flashblocks_url: Option, - /// Enable flashblock consensus client to drive the chain forward. - /// - /// When enabled, flashblock sequences are submitted to the engine API via - /// `newPayload` and `forkchoiceUpdated` calls, advancing the canonical chain state. - /// Requires `flashblocks_url` to be set. - flashblock_consensus: bool, - /// Marker for network types. - _nt: PhantomData, -} - -impl Default for OpEthApiBuilder { - fn default() -> Self { - Self { - sequencer_url: None, - sequencer_headers: Vec::new(), - min_suggested_priority_fee: 1_000_000, - flashblocks_url: None, - flashblock_consensus: false, - _nt: PhantomData, - } - } -} - -impl OpEthApiBuilder { - /// Creates a [`OpEthApiBuilder`] instance from core components. - pub const fn new() -> Self { - Self { - sequencer_url: None, - sequencer_headers: Vec::new(), - min_suggested_priority_fee: 1_000_000, - flashblocks_url: None, - flashblock_consensus: false, - _nt: PhantomData, - } - } - - /// With a [`SequencerClient`]. - pub fn with_sequencer(mut self, sequencer_url: Option) -> Self { - self.sequencer_url = sequencer_url; - self - } - - /// With headers to use for the sequencer client requests. 
- pub fn with_sequencer_headers(mut self, sequencer_headers: Vec) -> Self { - self.sequencer_headers = sequencer_headers; - self - } - - /// With minimum suggested priority fee (tip). - pub const fn with_min_suggested_priority_fee(mut self, min: u64) -> Self { - self.min_suggested_priority_fee = min; - self - } - - /// With a subscription to flashblocks secure websocket connection. - pub fn with_flashblocks(mut self, flashblocks_url: Option) -> Self { - self.flashblocks_url = flashblocks_url; - self - } - - /// With flashblock consensus client enabled to drive chain forward - pub const fn with_flashblock_consensus(mut self, flashblock_consensus: bool) -> Self { - self.flashblock_consensus = flashblock_consensus; - self - } -} - -impl EthApiBuilder for OpEthApiBuilder -where - N: FullNodeComponents< - Evm: ConfigureEvm< - NextBlockEnvCtx: BuildPendingEnv> - + From - + Unpin, - >, - Types: NodeTypes< - ChainSpec: Hardforks + EthereumHardforks, - Payload: reth_node_api::PayloadTypes< - ExecutionData: for<'a> TryFrom< - &'a FlashBlockCompleteSequence, - Error: std::fmt::Display, - >, - >, - >, - >, - NetworkT: RpcTypes, - OpRpcConvert: RpcConvert, - OpEthApi>: - FullEthApiServer, -{ - type EthApi = OpEthApi>; - - async fn build_eth_api(self, ctx: EthApiCtx<'_, N>) -> eyre::Result { - let Self { - sequencer_url, - sequencer_headers, - min_suggested_priority_fee, - flashblocks_url, - flashblock_consensus, - .. 
- } = self; - let rpc_converter = - RpcConverter::new(OpReceiptConverter::new(ctx.components.provider().clone())) - .with_mapper(OpTxInfoMapper::new(ctx.components.provider().clone())); - - let sequencer_client = if let Some(url) = sequencer_url { - Some( - SequencerClient::new_with_headers(&url, sequencer_headers) - .await - .wrap_err_with(|| format!("Failed to init sequencer client with: {url}"))?, - ) - } else { - None - }; - - let flashblocks = if let Some(ws_url) = flashblocks_url { - info!(target: "reth:cli", %ws_url, "Launching flashblocks service"); - - let (tx, pending_rx) = watch::channel(None); - let stream = WsFlashBlockStream::new(ws_url); - let service = FlashBlockService::new( - stream, - ctx.components.evm_config().clone(), - ctx.components.provider().clone(), - ctx.components.task_executor().clone(), - // enable state root calculation if flashblock_consensus is enabled. - flashblock_consensus, - ); - - let flashblocks_sequence = service.block_sequence_broadcaster().clone(); - let received_flashblocks = service.flashblocks_broadcaster().clone(); - let in_progress_rx = service.subscribe_in_progress(); - ctx.components.task_executor().spawn(Box::pin(service.run(tx))); - - if flashblock_consensus { - info!(target: "reth::cli", "Launching FlashBlockConsensusClient"); - let flashblock_client = FlashBlockConsensusClient::new( - ctx.engine_handle.clone(), - flashblocks_sequence.subscribe(), - )?; - ctx.components.task_executor().spawn(Box::pin(flashblock_client.run())); - } - - Some(FlashblocksListeners::new( - pending_rx, - flashblocks_sequence, - in_progress_rx, - received_flashblocks, - )) - } else { - None - }; - - let eth_api = ctx.eth_api_builder().with_rpc_converter(rpc_converter).build_inner(); - - Ok(OpEthApi::new( - eth_api, - sequencer_client, - U256::from(min_suggested_priority_fee), - flashblocks, - )) - } -} diff --git a/op-reth/crates/rpc/src/eth/receipt.rs b/op-reth/crates/rpc/src/eth/receipt.rs deleted file mode 100644 index 
e86aa615672..00000000000 --- a/op-reth/crates/rpc/src/eth/receipt.rs +++ /dev/null @@ -1,723 +0,0 @@ -//! Loads and formats OP receipt RPC response. - -use crate::{eth::RpcNodeCore, OpEthApi, OpEthApiError}; -use alloy_consensus::{BlockHeader, Receipt, ReceiptWithBloom, TxReceipt}; -use alloy_eips::eip2718::Encodable2718; -use alloy_rpc_types_eth::{Log, TransactionReceipt}; -use op_alloy_consensus::{OpReceipt, OpTransaction}; -use op_alloy_rpc_types::{L1BlockInfo, OpTransactionReceipt, OpTransactionReceiptFields}; -use op_revm::estimate_tx_compressed_size; -use reth_chainspec::{ChainSpecProvider, EthChainSpec}; -use reth_node_api::NodePrimitives; -use reth_optimism_evm::RethL1BlockInfo; -use reth_optimism_forks::OpHardforks; -use reth_primitives_traits::SealedBlock; -use reth_rpc_eth_api::{ - helpers::LoadReceipt, - transaction::{ConvertReceiptInput, ReceiptConverter}, - RpcConvert, -}; -use reth_rpc_eth_types::{receipt::build_receipt, EthApiError}; -use reth_storage_api::BlockReader; -use std::fmt::Debug; - -impl LoadReceipt for OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ -} - -/// Converter for OP receipts. -#[derive(Debug, Clone)] -pub struct OpReceiptConverter { - provider: Provider, -} - -impl OpReceiptConverter { - /// Creates a new [`OpReceiptConverter`]. - pub const fn new(provider: Provider) -> Self { - Self { provider } - } -} - -impl ReceiptConverter for OpReceiptConverter -where - N: NodePrimitives, - Provider: - BlockReader + ChainSpecProvider + Debug + 'static, -{ - type RpcReceipt = OpTransactionReceipt; - type Error = OpEthApiError; - - fn convert_receipts( - &self, - inputs: Vec>, - ) -> Result, Self::Error> { - let Some(block_number) = inputs.first().map(|r| r.meta.block_number) else { - return Ok(Vec::new()); - }; - - let block = self - .provider - .block_by_number(block_number)? 
- .ok_or(EthApiError::HeaderNotFound(block_number.into()))?; - - self.convert_receipts_with_block(inputs, &SealedBlock::new_unhashed(block)) - } - - fn convert_receipts_with_block( - &self, - inputs: Vec>, - block: &SealedBlock, - ) -> Result, Self::Error> { - let mut l1_block_info = match reth_optimism_evm::extract_l1_info(block.body()) { - Ok(l1_block_info) => l1_block_info, - Err(err) => { - let genesis_number = - self.provider.chain_spec().genesis().number.unwrap_or_default(); - // If it is the genesis block (i.e. block number is 0), there is no L1 info, so - // we return an empty l1_block_info. - if block.header().number() == genesis_number { - return Ok(vec![]); - } - return Err(err.into()); - } - }; - - let mut receipts = Vec::with_capacity(inputs.len()); - - for input in inputs { - // We must clear this cache as different L2 transactions can have different - // L1 costs. A potential improvement here is to only clear the cache if the - // new transaction input has changed, since otherwise the L1 cost wouldn't. - l1_block_info.clear_tx_l1_cost(); - - receipts.push( - OpReceiptBuilder::new(&self.provider.chain_spec(), input, &mut l1_block_info)? - .build(), - ); - } - - Ok(receipts) - } -} - -/// L1 fee and data gas for a non-deposit transaction, or deposit nonce and receipt version for a -/// deposit transaction. -#[derive(Debug, Clone)] -pub struct OpReceiptFieldsBuilder { - /// Block number. - pub block_number: u64, - /// Block timestamp. - pub block_timestamp: u64, - /// The L1 fee for transaction. - pub l1_fee: Option, - /// L1 gas used by transaction. - pub l1_data_gas: Option, - /// L1 fee scalar. - pub l1_fee_scalar: Option, - /* ---------------------------------------- Bedrock ---------------------------------------- */ - /// The base fee of the L1 origin block. - pub l1_base_fee: Option, - /* --------------------------------------- Regolith ---------------------------------------- */ - /// Deposit nonce, if this is a deposit transaction. 
- pub deposit_nonce: Option, - /* ---------------------------------------- Canyon ----------------------------------------- */ - /// Deposit receipt version, if this is a deposit transaction. - pub deposit_receipt_version: Option, - /* ---------------------------------------- Ecotone ---------------------------------------- */ - /// The current L1 fee scalar. - pub l1_base_fee_scalar: Option, - /// The current L1 blob base fee. - pub l1_blob_base_fee: Option, - /// The current L1 blob base fee scalar. - pub l1_blob_base_fee_scalar: Option, - /* ---------------------------------------- Isthmus ---------------------------------------- */ - /// The current operator fee scalar. - pub operator_fee_scalar: Option, - /// The current L1 blob base fee scalar. - pub operator_fee_constant: Option, - /* ---------------------------------------- Jovian ----------------------------------------- */ - /// The current DA footprint gas scalar. - pub da_footprint_gas_scalar: Option, -} - -impl OpReceiptFieldsBuilder { - /// Returns a new builder. - pub const fn new(block_timestamp: u64, block_number: u64) -> Self { - Self { - block_number, - block_timestamp, - l1_fee: None, - l1_data_gas: None, - l1_fee_scalar: None, - l1_base_fee: None, - deposit_nonce: None, - deposit_receipt_version: None, - l1_base_fee_scalar: None, - l1_blob_base_fee: None, - l1_blob_base_fee_scalar: None, - operator_fee_scalar: None, - operator_fee_constant: None, - da_footprint_gas_scalar: None, - } - } - - /// Applies [`L1BlockInfo`](op_revm::L1BlockInfo). - pub fn l1_block_info( - mut self, - chain_spec: &impl OpHardforks, - tx: &T, - l1_block_info: &mut op_revm::L1BlockInfo, - ) -> Result { - let raw_tx = tx.encoded_2718(); - let timestamp = self.block_timestamp; - - self.l1_fee = Some( - l1_block_info - .l1_tx_data_fee(chain_spec, timestamp, &raw_tx, tx.is_deposit()) - .map_err(|_| OpEthApiError::L1BlockFeeError)? 
- .saturating_to(), - ); - - self.l1_data_gas = Some( - l1_block_info - .l1_data_gas(chain_spec, timestamp, &raw_tx) - .map_err(|_| OpEthApiError::L1BlockGasError)? - .saturating_add(l1_block_info.l1_fee_overhead.unwrap_or_default()) - .saturating_to(), - ); - - self.l1_fee_scalar = (!chain_spec.is_ecotone_active_at_timestamp(timestamp)) - .then_some(f64::from(l1_block_info.l1_base_fee_scalar) / 1_000_000.0); - - self.l1_base_fee = Some(l1_block_info.l1_base_fee.saturating_to()); - self.l1_base_fee_scalar = Some(l1_block_info.l1_base_fee_scalar.saturating_to()); - self.l1_blob_base_fee = l1_block_info.l1_blob_base_fee.map(|fee| fee.saturating_to()); - self.l1_blob_base_fee_scalar = - l1_block_info.l1_blob_base_fee_scalar.map(|scalar| scalar.saturating_to()); - - // If the operator fee params are both set to 0, we don't add them to the receipt. - let operator_fee_scalar_has_non_zero_value: bool = - l1_block_info.operator_fee_scalar.is_some_and(|scalar| !scalar.is_zero()); - - let operator_fee_constant_has_non_zero_value = - l1_block_info.operator_fee_constant.is_some_and(|constant| !constant.is_zero()); - - if operator_fee_scalar_has_non_zero_value || operator_fee_constant_has_non_zero_value { - self.operator_fee_scalar = - l1_block_info.operator_fee_scalar.map(|scalar| scalar.saturating_to()); - self.operator_fee_constant = - l1_block_info.operator_fee_constant.map(|constant| constant.saturating_to()); - } - - self.da_footprint_gas_scalar = l1_block_info.da_footprint_gas_scalar; - - Ok(self) - } - - /// Applies deposit transaction metadata: deposit nonce. - pub const fn deposit_nonce(mut self, nonce: Option) -> Self { - self.deposit_nonce = nonce; - self - } - - /// Applies deposit transaction metadata: deposit receipt version. - pub const fn deposit_version(mut self, version: Option) -> Self { - self.deposit_receipt_version = version; - self - } - - /// Builds the [`OpTransactionReceiptFields`] object. 
- pub const fn build(self) -> OpTransactionReceiptFields { - let Self { - block_number: _, // used to compute other fields - block_timestamp: _, // used to compute other fields - l1_fee, - l1_data_gas: l1_gas_used, - l1_fee_scalar, - l1_base_fee: l1_gas_price, - deposit_nonce, - deposit_receipt_version, - l1_base_fee_scalar, - l1_blob_base_fee, - l1_blob_base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - da_footprint_gas_scalar, - } = self; - - OpTransactionReceiptFields { - l1_block_info: L1BlockInfo { - l1_gas_price, - l1_gas_used, - l1_fee, - l1_fee_scalar, - l1_base_fee_scalar, - l1_blob_base_fee, - l1_blob_base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - da_footprint_gas_scalar, - }, - deposit_nonce, - deposit_receipt_version, - } - } -} - -/// Builds an [`OpTransactionReceipt`]. -#[derive(Debug)] -pub struct OpReceiptBuilder { - /// Core receipt, has all the fields of an L1 receipt and is the basis for the OP receipt. - pub core_receipt: TransactionReceipt>>, - /// Additional OP receipt fields. - pub op_receipt_fields: OpTransactionReceiptFields, -} - -impl OpReceiptBuilder { - /// Returns a new builder. 
- pub fn new( - chain_spec: &impl OpHardforks, - input: ConvertReceiptInput<'_, N>, - l1_block_info: &mut op_revm::L1BlockInfo, - ) -> Result - where - N: NodePrimitives, - { - let timestamp = input.meta.timestamp; - let block_number = input.meta.block_number; - let tx_signed = *input.tx.inner(); - let mut core_receipt = build_receipt(input, None, |receipt, next_log_index, meta| { - let map_logs = move |receipt: alloy_consensus::Receipt| { - let Receipt { status, cumulative_gas_used, logs } = receipt; - let logs = Log::collect_for_receipt(next_log_index, meta, logs); - Receipt { status, cumulative_gas_used, logs } - }; - let mapped_receipt: OpReceipt = match receipt { - OpReceipt::Legacy(receipt) => OpReceipt::Legacy(map_logs(receipt)), - OpReceipt::Eip2930(receipt) => OpReceipt::Eip2930(map_logs(receipt)), - OpReceipt::Eip1559(receipt) => OpReceipt::Eip1559(map_logs(receipt)), - OpReceipt::Eip7702(receipt) => OpReceipt::Eip7702(map_logs(receipt)), - OpReceipt::Deposit(receipt) => OpReceipt::Deposit(receipt.map_inner(map_logs)), - }; - mapped_receipt.into_with_bloom() - }); - - // In jovian, we're using the blob gas used field to store the current da - // footprint's value. - // We're computing the jovian blob gas used before building the receipt since the inputs get - // consumed by the `build_receipt` function. - chain_spec.is_jovian_active_at_timestamp(timestamp).then(|| { - // Estimate the size of the transaction in bytes and multiply by the DA - // footprint gas scalar. 
- // Jovian specs: `https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/exec-engine.md#da-footprint-block-limit` - let da_size = estimate_tx_compressed_size(tx_signed.encoded_2718().as_slice()) - .saturating_div(1_000_000) - .saturating_mul(l1_block_info.da_footprint_gas_scalar.unwrap_or_default().into()); - - core_receipt.blob_gas_used = Some(da_size); - }); - - let op_receipt_fields = OpReceiptFieldsBuilder::new(timestamp, block_number) - .l1_block_info(chain_spec, tx_signed, l1_block_info)? - .build(); - - Ok(Self { core_receipt, op_receipt_fields }) - } - - /// Builds [`OpTransactionReceipt`] by combining core (l1) receipt fields and additional OP - /// receipt fields. - pub fn build(self) -> OpTransactionReceipt { - let Self { core_receipt: inner, op_receipt_fields } = self; - - let OpTransactionReceiptFields { l1_block_info, .. } = op_receipt_fields; - - OpTransactionReceipt { inner, l1_block_info } - } -} - -#[cfg(test)] -mod test { - use super::*; - use alloy_consensus::{transaction::TransactionMeta, Block, BlockBody, Eip658Value, TxEip7702}; - use alloy_op_hardforks::{ - OpChainHardforks, OP_MAINNET_ISTHMUS_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, - }; - use alloy_primitives::{hex, Address, Bytes, Signature, U256}; - use op_alloy_consensus::OpTypedTransaction; - use op_alloy_network::eip2718::Decodable2718; - use reth_optimism_chainspec::{BASE_MAINNET, OP_MAINNET}; - use reth_optimism_primitives::{OpPrimitives, OpTransactionSigned}; - use reth_primitives_traits::Recovered; - - /// OP Mainnet transaction at index 0 in block 124665056. 
- /// - /// - const TX_SET_L1_BLOCK_OP_MAINNET_BLOCK_124665056: [u8; 251] = hex!( - "7ef8f8a0683079df94aa5b9cf86687d739a60a9b4f0835e520ec4d664e2e415dca17a6df94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e200000146b000f79c500000000000000040000000066d052e700000000013ad8a3000000000000000000000000000000000000000000000000000000003ef1278700000000000000000000000000000000000000000000000000000000000000012fdf87b89884a61e74b322bbcf60386f543bfae7827725efaaf0ab1de2294a590000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985" - ); - - /// OP Mainnet transaction at index 1 in block 124665056. - /// - /// - const TX_1_OP_MAINNET_BLOCK_124665056: [u8; 1176] = hex!( - "02f904940a8303fba78401d6d2798401db2b6d830493e0943e6f4f7866654c18f536170780344aa8772950b680b904246a761202000000000000000000000000087000a300de7200382b55d40045000000e5d60e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003a0000000000000000000000000000000000000000000000000000000000000022482ad56cb0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000120000000000000000000000000dc6ff44d5d932cbd77b52e5612ba0529dc6226f10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000
00000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000021c4928109acb0659a88ae5329b5374a3024694c0000000000000000000000000000000000000000000000049b9ca9a6943400000000000000000000000000000000000000000000000000000000000000000000000000000000000021c4928109acb0659a88ae5329b5374a3024694c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000024b6b55f250000000000000000000000000000000000000000000000049b9ca9a694340000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000415ec214a3950bea839a7e6fbb0ba1540ac2076acd50820e2d5ef83d0902cdffb24a47aff7de5190290769c4f0a9c6fabf63012986a0d590b1b571547a8c7050ea1b00000000000000000000000000000000000000000000000000000000000000c080a06db770e6e25a617fe9652f0958bd9bd6e49281a53036906386ed39ec48eadf63a07f47cf51a4a40b4494cf26efc686709a9b03939e20ee27e59682f5faa536667e" - ); - - /// Timestamp of OP mainnet block 124665056. - /// - /// - const BLOCK_124665056_TIMESTAMP: u64 = 1724928889; - - /// L1 block info for transaction at index 1 in block 124665056. 
- /// - /// - const TX_META_TX_1_OP_MAINNET_BLOCK_124665056: OpTransactionReceiptFields = - OpTransactionReceiptFields { - l1_block_info: L1BlockInfo { - l1_gas_price: Some(1055991687), // since bedrock l1 base fee - l1_gas_used: Some(4471), - l1_fee: Some(24681034813), - l1_fee_scalar: None, - l1_base_fee_scalar: Some(5227), - l1_blob_base_fee: Some(1), - l1_blob_base_fee_scalar: Some(1014213), - operator_fee_scalar: None, - operator_fee_constant: None, - da_footprint_gas_scalar: None, - }, - deposit_nonce: None, - deposit_receipt_version: None, - }; - - #[test] - fn op_receipt_fields_from_block_and_tx() { - // rig - let tx_0 = OpTransactionSigned::decode_2718( - &mut TX_SET_L1_BLOCK_OP_MAINNET_BLOCK_124665056.as_slice(), - ) - .unwrap(); - - let tx_1 = - OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) - .unwrap(); - - let block: Block = Block { - body: BlockBody { transactions: [tx_0, tx_1.clone()].to_vec(), ..Default::default() }, - ..Default::default() - }; - - let mut l1_block_info = - reth_optimism_evm::extract_l1_info(&block.body).expect("should extract l1 info"); - - // test - assert!(OP_MAINNET.is_fjord_active_at_timestamp(BLOCK_124665056_TIMESTAMP)); - - let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) - .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) - .expect("should parse revm l1 info") - .build(); - - let L1BlockInfo { - l1_gas_price, - l1_gas_used, - l1_fee, - l1_fee_scalar, - l1_base_fee_scalar, - l1_blob_base_fee, - l1_blob_base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - da_footprint_gas_scalar, - } = receipt_meta.l1_block_info; - - assert_eq!( - l1_gas_price, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_gas_price, - "incorrect l1 base fee (former gas price)" - ); - assert_eq!( - l1_gas_used, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_gas_used, - "incorrect l1 gas used" - ); - assert_eq!( - l1_fee, 
TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_fee, - "incorrect l1 fee" - ); - assert_eq!( - l1_fee_scalar, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_fee_scalar, - "incorrect l1 fee scalar" - ); - assert_eq!( - l1_base_fee_scalar, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_base_fee_scalar, - "incorrect l1 base fee scalar" - ); - assert_eq!( - l1_blob_base_fee, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_blob_base_fee, - "incorrect l1 blob base fee" - ); - assert_eq!( - l1_blob_base_fee_scalar, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_blob_base_fee_scalar, - "incorrect l1 blob base fee scalar" - ); - assert_eq!( - operator_fee_scalar, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.operator_fee_scalar, - "incorrect operator fee scalar" - ); - assert_eq!( - operator_fee_constant, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.operator_fee_constant, - "incorrect operator fee constant" - ); - assert_eq!( - da_footprint_gas_scalar, - TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.da_footprint_gas_scalar, - "incorrect da footprint gas scalar" - ); - } - - #[test] - fn op_non_zero_operator_fee_params_included_in_receipt() { - let tx_1 = - OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) - .unwrap(); - - let mut l1_block_info = op_revm::L1BlockInfo { - operator_fee_scalar: Some(U256::ZERO), - operator_fee_constant: Some(U256::from(2)), - ..Default::default() - }; - - let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) - .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) - .expect("should parse revm l1 info") - .build(); - - let L1BlockInfo { operator_fee_scalar, operator_fee_constant, .. 
} = - receipt_meta.l1_block_info; - - assert_eq!(operator_fee_scalar, Some(0), "incorrect operator fee scalar"); - assert_eq!(operator_fee_constant, Some(2), "incorrect operator fee constant"); - } - - #[test] - fn op_zero_operator_fee_params_not_included_in_receipt() { - let tx_1 = - OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) - .unwrap(); - - let mut l1_block_info = op_revm::L1BlockInfo { - operator_fee_scalar: Some(U256::ZERO), - operator_fee_constant: Some(U256::ZERO), - ..Default::default() - }; - - let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) - .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) - .expect("should parse revm l1 info") - .build(); - - let L1BlockInfo { operator_fee_scalar, operator_fee_constant, .. } = - receipt_meta.l1_block_info; - - assert_eq!(operator_fee_scalar, None, "incorrect operator fee scalar"); - assert_eq!(operator_fee_constant, None, "incorrect operator fee constant"); - } - - // - #[test] - fn base_receipt_gas_fields() { - // https://basescan.org/tx/0x510fd4c47d78ba9f97c91b0f2ace954d5384c169c9545a77a373cf3ef8254e6e - let system = hex!( - "7ef8f8a0389e292420bcbf9330741f72074e39562a09ff5a00fd22e4e9eee7e34b81bca494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c120000000000000004000000006721035b00000000014189960000000000000000000000000000000000000000000000000000000349b4dcdc000000000000000000000000000000000000000000000000000000004ef9325cc5991ce750960f636ca2ffbb6e209bb3ba91412f21dd78c14ff154d1930f1f9a0000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9" - ); - let tx_0 = OpTransactionSigned::decode_2718(&mut &system[..]).unwrap(); - - let block: alloy_consensus::Block = Block { - body: BlockBody { transactions: vec![tx_0], ..Default::default() }, - ..Default::default() - }; - let mut l1_block_info = - reth_optimism_evm::extract_l1_info(&block.body).expect("should 
extract l1 info"); - - // https://basescan.org/tx/0xf9420cbaf66a2dda75a015488d37262cbfd4abd0aad7bb2be8a63e14b1fa7a94 - let tx = hex!( - "02f86c8221058034839a4ae283021528942f16386bb37709016023232523ff6d9daf444be380841249c58bc080a001b927eda2af9b00b52a57be0885e0303c39dd2831732e14051c2336470fd468a0681bf120baf562915841a48601c2b54a6742511e535cf8f71c95115af7ff63bd" - ); - let tx_1 = OpTransactionSigned::decode_2718(&mut &tx[..]).unwrap(); - - let receipt_meta = OpReceiptFieldsBuilder::new(1730216981, 21713817) - .l1_block_info(&*BASE_MAINNET, &tx_1, &mut l1_block_info) - .expect("should parse revm l1 info") - .build(); - - let L1BlockInfo { - l1_gas_price, - l1_gas_used, - l1_fee, - l1_fee_scalar, - l1_base_fee_scalar, - l1_blob_base_fee, - l1_blob_base_fee_scalar, - operator_fee_scalar, - operator_fee_constant, - da_footprint_gas_scalar, - } = receipt_meta.l1_block_info; - - assert_eq!(l1_gas_price, Some(14121491676), "incorrect l1 base fee (former gas price)"); - assert_eq!(l1_gas_used, Some(1600), "incorrect l1 gas used"); - assert_eq!(l1_fee, Some(191150293412), "incorrect l1 fee"); - assert!(l1_fee_scalar.is_none(), "incorrect l1 fee scalar"); - assert_eq!(l1_base_fee_scalar, Some(2269), "incorrect l1 base fee scalar"); - assert_eq!(l1_blob_base_fee, Some(1324954204), "incorrect l1 blob base fee"); - assert_eq!(l1_blob_base_fee_scalar, Some(1055762), "incorrect l1 blob base fee scalar"); - assert_eq!(operator_fee_scalar, None, "incorrect operator fee scalar"); - assert_eq!(operator_fee_constant, None, "incorrect operator fee constant"); - assert_eq!(da_footprint_gas_scalar, None, "incorrect da footprint gas scalar"); - } - - #[test] - fn da_footprint_gas_scalar_included_in_receipt_post_jovian() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 10; - - let tx = TxEip7702 { - chain_id: 1u64, - nonce: 0, - max_fee_per_gas: 0x28f000fff, - max_priority_fee_per_gas: 0x28f000fff, - gas_limit: 10, - to: Address::default(), - value: U256::from(3_u64), - input: Bytes::from(vec![1, 2]), 
- access_list: Default::default(), - authorization_list: Default::default(), - }; - - let signature = Signature::new(U256::default(), U256::default(), true); - - let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); - - let mut l1_block_info = op_revm::L1BlockInfo { - da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), - ..Default::default() - }; - - let op_hardforks = OpChainHardforks::op_mainnet(); - - let receipt = OpReceiptFieldsBuilder::new(OP_MAINNET_JOVIAN_TIMESTAMP, u64::MAX) - .l1_block_info(&op_hardforks, &tx, &mut l1_block_info) - .expect("should parse revm l1 info") - .build(); - - assert_eq!(receipt.l1_block_info.da_footprint_gas_scalar, Some(DA_FOOTPRINT_GAS_SCALAR)); - } - - #[test] - fn blob_gas_used_included_in_receipt_post_jovian() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 100; - let tx = TxEip7702 { - chain_id: 1u64, - nonce: 0, - max_fee_per_gas: 0x28f000fff, - max_priority_fee_per_gas: 0x28f000fff, - gas_limit: 10, - to: Address::default(), - value: U256::from(3_u64), - access_list: Default::default(), - authorization_list: Default::default(), - input: Bytes::from(vec![0; 1_000_000]), - }; - - let signature = Signature::new(U256::default(), U256::default(), true); - - let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); - - let mut l1_block_info = op_revm::L1BlockInfo { - da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), - ..Default::default() - }; - - let op_hardforks = OpChainHardforks::op_mainnet(); - - let op_receipt = OpReceiptBuilder::new( - &op_hardforks, - ConvertReceiptInput:: { - tx: Recovered::new_unchecked(&tx, Address::default()), - receipt: OpReceipt::Eip7702(Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 100, - logs: vec![], - }), - gas_used: 100, - next_log_index: 0, - meta: TransactionMeta { - timestamp: OP_MAINNET_JOVIAN_TIMESTAMP, - ..Default::default() - }, - }, - &mut l1_block_info, - ) - .unwrap(); - - let 
expected_blob_gas_used = estimate_tx_compressed_size(tx.encoded_2718().as_slice()) - .saturating_div(1_000_000) - .saturating_mul(DA_FOOTPRINT_GAS_SCALAR.into()); - - assert_eq!(op_receipt.core_receipt.blob_gas_used, Some(expected_blob_gas_used)); - } - - #[test] - fn blob_gas_used_not_included_in_receipt_post_isthmus() { - const DA_FOOTPRINT_GAS_SCALAR: u16 = 100; - let tx = TxEip7702 { - chain_id: 1u64, - nonce: 0, - max_fee_per_gas: 0x28f000fff, - max_priority_fee_per_gas: 0x28f000fff, - gas_limit: 10, - to: Address::default(), - value: U256::from(3_u64), - access_list: Default::default(), - authorization_list: Default::default(), - input: Bytes::from(vec![0; 1_000_000]), - }; - - let signature = Signature::new(U256::default(), U256::default(), true); - - let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); - - let mut l1_block_info = op_revm::L1BlockInfo { - da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), - ..Default::default() - }; - - let op_hardforks = OpChainHardforks::op_mainnet(); - - let op_receipt = OpReceiptBuilder::new( - &op_hardforks, - ConvertReceiptInput:: { - tx: Recovered::new_unchecked(&tx, Address::default()), - receipt: OpReceipt::Eip7702(Receipt { - status: Eip658Value::Eip658(true), - cumulative_gas_used: 100, - logs: vec![], - }), - gas_used: 100, - next_log_index: 0, - meta: TransactionMeta { - timestamp: OP_MAINNET_ISTHMUS_TIMESTAMP, - ..Default::default() - }, - }, - &mut l1_block_info, - ) - .unwrap(); - - assert_eq!(op_receipt.core_receipt.blob_gas_used, None); - } -} diff --git a/op-reth/crates/rpc/src/eth/transaction.rs b/op-reth/crates/rpc/src/eth/transaction.rs deleted file mode 100644 index af842e71c6f..00000000000 --- a/op-reth/crates/rpc/src/eth/transaction.rs +++ /dev/null @@ -1,302 +0,0 @@ -//! Loads and formats OP transaction RPC response. 
- -use crate::{OpEthApi, OpEthApiError, SequencerClient}; -use alloy_primitives::{Bytes, B256}; -use alloy_rpc_types_eth::TransactionInfo; -use futures::StreamExt; -use op_alloy_consensus::{ - transaction::{OpDepositInfo, OpTransactionInfo}, - OpTransaction, -}; -use reth_chain_state::CanonStateSubscriptions; -use reth_optimism_primitives::DepositReceipt; -use reth_primitives_traits::{Recovered, SignedTransaction, SignerRecoverable, WithEncoded}; -use reth_rpc_eth_api::{ - helpers::{spec::SignersForRpc, EthTransactions, LoadReceipt, LoadTransaction, SpawnBlocking}, - EthApiTypes as _, FromEthApiError, FromEvmError, RpcConvert, RpcNodeCore, RpcReceipt, - TxInfoMapper, -}; -use reth_rpc_eth_types::{block::convert_transaction_receipt, EthApiError, TransactionSource}; -use reth_storage_api::{errors::ProviderError, ProviderTx, ReceiptProvider, TransactionsProvider}; -use reth_transaction_pool::{ - AddedTransactionOutcome, PoolPooledTx, PoolTransaction, TransactionOrigin, TransactionPool, -}; -use std::{ - fmt::{Debug, Formatter}, - future::Future, - time::Duration, -}; -use tokio_stream::wrappers::WatchStream; - -impl EthTransactions for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ - fn signers(&self) -> &SignersForRpc { - self.inner.eth_api.signers() - } - - fn send_raw_transaction_sync_timeout(&self) -> Duration { - self.inner.eth_api.send_raw_transaction_sync_timeout() - } - - async fn send_transaction( - &self, - tx: WithEncoded>>, - ) -> Result { - let (tx, recovered) = tx.split(); - - // broadcast raw transaction to subscribers if there is any. - self.eth_api().broadcast_raw_transaction(tx.clone()); - - let pool_transaction = ::Transaction::from_pooled(recovered); - - // On optimism, transactions are forwarded directly to the sequencer to be included in - // blocks that it builds. 
- if let Some(client) = self.raw_tx_forwarder().as_ref() { - tracing::debug!(target: "rpc::eth", hash = %pool_transaction.hash(), "forwarding raw transaction to sequencer"); - let hash = client.forward_raw_transaction(&tx).await.inspect_err(|err| { - tracing::debug!(target: "rpc::eth", %err, hash=% *pool_transaction.hash(), "failed to forward raw transaction"); - })?; - - // Retain tx in local tx pool after forwarding, for local RPC usage. - let _ = self.inner.eth_api.add_pool_transaction(pool_transaction).await.inspect_err(|err| { - tracing::warn!(target: "rpc::eth", %err, %hash, "successfully sent tx to sequencer, but failed to persist in local tx pool"); - }); - - return Ok(hash) - } - - // submit the transaction to the pool with a `Local` origin - let AddedTransactionOutcome { hash, .. } = self - .pool() - .add_transaction(TransactionOrigin::Local, pool_transaction) - .await - .map_err(Self::Error::from_eth_err)?; - - Ok(hash) - } - - /// Decodes and recovers the transaction and submits it to the pool. - /// - /// And awaits the receipt, checking both canonical blocks and flashblocks for faster - /// confirmation. - fn send_raw_transaction_sync( - &self, - tx: Bytes, - ) -> impl Future, Self::Error>> + Send { - let this = self.clone(); - let timeout_duration = self.send_raw_transaction_sync_timeout(); - async move { - let mut canonical_stream = this.provider().canonical_state_stream(); - let hash = EthTransactions::send_raw_transaction(&this, tx).await?; - let mut flashblock_stream = this.pending_block_rx().map(WatchStream::new); - - tokio::time::timeout(timeout_duration, async { - loop { - tokio::select! 
{ - biased; - // check if the tx was preconfirmed in a new flashblock - flashblock = async { - if let Some(stream) = &mut flashblock_stream { - stream.next().await - } else { - futures::future::pending().await - } - } => { - if let Some(flashblock) = flashblock.flatten() { - // if flashblocks are supported, attempt to find id from the pending block - if let Some(receipt) = flashblock - .find_and_convert_transaction_receipt(hash, this.converter()) - { - return receipt; - } - } - } - // Listen for regular canonical block updates for inclusion - canonical_notification = canonical_stream.next() => { - if let Some(notification) = canonical_notification { - let chain = notification.committed(); - if let Some((block, tx, receipt, all_receipts)) = - chain.find_transaction_and_receipt_by_hash(hash) && - let Some(receipt) = convert_transaction_receipt( - block, - all_receipts, - tx, - receipt, - this.converter(), - ) - .transpose()? - { - return Ok(receipt); - } - } else { - // Canonical stream ended - break; - } - } - } - } - Err(Self::Error::from_eth_err(EthApiError::TransactionConfirmationTimeout { - hash, - duration: timeout_duration, - })) - }) - .await - .unwrap_or_else(|_elapsed| { - Err(Self::Error::from_eth_err(EthApiError::TransactionConfirmationTimeout { - hash, - duration: timeout_duration, - })) - }) - } - } - - /// Returns the transaction receipt for the given hash. - /// - /// With flashblocks, we should also lookup the pending block for the transaction - /// because this is considered confirmed/mined. 
- fn transaction_receipt( - &self, - hash: B256, - ) -> impl Future>, Self::Error>> + Send - { - let this = self.clone(); - async move { - // first attempt to fetch the mined transaction receipt data - let tx_receipt = this.load_transaction_and_receipt(hash).await?; - - if tx_receipt.is_none() { - // if flashblocks are supported, attempt to find id from the pending block - if let Ok(Some(pending_block)) = this.pending_flashblock().await && - let Some(Ok(receipt)) = pending_block - .find_and_convert_transaction_receipt(hash, this.converter()) - { - return Ok(Some(receipt)); - } - } - let Some((tx, meta, receipt)) = tx_receipt else { return Ok(None) }; - self.build_transaction_receipt(tx, meta, receipt).await.map(Some) - } - } -} - -impl LoadTransaction for OpEthApi -where - N: RpcNodeCore, - OpEthApiError: FromEvmError, - Rpc: RpcConvert, -{ - async fn transaction_by_hash( - &self, - hash: B256, - ) -> Result>>, Self::Error> { - // 1. Try to find the transaction on disk (historical blocks) - if let Some((tx, meta)) = self - .spawn_blocking_io(move |this| { - this.provider() - .transaction_by_hash_with_meta(hash) - .map_err(Self::Error::from_eth_err) - }) - .await? - { - let transaction = tx - .try_into_recovered_unchecked() - .map_err(|_| EthApiError::InvalidTransactionSignature)?; - - return Ok(Some(TransactionSource::Block { - transaction, - index: meta.index, - block_hash: meta.block_hash, - block_number: meta.block_number, - base_fee: meta.base_fee, - })); - } - - // 2. check flashblocks (sequencer preconfirmations) - if let Ok(Some(pending_block)) = self.pending_flashblock().await && - let Some(indexed_tx) = pending_block.block().find_indexed(hash) - { - let meta = indexed_tx.meta(); - return Ok(Some(TransactionSource::Block { - transaction: indexed_tx.recovered_tx().cloned(), - index: meta.index, - block_hash: meta.block_hash, - block_number: meta.block_number, - base_fee: meta.base_fee, - })); - } - - // 3. 
check local pool - if let Some(tx) = self.pool().get(&hash).map(|tx| tx.transaction.clone_into_consensus()) { - return Ok(Some(TransactionSource::Pool(tx))); - } - - Ok(None) - } -} - -impl OpEthApi -where - N: RpcNodeCore, - Rpc: RpcConvert, -{ - /// Returns the [`SequencerClient`] if one is set. - pub fn raw_tx_forwarder(&self) -> Option { - self.inner.sequencer_client.clone() - } -} - -/// Optimism implementation of [`TxInfoMapper`]. -/// -/// For deposits, receipt is fetched to extract `deposit_nonce` and `deposit_receipt_version`. -/// Otherwise, it works like regular Ethereum implementation, i.e. uses [`TransactionInfo`]. -pub struct OpTxInfoMapper { - provider: Provider, -} - -impl Clone for OpTxInfoMapper { - fn clone(&self) -> Self { - Self { provider: self.provider.clone() } - } -} - -impl Debug for OpTxInfoMapper { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - f.debug_struct("OpTxInfoMapper").finish() - } -} - -impl OpTxInfoMapper { - /// Creates [`OpTxInfoMapper`] that uses [`ReceiptProvider`] borrowed from given `eth_api`. - pub const fn new(provider: Provider) -> Self { - Self { provider } - } -} - -impl TxInfoMapper for OpTxInfoMapper -where - T: OpTransaction + SignedTransaction, - Provider: ReceiptProvider, -{ - type Out = OpTransactionInfo; - type Err = ProviderError; - - fn try_map(&self, tx: &T, tx_info: TransactionInfo) -> Result { - let deposit_meta = if tx.is_deposit() { - self.provider.receipt_by_hash(*tx.tx_hash())?.and_then(|receipt| { - receipt.as_deposit_receipt().map(|receipt| OpDepositInfo { - deposit_receipt_version: receipt.deposit_receipt_version, - deposit_nonce: receipt.deposit_nonce, - }) - }) - } else { - None - } - .unwrap_or_default(); - - Ok(OpTransactionInfo::new(tx_info, deposit_meta)) - } -} diff --git a/op-reth/crates/rpc/src/lib.rs b/op-reth/crates/rpc/src/lib.rs deleted file mode 100644 index 10f8ad5dccd..00000000000 --- a/op-reth/crates/rpc/src/lib.rs +++ /dev/null @@ -1,26 +0,0 @@ -//! 
OP-Reth RPC support. - -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" -)] -#![cfg_attr(not(test), warn(unused_crate_dependencies))] -#![cfg_attr(docsrs, feature(doc_cfg))] - -pub mod engine; -pub mod error; -pub mod eth; -pub mod historical; -pub mod metrics; -pub mod miner; -pub mod sequencer; -pub mod witness; - -#[cfg(feature = "client")] -pub use engine::OpEngineApiClient; -pub use engine::{OpEngineApi, OpEngineApiServer, OP_ENGINE_CAPABILITIES}; -pub use error::{OpEthApiError, OpInvalidTransactionError, SequencerClientError}; -pub use eth::{OpEthApi, OpEthApiBuilder, OpReceiptBuilder}; -pub use metrics::SequencerMetrics; -pub use sequencer::SequencerClient; diff --git a/op-reth/crates/rpc/src/sequencer.rs b/op-reth/crates/rpc/src/sequencer.rs deleted file mode 100644 index 86ed000e863..00000000000 --- a/op-reth/crates/rpc/src/sequencer.rs +++ /dev/null @@ -1,282 +0,0 @@ -//! Helpers for optimism specific RPC implementations. - -use crate::{SequencerClientError, SequencerMetrics}; -use alloy_json_rpc::{RpcRecv, RpcSend}; -use alloy_primitives::{hex, B256}; -use alloy_rpc_client::{BuiltInConnectionString, ClientBuilder, RpcClient as Client}; -use alloy_rpc_types_eth::erc4337::TransactionConditional; -use alloy_transport_http::Http; -use std::{str::FromStr, sync::Arc, time::Instant}; -use thiserror::Error; -use tracing::warn; - -/// Sequencer client error -#[derive(Error, Debug)] -pub enum Error { - /// Invalid scheme - #[error("Invalid scheme of sequencer url: {0}")] - InvalidScheme(String), - /// Invalid header or value provided. - #[error("Invalid header: {0}")] - InvalidHeader(String), - /// Invalid url - #[error("Invalid sequencer url: {0}")] - InvalidUrl(String), - /// Establishing a connection to the sequencer endpoint resulted in an error. 
- #[error("Failed to connect to sequencer: {0}")] - TransportError( - #[from] - #[source] - alloy_transport::TransportError, - ), - /// Reqwest failed to init client - #[error("Failed to init reqwest client for sequencer: {0}")] - ReqwestError( - #[from] - #[source] - reqwest::Error, - ), -} - -/// A client to interact with a Sequencer -#[derive(Debug, Clone)] -pub struct SequencerClient { - inner: Arc, -} - -impl SequencerClientInner { - /// Creates a new instance with the given endpoint and client. - pub(crate) fn new(sequencer_endpoint: String, client: Client) -> Self { - let metrics = SequencerMetrics::default(); - Self { sequencer_endpoint, client, metrics } - } -} - -impl SequencerClient { - /// Creates a new [`SequencerClient`] for the given URL. - /// - /// If the URL is a websocket endpoint we connect a websocket instance. - pub async fn new(sequencer_endpoint: impl Into) -> Result { - Self::new_with_headers(sequencer_endpoint, Default::default()).await - } - - /// Creates a new `SequencerClient` for the given URL with the given headers - /// - /// This expects headers in the form: `header=value` - pub async fn new_with_headers( - sequencer_endpoint: impl Into, - headers: Vec, - ) -> Result { - let sequencer_endpoint = sequencer_endpoint.into(); - let endpoint = BuiltInConnectionString::from_str(&sequencer_endpoint)?; - if let BuiltInConnectionString::Http(url) = endpoint { - let mut builder = reqwest::Client::builder() - // we force use tls to prevent native issues - .use_rustls_tls(); - - if !headers.is_empty() { - let mut header_map = reqwest::header::HeaderMap::new(); - for header in headers { - if let Some((key, value)) = header.split_once('=') { - header_map.insert( - key.trim() - .parse::() - .map_err(|err| Error::InvalidHeader(err.to_string()))?, - value - .trim() - .parse::() - .map_err(|err| Error::InvalidHeader(err.to_string()))?, - ); - } - } - builder = builder.default_headers(header_map); - } - - let client = builder.build()?; - 
Self::with_http_client(url, client) - } else { - let client = ClientBuilder::default().connect_with(endpoint).await?; - let inner = SequencerClientInner::new(sequencer_endpoint, client); - Ok(Self { inner: Arc::new(inner) }) - } - } - - /// Creates a new [`SequencerClient`] with http transport with the given http client. - pub fn with_http_client( - sequencer_endpoint: impl Into, - client: reqwest::Client, - ) -> Result { - let sequencer_endpoint: String = sequencer_endpoint.into(); - let url = sequencer_endpoint - .parse() - .map_err(|_| Error::InvalidUrl(sequencer_endpoint.clone()))?; - - let http_client = Http::with_client(client, url); - let is_local = http_client.guess_local(); - let client = ClientBuilder::default().transport(http_client, is_local); - - let inner = SequencerClientInner::new(sequencer_endpoint, client); - Ok(Self { inner: Arc::new(inner) }) - } - - /// Returns the network of the client - pub fn endpoint(&self) -> &str { - &self.inner.sequencer_endpoint - } - - /// Returns the client - pub fn client(&self) -> &Client { - &self.inner.client - } - - /// Returns a reference to the [`SequencerMetrics`] for tracking client metrics. - fn metrics(&self) -> &SequencerMetrics { - &self.inner.metrics - } - - /// Sends a [`alloy_rpc_client::RpcCall`] request to the sequencer endpoint. - pub async fn request( - &self, - method: &str, - params: Params, - ) -> Result { - let resp = - self.client().request::(method.to_string(), params).await.inspect_err( - |err| { - warn!( - target: "rpc::sequencer", - %err, - "HTTP request to sequencer failed", - ); - }, - )?; - Ok(resp) - } - - /// Forwards a transaction to the sequencer endpoint. 
- pub async fn forward_raw_transaction(&self, tx: &[u8]) -> Result { - let start = Instant::now(); - let rlp_hex = hex::encode_prefixed(tx); - let tx_hash = - self.request("eth_sendRawTransaction", (rlp_hex,)).await.inspect_err(|err| { - warn!( - target: "rpc::eth", - %err, - "Failed to forward transaction to sequencer", - ); - })?; - self.metrics().record_forward_latency(start.elapsed()); - Ok(tx_hash) - } - - /// Forwards a transaction conditional to the sequencer endpoint. - pub async fn forward_raw_transaction_conditional( - &self, - tx: &[u8], - condition: TransactionConditional, - ) -> Result { - let start = Instant::now(); - let rlp_hex = hex::encode_prefixed(tx); - let tx_hash = self - .request("eth_sendRawTransactionConditional", (rlp_hex, condition)) - .await - .inspect_err(|err| { - warn!( - target: "rpc::eth", - %err, - "Failed to forward transaction conditional for sequencer", - ); - })?; - self.metrics().record_forward_latency(start.elapsed()); - Ok(tx_hash) - } -} - -#[derive(Debug)] -struct SequencerClientInner { - /// The endpoint of the sequencer - sequencer_endpoint: String, - /// The client - client: Client, - // Metrics for tracking sequencer forwarding - metrics: SequencerMetrics, -} - -#[cfg(test)] -mod tests { - use super::*; - use alloy_primitives::U64; - - #[tokio::test] - async fn test_http_body_str() { - let client = SequencerClient::new("http://localhost:8545").await.unwrap(); - - let request = client - .client() - .make_request("eth_getBlockByNumber", (U64::from(10),)) - .serialize() - .unwrap() - .take_request(); - let body = request.get(); - - assert_eq!( - body, - r#"{"method":"eth_getBlockByNumber","params":["0xa"],"id":0,"jsonrpc":"2.0"}"# - ); - - let condition = TransactionConditional::default(); - - let request = client - .client() - .make_request( - "eth_sendRawTransactionConditional", - (format!("0x{}", hex::encode("abcd")), condition), - ) - .serialize() - .unwrap() - .take_request(); - let body = request.get(); - - 
assert_eq!( - body, - r#"{"method":"eth_sendRawTransactionConditional","params":["0x61626364",{"knownAccounts":{}}],"id":1,"jsonrpc":"2.0"}"# - ); - } - - #[tokio::test] - #[ignore = "Start if WS is reachable at ws://localhost:8546"] - async fn test_ws_body_str() { - let client = SequencerClient::new("ws://localhost:8546").await.unwrap(); - - let request = client - .client() - .make_request("eth_getBlockByNumber", (U64::from(10),)) - .serialize() - .unwrap() - .take_request(); - let body = request.get(); - - assert_eq!( - body, - r#"{"method":"eth_getBlockByNumber","params":["0xa"],"id":0,"jsonrpc":"2.0"}"# - ); - - let condition = TransactionConditional::default(); - - let request = client - .client() - .make_request( - "eth_sendRawTransactionConditional", - (format!("0x{}", hex::encode("abcd")), condition), - ) - .serialize() - .unwrap() - .take_request(); - let body = request.get(); - - assert_eq!( - body, - r#"{"method":"eth_sendRawTransactionConditional","params":["0x61626364",{"knownAccounts":{}}],"id":1,"jsonrpc":"2.0"}"# - ); - } -} diff --git a/op-reth/crates/storage/Cargo.toml b/op-reth/crates/storage/Cargo.toml deleted file mode 100644 index aab6ee7d8e0..00000000000 --- a/op-reth/crates/storage/Cargo.toml +++ /dev/null @@ -1,34 +0,0 @@ -[package] -name = "reth-optimism-storage" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true - -[lints] -workspace = true - -[dependencies] -# reth -reth-optimism-primitives = { workspace = true, features = ["serde", "reth-codec"] } -reth-storage-api = { workspace = true, features = ["db-api"] } - -# ethereum -alloy-consensus.workspace = true - -[dev-dependencies] -reth-codecs = { workspace = true, features = ["test-utils"] } -reth-prune-types.workspace = true -reth-stages-types.workspace = true - -[features] -default = ["std"] -std = [ - "reth-storage-api/std", - "reth-prune-types/std", - 
"reth-stages-types/std", - "alloy-consensus/std", - "reth-optimism-primitives/std", -] diff --git a/op-reth/crates/txpool/Cargo.toml b/op-reth/crates/txpool/Cargo.toml deleted file mode 100644 index 3737d23cf0d..00000000000 --- a/op-reth/crates/txpool/Cargo.toml +++ /dev/null @@ -1,59 +0,0 @@ -[package] -name = "reth-optimism-txpool" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license.workspace = true -homepage.workspace = true -repository.workspace = true -description = "OP-Reth Transaction Pool" - -[lints] -workspace = true - -[dependencies] -# ethereum -alloy-consensus.workspace = true -alloy-eips.workspace = true -alloy-primitives.workspace = true -alloy-rpc-types-eth.workspace = true -alloy-rpc-client = { workspace = true, features = ["reqwest", "default"] } -alloy-json-rpc.workspace = true -alloy-serde.workspace = true - -# reth -reth-chainspec.workspace = true -reth-evm.workspace = true -reth-primitives-traits.workspace = true -reth-chain-state.workspace = true -reth-storage-api.workspace = true -reth-transaction-pool.workspace = true - -# revm -op-revm.workspace = true - -# optimism -op-alloy-consensus.workspace = true -op-alloy-flz.workspace = true -op-alloy-rpc-types.workspace = true -reth-optimism-evm.workspace = true -reth-optimism-forks.workspace = true -reth-optimism-primitives.workspace = true - -# metrics -reth-metrics.workspace = true -metrics.workspace = true - -# misc -c-kzg.workspace = true -derive_more.workspace = true -futures-util.workspace = true -parking_lot.workspace = true -serde.workspace = true -tracing.workspace = true -thiserror.workspace = true -tokio = { workspace = true, features = ["time"] } - -[dev-dependencies] -reth-optimism-chainspec.workspace = true -reth-provider = { workspace = true, features = ["test-utils"] } diff --git a/op-reth/crates/txpool/src/supervisor/access_list.rs b/op-reth/crates/txpool/src/supervisor/access_list.rs deleted file mode 100644 index 7565c960c38..00000000000 
--- a/op-reth/crates/txpool/src/supervisor/access_list.rs +++ /dev/null @@ -1,41 +0,0 @@ -// Source: https://github.com/op-rs/kona -// Copyright © 2023 kona contributors Copyright © 2024 Optimism -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -// associated documentation files (the “Software”), to deal in the Software without restriction, -// including without limitation the rights to use, copy, modify, merge, publish, distribute, -// sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all copies or -// substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT -// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -use crate::supervisor::CROSS_L2_INBOX_ADDRESS; -use alloy_eips::eip2930::AccessListItem; -use alloy_primitives::B256; - -/// Parses [`AccessListItem`]s to inbox entries. -/// -/// Return flattened iterator with all inbox entries. -pub fn parse_access_list_items_to_inbox_entries<'a>( - access_list_items: impl Iterator, -) -> impl Iterator { - access_list_items.filter_map(parse_access_list_item_to_inbox_entries).flatten() -} - -/// Parse [`AccessListItem`] to inbox entries, if any. -/// Max 3 inbox entries can exist per [`AccessListItem`] that points to [`CROSS_L2_INBOX_ADDRESS`]. -/// -/// Returns `Vec::new()` if [`AccessListItem`] address doesn't point to [`CROSS_L2_INBOX_ADDRESS`]. 
-// Access-list spec: -fn parse_access_list_item_to_inbox_entries( - access_list_item: &AccessListItem, -) -> Option> { - (access_list_item.address == CROSS_L2_INBOX_ADDRESS) - .then(|| access_list_item.storage_keys.iter()) -} diff --git a/op-reth/crates/txpool/src/supervisor/client.rs b/op-reth/crates/txpool/src/supervisor/client.rs deleted file mode 100644 index a49704ac50a..00000000000 --- a/op-reth/crates/txpool/src/supervisor/client.rs +++ /dev/null @@ -1,282 +0,0 @@ -//! This is our custom implementation of validator struct - -use crate::{ - supervisor::{ - metrics::SupervisorMetrics, parse_access_list_items_to_inbox_entries, ExecutingDescriptor, - InteropTxValidatorError, - }, - InvalidCrossTx, -}; -use alloy_consensus::Transaction; -use alloy_eips::eip2930::AccessList; -use alloy_primitives::{TxHash, B256}; -use alloy_rpc_client::ReqwestClient; -use futures_util::{ - future::BoxFuture, - stream::{self, StreamExt}, - Stream, -}; -use op_alloy_consensus::interop::SafetyLevel; -use reth_transaction_pool::PoolTransaction; -use std::{ - borrow::Cow, - future::IntoFuture, - sync::Arc, - time::{Duration, Instant}, -}; -use tracing::trace; - -/// Supervisor hosted by op-labs -// TODO: This should be changed to actual supervisor url -pub const DEFAULT_SUPERVISOR_URL: &str = "http://localhost:1337/"; - -/// The default request timeout to use -pub const DEFAULT_REQUEST_TIMEOUT: Duration = Duration::from_millis(100); - -/// Implementation of the supervisor trait for the interop. -#[derive(Debug, Clone)] -pub struct SupervisorClient { - /// Stores type's data. - inner: Arc, -} - -impl SupervisorClient { - /// Returns a new [`SupervisorClientBuilder`]. - pub fn builder(supervisor_endpoint: impl Into) -> SupervisorClientBuilder { - SupervisorClientBuilder::new(supervisor_endpoint) - } - - /// Returns configured timeout. See [`SupervisorClientInner`]. - pub fn timeout(&self) -> Duration { - self.inner.timeout - } - - /// Returns configured minimum safety level. 
See [`SupervisorClient`]. - pub fn safety(&self) -> SafetyLevel { - self.inner.safety - } - - /// Executes a `supervisor_checkAccessList` with the configured safety level. - pub fn check_access_list<'a>( - &self, - inbox_entries: &'a [B256], - executing_descriptor: ExecutingDescriptor, - ) -> CheckAccessListRequest<'a> { - CheckAccessListRequest { - client: self.inner.client.clone(), - inbox_entries: Cow::Borrowed(inbox_entries), - executing_descriptor, - timeout: self.inner.timeout, - safety: self.inner.safety, - metrics: self.inner.metrics.clone(), - } - } - - /// Extracts commitment from access list entries, pointing to 0x420..022 and validates them - /// against supervisor. - /// - /// If commitment present pre-interop tx rejected. - /// - /// Returns: - /// None - if tx is not cross chain, - /// Some(Ok(()) - if tx is valid cross chain, - /// Some(Err(e)) - if tx is not valid or interop is not active - pub async fn is_valid_cross_tx( - &self, - access_list: Option<&AccessList>, - hash: &TxHash, - timestamp: u64, - timeout: Option, - is_interop_active: bool, - ) -> Option> { - // We don't need to check for deposit transaction in here, because they won't come from - // txpool - let access_list = access_list?; - let inbox_entries = parse_access_list_items_to_inbox_entries(access_list.iter()) - .copied() - .collect::>(); - if inbox_entries.is_empty() { - return None; - } - - // Interop check - if !is_interop_active { - // No cross chain tx allowed before interop - return Some(Err(InvalidCrossTx::CrossChainTxPreInterop)) - } - - if let Err(err) = self - .check_access_list( - inbox_entries.as_slice(), - ExecutingDescriptor::new(timestamp, timeout), - ) - .await - { - self.inner.metrics.increment_metrics_for_error(&err); - trace!(target: "txpool", hash=%hash, err=%err, "Cross chain transaction invalid"); - return Some(Err(InvalidCrossTx::ValidationError(err))); - } - Some(Ok(())) - } - - /// Creates a stream that revalidates interop transactions against the 
supervisor. - /// Returns - /// An implementation of `Stream` that is `Send`-able and tied to the lifetime `'a` of `self`. - /// Each item yielded by the stream is a tuple `(TItem, Option>)`. - /// - The first element is the original `TItem` that was revalidated. - /// - The second element is the `Option>` describes the outcome - /// - `None`: Transaction was not identified as a cross-chain candidate by initial checks. - /// - `Some(Ok(()))`: Supervisor confirmed the transaction is valid. - /// - `Some(Err(InvalidCrossTx))`: Supervisor indicated the transaction is invalid. - pub fn revalidate_interop_txs_stream<'a, TItem, InputIter>( - &'a self, - txs_to_revalidate: InputIter, - current_timestamp: u64, - revalidation_window: u64, - max_concurrent_queries: usize, - ) -> impl Stream>)> + Send + 'a - where - InputIter: IntoIterator + Send + 'a, - InputIter::IntoIter: Send + 'a, - TItem: PoolTransaction + Transaction + Send, - { - stream::iter(txs_to_revalidate.into_iter().map(move |tx_item| { - let client_for_async_task = self.clone(); - - async move { - let validation_result = client_for_async_task - .is_valid_cross_tx( - tx_item.access_list(), - tx_item.hash(), - current_timestamp, - Some(revalidation_window), - true, - ) - .await; - - // return the original transaction paired with its validation result. - (tx_item, validation_result) - } - })) - .buffered(max_concurrent_queries) - } -} - -/// Holds supervisor data. Inner type of [`SupervisorClient`]. -#[derive(Debug, Clone)] -pub struct SupervisorClientInner { - client: ReqwestClient, - /// The default - safety: SafetyLevel, - /// The default request timeout - timeout: Duration, - /// Metrics for tracking supervisor operations - metrics: SupervisorMetrics, -} - -/// Builds [`SupervisorClient`]. -#[derive(Debug)] -pub struct SupervisorClientBuilder { - /// Supervisor server's socket. - endpoint: String, - /// Timeout for requests. 
- /// - /// NOTE: this timeout is only effective if it's shorter than the timeout configured for the - /// underlying [`ReqwestClient`]. - timeout: Duration, - /// Minimum [`SafetyLevel`] of cross-chain transactions accepted by this client. - safety: SafetyLevel, -} - -impl SupervisorClientBuilder { - /// Creates a new builder. - pub fn new(supervisor_endpoint: impl Into) -> Self { - Self { - endpoint: supervisor_endpoint.into(), - timeout: DEFAULT_REQUEST_TIMEOUT, - safety: SafetyLevel::CrossUnsafe, - } - } - - /// Configures a custom timeout - pub const fn timeout(mut self, timeout: Duration) -> Self { - self.timeout = timeout; - self - } - - /// Sets minimum safety level to accept for cross chain transactions. - pub const fn minimum_safety(mut self, min_safety: SafetyLevel) -> Self { - self.safety = min_safety; - self - } - - /// Creates a new supervisor validator. - pub async fn build(self) -> SupervisorClient { - let Self { endpoint, timeout, safety } = self; - - let client = ReqwestClient::builder() - .connect(endpoint.as_str()) - .await - .expect("building supervisor client"); - - SupervisorClient { - inner: Arc::new(SupervisorClientInner { - client, - safety, - timeout, - metrics: SupervisorMetrics::default(), - }), - } - } -} - -/// A Request future that issues a `supervisor_checkAccessList` request. -#[derive(Debug, Clone)] -pub struct CheckAccessListRequest<'a> { - client: ReqwestClient, - inbox_entries: Cow<'a, [B256]>, - executing_descriptor: ExecutingDescriptor, - timeout: Duration, - safety: SafetyLevel, - metrics: SupervisorMetrics, -} - -impl<'a> CheckAccessListRequest<'a> { - /// Configures the timeout to use for the request if any. 
- pub const fn with_timeout(mut self, timeout: Duration) -> Self { - self.timeout = timeout; - self - } - - /// Configures the [`SafetyLevel`] for this request - pub const fn with_safety(mut self, safety: SafetyLevel) -> Self { - self.safety = safety; - self - } -} - -impl<'a> IntoFuture for CheckAccessListRequest<'a> { - type Output = Result<(), InteropTxValidatorError>; - type IntoFuture = BoxFuture<'a, Self::Output>; - - fn into_future(self) -> Self::IntoFuture { - let Self { client, inbox_entries, executing_descriptor, timeout, safety, metrics } = self; - Box::pin(async move { - let start = Instant::now(); - - let result = tokio::time::timeout( - timeout, - client.request( - "supervisor_checkAccessList", - (inbox_entries, safety, executing_descriptor), - ), - ) - .await; - metrics.record_supervisor_query(start.elapsed()); - - result - .map_err(|_| InteropTxValidatorError::Timeout(timeout.as_secs()))? - .map_err(InteropTxValidatorError::from_json_rpc) - }) - } -} diff --git a/op-reth/crates/txpool/src/supervisor/message.rs b/op-reth/crates/txpool/src/supervisor/message.rs deleted file mode 100644 index 65b15d2af7b..00000000000 --- a/op-reth/crates/txpool/src/supervisor/message.rs +++ /dev/null @@ -1,37 +0,0 @@ -//! Interop message primitives. -// Source: https://github.com/op-rs/kona -// Copyright © 2023 kona contributors Copyright © 2024 Optimism -// -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -// associated documentation files (the “Software”), to deal in the Software without restriction, -// including without limitation the rights to use, copy, modify, merge, publish, distribute, -// sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in all copies or -// substantial portions of the Software. 
-// -// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT -// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -/// An [`ExecutingDescriptor`] is a part of the payload to `supervisor_checkAccessList` -/// Spec: -#[derive(Default, Debug, PartialEq, Eq, Clone, serde::Serialize, serde::Deserialize)] -pub struct ExecutingDescriptor { - /// The timestamp used to enforce timestamp [invariant](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/derivation.md#invariants) - #[serde(with = "alloy_serde::quantity")] - timestamp: u64, - /// The timeout that requests verification to still hold at `timestamp+timeout` - /// (message expiry may drop previously valid messages). - #[serde(skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] - timeout: Option, -} - -impl ExecutingDescriptor { - /// Create a new [`ExecutingDescriptor`] from the timestamp and timeout - pub const fn new(timestamp: u64, timeout: Option) -> Self { - Self { timestamp, timeout } - } -} diff --git a/op-reth/crates/txpool/src/supervisor/metrics.rs b/op-reth/crates/txpool/src/supervisor/metrics.rs deleted file mode 100644 index cb51a52bfc5..00000000000 --- a/op-reth/crates/txpool/src/supervisor/metrics.rs +++ /dev/null @@ -1,72 +0,0 @@ -//! 
Optimism supervisor metrics - -use crate::supervisor::InteropTxValidatorError; -use op_alloy_rpc_types::SuperchainDAError; -use reth_metrics::{ - metrics::{Counter, Histogram}, - Metrics, -}; -use std::time::Duration; - -/// Optimism supervisor metrics -#[derive(Metrics, Clone)] -#[metrics(scope = "optimism_transaction_pool.supervisor")] -pub struct SupervisorMetrics { - /// How long it takes to query the supervisor in the Optimism transaction pool - pub(crate) supervisor_query_latency: Histogram, - - /// Counter for the number of times data was skipped - pub(crate) skipped_data_count: Counter, - /// Counter for the number of times an unknown chain was encountered - pub(crate) unknown_chain_count: Counter, - /// Counter for the number of times conflicting data was encountered - pub(crate) conflicting_data_count: Counter, - /// Counter for the number of times ineffective data was encountered - pub(crate) ineffective_data_count: Counter, - /// Counter for the number of times data was out of order - pub(crate) out_of_order_count: Counter, - /// Counter for the number of times data was awaiting replacement - pub(crate) awaiting_replacement_count: Counter, - /// Counter for the number of times data was out of scope - pub(crate) out_of_scope_count: Counter, - /// Counter for the number of times there was no parent for the first block - pub(crate) no_parent_for_first_block_count: Counter, - /// Counter for the number of times future data was encountered - pub(crate) future_data_count: Counter, - /// Counter for the number of times data was missed - pub(crate) missed_data_count: Counter, - /// Counter for the number of times data corruption was encountered - pub(crate) data_corruption_count: Counter, -} - -impl SupervisorMetrics { - /// Records the duration of supervisor queries - #[inline] - pub fn record_supervisor_query(&self, duration: Duration) { - self.supervisor_query_latency.record(duration.as_secs_f64()); - } - - /// Increments the metrics for the given error - 
pub fn increment_metrics_for_error(&self, error: &InteropTxValidatorError) { - if let InteropTxValidatorError::InvalidEntry(inner) = error { - match inner { - SuperchainDAError::SkippedData => self.skipped_data_count.increment(1), - SuperchainDAError::UnknownChain => self.unknown_chain_count.increment(1), - SuperchainDAError::ConflictingData => self.conflicting_data_count.increment(1), - SuperchainDAError::IneffectiveData => self.ineffective_data_count.increment(1), - SuperchainDAError::OutOfOrder => self.out_of_order_count.increment(1), - SuperchainDAError::AwaitingReplacement => { - self.awaiting_replacement_count.increment(1) - } - SuperchainDAError::OutOfScope => self.out_of_scope_count.increment(1), - SuperchainDAError::NoParentForFirstBlock => { - self.no_parent_for_first_block_count.increment(1) - } - SuperchainDAError::FutureData => self.future_data_count.increment(1), - SuperchainDAError::MissedData => self.missed_data_count.increment(1), - SuperchainDAError::DataCorruption => self.data_corruption_count.increment(1), - _ => {} - } - } - } -} diff --git a/op-reth/crates/txpool/src/supervisor/mod.rs b/op-reth/crates/txpool/src/supervisor/mod.rs deleted file mode 100644 index 36cec338d70..00000000000 --- a/op-reth/crates/txpool/src/supervisor/mod.rs +++ /dev/null @@ -1,12 +0,0 @@ -//! 
Supervisor support for interop -mod access_list; -pub use access_list::parse_access_list_items_to_inbox_entries; -pub use op_alloy_consensus::interop::*; - -pub mod client; -pub use client::{SupervisorClient, SupervisorClientBuilder, DEFAULT_SUPERVISOR_URL}; -mod errors; -pub use errors::InteropTxValidatorError; -mod message; -pub use message::ExecutingDescriptor; -pub mod metrics; diff --git a/op-reth/crates/txpool/src/transaction.rs b/op-reth/crates/txpool/src/transaction.rs deleted file mode 100644 index d13ba555ba1..00000000000 --- a/op-reth/crates/txpool/src/transaction.rs +++ /dev/null @@ -1,362 +0,0 @@ -use crate::{ - conditional::MaybeConditionalTransaction, estimated_da_size::DataAvailabilitySized, - interop::MaybeInteropTransaction, -}; -use alloy_consensus::{transaction::Recovered, BlobTransactionValidationError, Typed2718}; -use alloy_eips::{ - eip2718::{Encodable2718, WithEncoded}, - eip2930::AccessList, - eip7594::BlobTransactionSidecarVariant, - eip7702::SignedAuthorization, -}; -use alloy_primitives::{Address, Bytes, TxHash, TxKind, B256, U256}; -use alloy_rpc_types_eth::erc4337::TransactionConditional; -use c_kzg::KzgSettings; -use core::fmt::Debug; -use reth_optimism_primitives::OpTransactionSigned; -use reth_primitives_traits::{InMemorySize, SignedTransaction}; -use reth_transaction_pool::{ - EthBlobTransactionSidecar, EthPoolTransaction, EthPooledTransaction, PoolTransaction, -}; -use std::{ - borrow::Cow, - sync::{ - atomic::{AtomicU64, Ordering}, - Arc, OnceLock, - }, -}; - -/// Marker for no-interop transactions -pub(crate) const NO_INTEROP_TX: u64 = 0; - -/// Pool transaction for OP. -/// -/// This type wraps the actual transaction and caches values that are frequently used by the pool. 
-/// For payload building this lazily tracks values that are required during payload building: -/// - Estimated compressed size of this transaction -#[derive(Debug, Clone, derive_more::Deref)] -pub struct OpPooledTransaction< - Cons = OpTransactionSigned, - Pooled = op_alloy_consensus::OpPooledTransaction, -> { - #[deref] - inner: EthPooledTransaction, - /// The estimated size of this transaction, lazily computed. - estimated_tx_compressed_size: OnceLock, - /// The pooled transaction type. - _pd: core::marker::PhantomData, - - /// Optional conditional attached to this transaction. - conditional: Option>, - - /// Optional interop deadline attached to this transaction. - interop: Arc, - - /// Cached EIP-2718 encoded bytes of the transaction, lazily computed. - encoded_2718: OnceLock, -} - -impl OpPooledTransaction { - /// Create new instance of [Self]. - pub fn new(transaction: Recovered, encoded_length: usize) -> Self { - Self { - inner: EthPooledTransaction::new(transaction, encoded_length), - estimated_tx_compressed_size: Default::default(), - conditional: None, - interop: Arc::new(AtomicU64::new(NO_INTEROP_TX)), - _pd: core::marker::PhantomData, - encoded_2718: Default::default(), - } - } - - /// Returns the estimated compressed size of a transaction in bytes. - /// This value is computed based on the following formula: - /// `max(minTransactionSize, intercept + fastlzCoef*fastlzSize) / 1e6` - /// Uses cached EIP-2718 encoded bytes to avoid recomputing the encoding for each estimation. - pub fn estimated_compressed_size(&self) -> u64 { - *self - .estimated_tx_compressed_size - .get_or_init(|| op_alloy_flz::tx_estimated_size_fjord_bytes(self.encoded_2718())) - } - - /// Returns lazily computed EIP-2718 encoded bytes of the transaction. - pub fn encoded_2718(&self) -> &Bytes { - self.encoded_2718.get_or_init(|| self.inner.transaction().encoded_2718().into()) - } - - /// Conditional setter. 
- pub fn with_conditional(mut self, conditional: TransactionConditional) -> Self { - self.conditional = Some(Box::new(conditional)); - self - } -} - -impl MaybeConditionalTransaction for OpPooledTransaction { - fn set_conditional(&mut self, conditional: TransactionConditional) { - self.conditional = Some(Box::new(conditional)) - } - - fn conditional(&self) -> Option<&TransactionConditional> { - self.conditional.as_deref() - } -} - -impl MaybeInteropTransaction for OpPooledTransaction { - fn set_interop_deadline(&self, deadline: u64) { - self.interop.store(deadline, Ordering::Relaxed); - } - - fn interop_deadline(&self) -> Option { - let interop = self.interop.load(Ordering::Relaxed); - if interop > NO_INTEROP_TX { - return Some(interop) - } - None - } -} - -impl DataAvailabilitySized for OpPooledTransaction { - fn estimated_da_size(&self) -> u64 { - self.estimated_compressed_size() - } -} - -impl PoolTransaction for OpPooledTransaction -where - Cons: SignedTransaction + From, - Pooled: SignedTransaction + TryFrom, -{ - type TryFromConsensusError = >::Error; - type Consensus = Cons; - type Pooled = Pooled; - - fn clone_into_consensus(&self) -> Recovered { - self.inner.transaction().clone() - } - - fn into_consensus(self) -> Recovered { - self.inner.transaction - } - - fn into_consensus_with2718(self) -> WithEncoded> { - let encoding = self.encoded_2718().clone(); - self.inner.transaction.into_encoded_with(encoding) - } - - fn from_pooled(tx: Recovered) -> Self { - let encoded_len = tx.encode_2718_len(); - Self::new(tx.convert(), encoded_len) - } - - fn hash(&self) -> &TxHash { - self.inner.transaction.tx_hash() - } - - fn sender(&self) -> Address { - self.inner.transaction.signer() - } - - fn sender_ref(&self) -> &Address { - self.inner.transaction.signer_ref() - } - - fn cost(&self) -> &U256 { - &self.inner.cost - } - - fn encoded_length(&self) -> usize { - self.inner.encoded_length - } -} - -impl Typed2718 for OpPooledTransaction { - fn ty(&self) -> u8 { - 
self.inner.ty() - } -} - -impl InMemorySize for OpPooledTransaction { - fn size(&self) -> usize { - self.inner.size() - } -} - -impl alloy_consensus::Transaction for OpPooledTransaction -where - Cons: alloy_consensus::Transaction, - Pooled: Debug + Send + Sync + 'static, -{ - fn chain_id(&self) -> Option { - self.inner.chain_id() - } - - fn nonce(&self) -> u64 { - self.inner.nonce() - } - - fn gas_limit(&self) -> u64 { - self.inner.gas_limit() - } - - fn gas_price(&self) -> Option { - self.inner.gas_price() - } - - fn max_fee_per_gas(&self) -> u128 { - self.inner.max_fee_per_gas() - } - - fn max_priority_fee_per_gas(&self) -> Option { - self.inner.max_priority_fee_per_gas() - } - - fn max_fee_per_blob_gas(&self) -> Option { - self.inner.max_fee_per_blob_gas() - } - - fn priority_fee_or_price(&self) -> u128 { - self.inner.priority_fee_or_price() - } - - fn effective_gas_price(&self, base_fee: Option) -> u128 { - self.inner.effective_gas_price(base_fee) - } - - fn is_dynamic_fee(&self) -> bool { - self.inner.is_dynamic_fee() - } - - fn kind(&self) -> TxKind { - self.inner.kind() - } - - fn is_create(&self) -> bool { - self.inner.is_create() - } - - fn value(&self) -> U256 { - self.inner.value() - } - - fn input(&self) -> &Bytes { - self.inner.input() - } - - fn access_list(&self) -> Option<&AccessList> { - self.inner.access_list() - } - - fn blob_versioned_hashes(&self) -> Option<&[B256]> { - self.inner.blob_versioned_hashes() - } - - fn authorization_list(&self) -> Option<&[SignedAuthorization]> { - self.inner.authorization_list() - } -} - -impl EthPoolTransaction for OpPooledTransaction -where - Cons: SignedTransaction + From, - Pooled: SignedTransaction + TryFrom, - >::Error: core::error::Error, -{ - fn take_blob(&mut self) -> EthBlobTransactionSidecar { - EthBlobTransactionSidecar::None - } - - fn try_into_pooled_eip4844( - self, - _sidecar: Arc, - ) -> Option> { - None - } - - fn try_from_eip4844( - _tx: Recovered, - _sidecar: BlobTransactionSidecarVariant, - ) 
-> Option { - None - } - - fn validate_blob( - &self, - _sidecar: &BlobTransactionSidecarVariant, - _settings: &KzgSettings, - ) -> Result<(), BlobTransactionValidationError> { - Err(BlobTransactionValidationError::NotBlobTransaction(self.ty())) - } -} - -/// Helper trait to provide payload builder with access to conditionals and encoded bytes of -/// transaction. -pub trait OpPooledTx: - MaybeConditionalTransaction + MaybeInteropTransaction + PoolTransaction + DataAvailabilitySized -{ - /// Returns the EIP-2718 encoded bytes of the transaction. - fn encoded_2718(&self) -> Cow<'_, Bytes>; -} - -impl OpPooledTx for OpPooledTransaction -where - Cons: SignedTransaction + From, - Pooled: SignedTransaction + TryFrom, - >::Error: core::error::Error, -{ - fn encoded_2718(&self) -> Cow<'_, Bytes> { - Cow::Borrowed(self.encoded_2718()) - } -} - -#[cfg(test)] -mod tests { - use crate::{OpPooledTransaction, OpTransactionValidator}; - use alloy_consensus::transaction::Recovered; - use alloy_eips::eip2718::Encodable2718; - use alloy_primitives::{TxKind, U256}; - use op_alloy_consensus::TxDeposit; - use reth_optimism_chainspec::OP_MAINNET; - use reth_optimism_evm::OpEvmConfig; - use reth_optimism_primitives::{OpPrimitives, OpTransactionSigned}; - use reth_provider::test_utils::MockEthProvider; - use reth_transaction_pool::{ - blobstore::InMemoryBlobStore, validate::EthTransactionValidatorBuilder, TransactionOrigin, - TransactionValidationOutcome, - }; - #[tokio::test] - async fn validate_optimism_transaction() { - let client = MockEthProvider::::new() - .with_chain_spec(OP_MAINNET.clone()) - .with_genesis_block(); - let evm_config = OpEvmConfig::optimism(OP_MAINNET.clone()); - let validator = EthTransactionValidatorBuilder::new(client, evm_config) - .no_shanghai() - .no_cancun() - .build(InMemoryBlobStore::default()); - let validator = OpTransactionValidator::new(validator); - - let origin = TransactionOrigin::External; - let signer = Default::default(); - let deposit_tx = 
TxDeposit { - source_hash: Default::default(), - from: signer, - to: TxKind::Create, - mint: 0, - value: U256::ZERO, - gas_limit: 0, - is_system_transaction: false, - input: Default::default(), - }; - let signed_tx: OpTransactionSigned = deposit_tx.into(); - let signed_recovered = Recovered::new_unchecked(signed_tx, signer); - let len = signed_recovered.encode_2718_len(); - let pooled_tx: OpPooledTransaction = OpPooledTransaction::new(signed_recovered, len); - let outcome = validator.validate_one(origin, pooled_tx).await; - - let err = match outcome { - TransactionValidationOutcome::Invalid(_, err) => err, - _ => panic!("Expected invalid transaction"), - }; - assert_eq!(err.to_string(), "transaction type not supported"); - } -} diff --git a/op-reth/crates/txpool/src/validator.rs b/op-reth/crates/txpool/src/validator.rs deleted file mode 100644 index 99f03e1a1e7..00000000000 --- a/op-reth/crates/txpool/src/validator.rs +++ /dev/null @@ -1,354 +0,0 @@ -use crate::{supervisor::SupervisorClient, InvalidCrossTx, OpPooledTx}; -use alloy_consensus::{BlockHeader, Transaction}; -use op_revm::L1BlockInfo; -use parking_lot::RwLock; -use reth_chainspec::ChainSpecProvider; -use reth_evm::ConfigureEvm; -use reth_optimism_evm::RethL1BlockInfo; -use reth_optimism_forks::OpHardforks; -use reth_primitives_traits::{ - transaction::error::InvalidTransactionError, Block, BlockBody, BlockTy, GotExpected, - SealedBlock, -}; -use reth_storage_api::{AccountInfoReader, BlockReaderIdExt, StateProviderFactory}; -use reth_transaction_pool::{ - error::InvalidPoolTransactionError, EthPoolTransaction, EthTransactionValidator, - TransactionOrigin, TransactionValidationOutcome, TransactionValidator, -}; -use std::sync::{ - atomic::{AtomicBool, AtomicU64, Ordering}, - Arc, -}; - -/// The interval for which we check transaction against supervisor, 1 hour. -const TRANSACTION_VALIDITY_WINDOW_SECS: u64 = 3600; - -/// Tracks additional infos for the current block. 
-#[derive(Debug, Default)] -pub struct OpL1BlockInfo { - /// The current L1 block info. - l1_block_info: RwLock, - /// Current block timestamp. - timestamp: AtomicU64, -} - -impl OpL1BlockInfo { - /// Returns the most recent timestamp - pub fn timestamp(&self) -> u64 { - self.timestamp.load(Ordering::Relaxed) - } -} - -/// Validator for Optimism transactions. -#[derive(Debug, Clone)] -pub struct OpTransactionValidator { - /// The type that performs the actual validation. - inner: Arc>, - /// Additional block info required for validation. - block_info: Arc, - /// If true, ensure that the transaction's sender has enough balance to cover the L1 gas fee - /// derived from the tracked L1 block info that is extracted from the first transaction in the - /// L2 block. - require_l1_data_gas_fee: bool, - /// Client used to check transaction validity with op-supervisor - supervisor_client: Option, - /// tracks activated forks relevant for transaction validation - fork_tracker: Arc, -} - -impl OpTransactionValidator { - /// Returns the configured chain spec - pub fn chain_spec(&self) -> Arc - where - Client: ChainSpecProvider, - { - self.inner.chain_spec() - } - - /// Returns the configured client - pub fn client(&self) -> &Client { - self.inner.client() - } - - /// Returns the current block timestamp. - fn block_timestamp(&self) -> u64 { - self.block_info.timestamp.load(Ordering::Relaxed) - } - - /// Whether to ensure that the transaction's sender has enough balance to also cover the L1 gas - /// fee. - pub fn require_l1_data_gas_fee(self, require_l1_data_gas_fee: bool) -> Self { - Self { require_l1_data_gas_fee, ..self } - } - - /// Returns whether this validator also requires the transaction's sender to have enough balance - /// to cover the L1 gas fee. 
- pub const fn requires_l1_data_gas_fee(&self) -> bool { - self.require_l1_data_gas_fee - } -} - -impl OpTransactionValidator -where - Client: - ChainSpecProvider + StateProviderFactory + BlockReaderIdExt + Sync, - Tx: EthPoolTransaction + OpPooledTx, - Evm: ConfigureEvm, -{ - /// Create a new [`OpTransactionValidator`]. - pub fn new(inner: EthTransactionValidator) -> Self { - let this = Self::with_block_info(inner, OpL1BlockInfo::default()); - if let Ok(Some(block)) = - this.inner.client().block_by_number_or_tag(alloy_eips::BlockNumberOrTag::Latest) - { - // genesis block has no txs, so we can't extract L1 info, we set the block info to empty - // so that we will accept txs into the pool before the first block - if block.header().number() == 0 { - this.block_info.timestamp.store(block.header().timestamp(), Ordering::Relaxed); - } else { - this.update_l1_block_info(block.header(), block.body().transactions().first()); - } - } - - this - } - - /// Create a new [`OpTransactionValidator`] with the given [`OpL1BlockInfo`]. - pub fn with_block_info( - inner: EthTransactionValidator, - block_info: OpL1BlockInfo, - ) -> Self { - Self { - inner: Arc::new(inner), - block_info: Arc::new(block_info), - require_l1_data_gas_fee: true, - supervisor_client: None, - fork_tracker: Arc::new(OpForkTracker { interop: AtomicBool::from(false) }), - } - } - - /// Set the supervisor client and safety level - pub fn with_supervisor(mut self, supervisor_client: SupervisorClient) -> Self { - self.supervisor_client = Some(supervisor_client); - self - } - - /// Update the L1 block info for the given header and system transaction, if any. 
- /// - /// Note: this supports optional system transaction, in case this is used in a dev setup - pub fn update_l1_block_info(&self, header: &H, tx: Option<&T>) - where - H: BlockHeader, - T: Transaction, - { - self.block_info.timestamp.store(header.timestamp(), Ordering::Relaxed); - - if let Some(Ok(l1_block_info)) = tx.map(reth_optimism_evm::extract_l1_info_from_tx) { - *self.block_info.l1_block_info.write() = l1_block_info; - } - - if self.chain_spec().is_interop_active_at_timestamp(header.timestamp()) { - self.fork_tracker.interop.store(true, Ordering::Relaxed); - } - } - - /// Validates a single transaction. - /// - /// See also [`TransactionValidator::validate_transaction`] - /// - /// This behaves the same as [`OpTransactionValidator::validate_one_with_state`], but creates - /// a new state provider internally. - pub async fn validate_one( - &self, - origin: TransactionOrigin, - transaction: Tx, - ) -> TransactionValidationOutcome { - self.validate_one_with_state(origin, transaction, &mut None).await - } - - /// Validates a single transaction with a provided state provider. - /// - /// This allows reusing the same state provider across multiple transaction validations. 
- /// - /// See also [`TransactionValidator::validate_transaction`] - /// - /// This behaves the same as [`EthTransactionValidator::validate_one_with_state`], but in - /// addition applies OP validity checks: - /// - ensures tx is not eip4844 - /// - ensures cross chain transactions are valid wrt locally configured safety level - /// - ensures that the account has enough balance to cover the L1 gas cost - pub async fn validate_one_with_state( - &self, - origin: TransactionOrigin, - transaction: Tx, - state: &mut Option>, - ) -> TransactionValidationOutcome { - if transaction.is_eip4844() { - return TransactionValidationOutcome::Invalid( - transaction, - InvalidTransactionError::TxTypeNotSupported.into(), - ) - } - - // Interop cross tx validation - match self.is_valid_cross_tx(&transaction).await { - Some(Err(err)) => { - let err = match err { - InvalidCrossTx::CrossChainTxPreInterop => { - InvalidTransactionError::TxTypeNotSupported.into() - } - err => InvalidPoolTransactionError::Other(Box::new(err)), - }; - return TransactionValidationOutcome::Invalid(transaction, err) - } - Some(Ok(_)) => { - // valid interop tx - transaction.set_interop_deadline( - self.block_timestamp() + TRANSACTION_VALIDITY_WINDOW_SECS, - ); - } - _ => {} - } - - let outcome = self.inner.validate_one_with_state(origin, transaction, state); - - self.apply_op_checks(outcome) - } - - /// Performs the necessary opstack specific checks based on top of the regular eth outcome. 
- fn apply_op_checks( - &self, - outcome: TransactionValidationOutcome, - ) -> TransactionValidationOutcome { - if !self.requires_l1_data_gas_fee() { - // no need to check L1 gas fee - return outcome - } - // ensure that the account has enough balance to cover the L1 gas cost - if let TransactionValidationOutcome::Valid { - balance, - state_nonce, - transaction: valid_tx, - propagate, - bytecode_hash, - authorities, - } = outcome - { - let mut l1_block_info = self.block_info.l1_block_info.read().clone(); - - let encoded = valid_tx.transaction().encoded_2718(); - - let cost_addition = match l1_block_info.l1_tx_data_fee( - self.chain_spec(), - self.block_timestamp(), - &encoded, - false, - ) { - Ok(cost) => cost, - Err(err) => { - return TransactionValidationOutcome::Error(*valid_tx.hash(), Box::new(err)) - } - }; - let cost = valid_tx.transaction().cost().saturating_add(cost_addition); - - // Checks for max cost - if cost > balance { - return TransactionValidationOutcome::Invalid( - valid_tx.into_transaction(), - InvalidTransactionError::InsufficientFunds( - GotExpected { got: balance, expected: cost }.into(), - ) - .into(), - ) - } - - return TransactionValidationOutcome::Valid { - balance, - state_nonce, - transaction: valid_tx, - propagate, - bytecode_hash, - authorities, - } - } - outcome - } - - /// Wrapper for is valid cross tx - pub async fn is_valid_cross_tx(&self, tx: &Tx) -> Option> { - // We don't need to check for deposit transaction in here, because they won't come from - // txpool - self.supervisor_client - .as_ref()? 
- .is_valid_cross_tx( - tx.access_list(), - tx.hash(), - self.block_info.timestamp.load(Ordering::Relaxed), - Some(TRANSACTION_VALIDITY_WINDOW_SECS), - self.fork_tracker.is_interop_activated(), - ) - .await - } -} - -impl TransactionValidator for OpTransactionValidator -where - Client: - ChainSpecProvider + StateProviderFactory + BlockReaderIdExt + Sync, - Tx: EthPoolTransaction + OpPooledTx, - Evm: ConfigureEvm, -{ - type Transaction = Tx; - type Block = BlockTy; - - async fn validate_transaction( - &self, - origin: TransactionOrigin, - transaction: Self::Transaction, - ) -> TransactionValidationOutcome { - self.validate_one(origin, transaction).await - } - - async fn validate_transactions( - &self, - transactions: Vec<(TransactionOrigin, Self::Transaction)>, - ) -> Vec> { - futures_util::future::join_all( - transactions.into_iter().map(|(origin, tx)| self.validate_one(origin, tx)), - ) - .await - } - - async fn validate_transactions_with_origin( - &self, - origin: TransactionOrigin, - transactions: impl IntoIterator + Send, - ) -> Vec> { - futures_util::future::join_all( - transactions.into_iter().map(|tx| self.validate_one(origin, tx)), - ) - .await - } - - fn on_new_head_block(&self, new_tip_block: &SealedBlock) { - self.inner.on_new_head_block(new_tip_block); - self.update_l1_block_info( - new_tip_block.header(), - new_tip_block.body().transactions().first(), - ); - } -} - -/// Keeps track of whether certain forks are activated -#[derive(Debug)] -pub(crate) struct OpForkTracker { - /// Tracks if interop is activated at the block's timestamp. - interop: AtomicBool, -} - -impl OpForkTracker { - /// Returns `true` if Interop fork is activated. 
- pub(crate) fn is_interop_activated(&self) -> bool { - self.interop.load(Ordering::Relaxed) - } -} diff --git a/op-reth/deny.toml b/op-reth/deny.toml deleted file mode 100644 index 80b3b190e7c..00000000000 --- a/op-reth/deny.toml +++ /dev/null @@ -1,95 +0,0 @@ -# This section is considered when running `cargo deny check advisories` -# More documentation for the advisories section can be found here: -# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html -[advisories] -yanked = "warn" -ignore = [ - # https://rustsec.org/advisories/RUSTSEC-2024-0436 paste! is unmaintained - "RUSTSEC-2024-0436", - # https://rustsec.org/advisories/RUSTSEC-2025-0141 bincode is unmaintained, need to transition all deps to wincode first - "RUSTSEC-2025-0141", - # https://rustsec.org/advisories/RUSTSEC-2026-0002 lru unused directly: - "RUSTSEC-2026-0002", -] - -# This section is considered when running `cargo deny check bans`. -# More documentation about the 'bans' section can be found here: -# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html -[bans] -# Lint level for when multiple versions of the same crate are detected -multiple-versions = "warn" -# Lint level for when a crate version requirement is `*` -wildcards = "allow" -highlight = "all" -# List of crates to deny -deny = [{ name = "openssl" }] -# Certain crates/versions that will be skipped when doing duplicate detection. -skip = [] -# Similarly to `skip` allows you to skip certain crates during duplicate -# detection. Unlike skip, it also includes the entire tree of transitive -# dependencies starting at the specified crate, up to a certain depth, which is -# by default infinite -skip-tree = [] - -[licenses] -version = 2 -confidence-threshold = 0.8 - -# List of explicitly allowed licenses -# See https://spdx.org/licenses/ for list of possible licenses -# [possible values: any SPDX 3.7 short identifier (+ optional exception)]. 
-allow = [ - "MIT", - "MIT-0", - "Apache-2.0", - "Apache-2.0 WITH LLVM-exception", - "BSD-2-Clause", - "BSD-3-Clause", - "BSL-1.0", - "0BSD", - "CC0-1.0", - "ISC", - "Unlicense", - "Unicode-3.0", - "Zlib", - # https://github.com/rustls/webpki/blob/main/LICENSE ISC Style - "LicenseRef-rustls-webpki", - "CDLA-Permissive-2.0", - "MPL-2.0", -] - -# Allow 1 or more licenses on a per-crate basis, so that particular licenses -# aren't accepted for every possible crate as with the normal allow list -exceptions = [ - # TODO: decide on MPL-2.0 handling - # These dependencies are grandfathered in https://github.com/paradigmxyz/reth/pull/6980 - { allow = ["MPL-2.0"], name = "option-ext" }, - { allow = ["MPL-2.0"], name = "webpki-root-certs" }, -] - -[[licenses.clarify]] -name = "rustls-webpki" -expression = "LicenseRef-rustls-webpki" -license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] - -# This section is considered when running `cargo deny check sources`. -# More documentation about the 'sources' section can be found here: -# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html -[sources] -# Lint level for what to happen when a crate from a crate registry that is not -# in the allow list is encountered -unknown-registry = "warn" -# Lint level for what to happen when a crate from a git repository that is not -# in the allow list is encountered -unknown-git = "deny" -allow-git = [ - # TODO: Please avoid adding new entries to this list. 
- "https://github.com/alloy-rs/alloy", - "https://github.com/foundry-rs/block-explorers", - "https://github.com/bluealloy/revm", - "https://github.com/paradigmxyz/revm-inspectors", - "https://github.com/alloy-rs/evm", - "https://github.com/alloy-rs/hardforks", - "https://github.com/paradigmxyz/jsonrpsee", - "https://github.com/paradigmxyz/reth", -] diff --git a/op-reth/examples/custom-node/src/engine.rs b/op-reth/examples/custom-node/src/engine.rs deleted file mode 100644 index d6d363db356..00000000000 --- a/op-reth/examples/custom-node/src/engine.rs +++ /dev/null @@ -1,335 +0,0 @@ -use crate::{ - chainspec::CustomChainSpec, - evm::CustomEvmConfig, - primitives::{CustomHeader, CustomNodePrimitives, CustomTransaction}, - CustomNode, -}; -use alloy_eips::eip2718::WithEncoded; -use alloy_primitives::Bytes; -use op_alloy_rpc_types_engine::{OpExecutionData, OpExecutionPayload}; -use reth_engine_primitives::EngineApiValidator; -use reth_ethereum::{ - node::api::{ - validate_version_specific_fields, AddOnsContext, BuiltPayload, BuiltPayloadExecutedBlock, - EngineApiMessageVersion, EngineObjectValidationError, ExecutionPayload, FullNodeComponents, - NewPayloadError, NodePrimitives, PayloadAttributes, PayloadBuilderAttributes, - PayloadOrAttributes, PayloadTypes, PayloadValidator, - }, - primitives::SealedBlock, - storage::StateProviderFactory, - trie::{KeccakKeyHasher, KeyHasher}, -}; -use reth_node_builder::{rpc::PayloadValidatorBuilder, InvalidPayloadAttributesError}; -use reth_op::node::{ - engine::OpEngineValidator, payload::OpAttributes, OpBuiltPayload, OpEngineTypes, - OpPayloadAttributes, OpPayloadBuilderAttributes, -}; -use revm_primitives::U256; -use serde::{Deserialize, Serialize}; -use std::sync::Arc; -use thiserror::Error; - -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] -pub struct CustomPayloadTypes; - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CustomExecutionData { - pub inner: OpExecutionData, - pub extension: u64, -} - -impl 
ExecutionPayload for CustomExecutionData { - fn parent_hash(&self) -> revm_primitives::B256 { - self.inner.parent_hash() - } - - fn block_hash(&self) -> revm_primitives::B256 { - self.inner.block_hash() - } - - fn block_number(&self) -> u64 { - self.inner.block_number() - } - - fn withdrawals(&self) -> Option<&Vec> { - None - } - - fn block_access_list(&self) -> Option<&Bytes> { - None - } - - fn parent_beacon_block_root(&self) -> Option { - self.inner.parent_beacon_block_root() - } - - fn timestamp(&self) -> u64 { - self.inner.timestamp() - } - - fn gas_used(&self) -> u64 { - self.inner.gas_used() - } - - fn transaction_count(&self) -> usize { - self.inner.payload.as_v1().transactions.len() - } -} - -impl TryFrom<&reth_optimism_flashblocks::FlashBlockCompleteSequence> for CustomExecutionData { - type Error = &'static str; - - fn try_from( - sequence: &reth_optimism_flashblocks::FlashBlockCompleteSequence, - ) -> Result { - let inner = OpExecutionData::try_from(sequence)?; - Ok(Self { inner, extension: sequence.last().diff.gas_used }) - } -} - -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct CustomPayloadAttributes { - #[serde(flatten)] - inner: OpPayloadAttributes, - extension: u64, -} - -impl PayloadAttributes for CustomPayloadAttributes { - fn timestamp(&self) -> u64 { - self.inner.timestamp() - } - - fn withdrawals(&self) -> Option<&Vec> { - self.inner.withdrawals() - } - - fn parent_beacon_block_root(&self) -> Option { - self.inner.parent_beacon_block_root() - } -} - -#[derive(Debug, Clone)] -pub struct CustomPayloadBuilderAttributes { - pub inner: OpPayloadBuilderAttributes, - pub extension: u64, -} - -impl PayloadBuilderAttributes for CustomPayloadBuilderAttributes { - type RpcPayloadAttributes = CustomPayloadAttributes; - type Error = alloy_rlp::Error; - - fn try_new( - parent: revm_primitives::B256, - rpc_payload_attributes: Self::RpcPayloadAttributes, - version: u8, - ) -> Result - where - Self: Sized, - { - let CustomPayloadAttributes { 
inner, extension } = rpc_payload_attributes; - - Ok(Self { inner: OpPayloadBuilderAttributes::try_new(parent, inner, version)?, extension }) - } - - fn payload_id(&self) -> alloy_rpc_types_engine::PayloadId { - self.inner.payload_id() - } - - fn parent(&self) -> revm_primitives::B256 { - self.inner.parent() - } - - fn timestamp(&self) -> u64 { - self.inner.timestamp() - } - - fn parent_beacon_block_root(&self) -> Option { - self.inner.parent_beacon_block_root() - } - - fn suggested_fee_recipient(&self) -> revm_primitives::Address { - self.inner.suggested_fee_recipient() - } - - fn prev_randao(&self) -> revm_primitives::B256 { - self.inner.prev_randao() - } - - fn withdrawals(&self) -> &alloy_eips::eip4895::Withdrawals { - self.inner.withdrawals() - } -} - -impl OpAttributes for CustomPayloadBuilderAttributes { - type Transaction = CustomTransaction; - - fn no_tx_pool(&self) -> bool { - self.inner.no_tx_pool - } - - fn sequencer_transactions(&self) -> &[WithEncoded] { - &self.inner.transactions - } -} - -#[derive(Debug, Clone)] -pub struct CustomBuiltPayload(pub OpBuiltPayload); - -impl BuiltPayload for CustomBuiltPayload { - type Primitives = CustomNodePrimitives; - - fn block(&self) -> &SealedBlock<::Block> { - self.0.block() - } - - fn fees(&self) -> U256 { - self.0.fees() - } - - fn executed_block(&self) -> Option> { - self.0.executed_block() - } - - fn requests(&self) -> Option { - self.0.requests() - } -} - -impl From - for alloy_consensus::Block<::SignedTx> -{ - fn from(value: CustomBuiltPayload) -> Self { - value.0.into_sealed_block().into_block().map_header(|header| header.inner) - } -} - -impl PayloadTypes for CustomPayloadTypes { - type ExecutionData = CustomExecutionData; - type BuiltPayload = OpBuiltPayload; - type PayloadAttributes = CustomPayloadAttributes; - type PayloadBuilderAttributes = CustomPayloadBuilderAttributes; - - fn block_to_payload( - block: SealedBlock< - <::Primitives as NodePrimitives>::Block, - >, - ) -> Self::ExecutionData { - let 
extension = block.header().extension; - let block_hash = block.hash(); - let block = block.into_block().map_header(|header| header.inner); - let (payload, sidecar) = OpExecutionPayload::from_block_unchecked(block_hash, &block); - CustomExecutionData { inner: OpExecutionData { payload, sidecar }, extension } - } -} - -/// Custom engine validator -#[derive(Debug, Clone)] -pub struct CustomEngineValidator

{ - inner: OpEngineValidator, -} - -impl

CustomEngineValidator

-where - P: Send + Sync + Unpin + 'static, -{ - /// Instantiates a new validator. - pub fn new(chain_spec: Arc, provider: P) -> Self { - Self { inner: OpEngineValidator::new::(chain_spec, provider) } - } - - /// Returns the chain spec used by the validator. - #[inline] - fn chain_spec(&self) -> &CustomChainSpec { - self.inner.chain_spec() - } -} - -impl

PayloadValidator for CustomEngineValidator

-where - P: StateProviderFactory + Send + Sync + Unpin + 'static, -{ - type Block = crate::primitives::block::Block; - - fn validate_payload_attributes_against_header( - &self, - _attr: &CustomPayloadAttributes, - _header: &::Header, - ) -> Result<(), InvalidPayloadAttributesError> { - // skip default timestamp validation - Ok(()) - } - - fn convert_payload_to_block( - &self, - payload: CustomExecutionData, - ) -> Result, NewPayloadError> { - let sealed_block = PayloadValidator::::convert_payload_to_block( - &self.inner, - payload.inner, - )?; - let (header, body) = sealed_block.split_sealed_header_body(); - let header = CustomHeader { inner: header.into_header(), extension: payload.extension }; - let body = body.map_ommers(|_| CustomHeader::default()); - Ok(SealedBlock::::from_parts_unhashed(header, body)) - } -} - -impl

EngineApiValidator for CustomEngineValidator

-where - P: StateProviderFactory + Send + Sync + Unpin + 'static, -{ - fn validate_version_specific_fields( - &self, - version: EngineApiMessageVersion, - payload_or_attrs: PayloadOrAttributes<'_, CustomExecutionData, CustomPayloadAttributes>, - ) -> Result<(), EngineObjectValidationError> { - validate_version_specific_fields(self.chain_spec(), version, payload_or_attrs) - } - - fn ensure_well_formed_attributes( - &self, - version: EngineApiMessageVersion, - attributes: &CustomPayloadAttributes, - ) -> Result<(), EngineObjectValidationError> { - validate_version_specific_fields( - self.chain_spec(), - version, - PayloadOrAttributes::::PayloadAttributes(attributes), - )?; - - // custom validation logic - ensure that the custom field is not zero - // if attributes.extension == 0 { - // return Err(EngineObjectValidationError::invalid_params( - // CustomError::CustomFieldIsNotZero, - // )) - // } - - Ok(()) - } -} - -/// Custom error type used in payload attributes validation -#[derive(Debug, Error)] -pub enum CustomError { - #[error("Custom field is not zero")] - CustomFieldIsNotZero, -} - -/// Custom engine validator builder -#[derive(Debug, Default, Clone, Copy)] -#[non_exhaustive] -pub struct CustomEngineValidatorBuilder; - -impl PayloadValidatorBuilder for CustomEngineValidatorBuilder -where - N: FullNodeComponents, -{ - type Validator = CustomEngineValidator; - - async fn build(self, ctx: &AddOnsContext<'_, N>) -> eyre::Result { - Ok(CustomEngineValidator::new::( - ctx.config.chain.clone(), - ctx.node.provider().clone(), - )) - } -} diff --git a/op-reth/examples/custom-node/src/evm/builder.rs b/op-reth/examples/custom-node/src/evm/builder.rs deleted file mode 100644 index fe7e7cf7113..00000000000 --- a/op-reth/examples/custom-node/src/evm/builder.rs +++ /dev/null @@ -1,22 +0,0 @@ -use crate::{chainspec::CustomChainSpec, evm::CustomEvmConfig, primitives::CustomNodePrimitives}; -use reth_ethereum::node::api::FullNodeTypes; -use 
reth_node_builder::{components::ExecutorBuilder, BuilderContext, NodeTypes}; -use std::{future, future::Future}; - -#[derive(Debug, Clone, Default)] -#[non_exhaustive] -pub struct CustomExecutorBuilder; - -impl ExecutorBuilder for CustomExecutorBuilder -where - Node::Types: NodeTypes, -{ - type EVM = CustomEvmConfig; - - fn build_evm( - self, - ctx: &BuilderContext, - ) -> impl Future> + Send { - future::ready(Ok(CustomEvmConfig::new(ctx.chain_spec()))) - } -} diff --git a/op-reth/examples/custom-node/src/evm/config.rs b/op-reth/examples/custom-node/src/evm/config.rs deleted file mode 100644 index c29ac075638..00000000000 --- a/op-reth/examples/custom-node/src/evm/config.rs +++ /dev/null @@ -1,180 +0,0 @@ -use crate::{ - chainspec::CustomChainSpec, - engine::{CustomExecutionData, CustomPayloadBuilderAttributes}, - evm::{alloy::CustomEvmFactory, executor::CustomBlockExecutionCtx, CustomBlockAssembler}, - primitives::{Block, CustomHeader, CustomNodePrimitives, CustomTransaction}, -}; -use alloy_consensus::BlockHeader; -use alloy_eips::{eip2718::WithEncoded, Decodable2718}; -use alloy_evm::EvmEnv; -use alloy_op_evm::OpBlockExecutionCtx; -use alloy_rpc_types_engine::PayloadError; -use op_alloy_rpc_types_engine::flashblock::OpFlashblockPayloadBase; -use op_revm::OpSpecId; -use reth_engine_primitives::ExecutableTxIterator; -use reth_ethereum::{ - chainspec::EthChainSpec, - node::api::{BuildNextEnv, ConfigureEvm, PayloadBuilderError}, - primitives::{SealedBlock, SealedHeader}, -}; -use reth_node_builder::{ConfigureEngineEvm, NewPayloadError}; -use reth_op::{ - chainspec::OpHardforks, - evm::primitives::{EvmEnvFor, ExecutionCtxFor}, - node::{OpEvmConfig, OpNextBlockEnvAttributes, OpRethReceiptBuilder}, - primitives::SignedTransaction, -}; -use reth_rpc_api::eth::helpers::pending_block::BuildPendingEnv; -use revm_primitives::Bytes; -use std::sync::Arc; - -#[derive(Debug, Clone)] -pub struct CustomEvmConfig { - pub(super) inner: OpEvmConfig, - pub(super) block_assembler: 
CustomBlockAssembler, - pub(super) custom_evm_factory: CustomEvmFactory, -} - -impl CustomEvmConfig { - pub fn new(chain_spec: Arc) -> Self { - Self { - inner: OpEvmConfig::new( - Arc::new(chain_spec.inner().clone()), - OpRethReceiptBuilder::default(), - ), - block_assembler: CustomBlockAssembler::new(chain_spec), - custom_evm_factory: CustomEvmFactory::new(), - } - } -} - -impl ConfigureEvm for CustomEvmConfig { - type Primitives = CustomNodePrimitives; - type Error = ::Error; - type NextBlockEnvCtx = CustomNextBlockEnvAttributes; - type BlockExecutorFactory = Self; - type BlockAssembler = CustomBlockAssembler; - - fn block_executor_factory(&self) -> &Self::BlockExecutorFactory { - self - } - - fn block_assembler(&self) -> &Self::BlockAssembler { - &self.block_assembler - } - - fn evm_env(&self, header: &CustomHeader) -> Result, Self::Error> { - self.inner.evm_env(header) - } - - fn next_evm_env( - &self, - parent: &CustomHeader, - attributes: &CustomNextBlockEnvAttributes, - ) -> Result, Self::Error> { - self.inner.next_evm_env(parent, &attributes.inner) - } - - fn context_for_block( - &self, - block: &SealedBlock, - ) -> Result { - Ok(CustomBlockExecutionCtx { - inner: OpBlockExecutionCtx { - parent_hash: block.header().parent_hash(), - parent_beacon_block_root: block.header().parent_beacon_block_root(), - extra_data: block.header().extra_data().clone(), - }, - extension: block.extension, - }) - } - - fn context_for_next_block( - &self, - parent: &SealedHeader, - attributes: Self::NextBlockEnvCtx, - ) -> Result { - Ok(CustomBlockExecutionCtx { - inner: OpBlockExecutionCtx { - parent_hash: parent.hash(), - parent_beacon_block_root: attributes.inner.parent_beacon_block_root, - extra_data: attributes.inner.extra_data, - }, - extension: attributes.extension, - }) - } -} - -impl ConfigureEngineEvm for CustomEvmConfig { - fn evm_env_for_payload( - &self, - payload: &CustomExecutionData, - ) -> Result, Self::Error> { - self.inner.evm_env_for_payload(&payload.inner) - } 
- - fn context_for_payload<'a>( - &self, - payload: &'a CustomExecutionData, - ) -> Result, Self::Error> { - Ok(CustomBlockExecutionCtx { - inner: self.inner.context_for_payload(&payload.inner)?, - extension: payload.extension, - }) - } - - fn tx_iterator_for_payload( - &self, - payload: &CustomExecutionData, - ) -> Result, Self::Error> { - let transactions = payload.inner.payload.transactions().clone(); - let convert = |encoded: Bytes| { - let tx = CustomTransaction::decode_2718_exact(encoded.as_ref()) - .map_err(Into::into) - .map_err(PayloadError::Decode)?; - let signer = tx.try_recover().map_err(NewPayloadError::other)?; - Ok::<_, NewPayloadError>(WithEncoded::new(encoded, tx.with_signer(signer))) - }; - Ok((transactions, convert)) - } -} - -/// Additional parameters required for executing next block of custom transactions. -#[derive(Debug, Clone)] -pub struct CustomNextBlockEnvAttributes { - inner: OpNextBlockEnvAttributes, - extension: u64, -} - -impl From for CustomNextBlockEnvAttributes { - fn from(value: OpFlashblockPayloadBase) -> Self { - Self { inner: value.into(), extension: 0 } - } -} - -impl BuildPendingEnv for CustomNextBlockEnvAttributes { - fn build_pending_env(parent: &SealedHeader) -> Self { - Self { - inner: OpNextBlockEnvAttributes::build_pending_env(parent), - extension: parent.extension, - } - } -} - -impl BuildNextEnv - for CustomNextBlockEnvAttributes -where - H: BlockHeader, - ChainSpec: EthChainSpec + OpHardforks, -{ - fn build_next_env( - attributes: &CustomPayloadBuilderAttributes, - parent: &SealedHeader, - chain_spec: &ChainSpec, - ) -> Result { - let inner = - OpNextBlockEnvAttributes::build_next_env(&attributes.inner, parent, chain_spec)?; - - Ok(CustomNextBlockEnvAttributes { inner, extension: attributes.extension }) - } -} diff --git a/op-reth/examples/custom-node/src/evm/env.rs b/op-reth/examples/custom-node/src/evm/env.rs deleted file mode 100644 index 53a2b4e3f15..00000000000 --- a/op-reth/examples/custom-node/src/evm/env.rs 
+++ /dev/null @@ -1,340 +0,0 @@ -use crate::primitives::{CustomTransaction, TxPayment}; -use alloy_eips::{eip2930::AccessList, Typed2718}; -use alloy_evm::{FromRecoveredTx, FromTxWithEncoded, IntoTxEnv}; -use alloy_op_evm::block::OpTxEnv; -use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; -use op_alloy_consensus::OpTxEnvelope; -use op_revm::OpTransaction; -use reth_ethereum::evm::{primitives::TransactionEnv, revm::context::TxEnv}; - -/// An Optimism transaction extended by [`PaymentTxEnv`] that can be fed to [`Evm`]. -/// -/// [`Evm`]: alloy_evm::Evm -#[derive(Clone, Debug)] -pub enum CustomTxEnv { - Op(OpTransaction), - Payment(PaymentTxEnv), -} - -/// A transaction environment is a set of information related to an Ethereum transaction that can be -/// fed to [`Evm`] for execution. -/// -/// [`Evm`]: alloy_evm::Evm -#[derive(Clone, Debug, Default)] -pub struct PaymentTxEnv(pub TxEnv); - -impl revm::context::Transaction for CustomTxEnv { - type AccessListItem<'a> - = ::AccessListItem<'a> - where - Self: 'a; - type Authorization<'a> - = ::Authorization<'a> - where - Self: 'a; - - fn tx_type(&self) -> u8 { - match self { - Self::Op(tx) => tx.tx_type(), - Self::Payment(tx) => tx.tx_type(), - } - } - - fn caller(&self) -> Address { - match self { - Self::Op(tx) => tx.caller(), - Self::Payment(tx) => tx.caller(), - } - } - - fn gas_limit(&self) -> u64 { - match self { - Self::Op(tx) => tx.gas_limit(), - Self::Payment(tx) => tx.gas_limit(), - } - } - - fn value(&self) -> U256 { - match self { - Self::Op(tx) => tx.value(), - Self::Payment(tx) => tx.value(), - } - } - - fn input(&self) -> &Bytes { - match self { - Self::Op(tx) => tx.input(), - Self::Payment(tx) => tx.input(), - } - } - - fn nonce(&self) -> u64 { - match self { - Self::Op(tx) => revm::context::Transaction::nonce(tx), - Self::Payment(tx) => revm::context::Transaction::nonce(tx), - } - } - - fn kind(&self) -> TxKind { - match self { - Self::Op(tx) => tx.kind(), - Self::Payment(tx) => tx.kind(), - } 
- } - - fn chain_id(&self) -> Option { - match self { - Self::Op(tx) => tx.chain_id(), - Self::Payment(tx) => tx.chain_id(), - } - } - - fn gas_price(&self) -> u128 { - match self { - Self::Op(tx) => tx.gas_price(), - Self::Payment(tx) => tx.gas_price(), - } - } - - fn access_list(&self) -> Option>> { - Some(match self { - Self::Op(tx) => tx.base.access_list.iter(), - Self::Payment(tx) => tx.0.access_list.iter(), - }) - } - - fn blob_versioned_hashes(&self) -> &[B256] { - match self { - Self::Op(tx) => tx.blob_versioned_hashes(), - Self::Payment(tx) => tx.blob_versioned_hashes(), - } - } - - fn max_fee_per_blob_gas(&self) -> u128 { - match self { - Self::Op(tx) => tx.max_fee_per_blob_gas(), - Self::Payment(tx) => tx.max_fee_per_blob_gas(), - } - } - - fn authorization_list_len(&self) -> usize { - match self { - Self::Op(tx) => tx.authorization_list_len(), - Self::Payment(tx) => tx.authorization_list_len(), - } - } - - fn authorization_list(&self) -> impl Iterator> { - match self { - Self::Op(tx) => tx.base.authorization_list.iter(), - Self::Payment(tx) => tx.0.authorization_list.iter(), - } - } - - fn max_priority_fee_per_gas(&self) -> Option { - match self { - Self::Op(tx) => tx.max_priority_fee_per_gas(), - Self::Payment(tx) => tx.max_priority_fee_per_gas(), - } - } -} - -impl revm::context::Transaction for PaymentTxEnv { - type AccessListItem<'a> - = ::AccessListItem<'a> - where - Self: 'a; - type Authorization<'a> - = ::Authorization<'a> - where - Self: 'a; - - fn tx_type(&self) -> u8 { - self.0.tx_type() - } - - fn caller(&self) -> Address { - self.0.caller() - } - - fn gas_limit(&self) -> u64 { - self.0.gas_limit() - } - - fn value(&self) -> U256 { - self.0.value() - } - - fn input(&self) -> &Bytes { - self.0.input() - } - - fn nonce(&self) -> u64 { - revm::context::Transaction::nonce(&self.0) - } - - fn kind(&self) -> TxKind { - self.0.kind() - } - - fn chain_id(&self) -> Option { - self.0.chain_id() - } - - fn gas_price(&self) -> u128 { - self.0.gas_price() 
- } - - fn access_list(&self) -> Option>> { - self.0.access_list() - } - - fn blob_versioned_hashes(&self) -> &[B256] { - self.0.blob_versioned_hashes() - } - - fn max_fee_per_blob_gas(&self) -> u128 { - self.0.max_fee_per_blob_gas() - } - - fn authorization_list_len(&self) -> usize { - self.0.authorization_list_len() - } - - fn authorization_list(&self) -> impl Iterator> { - self.0.authorization_list() - } - - fn max_priority_fee_per_gas(&self) -> Option { - self.0.max_priority_fee_per_gas() - } -} - -impl TransactionEnv for PaymentTxEnv { - fn set_gas_limit(&mut self, gas_limit: u64) { - self.0.set_gas_limit(gas_limit); - } - - fn nonce(&self) -> u64 { - self.0.nonce() - } - - fn set_nonce(&mut self, nonce: u64) { - self.0.set_nonce(nonce); - } - - fn set_access_list(&mut self, access_list: AccessList) { - self.0.set_access_list(access_list); - } -} - -impl TransactionEnv for CustomTxEnv { - fn set_gas_limit(&mut self, gas_limit: u64) { - match self { - Self::Op(tx) => tx.set_gas_limit(gas_limit), - Self::Payment(tx) => tx.set_gas_limit(gas_limit), - } - } - - fn nonce(&self) -> u64 { - match self { - Self::Op(tx) => tx.nonce(), - Self::Payment(tx) => tx.nonce(), - } - } - - fn set_nonce(&mut self, nonce: u64) { - match self { - Self::Op(tx) => tx.set_nonce(nonce), - Self::Payment(tx) => tx.set_nonce(nonce), - } - } - - fn set_access_list(&mut self, access_list: AccessList) { - match self { - Self::Op(tx) => tx.set_access_list(access_list), - Self::Payment(tx) => tx.set_access_list(access_list), - } - } -} - -impl FromRecoveredTx for TxEnv { - fn from_recovered_tx(tx: &TxPayment, caller: Address) -> Self { - let TxPayment { - chain_id, - nonce, - gas_limit, - max_fee_per_gas, - max_priority_fee_per_gas, - to, - value, - } = tx; - Self { - tx_type: tx.ty(), - caller, - gas_limit: *gas_limit, - gas_price: *max_fee_per_gas, - gas_priority_fee: Some(*max_priority_fee_per_gas), - kind: TxKind::Call(*to), - value: *value, - nonce: *nonce, - chain_id: Some(*chain_id), - 
..Default::default() - } - } -} - -impl FromTxWithEncoded for TxEnv { - fn from_encoded_tx(tx: &TxPayment, sender: Address, _encoded: Bytes) -> Self { - Self::from_recovered_tx(tx, sender) - } -} - -impl FromRecoveredTx for CustomTxEnv { - fn from_recovered_tx(tx: &OpTxEnvelope, sender: Address) -> Self { - Self::Op(OpTransaction::from_recovered_tx(tx, sender)) - } -} - -impl FromTxWithEncoded for CustomTxEnv { - fn from_encoded_tx(tx: &OpTxEnvelope, sender: Address, encoded: Bytes) -> Self { - Self::Op(OpTransaction::from_encoded_tx(tx, sender, encoded)) - } -} - -impl FromRecoveredTx for CustomTxEnv { - fn from_recovered_tx(tx: &CustomTransaction, sender: Address) -> Self { - match tx { - CustomTransaction::Op(tx) => Self::from_recovered_tx(tx, sender), - CustomTransaction::Payment(tx) => { - Self::Payment(PaymentTxEnv(TxEnv::from_recovered_tx(tx.tx(), sender))) - } - } - } -} - -impl FromTxWithEncoded for CustomTxEnv { - fn from_encoded_tx(tx: &CustomTransaction, sender: Address, encoded: Bytes) -> Self { - match tx { - CustomTransaction::Op(tx) => Self::from_encoded_tx(tx, sender, encoded), - CustomTransaction::Payment(tx) => { - Self::Payment(PaymentTxEnv(TxEnv::from_encoded_tx(tx.tx(), sender, encoded))) - } - } - } -} - -impl IntoTxEnv for CustomTxEnv { - fn into_tx_env(self) -> Self { - self - } -} - -impl OpTxEnv for CustomTxEnv { - fn encoded_bytes(&self) -> Option<&Bytes> { - match self { - Self::Op(tx) => tx.encoded_bytes(), - Self::Payment(_) => None, - } - } -} diff --git a/op-reth/examples/custom-node/src/lib.rs b/op-reth/examples/custom-node/src/lib.rs deleted file mode 100644 index 4210ac9b767..00000000000 --- a/op-reth/examples/custom-node/src/lib.rs +++ /dev/null @@ -1,86 +0,0 @@ -//! This example shows how to implement a custom node. -//! -//! A node consists of: -//! - primitives: block,header,transactions -//! - components: network,pool,evm -//! 
- engine: advances the node - -#![cfg_attr(not(test), warn(unused_crate_dependencies))] - -use crate::{ - engine::{CustomEngineValidatorBuilder, CustomPayloadTypes}, - engine_api::CustomEngineApiBuilder, - evm::CustomExecutorBuilder, - pool::CustomPooledTransaction, - primitives::CustomTransaction, - rpc::CustomRpcTypes, -}; -use chainspec::CustomChainSpec; -use primitives::CustomNodePrimitives; -use reth_ethereum::node::api::{FullNodeTypes, NodeTypes}; -use reth_node_builder::{ - components::{BasicPayloadServiceBuilder, ComponentsBuilder}, - Node, NodeAdapter, -}; -use reth_op::{ - node::{ - node::{OpConsensusBuilder, OpNetworkBuilder, OpPayloadBuilder, OpPoolBuilder}, - txpool, OpAddOns, OpNode, - }, - rpc::OpEthApiBuilder, -}; - -pub mod chainspec; -pub mod engine; -pub mod engine_api; -pub mod evm; -pub mod pool; -pub mod primitives; -pub mod rpc; - -#[derive(Debug, Clone)] -pub struct CustomNode { - inner: OpNode, -} - -impl NodeTypes for CustomNode { - type Primitives = CustomNodePrimitives; - type ChainSpec = CustomChainSpec; - type Storage = ::Storage; - type Payload = CustomPayloadTypes; -} - -impl Node for CustomNode -where - N: FullNodeTypes, -{ - type ComponentsBuilder = ComponentsBuilder< - N, - OpPoolBuilder>, - BasicPayloadServiceBuilder, - OpNetworkBuilder, - CustomExecutorBuilder, - OpConsensusBuilder, - >; - - type AddOns = OpAddOns< - NodeAdapter, - OpEthApiBuilder, - CustomEngineValidatorBuilder, - CustomEngineApiBuilder, - >; - - fn components_builder(&self) -> Self::ComponentsBuilder { - ComponentsBuilder::default() - .node_types::() - .pool(OpPoolBuilder::default()) - .executor(CustomExecutorBuilder::default()) - .payload(BasicPayloadServiceBuilder::new(OpPayloadBuilder::new(false))) - .network(OpNetworkBuilder::new(false, false)) - .consensus(OpConsensusBuilder::default()) - } - - fn add_ons(&self) -> Self::AddOns { - self.inner.add_ons_builder().build() - } -} diff --git a/op-reth/examples/custom-node/src/primitives/tx.rs 
b/op-reth/examples/custom-node/src/primitives/tx.rs deleted file mode 100644 index fe763e079e5..00000000000 --- a/op-reth/examples/custom-node/src/primitives/tx.rs +++ /dev/null @@ -1,144 +0,0 @@ -use super::TxPayment; -use alloy_consensus::{ - crypto::RecoveryError, - transaction::{SignerRecoverable, TxHashRef}, - Signed, TransactionEnvelope, -}; -use alloy_eips::Encodable2718; -use alloy_primitives::{Sealed, Signature, B256}; -use alloy_rlp::BufMut; -use op_alloy_consensus::{OpTxEnvelope, TxDeposit}; -use reth_codecs::{ - alloy::transaction::{CompactEnvelope, FromTxCompact, ToTxCompact}, - Compact, -}; -use reth_ethereum::primitives::{serde_bincode_compat::RlpBincode, InMemorySize}; -use reth_op::{primitives::SignedTransaction, OpTransaction}; -use revm_primitives::Address; - -/// Either [`OpTxEnvelope`] or [`TxPayment`]. -#[derive(Debug, Clone, TransactionEnvelope)] -#[envelope(tx_type_name = TxTypeCustom)] -pub enum CustomTransaction { - /// A regular Optimism transaction as defined by [`OpTxEnvelope`]. - #[envelope(flatten)] - Op(OpTxEnvelope), - /// A [`TxPayment`] tagged with type 0x2A (decimal 42). 
- #[envelope(ty = 42)] - Payment(Signed), -} - -impl RlpBincode for CustomTransaction {} - -impl reth_codecs::alloy::transaction::Envelope for CustomTransaction { - fn signature(&self) -> &Signature { - match self { - CustomTransaction::Op(tx) => reth_codecs::alloy::transaction::Envelope::signature(tx), - CustomTransaction::Payment(tx) => tx.signature(), - } - } - - fn tx_type(&self) -> Self::TxType { - match self { - CustomTransaction::Op(tx) => TxTypeCustom::Op(tx.tx_type()), - CustomTransaction::Payment(_) => TxTypeCustom::Payment, - } - } -} - -impl FromTxCompact for CustomTransaction { - type TxType = TxTypeCustom; - - fn from_tx_compact(buf: &[u8], tx_type: Self::TxType, signature: Signature) -> (Self, &[u8]) - where - Self: Sized, - { - match tx_type { - TxTypeCustom::Op(tx_type) => { - let (tx, buf) = OpTxEnvelope::from_tx_compact(buf, tx_type, signature); - (Self::Op(tx), buf) - } - TxTypeCustom::Payment => { - let (tx, buf) = TxPayment::from_compact(buf, buf.len()); - let tx = Signed::new_unhashed(tx, signature); - (Self::Payment(tx), buf) - } - } - } -} - -impl ToTxCompact for CustomTransaction { - fn to_tx_compact(&self, buf: &mut (impl BufMut + AsMut<[u8]>)) { - match self { - CustomTransaction::Op(tx) => tx.to_tx_compact(buf), - CustomTransaction::Payment(tx) => { - tx.tx().to_compact(buf); - } - } - } -} - -impl Compact for CustomTransaction { - fn to_compact(&self, buf: &mut B) -> usize - where - B: BufMut + AsMut<[u8]>, - { - ::to_compact(self, buf) - } - - fn from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { - ::from_compact(buf, len) - } -} - -impl OpTransaction for CustomTransaction { - fn is_deposit(&self) -> bool { - match self { - CustomTransaction::Op(op) => op.is_deposit(), - CustomTransaction::Payment(_) => false, - } - } - - fn as_deposit(&self) -> Option<&Sealed> { - match self { - CustomTransaction::Op(op) => op.as_deposit(), - CustomTransaction::Payment(_) => None, - } - } -} - -impl SignerRecoverable for CustomTransaction { - fn 
recover_signer(&self) -> Result { - match self { - CustomTransaction::Op(tx) => SignerRecoverable::recover_signer(tx), - CustomTransaction::Payment(tx) => SignerRecoverable::recover_signer(tx), - } - } - - fn recover_signer_unchecked(&self) -> Result { - match self { - CustomTransaction::Op(tx) => SignerRecoverable::recover_signer_unchecked(tx), - CustomTransaction::Payment(tx) => SignerRecoverable::recover_signer_unchecked(tx), - } - } -} - -impl TxHashRef for CustomTransaction { - fn tx_hash(&self) -> &B256 { - match self { - CustomTransaction::Op(tx) => TxHashRef::tx_hash(tx), - CustomTransaction::Payment(tx) => tx.hash(), - } - } -} - -impl SignedTransaction for CustomTransaction {} - -impl InMemorySize for CustomTransaction { - fn size(&self) -> usize { - match self { - CustomTransaction::Op(tx) => InMemorySize::size(tx), - CustomTransaction::Payment(tx) => InMemorySize::size(tx), - } - } -} diff --git a/op-reth/examples/custom-node/src/primitives/tx_type.rs b/op-reth/examples/custom-node/src/primitives/tx_type.rs deleted file mode 100644 index 46c7de3f5cd..00000000000 --- a/op-reth/examples/custom-node/src/primitives/tx_type.rs +++ /dev/null @@ -1,39 +0,0 @@ -use crate::primitives::TxTypeCustom; -use alloy_primitives::bytes::{Buf, BufMut}; -use reth_codecs::{txtype::COMPACT_EXTENDED_IDENTIFIER_FLAG, Compact}; - -pub const PAYMENT_TX_TYPE_ID: u8 = 42; - -impl Compact for TxTypeCustom { - fn to_compact(&self, buf: &mut B) -> usize - where - B: BufMut + AsMut<[u8]>, - { - match self { - Self::Op(ty) => ty.to_compact(buf), - Self::Payment => { - buf.put_u8(PAYMENT_TX_TYPE_ID); - COMPACT_EXTENDED_IDENTIFIER_FLAG - } - } - } - - fn from_compact(mut buf: &[u8], identifier: usize) -> (Self, &[u8]) { - match identifier { - COMPACT_EXTENDED_IDENTIFIER_FLAG => ( - { - let extended_identifier = buf.get_u8(); - match extended_identifier { - PAYMENT_TX_TYPE_ID => Self::Payment, - _ => panic!("Unsupported TxType identifier: {extended_identifier}"), - } - }, - buf, - ), - v 
=> { - let (inner, buf) = TxTypeCustom::from_compact(buf, v); - (inner, buf) - } - } - } -} diff --git a/op-reth/examples/engine-api-access/src/main.rs b/op-reth/examples/engine-api-access/src/main.rs deleted file mode 100644 index 5f43d94bf6e..00000000000 --- a/op-reth/examples/engine-api-access/src/main.rs +++ /dev/null @@ -1,47 +0,0 @@ -//! Example demonstrating how to access the Engine API instance during construction. -//! -//! Run with -//! -//! ```sh -//! cargo run -p example-engine-api-access -//! ``` - -use reth_db::test_utils::create_test_rw_db; -use reth_node_builder::{EngineApiExt, FullNodeComponents, NodeBuilder, NodeConfig}; -use reth_optimism_chainspec::BASE_MAINNET; -use reth_optimism_node::{ - args::RollupArgs, node::OpEngineValidatorBuilder, OpAddOns, OpEngineApiBuilder, OpNode, -}; -use tokio::sync::oneshot; - -#[tokio::main] -async fn main() { - // Op node configuration and setup - let config = NodeConfig::new(BASE_MAINNET.clone()); - let db = create_test_rw_db(); - let args = RollupArgs::default(); - let op_node = OpNode::new(args); - - let (engine_api_tx, _engine_api_rx) = oneshot::channel(); - - let engine_api = - EngineApiExt::new(OpEngineApiBuilder::::default(), move |api| { - let _ = engine_api_tx.send(api); - }); - - let _builder = NodeBuilder::new(config) - .with_database(db) - .with_types::() - .with_components(op_node.components()) - .with_add_ons(OpAddOns::default().with_engine_api(engine_api)) - .on_component_initialized(move |ctx| { - let _provider = ctx.provider(); - Ok(()) - }) - .on_node_started(|_full_node| Ok(())) - .on_rpc_started(|_ctx, handles| { - let _client = handles.rpc.http_client(); - Ok(()) - }) - .check_launch(); -} diff --git a/op-reth/examples/exex-hello-world/src/main.rs b/op-reth/examples/exex-hello-world/src/main.rs deleted file mode 100644 index 3e86ee785a2..00000000000 --- a/op-reth/examples/exex-hello-world/src/main.rs +++ /dev/null @@ -1,145 +0,0 @@ -//! Example for a simple Execution Extension -//! -//! 
Run with -//! -//! ```sh -//! cargo run -p example-exex-hello-world -- node --dev --dev.block-time 5s -//! ``` - -use clap::Parser; -use futures::TryStreamExt; -use reth_ethereum::{ - chainspec::EthereumHardforks, - exex::{ExExContext, ExExEvent, ExExNotification}, - node::{ - api::{FullNodeComponents, NodeTypes}, - builder::rpc::RpcHandle, - EthereumNode, - }, - rpc::api::eth::helpers::FullEthApi, -}; -use reth_tracing::tracing::info; -use tokio::sync::oneshot; - -/// Additional CLI arguments -#[derive(Parser)] -struct ExExArgs { - /// whether to launch an op-reth node - #[arg(long)] - optimism: bool, -} - -/// A basic subscription loop of new blocks. -async fn my_exex(mut ctx: ExExContext) -> eyre::Result<()> { - while let Some(notification) = ctx.notifications.try_next().await? { - match ¬ification { - ExExNotification::ChainCommitted { new } => { - info!(committed_chain = ?new.range(), "Received commit"); - } - ExExNotification::ChainReorged { old, new } => { - info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); - } - ExExNotification::ChainReverted { old } => { - info!(reverted_chain = ?old.range(), "Received revert"); - } - }; - - if let Some(committed_chain) = notification.committed_chain() { - ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - } - } - - Ok(()) -} - -/// This is an example of how to access the [`RpcHandle`] inside an ExEx. It receives the -/// [`RpcHandle`] once the node is launched fully. -/// -/// This function supports both Opstack Eth API and ethereum Eth API. -/// -/// The received handle gives access to the `EthApi` has full access to all eth api functionality -/// [`FullEthApi`]. And also gives access to additional eth-related rpc method handlers, such as eth -/// filter. 
-async fn ethapi_exex( - mut ctx: ExExContext, - rpc_handle: oneshot::Receiver>, -) -> eyre::Result<()> -where - Node: FullNodeComponents>, - EthApi: FullEthApi, -{ - // Wait for the ethapi to be sent from the main function - let rpc_handle = rpc_handle.await?; - info!("Received rpc handle inside exex"); - - // obtain the ethapi from the rpc handle - let ethapi = rpc_handle.eth_api(); - - // EthFilter type that provides all eth_getlogs related logic - let _eth_filter = rpc_handle.eth_handlers().filter.clone(); - // EthPubSub type that provides all eth_subscribe logic - let _eth_pubsub = rpc_handle.eth_handlers().pubsub.clone(); - // The TraceApi type that provides all the trace_ handlers - let _trace_api = rpc_handle.trace_api(); - // The DebugApi type that provides all the debug_ handlers - let _debug_api = rpc_handle.debug_api(); - - while let Some(notification) = ctx.notifications.try_next().await? { - if let Some(committed_chain) = notification.committed_chain() { - ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; - - // can use the eth api to interact with the node - let _rpc_block = ethapi.rpc_block(committed_chain.tip().hash().into(), true).await?; - } - } - - Ok(()) -} - -fn main() -> eyre::Result<()> { - let args = ExExArgs::parse(); - - if args.optimism { - reth_op::cli::Cli::parse_args().run(|builder, _| { - let (rpc_handle_tx, rpc_handle_rx) = oneshot::channel(); - Box::pin(async move { - let handle = builder - .node(reth_op::node::OpNode::default()) - .install_exex("my-exex", async move |ctx| Ok(my_exex(ctx))) - .install_exex("ethapi-exex", async move |ctx| { - Ok(ethapi_exex(ctx, rpc_handle_rx)) - }) - .launch() - .await?; - - // Retrieve the rpc handle from the node and send it to the exex - rpc_handle_tx - .send(handle.node.add_ons_handle.clone()) - .expect("Failed to send ethapi to ExEx"); - - handle.wait_for_node_exit().await - }) - }) - } else { - reth_ethereum::cli::Cli::parse_args().run(|builder, _| { - 
Box::pin(async move { - let (rpc_handle_tx, rpc_handle_rx) = oneshot::channel(); - let handle = builder - .node(EthereumNode::default()) - .install_exex("my-exex", async move |ctx| Ok(my_exex(ctx))) - .install_exex("ethapi-exex", async move |ctx| { - Ok(ethapi_exex(ctx, rpc_handle_rx)) - }) - .launch() - .await?; - - // Retrieve the rpc handle from the node and send it to the exex - rpc_handle_tx - .send(handle.node.add_ons_handle.clone()) - .expect("Failed to send ethapi to ExEx"); - - handle.wait_for_node_exit().await - }) - }) - } -} diff --git a/op-reth/justfile b/op-reth/justfile deleted file mode 100644 index da12e5c8aa1..00000000000 --- a/op-reth/justfile +++ /dev/null @@ -1,21 +0,0 @@ -# default recipe to display help information -default: - @just --list - -# Check for unused dependencies in the crate graph. -check-udeps: - cargo +nightly udeps --workspace --lib --examples --tests --benches --all-features --locked - -# Run unit tests with optional edge storage feature -test edge='': - #!/usr/bin/env bash - set -euo pipefail - RUST_BACKTRACE=1 cargo nextest run \ - --features "asm-keccak {{edge}}" --locked \ - --workspace \ - --no-tests=warn \ - -E "!kind(test) and not binary(e2e_testsuite)" - -# Run integration tests for reth-optimism-node -test-integration: - RUST_BACKTRACE=1 cargo nextest run --locked -p reth-optimism-node diff --git a/op-reth/rustfmt.toml b/op-reth/rustfmt.toml deleted file mode 100644 index bf86a535083..00000000000 --- a/op-reth/rustfmt.toml +++ /dev/null @@ -1,12 +0,0 @@ -style_edition = "2021" -reorder_imports = true -imports_granularity = "Crate" -use_small_heuristics = "Max" -comment_width = 100 -wrap_comments = true -binop_separator = "Back" -trailing_comma = "Vertical" -trailing_semicolon = false -use_field_init_shorthand = true -format_code_in_doc_comments = true -doc_comment_code_block_width = 100 diff --git a/op-reth/typos.toml b/op-reth/typos.toml deleted file mode 100644 index 896c2c783e3..00000000000 --- a/op-reth/typos.toml 
+++ /dev/null @@ -1,40 +0,0 @@ -[files] -extend-exclude = [ - ".git", - "target", - "crates/storage/libmdbx-rs/mdbx-sys/libmdbx", - "Cargo.toml", - "Cargo.lock", - "testing/ef-tests", -] - -[default] -extend-ignore-re = [ - # Hex strings of various lengths - "(?i)0x[0-9a-f]{8}", # 8 hex chars - "(?i)0x[0-9a-f]{40}", # 40 hex chars - "(?i)0x[0-9a-f]{64}", # 64 hex chars - "(?i)[0-9a-f]{8}", # 8 hex chars without 0x - "(?i)[0-9a-f]{40}", # 40 hex chars without 0x - "(?i)[0-9a-f]{64}", # 64 hex chars without 0x - # Ordinals in identifiers - "[0-9]+nd", - "[0-9]+th", - "[0-9]+st", - "[0-9]+rd", -] - -[default.extend-words] -# These are valid identifiers/terms that should be allowed -crate = "crate" -ser = "ser" -ratatui = "ratatui" -seeked = "seeked" # Past tense of seek, used in trie iterator -Seeked = "Seeked" # Type name in trie iterator -Whe = "Whe" # Part of base64 encoded signature -hel = "hel" # Part of hostname bootnode-hetzner-hel -ONL = "ONL" # Part of base64 encoded ENR -Iy = "Iy" # Part of base64 encoded ENR -flate = "flate" # zlib-flate is a valid tool name -Pn = "Pn" # Part of UPnP (Universal Plug and Play) -BA = "BA" # Part of BAL - Block Access List (EIP-7928) diff --git a/op-service/apis/beacon.go b/op-service/apis/beacon.go index fd24f2c55b0..85e15ee2898 100644 --- a/op-service/apis/beacon.go +++ b/op-service/apis/beacon.go @@ -4,6 +4,7 @@ import ( "context" "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/common" ) // BeaconClient is a thin wrapper over the Beacon APIs. 
@@ -11,11 +12,5 @@ type BeaconClient interface { NodeVersion(ctx context.Context) (string, error) ConfigSpec(ctx context.Context) (eth.APIConfigResponse, error) BeaconGenesis(ctx context.Context) (eth.APIGenesisResponse, error) - BeaconBlobs(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIBeaconBlobsResponse, error) - BlobSideCarsClient -} - -// BlobSideCarsClient provides beacon blob sidecars -type BlobSideCarsClient interface { - BeaconBlobSideCars(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error) + BeaconBlobs(ctx context.Context, slot uint64, hashes []common.Hash) (eth.APIBeaconBlobsResponse, error) } diff --git a/op-service/bgpo/oracle.go b/op-service/bgpo/oracle.go index c945263637b..abd4a1f3059 100644 --- a/op-service/bgpo/oracle.go +++ b/op-service/bgpo/oracle.go @@ -2,6 +2,7 @@ package bgpo import ( "context" + "errors" "fmt" "math/big" "sort" @@ -24,7 +25,6 @@ type BTOBackend interface { BlockNumber(ctx context.Context) (uint64, error) HeaderByNumber(ctx context.Context, number *big.Int) (*types.Header, error) BlockByNumber(ctx context.Context, number *big.Int) (*types.Block, error) - SubscribeNewHead(ctx context.Context, ch chan<- *types.Header) (ethereum.Subscription, error) } // BlobTipOracle tracks blob base gas prices by subscribing to new block headers @@ -49,7 +49,6 @@ type BlobTipOracle struct { ctx context.Context cancel context.CancelFunc - sub ethereum.Subscription loopDone chan struct{} } @@ -121,48 +120,49 @@ func NewBlobTipOracle(backend BTOBackend, chainConfig *params.ChainConfig, log l } // Start starts the oracle's background processing. It returns after the cache is prepopulated and -// the subscription is set up. To stop the background processing, call [BlobTipOracle.Close]. The -// background processing will also stop if the subscription fails. +// the polling loop is started. 
To stop the background processing, call [BlobTipOracle.Close]. func (o *BlobTipOracle) Start() error { - // Pre-populate cache with recent blocks before subscribing + // Pre-populate cache with recent blocks before starting the polling loop if err := o.prePopulateCache(); err != nil { o.log.Warn("Failed to pre-populate cache, continuing anyway", "err", err) } - headers := make(chan *types.Header, 10) - - sub, err := o.backend.SubscribeNewHead(o.ctx, headers) - if err != nil { - return err - } - o.sub = sub - o.log.Info("Blob tip oracle started, subscribed to newHeads") + o.log.Info("Blob tip oracle started, polling for new headers") o.loopDone = make(chan struct{}) - go o.processHeaders(headers) + go o.pollLoop() return nil } -func (o *BlobTipOracle) processHeaders(headers chan *types.Header) { - defer o.log.Debug("Blob tip oracle header processing loop exited") +func (o *BlobTipOracle) pollLoop() { + defer o.log.Debug("Blob tip oracle polling loop exited") defer close(o.loopDone) - // Process headers as they arrive + ticker := time.NewTicker(o.config.PollRate) + defer ticker.Stop() + for { select { - case header := <-headers: - if err := o.processHeader(header); err != nil { - o.log.Error("Error processing header", "err", err, "block", header.Number) - } - case err := <-o.sub.Err(): - if err != nil { - o.log.Error("Subscription error", "err", err) - return - } - return case <-o.ctx.Done(): o.log.Info("Blob tip oracle context canceled") return + case <-ticker.C: + nextBlock := o.latestBlock + 1 + header, err := func() (*types.Header, error) { + ctx, cancel := context.WithTimeout(o.ctx, o.config.NetworkTimeout) + defer cancel() + return o.backend.HeaderByNumber(ctx, big.NewInt(int64(nextBlock))) + }() + if errors.Is(err, ethereum.NotFound) { + continue // Block not yet available + } + if err != nil { + o.log.Warn("Failed to get header", "err", err, "block", nextBlock) + continue + } + if err := o.processHeader(header); err != nil { + o.log.Error("Error processing 
header", "err", err, "block", nextBlock) + } } } } @@ -379,9 +379,6 @@ func (o *BlobTipOracle) extractTipsForBlobTxs(block *types.Block, baseFee *big.I // Close stops the oracle and cleans up resources. func (o *BlobTipOracle) Close() { o.cancel() - if o.sub != nil { - o.sub.Unsubscribe() - } if o.loopDone != nil { <-o.loopDone } diff --git a/op-service/bgpo/oracle_test.go b/op-service/bgpo/oracle_test.go index 02c687c3ee4..b0258566f97 100644 --- a/op-service/bgpo/oracle_test.go +++ b/op-service/bgpo/oracle_test.go @@ -19,6 +19,8 @@ import ( "github.com/ethereum-optimism/optimism/op-service/testlog" ) +// mockBTOBackend mocks BTOBackend for testing. + type mockBTOBackend struct { mock.Mock } @@ -44,38 +46,8 @@ func (m *mockBTOBackend) BlockByNumber(ctx context.Context, number *big.Int) (*t return args.Get(0).(*types.Block), args.Error(1) } -func (m *mockBTOBackend) SubscribeNewHead(ctx context.Context, ch chan<- *types.Header) (ethereum.Subscription, error) { - args := m.Called(ctx, ch) - if args.Get(0) == nil { - return nil, args.Error(1) - } - return args.Get(0).(ethereum.Subscription), args.Error(1) -} - var _ BTOBackend = (*mockBTOBackend)(nil) -// mockSubscription implements ethereum.Subscription for testing. 
-type mockSubscription struct { - errCh chan error - unsubbed bool -} - -func newMockSubscription() *mockSubscription { - return &mockSubscription{ - errCh: make(chan error, 1), - } -} - -func (s *mockSubscription) Unsubscribe() { - if !s.unsubbed { - s.unsubbed = true - } -} - -func (s *mockSubscription) Err() <-chan error { - return s.errCh -} - func createHeader(blockNum uint64, excessBlobGas *uint64) *types.Header { header := &types.Header{ Number: big.NewInt(int64(blockNum)), @@ -433,19 +405,20 @@ func TestExtractBlobFeeCaps(t *testing.T) { } func TestOracleLifecycle(t *testing.T) { - mbackend := new(mockBTOBackend) chainConfig := params.MainnetChainConfig logger := testlog.Logger(t, log.LevelDebug) - oracle := NewBlobTipOracle(mbackend, chainConfig, logger, &BlobTipOracleConfig{ - PricesCacheSize: 10, - BlockCacheSize: 10, - MaxBlocks: 2, - Percentile: 60, - NetworkTimeout: time.Second, - }) + t.Run("start and close with polling", func(t *testing.T) { + mbackend := new(mockBTOBackend) + oracle := NewBlobTipOracle(mbackend, chainConfig, logger, &BlobTipOracleConfig{ + PricesCacheSize: 10, + BlockCacheSize: 10, + MaxBlocks: 2, + Percentile: 60, + NetworkTimeout: time.Second, + PollRate: 50 * time.Millisecond, // Fast polling for test + }) - t.Run("start and close", func(t *testing.T) { latestBlock := uint64(100) // Mock pre-population calls @@ -459,12 +432,15 @@ func TestOracleLifecycle(t *testing.T) { mbackend.On("BlockByNumber", mock.Anything, big.NewInt(int64(i))).Return(block, nil).Once() } - // Mock subscription - sub := newMockSubscription() - var headerCh chan<- *types.Header - mbackend.On("SubscribeNewHead", mock.Anything, mock.Anything).Run(func(args mock.Arguments) { - headerCh = args.Get(1).(chan<- *types.Header) - }).Return(sub, nil).Once() + // Mock polling: first return NotFound, then return a new header + mbackend.On("HeaderByNumber", mock.Anything, big.NewInt(101)).Return(nil, ethereum.NotFound).Once() + newHeader := createHeader(101, 
&excessBlobGas) + newBlock := createBlock(101, newHeader.BaseFee, []*types.Transaction{}) + mbackend.On("HeaderByNumber", mock.Anything, big.NewInt(101)).Return(newHeader, nil).Once() + mbackend.On("BlockByNumber", mock.Anything, big.NewInt(101)).Return(newBlock, nil).Once() + + // After processing block 101, polling will try block 102 which doesn't exist + mbackend.On("HeaderByNumber", mock.Anything, big.NewInt(102)).Return(nil, ethereum.NotFound).Maybe() // Start the oracle err := oracle.Start() @@ -481,14 +457,7 @@ func TestOracleLifecycle(t *testing.T) { require.Equal(t, uint64(100), latestBlockNum) require.NotNil(t, fee) - // Send a new header through the subscription to verify processing works - newHeader := createHeader(101, &excessBlobGas) - newBlock := createBlock(101, newHeader.BaseFee, []*types.Transaction{}) - mbackend.On("BlockByNumber", mock.Anything, big.NewInt(101)).Return(newBlock, nil).Once() - - headerCh <- newHeader - - // Give the goroutine time to process + // Wait for polling to pick up the new header require.Eventually(t, func() bool { latestBlockNum, _ = oracle.GetLatestBlobBaseFee() return latestBlockNum == 101 @@ -497,8 +466,6 @@ func TestOracleLifecycle(t *testing.T) { // Close the oracle oracle.Close() - // Verify subscription was unsubscribed - require.True(t, sub.unsubbed, "subscription should be unsubscribed after Close") select { case <-oracle.loopDone: // Expect loop to have exited @@ -510,6 +477,7 @@ func TestOracleLifecycle(t *testing.T) { }) t.Run("close before start is safe", func(t *testing.T) { + mbackend := new(mockBTOBackend) oracle2 := NewBlobTipOracle(mbackend, chainConfig, logger, &BlobTipOracleConfig{ PricesCacheSize: 10, BlockCacheSize: 10, diff --git a/op-service/sources/l1_beacon_client.go b/op-service/sources/l1_beacon_client.go index c193f1bd351..e43b1c96cd4 100644 --- a/op-service/sources/l1_beacon_client.go +++ b/op-service/sources/l1_beacon_client.go @@ -35,7 +35,7 @@ type L1BeaconClientConfig struct { // 
L1BeaconClient is a high level golang client for the Beacon API. type L1BeaconClient struct { cl apis.BeaconClient - pool *ClientPool[apis.BlobSideCarsClient] + pool *ClientPool[apis.BeaconClient] cfg L1BeaconClientConfig initLock sync.Mutex @@ -101,46 +101,20 @@ func (cl *BeaconHTTPClient) BeaconGenesis(ctx context.Context) (eth.APIGenesisRe return genesisResp, nil } -func (cl *BeaconHTTPClient) BeaconBlobs(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIBeaconBlobsResponse, error) { +func (cl *BeaconHTTPClient) BeaconBlobs(ctx context.Context, slot uint64, hashes []common.Hash) (eth.APIBeaconBlobsResponse, error) { reqQuery := url.Values{} for _, hash := range hashes { - reqQuery.Add("versioned_hashes", hash.Hash.Hex()) + reqQuery.Add("versioned_hashes", hash.Hex()) } reqPath := path.Join(blobsMethodPrefix, strconv.FormatUint(slot, 10)) var blobsResp eth.APIBeaconBlobsResponse if err := cl.apiReq(ctx, &blobsResp, reqPath, reqQuery); err != nil { return eth.APIBeaconBlobsResponse{}, err } - return blobsResp, nil -} - -func (cl *BeaconHTTPClient) BeaconBlobSideCars(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error) { - reqPath := path.Join(sidecarsMethodPrefix, strconv.FormatUint(slot, 10)) - var reqQuery url.Values - if !fetchAllSidecars { - reqQuery = url.Values{} - for i := range hashes { - reqQuery.Add("indices", strconv.FormatUint(hashes[i].Index, 10)) - } - } - var resp eth.APIGetBlobSidecarsResponse - if err := cl.apiReq(ctx, &resp, reqPath, reqQuery); err != nil { - return eth.APIGetBlobSidecarsResponse{}, err - } - - indices := make(map[uint64]struct{}, len(hashes)) - for _, h := range hashes { - indices[h.Index] = struct{}{} - } - - for _, apisc := range resp.Data { - delete(indices, uint64(apisc.Index)) + if len(blobsResp.Data) != len(hashes) { + return eth.APIBeaconBlobsResponse{}, fmt.Errorf("#returned blobs(%d) != #requested blobs(%d)", 
len(blobsResp.Data), len(hashes)) } - - if len(indices) > 0 { - return eth.APIGetBlobSidecarsResponse{}, fmt.Errorf("#returned blobs(%d) != #requested blobs(%d)", len(hashes)-len(indices), len(hashes)) - } - return resp, nil + return blobsResp, nil } type ClientPool[T any] struct { @@ -173,8 +147,8 @@ func (p *ClientPool[T]) MoveToNext() { // NewL1BeaconClient returns a client for making requests to an L1 consensus layer node. // Fallbacks are optional clients that will be used for fetching blobs. L1BeaconClient will rotate between // the `cl` and the fallbacks whenever a client runs into an error while fetching blobs. -func NewL1BeaconClient(cl apis.BeaconClient, cfg L1BeaconClientConfig, fallbacks ...apis.BlobSideCarsClient) *L1BeaconClient { - cs := append([]apis.BlobSideCarsClient{cl}, fallbacks...) +func NewL1BeaconClient(cl apis.BeaconClient, cfg L1BeaconClientConfig, fallbacks ...apis.BeaconClient) *L1BeaconClient { + cs := append([]apis.BeaconClient{cl}, fallbacks...) return &L1BeaconClient{ cl: cl, pool: NewClientPool(cs...), @@ -228,11 +202,11 @@ func (cl *L1BeaconClient) timeToSlot(ctx context.Context, timestamp uint64) (uin return slot, nil } -func (cl *L1BeaconClient) fetchSidecars(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error) { +func (cl *L1BeaconClient) fetchBlobs(ctx context.Context, slot uint64, hashes []common.Hash) (eth.APIBeaconBlobsResponse, error) { var errs []error for i := 0; i < cl.pool.Len(); i++ { f := cl.pool.Get() - resp, err := f.BeaconBlobSideCars(ctx, cl.cfg.FetchAllSidecars, slot, hashes) + resp, err := f.BeaconBlobs(ctx, slot, hashes) if err != nil { cl.pool.MoveToNext() errs = append(errs, err) @@ -240,88 +214,26 @@ func (cl *L1BeaconClient) fetchSidecars(ctx context.Context, slot uint64, hashes return resp, nil } } - return eth.APIGetBlobSidecarsResponse{}, errors.Join(errs...) + return eth.APIBeaconBlobsResponse{}, errors.Join(errs...) 
} -// GetBlobSidecars fetches blob sidecars that were confirmed in the specified -// L1 block with the given indexed hashes. -// Order of the returned sidecars is guaranteed to be that of the hashes. -// Blob data is not checked for validity. -func (cl *L1BeaconClient) GetBlobSidecars(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.BlobSidecar, error) { - if len(hashes) == 0 { - return []*eth.BlobSidecar{}, nil - } - slot, err := cl.timeToSlot(ctx, ref.Time) - if err != nil { - return nil, err - } - sidecars, err := cl.getBlobSidecars(ctx, slot, hashes) - if err != nil { - return nil, fmt.Errorf("get blob sidecars for block %v: %w", ref, err) - } - return sidecars, nil -} - -func (cl *L1BeaconClient) getBlobSidecars(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) ([]*eth.BlobSidecar, error) { - resp, err := cl.fetchSidecars(ctx, slot, hashes) - if err != nil { - return nil, fmt.Errorf("failed to fetch blob sidecars for slot %v: %w", slot, err) - } - - apiscs := make([]*eth.APIBlobSidecar, 0, len(hashes)) - // filter and order by hashes - for _, h := range hashes { - for _, apisc := range resp.Data { - if h.Index == uint64(apisc.Index) { - apiscs = append(apiscs, apisc) - break - } - } - } - - if len(hashes) != len(apiscs) { - return nil, fmt.Errorf("expected %v sidecars but got %v", len(hashes), len(apiscs)) - } - - bscs := make([]*eth.BlobSidecar, 0, len(hashes)) - for _, apisc := range apiscs { - bscs = append(bscs, apisc.BlobSidecar()) - } - - return bscs, nil -} - -// GetBlobs fetches blobs that were confirmed in the specified L1 block with the given indexed -// hashes. The order of the returned blobs will match the order of `hashes`. Confirms each -// blob's validity by checking its proof against the commitment, and confirming the commitment +// GetBlobsByHash fetches blobs that were confirmed at the given timestamp with the given versioned hashes. 
+// The order of the returned blobs will match the order of `hashes`. Confirms each +// blob's validity by recomputing the commitment and confirming the commitment // hashes to the expected value. Returns error if any blob is found invalid. -func (cl *L1BeaconClient) GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { +func (cl *L1BeaconClient) GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) { if len(hashes) == 0 { return []*eth.Blob{}, nil } - slot, err := cl.timeToSlot(ctx, ref.Time) + slot, err := cl.timeToSlot(ctx, time) if err != nil { return nil, err } - blobs, errBeaconBlobs := cl.beaconBlobs(ctx, slot, hashes) - if errBeaconBlobs == nil { - return blobs, nil - } - // If fetching from the post-Fulu /blobs/ endpoint fails, fall back to /blob_sidecars/. - errBeaconBlobs = fmt.Errorf("failed to get blobs: %w", errBeaconBlobs) - blobSidecars, err := cl.getBlobSidecars(ctx, slot, hashes) - if err != nil { - return nil, fmt.Errorf("%w; failed to get blob sidecars for L1BlockRef %s after falling back: %w", errBeaconBlobs, ref, err) - } - blobs, err = blobsFromSidecars(blobSidecars, hashes) - if err != nil { - return nil, fmt.Errorf("%w; failed to get blobs from sidecars for L1BlockRef %s after falling back: %w", errBeaconBlobs, ref, err) - } - return blobs, nil + return cl.beaconBlobs(ctx, slot, hashes) } -func (cl *L1BeaconClient) beaconBlobs(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { - resp, err := cl.cl.BeaconBlobs(ctx, slot, hashes) +func (cl *L1BeaconClient) beaconBlobs(ctx context.Context, slot uint64, hashes []common.Hash) ([]*eth.Blob, error) { + resp, err := cl.fetchBlobs(ctx, slot, hashes) if err != nil { return nil, fmt.Errorf("get blobs from beacon client: %w", err) } @@ -344,8 +256,8 @@ func (cl *L1BeaconClient) beaconBlobs(ctx context.Context, slot uint64, hashes [ } got := eth.KZGToVersionedHash(commitment) idx := 
-1 - for i, indexedHash := range hashes { - if got == indexedHash.Hash && blobs[i] == nil { + for i, h := range hashes { + if got == h && blobs[i] == nil { idx = i break } @@ -358,25 +270,6 @@ func (cl *L1BeaconClient) beaconBlobs(ctx context.Context, slot uint64, hashes [ return blobs, nil } -// blobsFromSidecars pulls the blobs from the sidecars and verifies them against the supplied hashes. -func blobsFromSidecars(blobSidecars []*eth.BlobSidecar, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { - if len(blobSidecars) != len(hashes) { - return nil, fmt.Errorf("number of hashes and blobSidecars mismatch, %d != %d", len(hashes), len(blobSidecars)) - } - out := make([]*eth.Blob, len(hashes)) - for i, ih := range hashes { - sidecar := blobSidecars[i] - if sidx := uint64(sidecar.Index); sidx != ih.Index { - return nil, fmt.Errorf("expected sidecars to be ordered by hashes, but got %d != %d", sidx, ih.Index) - } - if err := verifyBlob(&sidecar.Blob, ih.Hash); err != nil { - return nil, fmt.Errorf("blob %d failed verification: %w", i, err) - } - out[i] = &sidecar.Blob - } - return out, nil -} - // verifyBlob verifies that the blob data corresponds to the provided commitment. // It recomputes the commitment from the blob data and checks it matches the expected commitment hash. 
func verifyBlob(blob *eth.Blob, expectedCommitmentHash common.Hash) error { diff --git a/op-service/sources/l1_beacon_client_test.go b/op-service/sources/l1_beacon_client_test.go index 10fdb3755d5..50dd092425d 100644 --- a/op-service/sources/l1_beacon_client_test.go +++ b/op-service/sources/l1_beacon_client_test.go @@ -13,7 +13,6 @@ import ( "strconv" "testing" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/ethereum/go-ethereum/common" @@ -25,82 +24,22 @@ import ( "github.com/ethereum-optimism/optimism/op-service/sources/mocks" ) -//go:generate mockery --srcpkg=github.com/ethereum-optimism/optimism/op-service/apis --name BlobSideCarsClient --with-expecter=true - //go:generate mockery --srcpkg=github.com/ethereum-optimism/optimism/op-service/apis --name BeaconClient --with-expecter=true -func makeTestBlobSidecar(index uint64) (eth.IndexedBlobHash, *eth.BlobSidecar) { +func makeTestBlob(index uint64) (eth.IndexedBlobHash, *eth.Blob) { blob := kzg4844.Blob{} // make first byte of test blob match its index so we can easily verify if is returned in the // expected order blob[0] = byte(index) commit, _ := kzg4844.BlobToCommitment(&blob) - proof, _ := kzg4844.ComputeBlobProof(&blob, commit) hash := eth.KZGToVersionedHash(commit) idh := eth.IndexedBlobHash{ Index: index, Hash: hash, } - sidecar := eth.BlobSidecar{ - Index: eth.Uint64String(index), - Blob: eth.Blob(blob), - KZGCommitment: eth.Bytes48(commit), - KZGProof: eth.Bytes48(proof), - } - return idh, &sidecar -} - -func TestBlobsFromSidecars(t *testing.T) { - indices := []uint64{5, 7, 2} - - // blobs should be returned in order of their indices in the hashes array regardless - // of the sidecar ordering - index0, sidecar0 := makeTestBlobSidecar(indices[0]) - index1, sidecar1 := makeTestBlobSidecar(indices[1]) - index2, sidecar2 := makeTestBlobSidecar(indices[2]) - - hashes := []eth.IndexedBlobHash{index0, index1, index2} - - // put the sidecars in scrambled order to confirm 
error - sidecars := []*eth.BlobSidecar{sidecar2, sidecar0, sidecar1} - _, err := blobsFromSidecars(sidecars, hashes) - require.Error(t, err) - - // too few sidecars should error - sidecars = []*eth.BlobSidecar{sidecar0, sidecar1} - _, err = blobsFromSidecars(sidecars, hashes) - require.Error(t, err) - - // correct order should work - sidecars = []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - blobs, err := blobsFromSidecars(sidecars, hashes) - require.NoError(t, err) - // confirm order by checking first blob byte against expected index - for i := range blobs { - require.Equal(t, byte(indices[i]), blobs[i][0]) - } - - // mangle a proof to make sure it's detected - badProof := *sidecar0 - badProof.KZGProof[11]++ - sidecars[1] = &badProof - _, err = blobsFromSidecars(sidecars, hashes) - require.Error(t, err) - - // mangle a commitment to make sure it's detected - badCommitment := *sidecar0 - badCommitment.KZGCommitment[13]++ - sidecars[1] = &badCommitment - _, err = blobsFromSidecars(sidecars, hashes) - require.Error(t, err) - - // mangle a hash to make sure it's detected - sidecars[1] = sidecar0 - hashes[2].Hash[17]++ - _, err = blobsFromSidecars(sidecars, hashes) - require.Error(t, err) - + ethBLob := eth.Blob(blob) + return idh, ðBLob } func KZGProofFromHex(s string) (kzg4844.Proof, error) { @@ -116,153 +55,66 @@ func KZGProofFromHex(s string) (kzg4844.Proof, error) { return out, nil } -var badProof, _ = KZGProofFromHex("0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000") - -func TestBlobsFromSidecars_BadProof(t *testing.T) { - indices := []uint64{5, 7, 2} - index0, sidecar0 := makeTestBlobSidecar(indices[0]) - index1, sidecar1 := makeTestBlobSidecar(indices[1]) - index2, sidecar2 := makeTestBlobSidecar(indices[2]) - hashes := []eth.IndexedBlobHash{index0, index1, index2} - - sidecars := []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - - // Set proof to a bad / stubbed value - sidecars[1].KZGProof = 
eth.Bytes48(badProof) - - // Check that verification succeeds, the proof is not required - _, err := blobsFromSidecars(sidecars, hashes) - require.NoError(t, err) - -} - -func TestBlobsFromSidecars_EmptySidecarList(t *testing.T) { - hashes := []eth.IndexedBlobHash{} - sidecars := []*eth.BlobSidecar{} - blobs, err := blobsFromSidecars(sidecars, hashes) - require.NoError(t, err) - require.Empty(t, blobs, "blobs should be empty when no sidecars are provided") -} - -func toAPISideCars(sidecars []*eth.BlobSidecar) []*eth.APIBlobSidecar { - var out []*eth.APIBlobSidecar - for _, s := range sidecars { - out = append(out, ð.APIBlobSidecar{ - Index: s.Index, - Blob: s.Blob, - KZGCommitment: s.KZGCommitment, - KZGProof: s.KZGProof, - SignedBlockHeader: eth.SignedBeaconBlockHeader{}, - }) - } - return out -} - func TestBeaconClientNoErrorPrimary(t *testing.T) { indices := []uint64{5, 7, 2} - index0, sidecar0 := makeTestBlobSidecar(indices[0]) - index1, sidecar1 := makeTestBlobSidecar(indices[1]) - index2, sidecar2 := makeTestBlobSidecar(indices[2]) - - hashes := []eth.IndexedBlobHash{index0, index1, index2} - sidecars := []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - apiSidecars := toAPISideCars(sidecars) + index0, blob0 := makeTestBlob(indices[0]) + index1, blob1 := makeTestBlob(indices[1]) + index2, blob2 := makeTestBlob(indices[2]) + hashes := []common.Hash{index0.Hash, index1.Hash, index2.Hash} + blobs := []*eth.Blob{blob0, blob1, blob2} ctx := context.Background() p := mocks.NewBeaconClient(t) - f := mocks.NewBlobSideCarsClient(t) + f := mocks.NewBeaconClient(t) c := NewL1BeaconClient(p, L1BeaconClientConfig{}, f) p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) // Timestamp 12 = Slot 1 - p.EXPECT().BeaconBlobSideCars(ctx, false, uint64(1), hashes).Return(eth.APIGetBlobSidecarsResponse{Data: 
apiSidecars}, nil) + p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(eth.APIBeaconBlobsResponse{Data: blobs}, nil) - resp, err := c.GetBlobSidecars(ctx, eth.L1BlockRef{Time: 12}, hashes) - require.Equal(t, sidecars, resp) + resp, err := c.GetBlobsByHash(ctx, 12, hashes) require.NoError(t, err) + require.Equal(t, blobs, resp) + } func TestBeaconClientFallback(t *testing.T) { indices := []uint64{5, 7, 2} - index0, sidecar0 := makeTestBlobSidecar(indices[0]) - index1, sidecar1 := makeTestBlobSidecar(indices[1]) - index2, sidecar2 := makeTestBlobSidecar(indices[2]) - - hashes := []eth.IndexedBlobHash{index0, index1, index2} - sidecars := []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - apiSidecars := toAPISideCars(sidecars) + index0, blob0 := makeTestBlob(indices[0]) + index1, blob1 := makeTestBlob(indices[1]) + index2, blob2 := makeTestBlob(indices[2]) + hashes := []common.Hash{index0.Hash, index1.Hash, index2.Hash} + blobs := []*eth.Blob{blob0, blob1, blob2} ctx := context.Background() p := mocks.NewBeaconClient(t) - f := mocks.NewBlobSideCarsClient(t) + f := mocks.NewBeaconClient(t) c := NewL1BeaconClient(p, L1BeaconClientConfig{}, f) p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) // Timestamp 12 = Slot 1 - p.EXPECT().BeaconBlobSideCars(ctx, false, uint64(1), hashes).Return(eth.APIGetBlobSidecarsResponse{}, errors.New("404 not found")) - f.EXPECT().BeaconBlobSideCars(ctx, false, uint64(1), hashes).Return(eth.APIGetBlobSidecarsResponse{Data: apiSidecars}, nil) + p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(eth.APIBeaconBlobsResponse{}, errors.New("404 not found")) + f.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(eth.APIBeaconBlobsResponse{Data: blobs}, nil) - resp, err := c.GetBlobSidecars(ctx, eth.L1BlockRef{Time: 12}, hashes) - require.Equal(t, sidecars, resp) + resp, 
err := c.GetBlobsByHash(ctx, 12, hashes) require.NoError(t, err) + require.Equal(t, blobs, resp) // Second set of calls. This time rotate back to the primary indices = []uint64{3, 9, 11} - index0, sidecar0 = makeTestBlobSidecar(indices[0]) - index1, sidecar1 = makeTestBlobSidecar(indices[1]) - index2, sidecar2 = makeTestBlobSidecar(indices[2]) - - hashes = []eth.IndexedBlobHash{index0, index1, index2} - sidecars = []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - apiSidecars = toAPISideCars(sidecars) + index0, blob0 = makeTestBlob(indices[0]) + index1, blob1 = makeTestBlob(indices[1]) + index2, blob2 = makeTestBlob(indices[2]) + hashes = []common.Hash{index0.Hash, index1.Hash, index2.Hash} + blobs = []*eth.Blob{blob0, blob1, blob2} // Timestamp 14 = Slot 2 - f.EXPECT().BeaconBlobSideCars(ctx, false, uint64(2), hashes).Return(eth.APIGetBlobSidecarsResponse{}, errors.New("404 not found")) - p.EXPECT().BeaconBlobSideCars(ctx, false, uint64(2), hashes).Return(eth.APIGetBlobSidecarsResponse{Data: apiSidecars}, nil) + f.EXPECT().BeaconBlobs(ctx, uint64(2), hashes).Return(eth.APIBeaconBlobsResponse{}, errors.New("404 not found")) + p.EXPECT().BeaconBlobs(ctx, uint64(2), hashes).Return(eth.APIBeaconBlobsResponse{Data: blobs}, nil) - resp, err = c.GetBlobSidecars(ctx, eth.L1BlockRef{Time: 14}, hashes) - require.Equal(t, sidecars, resp) + resp, err = c.GetBlobsByHash(ctx, 14, hashes) require.NoError(t, err) -} - -func TestBeaconClientBadProof(t *testing.T) { - indices := []uint64{5, 7, 2} - index0, sidecar0 := makeTestBlobSidecar(indices[0]) - index1, sidecar1 := makeTestBlobSidecar(indices[1]) - index2, sidecar2 := makeTestBlobSidecar(indices[2]) - - hashes := []eth.IndexedBlobHash{index0, index1, index2} - sidecars := []*eth.BlobSidecar{sidecar0, sidecar1, sidecar2} - blobs := []*eth.Blob{&sidecar0.Blob, &sidecar1.Blob, &sidecar2.Blob} - - // invalidate proof - sidecar1.KZGProof = eth.Bytes48(badProof) - apiSidecars := toAPISideCars(sidecars) - - t.Run("fallback to 
BeaconBlobSideCars", func(t *testing.T) { - ctx := context.Background() - p := mocks.NewBeaconClient(t) - p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) - p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) - client := NewL1BeaconClient(p, L1BeaconClientConfig{}) - ref := eth.L1BlockRef{Time: 12} - p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(eth.APIBeaconBlobsResponse{}, errors.New("the sky is falling")) - p.EXPECT().BeaconBlobSideCars(ctx, false, uint64(1), hashes).Return(eth.APIGetBlobSidecarsResponse{Data: apiSidecars}, nil) - _, err := client.GetBlobs(ctx, ref, hashes) - assert.NoError(t, err) - }) - - t.Run("BeaconBlobs", func(t *testing.T) { - ctx := context.Background() - p := mocks.NewBeaconClient(t) - p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) - p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) - client := NewL1BeaconClient(p, L1BeaconClientConfig{}) - ref := eth.L1BlockRef{Time: 12} - p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(eth.APIBeaconBlobsResponse{Data: blobs}, nil) - _, err := client.GetBlobs(ctx, ref, hashes) - assert.NoError(t, err) - }) + require.Equal(t, blobs, resp) } func TestBeaconHTTPClient(t *testing.T) { @@ -272,26 +124,26 @@ func TestBeaconHTTPClient(t *testing.T) { ctx := context.Background() indices := []uint64{3, 9, 11} - index0, _ := makeTestBlobSidecar(indices[0]) - index1, _ := makeTestBlobSidecar(indices[1]) - index2, _ := makeTestBlobSidecar(indices[2]) + index0, _ := makeTestBlob(indices[0]) + index1, _ := makeTestBlob(indices[1]) + index2, _ := makeTestBlob(indices[2]) - hashes := []eth.IndexedBlobHash{index0, index1, index2} + hashes := []common.Hash{index0.Hash, index1.Hash, index2.Hash} // mocks returning a 200 with empty list - respBytes, _ := 
json.Marshal(eth.APIGetBlobSidecarsResponse{}) + respBytes, _ := json.Marshal(eth.APIBeaconBlobsResponse{}) slot := uint64(2) - path := path.Join(sidecarsMethodPrefix, strconv.FormatUint(slot, 10)) + path := path.Join(blobsMethodPrefix, strconv.FormatUint(slot, 10)) reqQuery := url.Values{} for i := range hashes { - reqQuery.Add("indices", strconv.FormatUint(hashes[i].Index, 10)) + reqQuery.Add("versioned_hashes", hashes[i].Hex()) } headers := http.Header{} headers.Add("Accept", "application/json") c.EXPECT().Get(ctx, path, reqQuery, headers).Return(&http.Response{StatusCode: http.StatusOK, Body: io.NopCloser(bytes.NewReader(respBytes))}, nil) - // BeaconBlobSideCars should return error when client.HTTP returns a 200 with empty list - _, err := b.BeaconBlobSideCars(ctx, false, slot, hashes) + // BeaconBlobs should return error when client.HTTP returns a 200 with empty list + _, err := b.BeaconBlobs(ctx, slot, hashes) require.Error(t, err) require.Equal(t, err.Error(), fmt.Sprintf("#returned blobs(%d) != #requested blobs(%d)", 0, len(hashes))) } @@ -323,75 +175,29 @@ func TestVerifyBlob(t *testing.T) { } func TestGetBlobs(t *testing.T) { - hash0, sidecar0 := makeTestBlobSidecar(0) - hash1, sidecar1 := makeTestBlobSidecar(1) - hash2, sidecar2 := makeTestBlobSidecar(2) - - hashes := []eth.IndexedBlobHash{hash0, hash2, hash1} // Mix up the order. - - invalidBlob0 := sidecar0.Blob - invalidBlob0[10]++ - - cases := []struct { - name string - beaconBlobs []*eth.Blob - expectFallback bool - }{ - { - name: "happy path", - // From the /blobs/ spec: - // Blobs are returned as an ordered list matching the order of their corresponding - // KZG commitments in the block. 
- beaconBlobs: []*eth.Blob{&sidecar0.Blob, &sidecar1.Blob, &sidecar2.Blob}, - expectFallback: false, - }, - { - name: "fallback on client error", - beaconBlobs: nil, - expectFallback: true, - }, - { - name: "fallback on invalid number of blobs", - beaconBlobs: []*eth.Blob{&sidecar0.Blob}, - expectFallback: true, - }, - { - name: "fallback on invalid blob", - beaconBlobs: []*eth.Blob{&invalidBlob0, &sidecar1.Blob, &sidecar2.Blob}, - expectFallback: true, - }, - } + hash0, blob0 := makeTestBlob(0) + hash1, blob1 := makeTestBlob(1) + hash2, blob2 := makeTestBlob(2) - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - ctx := context.Background() - p := mocks.NewBeaconClient(t) - p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) - p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) - client := NewL1BeaconClient(p, L1BeaconClientConfig{}) - ref := eth.L1BlockRef{Time: 12} - - // construct the mock response for the beacon blobs call - var beaconBlobsResponse eth.APIBeaconBlobsResponse - var err error - if c.beaconBlobs == nil { - err = errors.New("client error") - } else { - beaconBlobsResponse = eth.APIBeaconBlobsResponse{Data: c.beaconBlobs} - } - p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(beaconBlobsResponse, err) - - if c.expectFallback { - p.EXPECT().BeaconBlobSideCars(ctx, false, uint64(1), hashes).Return(eth.APIGetBlobSidecarsResponse{ - Data: toAPISideCars([]*eth.BlobSidecar{sidecar0, sidecar1, sidecar2}), - }, nil) - } - - resp, err := client.GetBlobs(ctx, ref, hashes) - require.NoError(t, err) - require.Equal(t, []*eth.Blob{&sidecar0.Blob, &sidecar2.Blob, &sidecar1.Blob}, resp) - }) - } + hashes := []common.Hash{hash0.Hash, hash2.Hash, hash1.Hash} // Mix up the order. 
+ beaconBlobs := []*eth.Blob{blob0, blob2, blob1} + + ctx := context.Background() + p := mocks.NewBeaconClient(t) + p.EXPECT().BeaconGenesis(ctx).Return(eth.APIGenesisResponse{Data: eth.ReducedGenesisData{GenesisTime: 10}}, nil) + p.EXPECT().ConfigSpec(ctx).Return(eth.APIConfigResponse{Data: eth.ReducedConfigData{SecondsPerSlot: 2}}, nil) + client := NewL1BeaconClient(p, L1BeaconClientConfig{}) + ref := eth.L1BlockRef{Time: 12} + + // construct the mock response for the beacon blobs call + var beaconBlobsResponse eth.APIBeaconBlobsResponse + var err error + beaconBlobsResponse = eth.APIBeaconBlobsResponse{Data: beaconBlobs} + p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(beaconBlobsResponse, err) + + resp, err := client.GetBlobsByHash(ctx, ref.Time, hashes) + require.NoError(t, err) + require.Equal(t, beaconBlobs, resp) } func TestRequestDuplicateBlobHashes(t *testing.T) { @@ -402,30 +208,25 @@ func TestRequestDuplicateBlobHashes(t *testing.T) { client := NewL1BeaconClient(p, L1BeaconClientConfig{}) ref := eth.L1BlockRef{Time: 12} - hash0, sidecar0 := makeTestBlobSidecar(0) - hash1, sidecar1 := makeTestBlobSidecar(1) - hash2, sidecar2 := makeTestBlobSidecar(2) + hash0, blob0 := makeTestBlob(0) + hash1, blob1 := makeTestBlob(1) + hash2, blob2 := makeTestBlob(2) sameHash := eth.IndexedBlobHash{ Index: 3, Hash: hash0.Hash, } - sameHashSidecar := ð.BlobSidecar{ - Blob: sidecar0.Blob, - Index: eth.Uint64String(sameHash.Index), - KZGCommitment: sidecar0.KZGCommitment, - KZGProof: sidecar0.KZGProof, - } - hashes := []eth.IndexedBlobHash{hash0, hash2, hash1, sameHash} // Mix up the order. 
- beaconBlobs := []*eth.Blob{&sidecar0.Blob, &sidecar1.Blob, &sidecar2.Blob, &sameHashSidecar.Blob} + + hashes := []common.Hash{hash0.Hash, hash2.Hash, hash1.Hash, sameHash.Hash} + beaconBlobs := []*eth.Blob{blob0, blob2, blob1, blob0} // construct the mock response for the beacon blobs call beaconBlobsResponse := eth.APIBeaconBlobsResponse{Data: beaconBlobs} p.EXPECT().BeaconBlobs(ctx, uint64(1), hashes).Return(beaconBlobsResponse, nil) - resp, err := client.GetBlobs(ctx, ref, hashes) + resp, err := client.GetBlobsByHash(ctx, ref.Time, hashes) require.NoError(t, err) for i, blob := range resp { require.NotNil(t, blob, fmt.Sprintf("blob at index %d should not be nil", i)) } - require.Equal(t, []*eth.Blob{&sidecar0.Blob, &sidecar2.Blob, &sidecar1.Blob, &sameHashSidecar.Blob}, resp) + require.Equal(t, []*eth.Blob{blob0, blob2, blob1, blob0}, resp) } diff --git a/op-service/sources/mocks/BeaconClient.go b/op-service/sources/mocks/BeaconClient.go index 6838f4bc459..fb4b8cea209 100644 --- a/op-service/sources/mocks/BeaconClient.go +++ b/op-service/sources/mocks/BeaconClient.go @@ -5,7 +5,10 @@ package mocks import ( context "context" + common "github.com/ethereum/go-ethereum/common" + eth "github.com/ethereum-optimism/optimism/op-service/eth" + mock "github.com/stretchr/testify/mock" ) @@ -22,67 +25,8 @@ func (_m *BeaconClient) EXPECT() *BeaconClient_Expecter { return &BeaconClient_Expecter{mock: &_m.Mock} } -// BeaconBlobSideCars provides a mock function with given fields: ctx, fetchAllSidecars, slot, hashes -func (_m *BeaconClient) BeaconBlobSideCars(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error) { - ret := _m.Called(ctx, fetchAllSidecars, slot, hashes) - - if len(ret) == 0 { - panic("no return value specified for BeaconBlobSideCars") - } - - var r0 eth.APIGetBlobSidecarsResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) 
(eth.APIGetBlobSidecarsResponse, error)); ok { - return rf(ctx, fetchAllSidecars, slot, hashes) - } - if rf, ok := ret.Get(0).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) eth.APIGetBlobSidecarsResponse); ok { - r0 = rf(ctx, fetchAllSidecars, slot, hashes) - } else { - r0 = ret.Get(0).(eth.APIGetBlobSidecarsResponse) - } - - if rf, ok := ret.Get(1).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) error); ok { - r1 = rf(ctx, fetchAllSidecars, slot, hashes) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// BeaconClient_BeaconBlobSideCars_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BeaconBlobSideCars' -type BeaconClient_BeaconBlobSideCars_Call struct { - *mock.Call -} - -// BeaconBlobSideCars is a helper method to define mock.On call -// - ctx context.Context -// - fetchAllSidecars bool -// - slot uint64 -// - hashes []eth.IndexedBlobHash -func (_e *BeaconClient_Expecter) BeaconBlobSideCars(ctx interface{}, fetchAllSidecars interface{}, slot interface{}, hashes interface{}) *BeaconClient_BeaconBlobSideCars_Call { - return &BeaconClient_BeaconBlobSideCars_Call{Call: _e.mock.On("BeaconBlobSideCars", ctx, fetchAllSidecars, slot, hashes)} -} - -func (_c *BeaconClient_BeaconBlobSideCars_Call) Run(run func(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash)) *BeaconClient_BeaconBlobSideCars_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(bool), args[2].(uint64), args[3].([]eth.IndexedBlobHash)) - }) - return _c -} - -func (_c *BeaconClient_BeaconBlobSideCars_Call) Return(_a0 eth.APIGetBlobSidecarsResponse, _a1 error) *BeaconClient_BeaconBlobSideCars_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *BeaconClient_BeaconBlobSideCars_Call) RunAndReturn(run func(context.Context, bool, uint64, []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error)) *BeaconClient_BeaconBlobSideCars_Call { - 
_c.Call.Return(run) - return _c -} - // BeaconBlobs provides a mock function with given fields: ctx, slot, hashes -func (_m *BeaconClient) BeaconBlobs(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIBeaconBlobsResponse, error) { +func (_m *BeaconClient) BeaconBlobs(ctx context.Context, slot uint64, hashes []common.Hash) (eth.APIBeaconBlobsResponse, error) { ret := _m.Called(ctx, slot, hashes) if len(ret) == 0 { @@ -91,16 +35,16 @@ func (_m *BeaconClient) BeaconBlobs(ctx context.Context, slot uint64, hashes []e var r0 eth.APIBeaconBlobsResponse var r1 error - if rf, ok := ret.Get(0).(func(context.Context, uint64, []eth.IndexedBlobHash) (eth.APIBeaconBlobsResponse, error)); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint64, []common.Hash) (eth.APIBeaconBlobsResponse, error)); ok { return rf(ctx, slot, hashes) } - if rf, ok := ret.Get(0).(func(context.Context, uint64, []eth.IndexedBlobHash) eth.APIBeaconBlobsResponse); ok { + if rf, ok := ret.Get(0).(func(context.Context, uint64, []common.Hash) eth.APIBeaconBlobsResponse); ok { r0 = rf(ctx, slot, hashes) } else { r0 = ret.Get(0).(eth.APIBeaconBlobsResponse) } - if rf, ok := ret.Get(1).(func(context.Context, uint64, []eth.IndexedBlobHash) error); ok { + if rf, ok := ret.Get(1).(func(context.Context, uint64, []common.Hash) error); ok { r1 = rf(ctx, slot, hashes) } else { r1 = ret.Error(1) @@ -117,14 +61,14 @@ type BeaconClient_BeaconBlobs_Call struct { // BeaconBlobs is a helper method to define mock.On call // - ctx context.Context // - slot uint64 -// - hashes []eth.IndexedBlobHash +// - hashes []common.Hash func (_e *BeaconClient_Expecter) BeaconBlobs(ctx interface{}, slot interface{}, hashes interface{}) *BeaconClient_BeaconBlobs_Call { return &BeaconClient_BeaconBlobs_Call{Call: _e.mock.On("BeaconBlobs", ctx, slot, hashes)} } -func (_c *BeaconClient_BeaconBlobs_Call) Run(run func(ctx context.Context, slot uint64, hashes []eth.IndexedBlobHash)) *BeaconClient_BeaconBlobs_Call { 
+func (_c *BeaconClient_BeaconBlobs_Call) Run(run func(ctx context.Context, slot uint64, hashes []common.Hash)) *BeaconClient_BeaconBlobs_Call { _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(uint64), args[2].([]eth.IndexedBlobHash)) + run(args[0].(context.Context), args[1].(uint64), args[2].([]common.Hash)) }) return _c } @@ -134,7 +78,7 @@ func (_c *BeaconClient_BeaconBlobs_Call) Return(_a0 eth.APIBeaconBlobsResponse, return _c } -func (_c *BeaconClient_BeaconBlobs_Call) RunAndReturn(run func(context.Context, uint64, []eth.IndexedBlobHash) (eth.APIBeaconBlobsResponse, error)) *BeaconClient_BeaconBlobs_Call { +func (_c *BeaconClient_BeaconBlobs_Call) RunAndReturn(run func(context.Context, uint64, []common.Hash) (eth.APIBeaconBlobsResponse, error)) *BeaconClient_BeaconBlobs_Call { _c.Call.Return(run) return _c } diff --git a/op-service/sources/mocks/BlobSideCarsClient.go b/op-service/sources/mocks/BlobSideCarsClient.go deleted file mode 100644 index 242a207d8a3..00000000000 --- a/op-service/sources/mocks/BlobSideCarsClient.go +++ /dev/null @@ -1,96 +0,0 @@ -// Code generated by mockery v2.53.3. DO NOT EDIT. 
- -package mocks - -import ( - context "context" - - eth "github.com/ethereum-optimism/optimism/op-service/eth" - mock "github.com/stretchr/testify/mock" -) - -// BlobSideCarsClient is an autogenerated mock type for the BlobSideCarsClient type -type BlobSideCarsClient struct { - mock.Mock -} - -type BlobSideCarsClient_Expecter struct { - mock *mock.Mock -} - -func (_m *BlobSideCarsClient) EXPECT() *BlobSideCarsClient_Expecter { - return &BlobSideCarsClient_Expecter{mock: &_m.Mock} -} - -// BeaconBlobSideCars provides a mock function with given fields: ctx, fetchAllSidecars, slot, hashes -func (_m *BlobSideCarsClient) BeaconBlobSideCars(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error) { - ret := _m.Called(ctx, fetchAllSidecars, slot, hashes) - - if len(ret) == 0 { - panic("no return value specified for BeaconBlobSideCars") - } - - var r0 eth.APIGetBlobSidecarsResponse - var r1 error - if rf, ok := ret.Get(0).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error)); ok { - return rf(ctx, fetchAllSidecars, slot, hashes) - } - if rf, ok := ret.Get(0).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) eth.APIGetBlobSidecarsResponse); ok { - r0 = rf(ctx, fetchAllSidecars, slot, hashes) - } else { - r0 = ret.Get(0).(eth.APIGetBlobSidecarsResponse) - } - - if rf, ok := ret.Get(1).(func(context.Context, bool, uint64, []eth.IndexedBlobHash) error); ok { - r1 = rf(ctx, fetchAllSidecars, slot, hashes) - } else { - r1 = ret.Error(1) - } - - return r0, r1 -} - -// BlobSideCarsClient_BeaconBlobSideCars_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'BeaconBlobSideCars' -type BlobSideCarsClient_BeaconBlobSideCars_Call struct { - *mock.Call -} - -// BeaconBlobSideCars is a helper method to define mock.On call -// - ctx context.Context -// - fetchAllSidecars bool -// - slot uint64 -// - hashes 
[]eth.IndexedBlobHash -func (_e *BlobSideCarsClient_Expecter) BeaconBlobSideCars(ctx interface{}, fetchAllSidecars interface{}, slot interface{}, hashes interface{}) *BlobSideCarsClient_BeaconBlobSideCars_Call { - return &BlobSideCarsClient_BeaconBlobSideCars_Call{Call: _e.mock.On("BeaconBlobSideCars", ctx, fetchAllSidecars, slot, hashes)} -} - -func (_c *BlobSideCarsClient_BeaconBlobSideCars_Call) Run(run func(ctx context.Context, fetchAllSidecars bool, slot uint64, hashes []eth.IndexedBlobHash)) *BlobSideCarsClient_BeaconBlobSideCars_Call { - _c.Call.Run(func(args mock.Arguments) { - run(args[0].(context.Context), args[1].(bool), args[2].(uint64), args[3].([]eth.IndexedBlobHash)) - }) - return _c -} - -func (_c *BlobSideCarsClient_BeaconBlobSideCars_Call) Return(_a0 eth.APIGetBlobSidecarsResponse, _a1 error) *BlobSideCarsClient_BeaconBlobSideCars_Call { - _c.Call.Return(_a0, _a1) - return _c -} - -func (_c *BlobSideCarsClient_BeaconBlobSideCars_Call) RunAndReturn(run func(context.Context, bool, uint64, []eth.IndexedBlobHash) (eth.APIGetBlobSidecarsResponse, error)) *BlobSideCarsClient_BeaconBlobSideCars_Call { - _c.Call.Return(run) - return _c -} - -// NewBlobSideCarsClient creates a new instance of BlobSideCarsClient. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations. -// The first argument is typically a *testing.T value. 
-func NewBlobSideCarsClient(t interface { - mock.TestingT - Cleanup(func()) -}) *BlobSideCarsClient { - mock := &BlobSideCarsClient{} - mock.Mock.Test(t) - - t.Cleanup(func() { mock.AssertExpectations(t) }) - - return mock -} diff --git a/op-service/testutils/mock_blobs_fetcher.go b/op-service/testutils/mock_blobs_fetcher.go index df09f5f46a2..23fc2508a2f 100644 --- a/op-service/testutils/mock_blobs_fetcher.go +++ b/op-service/testutils/mock_blobs_fetcher.go @@ -4,6 +4,7 @@ import ( "context" "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/mock" ) @@ -11,13 +12,13 @@ type MockBlobsFetcher struct { mock.Mock } -func (cl *MockBlobsFetcher) GetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.Blob, error) { - out := cl.Mock.MethodCalled("GetBlobs", ref, hashes) +func (cl *MockBlobsFetcher) GetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash) ([]*eth.Blob, error) { + out := cl.Mock.MethodCalled("GetBlobsByHash", time, hashes) return out.Get(0).([]*eth.Blob), out.Error(1) } -func (cl *MockBlobsFetcher) ExpectOnGetBlobs(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash, blobs []*eth.Blob, err error) { - cl.Mock.On("GetBlobs", ref, hashes).Once().Return(blobs, err) +func (cl *MockBlobsFetcher) ExpectOnGetBlobsByHash(ctx context.Context, time uint64, hashes []common.Hash, blobs []*eth.Blob, err error) { + cl.Mock.On("GetBlobsByHash", time, hashes).Once().Return(blobs, err) } func (cl *MockBlobsFetcher) GetBlobSidecars(ctx context.Context, ref eth.L1BlockRef, hashes []eth.IndexedBlobHash) ([]*eth.BlobSidecar, error) { diff --git a/op-service/txinclude/interfaces.go b/op-service/txinclude/interfaces.go index 99498522000..36aacb6874b 100644 --- a/op-service/txinclude/interfaces.go +++ b/op-service/txinclude/interfaces.go @@ -4,6 +4,7 @@ import ( "context" "crypto/ecdsa" "math/big" + "time" 
"github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum/go-ethereum/common" @@ -27,6 +28,18 @@ type EL interface { ReceiptGetter } +// NewReliableEL turns an implementation of the EL interface into one that will retry on +// intermittent failures. +func NewReliableEL(el EL, blockTime time.Duration) EL { + return struct { + *Monitor + *Resubmitter + }{ + NewMonitor(el, blockTime), + NewResubmitter(el, blockTime), + } +} + type ReceiptGetter interface { TransactionReceipt(context.Context, common.Hash) (*types.Receipt, error) } diff --git a/op-service/txinclude/limit.go b/op-service/txinclude/limit.go new file mode 100644 index 00000000000..e6de4ac9485 --- /dev/null +++ b/op-service/txinclude/limit.go @@ -0,0 +1,33 @@ +package txinclude + +import ( + "context" + + "github.com/ethereum/go-ethereum/core/types" +) + +type Limit struct { + inner Includer + sema chan struct{} +} + +func NewLimit(inner Includer, limit int) *Limit { + return &Limit{ + inner: inner, + sema: make(chan struct{}, limit), + } +} + +func (l *Limit) Include(ctx context.Context, tx types.TxData) (*IncludedTx, error) { + select { + case l.sema <- struct{}{}: + defer func() { + <-l.sema + }() + return l.inner.Include(ctx, tx) + case <-ctx.Done(): + return nil, ctx.Err() + } +} + +var _ Includer = (*Limit)(nil) diff --git a/op-supernode/supernode/activity/activity.go b/op-supernode/supernode/activity/activity.go index f3020798f81..cc4bb1d5e6e 100644 --- a/op-supernode/supernode/activity/activity.go +++ b/op-supernode/supernode/activity/activity.go @@ -8,6 +8,10 @@ import ( // Activity is an open interface to collect pluggable behaviors which satisfy sub-activitiy interfaces. type Activity interface { + // Reset is called when a chain container resets to a given timestamp. + // Activities should clean up any cached state for that chain at or after the timestamp. + // This is a no-op for activities that don't maintain chain-specific state. 
+ Reset(chainID eth.ChainID, timestamp uint64) } // RunnableActivity is an Activity that can be started and stopped independently. diff --git a/op-supernode/supernode/activity/heartbeat/heartbeat.go b/op-supernode/supernode/activity/heartbeat/heartbeat.go index aab65475a71..857137d39d1 100644 --- a/op-supernode/supernode/activity/heartbeat/heartbeat.go +++ b/op-supernode/supernode/activity/heartbeat/heartbeat.go @@ -5,6 +5,7 @@ import ( "crypto/rand" "time" + "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-supernode/supernode/activity" "github.com/ethereum/go-ethereum/common/hexutil" gethlog "github.com/ethereum/go-ethereum/log" @@ -55,6 +56,11 @@ func (h *Heartbeat) Stop(ctx context.Context) error { return nil } +// Reset is a no-op for heartbeat - it has no chain-specific state. +func (h *Heartbeat) Reset(chainID eth.ChainID, timestamp uint64) { + // No-op: heartbeat has no chain-specific cached state +} + // RPCNamespace returns the JSON-RPC namespace for this activity. func (h *Heartbeat) RPCNamespace() string { return "heartbeat" } diff --git a/op-supernode/supernode/activity/interop/algo.go b/op-supernode/supernode/activity/interop/algo.go new file mode 100644 index 00000000000..48dabc0508a --- /dev/null +++ b/op-supernode/supernode/activity/interop/algo.go @@ -0,0 +1,153 @@ +package interop + +import ( + "errors" + "fmt" + + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" +) + +// ExpiryTime is the maximum age of an initiating message that can be executed. +// Messages older than this are considered expired and invalid. +// 7 days = 7 * 24 * 60 * 60 = 604800 seconds +const ExpiryTime = 604800 + +var ( + // ErrUnknownChain is returned when an executing message references + // a chain that is not registered with the interop activity. 
+ ErrUnknownChain = errors.New("unknown chain") + + // ErrTimestampViolation is returned when an executing message references + // an initiating message with a timestamp >= the executing message's timestamp. + ErrTimestampViolation = errors.New("initiating message timestamp must be less than executing message timestamp") + + // ErrMessageExpired is returned when an executing message references + // an initiating message that has expired (older than ExpiryTime). + ErrMessageExpired = errors.New("initiating message has expired") +) + +// verifyInteropMessages validates all executing messages at the given timestamp. +// Returns a Result indicating whether all messages are valid or which chains have invalid blocks. +// +// For each chain: +// 1. Open the block from the logsDB and verify it matches blocksAtTimestamp +// 2. For each executing message in the block: +// - Verify the initiating message exists in the source chain's logsDB +// - Verify the initiating message timestamp < executing message timestamp +// - Verify the initiating message hasn't expired (within ExpiryTime) +func (i *Interop) verifyInteropMessages(ts uint64, blocksAtTimestamp map[eth.ChainID]eth.BlockID) (Result, error) { + result := Result{ + Timestamp: ts, + L2Heads: make(map[eth.ChainID]eth.BlockID), + InvalidHeads: make(map[eth.ChainID]eth.BlockID), + } + + for chainID, expectedBlock := range blocksAtTimestamp { + db, ok := i.logsDBs[chainID] + if !ok { + // Skip chains that we don't have a logsDB for + // This can happen if blocksAtTimestamp includes chains not registered with the interop activity + continue + } + + // Get the block from the logsDB + blockRef, _, execMsgs, err := db.OpenBlock(expectedBlock.Number) + if err != nil { + // OpenBlock fails for the first block in the DB because it tries to find the parent. + // Handle this by checking if this is the first sealed block and using FirstSealedBlock instead. 
+ if errors.Is(err, types.ErrSkipped) { + firstBlock, firstErr := db.FirstSealedBlock() + if firstErr != nil { + return Result{}, fmt.Errorf("chain %s: failed to open block %d and failed to get first block: %w", chainID, expectedBlock.Number, err) + } + if firstBlock.Number == expectedBlock.Number { + // This is the first block in the logsDB. Use FirstSealedBlock info. + // The first block has no executing messages (since we can't verify them without prior data). + if firstBlock.Hash != expectedBlock.Hash { + i.log.Warn("first block hash mismatch", + "chain", chainID, + "expected", expectedBlock.Hash, + "got", firstBlock.Hash, + ) + result.InvalidHeads[chainID] = expectedBlock + } + result.L2Heads[chainID] = expectedBlock + continue + } + } + return Result{}, fmt.Errorf("chain %s: failed to open block %d: %w", chainID, expectedBlock.Number, err) + } + + // Verify the block hash matches what we expect + if blockRef.Hash != expectedBlock.Hash { + i.log.Warn("block hash mismatch", + "chain", chainID, + "expected", expectedBlock.Hash, + "got", blockRef.Hash, + ) + result.InvalidHeads[chainID] = expectedBlock + result.L2Heads[chainID] = expectedBlock + continue + } + + // Verify each executing message + blockValid := true + for logIdx, execMsg := range execMsgs { + if err := i.verifyExecutingMessage(chainID, blockRef.Time, logIdx, execMsg); err != nil { + i.log.Warn("invalid executing message", + "chain", chainID, + "block", expectedBlock.Number, + "logIdx", logIdx, + "execMsg", execMsg, + "err", err, + ) + blockValid = false + break + } + } + + result.L2Heads[chainID] = expectedBlock + if !blockValid { + result.InvalidHeads[chainID] = expectedBlock + } + } + + return result, nil +} + +// verifyExecutingMessage verifies a single executing message by checking: +// 1. The initiating message exists in the source chain's database +// 2. The initiating message's timestamp is less than the executing block's timestamp +// 3. 
The initiating message hasn't expired (timestamp + ExpiryTime >= executing timestamp) +func (i *Interop) verifyExecutingMessage(executingChain eth.ChainID, executingTimestamp uint64, logIdx uint32, execMsg *types.ExecutingMessage) error { + // Get the source chain's logsDB + sourceDB, ok := i.logsDBs[execMsg.ChainID] + if !ok { + return fmt.Errorf("source chain %s not found: %w", execMsg.ChainID, ErrUnknownChain) + } + + // Verify timestamp ordering: initiating message timestamp must be < executing block timestamp + if execMsg.Timestamp >= executingTimestamp { + return fmt.Errorf("initiating timestamp %d >= executing timestamp %d: %w", + execMsg.Timestamp, executingTimestamp, ErrTimestampViolation) + } + + // Verify the message hasn't expired: initiating timestamp + ExpiryTime must be >= executing timestamp + if execMsg.Timestamp+ExpiryTime < executingTimestamp { + return fmt.Errorf("initiating timestamp %d + expiry %d < executing timestamp %d: %w", + execMsg.Timestamp, ExpiryTime, executingTimestamp, ErrMessageExpired) + } + + // Build the query for the initiating message + query := types.ContainsQuery{ + BlockNum: execMsg.BlockNum, + LogIdx: execMsg.LogIdx, + Timestamp: execMsg.Timestamp, + Checksum: execMsg.Checksum, + } + + // Check if the initiating message exists in the source chain's logsDB + _, err := sourceDB.Contains(query) + return err +} diff --git a/op-supernode/supernode/activity/interop/algo_test.go b/op-supernode/supernode/activity/interop/algo_test.go new file mode 100644 index 00000000000..29a63c8e2a9 --- /dev/null +++ b/op-supernode/supernode/activity/interop/algo_test.go @@ -0,0 +1,598 @@ +package interop + +import ( + "errors" + "math/big" + "testing" + + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + gethlog "github.com/ethereum/go-ethereum/log" + "github.com/ethereum/go-ethereum/params" + "github.com/stretchr/testify/require" + + "github.com/ethereum-optimism/optimism/op-service/eth" + 
"github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/reads" + suptypes "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" +) + +// ============================================================================= +// TestVerifyInteropMessages - Table-Driven Tests +// ============================================================================= + +// verifyInteropTestCase defines a single test case for verifyInteropMessages +type verifyInteropTestCase struct { + name string + setup func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) + expectError bool + errorMsg string + validate func(t *testing.T, result Result) +} + +func runVerifyInteropTest(t *testing.T, tc verifyInteropTestCase) { + t.Parallel() + interop, timestamp, blocks := tc.setup() + result, err := interop.verifyInteropMessages(timestamp, blocks) + + if tc.expectError { + require.Error(t, err) + if tc.errorMsg != "" { + require.Contains(t, err.Error(), tc.errorMsg) + } + } else { + require.NoError(t, err) + } + + if tc.validate != nil { + tc.validate(t, result) + } +} + +func TestVerifyInteropMessages(t *testing.T) { + t.Parallel() + + tests := []verifyInteropTestCase{ + // Valid block cases + { + name: "ValidBlocks/NoExecutingMessages", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + chainID := eth.ChainIDFromUInt64(10) + blockHash := common.HexToHash("0x123") + expectedBlock := eth.BlockID{Number: 100, Hash: blockHash} + + mockDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: blockHash, Number: 100, Time: 1000}, + openBlockExecMsg: nil, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{chainID: mockDB}, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{chainID: expectedBlock} + }, + validate: func(t *testing.T, result Result) { + chainID := eth.ChainIDFromUInt64(10) + expectedBlock := eth.BlockID{Number: 100, Hash: common.HexToHash("0x123")} + require.True(t, result.IsValid()) + require.Empty(t, 
result.InvalidHeads) + require.Equal(t, expectedBlock, result.L2Heads[chainID]) + }, + }, + { + name: "ValidBlocks/ValidExecutingMessage", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + destChainID := eth.ChainIDFromUInt64(8453) + + sourceBlockHash := common.HexToHash("0xSource") + destBlockHash := common.HexToHash("0xDest") + + sourceBlock := eth.BlockID{Number: 50, Hash: sourceBlockHash} + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: 500, // Source timestamp < dest timestamp (1000) + Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: sourceBlockHash, Number: 50, Time: 500}, + containsSeal: suptypes.BlockSeal{Number: 50, Timestamp: 500}, + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, Time: 1000}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + destChainID: destDB, + }, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{ + sourceChainID: sourceBlock, + destChainID: destBlock, + } + }, + validate: func(t *testing.T, result Result) { + require.True(t, result.IsValid()) + require.Empty(t, result.InvalidHeads) + }, + }, + { + name: "ValidBlocks/MessageAtExpiryBoundary", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + destChainID := eth.ChainIDFromUInt64(8453) + + sourceBlockHash := common.HexToHash("0xSource") + destBlockHash := common.HexToHash("0xDest") + + // Message is exactly at the expiry boundary (should pass) + execTimestamp := uint64(1000000) + initTimestamp := execTimestamp - ExpiryTime // Exactly at boundary + + sourceBlock := eth.BlockID{Number: 
50, Hash: sourceBlockHash} + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: initTimestamp, // Exactly at expiry boundary + Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: sourceBlockHash, Number: 50, Time: initTimestamp}, + containsSeal: suptypes.BlockSeal{Number: 50, Timestamp: initTimestamp}, + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, Time: execTimestamp}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + destChainID: destDB, + }, + } + + return interop, execTimestamp, map[eth.ChainID]eth.BlockID{ + sourceChainID: sourceBlock, + destChainID: destBlock, + } + }, + validate: func(t *testing.T, result Result) { + require.True(t, result.IsValid()) + require.Empty(t, result.InvalidHeads) + }, + }, + { + name: "ValidBlocks/UnregisteredChainsSkipped", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + registeredChain := eth.ChainIDFromUInt64(10) + unregisteredChain := eth.ChainIDFromUInt64(9999) + + mockDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: common.HexToHash("0x1"), Number: 100, Time: 1000}, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{registeredChain: mockDB}, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{ + registeredChain: {Number: 100, Hash: common.HexToHash("0x1")}, + unregisteredChain: {Number: 200, Hash: common.HexToHash("0x2")}, + } + }, + validate: func(t *testing.T, result Result) { + registeredChain := eth.ChainIDFromUInt64(10) + unregisteredChain := eth.ChainIDFromUInt64(9999) + require.True(t, result.IsValid()) + require.Contains(t, result.L2Heads, registeredChain) + require.NotContains(t, 
result.L2Heads, unregisteredChain) + }, + }, + // Invalid block cases + { + name: "InvalidBlocks/BlockHashMismatch", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + chainID := eth.ChainIDFromUInt64(10) + expectedBlock := eth.BlockID{Number: 100, Hash: common.HexToHash("0xExpected")} + + mockDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{ + Hash: common.HexToHash("0xActual"), // Different from expected + Number: 100, + Time: 1000, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{chainID: mockDB}, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{chainID: expectedBlock} + }, + validate: func(t *testing.T, result Result) { + chainID := eth.ChainIDFromUInt64(10) + expectedBlock := eth.BlockID{Number: 100, Hash: common.HexToHash("0xExpected")} + require.False(t, result.IsValid()) + require.Contains(t, result.InvalidHeads, chainID) + require.Equal(t, expectedBlock, result.InvalidHeads[chainID]) + }, + }, + { + name: "InvalidBlocks/InitiatingMessageNotFound", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + destChainID := eth.ChainIDFromUInt64(8453) + + destBlockHash := common.HexToHash("0xDest") + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: 500, + Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + containsErr: suptypes.ErrConflict, // Message not found + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, Time: 1000}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + destChainID: destDB, + }, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{destChainID: destBlock} + }, + validate: func(t 
*testing.T, result Result) { + destChainID := eth.ChainIDFromUInt64(8453) + require.False(t, result.IsValid()) + require.Contains(t, result.InvalidHeads, destChainID) + }, + }, + { + name: "InvalidBlocks/TimestampViolation", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + destChainID := eth.ChainIDFromUInt64(8453) + + destBlockHash := common.HexToHash("0xDest") + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: 1001, // Future timestamp - INVALID! + Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + containsSeal: suptypes.BlockSeal{Number: 50, Timestamp: 1001}, + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, Time: 1000}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + destChainID: destDB, + }, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{destChainID: destBlock} + }, + validate: func(t *testing.T, result Result) { + destChainID := eth.ChainIDFromUInt64(8453) + require.False(t, result.IsValid()) + require.Contains(t, result.InvalidHeads, destChainID) + }, + }, + { + name: "InvalidBlocks/UnknownSourceChain", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + unknownSourceChain := eth.ChainIDFromUInt64(9999) + destChainID := eth.ChainIDFromUInt64(8453) + + destBlockHash := common.HexToHash("0xDest") + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: unknownSourceChain, // Not registered + BlockNum: 50, + LogIdx: 0, + Timestamp: 500, + Checksum: suptypes.MessageChecksum{0x01}, + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, 
Time: 1000}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + destChainID: destDB, + // Note: unknownSourceChain NOT in logsDBs + }, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{destChainID: destBlock} + }, + validate: func(t *testing.T, result Result) { + destChainID := eth.ChainIDFromUInt64(8453) + require.False(t, result.IsValid()) + require.Contains(t, result.InvalidHeads, destChainID) + }, + }, + { + name: "InvalidBlocks/ExpiredMessage", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + destChainID := eth.ChainIDFromUInt64(8453) + + destBlockHash := common.HexToHash("0xDest") + // Executing block is at timestamp 1000000 (well after expiry) + execTimestamp := uint64(1000000) + // Initiating message timestamp is more than ExpiryTime (604800) before executing timestamp + initTimestamp := execTimestamp - ExpiryTime - 1 // 1 second past expiry + + destBlock := eth.BlockID{Number: 100, Hash: destBlockHash} + + execMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: initTimestamp, // Expired! 
+ Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + containsSeal: suptypes.BlockSeal{Number: 50, Timestamp: initTimestamp}, + } + + destDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: destBlockHash, Number: 100, Time: execTimestamp}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: execMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + destChainID: destDB, + }, + } + + return interop, execTimestamp, map[eth.ChainID]eth.BlockID{destChainID: destBlock} + }, + validate: func(t *testing.T, result Result) { + destChainID := eth.ChainIDFromUInt64(8453) + require.False(t, result.IsValid()) + require.Contains(t, result.InvalidHeads, destChainID) + }, + }, + { + name: "InvalidBlocks/MultipleChainsOneInvalid", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + sourceChainID := eth.ChainIDFromUInt64(10) + validChainID := eth.ChainIDFromUInt64(8453) + invalidChainID := eth.ChainIDFromUInt64(420) + + validBlockHash := common.HexToHash("0xValid") + invalidBlockHash := common.HexToHash("0xInvalid") + + validBlock := eth.BlockID{Number: 100, Hash: validBlockHash} + invalidBlock := eth.BlockID{Number: 200, Hash: invalidBlockHash} + + badExecMsg := &suptypes.ExecutingMessage{ + ChainID: sourceChainID, + BlockNum: 50, + LogIdx: 0, + Timestamp: 1001, // Future timestamp - INVALID + Checksum: suptypes.MessageChecksum{0x01}, + } + + sourceDB := &algoMockLogsDB{ + containsSeal: suptypes.BlockSeal{Number: 50, Timestamp: 1001}, + } + + validDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: validBlockHash, Number: 100, Time: 1000}, + openBlockExecMsg: nil, // No executing messages - valid + } + + invalidDB := &algoMockLogsDB{ + openBlockRef: eth.BlockRef{Hash: invalidBlockHash, Number: 200, Time: 1000}, + openBlockExecMsg: map[uint32]*suptypes.ExecutingMessage{ + 0: badExecMsg, + }, + } + + interop := &Interop{ + log: gethlog.New(), + 
logsDBs: map[eth.ChainID]LogsDB{ + sourceChainID: sourceDB, + validChainID: validDB, + invalidChainID: invalidDB, + }, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{ + validChainID: validBlock, + invalidChainID: invalidBlock, + } + }, + validate: func(t *testing.T, result Result) { + validChainID := eth.ChainIDFromUInt64(8453) + invalidChainID := eth.ChainIDFromUInt64(420) + require.False(t, result.IsValid()) + // Both chains in L2Heads + require.Contains(t, result.L2Heads, validChainID) + require.Contains(t, result.L2Heads, invalidChainID) + // Only invalid in InvalidHeads + require.NotContains(t, result.InvalidHeads, validChainID) + require.Contains(t, result.InvalidHeads, invalidChainID) + }, + }, + // Error cases + { + name: "Errors/OpenBlockError", + setup: func() (*Interop, uint64, map[eth.ChainID]eth.BlockID) { + chainID := eth.ChainIDFromUInt64(10) + block := eth.BlockID{Number: 100, Hash: common.HexToHash("0x123")} + + mockDB := &algoMockLogsDB{ + openBlockErr: errors.New("database error"), + } + + interop := &Interop{ + log: gethlog.New(), + logsDBs: map[eth.ChainID]LogsDB{chainID: mockDB}, + } + + return interop, 1000, map[eth.ChainID]eth.BlockID{chainID: block} + }, + expectError: true, + errorMsg: "database error", + validate: func(t *testing.T, result Result) { + require.True(t, result.IsEmpty()) + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + runVerifyInteropTest(t, tc) + }) + } +} + +// ============================================================================= +// Mock Types for Algorithm Tests +// ============================================================================= + +// algoMockLogsDB is a mock LogsDB for algorithm tests +type algoMockLogsDB struct { + openBlockRef eth.BlockRef + openBlockLogCnt uint32 + openBlockExecMsg map[uint32]*suptypes.ExecutingMessage + openBlockErr error + + firstSealedBlock suptypes.BlockSeal + firstSealedBlockErr error + + containsSeal suptypes.BlockSeal + 
containsErr error +} + +func (m *algoMockLogsDB) LatestSealedBlock() (eth.BlockID, bool) { return eth.BlockID{}, false } +func (m *algoMockLogsDB) FirstSealedBlock() (suptypes.BlockSeal, error) { + if m.firstSealedBlockErr != nil { + return suptypes.BlockSeal{}, m.firstSealedBlockErr + } + return m.firstSealedBlock, nil +} +func (m *algoMockLogsDB) FindSealedBlock(number uint64) (suptypes.BlockSeal, error) { + return suptypes.BlockSeal{}, nil +} +func (m *algoMockLogsDB) OpenBlock(blockNum uint64) (eth.BlockRef, uint32, map[uint32]*suptypes.ExecutingMessage, error) { + if m.openBlockErr != nil { + return eth.BlockRef{}, 0, nil, m.openBlockErr + } + return m.openBlockRef, m.openBlockLogCnt, m.openBlockExecMsg, nil +} +func (m *algoMockLogsDB) Contains(query suptypes.ContainsQuery) (suptypes.BlockSeal, error) { + if m.containsErr != nil { + return suptypes.BlockSeal{}, m.containsErr + } + return m.containsSeal, nil +} +func (m *algoMockLogsDB) AddLog(logHash common.Hash, parentBlock eth.BlockID, logIdx uint32, execMsg *suptypes.ExecutingMessage) error { + return nil +} +func (m *algoMockLogsDB) SealBlock(parentHash common.Hash, block eth.BlockID, timestamp uint64) error { + return nil +} +func (m *algoMockLogsDB) Rewind(inv reads.Invalidator, newHead eth.BlockID) error { return nil } +func (m *algoMockLogsDB) Clear(inv reads.Invalidator) error { return nil } +func (m *algoMockLogsDB) Close() error { return nil } + +var _ LogsDB = (*algoMockLogsDB)(nil) + +// testBlockInfo implements eth.BlockInfo for testing +type testBlockInfo struct { + hash common.Hash + parentHash common.Hash + number uint64 + timestamp uint64 +} + +func (m *testBlockInfo) Hash() common.Hash { return m.hash } +func (m *testBlockInfo) ParentHash() common.Hash { return m.parentHash } +func (m *testBlockInfo) Coinbase() common.Address { return common.Address{} } +func (m *testBlockInfo) Root() common.Hash { return common.Hash{} } +func (m *testBlockInfo) NumberU64() uint64 { return m.number } +func 
(m *testBlockInfo) Time() uint64 { return m.timestamp } +func (m *testBlockInfo) MixDigest() common.Hash { return common.Hash{} } +func (m *testBlockInfo) BaseFee() *big.Int { return big.NewInt(1) } +func (m *testBlockInfo) BlobBaseFee(chainConfig *params.ChainConfig) *big.Int { return big.NewInt(1) } +func (m *testBlockInfo) ExcessBlobGas() *uint64 { return nil } +func (m *testBlockInfo) ReceiptHash() common.Hash { return common.Hash{} } +func (m *testBlockInfo) GasUsed() uint64 { return 0 } +func (m *testBlockInfo) GasLimit() uint64 { return 30000000 } +func (m *testBlockInfo) BlobGasUsed() *uint64 { return nil } +func (m *testBlockInfo) ParentBeaconRoot() *common.Hash { return nil } +func (m *testBlockInfo) WithdrawalsRoot() *common.Hash { return nil } +func (m *testBlockInfo) HeaderRLP() ([]byte, error) { return nil, nil } +func (m *testBlockInfo) Header() *types.Header { return nil } +func (m *testBlockInfo) ID() eth.BlockID { return eth.BlockID{Hash: m.hash, Number: m.number} } + +var _ eth.BlockInfo = (*testBlockInfo)(nil) diff --git a/op-supernode/supernode/activity/interop/interop.go b/op-supernode/supernode/activity/interop/interop.go index e54b66bae1c..14526f5ee6f 100644 --- a/op-supernode/supernode/activity/interop/interop.go +++ b/op-supernode/supernode/activity/interop/interop.go @@ -5,6 +5,7 @@ import ( "errors" "fmt" "sync" + "sync/atomic" "time" "github.com/ethereum-optimism/optimism/op-service/eth" @@ -18,9 +19,10 @@ import ( // Compile-time interface conformance assertions. var ( - _ activity.RunnableActivity = (*Interop)(nil) - _ activity.VerificationActivity = (*Interop)(nil) - tickerPeriod = 500 * time.Millisecond + _ activity.RunnableActivity = (*Interop)(nil) + _ activity.VerificationActivity = (*Interop)(nil) + backoffPeriod = 1 * time.Second // backoff when chains aren't ready + errorBackoffPeriod = 2 * time.Second // backoff on errors ) // InteropActivationTimestampFlag is the CLI flag for the interop activation timestamp. 
@@ -39,8 +41,10 @@ type Interop struct { log log.Logger chains map[eth.ChainID]cc.ChainContainer activationTimestamp uint64 + dataDir string verifiedDB *VerifiedDB + logsDBs map[eth.ChainID]LogsDB mu sync.RWMutex ctx context.Context @@ -50,6 +54,11 @@ type Interop struct { currentL1 eth.BlockID verifyFn func(ts uint64, blocksAtTimestamp map[eth.ChainID]eth.BlockID) (Result, error) + + // pauseAtTimestamp is used for integration test control only. + // When non-zero, progressInterop will return early without processing + // if the next timestamp to process is >= this value. + pauseAtTimestamp atomic.Uint64 } func (i *Interop) Name() string { @@ -68,10 +77,29 @@ func New( log.Error("failed to open verified DB", "err", err) return nil } + + // Initialize logsDBs for each chain + logsDBs := make(map[eth.ChainID]LogsDB) + for chainID := range chains { + logsDB, err := openLogsDB(log, chainID, dataDir) + if err != nil { + log.Error("failed to open logs DB for chain", "chainID", chainID, "err", err) + // Clean up already created logsDBs + for _, db := range logsDBs { + _ = db.Close() + } + _ = verifiedDB.Close() + return nil + } + logsDBs[chainID] = logsDB + } + i := &Interop{ log: log, chains: chains, verifiedDB: verifiedDB, + logsDBs: logsDBs, + dataDir: dataDir, currentL1: eth.BlockID{}, activationTimestamp: activationTimestamp, } @@ -93,21 +121,23 @@ func (i *Interop) Start(ctx context.Context) error { i.started = true i.mu.Unlock() - // Periodically query each chain container for its current safe head and log it. 
- ticker := time.NewTicker(tickerPeriod) - defer ticker.Stop() - for { select { case <-i.ctx.Done(): return i.ctx.Err() - case <-ticker.C: - err := i.progressAndRecord() + default: + madeProgress, err := i.progressAndRecord() if err != nil { + // Error: back off before next attempt i.log.Error("failed to progress and record interop", "err", err) - time.Sleep(2 * time.Second) + time.Sleep(errorBackoffPeriod) continue } + if !madeProgress { + // Chains not ready, back off before next attempt + time.Sleep(backoffPeriod) + } + // Otherwise: immediately ready for next iteration (aggressive catch-up) } } } @@ -119,36 +149,61 @@ func (i *Interop) Stop(ctx context.Context) error { if i.cancel != nil { i.cancel() } + // Close all logsDBs + for chainID, db := range i.logsDBs { + if err := db.Close(); err != nil { + i.log.Error("failed to close logs DB", "chainID", chainID, "err", err) + } + } if i.verifiedDB != nil { return i.verifiedDB.Close() } return nil } -func (i *Interop) progressAndRecord() error { +// PauseAt sets a timestamp at which the interop activity should pause. +// When progressInterop encounters this timestamp or any later timestamp, it returns early without processing. +// Uses >= check so that if the activity is already beyond the pause point, it will still stop. +// This function is for integration test control only. +// Pass 0 to clear the pause (equivalent to calling Resume). +func (i *Interop) PauseAt(ts uint64) { + i.pauseAtTimestamp.Store(ts) + i.log.Info("interop pause set", "pauseAtTimestamp", ts) +} + +// Resume clears any pause timestamp, allowing normal processing to continue. +// This function is for integration test control only. +func (i *Interop) Resume() { + i.pauseAtTimestamp.Store(0) + i.log.Info("interop pause cleared") +} + +// progressAndRecord attempts to progress interop and record the result. +// Returns (madeProgress, error) where madeProgress indicates if we advanced the verified timestamp. 
+func (i *Interop) progressAndRecord() (bool, error) { // Check the L1s of each chain prior to performing interop localCurrentL1, err := i.collectCurrentL1() if err != nil { i.log.Error("failed to collect current L1", "err", err) - return err + return false, err } // Perform the interop evaluation result, err := i.progressInterop() if err != nil { i.log.Error("failed to progress interop", "err", err) - return err + return false, err } // Handle the result by committing verified results or invalidating blocks err = i.handleResult(result) if err != nil { i.log.Error("failed to handle result", "err", err) - return err + return false, err } // if the result is invalid, exit without updating the current L1s if !result.IsEmpty() && !result.IsValid() { i.log.Warn("result is invalid, skipping current L1 update", "results", result) - return nil + return false, nil } // Once interop is complete and recorded, update the current L1s @@ -157,8 +212,9 @@ func (i *Interop) progressAndRecord() error { // - if interop ran but did not advance the verified timestamp, the CurrentL1 values collected are used directly // - if interop ran and advanced the verified timestamp, the CurrentL1 is the L1 head at the verified timestamp // this is because the individual chains may advance their CurrentL1, and if progress is being made, we might not be done using the collected L1s. 
+ verifiedAdvanced := !result.IsEmpty() i.mu.Lock() - if !result.IsEmpty() { + if verifiedAdvanced { // the new CurrentL1 is the L1 head at the verified timestamp i.currentL1 = result.L1Head } else { @@ -166,7 +222,7 @@ func (i *Interop) progressAndRecord() error { i.currentL1 = localCurrentL1 } i.mu.Unlock() - return nil + return verifiedAdvanced, nil } // collectCurrentL1 collects the current L1 head of all chains, @@ -207,6 +263,13 @@ func (i *Interop) progressInterop() (Result, error) { ts = lastTimestamp + 1 } + // Check if we're paused at this timestamp (integration test control only) + // Uses >= so that if the activity is already beyond the pause point, it will still stop. + if pauseTs := i.pauseAtTimestamp.Load(); pauseTs != 0 && ts >= pauseTs { + i.log.Info("interop paused at timestamp", "timestamp", ts, "pauseTs", pauseTs) + return Result{}, nil + } + // 1: check if all chains are ready to process the next timestamp. // if all chains are ready, we can proceed to download the logs blocksAtTimestamp, err := i.checkChainsReady(ts) @@ -261,6 +324,17 @@ func (i *Interop) handleResult(result Result) error { return nil } +// invalidateBlock notifies the chain container to add the block to the denylist +// and potentially rewind if the chain is currently using that block. +func (i *Interop) invalidateBlock(chainID eth.ChainID, blockID eth.BlockID) error { + chain, ok := i.chains[chainID] + if !ok { + return fmt.Errorf("chain %s not found", chainID) + } + _, err := chain.InvalidateBlock(i.ctx, blockID.Number, blockID.Hash) + return err +} + // checkChainsReady checks if all chains are ready to process the next timestamp. // Queries all chains in parallel for better performance. 
func (i *Interop) checkChainsReady(ts uint64) (map[eth.ChainID]eth.BlockID, error) { @@ -297,26 +371,6 @@ func (i *Interop) checkChainsReady(ts uint64) (map[eth.ChainID]eth.BlockID, erro return blocksAtTimestamp, nil } -// TODO(#18743): Interop Algorithm -func (i *Interop) loadLogs(ts uint64) error { - return nil -} - -// TODO(#18743): Interop Algorithm -func (i *Interop) verifyInteropMessages(ts uint64, blocksAtTimestamp map[eth.ChainID]eth.BlockID) (Result, error) { - result := Result{Timestamp: ts, L2Heads: make(map[eth.ChainID]eth.BlockID)} - for _, chain := range i.chains { - blockID := blocksAtTimestamp[chain.ID()] - result.L2Heads[chain.ID()] = blockID - } - return result, nil -} - -// TODO(#18944): Invalidate Block -func (i *Interop) invalidateBlock(chainID eth.ChainID, blockID eth.BlockID) error { - return nil -} - func (i *Interop) commitVerifiedResult(timestamp uint64, verifiedResult VerifiedResult) error { return i.verifiedDB.Commit(verifiedResult) } @@ -341,3 +395,88 @@ func (i *Interop) VerifiedAtTimestamp(ts uint64) (bool, error) { } return i.verifiedDB.Has(ts) } + +// Reset is called when a chain container resets to a given timestamp. +// It prunes the logsDB and verifiedDB for that chain at and after the timestamp. +func (i *Interop) Reset(chainID eth.ChainID, timestamp uint64) { + i.mu.Lock() + defer i.mu.Unlock() + + i.log.Warn("Reset called", + "chainID", chainID, + "timestamp", timestamp, + ) + + chain, chainOk := i.chains[chainID] + if !chainOk { + i.log.Error("chain not found for reset", "chainID", chainID) + return + } + db, dbOk := i.logsDBs[chainID] + if !dbOk { + i.log.Error("logsDB not found for reset", "chainID", chainID) + return + } + + i.resetLogsDB(chainID, chain, db, timestamp) + i.resetVerifiedDB(timestamp) + + // Reset the currentL1 to force re-evaluation + i.currentL1 = eth.BlockID{} +} + +// resetLogsDB rewinds or clears the logsDB for a chain to the block before the given timestamp. 
+func (i *Interop) resetLogsDB(chainID eth.ChainID, chain cc.ChainContainer, db LogsDB, timestamp uint64) { + blockTime := chain.BlockTime() + targetTs := timestamp - blockTime + targetBlock, err := chain.BlockAtTimestamp(i.ctx, targetTs, eth.Safe) + if err != nil { + // If we can't find the target block, clear the entire logsDB + i.log.Warn("failed to get block at timestamp, clearing logsDB", "chainID", chainID, "timestamp", targetTs, "err", err) + if clearErr := db.Clear(&noopInvalidator{}); clearErr != nil { + i.log.Error("failed to clear logsDB", "chainID", chainID, "err", clearErr) + } + return + } + + // Check the first block in the logsDB to decide whether to clear or rewind + firstBlock, err := db.FirstSealedBlock() + if err != nil { + i.log.Error("failed to get first block", "chainID", chainID, "err", err) + return + } + + if firstBlock.Number > targetBlock.Number { + i.log.Info("logsDB is to be cleared", "chainID", chainID) + if err := db.Clear(&noopInvalidator{}); err != nil { + i.log.Error("failed to clear logsDB", "chainID", chainID, "err", err) + } + } else { + i.log.Info("logsDB is to be rewound", "chainID", chainID, "targetBlock", targetBlock.Number, "firstBlock", firstBlock.Number) + if err := db.Rewind(&noopInvalidator{}, targetBlock.ID()); err != nil { + i.log.Error("failed to rewind logsDB", "chainID", chainID, "err", err) + } + } +} + +// resetVerifiedDB removes any verified results at or after the given timestamp. +func (i *Interop) resetVerifiedDB(timestamp uint64) { + if i.verifiedDB == nil { + return + } + + deleted, err := i.verifiedDB.Rewind(timestamp) + if err != nil { + i.log.Error("failed to rewind verifiedDB", + "timestamp", timestamp, + "err", err, + ) + } + if deleted { + // This is unexpected - we shouldn't have verified results at timestamps + // that are being reset. Log an error for visibility. 
+ i.log.Error("UNEXPECTED: verified results were deleted on reset", + "timestamp", timestamp, + ) + } +} diff --git a/op-supernode/supernode/activity/interop/interop_test.go b/op-supernode/supernode/activity/interop/interop_test.go index 2c51af6bfdf..3b2b288047a 100644 --- a/op-supernode/supernode/activity/interop/interop_test.go +++ b/op-supernode/supernode/activity/interop/interop_test.go @@ -3,6 +3,7 @@ package interop import ( "context" "errors" + "math/big" "sync" "testing" "time" @@ -10,976 +11,1336 @@ import ( "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-supernode/supernode/activity" cc "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/reads" + suptypes "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" + "github.com/ethereum/go-ethereum/params" "github.com/stretchr/testify/require" ) -// mockChainContainer implements cc.ChainContainer for testing -type mockChainContainer struct { - id eth.ChainID - - currentL1 eth.BlockRef - currentL1Err error - - blockAtTimestamp eth.L2BlockRef - blockAtTimestampErr error +// ============================================================================= +// Test Harness +// ============================================================================= - mu sync.Mutex +// interopTestHarness provides a builder-pattern test setup for Interop tests. +// It reduces boilerplate by handling common setup: temp directories, mock chains, +// interop creation, context assignment, and cleanup. 
+type interopTestHarness struct { + t *testing.T + interop *Interop + mocks map[eth.ChainID]*mockChainContainer + activationTime uint64 + dataDir string + skipBuild bool // for tests that need custom construction } -func newMockChainContainer(id uint64) *mockChainContainer { - return &mockChainContainer{ - id: eth.ChainIDFromUInt64(id), +// newInteropTestHarness creates a new test harness with sensible defaults. +func newInteropTestHarness(t *testing.T) *interopTestHarness { + t.Helper() + t.Parallel() + return &interopTestHarness{ + t: t, + mocks: make(map[eth.ChainID]*mockChainContainer), + activationTime: 1000, + dataDir: t.TempDir(), } } -func (m *mockChainContainer) ID() eth.ChainID { return m.id } - -func (m *mockChainContainer) Start(ctx context.Context) error { return nil } -func (m *mockChainContainer) Stop(ctx context.Context) error { return nil } -func (m *mockChainContainer) Pause(ctx context.Context) error { return nil } -func (m *mockChainContainer) Resume(ctx context.Context) error { return nil } - -func (m *mockChainContainer) RegisterVerifier(v activity.VerificationActivity) { -} -func (m *mockChainContainer) BlockAtTimestamp(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) { - m.mu.Lock() - defer m.mu.Unlock() - return m.blockAtTimestamp, m.blockAtTimestampErr +// WithActivation sets the interop activation timestamp. +func (h *interopTestHarness) WithActivation(ts uint64) *interopTestHarness { + h.activationTime = ts + return h } -func (m *mockChainContainer) VerifiedAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { - return eth.BlockID{}, eth.BlockID{}, nil -} -func (m *mockChainContainer) L1ForL2(ctx context.Context, l2Block eth.BlockID) (eth.BlockID, error) { - return eth.BlockID{}, nil +// WithDataDir sets a custom data directory (useful for error testing). 
+func (h *interopTestHarness) WithDataDir(dir string) *interopTestHarness { + h.dataDir = dir + return h } -func (m *mockChainContainer) OptimisticAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { - return eth.BlockID{}, eth.BlockID{}, nil -} -func (m *mockChainContainer) OutputRootAtL2BlockNumber(ctx context.Context, l2BlockNum uint64) (eth.Bytes32, error) { - return eth.Bytes32{}, nil + +// WithChain adds a mock chain container with optional configuration. +func (h *interopTestHarness) WithChain(id uint64, configure func(*mockChainContainer)) *interopTestHarness { + mock := newMockChainContainer(id) + if configure != nil { + configure(mock) + } + h.mocks[mock.id] = mock + return h } -func (m *mockChainContainer) OptimisticOutputAtTimestamp(ctx context.Context, ts uint64) (*eth.OutputResponse, error) { - return nil, nil + +// SkipBuild marks that Build() should not create an Interop instance. +// Useful for tests that need to test New() directly. +func (h *interopTestHarness) SkipBuild() *interopTestHarness { + h.skipBuild = true + return h } -func (m *mockChainContainer) SyncStatus(ctx context.Context) (*eth.SyncStatus, error) { - m.mu.Lock() - defer m.mu.Unlock() - if m.currentL1Err != nil { - return nil, m.currentL1Err + +// Build creates the Interop instance from configured mocks. +// Sets up context and registers cleanup. 
+func (h *interopTestHarness) Build() *interopTestHarness { + if h.skipBuild { + return h } - return ð.SyncStatus{CurrentL1: m.currentL1}, nil -} -func (m *mockChainContainer) RewindEngine(ctx context.Context, timestamp uint64) error { - return nil + chains := make(map[eth.ChainID]cc.ChainContainer) + for id, mock := range h.mocks { + chains[id] = mock + } + h.interop = New(testLogger(), h.activationTime, chains, h.dataDir) + if h.interop != nil { + h.interop.ctx = context.Background() + h.t.Cleanup(func() { _ = h.interop.Stop(context.Background()) }) + } + return h } -var _ cc.ChainContainer = (*mockChainContainer)(nil) +// Chains returns the map of chain containers for use with New(). +func (h *interopTestHarness) Chains() map[eth.ChainID]cc.ChainContainer { + chains := make(map[eth.ChainID]cc.ChainContainer) + for id, mock := range h.mocks { + chains[id] = mock + } + return chains +} -// Helper to create a test logger -func testLogger() gethlog.Logger { - return gethlog.New() +// Mock returns the mock for a given chain ID. 
+func (h *interopTestHarness) Mock(id uint64) *mockChainContainer { + return h.mocks[eth.ChainIDFromUInt64(id)] } // ============================================================================= -// Constructor Tests +// TestNew // ============================================================================= -func TestNew_ValidInputs(t *testing.T) { +func TestNew(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - chains := map[eth.ChainID]cc.ChainContainer{ - eth.ChainIDFromUInt64(10): newMockChainContainer(10), + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "valid inputs initializes all components", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).WithChain(8453, nil).SkipBuild() + }, + run: func(t *testing.T, h *interopTestHarness) { + interop := New(testLogger(), h.activationTime, h.Chains(), h.dataDir) + require.NotNil(t, interop) + t.Cleanup(func() { _ = interop.Stop(context.Background()) }) + + require.Equal(t, uint64(1000), interop.activationTimestamp) + require.NotNil(t, interop.verifiedDB) + require.Len(t, interop.chains, 2) + require.Len(t, interop.logsDBs, 2) + require.NotNil(t, interop.verifyFn) + + for chainID := range h.Chains() { + require.Contains(t, interop.logsDBs, chainID) + require.NotNil(t, interop.logsDBs[chainID]) + } + }, + }, + { + name: "invalid dataDir returns nil", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithDataDir("/nonexistent/path").SkipBuild() + }, + run: func(t *testing.T, h *interopTestHarness) { + interop := New(testLogger(), h.activationTime, h.Chains(), h.dataDir) + require.Nil(t, interop) + }, + }, } - interop := New(testLogger(), 1000, chains, dataDir) - - require.NotNil(t, interop) - require.Equal(t, uint64(1000), interop.activationTimestamp) - require.NotNil(t, interop.verifiedDB) - require.Equal(t, eth.BlockID{}, interop.currentL1) // starts 
empty - require.Len(t, interop.chains, 1) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } } -func TestNew_InvalidDataDir(t *testing.T) { +// ============================================================================= +// TestStartStop +// ============================================================================= + +func TestStartStop(t *testing.T) { t.Parallel() - // Use a path that can't be written to - invalidDir := "/nonexistent/path/that/cannot/exist/db" - chains := map[eth.ChainID]cc.ChainContainer{} + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "Start blocks until context cancelled", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + m.blockAtTimestamp = eth.L2BlockRef{Number: 50} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + ctx, cancel := context.WithCancel(context.Background()) + done := make(chan error, 1) + go func() { done <- h.interop.Start(ctx) }() + + require.Eventually(t, func() bool { + h.interop.mu.RLock() + defer h.interop.mu.RUnlock() + return h.interop.started + }, 5*time.Second, 100*time.Millisecond) + + cancel() + + var err error + require.Eventually(t, func() bool { + select { + case err = <-done: + return true + default: + return false + } + }, 5*time.Second, 100*time.Millisecond) + require.ErrorIs(t, err, context.Canceled) + }, + }, + { + name: "double Start blocked", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + ctx, cancel := context.WithCancel(context.Background()) + 
defer cancel() + + go func() { _ = h.interop.Start(ctx) }() + + require.Eventually(t, func() bool { + h.interop.mu.RLock() + defer h.interop.mu.RUnlock() + return h.interop.started + }, 5*time.Second, 100*time.Millisecond) - interop := New(testLogger(), 1000, chains, invalidDir) + ctx2, cancel2 := context.WithTimeout(context.Background(), 500*time.Millisecond) + defer cancel2() + + err := h.interop.Start(ctx2) + require.ErrorIs(t, err, context.DeadlineExceeded) + }, + }, + { + name: "Stop cancels running Start and closes DB", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + m.blockAtTimestampErr = ethereum.NotFound + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + done := make(chan error, 1) + go func() { done <- h.interop.Start(context.Background()) }() + + require.Eventually(t, func() bool { + h.interop.mu.RLock() + defer h.interop.mu.RUnlock() + return h.interop.started + }, 5*time.Second, 100*time.Millisecond) + + err := h.interop.Stop(context.Background()) + require.NoError(t, err) + + require.Eventually(t, func() bool { + select { + case <-done: + return true + default: + return false + } + }, 5*time.Second, 100*time.Millisecond) + + // Verify DB is closed + _, err = h.interop.verifiedDB.Has(100) + require.Error(t, err) + }, + }, + } - // New returns nil when DB fails to open - require.Nil(t, interop) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } } -func TestNew_EmptyChains(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() +// ============================================================================= +// TestCollectCurrentL1 +// ============================================================================= - chains := map[eth.ChainID]cc.ChainContainer{} +func TestCollectCurrentL1(t *testing.T) { + 
t.Parallel() - interop := New(testLogger(), 0, chains, dataDir) + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + assert func(t *testing.T, l1 eth.BlockID, err error) + }{ + { + name: "returns minimum L1 across multiple chains", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x2")} + }).WithChain(8453, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} // minimum + }).Build() + }, + assert: func(t *testing.T, l1 eth.BlockID, err error) { + require.NoError(t, err) + require.Equal(t, uint64(100), l1.Number) + require.Equal(t, common.HexToHash("0x1"), l1.Hash) + }, + }, + { + name: "single chain returns its L1", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 500, Hash: common.HexToHash("0x5")} + }).Build() + }, + assert: func(t *testing.T, l1 eth.BlockID, err error) { + require.NoError(t, err) + require.Equal(t, uint64(500), l1.Number) + }, + }, + { + name: "chain error propagated", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1Err = errors.New("chain not synced") + }).Build() + }, + assert: func(t *testing.T, l1 eth.BlockID, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "not ready") + require.Equal(t, eth.BlockID{}, l1) + }, + }, + } - require.NotNil(t, interop) - require.Empty(t, interop.chains) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + l1, err := h.interop.collectCurrentL1() + tc.assert(t, l1, err) + }) + } } -func TestNew_MultipleChains(t *testing.T) { +// ============================================================================= +// TestCheckChainsReady +// 
============================================================================= + +func TestCheckChainsReady(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - chains := map[eth.ChainID]cc.ChainContainer{ - eth.ChainIDFromUInt64(10): newMockChainContainer(10), - eth.ChainIDFromUInt64(8453): newMockChainContainer(8453), - eth.ChainIDFromUInt64(420): newMockChainContainer(420), + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + assert func(t *testing.T, h *interopTestHarness, blocks map[eth.ChainID]eth.BlockID, err error) + }{ + { + name: "all chains ready returns blocks", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + }).WithChain(8453, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 200, Hash: common.HexToHash("0x2")} + }).Build() + }, + assert: func(t *testing.T, h *interopTestHarness, blocks map[eth.ChainID]eth.BlockID, err error) { + require.NoError(t, err) + require.Len(t, blocks, 2) + require.NotEqual(t, common.Hash{}, blocks[h.Mock(10).id].Hash) + require.NotEqual(t, common.Hash{}, blocks[h.Mock(8453).id].Hash) + }, + }, + { + name: "one chain not ready returns error", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 100} + }).WithChain(8453, func(m *mockChainContainer) { + m.blockAtTimestampErr = ethereum.NotFound + }).Build() + }, + assert: func(t *testing.T, h *interopTestHarness, blocks map[eth.ChainID]eth.BlockID, err error) { + require.Error(t, err) + require.Nil(t, blocks) + }, + }, + { + name: "parallel execution works", + setup: func(h *interopTestHarness) *interopTestHarness { + for i := 0; i < 5; i++ { + idx := i // capture loop var + h.WithChain(uint64(10+idx), func(m *mockChainContainer) { + m.blockAtTimestamp = 
eth.L2BlockRef{Number: uint64(100 + idx)} + }) + } + return h.Build() + }, + assert: func(t *testing.T, h *interopTestHarness, blocks map[eth.ChainID]eth.BlockID, err error) { + require.NoError(t, err) + require.Len(t, blocks, 5) + }, + }, } - interop := New(testLogger(), 500, chains, dataDir) - - require.NotNil(t, interop) - require.Len(t, interop.chains, 3) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + blocks, err := h.interop.checkChainsReady(1000) + tc.assert(t, h, blocks, err) + }) + } } // ============================================================================= -// Lifecycle Tests +// TestProgressInterop // ============================================================================= -func TestStart_BlocksUntilContextCanceled(t *testing.T) { +func TestProgressInterop(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - mock.blockAtTimestamp = eth.L2BlockRef{Number: 50} + // Default verifyFn that passes through + passThroughVerifyFn := func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{Timestamp: ts, L2Heads: blocks}, nil + } - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + verifyFn func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) + assert func(t *testing.T, result Result, err error) + run func(t *testing.T, h *interopTestHarness) // override for complex cases + }{ + { + name: "not initialized uses activation timestamp", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithActivation(5000).WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + 
}).Build() + }, + verifyFn: passThroughVerifyFn, + assert: func(t *testing.T, result Result, err error) { + require.NoError(t, err) + require.Equal(t, uint64(5000), result.Timestamp) + }, + }, + { + name: "initialized uses next timestamp", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + h.interop.verifyFn = passThroughVerifyFn + + // First progress + result1, err := h.interop.progressInterop() + require.NoError(t, err) + require.Equal(t, uint64(1000), result1.Timestamp) + + // Commit + err = h.interop.handleResult(result1) + require.NoError(t, err) + + // Second progress should use next timestamp + result2, err := h.interop.progressInterop() + require.NoError(t, err) + require.Equal(t, uint64(1001), result2.Timestamp) + }, + }, + { + name: "chains not ready returns empty result", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestampErr = ethereum.NotFound + }).Build() + }, + assert: func(t *testing.T, result Result, err error) { + require.NoError(t, err) + require.True(t, result.IsEmpty()) + }, + }, + { + name: "chain error propagated", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestampErr = errors.New("internal error") + }).Build() + }, + assert: func(t *testing.T, result Result, err error) { + require.Error(t, err) + require.True(t, result.IsEmpty()) + }, + }, + { + name: "verifyFn error propagated", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithActivation(100).WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 1000, Hash: common.HexToHash("0xL1")} + m.blockAtTimestamp = eth.L2BlockRef{Number: 500, Hash: common.HexToHash("0xL2")} 
+ }).Build() + }, + verifyFn: func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{}, errors.New("verification failed") + }, + assert: func(t *testing.T, result Result, err error) { + require.Error(t, err) + require.Contains(t, err.Error(), "verification failed") + require.True(t, result.IsEmpty()) + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + if tc.run != nil { + tc.run(t, h) + return + } + if tc.verifyFn != nil { + h.interop.verifyFn = tc.verifyFn + } + result, err := h.interop.progressInterop() + tc.assert(t, result, err) + }) + } +} + +// ============================================================================= +// TestVerifiedAtTimestamp +// ============================================================================= - ctx, cancel := context.WithCancel(context.Background()) +func TestVerifiedAtTimestamp(t *testing.T) { + t.Parallel() - done := make(chan error, 1) - go func() { - done <- interop.Start(ctx) - }() + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "before activation always verified", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + verified, err := h.interop.VerifiedAtTimestamp(999) + require.NoError(t, err) + require.True(t, verified) + + verified, err = h.interop.VerifiedAtTimestamp(0) + require.NoError(t, err) + require.True(t, verified) + }, + }, + { + name: "at/after activation not verified until committed", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + verified, err := h.interop.VerifiedAtTimestamp(1000) + require.NoError(t, err) + require.False(t, verified) + + verified, err = h.interop.VerifiedAtTimestamp(9999) + require.NoError(t, err) + 
require.False(t, verified) + }, + }, + { + name: "committed timestamp verified", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 100} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + h.interop.verifyFn = func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{Timestamp: ts, L2Heads: blocks}, nil + } - // Wait for it to start the loop - require.Eventually(t, func() bool { - interop.mu.RLock() - defer interop.mu.RUnlock() - return interop.started - }, 5*time.Second, 100*time.Millisecond, "Start should mark as started") + result, err := h.interop.progressInterop() + require.NoError(t, err) - // Cancel and verify it exits - cancel() + err = h.interop.handleResult(result) + require.NoError(t, err) - var err error - require.Eventually(t, func() bool { - select { - case err = <-done: - return true - default: - return false - } - }, 5*time.Second, 100*time.Millisecond, "Start should exit after context cancellation") + verified, err := h.interop.VerifiedAtTimestamp(1000) + require.NoError(t, err) + require.True(t, verified) + }, + }, + } - require.ErrorIs(t, err, context.Canceled) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } } -func TestStart_AlreadyStarted(t *testing.T) { +// ============================================================================= +// TestHandleResult +// ============================================================================= + +func TestHandleResult(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "empty result is no-op", + 
setup: func(h *interopTestHarness) *interopTestHarness { + return h.Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + err := h.interop.handleResult(Result{}) + require.NoError(t, err) - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) + has, err := h.interop.verifiedDB.Has(0) + require.NoError(t, err) + require.False(t, has) + }, + }, + { + name: "valid result commits to DB with correct data", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + validResult := Result{ + Timestamp: 1000, + L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, + L2Heads: map[eth.ChainID]eth.BlockID{ + mock.id: {Number: 500, Hash: common.HexToHash("0xL2")}, + }, + } + + err := h.interop.handleResult(validResult) + require.NoError(t, err) + + has, err := h.interop.verifiedDB.Has(1000) + require.NoError(t, err) + require.True(t, has) + + retrieved, err := h.interop.verifiedDB.Get(1000) + require.NoError(t, err) + require.Equal(t, validResult.Timestamp, retrieved.Timestamp) + require.Equal(t, validResult.L1Head, retrieved.L1Head) + require.Equal(t, validResult.L2Heads[mock.id], retrieved.L2Heads[mock.id]) + }, + }, + { + name: "invalid result does not commit", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + invalidResult := Result{ + Timestamp: 1000, + L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, + L2Heads: map[eth.ChainID]eth.BlockID{ + mock.id: {Number: 500, Hash: common.HexToHash("0xL2")}, + }, + InvalidHeads: map[eth.ChainID]eth.BlockID{ + mock.id: {Number: 500, Hash: common.HexToHash("0xBAD")}, + }, + } + + err := h.interop.handleResult(invalidResult) + require.NoError(t, err) + + has, err := 
h.interop.verifiedDB.Has(1000) + require.NoError(t, err) + require.False(t, has) + }, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } +} - ctx, cancel := context.WithCancel(context.Background()) - defer cancel() +// ============================================================================= +// TestInvalidateBlock +// ============================================================================= - // Start first instance - go func() { - _ = interop.Start(ctx) - }() +// TestInvalidateBlock verifies the invalidateBlock method correctly calls +// ChainContainer.InvalidateBlock with the right parameters and handles errors. +func TestInvalidateBlock(t *testing.T) { + t.Parallel() - // Wait for it to mark as started - require.Eventually(t, func() bool { - interop.mu.RLock() - defer interop.mu.RUnlock() - return interop.started - }, 5*time.Second, 100*time.Millisecond, "Start should mark as started") + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "calls chain.InvalidateBlock with correct args", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + blockID := eth.BlockID{Number: 500, Hash: common.HexToHash("0xBAD")} + err := h.interop.invalidateBlock(mock.id, blockID) + require.NoError(t, err) + + require.Len(t, mock.invalidateBlockCalls, 1) + require.Equal(t, uint64(500), mock.invalidateBlockCalls[0].height) + require.Equal(t, common.HexToHash("0xBAD"), mock.invalidateBlockCalls[0].payloadHash) + }, + }, + { + name: "returns error when chain not found", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + unknownChain := 
eth.ChainIDFromUInt64(999) + blockID := eth.BlockID{Number: 500, Hash: common.HexToHash("0xBAD")} + err := h.interop.invalidateBlock(unknownChain, blockID) + + require.Error(t, err) + require.Contains(t, err.Error(), "not found") + require.Len(t, mock.invalidateBlockCalls, 0) + }, + }, + { + name: "returns error when chain.InvalidateBlock fails", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.invalidateBlockErr = errors.New("engine failure") + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + blockID := eth.BlockID{Number: 500, Hash: common.HexToHash("0xBAD")} + err := h.interop.invalidateBlock(mock.id, blockID) - // Try to start again - should block on context and return deadline exceeded - ctx2, cancel2 := context.WithTimeout(context.Background(), 500*time.Millisecond) - defer cancel2() + require.Error(t, err) + require.Contains(t, err.Error(), "engine failure") + }, + }, + { + name: "handleResult calls invalidateBlock for each invalid head", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, nil).WithChain(8453, nil).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock1 := h.Mock(10) + mock2 := h.Mock(8453) + + invalidResult := Result{ + Timestamp: 1000, + L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, + L2Heads: map[eth.ChainID]eth.BlockID{ + mock1.id: {Number: 500, Hash: common.HexToHash("0xL2-1")}, + mock2.id: {Number: 600, Hash: common.HexToHash("0xL2-2")}, + }, + InvalidHeads: map[eth.ChainID]eth.BlockID{ + mock1.id: {Number: 500, Hash: common.HexToHash("0xBAD1")}, + mock2.id: {Number: 600, Hash: common.HexToHash("0xBAD2")}, + }, + } + + err := h.interop.handleResult(invalidResult) + require.NoError(t, err) + + require.Len(t, mock1.invalidateBlockCalls, 1) + require.Equal(t, uint64(500), mock1.invalidateBlockCalls[0].height) + require.Equal(t, common.HexToHash("0xBAD1"), 
mock1.invalidateBlockCalls[0].payloadHash) + + require.Len(t, mock2.invalidateBlockCalls, 1) + require.Equal(t, uint64(600), mock2.invalidateBlockCalls[0].height) + require.Equal(t, common.HexToHash("0xBAD2"), mock2.invalidateBlockCalls[0].payloadHash) + }, + }, + } - err := interop.Start(ctx2) - require.ErrorIs(t, err, context.DeadlineExceeded) + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } } -func TestStop_ClosesVerifiedDB(t *testing.T) { +// ============================================================================= +// TestProgressAndRecord +// ============================================================================= + +func TestProgressAndRecord(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) + tests := []struct { + name string + setup func(h *interopTestHarness) *interopTestHarness + run func(t *testing.T, h *interopTestHarness) + }{ + { + name: "empty result sets L1 to collected minimum", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x2")} + m.blockAtTimestampErr = ethereum.NotFound + }).WithChain(8453, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} + m.blockAtTimestampErr = ethereum.NotFound + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + require.Equal(t, eth.BlockID{}, h.interop.currentL1) - err := interop.Stop(context.Background()) - require.NoError(t, err) + madeProgress, err := h.interop.progressAndRecord() + require.NoError(t, err) + require.False(t, madeProgress, "empty result should not advance verified timestamp") + + require.Equal(t, uint64(100), h.interop.currentL1.Number) + require.Equal(t, 
common.HexToHash("0x1"), h.interop.currentL1.Hash) + }, + }, + { + name: "valid result sets L1 to result L1Head", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x200")} + m.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0xL2")} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + expectedL1Head := eth.BlockID{Number: 150, Hash: common.HexToHash("0xL1Result")} + h.interop.verifyFn = func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{Timestamp: ts, L1Head: expectedL1Head, L2Heads: blocks}, nil + } + + madeProgress, err := h.interop.progressAndRecord() + require.NoError(t, err) + require.True(t, madeProgress, "valid result should advance verified timestamp") + + require.Equal(t, expectedL1Head.Number, h.interop.currentL1.Number) + require.Equal(t, expectedL1Head.Hash, h.interop.currentL1.Hash) + }, + }, + { + name: "invalid result does not update L1", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x200")} + m.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0xL2")} + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + mock := h.Mock(10) + initialL1 := eth.BlockID{Number: 50, Hash: common.HexToHash("0x50")} + h.interop.currentL1 = initialL1 + + h.interop.verifyFn = func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{ + Timestamp: ts, + L1Head: eth.BlockID{Number: 999, Hash: common.HexToHash("0xShouldNotBeUsed")}, + L2Heads: blocks, + InvalidHeads: map[eth.ChainID]eth.BlockID{mock.id: {Number: 100}}, + }, nil + } + + madeProgress, err := h.interop.progressAndRecord() + require.NoError(t, err) + require.False(t, madeProgress, "invalid result should not advance 
verified timestamp") + + require.Equal(t, initialL1.Number, h.interop.currentL1.Number) + require.Equal(t, initialL1.Hash, h.interop.currentL1.Hash) + }, + }, + { + name: "errors propagated", + setup: func(h *interopTestHarness) *interopTestHarness { + return h.WithChain(10, func(m *mockChainContainer) { + m.currentL1Err = errors.New("L1 sync error") + }).Build() + }, + run: func(t *testing.T, h *interopTestHarness) { + madeProgress, err := h.interop.progressAndRecord() + require.Error(t, err) + require.False(t, madeProgress, "error should not advance verified timestamp") + }, + }, + } - // Verify DB is closed by trying to use it (should fail) - _, err = interop.verifiedDB.Has(100) - require.Error(t, err) // LevelDB returns error on closed DB + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + tc.setup(h) + tc.run(t, h) + }) + } } -func TestStop_CancelsRunningContext(t *testing.T) { +// ============================================================================= +// TestInterop_FullCycle +// ============================================================================= + +func TestInterop_FullCycle(t *testing.T) { t.Parallel() dataDir := t.TempDir() mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - mock.blockAtTimestampErr = ethereum.NotFound // Keep it in "not ready" state + mock.currentL1 = eth.BlockRef{Number: 1000, Hash: common.HexToHash("0xL1")} + mock.blockAtTimestamp = eth.L2BlockRef{Number: 500, Hash: common.HexToHash("0xL2")} chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) + interop := New(testLogger(), 100, chains, dataDir) require.NotNil(t, interop) + interop.ctx = context.Background() - ctx := context.Background() + // Verify logsDB is empty initially + _, hasBlocks := interop.logsDBs[mock.id].LatestSealedBlock() + require.False(t, hasBlocks) - done := make(chan error, 1) - go 
func() { - done <- interop.Start(ctx) - }() + // Stub verifyFn + interop.verifyFn = func(ts uint64, blocks map[eth.ChainID]eth.BlockID) (Result, error) { + return Result{Timestamp: ts, L2Heads: blocks}, nil + } - // Wait for it to start - require.Eventually(t, func() bool { - interop.mu.RLock() - defer interop.mu.RUnlock() - return interop.started - }, 5*time.Second, 100*time.Millisecond, "Start should mark as started") + // Run 3 cycles + for i := 0; i < 3; i++ { + l1, err := interop.collectCurrentL1() + require.NoError(t, err) + require.Equal(t, uint64(1000), l1.Number) - // Stop should cancel the internal context - err := interop.Stop(context.Background()) - require.NoError(t, err) + result, err := interop.progressInterop() + require.NoError(t, err) + require.False(t, result.IsEmpty()) - // Verify Start exited - require.Eventually(t, func() bool { - select { - case <-done: - return true - default: - return false - } - }, 5*time.Second, 100*time.Millisecond, "Start should exit after Stop is called") -} + err = interop.handleResult(result) + require.NoError(t, err) + } -func TestStop_NilCancel(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() + // Verify timestamps committed with correct L2Heads + for ts := uint64(100); ts <= 102; ts++ { + has, err := interop.verifiedDB.Has(ts) + require.NoError(t, err) + require.True(t, has) - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) + retrieved, err := interop.verifiedDB.Get(ts) + require.NoError(t, err) + require.Equal(t, ts, retrieved.Timestamp) + require.Contains(t, retrieved.L2Heads, mock.id) + require.Equal(t, ts, retrieved.L2Heads[mock.id].Number) + } - // Stop without ever starting - cancel is nil - err := interop.Stop(context.Background()) - require.NoError(t, err) + // Verify logsDB populated + latestBlock, hasBlocks := interop.logsDBs[mock.id].LatestSealedBlock() + require.True(t, hasBlocks) + require.Equal(t, uint64(102), 
latestBlock.Number) } // ============================================================================= -// collectCurrentL1 Tests +// TestResult_IsEmpty // ============================================================================= -func TestCollectCurrentL1_ReturnsMinimum(t *testing.T) { +func TestResult_IsEmpty(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock1.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x2")} - mock2 := newMockChainContainer(8453) - mock2.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} // minimum + tests := []struct { + name string + result Result + isEmpty bool + }{ + {"zero value", Result{}, true}, + {"only timestamp", Result{Timestamp: 1000}, true}, + {"with L1Head", Result{Timestamp: 1000, L1Head: eth.BlockID{Number: 100}}, false}, + {"with L2Heads", Result{Timestamp: 1000, L2Heads: map[eth.ChainID]eth.BlockID{eth.ChainIDFromUInt64(10): {Number: 50}}}, false}, + {"with InvalidHeads", Result{Timestamp: 1000, InvalidHeads: map[eth.ChainID]eth.BlockID{eth.ChainIDFromUInt64(10): {Number: 50}}}, false}, + } - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + require.Equal(t, tt.isEmpty, tt.result.IsEmpty()) + }) } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() +} - l1, err := interop.collectCurrentL1() +// ============================================================================= +// Mock Types +// ============================================================================= - require.NoError(t, err) - require.Equal(t, uint64(100), l1.Number) - require.Equal(t, common.HexToHash("0x1"), l1.Hash) +type mockBlockInfo struct { + hash common.Hash + parentHash common.Hash + number uint64 + timestamp uint64 } -func TestCollectCurrentL1_ChainNotReady_Error(t *testing.T) { - t.Parallel() - 
dataDir := t.TempDir() +func (m *mockBlockInfo) Hash() common.Hash { return m.hash } +func (m *mockBlockInfo) ParentHash() common.Hash { return m.parentHash } +func (m *mockBlockInfo) Coinbase() common.Address { return common.Address{} } +func (m *mockBlockInfo) Root() common.Hash { return common.Hash{} } +func (m *mockBlockInfo) NumberU64() uint64 { return m.number } +func (m *mockBlockInfo) Time() uint64 { return m.timestamp } +func (m *mockBlockInfo) MixDigest() common.Hash { return common.Hash{} } +func (m *mockBlockInfo) BaseFee() *big.Int { return big.NewInt(1) } +func (m *mockBlockInfo) BlobBaseFee(chainConfig *params.ChainConfig) *big.Int { return big.NewInt(1) } +func (m *mockBlockInfo) ExcessBlobGas() *uint64 { return nil } +func (m *mockBlockInfo) ReceiptHash() common.Hash { return common.Hash{} } +func (m *mockBlockInfo) GasUsed() uint64 { return 0 } +func (m *mockBlockInfo) GasLimit() uint64 { return 30000000 } +func (m *mockBlockInfo) BlobGasUsed() *uint64 { return nil } +func (m *mockBlockInfo) ParentBeaconRoot() *common.Hash { return nil } +func (m *mockBlockInfo) WithdrawalsRoot() *common.Hash { return nil } +func (m *mockBlockInfo) HeaderRLP() ([]byte, error) { return nil, nil } +func (m *mockBlockInfo) Header() *types.Header { return nil } +func (m *mockBlockInfo) ID() eth.BlockID { return eth.BlockID{Hash: m.hash, Number: m.number} } + +var _ eth.BlockInfo = (*mockBlockInfo)(nil) - mock := newMockChainContainer(10) - mock.currentL1Err = errors.New("chain not synced") +type mockChainContainer struct { + id eth.ChainID - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - l1, err := interop.collectCurrentL1() - - require.Error(t, err) - require.Contains(t, err.Error(), "not ready") - require.Equal(t, eth.BlockID{}, l1) -} - -func TestCollectCurrentL1_EmptyChains(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() + 
currentL1 eth.BlockRef + currentL1Err error - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() + blockAtTimestamp eth.L2BlockRef + blockAtTimestampErr error - l1, err := interop.collectCurrentL1() + lastRequestedTimestamp uint64 + mu sync.Mutex - require.NoError(t, err) - require.Equal(t, eth.BlockID{}, l1) + // InvalidateBlock tracking + invalidateBlockCalls []invalidateBlockCall + invalidateBlockRet bool + invalidateBlockErr error } -func TestCollectCurrentL1_SingleChain(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 500, Hash: common.HexToHash("0x5")} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - l1, err := interop.collectCurrentL1() - - require.NoError(t, err) - require.Equal(t, uint64(500), l1.Number) - require.Equal(t, common.HexToHash("0x5"), l1.Hash) +type invalidateBlockCall struct { + height uint64 + payloadHash common.Hash } -// ============================================================================= -// checkChainsReady Tests -// ============================================================================= - -func TestCheckChainsReady_AllReady(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock1.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - - mock2 := newMockChainContainer(8453) - mock2.blockAtTimestamp = eth.L2BlockRef{Number: 200, Hash: common.HexToHash("0x2")} - - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, - } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - blocks, err := interop.checkChainsReady(1000) - - 
require.NoError(t, err) - require.Len(t, blocks, 2) - require.Equal(t, uint64(100), blocks[mock1.id].Number) - require.Equal(t, uint64(200), blocks[mock2.id].Number) +func newMockChainContainer(id uint64) *mockChainContainer { + return &mockChainContainer{id: eth.ChainIDFromUInt64(id)} } -func TestCheckChainsReady_OneNotReady(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock1.blockAtTimestamp = eth.L2BlockRef{Number: 100} - - mock2 := newMockChainContainer(8453) - mock2.blockAtTimestampErr = ethereum.NotFound // Not ready - - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, +func (m *mockChainContainer) ID() eth.ChainID { return m.id } +func (m *mockChainContainer) Start(ctx context.Context) error { return nil } +func (m *mockChainContainer) Stop(ctx context.Context) error { return nil } +func (m *mockChainContainer) Pause(ctx context.Context) error { return nil } +func (m *mockChainContainer) Resume(ctx context.Context) error { return nil } +func (m *mockChainContainer) RegisterVerifier(v activity.VerificationActivity) { +} +func (m *mockChainContainer) BlockAtTimestamp(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) { + m.mu.Lock() + defer m.mu.Unlock() + if m.blockAtTimestampErr != nil { + return eth.L2BlockRef{}, m.blockAtTimestampErr } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - blocks, err := interop.checkChainsReady(1000) - - require.Error(t, err) - require.Nil(t, blocks) + m.lastRequestedTimestamp = ts + ref := m.blockAtTimestamp + ref.Time = ts + ref.Number = ts + ref.Hash = common.BigToHash(big.NewInt(int64(ts))) + return ref, nil } - -func TestCheckChainsReady_EmptyChains(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - 
interop.ctx = context.Background() - - blocks, err := interop.checkChainsReady(1000) - - require.NoError(t, err) - require.Empty(t, blocks) +func (m *mockChainContainer) VerifiedAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { + return eth.BlockID{}, eth.BlockID{}, nil } - -func TestCheckChainsReady_ParallelQueries(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - // Create multiple chains to test parallel execution - var mocks []*mockChainContainer - chains := make(map[eth.ChainID]cc.ChainContainer) - - for i := 0; i < 5; i++ { - mock := newMockChainContainer(uint64(10 + i)) - mock.blockAtTimestamp = eth.L2BlockRef{Number: uint64(100 + i)} - mocks = append(mocks, mock) - chains[mock.id] = mock +func (m *mockChainContainer) L1ForL2(ctx context.Context, l2Block eth.BlockID) (eth.BlockID, error) { + return eth.BlockID{}, nil +} +func (m *mockChainContainer) OptimisticAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { + return eth.BlockID{}, eth.BlockID{}, nil +} +func (m *mockChainContainer) OutputRootAtL2BlockNumber(ctx context.Context, l2BlockNum uint64) (eth.Bytes32, error) { + return eth.Bytes32{}, nil +} +func (m *mockChainContainer) OptimisticOutputAtTimestamp(ctx context.Context, ts uint64) (*eth.OutputResponse, error) { + return nil, nil +} +func (m *mockChainContainer) FetchReceipts(ctx context.Context, blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) { + m.mu.Lock() + defer m.mu.Unlock() + ts := m.lastRequestedTimestamp + var parentHash common.Hash + if ts > 0 { + parentHash = common.BigToHash(big.NewInt(int64(ts - 1))) } - - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - blocks, err := interop.checkChainsReady(1000) - - require.NoError(t, err) - require.Len(t, blocks, 5) - - // Verify all chains were queried - for _, mock := range mocks { - require.Contains(t, blocks, mock.id) + blockInfo := &mockBlockInfo{ + hash: 
blockID.Hash, + parentHash: parentHash, + number: blockID.Number, + timestamp: ts, } + return blockInfo, types.Receipts{}, nil } - -// ============================================================================= -// progressInterop Tests -// ============================================================================= - -func TestProgressInterop_NotInitialized_UsesActivationTimestamp(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 5000, chains, dataDir) // activation at 5000 - require.NotNil(t, interop) - interop.ctx = context.Background() - - result, err := interop.progressInterop() - - require.NoError(t, err) - require.False(t, result.IsEmpty()) - require.Equal(t, uint64(5000), result.Timestamp) - require.True(t, result.IsValid()) +func (m *mockChainContainer) SyncStatus(ctx context.Context) (*eth.SyncStatus, error) { + m.mu.Lock() + defer m.mu.Unlock() + if m.currentL1Err != nil { + return nil, m.currentL1Err + } + return ð.SyncStatus{CurrentL1: m.currentL1}, nil } - -func TestProgressInterop_Initialized_UsesNextTimestamp(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - // First progress - returns result for timestamp 1000 - result1, err := interop.progressInterop() - require.NoError(t, err) - require.Equal(t, uint64(1000), result1.Timestamp) - - // Commit the result so DB is initialized - err = interop.handleResult(result1) - require.NoError(t, err) - - // Second progress - should return result for timestamp 1001 - result2, err 
:= interop.progressInterop() - require.NoError(t, err) - require.Equal(t, uint64(1001), result2.Timestamp) +func (m *mockChainContainer) RewindEngine(ctx context.Context, timestamp uint64) error { + return nil } - -func TestProgressInterop_ChainsNotReady_ReturnsEmptyResult(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.blockAtTimestampErr = ethereum.NotFound // Not ready - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - result, err := interop.progressInterop() - - require.NoError(t, err) // Returns nil error when chains not ready - require.True(t, result.IsEmpty()) +func (m *mockChainContainer) BlockTime() uint64 { return 1 } +func (m *mockChainContainer) InvalidateBlock(ctx context.Context, height uint64, payloadHash common.Hash) (bool, error) { + m.mu.Lock() + defer m.mu.Unlock() + m.invalidateBlockCalls = append(m.invalidateBlockCalls, invalidateBlockCall{height: height, payloadHash: payloadHash}) + return m.invalidateBlockRet, m.invalidateBlockErr } - -func TestProgressInterop_ChainError(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.blockAtTimestampErr = errors.New("internal error") - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - result, err := interop.progressInterop() - - require.Error(t, err) - require.Contains(t, err.Error(), "internal error") - require.True(t, result.IsEmpty()) +func (m *mockChainContainer) IsDenied(height uint64, payloadHash common.Hash) (bool, error) { + return false, nil } +func (m *mockChainContainer) SetResetCallback(cb cc.ResetCallback) {} -// ============================================================================= -// CurrentL1 Tests -// 
============================================================================= - -func TestCurrentL1_ReturnsStoredValue(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - interop.currentL1 = eth.BlockID{Number: 100, Hash: common.HexToHash("0x1")} - - result := interop.CurrentL1() +var _ cc.ChainContainer = (*mockChainContainer)(nil) - require.Equal(t, uint64(100), result.Number) - require.Equal(t, common.HexToHash("0x1"), result.Hash) +func testLogger() gethlog.Logger { + return gethlog.New() } -func TestCurrentL1_EmptyReturnsZero(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - result := interop.CurrentL1() - - require.Equal(t, eth.BlockID{}, result) +// mockLogsDBForInterop implements LogsDB for interop tests +type mockLogsDBForInterop struct { + openBlockRef eth.BlockRef + openBlockLogCnt uint32 + openBlockExecMsg map[uint32]*suptypes.ExecutingMessage + openBlockErr error + containsSeal suptypes.BlockSeal + containsErr error + + // Track calls for verification + rewindCalls []eth.BlockID + clearCalls int + + // Configurable return value for FirstSealedBlock + firstSealedBlock suptypes.BlockSeal } -// ============================================================================= -// VerifiedAtTimestamp Tests -// ============================================================================= - -func TestVerifiedAtTimestamp_Exists(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.blockAtTimestamp = eth.L2BlockRef{Number: 100} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - // Progress to get result for 
timestamp 1000 - result, err := interop.progressInterop() - require.NoError(t, err) - - // Commit the result to DB - err = interop.handleResult(result) - require.NoError(t, err) - - verified, err := interop.VerifiedAtTimestamp(1000) - - require.NoError(t, err) - require.True(t, verified) +func (m *mockLogsDBForInterop) LatestSealedBlock() (eth.BlockID, bool) { return eth.BlockID{}, false } +func (m *mockLogsDBForInterop) FirstSealedBlock() (suptypes.BlockSeal, error) { + return m.firstSealedBlock, nil } - -func TestVerifiedAtTimestamp_NotExists(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - verified, err := interop.VerifiedAtTimestamp(9999) - - require.NoError(t, err) - require.False(t, verified) +func (m *mockLogsDBForInterop) FindSealedBlock(number uint64) (suptypes.BlockSeal, error) { + return suptypes.BlockSeal{}, nil } - -// ============================================================================= -// verifyInteropMessages Tests -// ============================================================================= - -func TestVerifyInteropMessages_CopiesBlocks(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock2 := newMockChainContainer(8453) - - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, +func (m *mockLogsDBForInterop) OpenBlock(blockNum uint64) (eth.BlockRef, uint32, map[uint32]*suptypes.ExecutingMessage, error) { + if m.openBlockErr != nil { + return eth.BlockRef{}, 0, nil, m.openBlockErr } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - blocksAtTimestamp := map[eth.ChainID]eth.BlockID{ - mock1.id: {Number: 100, Hash: common.HexToHash("0x1")}, - mock2.id: {Number: 200, Hash: common.HexToHash("0x2")}, + return m.openBlockRef, m.openBlockLogCnt, m.openBlockExecMsg, nil +} +func (m 
*mockLogsDBForInterop) Contains(query suptypes.ContainsQuery) (suptypes.BlockSeal, error) { + if m.containsErr != nil { + return suptypes.BlockSeal{}, m.containsErr } - - result, err := interop.verifyInteropMessages(1000, blocksAtTimestamp) - - require.NoError(t, err) - require.Equal(t, uint64(1000), result.Timestamp) - require.Len(t, result.L2Heads, 2) - require.Equal(t, blocksAtTimestamp[mock1.id], result.L2Heads[mock1.id]) - require.Equal(t, blocksAtTimestamp[mock2.id], result.L2Heads[mock2.id]) - require.True(t, result.IsValid()) // No invalid heads in stub implementation + return m.containsSeal, nil } - -// ============================================================================= -// handleResult Tests -// ============================================================================= - -func TestHandleResult_EmptyResult_ReturnsNil(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - chains := map[eth.ChainID]cc.ChainContainer{} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - emptyResult := Result{} - require.True(t, emptyResult.IsEmpty()) - - err := interop.handleResult(emptyResult) - - require.NoError(t, err) - // Empty result should not commit anything to DB - has, err := interop.verifiedDB.Has(0) - require.NoError(t, err) - require.False(t, has) +func (m *mockLogsDBForInterop) AddLog(logHash common.Hash, parentBlock eth.BlockID, logIdx uint32, execMsg *suptypes.ExecutingMessage) error { + return nil } - -func TestHandleResult_ValidResult_CommitsToDb(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - validResult := Result{ - Timestamp: 1000, - L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, - L2Heads: map[eth.ChainID]eth.BlockID{ - mock.id: {Number: 500, Hash: common.HexToHash("0xL2")}, - }, - 
InvalidHeads: nil, // No invalid heads = valid result - } - require.True(t, validResult.IsValid()) - require.False(t, validResult.IsEmpty()) - - err := interop.handleResult(validResult) - - require.NoError(t, err) - // Valid result should be committed to DB - has, err := interop.verifiedDB.Has(1000) - require.NoError(t, err) - require.True(t, has) +func (m *mockLogsDBForInterop) SealBlock(parentHash common.Hash, block eth.BlockID, timestamp uint64) error { + return nil } - -func TestHandleResult_InvalidResult_DoesNotCommitToDb(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - invalidResult := Result{ - Timestamp: 1000, - L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, - L2Heads: map[eth.ChainID]eth.BlockID{ - mock.id: {Number: 500, Hash: common.HexToHash("0xL2")}, - }, - InvalidHeads: map[eth.ChainID]eth.BlockID{ - mock.id: {Number: 500, Hash: common.HexToHash("0xBAD")}, // Has invalid heads - }, - } - require.False(t, invalidResult.IsValid()) - require.False(t, invalidResult.IsEmpty()) - - err := interop.handleResult(invalidResult) - - require.NoError(t, err) - // Invalid results trigger block invalidation but are NOT committed to the verified DB - has, err := interop.verifiedDB.Has(1000) - require.NoError(t, err) - require.False(t, has) +func (m *mockLogsDBForInterop) Rewind(inv reads.Invalidator, newHead eth.BlockID) error { + m.rewindCalls = append(m.rewindCalls, newHead) + return nil } - -func TestHandleResult_InvalidResult_MultipleInvalidHeads(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock2 := newMockChainContainer(8453) - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, - } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - - 
invalidResult := Result{ - Timestamp: 1000, - L1Head: eth.BlockID{Number: 100, Hash: common.HexToHash("0xL1")}, - L2Heads: map[eth.ChainID]eth.BlockID{ - mock1.id: {Number: 500, Hash: common.HexToHash("0xL2a")}, - mock2.id: {Number: 600, Hash: common.HexToHash("0xL2b")}, - }, - InvalidHeads: map[eth.ChainID]eth.BlockID{ - mock1.id: {Number: 500, Hash: common.HexToHash("0xBAD1")}, - mock2.id: {Number: 600, Hash: common.HexToHash("0xBAD2")}, - }, - } - - err := interop.handleResult(invalidResult) - - require.NoError(t, err) - // Invalid results trigger block invalidation but are NOT committed to the verified DB - has, err := interop.verifiedDB.Has(1000) - require.NoError(t, err) - require.False(t, has) +func (m *mockLogsDBForInterop) Clear(inv reads.Invalidator) error { + m.clearCalls++ + return nil } +func (m *mockLogsDBForInterop) Close() error { return nil } + +var _ LogsDB = (*mockLogsDBForInterop)(nil) // ============================================================================= -// progressAndRecord L1 Update Tests +// TestReset // ============================================================================= -func TestProgressAndRecord_EmptyResult_SetsL1ToCollectedMinimum(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock1 := newMockChainContainer(10) - mock1.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x2")} - mock1.blockAtTimestampErr = ethereum.NotFound // Chains not ready -> empty result - - mock2 := newMockChainContainer(8453) - mock2.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} // This is minimum - mock2.blockAtTimestampErr = ethereum.NotFound - - chains := map[eth.ChainID]cc.ChainContainer{ - mock1.id: mock1, - mock2.id: mock2, - } - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - // Verify currentL1 starts empty - require.Equal(t, eth.BlockID{}, interop.currentL1) - - err := interop.progressAndRecord() - - 
require.NoError(t, err) - // When result is empty, currentL1 should be set to the collected minimum - require.Equal(t, uint64(100), interop.currentL1.Number) - require.Equal(t, common.HexToHash("0x1"), interop.currentL1.Hash) -} - -func TestProgressAndRecord_ValidResult_SetsL1ToResultL1Head(t *testing.T) { +func TestReset(t *testing.T) { t.Parallel() - dataDir := t.TempDir() - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x200")} - mock.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0xL2")} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - // Override verifyFn to return a valid result with a specific L1Head - expectedL1Head := eth.BlockID{Number: 150, Hash: common.HexToHash("0xL1Result")} - interop.verifyFn = func(ts uint64, blocksAtTimestamp map[eth.ChainID]eth.BlockID) (Result, error) { - return Result{ - Timestamp: ts, - L1Head: expectedL1Head, - L2Heads: map[eth.ChainID]eth.BlockID{ - mock.id: blocksAtTimestamp[mock.id], + tests := []struct { + name string + setup func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) + run func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) + }{ + { + name: "rewinds logsDB when previous block available", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Hash: common.HexToHash("0xPREV"), Number: 99} + }).Build() + mockLogsDB := &mockLogsDBForInterop{} + h.interop.logsDBs[h.Mock(10).id] = mockLogsDB + return h, mockLogsDB }, - InvalidHeads: nil, // valid result - }, nil - } + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + h.interop.Reset(h.Mock(10).id, 100) - // Verify currentL1 starts empty - require.Equal(t, 
eth.BlockID{}, interop.currentL1) - - err := interop.progressAndRecord() - - require.NoError(t, err) - // When result is valid (non-empty), currentL1 should be set to result.L1Head - require.Equal(t, expectedL1Head.Number, interop.currentL1.Number) - require.Equal(t, expectedL1Head.Hash, interop.currentL1.Hash) -} - -func TestProgressAndRecord_InvalidResult_DoesNotUpdateL1(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 200, Hash: common.HexToHash("0x200")} - mock.blockAtTimestamp = eth.L2BlockRef{Number: 100, Hash: common.HexToHash("0xL2")} - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - // Set an initial currentL1 value - initialL1 := eth.BlockID{Number: 50, Hash: common.HexToHash("0x50")} - interop.currentL1 = initialL1 - - // Override verifyFn to return an invalid result (has InvalidHeads) - interop.verifyFn = func(ts uint64, blocksAtTimestamp map[eth.ChainID]eth.BlockID) (Result, error) { - return Result{ - Timestamp: ts, - L1Head: eth.BlockID{Number: 999, Hash: common.HexToHash("0xShouldNotBeUsed")}, - L2Heads: map[eth.ChainID]eth.BlockID{ - mock.id: blocksAtTimestamp[mock.id], + require.Len(t, mockLogsDB.rewindCalls, 1) + require.Equal(t, uint64(99), mockLogsDB.rewindCalls[0].Number) + require.Equal(t, 0, mockLogsDB.clearCalls) }, - InvalidHeads: map[eth.ChainID]eth.BlockID{ - mock.id: {Number: 100, Hash: common.HexToHash("0xBAD")}, // marks result as invalid + }, + { + name: "clears logsDB when previous block not available", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestampErr = errors.New("block not found") + }).Build() + mockLogsDB := &mockLogsDBForInterop{} + h.interop.logsDBs[h.Mock(10).id] = mockLogsDB + return h, mockLogsDB }, - 
}, nil - } - - err := interop.progressAndRecord() - - require.NoError(t, err) - // When result is invalid, currentL1 should NOT be updated (remains at initial value) - require.Equal(t, initialL1.Number, interop.currentL1.Number) - require.Equal(t, initialL1.Hash, interop.currentL1.Hash) -} - -func TestProgressAndRecord_CollectL1Error_ReturnsError(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.currentL1Err = errors.New("L1 sync error") - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - err := interop.progressAndRecord() - - require.Error(t, err) - require.Contains(t, err.Error(), "not ready") -} - -func TestProgressAndRecord_ProgressInteropError_ReturnsError(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() - - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 100, Hash: common.HexToHash("0x1")} - mock.blockAtTimestampErr = errors.New("internal chain error") - - chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 1000, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() - - err := interop.progressAndRecord() - - require.Error(t, err) - require.Contains(t, err.Error(), "internal chain error") -} - -// ============================================================================= -// Integration Tests -// ============================================================================= - -func TestInterop_FullCycle(t *testing.T) { - t.Parallel() - dataDir := t.TempDir() + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + h.interop.Reset(h.Mock(10).id, 100) - mock := newMockChainContainer(10) - mock.currentL1 = eth.BlockRef{Number: 1000, Hash: common.HexToHash("0xL1")} - mock.blockAtTimestamp = eth.L2BlockRef{Number: 500, Hash: common.HexToHash("0xL2")} - 
- chains := map[eth.ChainID]cc.ChainContainer{mock.id: mock} - interop := New(testLogger(), 100, chains, dataDir) - require.NotNil(t, interop) - interop.ctx = context.Background() + require.Len(t, mockLogsDB.rewindCalls, 0) + require.Equal(t, 1, mockLogsDB.clearCalls) + }, + }, + { + name: "clears logsDB when timestamp at or before blockTime", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, nil).Build() + mockLogsDB := &mockLogsDBForInterop{ + firstSealedBlock: suptypes.BlockSeal{Number: 5}, + } + h.interop.logsDBs[h.Mock(10).id] = mockLogsDB + return h, mockLogsDB + }, + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + // Reset at timestamp 1 (blockTime=1, so targetTs=0) + // Since firstSealedBlock.Number (5) > targetBlock.Number (0), Clear is called + h.interop.Reset(h.Mock(10).id, 1) - // Simulate multiple interop cycles - for i := 0; i < 3; i++ { - // Collect L1 (returns minimum across chains) - l1, err := interop.collectCurrentL1() - require.NoError(t, err) - require.Equal(t, uint64(1000), l1.Number) + require.Len(t, mockLogsDB.rewindCalls, 0) + require.Equal(t, 1, mockLogsDB.clearCalls) + }, + }, + { + name: "rewinds verifiedDB", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 99} + }).Build() + mockLogsDB := &mockLogsDBForInterop{} + h.interop.logsDBs[h.Mock(10).id] = mockLogsDB + return h, mockLogsDB + }, + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + mock := h.Mock(10) + // Add some verified results + for ts := uint64(98); ts <= 102; ts++ { + err := h.interop.verifiedDB.Commit(VerifiedResult{ + Timestamp: ts, + L1Head: eth.BlockID{Number: ts}, + L2Heads: map[eth.ChainID]eth.BlockID{mock.id: {Number: ts}}, + }) + require.NoError(t, err) + } + + // Reset at timestamp 100 (should remove 100, 101, 102) + 
h.interop.Reset(mock.id, 100) + + // Verify results at 98, 99 still exist + has, _ := h.interop.verifiedDB.Has(98) + require.True(t, has) + has, _ = h.interop.verifiedDB.Has(99) + require.True(t, has) + + // Verify results at 100, 101, 102 are gone + has, _ = h.interop.verifiedDB.Has(100) + require.False(t, has) + has, _ = h.interop.verifiedDB.Has(101) + require.False(t, has) + has, _ = h.interop.verifiedDB.Has(102) + require.False(t, has) + }, + }, + { + name: "resets currentL1", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, func(m *mockChainContainer) { + m.blockAtTimestamp = eth.L2BlockRef{Number: 99} + }).Build() + mockLogsDB := &mockLogsDBForInterop{} + h.interop.logsDBs[h.Mock(10).id] = mockLogsDB + return h, mockLogsDB + }, + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + h.interop.currentL1 = eth.BlockID{Number: 500, Hash: common.HexToHash("0xL1")} - // Progress and get result - result, err := interop.progressInterop() - require.NoError(t, err) - require.False(t, result.IsEmpty()) + h.interop.Reset(h.Mock(10).id, 100) - // Handle the result (commits to DB) - err = interop.handleResult(result) - require.NoError(t, err) + require.Equal(t, eth.BlockID{}, h.interop.currentL1) + }, + }, + { + name: "handles unknown chain gracefully", + setup: func(h *interopTestHarness) (*interopTestHarness, *mockLogsDBForInterop) { + h.WithChain(10, nil).Build() + return h, nil + }, + run: func(t *testing.T, h *interopTestHarness, mockLogsDB *mockLogsDBForInterop) { + // Reset on unknown chain (should not panic) + unknownChain := eth.ChainIDFromUInt64(999) + h.interop.Reset(unknownChain, 100) + // Just verify it didn't panic + }, + }, } - // Verify timestamps were committed sequentially - for ts := uint64(100); ts <= 102; ts++ { - has, err := interop.verifiedDB.Has(ts) - require.NoError(t, err) - require.True(t, has, "timestamp %d should be verified", ts) + for _, tc := range tests { + 
t.Run(tc.name, func(t *testing.T) { + h := newInteropTestHarness(t) + h, mockLogsDB := tc.setup(h) + tc.run(t, h, mockLogsDB) + }) } } diff --git a/op-supernode/supernode/activity/interop/logdb.go b/op-supernode/supernode/activity/interop/logdb.go new file mode 100644 index 00000000000..7a382b406c2 --- /dev/null +++ b/op-supernode/supernode/activity/interop/logdb.go @@ -0,0 +1,239 @@ +package interop + +import ( + "errors" + "fmt" + "os" + "path/filepath" + + "github.com/ethereum/go-ethereum/common" + gethTypes "github.com/ethereum/go-ethereum/core/types" + "github.com/ethereum/go-ethereum/log" + + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/db/logs" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/processors" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/reads" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" +) + +// LogsDB is the interface for interacting with a chain's logs database. +// *logs.DB implements this interface. +type LogsDB interface { + // LatestSealedBlock returns the latest sealed block ID, or false if no blocks are sealed. + LatestSealedBlock() (eth.BlockID, bool) + // FirstSealedBlock returns the first block seal in the DB. + FirstSealedBlock() (types.BlockSeal, error) + // FindSealedBlock returns the block seal for the given block number. + FindSealedBlock(number uint64) (types.BlockSeal, error) + // OpenBlock returns the block reference, log count, and executing messages for a block. + OpenBlock(blockNum uint64) (ref eth.BlockRef, logCount uint32, execMsgs map[uint32]*types.ExecutingMessage, err error) + // Contains checks if an initiating message exists in the database. + // Returns the block seal if found, or an error (ErrConflict if not found, ErrFuture if not yet indexed). + Contains(query types.ContainsQuery) (types.BlockSeal, error) + // AddLog adds a log entry to the database. 
+ AddLog(logHash common.Hash, parentBlock eth.BlockID, logIdx uint32, execMsg *types.ExecutingMessage) error + // SealBlock seals a block in the database. + SealBlock(parentHash common.Hash, block eth.BlockID, timestamp uint64) error + // Rewind removes all blocks after newHead from the database. + Rewind(inv reads.Invalidator, newHead eth.BlockID) error + // Clear removes all data from the database. + Clear(inv reads.Invalidator) error + // Close closes the database. + Close() error +} + +// Compile-time check that *logs.DB implements LogsDB. +var _ LogsDB = (*logs.DB)(nil) + +// noopLogsDBMetrics implements the logs.Metrics interface with no-op methods. +type noopLogsDBMetrics struct{} + +func (n *noopLogsDBMetrics) RecordDBEntryCount(kind string, count int64) {} +func (n *noopLogsDBMetrics) RecordDBSearchEntriesRead(count int64) {} + +// noopInvalidator implements reads.Invalidator as a no-op. +// Used for rewind operations where we don't need cache invalidation. +// noopInvalidator is a stub needed to use the logs.DB.Rewind method. +// read-handle invalidation is not currently used +type noopInvalidator struct{} + +func (n *noopInvalidator) TryInvalidate(rule reads.InvalidationRule) (release func(), err error) { + return func() {}, nil +} + +var _ reads.Invalidator = (*noopInvalidator)(nil) + +// openLogsDB opens a logs.DB for the given chain in the data directory. 
+func openLogsDB(logger log.Logger, chainID eth.ChainID, dataDir string) (LogsDB, error) { + chainDir := filepath.Join(dataDir, fmt.Sprintf("chain-%s", chainID)) + if err := os.MkdirAll(chainDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create chain directory: %w", err) + } + + dbPath := filepath.Join(chainDir, "logs.db") + db, err := logs.NewFromFile(logger, &noopLogsDBMetrics{}, chainID, dbPath, true) + if err != nil { + return nil, fmt.Errorf("failed to open logs DB for chain %s: %w", chainID, err) + } + + logger.Info("Initialized logs DB", "chain", chainID, "path", dbPath) + return db, nil +} + +var ( + // ErrPreviousTimestampNotSealed is returned when loadLogs is called but the + // previous timestamp has not been sealed in the logsDB. + ErrPreviousTimestampNotSealed = errors.New("previous timestamp not sealed in logsDB") + + // ErrParentHashMismatch is returned when the block's parent hash does not match + // the hash of the last sealed block in the logsDB. + ErrParentHashMismatch = errors.New("block parent hash does not match logsDB") +) + +// loadLogs loads and persists logs for the given timestamp for all chains. +// The previous timestamp MUST already be sealed in the database; if not, an error is returned. +// For the activation timestamp (first timestamp), the logsDB must be empty. 
+func (i *Interop) loadLogs(ts uint64) error { + for chainID, chain := range i.chains { + db := i.logsDBs[chainID] + + // Verify the previous timestamp is sealed (or DB is empty for activation timestamp) + // Returns the hash of the previous sealed block, or nil if DB is empty + latestBlock, hasBlocks, err := i.verifyCanAddTimestamp(chainID, db, ts, chain.BlockTime()) + if err != nil { + return err + } + + // Get the block at timestamp ts + block, err := chain.BlockAtTimestamp(i.ctx, ts, eth.Safe) + if err != nil { + return fmt.Errorf("chain %s: failed to get block at timestamp %d: %w", chainID, ts, err) + } + + // Fetch receipts for the block + blockInfo, receipts, err := chain.FetchReceipts(i.ctx, block.ID()) + if err != nil { + return fmt.Errorf("chain %s: failed to fetch receipts for block %d: %w", chainID, block.Number, err) + } + + // if the database has blocks, check if we can skip or need to verify continuity + if hasBlocks { + // if the latest block is the same or beyond the block we are loading, skip loading + if latestBlock.Number >= block.Number { + continue + } + + // Verify chain continuity: block's parent must match the last sealed block + if blockInfo.ParentHash() != latestBlock.Hash { + return fmt.Errorf("chain %s: block %d parent hash %s does not match logsDB last sealed block hash %s: %w", + chainID, block.Number, blockInfo.ParentHash(), latestBlock.Hash, ErrParentHashMismatch) + } + } + + // Process logs and seal the block + // If DB is empty (!hasBlocks), this is the first block - treat it as genesis for the logsDB + isFirstBlock := !hasBlocks + if err := i.processBlockLogs(db, blockInfo, receipts, isFirstBlock); err != nil { + return fmt.Errorf("chain %s: failed to process block logs for block %d: %w", chainID, block.Number, err) + } + + i.log.Debug("loaded logs for chain", + "chain", chainID, + "block", block.Number, + "timestamp", ts, + ) + } + + return nil +} + +func (i *Interop) verifyCanAddTimestamp(chainID eth.ChainID, db LogsDB, ts 
uint64, blockTime uint64) (eth.BlockID, bool, error) { + latestBlock, hasBlocks := db.LatestSealedBlock() + + // If no blocks in DB: + // - At activation timestamp: OK, proceed to load the first block + // - Not at activation timestamp: ERROR, we're missing data + if !hasBlocks { + if ts == i.activationTimestamp { + return eth.BlockID{}, hasBlocks, nil + } + return eth.BlockID{}, hasBlocks, fmt.Errorf("chain %s: logsDB is empty but expected blocks before timestamp %d: %w", + chainID, ts, ErrPreviousTimestampNotSealed) + } + + // DB has blocks - fall through to normal timestamp checks below + // This handles the case where we restart at activation timestamp but the logsDB already has data + + // determine the timestamp of the last sealed block + seal, err := db.FindSealedBlock(latestBlock.Number) + if err != nil { + return eth.BlockID{}, hasBlocks, fmt.Errorf("chain %s: failed to find sealed block %d: %w", chainID, latestBlock.Number, err) + } + + // if the last sealed block is already after the timestamp in question, return success + if seal.Timestamp > ts { + return latestBlock, hasBlocks, nil + } + + gap := ts - seal.Timestamp + + // if there is more than a block time of gap, we cannot append the timestamp to the database + if gap > blockTime { + return eth.BlockID{}, hasBlocks, fmt.Errorf("chain %s: the prior block timestamp %d (%d minus block time %d) is not sealed (last sealed block timestamp: %d): %w", + chainID, ts-blockTime, ts, blockTime, seal.Timestamp, ErrPreviousTimestampNotSealed) + } + + // if the gap is less than a block time, we can append the timestamp to the database, but warn the caller + if gap < blockTime { + i.log.Warn("verifyCanAddTimestamp: requested for timestamp which is not a multiple of block time", + "chain", chainID, + "timestamp", ts, + "block time", blockTime, + "gap", gap, + ) + } + + return latestBlock, hasBlocks, nil +} + +// processBlockLogs processes the receipts for a block and stores the logs in the database. 
+// If isFirstBlock is true, this is the first block being added to the logsDB (at activation timestamp), +// and we treat it as genesis by using an empty parent block. This allows the logsDB to start at any +// block number, not just genesis. +func (i *Interop) processBlockLogs(db LogsDB, blockInfo eth.BlockInfo, receipts gethTypes.Receipts, isFirstBlock bool) error { + blockNum := blockInfo.NumberU64() + blockID := eth.BlockID{Hash: blockInfo.Hash(), Number: blockNum} + parentHash := blockInfo.ParentHash() + + // For the first block in the logsDB (activation block), use empty parent to treat it as genesis. + // This allows OpenBlock to work correctly even when we start at a non-genesis block. + parentBlock := eth.BlockID{Hash: parentHash, Number: blockNum - 1} + sealParentHash := parentHash + if blockNum == 0 || isFirstBlock { + parentBlock = eth.BlockID{} + sealParentHash = common.Hash{} + } + + var logIndex uint32 + for _, receipt := range receipts { + for _, l := range receipt.Logs { + logHash := processors.LogToLogHash(l) + + // Decode executing message if present (nil if not an executing message) + execMsg, _ := processors.DecodeExecutingMessageLog(l) + + if err := db.AddLog(logHash, parentBlock, logIndex, execMsg); err != nil { + return fmt.Errorf("failed to add log %d: %w", logIndex, err) + } + logIndex++ + } + } + + // Seal the block - use empty parent hash for first block + if err := db.SealBlock(sealParentHash, blockID, blockInfo.Time()); err != nil { + return fmt.Errorf("failed to seal block: %w", err) + } + + return nil +} diff --git a/op-supernode/supernode/activity/interop/logdb_test.go b/op-supernode/supernode/activity/interop/logdb_test.go new file mode 100644 index 00000000000..5087668eafe --- /dev/null +++ b/op-supernode/supernode/activity/interop/logdb_test.go @@ -0,0 +1,608 @@ +package interop + +import ( + "context" + "errors" + "testing" + + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + gethlog 
"github.com/ethereum/go-ethereum/log" + "github.com/stretchr/testify/require" + + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-supernode/supernode/activity" + cc "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container" + "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/backend/reads" + suptypes "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" +) + +// ============================================================================= +// TestLogsDB_Persistence +// ============================================================================= + +func TestLogsDB_Persistence(t *testing.T) { + t.Parallel() + + t.Run("data survives close and reopen", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + chainID := eth.ChainIDFromUInt64(10) + + // Create and populate a logsDB + { + db, err := openLogsDB(gethlog.New(), chainID, dataDir) + require.NoError(t, err) + + // Seal parent block + parentBlock := eth.BlockID{Hash: common.Hash{0x01}, Number: 99} + err = db.SealBlock(common.Hash{}, parentBlock, 998) + require.NoError(t, err) + + // Add a log + logHash := common.Hash{0x02} + err = db.AddLog(logHash, parentBlock, 0, nil) + require.NoError(t, err) + + // Seal block 100 + block100 := eth.BlockID{Hash: common.Hash{0x03}, Number: 100} + err = db.SealBlock(parentBlock.Hash, block100, 1000) + require.NoError(t, err) + + err = db.Close() + require.NoError(t, err) + } + + // Reopen and verify persistence + { + db, err := openLogsDB(gethlog.New(), chainID, dataDir) + require.NoError(t, err) + defer db.Close() + + latestBlock, ok := db.LatestSealedBlock() + require.True(t, ok) + require.Equal(t, uint64(100), latestBlock.Number) + require.Equal(t, common.Hash{0x03}, latestBlock.Hash) + } + }) + + t.Run("multiple chains are isolated", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + chainID1 := eth.ChainIDFromUInt64(10) + chainID2 := 
eth.ChainIDFromUInt64(8453) + + db1, err := openLogsDB(gethlog.New(), chainID1, dataDir) + require.NoError(t, err) + defer db1.Close() + + db2, err := openLogsDB(gethlog.New(), chainID2, dataDir) + require.NoError(t, err) + defer db2.Close() + + // Seal different blocks on each chain + parentBlock1 := eth.BlockID{Hash: common.Hash{0x01}, Number: 99} + err = db1.SealBlock(common.Hash{}, parentBlock1, 998) + require.NoError(t, err) + + block1 := eth.BlockID{Hash: common.Hash{0x02}, Number: 100} + err = db1.SealBlock(parentBlock1.Hash, block1, 1000) + require.NoError(t, err) + + parentBlock2 := eth.BlockID{Hash: common.Hash{0x11}, Number: 199} + err = db2.SealBlock(common.Hash{}, parentBlock2, 1998) + require.NoError(t, err) + + block2 := eth.BlockID{Hash: common.Hash{0x12}, Number: 200} + err = db2.SealBlock(parentBlock2.Hash, block2, 2000) + require.NoError(t, err) + + // Verify each chain has its own data + latestBlock1, ok := db1.LatestSealedBlock() + require.True(t, ok) + require.Equal(t, uint64(100), latestBlock1.Number) + + latestBlock2, ok := db2.LatestSealedBlock() + require.True(t, ok) + require.Equal(t, uint64(200), latestBlock2.Number) + }) +} + +// ============================================================================= +// TestVerifyPreviousTimestampSealed +// ============================================================================= + +func TestVerifyPreviousTimestampSealed(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + activationTS uint64 + queryTS uint64 + blockTime uint64 + dbHasBlocks bool + sealTimestamp uint64 + findSealErr error + wantErr bool + wantErrIs error + wantHashNil bool + }{ + { + name: "activation timestamp with empty DB returns nil hash", + activationTS: 1000, + queryTS: 1000, + blockTime: 1, + dbHasBlocks: false, + wantErr: false, + wantHashNil: true, + }, + { + name: "activation timestamp with non-empty DB succeeds (restart case)", + activationTS: 1000, + queryTS: 1000, + blockTime: 1, + dbHasBlocks: 
true, + sealTimestamp: 1000, // DB has block at activation timestamp + wantErr: false, + wantHashNil: false, + }, + { + name: "non-activation timestamp with empty DB errors", + activationTS: 1000, + queryTS: 1001, + blockTime: 1, + dbHasBlocks: false, + wantErr: true, + wantErrIs: ErrPreviousTimestampNotSealed, + wantHashNil: true, + }, + { + name: "seal timestamp == query timestamp succeeds (already sealed)", + activationTS: 1000, + queryTS: 1001, + blockTime: 1, + dbHasBlocks: true, + sealTimestamp: 1001, // Same as queryTS - already past this timestamp + wantErr: false, + wantHashNil: false, + }, + { + name: "seal timestamp > query timestamp succeeds (already past)", + activationTS: 1000, + queryTS: 1001, + blockTime: 1, + dbHasBlocks: true, + sealTimestamp: 1005, // Past queryTS + wantErr: false, + wantHashNil: false, + }, + { + name: "seal timestamp < query timestamp (exact ts-1) succeeds", + activationTS: 1000, + queryTS: 1001, + blockTime: 1, + dbHasBlocks: true, + sealTimestamp: 1000, // gap = 1, blockTime = 1 + wantErr: false, + wantHashNil: false, + }, + { + name: "seal timestamp within block time succeeds", + activationTS: 1000, + queryTS: 1002, + blockTime: 2, // blockTime = 2 + dbHasBlocks: true, + sealTimestamp: 1000, // gap = 2, blockTime = 2 - OK + wantErr: false, + wantHashNil: false, + }, + { + name: "gap exceeds block time errors", + activationTS: 1000, + queryTS: 1003, + blockTime: 2, // blockTime = 2 + dbHasBlocks: true, + sealTimestamp: 1000, // gap = 3, blockTime = 2 - ERROR + wantErr: true, + wantErrIs: ErrPreviousTimestampNotSealed, + wantHashNil: true, + }, + { + name: "FindSealedBlock error propagated", + activationTS: 1000, + queryTS: 1001, + blockTime: 1, + dbHasBlocks: true, + findSealErr: errors.New("database error"), + wantErr: true, + wantHashNil: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + interop := &Interop{ + log: gethlog.New(), + activationTimestamp: tt.activationTS, + 
} + chainID := eth.ChainIDFromUInt64(10) + expectedHash := common.Hash{0x01} + db := &mockLogsDB{ + hasBlocks: tt.dbHasBlocks, + latestBlock: eth.BlockID{Hash: expectedHash, Number: 100}, + seal: suptypes.BlockSeal{ + Hash: expectedHash, + Number: 100, + Timestamp: tt.sealTimestamp, + }, + findSealErr: tt.findSealErr, + } + + block, _, err := interop.verifyCanAddTimestamp(chainID, db, tt.queryTS, tt.blockTime) + + if tt.wantErr { + require.Error(t, err) + if tt.wantErrIs != nil { + require.ErrorIs(t, err, tt.wantErrIs) + } + } else { + require.NoError(t, err) + } + + if tt.wantHashNil { + require.Equal(t, common.Hash{}, block.Hash, "expected zero hash") + } else { + require.NotEqual(t, common.Hash{}, block.Hash, "expected non-zero hash") + require.Equal(t, expectedHash, block.Hash) + } + }) + } +} + +// ============================================================================= +// TestProcessBlockLogs +// ============================================================================= + +func TestProcessBlockLogs(t *testing.T) { + t.Parallel() + + t.Run("empty receipts seals block with no logs", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: common.Hash{0x01}, + number: 100, + timestamp: 1000, + } + + err := interop.processBlockLogs(db, blockInfo, types.Receipts{}, false) + + require.NoError(t, err) + require.NotNil(t, db.sealBlockCall) + require.Equal(t, common.Hash{0x01}, db.sealBlockCall.parentHash) + require.Equal(t, uint64(100), db.sealBlockCall.block.Number) + require.Equal(t, uint64(1000), db.sealBlockCall.timestamp) + require.Equal(t, 0, db.addLogCalls) + }) + + t.Run("multiple logs extracted from receipts", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: common.Hash{0x01}, + number: 100, + timestamp: 1000, + 
} + + receipts := types.Receipts{ + &types.Receipt{ + Logs: []*types.Log{ + {Address: common.Address{0xAA}, Data: []byte{0x01}}, + {Address: common.Address{0xBB}, Data: []byte{0x02}}, + }, + }, + &types.Receipt{ + Logs: []*types.Log{ + {Address: common.Address{0xCC}, Data: []byte{0x03}}, + }, + }, + } + + err := interop.processBlockLogs(db, blockInfo, receipts, false) + + require.NoError(t, err) + require.Equal(t, 3, db.addLogCalls) + require.NotNil(t, db.sealBlockCall) + }) + + t.Run("genesis block handled correctly", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x01}, + parentHash: common.Hash{}, // Genesis has no parent + number: 0, + timestamp: 1000, + } + + err := interop.processBlockLogs(db, blockInfo, types.Receipts{}, true) + + require.NoError(t, err) + require.NotNil(t, db.sealBlockCall) + require.Equal(t, uint64(0), db.sealBlockCall.block.Number) + }) + + t.Run("first block at non-zero number uses empty parent", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: common.Hash{0x01}, // Real parent hash + number: 10, // Non-zero block number + timestamp: 1000, + } + + // isFirstBlock=true should use empty parent for both AddLog and SealBlock + // This allows the logsDB to treat this block as its genesis + err := interop.processBlockLogs(db, blockInfo, types.Receipts{}, true) + + require.NoError(t, err) + require.NotNil(t, db.sealBlockCall) + // Both AddLog and SealBlock should use empty parent for first block + require.Equal(t, common.Hash{}, db.sealBlockCall.parentHash) + }) + + t.Run("AddLog error propagated", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{addLogErr: errors.New("add log failed")} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: 
common.Hash{0x01}, + number: 100, + timestamp: 1000, + } + receipts := types.Receipts{ + &types.Receipt{ + Logs: []*types.Log{{Address: common.Address{0xAA}}}, + }, + } + + err := interop.processBlockLogs(db, blockInfo, receipts, false) + + require.Error(t, err) + require.Contains(t, err.Error(), "add log failed") + }) + + t.Run("SealBlock error propagated", func(t *testing.T) { + t.Parallel() + + interop := &Interop{log: gethlog.New()} + db := &mockLogsDB{sealBlockErr: errors.New("seal failed")} + blockInfo := &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: common.Hash{0x01}, + number: 100, + timestamp: 1000, + } + + err := interop.processBlockLogs(db, blockInfo, types.Receipts{}, false) + + require.Error(t, err) + require.Contains(t, err.Error(), "seal failed") + }) +} + +// ============================================================================= +// TestLoadLogs_ParentHashMismatch +// ============================================================================= + +func TestLoadLogs_ParentHashMismatch(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + chainID := eth.ChainIDFromUInt64(10) + firstBlockHash := common.Hash{0x01} + wrongParentHash := common.Hash{0xFF} + + callCount := 0 + mockChain := &statefulMockChainContainer{ + id: chainID, + blockAtTimestampFn: func(ts uint64) (eth.L2BlockRef, error) { + if ts == 1000 { + return eth.L2BlockRef{ + Hash: firstBlockHash, + Number: 100, + Time: 1000, + }, nil + } + return eth.L2BlockRef{ + Hash: common.Hash{0x02}, + Number: 101, + Time: 1001, + }, nil + }, + fetchReceiptsFn: func(blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) { + callCount++ + if callCount == 1 { + return &testBlockInfo{ + hash: firstBlockHash, + parentHash: common.Hash{}, + number: 100, + timestamp: 1000, + }, types.Receipts{}, nil + } + return &testBlockInfo{ + hash: common.Hash{0x02}, + parentHash: wrongParentHash, // Wrong parent! 
+ number: 101, + timestamp: 1001, + }, types.Receipts{}, nil + }, + } + + chains := map[eth.ChainID]cc.ChainContainer{chainID: mockChain} + interop := New(gethlog.New(), 1000, chains, dataDir) + require.NotNil(t, interop) + interop.ctx = context.Background() + defer func() { _ = interop.Stop(context.Background()) }() + + // Load logs for activation timestamp + err := interop.loadLogs(1000) + require.NoError(t, err) + + // Try to load logs for 1001 - should fail due to parent hash mismatch + err = interop.loadLogs(1001) + require.Error(t, err) + require.ErrorIs(t, err, ErrParentHashMismatch) +} + +// ============================================================================= +// Mock Types for LogsDB Tests +// ============================================================================= + +type mockLogsDB struct { + latestBlock eth.BlockID + hasBlocks bool + seal suptypes.BlockSeal + findSealErr error + addLogErr error + sealBlockErr error + addLogCalls int + sealBlockCall *sealBlockCall + + firstSealedBlock suptypes.BlockSeal + firstSealedBlockErr error + + openBlockRef eth.BlockRef + openBlockLogCnt uint32 + openBlockExecMsg map[uint32]*suptypes.ExecutingMessage + openBlockErr error + + containsSeal suptypes.BlockSeal + containsErr error +} + +type sealBlockCall struct { + parentHash common.Hash + block eth.BlockID + timestamp uint64 +} + +func (m *mockLogsDB) LatestSealedBlock() (eth.BlockID, bool) { + return m.latestBlock, m.hasBlocks +} + +func (m *mockLogsDB) FirstSealedBlock() (suptypes.BlockSeal, error) { + if m.firstSealedBlockErr != nil { + return suptypes.BlockSeal{}, m.firstSealedBlockErr + } + return m.firstSealedBlock, nil +} + +func (m *mockLogsDB) FindSealedBlock(number uint64) (suptypes.BlockSeal, error) { + if m.findSealErr != nil { + return suptypes.BlockSeal{}, m.findSealErr + } + return m.seal, nil +} + +func (m *mockLogsDB) OpenBlock(blockNum uint64) (eth.BlockRef, uint32, map[uint32]*suptypes.ExecutingMessage, error) { + if m.openBlockErr != 
nil { + return eth.BlockRef{}, 0, nil, m.openBlockErr + } + return m.openBlockRef, m.openBlockLogCnt, m.openBlockExecMsg, nil +} + +func (m *mockLogsDB) Contains(query suptypes.ContainsQuery) (suptypes.BlockSeal, error) { + if m.containsErr != nil { + return suptypes.BlockSeal{}, m.containsErr + } + return m.containsSeal, nil +} + +func (m *mockLogsDB) AddLog(logHash common.Hash, parentBlock eth.BlockID, logIdx uint32, execMsg *suptypes.ExecutingMessage) error { + m.addLogCalls++ + return m.addLogErr +} + +func (m *mockLogsDB) SealBlock(parentHash common.Hash, block eth.BlockID, timestamp uint64) error { + m.sealBlockCall = &sealBlockCall{ + parentHash: parentHash, + block: block, + timestamp: timestamp, + } + return m.sealBlockErr +} + +func (m *mockLogsDB) Rewind(inv reads.Invalidator, newHead eth.BlockID) error { return nil } +func (m *mockLogsDB) Clear(inv reads.Invalidator) error { return nil } +func (m *mockLogsDB) Close() error { return nil } + +var _ LogsDB = (*mockLogsDB)(nil) + +// statefulMockChainContainer allows dynamic behavior based on test state +type statefulMockChainContainer struct { + id eth.ChainID + blockAtTimestampFn func(ts uint64) (eth.L2BlockRef, error) + fetchReceiptsFn func(blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) +} + +func (m *statefulMockChainContainer) ID() eth.ChainID { return m.id } +func (m *statefulMockChainContainer) Start(ctx context.Context) error { return nil } +func (m *statefulMockChainContainer) Stop(ctx context.Context) error { return nil } +func (m *statefulMockChainContainer) Pause(ctx context.Context) error { return nil } +func (m *statefulMockChainContainer) Resume(ctx context.Context) error { return nil } +func (m *statefulMockChainContainer) RegisterVerifier(v activity.VerificationActivity) { +} +func (m *statefulMockChainContainer) BlockAtTimestamp(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) { + return m.blockAtTimestampFn(ts) +} +func (m 
*statefulMockChainContainer) VerifiedAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { + return eth.BlockID{}, eth.BlockID{}, nil +} +func (m *statefulMockChainContainer) L1ForL2(ctx context.Context, l2Block eth.BlockID) (eth.BlockID, error) { + return eth.BlockID{}, nil +} +func (m *statefulMockChainContainer) OptimisticAt(ctx context.Context, ts uint64) (eth.BlockID, eth.BlockID, error) { + return eth.BlockID{}, eth.BlockID{}, nil +} +func (m *statefulMockChainContainer) OutputRootAtL2BlockNumber(ctx context.Context, l2BlockNum uint64) (eth.Bytes32, error) { + return eth.Bytes32{}, nil +} +func (m *statefulMockChainContainer) OptimisticOutputAtTimestamp(ctx context.Context, ts uint64) (*eth.OutputResponse, error) { + return nil, nil +} +func (m *statefulMockChainContainer) FetchReceipts(ctx context.Context, blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) { + return m.fetchReceiptsFn(blockID) +} +func (m *statefulMockChainContainer) SyncStatus(ctx context.Context) (*eth.SyncStatus, error) { + return ð.SyncStatus{}, nil +} +func (m *statefulMockChainContainer) BlockTime() uint64 { return 1 } +func (m *statefulMockChainContainer) RewindEngine(ctx context.Context, timestamp uint64) error { + return nil +} +func (m *statefulMockChainContainer) InvalidateBlock(ctx context.Context, height uint64, payloadHash common.Hash) (bool, error) { + return false, nil +} +func (m *statefulMockChainContainer) IsDenied(height uint64, payloadHash common.Hash) (bool, error) { + return false, nil +} +func (m *statefulMockChainContainer) SetResetCallback(cb cc.ResetCallback) {} + +var _ cc.ChainContainer = (*statefulMockChainContainer)(nil) diff --git a/op-supernode/supernode/activity/interop/types.go b/op-supernode/supernode/activity/interop/types.go index 629457dd2ce..252aa0f7e34 100644 --- a/op-supernode/supernode/activity/interop/types.go +++ b/op-supernode/supernode/activity/interop/types.go @@ -1,8 +1,6 @@ package interop import ( - "errors" - 
"github.com/ethereum-optimism/optimism/op-service/eth" ) @@ -32,8 +30,6 @@ func (r *Result) IsEmpty() bool { return r.L1Head == (eth.BlockID{}) && len(r.L2Heads) == 0 && len(r.InvalidHeads) == 0 } -var ErrInvalidResult = errors.New("result is invalid") - func (r *Result) ToVerifiedResult() VerifiedResult { return VerifiedResult{ Timestamp: r.Timestamp, diff --git a/op-supernode/supernode/activity/interop/verified_db.go b/op-supernode/supernode/activity/interop/verified_db.go index 9e7e03c4e48..246fdcef968 100644 --- a/op-supernode/supernode/activity/interop/verified_db.go +++ b/op-supernode/supernode/activity/interop/verified_db.go @@ -181,6 +181,52 @@ func (v *VerifiedDB) LastTimestamp() (uint64, bool) { return v.lastTimestamp, v.initialized } +// Rewind removes all verified results at or after the given timestamp. +// Returns true if any results were deleted, false otherwise. +func (v *VerifiedDB) Rewind(timestamp uint64) (bool, error) { + var deleted bool + + err := v.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(bucketName) + c := b.Cursor() + + // Start from the timestamp and delete all entries at or after it + startKey := timestampToKey(timestamp) + for k, _ := c.Seek(startKey); k != nil; k, _ = c.Next() { + if err := b.Delete(k); err != nil { + return err + } + deleted = true + } + return nil + }) + if err != nil { + return false, fmt.Errorf("failed to rewind verifiedDB: %w", err) + } + + // Update state + if deleted { + // Reinitialize lastTimestamp from the database + if err := v.initLastTimestamp(); err != nil { + return deleted, fmt.Errorf("failed to reinitialize lastTimestamp after rewind: %w", err) + } + // If no timestamps remain, reset initialized state + if err := v.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(bucketName) + c := b.Cursor() + if k, _ := c.First(); k == nil { + v.initialized = false + v.lastTimestamp = 0 + } + return nil + }); err != nil { + return deleted, err + } + } + + return deleted, nil +} + // Close closes the 
database. func (v *VerifiedDB) Close() error { return v.db.Close() diff --git a/op-supernode/supernode/activity/interop/verified_db_test.go b/op-supernode/supernode/activity/interop/verified_db_test.go index 43885fd8d2e..3848c30b021 100644 --- a/op-supernode/supernode/activity/interop/verified_db_test.go +++ b/op-supernode/supernode/activity/interop/verified_db_test.go @@ -175,3 +175,160 @@ func TestVerifiedDB_Persistence(t *testing.T) { }) require.NoError(t, err) } + +func TestVerifiedDB_RewindTo(t *testing.T) { + t.Parallel() + + t.Run("removes entries at and after timestamp", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + db, err := OpenVerifiedDB(dataDir) + require.NoError(t, err) + defer db.Close() + + chainID := eth.ChainIDFromUInt64(10) + + // Commit several timestamps + for ts := uint64(100); ts <= 105; ts++ { + err = db.Commit(VerifiedResult{ + Timestamp: ts, + L1Head: eth.BlockID{Hash: common.BytesToHash([]byte{byte(ts)}), Number: ts}, + L2Heads: map[eth.ChainID]eth.BlockID{chainID: {Hash: common.BytesToHash([]byte{byte(ts + 100)}), Number: ts}}, + }) + require.NoError(t, err) + } + + // Verify all exist + lastTs, _ := db.LastTimestamp() + require.Equal(t, uint64(105), lastTs) + + // Rewind to 103 (should remove 103, 104, 105) + deleted, err := db.Rewind(103) + require.NoError(t, err) + require.True(t, deleted) + + // Verify 100, 101, 102 still exist + for ts := uint64(100); ts <= 102; ts++ { + has, err := db.Has(ts) + require.NoError(t, err) + require.True(t, has, "timestamp %d should still exist", ts) + } + + // Verify 103, 104, 105 are gone + for ts := uint64(103); ts <= 105; ts++ { + has, err := db.Has(ts) + require.NoError(t, err) + require.False(t, has, "timestamp %d should be deleted", ts) + } + + // Last timestamp should be updated to 102 + lastTs, _ = db.LastTimestamp() + require.Equal(t, uint64(102), lastTs) + }) + + t.Run("returns false when no entries deleted", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + 
db, err := OpenVerifiedDB(dataDir) + require.NoError(t, err) + defer db.Close() + + chainID := eth.ChainIDFromUInt64(10) + + // Commit up to timestamp 100 + for ts := uint64(98); ts <= 100; ts++ { + err = db.Commit(VerifiedResult{ + Timestamp: ts, + L1Head: eth.BlockID{Hash: common.BytesToHash([]byte{byte(ts)}), Number: ts}, + L2Heads: map[eth.ChainID]eth.BlockID{chainID: {Hash: common.BytesToHash([]byte{byte(ts + 100)}), Number: ts}}, + }) + require.NoError(t, err) + } + + // Rewind to 200 (nothing to delete) + deleted, err := db.Rewind(200) + require.NoError(t, err) + require.False(t, deleted) + + // All entries should still exist + lastTs, _ := db.LastTimestamp() + require.Equal(t, uint64(100), lastTs) + }) + + t.Run("rewind all entries", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + db, err := OpenVerifiedDB(dataDir) + require.NoError(t, err) + defer db.Close() + + chainID := eth.ChainIDFromUInt64(10) + + // Commit a few entries + for ts := uint64(100); ts <= 102; ts++ { + err = db.Commit(VerifiedResult{ + Timestamp: ts, + L1Head: eth.BlockID{Hash: common.BytesToHash([]byte{byte(ts)}), Number: ts}, + L2Heads: map[eth.ChainID]eth.BlockID{chainID: {Hash: common.BytesToHash([]byte{byte(ts + 100)}), Number: ts}}, + }) + require.NoError(t, err) + } + + // Rewind to 0 (delete all) + deleted, err := db.Rewind(0) + require.NoError(t, err) + require.True(t, deleted) + + // No entries should exist + for ts := uint64(100); ts <= 102; ts++ { + has, err := db.Has(ts) + require.NoError(t, err) + require.False(t, has) + } + + // Last timestamp should be reset to uninitialized + _, initialized := db.LastTimestamp() + require.False(t, initialized) + }) + + t.Run("allows sequential commits after rewind", func(t *testing.T) { + t.Parallel() + dataDir := t.TempDir() + + db, err := OpenVerifiedDB(dataDir) + require.NoError(t, err) + defer db.Close() + + chainID := eth.ChainIDFromUInt64(10) + + // Commit 100-105 + for ts := uint64(100); ts <= 105; ts++ { + err = 
db.Commit(VerifiedResult{ + Timestamp: ts, + L1Head: eth.BlockID{Hash: common.BytesToHash([]byte{byte(ts)}), Number: ts}, + L2Heads: map[eth.ChainID]eth.BlockID{chainID: {Hash: common.BytesToHash([]byte{byte(ts + 100)}), Number: ts}}, + }) + require.NoError(t, err) + } + + // Rewind to 103 + _, err = db.Rewind(103) + require.NoError(t, err) + + // Should be able to commit 103 again (sequential from 102) + err = db.Commit(VerifiedResult{ + Timestamp: 103, + L1Head: eth.BlockID{Hash: common.HexToHash("0xNEW"), Number: 103}, + L2Heads: map[eth.ChainID]eth.BlockID{chainID: {Hash: common.HexToHash("0xNEW2"), Number: 103}}, + }) + require.NoError(t, err) + + // Verify new data + result, err := db.Get(103) + require.NoError(t, err) + require.Equal(t, common.HexToHash("0xNEW"), result.L1Head.Hash) + }) +} diff --git a/op-supernode/supernode/activity/superroot/superroot.go b/op-supernode/supernode/activity/superroot/superroot.go index dace9b15b86..618cb1e2a56 100644 --- a/op-supernode/supernode/activity/superroot/superroot.go +++ b/op-supernode/supernode/activity/superroot/superroot.go @@ -30,6 +30,12 @@ func New(log gethlog.Logger, chains map[eth.ChainID]cc.ChainContainer) *Superroo func (s *Superroot) ActivityName() string { return "superroot" } +// Reset is a no-op for superroot - it always queries chain containers directly +// and doesn't maintain any chain-specific cached state. 
+func (s *Superroot) Reset(chainID eth.ChainID, timestamp uint64) { + // No-op: superroot queries chain containers directly +} + func (s *Superroot) RPCNamespace() string { return "superroot" } func (s *Superroot) RPCService() interface{} { return &superrootAPI{s: s} } diff --git a/op-supernode/supernode/activity/superroot/superroot_test.go b/op-supernode/supernode/activity/superroot/superroot_test.go index 41d563e6f86..2d8823a191b 100644 --- a/op-supernode/supernode/activity/superroot/superroot_test.go +++ b/op-supernode/supernode/activity/superroot/superroot_test.go @@ -9,7 +9,9 @@ import ( "github.com/ethereum-optimism/optimism/op-supernode/supernode/activity" cc "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container" "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" "github.com/stretchr/testify/require" ) @@ -87,10 +89,23 @@ func (m *mockCC) L1ForL2(ctx context.Context, l2Block eth.BlockID) (eth.BlockID, return eth.BlockID{}, nil } +func (m *mockCC) FetchReceipts(ctx context.Context, blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) { + return nil, nil, nil +} + func (m *mockCC) ID() eth.ChainID { return eth.ChainIDFromUInt64(10) } +func (m *mockCC) BlockTime() uint64 { return 1 } +func (m *mockCC) InvalidateBlock(ctx context.Context, height uint64, payloadHash common.Hash) (bool, error) { + return false, nil +} +func (m *mockCC) IsDenied(height uint64, payloadHash common.Hash) (bool, error) { + return false, nil +} +func (m *mockCC) SetResetCallback(cb cc.ResetCallback) {} + var _ cc.ChainContainer = (*mockCC)(nil) func TestSuperroot_AtTimestamp_Succeeds(t *testing.T) { diff --git a/op-supernode/supernode/chain_container/chain_container.go b/op-supernode/supernode/chain_container/chain_container.go index a9993278ad5..e13a8d25a6c 100644 --- 
a/op-supernode/supernode/chain_container/chain_container.go +++ b/op-supernode/supernode/chain_container/chain_container.go @@ -11,6 +11,7 @@ import ( opnodecfg "github.com/ethereum-optimism/optimism/op-node/config" rollupNode "github.com/ethereum-optimism/optimism/op-node/node" + "github.com/ethereum-optimism/optimism/op-node/rollup" "github.com/ethereum-optimism/optimism/op-service/client" "github.com/ethereum-optimism/optimism/op-service/eth" oprpc "github.com/ethereum-optimism/optimism/op-service/rpc" @@ -20,6 +21,8 @@ import ( "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container/engine_controller" "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container/virtual_node" "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" "github.com/prometheus/client_golang/prometheus" ) @@ -43,15 +46,34 @@ type ChainContainer interface { // RewindEngine rewinds the engine to the highest block with timestamp less than or equal to the given timestamp. RewindEngine(ctx context.Context, timestamp uint64) error RegisterVerifier(v activity.VerificationActivity) + // FetchReceipts fetches the receipts for a given block by hash. + // Returns block info and receipts, or an error if the block or receipts cannot be fetched. + FetchReceipts(ctx context.Context, blockHash eth.BlockID) (eth.BlockInfo, types.Receipts, error) + // BlockTime returns the block time in seconds for this chain. + BlockTime() uint64 + // InvalidateBlock adds a block to the deny list and triggers a rewind if the chain + // currently uses that block at the specified height. + // Returns true if a rewind was triggered, false otherwise. + InvalidateBlock(ctx context.Context, height uint64, payloadHash common.Hash) (bool, error) + // IsDenied checks if a block hash is on the deny list at the given height. 
+ IsDenied(height uint64, payloadHash common.Hash) (bool, error) + // SetResetCallback sets a callback that is invoked when the chain resets. + // The supernode uses this to notify activities about chain resets. + SetResetCallback(cb ResetCallback) } type virtualNodeFactory func(cfg *opnodecfg.Config, log gethlog.Logger, initOverrides *rollupNode.InitializationOverrides, appVersion string) virtual_node.VirtualNode +// ResetCallback is called when the chain container resets to a given timestamp. +// The supernode uses this to notify activities about the reset. +type ResetCallback func(chainID eth.ChainID, timestamp uint64) + type simpleChainContainer struct { vn virtual_node.VirtualNode vncfg *opnodecfg.Config cfg config.CLIConfig engine engine_controller.EngineController + denyList *DenyList pause atomic.Bool stop atomic.Bool stopped chan struct{} @@ -65,10 +87,12 @@ type simpleChainContainer struct { virtualNodeFactory virtualNodeFactory // Factory function to create virtual node (for testing) rollupClient *sources.RollupClient // In-proc rollup RPC client bound to rpcHandler verifiers []activity.VerificationActivity + onReset ResetCallback // Called when chain resets to notify activities } // Interface conformance assertions var _ ChainContainer = (*simpleChainContainer)(nil) +var _ rollup.SuperAuthority = (*simpleChainContainer)(nil) func NewChainContainer( chainID eth.ChainID, @@ -101,6 +125,13 @@ func NewChainContainer( log.Warn("failed to attach in-proc rollup client (initial)", "err", err) } } + // Initialize the deny list for block invalidation + denyListPath := c.subPath("denylist") + if denyList, err := OpenDenyList(denyListPath); err != nil { + log.Error("failed to open deny list", "err", err) + } else { + c.denyList = denyList + } // Initialize engine controller (separate connection, not an op-node override) with a short setup timeout if vncfg.L2 != nil { setupCtx, cancel := context.WithTimeout(context.Background(), 10*time.Second) @@ -161,6 +192,8 @@ 
func (c *simpleChainContainer) Start(ctx context.Context) error { c.addMetricsRegistry(c.chainID.String(), reg) } } + // Pass the chain container as SuperAuthority for payload denylist checks + c.initOverload.SuperAuthority = c } c.vn = c.virtualNodeFactory(c.vncfg, c.log, c.initOverload, c.appVersion) if c.pause.Load() { @@ -221,6 +254,13 @@ func (c *simpleChainContainer) Stop(ctx context.Context) error { _ = c.engine.Close() } + // Close deny list database + if c.denyList != nil { + if err := c.denyList.Close(); err != nil { + c.log.Error("error closing deny list", "error", err) + } + } + select { case <-c.stopped: return nil @@ -373,6 +413,22 @@ func (c *simpleChainContainer) OptimisticOutputAtTimestamp(ctx context.Context, return out, nil } +// FetchReceipts fetches the receipts for a given block by hash. +func (c *simpleChainContainer) FetchReceipts(ctx context.Context, blockID eth.BlockID) (eth.BlockInfo, types.Receipts, error) { + if c.engine == nil { + return nil, nil, engine_controller.ErrNoEngineClient + } + return c.engine.FetchReceipts(ctx, blockID.Hash) +} + +// BlockTime returns the block time in seconds for this chain from the rollup config. +func (c *simpleChainContainer) BlockTime() uint64 { + if c.vncfg == nil { + return 0 + } + return c.vncfg.Rollup.BlockTime +} + // attachInProcRollupClient creates a new in-proc rollup RPC client bound to the current rpcHandler. // It will close any existing client before replacing it. 
func (c *simpleChainContainer) attachInProcRollupClient() error { @@ -397,7 +453,8 @@ func isCriticalRewindError(err error) bool { return errors.Is(err, engine_controller.ErrNoEngineClient) || errors.Is(err, engine_controller.ErrNoRollupConfig) || errors.Is(err, engine_controller.ErrRewindComputeTargetsFailed) || - errors.Is(err, engine_controller.ErrRewindTimestampToBlockConversion) + errors.Is(err, engine_controller.ErrRewindTimestampToBlockConversion) || + errors.Is(err, engine_controller.ErrRewindOverFinalizedHead) } func (c *simpleChainContainer) RewindEngine(ctx context.Context, timestamp uint64) error { @@ -445,6 +502,11 @@ retryLoop: } } + // Notify activities about the reset + if c.onReset != nil { + c.onReset(c.chainID, timestamp) + } + // resume the chain container to trigger a new vn to be started err = c.Resume(ctx) if err != nil { @@ -454,3 +516,26 @@ retryLoop: return nil } + +// SetResetCallback sets a callback that is invoked when the chain resets. +// This must only be called during initialization, before the chain container starts processing. +// Calling this while InvalidateBlock may be running is unsafe. +func (c *simpleChainContainer) SetResetCallback(cb ResetCallback) { + c.onReset = cb +} + +// blockNumberToTimestamp converts a block number to its timestamp using rollup config. +func (c *simpleChainContainer) blockNumberToTimestamp(blockNum uint64) uint64 { + if c.vncfg == nil { + return 0 + } + return c.vncfg.Rollup.Genesis.L2Time + (blockNum * c.vncfg.Rollup.BlockTime) +} + +// IsDenied checks if a block hash is on the deny list at the given height. 
+func (c *simpleChainContainer) IsDenied(height uint64, payloadHash common.Hash) (bool, error) { + if c.denyList == nil { + return false, fmt.Errorf("deny list not initialized") + } + return c.denyList.Contains(height, payloadHash) +} diff --git a/op-supernode/supernode/chain_container/chain_container_test.go b/op-supernode/supernode/chain_container/chain_container_test.go index 4c8c5db640e..343ceff34ee 100644 --- a/op-supernode/supernode/chain_container/chain_container_test.go +++ b/op-supernode/supernode/chain_container/chain_container_test.go @@ -19,6 +19,8 @@ import ( "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container/engine_controller" "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container/virtual_node" "github.com/ethereum/go-ethereum" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" "github.com/stretchr/testify/require" ) @@ -135,6 +137,10 @@ func (m *mockEngineController) OutputV0AtBlockNumber(ctx context.Context, num ui return nil, nil } +func (m *mockEngineController) FetchReceipts(ctx context.Context, blockHash common.Hash) (eth.BlockInfo, types.Receipts, error) { + return nil, nil, nil +} + func (m *mockEngineController) Close() error { return nil } @@ -161,6 +167,8 @@ func (m *mockVerificationActivity) VerifiedAtTimestamp(ts uint64) (bool, error) return m.verifiedAtTimestampResult, m.verifiedAtTimestampErr } +func (m *mockVerificationActivity) Reset(chainID eth.ChainID, timestamp uint64) {} + // Test helpers func createTestVNConfig() *opnodecfg.Config { return &opnodecfg.Config{ @@ -170,9 +178,9 @@ func createTestVNConfig() *opnodecfg.Config { } } -func createTestCLIConfig() config.CLIConfig { +func createTestCLIConfig(dataDir string) config.CLIConfig { return config.CLIConfig{ - DataDir: "/tmp/test", + DataDir: dataDir, RPCConfig: oprpc.CLIConfig{ ListenAddr: "0.0.0.0", ListenPort: 8545, @@ -209,10 +217,10 @@ func 
TestChainContainer_Constructor(t *testing.T) { chainID := eth.ChainIDFromUInt64(420) vncfg := createTestVNConfig() log := createTestLogger(t) - cfg := createTestCLIConfig() initOverload := &rollupNode.InitializationOverrides{} t.Run("creates container with correct config", func(t *testing.T) { + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) require.NotNil(t, container) @@ -229,21 +237,22 @@ func TestChainContainer_Constructor(t *testing.T) { }) t.Run("SafeDBPath uses subPath", func(t *testing.T) { + dataDir := t.TempDir() cfg := config.CLIConfig{ - DataDir: "/tmp/datadir", + DataDir: dataDir, } container := NewChainContainer(eth.ChainIDFromUInt64(420), vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) - expectedPath := filepath.Join("/tmp/datadir", "420", "safe_db") + expectedPath := filepath.Join(dataDir, "420", "safe_db") require.Equal(t, expectedPath, impl.vncfg.SafeDBPath) }) t.Run("RPC config inherited from supernode config", func(t *testing.T) { cfg := config.CLIConfig{ - DataDir: "/tmp/test", + DataDir: t.TempDir(), RPCConfig: oprpc.CLIConfig{ ListenAddr: "127.0.0.1", ListenPort: 9545, @@ -258,6 +267,7 @@ func TestChainContainer_Constructor(t *testing.T) { }) t.Run("appVersion set correctly", func(t *testing.T) { + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -266,31 +276,32 @@ func TestChainContainer_Constructor(t *testing.T) { }) t.Run("subPath combines DataDir, chainID, and path correctly", func(t *testing.T) { + dataDir := t.TempDir() cfg := config.CLIConfig{ - DataDir: "/data", + DataDir: dataDir, } container := NewChainContainer(eth.ChainIDFromUInt64(420), vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) 
result := impl.subPath("safe_db") - expected := filepath.Join("/data", "420", "safe_db") + expected := filepath.Join(dataDir, "420", "safe_db") require.Equal(t, expected, result) }) t.Run("subPath works with various chain IDs", func(t *testing.T) { + dataDir := t.TempDir() cfg := config.CLIConfig{ - DataDir: "/data", + DataDir: dataDir, } testCases := []struct { - chainID eth.ChainID - path string - expected string + chainID eth.ChainID + path string }{ - {eth.ChainIDFromUInt64(10), "safe_db", "/data/10/safe_db"}, - {eth.ChainIDFromUInt64(11155420), "safe_db", "/data/11155420/safe_db"}, - {eth.ChainIDFromUInt64(8453), "peerstore", "/data/8453/peerstore"}, + {eth.ChainIDFromUInt64(10), "safe_db"}, + {eth.ChainIDFromUInt64(11155420), "safe_db"}, + {eth.ChainIDFromUInt64(8453), "peerstore"}, } for _, tc := range testCases { @@ -299,7 +310,7 @@ func TestChainContainer_Constructor(t *testing.T) { require.True(t, ok) result := impl.subPath(tc.path) - expected := filepath.Join(cfg.DataDir, tc.chainID.String(), tc.path) + expected := filepath.Join(dataDir, tc.chainID.String(), tc.path) require.Equal(t, expected, result, "subPath should work for chain %d", tc.chainID) } }) @@ -311,11 +322,11 @@ func TestChainContainer_Lifecycle(t *testing.T) { chainID := eth.ChainIDFromUInt64(420) vncfg := createTestVNConfig() - cfg := createTestCLIConfig() initOverload := &rollupNode.InitializationOverrides{} t.Run("Start respects stop flag", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -342,6 +353,7 @@ func TestChainContainer_Lifecycle(t *testing.T) { t.Run("Stop sets stop flag", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := 
container.(*simpleChainContainer) require.True(t, ok) @@ -356,6 +368,7 @@ func TestChainContainer_Lifecycle(t *testing.T) { t.Run("signals stopped channel on exit", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -385,6 +398,7 @@ func TestChainContainer_Lifecycle(t *testing.T) { t.Run("context cancellation stops restart loop", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -425,6 +439,7 @@ func TestChainContainer_Lifecycle(t *testing.T) { t.Run("Stop flag stops restart loop", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -467,11 +482,11 @@ func TestChainContainer_PauseResume(t *testing.T) { chainID := eth.ChainIDFromUInt64(420) vncfg := createTestVNConfig() - cfg := createTestCLIConfig() initOverload := &rollupNode.InitializationOverrides{} t.Run("Pause sets pause flag", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -485,6 +500,7 @@ func TestChainContainer_PauseResume(t *testing.T) { t.Run("Resume clears pause flag", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -500,6 +516,7 @@ func 
TestChainContainer_PauseResume(t *testing.T) { t.Run("paused container doesn't start VN, resumed does", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -739,11 +756,11 @@ func TestChainContainer_VirtualNodeIntegration(t *testing.T) { chainID := eth.ChainIDFromUInt64(420) vncfg := createTestVNConfig() - cfg := createTestCLIConfig() initOverload := &rollupNode.InitializationOverrides{} t.Run("Start creates and starts virtual node", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -774,6 +791,7 @@ func TestChainContainer_VirtualNodeIntegration(t *testing.T) { t.Run("auto-restart virtual node on exit", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -810,6 +828,7 @@ func TestChainContainer_VirtualNodeIntegration(t *testing.T) { t.Run("Stop calls virtual node Stop", func(t *testing.T) { log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, nil, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -855,6 +874,7 @@ func TestChainContainer_VirtualNodeIntegration(t *testing.T) { } log := createTestLogger(t) + cfg := createTestCLIConfig(t.TempDir()) container := NewChainContainer(chainID, vncfg, log, cfg, initOverload, nil, setHandler, nil) impl, ok := container.(*simpleChainContainer) require.True(t, ok) @@ -887,7 +907,7 @@ func TestChainContainer_VerifiedAt(t *testing.T) { chainID := 
eth.ChainIDFromUInt64(420) vncfg := createTestVNConfig() log := createTestLogger(t) - cfg := createTestCLIConfig() + cfg := createTestCLIConfig(t.TempDir()) initOverload := &rollupNode.InitializationOverrides{} t.Run("returns error when verification activity reports not verified", func(t *testing.T) { diff --git a/op-supernode/supernode/chain_container/engine_controller/engine_controller.go b/op-supernode/supernode/chain_container/engine_controller/engine_controller.go index 696a88ccbf2..58053ef64c2 100644 --- a/op-supernode/supernode/chain_container/engine_controller/engine_controller.go +++ b/op-supernode/supernode/chain_container/engine_controller/engine_controller.go @@ -12,6 +12,7 @@ import ( "github.com/ethereum-optimism/optimism/op-service/sources" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" ) @@ -25,6 +26,8 @@ type EngineController interface { OutputV0AtBlockNumber(ctx context.Context, num uint64) (*eth.OutputV0, error) // RewindToTimestamp rewinds the L2 execution layer to block at or before the given timestamp. RewindToTimestamp(ctx context.Context, timestamp uint64) error + // FetchReceipts fetches the receipts for a given block by hash. + FetchReceipts(ctx context.Context, blockHash common.Hash) (eth.BlockInfo, types.Receipts, error) // Close releases any underlying RPC resources. 
Close() error } @@ -37,6 +40,7 @@ type l2Provider interface { PayloadByNumber(ctx context.Context, number uint64) (*eth.ExecutionPayloadEnvelope, error) ForkchoiceUpdate(ctx context.Context, state *eth.ForkchoiceState, attr *eth.PayloadAttributes) (*eth.ForkchoiceUpdatedResult, error) NewPayload(ctx context.Context, payload *eth.ExecutionPayload, parentBeaconBlockRoot *common.Hash) (*eth.PayloadStatusV1, error) + FetchReceipts(ctx context.Context, blockHash common.Hash) (eth.BlockInfo, types.Receipts, error) Close() } @@ -151,6 +155,13 @@ func (e *simpleEngineController) OutputV0AtBlockNumber(ctx context.Context, num return e.l2.OutputV0AtBlockNumber(ctx, num) } +func (e *simpleEngineController) FetchReceipts(ctx context.Context, blockHash common.Hash) (eth.BlockInfo, types.Receipts, error) { + if e.l2 == nil { + return nil, nil, ErrNoEngineClient + } + return e.l2.FetchReceipts(ctx, blockHash) +} + func (e *simpleEngineController) Close() error { if e.l2 != nil { e.l2.Close() diff --git a/op-supernode/supernode/chain_container/engine_controller/engine_controller_test.go b/op-supernode/supernode/chain_container/engine_controller/engine_controller_test.go index d756be2dff4..9794fd95d91 100644 --- a/op-supernode/supernode/chain_container/engine_controller/engine_controller_test.go +++ b/op-supernode/supernode/chain_container/engine_controller/engine_controller_test.go @@ -9,6 +9,7 @@ import ( "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" gethlog "github.com/ethereum/go-ethereum/log" "github.com/stretchr/testify/require" ) @@ -147,6 +148,9 @@ func (m *mockL2) ForkchoiceUpdate(ctx context.Context, state *eth.ForkchoiceStat } return ð.ForkchoiceUpdatedResult{PayloadStatus: eth.PayloadStatusV1{Status: eth.ExecutionValid}}, nil } +func (m *mockL2) FetchReceipts(ctx context.Context, blockHash common.Hash) (eth.BlockInfo, types.Receipts, error) 
{ + return nil, nil, nil +} func (m *mockL2) Close() { } func (m *mockL2) NewPayload(ctx context.Context, payload *eth.ExecutionPayload, parentBeaconBlockRoot *common.Hash) (*eth.PayloadStatusV1, error) { diff --git a/op-supernode/supernode/chain_container/engine_controller/rewind.go b/op-supernode/supernode/chain_container/engine_controller/rewind.go index 72357509e3e..c8c9005fd15 100644 --- a/op-supernode/supernode/chain_container/engine_controller/rewind.go +++ b/op-supernode/supernode/chain_container/engine_controller/rewind.go @@ -20,6 +20,7 @@ var ( ErrRewindFCURejected = errors.New("forkchoice update rejected by engine") ErrRewindTimestampToBlockConversion = errors.New("failed to convert timestamp to block number") ErrRewindPayloadNotFound = errors.New("failed to get payload for block") + ErrRewindOverFinalizedHead = errors.New("cannot rewind over finalized head") ) // RewindToTimestamp rewinds the L2 execution layer to the block at or before the given timestamp. @@ -101,6 +102,10 @@ func (e *simpleEngineController) computeRewindTargets(ctx context.Context, targe return eth.L2BlockRef{}, eth.L2BlockRef{}, fmt.Errorf("failed to get current finalized block: %w", err) } + if targetBlock.Number < currentFinalized.Number { + return eth.L2BlockRef{}, eth.L2BlockRef{}, ErrRewindOverFinalizedHead + } + return earliest(currentSafe, targetBlock), earliest(currentFinalized, targetBlock), nil } diff --git a/op-supernode/supernode/chain_container/engine_controller/rewind_test.go b/op-supernode/supernode/chain_container/engine_controller/rewind_test.go index dc69b867b06..b9ee2f332eb 100644 --- a/op-supernode/supernode/chain_container/engine_controller/rewind_test.go +++ b/op-supernode/supernode/chain_container/engine_controller/rewind_test.go @@ -40,7 +40,8 @@ func TestEngineController_RewindToTimestamp(t *testing.T) { incorrectUnsafe, incorrectSafe, incorrectFinalized bool - targetBeforeGenesis bool + targetBeforeGenesis bool + targetBeforeFinalized bool } testCases := 
[]testCase{ @@ -107,6 +108,11 @@ func TestEngineController_RewindToTimestamp(t *testing.T) { targetBeforeGenesis: true, expectedError: ErrRewindTimestampToBlockConversion, }, + { + name: "target before finalized", + targetBeforeFinalized: true, + expectedError: ErrRewindOverFinalizedHead, + }, } // Setup: chain is at block 10, we want to rewind to block 5 @@ -137,28 +143,28 @@ func TestEngineController_RewindToTimestamp(t *testing.T) { // Initial state before rewind refsByLabel: map[eth.BlockLabel]eth.L2BlockRef{ eth.Safe: {Number: 10, Hash: common.Hash{0x0a}}, - eth.Finalized: {Number: 8, Hash: common.Hash{0x08}}, + eth.Finalized: {Number: 2, Hash: common.Hash{0x08}}, }, // State after FCU completes - verification reads these values refsByLabelAfterFCU: map[eth.BlockLabel]eth.L2BlockRef{ eth.Unsafe: targetRef, - eth.Safe: targetRef, // clamped to target (min of 10 and 5) - eth.Finalized: targetRef, // clamped to target (min of 8 and 5) + eth.Safe: targetRef, // clamped to target (min of 10 and 5) + eth.Finalized: {Number: 2, Hash: common.Hash{0x08}}, // clamped to finalized head (min of 2 and 5) }, payloadsByNumber: map[uint64]*eth.ExecutionPayloadEnvelope{ targetBlockNum: &payloadEnvelope, }, } } - rollupConfig := rollup.Config{ - Genesis: rollup.Genesis{L2: eth.BlockID{Number: 0}, L2Time: genesisTime}, - BlockTime: 2, - L2ChainID: big.NewInt(420), - } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { // First, get a "good mock" which would pass the test with no error: + rollupConfig := rollup.Config{ + Genesis: rollup.Genesis{L2: eth.BlockID{Number: 0}, L2Time: genesisTime}, + BlockTime: 2, + L2ChainID: big.NewInt(420), + } l2 := createMockL2() // Next, apply the sabotage(s): @@ -189,6 +195,9 @@ func TestEngineController_RewindToTimestamp(t *testing.T) { if tc.targetBeforeGenesis { rollupConfig.Genesis = rollup.Genesis{L2Time: 2000} } + if tc.targetBeforeFinalized { + l2.refsByLabel[eth.Finalized] = eth.L2BlockRef{Number: targetBlockNum + 1, 
Hash: common.Hash{0xff}} + } // Make a "good" engine controller, using a potentially sabotaged mock L2 ec := &simpleEngineController{l2: &l2, rollup: &rollupConfig, log: testlog.Logger(t, log.LvlDebug)} diff --git a/op-supernode/supernode/chain_container/invalidation.go b/op-supernode/supernode/chain_container/invalidation.go new file mode 100644 index 00000000000..2c8e5ad4fb7 --- /dev/null +++ b/op-supernode/supernode/chain_container/invalidation.go @@ -0,0 +1,214 @@ +package chain_container + +import ( + "context" + "encoding/binary" + "fmt" + "os" + "path/filepath" + "sync" + + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum/go-ethereum/common" + bolt "go.etcd.io/bbolt" +) + +const ( + denyListDBName = "denylist" +) + +// denyListBucketName is the name of the bbolt bucket used to store denied block hashes. +var denyListBucketName = []byte("denied_blocks") + +// DenyList provides persistence for invalid block payload hashes using bbolt. +// Blocks are keyed by block height, with each height potentially having multiple denied hashes. +type DenyList struct { + db *bolt.DB + mu sync.RWMutex +} + +// OpenDenyList opens or creates a DenyList at the given data directory. +func OpenDenyList(dataDir string) (*DenyList, error) { + if err := os.MkdirAll(dataDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create denylist directory %s: %w", dataDir, err) + } + dbPath := filepath.Join(dataDir, denyListDBName+".db") + db, err := bolt.Open(dbPath, 0600, nil) + if err != nil { + return nil, fmt.Errorf("failed to open denylist bbolt at %s: %w", dbPath, err) + } + + // Ensure the bucket exists + err = db.Update(func(tx *bolt.Tx) error { + _, err := tx.CreateBucketIfNotExists(denyListBucketName) + return err + }) + if err != nil { + db.Close() + return nil, fmt.Errorf("failed to create denylist bucket: %w", err) + } + + return &DenyList{db: db}, nil +} + +// heightToKey converts a block height to a big-endian byte key. 
+// Using big-endian ensures lexicographic ordering matches numeric ordering. +func heightToKey(height uint64) []byte { + key := make([]byte, 8) + binary.BigEndian.PutUint64(key, height) + return key +} + +// Add adds a payload hash to the deny list at the given block height. +// Multiple hashes can be denied at the same height. +func (d *DenyList) Add(height uint64, payloadHash common.Hash) error { + d.mu.Lock() + defer d.mu.Unlock() + + key := heightToKey(height) + + return d.db.Update(func(tx *bolt.Tx) error { + b := tx.Bucket(denyListBucketName) + + // Get existing hashes at this height + existing := b.Get(key) + var hashes []byte + if existing != nil { + // Check if hash already exists + for i := 0; i+common.HashLength <= len(existing); i += common.HashLength { + if common.BytesToHash(existing[i:i+common.HashLength]) == payloadHash { + // Already denied + return nil + } + } + hashes = make([]byte, len(existing), len(existing)+common.HashLength) + copy(hashes, existing) + } + + // Append the new hash + hashes = append(hashes, payloadHash.Bytes()...) + return b.Put(key, hashes) + }) +} + +// Contains checks if a payload hash is denied at the given block height. +func (d *DenyList) Contains(height uint64, payloadHash common.Hash) (bool, error) { + d.mu.RLock() + defer d.mu.RUnlock() + + key := heightToKey(height) + var found bool + + err := d.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(denyListBucketName) + existing := b.Get(key) + if existing == nil { + return nil + } + + // Search for the hash in the list + for i := 0; i+common.HashLength <= len(existing); i += common.HashLength { + if common.BytesToHash(existing[i:i+common.HashLength]) == payloadHash { + found = true + return nil + } + } + return nil + }) + + return found, err +} + +// GetDeniedHashes returns all denied payload hashes at the given block height. 
+func (d *DenyList) GetDeniedHashes(height uint64) ([]common.Hash, error) { + d.mu.RLock() + defer d.mu.RUnlock() + + key := heightToKey(height) + var hashes []common.Hash + + err := d.db.View(func(tx *bolt.Tx) error { + b := tx.Bucket(denyListBucketName) + existing := b.Get(key) + if existing == nil { + return nil + } + + for i := 0; i+common.HashLength <= len(existing); i += common.HashLength { + hashes = append(hashes, common.BytesToHash(existing[i:i+common.HashLength])) + } + return nil + }) + + return hashes, err +} + +// Close closes the database. +func (d *DenyList) Close() error { + return d.db.Close() +} + +// InvalidateBlock adds a block to the deny list and triggers a rewind if the chain +// currently uses that block at the specified height. +// Returns true if a rewind was triggered, false otherwise. +// Note: Genesis block (height=0) cannot be invalidated as there is no prior block to rewind to. +func (c *simpleChainContainer) InvalidateBlock(ctx context.Context, height uint64, payloadHash common.Hash) (bool, error) { + if c.denyList == nil { + return false, fmt.Errorf("deny list not initialized") + } + + // Cannot invalidate genesis block - there is no prior block to rewind to + if height == 0 { + return false, fmt.Errorf("cannot invalidate genesis block (height=0)") + } + + // Add to deny list first + if err := c.denyList.Add(height, payloadHash); err != nil { + return false, fmt.Errorf("failed to add block to deny list: %w", err) + } + + c.log.Info("added block to deny list", + "height", height, + "payloadHash", payloadHash, + ) + + // Check if the current chain uses this block at this height + if c.engine == nil { + c.log.Warn("engine not initialized, cannot check current block") + return false, nil + } + + currentBlock, err := c.engine.BlockAtTimestamp(ctx, c.blockNumberToTimestamp(height), eth.Unsafe) + if err != nil { + c.log.Warn("failed to get current block at height", "height", height, "err", err) + return false, nil + } + + // Compare the 
current block hash with the invalidated hash + if currentBlock.Hash != payloadHash { + c.log.Info("current block differs from invalidated block, no rewind needed", + "height", height, + "currentHash", currentBlock.Hash, + "invalidatedHash", payloadHash, + ) + return false, nil + } + + c.log.Warn("current block matches invalidated block, initiating rewind", + "height", height, + "hash", payloadHash, + ) + + // Rewind to the prior block's timestamp + priorTimestamp := c.blockNumberToTimestamp(height - 1) + if err := c.RewindEngine(ctx, priorTimestamp); err != nil { + return false, fmt.Errorf("failed to rewind engine: %w", err) + } + + c.log.Info("rewind completed after block invalidation", + "invalidatedHeight", height, + "rewindToTimestamp", priorTimestamp, + ) + + return true, nil +} diff --git a/op-supernode/supernode/chain_container/invalidation_test.go b/op-supernode/supernode/chain_container/invalidation_test.go new file mode 100644 index 00000000000..2006d7e147f --- /dev/null +++ b/op-supernode/supernode/chain_container/invalidation_test.go @@ -0,0 +1,612 @@ +package chain_container + +import ( + "context" + "path/filepath" + "sync" + "testing" + + opnodecfg "github.com/ethereum-optimism/optimism/op-node/config" + "github.com/ethereum-optimism/optimism/op-service/eth" + "github.com/ethereum-optimism/optimism/op-supernode/supernode/chain_container/virtual_node" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/core/types" + gethlog "github.com/ethereum/go-ethereum/log" + "github.com/stretchr/testify/require" +) + +func TestDenyList_AddAndContains(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setup func(t *testing.T, dl *DenyList) + check func(t *testing.T, dl *DenyList) + }{ + { + name: "single hash at height", + setup: func(t *testing.T, dl *DenyList) { + hash := common.HexToHash("0x1111111111111111111111111111111111111111111111111111111111111111") + require.NoError(t, dl.Add(100, hash)) + }, + check: func(t 
*testing.T, dl *DenyList) { + hash := common.HexToHash("0x1111111111111111111111111111111111111111111111111111111111111111") + found, err := dl.Contains(100, hash) + require.NoError(t, err) + require.True(t, found, "hash should be found at height 100") + }, + }, + { + name: "multiple hashes same height", + setup: func(t *testing.T, dl *DenyList) { + hashes := []common.Hash{ + common.HexToHash("0xaaaa"), + common.HexToHash("0xbbbb"), + common.HexToHash("0xcccc"), + } + for _, h := range hashes { + require.NoError(t, dl.Add(50, h)) + } + }, + check: func(t *testing.T, dl *DenyList) { + hashes := []common.Hash{ + common.HexToHash("0xaaaa"), + common.HexToHash("0xbbbb"), + common.HexToHash("0xcccc"), + } + for _, h := range hashes { + found, err := dl.Contains(50, h) + require.NoError(t, err) + require.True(t, found, "hash %s should be found at height 50", h) + } + }, + }, + { + name: "hash at wrong height returns false", + setup: func(t *testing.T, dl *DenyList) { + hash := common.HexToHash("0xdddd") + require.NoError(t, dl.Add(10, hash)) + }, + check: func(t *testing.T, dl *DenyList) { + hash := common.HexToHash("0xdddd") + // Check at different height + found, err := dl.Contains(11, hash) + require.NoError(t, err) + require.False(t, found, "hash should NOT be found at height 11") + + // Verify it IS at height 10 + found, err = dl.Contains(10, hash) + require.NoError(t, err) + require.True(t, found, "hash should be found at height 10") + }, + }, + { + name: "duplicate add is idempotent", + setup: func(t *testing.T, dl *DenyList) { + hash := common.HexToHash("0xeeee") + require.NoError(t, dl.Add(200, hash)) + require.NoError(t, dl.Add(200, hash)) // Add again + require.NoError(t, dl.Add(200, hash)) // And again + }, + check: func(t *testing.T, dl *DenyList) { + hash := common.HexToHash("0xeeee") + hashes, err := dl.GetDeniedHashes(200) + require.NoError(t, err) + require.Len(t, hashes, 1, "should only have one entry despite multiple adds") + require.Equal(t, hash, 
hashes[0]) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + dir := t.TempDir() + dl, err := OpenDenyList(dir) + require.NoError(t, err) + defer dl.Close() + + tt.setup(t, dl) + tt.check(t, dl) + }) + } +} + +func TestDenyList_Persistence(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setup func(t *testing.T, dir string) + check func(t *testing.T, dir string) + }{ + { + name: "survives close and reopen", + setup: func(t *testing.T, dir string) { + dl, err := OpenDenyList(dir) + require.NoError(t, err) + + hashes := []struct { + height uint64 + hash common.Hash + }{ + {100, common.HexToHash("0x1111")}, + {100, common.HexToHash("0x2222")}, + {200, common.HexToHash("0x3333")}, + {300, common.HexToHash("0x4444")}, + } + for _, h := range hashes { + require.NoError(t, dl.Add(h.height, h.hash)) + } + + require.NoError(t, dl.Close()) + }, + check: func(t *testing.T, dir string) { + dl, err := OpenDenyList(dir) + require.NoError(t, err) + defer dl.Close() + + // Verify all hashes are still present + found, err := dl.Contains(100, common.HexToHash("0x1111")) + require.NoError(t, err) + require.True(t, found) + + found, err = dl.Contains(100, common.HexToHash("0x2222")) + require.NoError(t, err) + require.True(t, found) + + found, err = dl.Contains(200, common.HexToHash("0x3333")) + require.NoError(t, err) + require.True(t, found) + + found, err = dl.Contains(300, common.HexToHash("0x4444")) + require.NoError(t, err) + require.True(t, found) + + // Verify counts + hashes100, err := dl.GetDeniedHashes(100) + require.NoError(t, err) + require.Len(t, hashes100, 2) + + hashes200, err := dl.GetDeniedHashes(200) + require.NoError(t, err) + require.Len(t, hashes200, 1) + }, + }, + { + name: "empty DB on fresh open", + setup: func(t *testing.T, dir string) { + // No setup - fresh directory + }, + check: func(t *testing.T, dir string) { + dl, err := OpenDenyList(dir) + require.NoError(t, err) + defer dl.Close() 
+ + found, err := dl.Contains(100, common.HexToHash("0xabcd")) + require.NoError(t, err) + require.False(t, found, "fresh DB should not contain any hashes") + + hashes, err := dl.GetDeniedHashes(100) + require.NoError(t, err) + require.Empty(t, hashes, "fresh DB should return empty slice") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + dir := filepath.Join(t.TempDir(), "denylist") + + tt.setup(t, dir) + tt.check(t, dir) + }) + } +} + +func TestDenyList_GetDeniedHashes(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setup func(t *testing.T, dl *DenyList) + check func(t *testing.T, dl *DenyList) + }{ + { + name: "returns all hashes at height", + setup: func(t *testing.T, dl *DenyList) { + for i := 0; i < 5; i++ { + hash := common.BigToHash(common.Big1.Add(common.Big1, common.Big0.SetInt64(int64(i)))) + require.NoError(t, dl.Add(100, hash)) + } + }, + check: func(t *testing.T, dl *DenyList) { + hashes, err := dl.GetDeniedHashes(100) + require.NoError(t, err) + require.Len(t, hashes, 5, "should return all 5 hashes") + }, + }, + { + name: "empty for clean height", + setup: func(t *testing.T, dl *DenyList) { + // Add hashes at other heights + require.NoError(t, dl.Add(10, common.HexToHash("0xaaaa"))) + require.NoError(t, dl.Add(30, common.HexToHash("0xbbbb"))) + }, + check: func(t *testing.T, dl *DenyList) { + hashes, err := dl.GetDeniedHashes(20) + require.NoError(t, err) + require.Empty(t, hashes, "height 20 should have no entries") + }, + }, + { + name: "isolated by height", + setup: func(t *testing.T, dl *DenyList) { + // Add different hashes at different heights + require.NoError(t, dl.Add(10, common.HexToHash("0x1010"))) + require.NoError(t, dl.Add(10, common.HexToHash("0x1011"))) + require.NoError(t, dl.Add(20, common.HexToHash("0x2020"))) + require.NoError(t, dl.Add(20, common.HexToHash("0x2021"))) + require.NoError(t, dl.Add(20, common.HexToHash("0x2022"))) + require.NoError(t, 
dl.Add(30, common.HexToHash("0x3030"))) + }, + check: func(t *testing.T, dl *DenyList) { + hashes10, err := dl.GetDeniedHashes(10) + require.NoError(t, err) + require.Len(t, hashes10, 2, "height 10 should have 2 hashes") + + hashes20, err := dl.GetDeniedHashes(20) + require.NoError(t, err) + require.Len(t, hashes20, 3, "height 20 should have 3 hashes") + + hashes30, err := dl.GetDeniedHashes(30) + require.NoError(t, err) + require.Len(t, hashes30, 1, "height 30 should have 1 hash") + + // Verify specific hashes at height 20 + expected := map[common.Hash]bool{ + common.HexToHash("0x2020"): true, + common.HexToHash("0x2021"): true, + common.HexToHash("0x2022"): true, + } + for _, h := range hashes20 { + require.True(t, expected[h], "unexpected hash at height 20: %s", h) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + dir := t.TempDir() + dl, err := OpenDenyList(dir) + require.NoError(t, err) + defer dl.Close() + + tt.setup(t, dl) + tt.check(t, dl) + }) + } +} + +// mockEngineForInvalidation implements engine_controller.EngineController for invalidation tests +type mockEngineForInvalidation struct { + blockAtTimestampFn func(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) + rewindCalled bool + rewindTimestamp uint64 +} + +func (m *mockEngineForInvalidation) BlockAtTimestamp(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) { + if m.blockAtTimestampFn != nil { + return m.blockAtTimestampFn(ctx, ts, label) + } + return eth.L2BlockRef{}, nil +} + +func (m *mockEngineForInvalidation) OutputV0AtBlockNumber(ctx context.Context, num uint64) (*eth.OutputV0, error) { + return nil, nil +} + +func (m *mockEngineForInvalidation) RewindToTimestamp(ctx context.Context, timestamp uint64) error { + m.rewindCalled = true + m.rewindTimestamp = timestamp + return nil +} + +func (m *mockEngineForInvalidation) FetchReceipts(ctx context.Context, blockHash common.Hash) 
(eth.BlockInfo, types.Receipts, error) { + return nil, nil, nil +} + +func (m *mockEngineForInvalidation) Close() error { + return nil +} + +// mockVNForInvalidation implements virtual_node.VirtualNode for invalidation tests +type mockVNForInvalidation struct { + stopErr error +} + +func (m *mockVNForInvalidation) Start(ctx context.Context) error { return nil } +func (m *mockVNForInvalidation) Stop(ctx context.Context) error { return m.stopErr } +func (m *mockVNForInvalidation) LatestSafe(ctx context.Context) (eth.BlockID, error) { + return eth.BlockID{}, nil +} +func (m *mockVNForInvalidation) SafeHeadAtL1(ctx context.Context, l1BlockNum uint64) (eth.BlockID, eth.BlockID, error) { + return eth.BlockID{}, eth.BlockID{}, nil +} +func (m *mockVNForInvalidation) L1AtSafeHead(ctx context.Context, target eth.BlockID) (eth.BlockID, error) { + return eth.BlockID{}, nil +} +func (m *mockVNForInvalidation) SyncStatus(ctx context.Context) (*eth.SyncStatus, error) { + return ð.SyncStatus{}, nil +} + +var _ virtual_node.VirtualNode = (*mockVNForInvalidation)(nil) + +func TestInvalidateBlock(t *testing.T) { + t.Parallel() + + genesisTime := uint64(1000) + blockTime := uint64(2) + + tests := []struct { + name string + height uint64 + payloadHash common.Hash + currentBlockHash common.Hash + engineAvailable bool + expectRewind bool + expectRewindTs uint64 + }{ + { + name: "current block matches triggers rewind", + height: 5, + payloadHash: common.HexToHash("0xdead"), + currentBlockHash: common.HexToHash("0xdead"), // Same hash + engineAvailable: true, + expectRewind: true, + expectRewindTs: genesisTime + (4 * blockTime), // height-1 timestamp + }, + { + name: "current block differs no rewind", + height: 5, + payloadHash: common.HexToHash("0xdead"), + currentBlockHash: common.HexToHash("0xbeef"), // Different hash + engineAvailable: true, + expectRewind: false, + }, + { + name: "engine unavailable adds to denylist only", + height: 5, + payloadHash: common.HexToHash("0xdead"), + 
engineAvailable: false, + expectRewind: false, + }, + { + name: "rewind to height-1 timestamp calculated correctly", + height: 10, + payloadHash: common.HexToHash("0xabcd"), + currentBlockHash: common.HexToHash("0xabcd"), + engineAvailable: true, + expectRewind: true, + expectRewindTs: genesisTime + (9 * blockTime), // height 9 + }, + } + + // Separate test for genesis block (height=0) which should error + t.Run("genesis block invalidation returns error", func(t *testing.T) { + t.Parallel() + dir := t.TempDir() + + dl, err := OpenDenyList(filepath.Join(dir, "denylist")) + require.NoError(t, err) + defer dl.Close() + + c := &simpleChainContainer{ + denyList: dl, + log: testLogger(), + } + + ctx := context.Background() + rewound, err := c.InvalidateBlock(ctx, 0, common.HexToHash("0xgenesis")) + + require.Error(t, err) + require.Contains(t, err.Error(), "cannot invalidate genesis block") + require.False(t, rewound) + + // Genesis hash should NOT be added to denylist + found, err := dl.Contains(0, common.HexToHash("0xgenesis")) + require.NoError(t, err) + require.False(t, found, "genesis block should not be added to denylist") + }) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + dir := t.TempDir() + + // Create deny list + dl, err := OpenDenyList(filepath.Join(dir, "denylist")) + require.NoError(t, err) + defer dl.Close() + + // Create mock engine + mockEng := &mockEngineForInvalidation{ + blockAtTimestampFn: func(ctx context.Context, ts uint64, label eth.BlockLabel) (eth.L2BlockRef, error) { + return eth.L2BlockRef{Hash: tt.currentBlockHash}, nil + }, + } + + // Create container with minimal config + c := &simpleChainContainer{ + denyList: dl, + log: testLogger(), + vncfg: &opnodecfg.Config{}, + vn: &mockVNForInvalidation{}, + } + c.vncfg.Rollup.Genesis.L2Time = genesisTime + c.vncfg.Rollup.BlockTime = blockTime + + if tt.engineAvailable { + c.engine = mockEng + } + + // Call InvalidateBlock + ctx := context.Background() + 
rewound, err := c.InvalidateBlock(ctx, tt.height, tt.payloadHash) + require.NoError(t, err) + + // Verify rewind behavior + require.Equal(t, tt.expectRewind, rewound, "rewind triggered mismatch") + + if tt.expectRewind && tt.engineAvailable { + require.True(t, mockEng.rewindCalled, "RewindToTimestamp should have been called") + require.Equal(t, tt.expectRewindTs, mockEng.rewindTimestamp, "rewind timestamp mismatch") + } + + // Verify hash was added to denylist regardless + found, err := dl.Contains(tt.height, tt.payloadHash) + require.NoError(t, err) + require.True(t, found, "hash should be in denylist after InvalidateBlock") + }) + } +} + +func TestIsDenied(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + setupHash common.Hash + setupHeight uint64 + checkHash common.Hash + checkHeight uint64 + expectFound bool + }{ + { + name: "denied block returns true", + setupHash: common.HexToHash("0x1234"), + setupHeight: 100, + checkHash: common.HexToHash("0x1234"), + checkHeight: 100, + expectFound: true, + }, + { + name: "non-denied returns false", + setupHash: common.HexToHash("0x1234"), + setupHeight: 100, + checkHash: common.HexToHash("0x5678"), // Different hash + checkHeight: 100, + expectFound: false, + }, + { + name: "wrong height returns false", + setupHash: common.HexToHash("0xabcd"), + setupHeight: 10, + checkHash: common.HexToHash("0xabcd"), // Same hash + checkHeight: 11, // Different height + expectFound: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + dir := t.TempDir() + + dl, err := OpenDenyList(filepath.Join(dir, "denylist")) + require.NoError(t, err) + defer dl.Close() + + // Setup + require.NoError(t, dl.Add(tt.setupHeight, tt.setupHash)) + + // Create container + c := &simpleChainContainer{ + denyList: dl, + log: testLogger(), + } + + // Check + found, err := c.IsDenied(tt.checkHeight, tt.checkHash) + require.NoError(t, err) + require.Equal(t, tt.expectFound, found) + }) + } +} 
+ +func testLogger() gethlog.Logger { + return gethlog.New() +} + +// TestDenyList_ConcurrentAccess verifies the DenyList is safe for concurrent use. +// 10 goroutines each perform 100 Add and Contains operations simultaneously. +func TestDenyList_ConcurrentAccess(t *testing.T) { + t.Parallel() + + dir := t.TempDir() + dl, err := OpenDenyList(dir) + require.NoError(t, err) + defer dl.Close() + + const numAccessors = 10 + const opsPerAccessor = 100 + + // Helper to generate deterministic hash from accessor and op index + makeHash := func(accessorID, opIdx int) common.Hash { + var h common.Hash + h[0] = byte(accessorID) + h[1] = byte(opIdx) + h[2] = byte(opIdx >> 8) + return h + } + + // Each accessor writes to its own height range and reads from all ranges + var wg sync.WaitGroup + wg.Add(numAccessors) + + for i := 0; i < numAccessors; i++ { + go func(accessorID int) { + defer wg.Done() + + baseHeight := uint64(accessorID * opsPerAccessor) + + for j := 0; j < opsPerAccessor; j++ { + height := baseHeight + uint64(j) + hash := makeHash(accessorID, j) + + // Write + err := dl.Add(height, hash) + require.NoError(t, err) + + // Read own write + found, err := dl.Contains(height, hash) + require.NoError(t, err) + require.True(t, found, "accessor %d should find its own hash at height %d", accessorID, height) + + // Read from another accessor's range (may or may not exist yet) + otherAccessor := (accessorID + 1) % numAccessors + otherHeight := uint64(otherAccessor*opsPerAccessor) + uint64(j/2) + _, err = dl.Contains(otherHeight, common.Hash{}) + require.NoError(t, err) // Should not error even if not found + } + }(i) + } + + wg.Wait() + + // Verify final state: each accessor should have written opsPerAccessor hashes + for i := 0; i < numAccessors; i++ { + baseHeight := uint64(i * opsPerAccessor) + for j := 0; j < opsPerAccessor; j++ { + height := baseHeight + uint64(j) + hash := makeHash(i, j) + + found, err := dl.Contains(height, hash) + require.NoError(t, err) + 
require.True(t, found, "hash from accessor %d at height %d should exist after concurrent access", i, height) + } + } +} diff --git a/op-supernode/supernode/chain_container/virtual_node/virtual_node.go b/op-supernode/supernode/chain_container/virtual_node/virtual_node.go index 576d0efe9bc..80d57e7be45 100644 --- a/op-supernode/supernode/chain_container/virtual_node/virtual_node.go +++ b/op-supernode/supernode/chain_container/virtual_node/virtual_node.go @@ -5,6 +5,7 @@ import ( "errors" "math" "sync" + "time" opnodecfg "github.com/ethereum-optimism/optimism/op-node/config" opmetrics "github.com/ethereum-optimism/optimism/op-node/metrics" @@ -148,7 +149,8 @@ func (v *simpleVirtualNode) Start(ctx context.Context) error { // Stop the inner node if it's still running if v.inner != nil { - stopCtx := context.Background() + stopCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) + defer cancel() if err := v.inner.Stop(stopCtx); err != nil { v.log.Error("error stopping inner node", "err", err) } @@ -226,8 +228,12 @@ func (v *simpleVirtualNode) L1AtSafeHead(ctx context.Context, target eth.BlockID } // Special case: genesis L2 block is trivially safe at genesis L1 + // Note: We use L1 block 0 (not cfg.Genesis.L1) because contracts may have been deployed + // earlier than cfg.Genesis.L1, allowing dispute games with L1 heads prior to cfg.Genesis.L1 if target == v.cfg.Rollup.Genesis.L2 { - return v.cfg.Rollup.Genesis.L1, nil + // Return L1 block 0 (L1 genesis) + l1Genesis := eth.BlockID{Number: 0} // Hash not necessary + return l1Genesis, nil } // Get the latest entry to start the walkback diff --git a/op-supernode/supernode/chain_container/virtual_node/virtual_node_test.go b/op-supernode/supernode/chain_container/virtual_node/virtual_node_test.go index 810dc016b23..b4c1a9f0822 100644 --- a/op-supernode/supernode/chain_container/virtual_node/virtual_node_test.go +++ b/op-supernode/supernode/chain_container/virtual_node/virtual_node_test.go @@ -474,7 +474,7 
@@ func TestVirtualNode_L1AtSafeHead(t *testing.T) { // Query for genesis L2 block result, err := vn.L1AtSafeHead(context.Background(), genesisL2) require.NoError(t, err) - require.Equal(t, genesisL1, result) + require.Equal(t, eth.BlockID{}, result) // Genesis L2 target returns genesis L1 directly, but without the hash }) t.Run("genesis L2 number with different hash is not treated as genesis", func(t *testing.T) { diff --git a/op-supernode/supernode/supernode.go b/op-supernode/supernode/supernode.go index 91df00ca376..3048c1a207b 100644 --- a/op-supernode/supernode/supernode.go +++ b/op-supernode/supernode/supernode.go @@ -81,7 +81,8 @@ func New(ctx context.Context, log gethlog.Logger, version string, requestStop co log.Error("missing virtual node config for chain", "chain", id) continue } - s.chains[chainID] = cc.NewChainContainer(chainID, vnCfgs[chainID], log, *cfg, initOverrides, nil, s.rpcRouter.SetHandler, s.metricsFanIn.SetMetricsRegistry) + container := cc.NewChainContainer(chainID, vnCfgs[chainID], log, *cfg, initOverrides, nil, s.rpcRouter.SetHandler, s.metricsFanIn.SetMetricsRegistry) + s.chains[chainID] = container } // Initialize fixed activities @@ -100,6 +101,12 @@ func New(ctx context.Context, log gethlog.Logger, version string, requestStop co } } + // Set up reset callbacks on all chain containers + // When a chain resets, notify all activities + for _, chain := range s.chains { + chain.SetResetCallback(s.onChainReset) + } + // Set up http server addr := net.JoinHostPort(cfg.RPCConfig.ListenAddr, strconv.Itoa(cfg.RPCConfig.ListenPort)) s.httpServer = httputil.NewHTTPServer(addr, s.rpcRouter) @@ -221,6 +228,43 @@ func (s *Supernode) Stop(ctx context.Context) error { return nil } +// onChainReset is called when a chain container resets to a given timestamp. +// It notifies all activities about the reset so they can clean up cached state. 
+func (s *Supernode) onChainReset(chainID eth.ChainID, timestamp uint64) { + s.log.Info("chain reset detected, notifying activities", + "chainID", chainID, + "timestamp", timestamp, + ) + for _, a := range s.activities { + a.Reset(chainID, timestamp) + } +} + +// PauseInteropActivity pauses the interop activity at the given timestamp. +// When the interop activity attempts to process this timestamp, it returns early. +// This function is for integration test control only. +func (s *Supernode) PauseInteropActivity(ts uint64) { + for _, a := range s.activities { + if ia, ok := a.(*interop.Interop); ok { + ia.PauseAt(ts) + return + } + } + s.log.Warn("PauseInterop called but no interop activity found") +} + +// ResumeInteropActivity clears any pause on the interop activity, allowing normal processing. +// This function is for integration test control only. +func (s *Supernode) ResumeInteropActivity() { + for _, a := range s.activities { + if ia, ok := a.(*interop.Interop); ok { + ia.Resume() + return + } + } + s.log.Warn("ResumeInterop called but no interop activity found") +} + func (s *Supernode) Stopped() bool { return s.stopped } // RPCAddr returns the bound RPC address (host:port) if the server is listening. 
diff --git a/op-supernode/supernode/supernode_activities_test.go b/op-supernode/supernode/supernode_activities_test.go index 0af07a547ff..552b44afbc4 100644 --- a/op-supernode/supernode/supernode_activities_test.go +++ b/op-supernode/supernode/supernode_activities_test.go @@ -9,6 +9,7 @@ import ( "testing" "time" + "github.com/ethereum-optimism/optimism/op-service/eth" rpc "github.com/ethereum-optimism/optimism/op-service/rpc" "github.com/ethereum-optimism/optimism/op-supernode/supernode/activity" gethlog "github.com/ethereum/go-ethereum/log" @@ -26,7 +27,8 @@ func (m *mockRunnable) Start(ctx context.Context) error { <-ctx.Done() return ctx.Err() } -func (m *mockRunnable) Stop(ctx context.Context) error { m.stopped++; return nil } +func (m *mockRunnable) Stop(ctx context.Context) error { m.stopped++; return nil } +func (m *mockRunnable) Reset(chainID eth.ChainID, timestamp uint64) {} // ensure it satisfies both Activity and RunnableActivity var _ activity.Activity = (*mockRunnable)(nil) @@ -35,6 +37,8 @@ var _ activity.RunnableActivity = (*mockRunnable)(nil) // plain marker-only activity type plainActivity struct{} +func (p *plainActivity) Reset(chainID eth.ChainID, timestamp uint64) {} + var _ activity.Activity = (*plainActivity)(nil) // Start is implemented, but no Stop, so this is not runnable @@ -47,8 +51,9 @@ func (s *rpcSvc) Echo(_ context.Context) (string, error) { return "ok", nil } type rpcAct struct{} -func (a *rpcAct) RPCNamespace() string { return "act" } -func (a *rpcAct) RPCService() interface{} { return &rpcSvc{} } +func (a *rpcAct) RPCNamespace() string { return "act" } +func (a *rpcAct) RPCService() interface{} { return &rpcSvc{} } +func (a *rpcAct) Reset(chainID eth.ChainID, timestamp uint64) {} var _ activity.Activity = (*rpcAct)(nil) var _ activity.RPCActivity = (*rpcAct)(nil) diff --git a/ops/docker/op-stack-go/Dockerfile b/ops/docker/op-stack-go/Dockerfile index 23578ed54e7..45e9cb74df4 100644 --- a/ops/docker/op-stack-go/Dockerfile +++ 
b/ops/docker/op-stack-go/Dockerfile @@ -200,7 +200,7 @@ RUN --mount=type=cache,target=/go/pkg/mod --mount=type=cache,target=/root/.cache GOOS=$TARGETOS GOARCH=$TARGETARCH GITCOMMIT=$GIT_COMMIT GITDATE=$GIT_DATE VERSION="$OP_INTEROP_MON_VERSION" \ op-interop-mon/op-interop-mon -# The Rust version must match kona/rust-toolchain.toml. We don't use "latest" to ensure reproducibility +# The Rust version must match rust/rust-toolchain.toml. We don't use "latest" to ensure reproducibility FROM --platform=$BUILDPLATFORM rust:1.88 AS kona-host-builder ARG TARGETARCH # Install build dependencies and cross-compilation toolchains @@ -212,10 +212,11 @@ RUN apt-get update && apt-get install -y --no-install-recommends \ gcc-aarch64-linux-gnu \ gcc-x86-64-linux-gnu \ && rm -rf /var/lib/apt/lists/* -COPY ./kona /kona -WORKDIR /kona +# Copy the entire rust workspace since kona depends on sibling crates (op-alloy, alloy-op-evm, etc.) +COPY ./rust /rust +WORKDIR /rust # Map TARGETARCH to Rust target triple, install it, and build -RUN --mount=type=cache,target=/usr/local/cargo/registry --mount=type=cache,target=/kona/target \ +RUN --mount=type=cache,target=/usr/local/cargo/registry --mount=type=cache,target=/rust/target \ case "$TARGETARCH" in \ amd64) RUST_TARGET=x86_64-unknown-linux-gnu ;; \ arm64) RUST_TARGET=aarch64-unknown-linux-gnu ;; \ @@ -223,7 +224,7 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry --mount=type=cache,targe esac && \ rustup target add "$RUST_TARGET" && \ RUSTFLAGS="-C target-cpu=generic" cargo build --bin kona-host --profile release-perf --target "$RUST_TARGET" && \ - cp "/kona/target/$RUST_TARGET/release-perf/kona-host" /kona-host + cp "/rust/target/$RUST_TARGET/release-perf/kona-host" /kona-host FROM $TARGET_BASE_IMAGE AS cannon-target COPY --from=cannon-builder /app/cannon/bin/cannon /usr/local/bin/ diff --git a/ops/docker/op-stack-go/Dockerfile.dockerignore b/ops/docker/op-stack-go/Dockerfile.dockerignore index 76b40037374..6067e181296 100644 --- 
a/ops/docker/op-stack-go/Dockerfile.dockerignore +++ b/ops/docker/op-stack-go/Dockerfile.dockerignore @@ -27,10 +27,10 @@ !/op-alt-da !/op-faucet !/op-interop-mon -!/kona -# Exclude large kona build artifacts -kona/target -kona/.git +!/rust +# Exclude large rust build artifacts +rust/target +rust/.git !/go.mod !/go.sum !/justfiles @@ -42,5 +42,5 @@ kona/.git **/testdata **/tests -# Re-include kona/bin which is excluded by **/bin above -!/kona/bin +# Re-include rust/bin which is excluded by **/bin above +!/rust/**/bin diff --git a/ops/scripts/compute-git-versions.sh b/ops/scripts/compute-git-versions.sh index 0d5da620490..e8e53e32191 100755 --- a/ops/scripts/compute-git-versions.sh +++ b/ops/scripts/compute-git-versions.sh @@ -30,6 +30,10 @@ IMAGES=( "cannon" "op-dripper" "op-interop-mon" + "op-reth" + "kona-node" + "kona-host" + "kona-client" ) echo "Checking git tags pointing at $GIT_COMMIT:" >&2 diff --git a/packages/contracts-bedrock/interfaces/L2/IConditionalDeployer.sol b/packages/contracts-bedrock/interfaces/L2/IConditionalDeployer.sol new file mode 100644 index 00000000000..9f4afa50576 --- /dev/null +++ b/packages/contracts-bedrock/interfaces/L2/IConditionalDeployer.sol @@ -0,0 +1,31 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +import { ISemver } from "interfaces/universal/ISemver.sol"; + +/// @title IConditionalDeployer +/// @notice Interface for the ConditionalDeployer contract. +interface IConditionalDeployer is ISemver { + /// @notice Emitted when an implementation is deployed. + /// @param implementation The address of the deployed implementation. + /// @param salt The salt used for deployment. + event ImplementationDeployed(address indexed implementation, bytes32 salt); + + /// @notice Emitted when deployment is skipped because implementation already exists. + /// @param implementation The address of the existing implementation. 
+ event ImplementationExists(address indexed implementation); + + /// @notice Error thrown when deployment fails. + /// @param data The data returned from the deployment call. + error ConditionalDeployer_DeploymentFailed(bytes data); + + /// @notice Deploys an implementation using CREATE2 if it doesn't already exist. + /// @param _salt The salt to use for CREATE2 deployment. + /// @param _code The initialization code for the contract. + /// @return implementation_ The address of the deployed or existing implementation. + function deploy(bytes32 _salt, bytes memory _code) external returns (address implementation_); + + /// @notice Address of the Arachnid's DeterministicDeploymentProxy. + /// @return deterministicDeploymentProxy_ The address of the Arachnid's DeterministicDeploymentProxy. + function deterministicDeploymentProxy() external pure returns (address deterministicDeploymentProxy_); +} diff --git a/packages/contracts-bedrock/interfaces/L2/IL2ContractsManager.sol b/packages/contracts-bedrock/interfaces/L2/IL2ContractsManager.sol index 0f0212d399c..ee3ecb61381 100644 --- a/packages/contracts-bedrock/interfaces/L2/IL2ContractsManager.sol +++ b/packages/contracts-bedrock/interfaces/L2/IL2ContractsManager.sol @@ -10,6 +10,16 @@ import { L2ContractsManagerTypes } from "src/libraries/L2ContractsManagerTypes.s /// @title IL2ContractsManager /// @notice Interface for the L2ContractsManager contract. interface IL2ContractsManager is ISemver { + /// @notice Thrown when the upgrade function is called outside of a DELEGATECALL context. + error L2ContractsManager_OnlyDelegatecall(); + + /// @notice Thrown when a user attempts to downgrade a contract. + /// @param _target The address of the contract that was attempted to be downgraded. + error L2ContractsManager_DowngradeNotAllowed(address _target); + + /// @notice Error thrown when a semver string has less than 3 parts. + error SemverComp_InvalidSemverParts(); + /// @notice Executes the upgrade for all predeploys. 
/// @dev This function MUST be called via DELEGATECALL from the L2ProxyAdmin. function upgrade() external; diff --git a/packages/contracts-bedrock/interfaces/L2/IL2ProxyAdmin.sol b/packages/contracts-bedrock/interfaces/L2/IL2ProxyAdmin.sol new file mode 100644 index 00000000000..8f5f3f0dd98 --- /dev/null +++ b/packages/contracts-bedrock/interfaces/L2/IL2ProxyAdmin.sol @@ -0,0 +1,24 @@ +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.0; + +// Interfaces +import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { ISemver } from "interfaces/universal/ISemver.sol"; + +/// @title IL2ProxyAdmin +interface IL2ProxyAdmin is IProxyAdmin, ISemver { + /// @notice Emitted when the predeploys are upgraded. + /// @param l2ContractsManager Address of the L2ContractsManager contract. + event PredeploysUpgraded(address indexed l2ContractsManager); + + /// @notice Thrown when the caller is not the depositor account. + error L2ProxyAdmin__Unauthorized(); + + /// @notice Thrown when the upgrade fails. + error L2ProxyAdmin__UpgradeFailed(bytes data); + + function __constructor__(address _owner) external; + /// @notice Upgrades the predeploys via delegatecall to the L2ContractsManager contract. + /// @param _l2ContractsManager Address of the L2ContractsManager contract. 
+ function upgradePredeploys(address _l2ContractsManager) external; +} diff --git a/packages/contracts-bedrock/interfaces/safe/ISaferSafes.sol b/packages/contracts-bedrock/interfaces/safe/ISaferSafes.sol index 27044452cf3..77c3c9e1b57 100644 --- a/packages/contracts-bedrock/interfaces/safe/ISaferSafes.sol +++ b/packages/contracts-bedrock/interfaces/safe/ISaferSafes.sol @@ -6,6 +6,8 @@ import { ILivenessModule2 } from "interfaces/safe/ILivenessModule2.sol"; import { ISemver } from "interfaces/universal/ISemver.sol"; interface ISaferSafes is ISemver { + function __constructor__() external; + event CancellationThresholdUpdated(ISafe indexed safe, uint256 oldThreshold, uint256 newThreshold); event ChallengeCancelled(address indexed safe); event ChallengeStarted(address indexed safe, uint256 challengeStartTime); diff --git a/packages/contracts-bedrock/scripts/L2Genesis.s.sol b/packages/contracts-bedrock/scripts/L2Genesis.s.sol index c189309fddd..8599dd99230 100644 --- a/packages/contracts-bedrock/scripts/L2Genesis.s.sol +++ b/packages/contracts-bedrock/scripts/L2Genesis.s.sol @@ -81,6 +81,7 @@ contract L2Genesis is Script { string gasPayingTokenSymbol; uint256 nativeAssetLiquidityAmount; address liquidityControllerOwner; + bool useL2CM; } using ForkUtils for Fork; @@ -202,7 +203,7 @@ contract L2Genesis is Script { /// @notice Set up the accounts that correspond to the predeploys. /// The Proxy bytecode should be set. All proxied predeploys should have - /// the 1967 admin slot set to the ProxyAdmin predeploy. All defined predeploys + /// the 1967 admin slot set to the L2ProxyAdmin predeploy. All defined predeploys /// should have their implementations set. /// Warning: the predeploy accounts have contract code, but 0 nonce value, contrary /// to the expected nonce of 1 per EIP-161. 
This is because the legacy go genesis @@ -221,8 +222,11 @@ contract L2Genesis is Script { vm.etch(addr, code); EIP1967Helper.setAdmin(addr, Predeploys.PROXY_ADMIN); - if (Predeploys.isSupportedPredeploy(addr, _input.fork, _input.deployCrossL2Inbox, _input.useCustomGasToken)) - { + if ( + Predeploys.isSupportedPredeploy( + addr, _input.fork, _input.deployCrossL2Inbox, _input.useCustomGasToken, _input.useL2CM + ) + ) { address implementation = Predeploys.predeployToCodeNamespace(addr); EIP1967Helper.setImplementation(addr, implementation); } @@ -249,7 +253,7 @@ contract L2Genesis is Script { setL1Block(_input.useCustomGasToken); // 15 setL2ToL1MessagePasser(_input.useCustomGasToken); // 16 setOptimismMintableERC721Factory(_input); // 17 - setProxyAdmin(_input); // 18 + setL2ProxyAdmin(_input); // 18 setBaseFeeVault(_input); // 19 setL1FeeVault(_input); // 1A setOperatorFeeVault(_input); // 1B @@ -268,16 +272,20 @@ contract L2Genesis is Script { setLiquidityController(_input); // 29 setNativeAssetLiquidity(_input); // 2A } + if (_input.useL2CM) { + setConditionalDeployer(); // 2C + } } function setInteropPredeployProxies() internal { } - function setProxyAdmin(Input memory _input) internal { - // Note the ProxyAdmin implementation itself is behind a proxy that owns itself. + function setL2ProxyAdmin(Input memory _input) internal { + // Note the L2ProxyAdmin implementation itself is behind a proxy that owns itself. address impl = _setImplementationCode(Predeploys.PROXY_ADMIN); bytes32 _ownerSlot = bytes32(0); + // TODO(#19182): Remove this once the L2ProxyAdmin is initializable. // there is no initialize() function, so we just set the storage manually. 
vm.store(Predeploys.PROXY_ADMIN, _ownerSlot, bytes32(uint256(uint160(_input.opChainProxyAdminOwner)))); // update the proxy to not be uninitialized (although not standard initialize pattern) @@ -578,6 +586,11 @@ contract L2Genesis is Script { vm.deal(Predeploys.NATIVE_ASSET_LIQUIDITY, _input.nativeAssetLiquidityAmount); } + /// @notice This predeploy is following the safety invariant #1. + function setConditionalDeployer() internal { + _setImplementationCode(Predeploys.CONDITIONAL_DEPLOYER); + } + /// @notice Sets all the preinstalls. function setPreinstalls() internal { address tmpSetPreinstalls = address(uint160(uint256(keccak256("SetPreinstalls")))); diff --git a/packages/contracts-bedrock/scripts/deploy/DeployConfig.s.sol b/packages/contracts-bedrock/scripts/deploy/DeployConfig.s.sol index 56fee1ab6c2..8ef6f83876b 100644 --- a/packages/contracts-bedrock/scripts/deploy/DeployConfig.s.sol +++ b/packages/contracts-bedrock/scripts/deploy/DeployConfig.s.sol @@ -92,6 +92,8 @@ contract DeployConfig is Script { uint256 public faultGameV2ClockExtension; uint256 public faultGameV2MaxClockDuration; + bool public useL2CM; + bool public useInterop; bool public useUpgradedFork; bytes32 public devFeatureBitmap; @@ -181,6 +183,8 @@ contract DeployConfig is Script { daBondSize = _readOr(_json, "$.daBondSize", 1000000000); daResolverRefundPercentage = _readOr(_json, "$.daResolverRefundPercentage", 0); + useL2CM = _readOr(_json, "$.useL2CM", false); + useInterop = _readOr(_json, "$.useInterop", false); devFeatureBitmap = bytes32(_readOr(_json, "$.devFeatureBitmap", 0)); useUpgradedFork; @@ -317,6 +321,11 @@ contract DeployConfig is Script { operatorFeeVaultWithdrawalNetwork = _operatorFeeVaultWithdrawalNetwork; } + /// @notice Allow the `useL2CM` config to be overridden in testing environments + function setUseL2CM(bool _useL2CM) public { + useL2CM = _useL2CM; + } + function latestGenesisFork() internal view returns (Fork) { if (l2GenesisJovianTimeOffset == 0) { return 
Fork.JOVIAN; diff --git a/packages/contracts-bedrock/scripts/deploy/DeployOPChain.s.sol b/packages/contracts-bedrock/scripts/deploy/DeployOPChain.s.sol index 3eff5166a5c..2faa6cdcb11 100644 --- a/packages/contracts-bedrock/scripts/deploy/DeployOPChain.s.sol +++ b/packages/contracts-bedrock/scripts/deploy/DeployOPChain.s.sol @@ -157,24 +157,23 @@ contract DeployOPChain is Script { pure returns (IOPContractsManagerV2.FullConfig memory config_) { + // Only PERMISSIONED_CANNON is allowed for initial deployment since no prestate exists for permissionless games. + require( + _input.disputeGameType.raw() == GameTypes.PERMISSIONED_CANNON.raw(), + "DeployOPChain: only PERMISSIONED_CANNON game type is supported for initial deployment" + ); + // Build dispute game configs - OPCMV2 requires exactly 3 configs: CANNON, PERMISSIONED_CANNON, CANNON_KONA IOPContractsManagerUtils.DisputeGameConfig[] memory disputeGameConfigs = new IOPContractsManagerUtils.DisputeGameConfig[](3); - // Determine which games should be enabled based on the starting respected game type - bool cannonEnabled = _input.disputeGameType.raw() == GameTypes.CANNON.raw(); - bool permissionedCannonEnabled = true; // PERMISSIONED_CANNON must always be enabled - bool cannonKonaEnabled = _input.disputeGameType.raw() == GameTypes.CANNON_KONA.raw(); - // Config 0: CANNON - IOPContractsManagerUtils.FaultDisputeGameConfig memory cannonConfig = - IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: _input.disputeAbsolutePrestate }); - + // Must be disabled for the initial deployment since no prestate exists for permissionless games. disputeGameConfigs[0] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: cannonEnabled, - initBond: cannonEnabled ? 
DEFAULT_INIT_BOND : 0, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON, - gameArgs: abi.encode(cannonConfig) + gameArgs: bytes("") }); // Config 1: PERMISSIONED_CANNON (must be enabled) @@ -186,21 +185,19 @@ contract DeployOPChain is Script { }); disputeGameConfigs[1] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: permissionedCannonEnabled, + enabled: true, initBond: DEFAULT_INIT_BOND, gameType: GameTypes.PERMISSIONED_CANNON, gameArgs: abi.encode(pdgConfig) }); // Config 2: CANNON_KONA - IOPContractsManagerUtils.FaultDisputeGameConfig memory cannonKonaConfig = - IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: _input.disputeAbsolutePrestate }); - + // Must be disabled for the initial deployment since no prestate exists for permissionless games. disputeGameConfigs[2] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: cannonKonaEnabled, - initBond: cannonKonaEnabled ? DEFAULT_INIT_BOND : 0, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON_KONA, - gameArgs: abi.encode(cannonKonaConfig) + gameArgs: bytes("") }); config_ = IOPContractsManagerV2.FullConfig({ @@ -211,7 +208,7 @@ contract DeployOPChain is Script { unsafeBlockSigner: _input.unsafeBlockSigner, batcher: _input.batcher, startingAnchorRoot: ScriptConstants.DEFAULT_OUTPUT_ROOT(), - startingRespectedGameType: _input.disputeGameType, + startingRespectedGameType: GameTypes.PERMISSIONED_CANNON, basefeeScalar: _input.basefeeScalar, blobBasefeeScalar: _input.blobBaseFeeScalar, gasLimit: _input.gasLimit, diff --git a/packages/contracts-bedrock/scripts/deploy/DeploySaferSafes.s.sol b/packages/contracts-bedrock/scripts/deploy/DeploySaferSafes.s.sol index 3c411baed94..e0bc2af7bfa 100644 --- a/packages/contracts-bedrock/scripts/deploy/DeploySaferSafes.s.sol +++ b/packages/contracts-bedrock/scripts/deploy/DeploySaferSafes.s.sol @@ -31,7 +31,7 @@ contract DeploySaferSafes is Script { output_.saferSafesSingleton = ISaferSafes( DeployUtils.createDeterministic({ _name: 
"SaferSafes", - _args: DeployUtils.encodeConstructor(bytes("")), + _args: DeployUtils.encodeConstructor(abi.encodeCall(ISaferSafes.__constructor__, ())), _salt: DeployUtils.DEFAULT_SALT }) ); diff --git a/packages/contracts-bedrock/scripts/deploy/ReadImplementationAddresses.s.sol b/packages/contracts-bedrock/scripts/deploy/ReadImplementationAddresses.s.sol index fbba07f883a..75ab6d7d3cd 100644 --- a/packages/contracts-bedrock/scripts/deploy/ReadImplementationAddresses.s.sol +++ b/packages/contracts-bedrock/scripts/deploy/ReadImplementationAddresses.s.sol @@ -69,14 +69,13 @@ contract ReadImplementationAddresses is Script { // Get implementations from OPCM V2 IOPContractsManagerV2 opcmV2 = IOPContractsManagerV2(_input.opcm); - // OPCMV2 doesn't expose these addresses directly, so we set them to zero - // These are internal to the OPCM container and not meant to be accessed externally + // These addresses are deprecated in OPCM V2 output_.opcmGameTypeAdder = address(0); output_.opcmDeployer = address(0); output_.opcmUpgrader = address(0); - output_.opcmInteropMigrator = address(0); - // StandardValidator is accessible via the standardValidator() method + // Get migrator and standard validator from OPCM V2 + output_.opcmInteropMigrator = address(opcmV2.opcmMigrator()); output_.opcmStandardValidator = address(opcmV2.opcmStandardValidator()); IOPContractsManagerContainer.Implementations memory impls = opcmV2.implementations(); diff --git a/packages/contracts-bedrock/scripts/deploy/VerifyOPCM.s.sol b/packages/contracts-bedrock/scripts/deploy/VerifyOPCM.s.sol index af899751e34..b727cd26aa4 100644 --- a/packages/contracts-bedrock/scripts/deploy/VerifyOPCM.s.sol +++ b/packages/contracts-bedrock/scripts/deploy/VerifyOPCM.s.sol @@ -18,10 +18,41 @@ import { Constants } from "src/libraries/Constants.sol"; // Interfaces import { IOPContractsManager } from "interfaces/L1/IOPContractsManager.sol"; +import { IOptimismPortal2 } from "interfaces/L1/IOptimismPortal2.sol"; +import { 
IAnchorStateRegistry } from "interfaces/dispute/IAnchorStateRegistry.sol"; +import { IMIPS64 } from "interfaces/cannon/IMIPS64.sol"; /// @title VerifyOPCM /// @notice Verifies the bytecode of an OPContractsManager instance and all associated blueprints /// and implementations against locally built artifacts. +/// @dev SECURITY MODEL +/// +/// This script verifies that deployed contracts match expected bytecode and configuration. +/// Understanding what this script can and cannot detect is critical for security. +/// +/// Attacker Capabilities (what the attacker controls): +/// - Deployment of all contracts (OPCM, Container, StandardValidator, implementations) +/// - All constructor arguments and immutable values +/// - Contract deployment addresses +/// +/// Trust Assumptions (what we assume is honest): +/// - Local artifacts are compiled from correct, audited source code +/// - Environment variables contain correct expected values from trusted sources +/// - Block explorer API returns authentic creation bytecode (for constructor verification) +/// - The RPC endpoint returns authentic on-chain bytecode and state +/// +/// What This Script Verifies: +/// - Runtime bytecode matches local artifacts (ignoring immutable slots) +/// - Creation bytecode matches local artifacts (when constructor verification enabled) +/// - Security-critical immutable values (delays, addresses) match expected values +/// - PreimageOracle bytecode referenced by MIPS64 is correct +/// - StandardValidator configuration matches Container implementations +/// +/// What This Script Does NOT Verify: +/// - Source code correctness (assumes artifacts are from audited code) +/// - Environment variable correctness (must be set from trusted governance/config) +/// - Proxy storage slot contents (only verifies implementation bytecode) +/// - Runtime behavior or logic correctness contract VerifyOPCM is Script { using stdJson for string; @@ -61,6 +92,15 @@ contract VerifyOPCM is Script { /// @notice 
Thrown when the dev feature bitmap is not empty on mainnet. error VerifyOPCM_DevFeatureBitmapNotEmpty(); + /// @notice Thrown when a security-critical value doesn't match expected. + error VerifyOPCM_SecurityCriticalValueMismatch(string name, uint256 expected, uint256 actual); + + /// @notice Thrown when a staticcall to a validator getter fails. + error VerifyOPCM_ValidatorCallFailed(string sig); + + /// @notice Thrown when _findChar is called with a multi-character string. + error VerifyOPCM_MustBeSingleChar(); + /// @notice Preamble used for blueprint contracts. bytes constant BLUEPRINT_PREAMBLE = hex"FE7100"; @@ -109,9 +149,25 @@ contract VerifyOPCM is Script { /// WARNING: Do NOT add new getters without understanding their verification method! mapping(string => string) internal expectedGetters; + /// @notice Maps StandardValidator getter names to their verification method. + /// Value can be: + /// - "CONTAINER_IMPL" - verify against Container's implementations struct + /// - "ENV:ADDRESS:" - verify against environment variable (address) + /// - "ENV:UINT256:" - verify against environment variable (uint256) + /// - "ZERO_ON_MAINNET" - verify is zero/empty on mainnet + /// - "SKIP" - explicitly skip (e.g., version) + mapping(string => string) internal validatorGetterChecks; + /// @notice Setup flag. bool internal ready; + /// @notice Returns whether to skip security-critical value checks. + /// Public to allow tests to mock via vm.mockCall. + function skipSecurityValueChecks() public view virtual returns (bool) { + // nosemgrep: sol-style-vm-env-only-in-config-sol + return vm.envOr("SKIP_SECURITY_VALUE_CHECKS", false); + } + /// @notice Populates override mappings. function setUp() public { // Overrides for situations where field names do not cleanly map to contract names. 
@@ -185,6 +241,36 @@ contract VerifyOPCM is Script { expectedGetters["isDevFeatureEnabled"] = "SKIP"; expectedGetters["version"] = "SKIP"; + // StandardValidator getter verification methods + // Implementation addresses - verify against Container + validatorGetterChecks["l1ERC721BridgeImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["optimismPortalImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["optimismPortalInteropImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["ethLockboxImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["systemConfigImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["optimismMintableERC20FactoryImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["l1CrossDomainMessengerImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["l1StandardBridgeImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["disputeGameFactoryImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["anchorStateRegistryImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["delayedWETHImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["mipsImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["faultDisputeGameImpl"] = "CONTAINER_IMPL"; + validatorGetterChecks["permissionedDisputeGameImpl"] = "CONTAINER_IMPL"; + + // Verify against env vars + validatorGetterChecks["superchainConfig"] = "ENV:ADDRESS:EXPECTED_SUPERCHAIN_CONFIG"; + validatorGetterChecks["l1PAOMultisig"] = "ENV:ADDRESS:EXPECTED_L1_PAO_MULTISIG"; + validatorGetterChecks["challenger"] = "ENV:ADDRESS:EXPECTED_CHALLENGER"; + validatorGetterChecks["withdrawalDelaySeconds"] = "ENV:UINT256:EXPECTED_WITHDRAWAL_DELAY_SECONDS"; + + // Must be empty on mainnet + validatorGetterChecks["devFeatureBitmap"] = "ZERO_ON_MAINNET"; + + // Skip - no security relevance or verified elsewhere + validatorGetterChecks["version"] = "SKIP"; + validatorGetterChecks["preimageOracleVersion"] = "SKIP"; + // Mark as ready. ready = true; } @@ -572,6 +658,11 @@ contract VerifyOPCM is Script { // Perform detailed bytecode comparison. 
success = _compareBytecode(actualCode, expectedCode, _target.name, artifact, !_target.blueprint) && success; + // For implementations, verify security-critical values. + if (!_target.blueprint) { + success = _verifySecurityCriticalValues(_opcm, _target, artifact) && success; + } + // If requested and this is not a blueprint, we also need to check the creation code. if (!_target.blueprint && !_skipConstructorVerification) { // Get the creation code from the selected block explorer. @@ -1165,4 +1256,368 @@ contract VerifyOPCM is Script { // nosemgrep: sol-style-vm-env-only-in-config-sol return vm.envOr("OPCM_ADDRESS", address(0)); } + + /// @notice Verifies security-critical values for contracts where immutables matter. + /// @param _opcm The OPCM contract that contains the target contract reference. + /// @param _target The contract reference being verified. + /// @param _artifact The artifact info for the contract. + /// @return True if all security-critical values are correct. + function _verifySecurityCriticalValues( + IOPContractsManager _opcm, + OpcmContractRef memory _target, + ArtifactInfo memory _artifact + ) + internal + returns (bool) + { + // Silence unused variable warning - artifact is available for future use + _artifact; + + // Allow skipping security-critical value checks (for tests that modify immutables) + if (skipSecurityValueChecks()) { + return true; + } + + bool success = true; + + // MIPS64: Verify the PreimageOracle it points to + if (LibString.eq(_target.name, "MIPS64")) { + success = _verifyPreimageOracle(IMIPS64(_target.addr)) && success; + } + + // OptimismPortal2: Verify PROOF_MATURITY_DELAY_SECONDS + if (LibString.eq(_target.name, "OptimismPortal2") || LibString.eq(_target.name, "OptimismPortalInterop")) { + success = _verifyPortalDelays(IOptimismPortal2(payable(_target.addr))) && success; + } + + // AnchorStateRegistry: Verify DISPUTE_GAME_FINALITY_DELAY_SECONDS + if (LibString.eq(_target.name, "AnchorStateRegistry")) { + success = 
_verifyAnchorStateRegistryDelays(IAnchorStateRegistry(_target.addr)) && success; + } + + // OPContractsManagerStandardValidator: Verify all constructor arg values + if (LibString.eq(_target.name, "OPContractsManagerStandardValidator")) { + success = _verifyStandardValidatorArgs(_opcm, _target.addr) && success; + } + + return success; + } + + /// @notice Verifies the PreimageOracle bytecode that MIPS64 points to. + /// @param _mips The MIPS64 contract. + /// @return True if the PreimageOracle bytecode matches expected. + function _verifyPreimageOracle(IMIPS64 _mips) internal view returns (bool) { + address oracleAddr = address(_mips.oracle()); + console.log(" Verifying PreimageOracle bytecode..."); + console.log(string.concat(" Address: ", vm.toString(oracleAddr))); + + ArtifactInfo memory oracleArtifact = _loadArtifactInfo(_buildArtifactPath("PreimageOracle")); + return _compareBytecode( + oracleAddr.code, + oracleArtifact.deployedBytecode, + "PreimageOracle", + oracleArtifact, + true // allow immutables for challengePeriod/minProposalSize + ); + } + + /// @notice Verifies OptimismPortal2 security-critical delay values. + /// @param _portal The OptimismPortal2 contract. + /// @return True if delay values match expected. 
+ function _verifyPortalDelays(IOptimismPortal2 _portal) internal view returns (bool) { + // nosemgrep: sol-style-vm-env-only-in-config-sol + uint256 expectedDelay = vm.envOr("EXPECTED_PROOF_MATURITY_DELAY_SECONDS", uint256(604800)); + uint256 actualDelay = _portal.proofMaturityDelaySeconds(); + + console.log(" Verifying PROOF_MATURITY_DELAY_SECONDS..."); + console.log(string.concat(" Expected: ", vm.toString(expectedDelay))); + console.log(string.concat(" Actual: ", vm.toString(actualDelay))); + + if (actualDelay != expectedDelay) { + console.log(" [FAIL] PROOF_MATURITY_DELAY_SECONDS mismatch"); + return false; + } + console.log(" [OK] PROOF_MATURITY_DELAY_SECONDS verified"); + return true; + } + + /// @notice Verifies AnchorStateRegistry security-critical delay values. + /// @param _asr The AnchorStateRegistry contract. + /// @return True if delay values match expected. + function _verifyAnchorStateRegistryDelays(IAnchorStateRegistry _asr) internal view returns (bool) { + // nosemgrep: sol-style-vm-env-only-in-config-sol + uint256 expectedDelay = vm.envOr("EXPECTED_DISPUTE_GAME_FINALITY_DELAY_SECONDS", uint256(302400)); + uint256 actualDelay = _asr.disputeGameFinalityDelaySeconds(); + + console.log(" Verifying DISPUTE_GAME_FINALITY_DELAY_SECONDS..."); + console.log(string.concat(" Expected: ", vm.toString(expectedDelay))); + console.log(string.concat(" Actual: ", vm.toString(actualDelay))); + + if (actualDelay != expectedDelay) { + console.log(" [FAIL] DISPUTE_GAME_FINALITY_DELAY_SECONDS mismatch"); + return false; + } + console.log(" [OK] DISPUTE_GAME_FINALITY_DELAY_SECONDS verified"); + return true; + } + + /// @notice Verifies all StandardValidator getters are properly validated. + /// @param _opcm The OPCM contract. + /// @param _validator The StandardValidator contract address. + /// @return True if all getters are valid. 
+ function _verifyStandardValidatorArgs(IOPContractsManager _opcm, address _validator) internal returns (bool) { + bool success = true; + console.log(" Verifying StandardValidator args..."); + + // Get ALL zero-arg view getters from ABI + string[] memory allGetters = abi.decode( + vm.parseJson( + Process.bash( + string.concat( + "jq -r '[.abi[] | select(.type == \"function\" and .stateMutability == \"view\" and (.inputs | length) == 0) | .name]' ", + _buildArtifactPath("OPContractsManagerStandardValidator") + ) + ) + ), + (string[]) + ); + + // Load Container impls for comparison + // nosemgrep: sol-style-use-abi-encodecall + (bool callOk, bytes memory containerData) = + address(_opcm).staticcall(abi.encodeWithSignature("implementations()")); + if (!callOk) { + console.log(" [FAIL] Could not fetch implementations from OPCM"); + return false; + } + + // Get container impl field names + string[] memory containerFields = _getContainerImplFields(); + + // Verify each getter + for (uint256 i = 0; i < allGetters.length; i++) { + string memory getter = allGetters[i]; + string memory check = validatorGetterChecks[getter]; + + // Fail if getter is unaccounted for + if (bytes(check).length == 0) { + console.log(string.concat(" [FAIL] Unaccounted getter: ", getter)); + success = false; + continue; + } + + // Skip explicitly skipped getters + if (LibString.eq(check, "SKIP")) { + continue; + } + + // Handle each check type + if (LibString.eq(check, "CONTAINER_IMPL")) { + success = _verifyContainerImpl(_validator, getter, containerFields, containerData) && success; + } else if (LibString.startsWith(check, "ENV:ADDRESS:")) { + string memory envVar = LibString.slice(check, bytes("ENV:ADDRESS:").length, bytes(check).length); + success = _verifyEnvAddress(_validator, getter, envVar) && success; + } else if (LibString.startsWith(check, "ENV:UINT256:")) { + string memory envVar = LibString.slice(check, bytes("ENV:UINT256:").length, bytes(check).length); + success = 
_verifyEnvUint256(_validator, getter, envVar) && success; + } else if (LibString.eq(check, "ZERO_ON_MAINNET")) { + success = _verifyZeroOnMainnet(_validator, getter) && success; + } + } + + if (success) { + console.log(" [OK] All StandardValidator args verified"); + } + return success; + } + + /// @notice Gets the field names from the Container implementations struct. + /// @return Array of field names. + function _getContainerImplFields() internal returns (string[] memory) { + return abi.decode( + vm.parseJson( + Process.bash( + string.concat( + "jq -r '[.abi[] | select(.name == \"implementations\") | .outputs[0].components[].name]' ", + _buildArtifactPath(_opcmContractName()) + ) + ) + ), + (string[]) + ); + } + + /// @notice Verifies a StandardValidator getter matches the corresponding Container impl. + /// @param _validator The StandardValidator address. + /// @param _getter The getter name. + /// @param _containerFields Array of Container field names. + /// @param _containerData ABI-encoded Container implementations struct. + /// @return True if the values match. + function _verifyContainerImpl( + address _validator, + string memory _getter, + string[] memory _containerFields, + bytes memory _containerData + ) + internal + view + returns (bool) + { + address actual = _getAddressFromValidator(_validator, string.concat(_getter, "()")); + address expected = _findContainerImpl(_getter, _containerFields, _containerData); + + if (actual != expected) { + console.log(string.concat(" [FAIL] ", _getter)); + console.log(string.concat(" Container: ", vm.toString(expected))); + console.log(string.concat(" Validator: ", vm.toString(actual))); + return false; + } + return true; + } + + /// @notice Verifies a StandardValidator getter matches an environment variable address. + /// @param _validator The StandardValidator address. + /// @param _getter The getter name. + /// @param _envVar The environment variable name. + /// @return True if the values match. 
+ function _verifyEnvAddress( + address _validator, + string memory _getter, + string memory _envVar + ) + internal + view + returns (bool) + { + address actual = _getAddressFromValidator(_validator, string.concat(_getter, "()")); + // nosemgrep: sol-style-vm-env-only-in-config-sol + address expected = vm.envAddress(_envVar); + + if (actual != expected) { + console.log(string.concat(" [FAIL] ", _getter)); + console.log(string.concat(" Expected (", _envVar, "): ", vm.toString(expected))); + console.log(string.concat(" Actual: ", vm.toString(actual))); + return false; + } + return true; + } + + /// @notice Verifies a StandardValidator getter matches an environment variable uint256. + /// @param _validator The StandardValidator address. + /// @param _getter The getter name. + /// @param _envVar The environment variable name. + /// @return True if the values match. + function _verifyEnvUint256( + address _validator, + string memory _getter, + string memory _envVar + ) + internal + view + returns (bool) + { + uint256 actual = _getUintFromValidator(_validator, string.concat(_getter, "()")); + // nosemgrep: sol-style-vm-env-only-in-config-sol + uint256 expected = vm.envUint(_envVar); + + if (actual != expected) { + console.log(string.concat(" [FAIL] ", _getter)); + console.log(string.concat(" Expected (", _envVar, "): ", vm.toString(expected))); + console.log(string.concat(" Actual: ", vm.toString(actual))); + return false; + } + return true; + } + + /// @notice Verifies a StandardValidator getter is zero on mainnet. + /// @param _validator The StandardValidator address. + /// @param _getter The getter name. + /// @return True if zero on mainnet (or not mainnet). 
+ function _verifyZeroOnMainnet(address _validator, string memory _getter) internal view returns (bool) { + // Skip check if not mainnet or if in a testing environment + // Testing environment is detected by code at the TESTING_ENVIRONMENT_ADDRESS + if (block.chainid != 1 || Constants.TESTING_ENVIRONMENT_ADDRESS.code.length > 0) { + return true; + } + + bytes32 actual = _getBytes32FromValidator(_validator, string.concat(_getter, "()")); + + if (actual != bytes32(0)) { + console.log(string.concat(" [FAIL] ", _getter, " must be zero on mainnet")); + return false; + } + return true; + } + + /// @notice Finds the address of a field in the Container implementations struct. + /// @param _getter The field name to find. + /// @param _containerFields Array of field names. + /// @param _containerData ABI-encoded implementations struct. + /// @return The address at the matching field, or address(0) if not found. + function _findContainerImpl( + string memory _getter, + string[] memory _containerFields, + bytes memory _containerData + ) + internal + pure + returns (address) + { + for (uint256 i = 0; i < _containerFields.length; i++) { + if (LibString.eq(_getter, _containerFields[i])) { + return abi.decode(Bytes.slice(_containerData, i * 32, 32), (address)); + } + } + return address(0); + } + + /// @notice Gets an address value from a StandardValidator getter. + /// @param _validator The StandardValidator address. + /// @param _sig The function signature (e.g., "superchainConfig()"). + /// @return The address returned by the getter. + function _getAddressFromValidator(address _validator, string memory _sig) internal view returns (address) { + // nosemgrep: sol-style-use-abi-encodecall + (bool ok, bytes memory data) = _validator.staticcall(abi.encodeWithSignature(_sig)); + if (!ok) revert VerifyOPCM_ValidatorCallFailed(_sig); + return abi.decode(data, (address)); + } + + /// @notice Gets a uint256 value from a StandardValidator getter. 
+ /// @param _validator The StandardValidator address. + /// @param _sig The function signature. + /// @return The uint256 returned by the getter. + function _getUintFromValidator(address _validator, string memory _sig) internal view returns (uint256) { + // nosemgrep: sol-style-use-abi-encodecall + (bool ok, bytes memory data) = _validator.staticcall(abi.encodeWithSignature(_sig)); + if (!ok) revert VerifyOPCM_ValidatorCallFailed(_sig); + return abi.decode(data, (uint256)); + } + + /// @notice Gets a bytes32 value from a StandardValidator getter. + /// @param _validator The StandardValidator address. + /// @param _sig The function signature. + /// @return The bytes32 returned by the getter. + function _getBytes32FromValidator(address _validator, string memory _sig) internal view returns (bytes32) { + // nosemgrep: sol-style-use-abi-encodecall + (bool ok, bytes memory data) = _validator.staticcall(abi.encodeWithSignature(_sig)); + if (!ok) revert VerifyOPCM_ValidatorCallFailed(_sig); + return abi.decode(data, (bytes32)); + } + + /// @notice Finds the position of a character in a string. + /// @param _str The string to search. + /// @param _char The character to find (as a single-char string). + /// @return The index of the first occurrence, or string length if not found. 
+ function _findChar(string memory _str, string memory _char) internal pure returns (uint256) { + bytes memory strBytes = bytes(_str); + bytes memory charBytes = bytes(_char); + if (charBytes.length != 1) revert VerifyOPCM_MustBeSingleChar(); + bytes1 target = charBytes[0]; + for (uint256 i = 0; i < strBytes.length; i++) { + if (strBytes[i] == target) { + return i; + } + } + return strBytes.length; + } } diff --git a/packages/contracts-bedrock/scripts/libraries/Config.sol b/packages/contracts-bedrock/scripts/libraries/Config.sol index 70988a582cf..6d087b1fc0d 100644 --- a/packages/contracts-bedrock/scripts/libraries/Config.sol +++ b/packages/contracts-bedrock/scripts/libraries/Config.sol @@ -281,6 +281,11 @@ library Config { return vm.envOr("DEV_FEATURE__OPCM_V2", false); } + /// @notice Returns true if the development feature l2cm is enabled. + function devFeatureL2CM() internal view returns (bool) { + return vm.envOr("DEV_FEATURE__L2CM", false); + } + /// @notice Returns true if the system feature custom_gas_token is enabled. 
function sysFeatureCustomGasToken() internal view returns (bool) { return vm.envOr("SYS_FEATURE__CUSTOM_GAS_TOKEN", false); diff --git a/packages/contracts-bedrock/snapshots/abi/ConditionalDeployer.json b/packages/contracts-bedrock/snapshots/abi/ConditionalDeployer.json new file mode 100644 index 00000000000..12e31561c84 --- /dev/null +++ b/packages/contracts-bedrock/snapshots/abi/ConditionalDeployer.json @@ -0,0 +1,95 @@ +[ + { + "inputs": [ + { + "internalType": "bytes32", + "name": "_salt", + "type": "bytes32" + }, + { + "internalType": "bytes", + "name": "_code", + "type": "bytes" + } + ], + "name": "deploy", + "outputs": [ + { + "internalType": "address", + "name": "implementation_", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "deterministicDeploymentProxy", + "outputs": [ + { + "internalType": "address", + "name": "deterministicDeploymentProxy_", + "type": "address" + } + ], + "stateMutability": "pure", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "implementation", + "type": "address" + }, + { + "indexed": false, + "internalType": "bytes32", + "name": "salt", + "type": "bytes32" + } + ], + "name": "ImplementationDeployed", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "implementation", + "type": "address" + } + ], + "name": "ImplementationExists", + "type": "event" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "name": "ConditionalDeployer_DeploymentFailed", + "type": "error" + } +] \ No newline at end of file diff --git a/packages/contracts-bedrock/snapshots/abi/L2ProxyAdmin.json 
b/packages/contracts-bedrock/snapshots/abi/L2ProxyAdmin.json new file mode 100644 index 00000000000..28eecaa8bee --- /dev/null +++ b/packages/contracts-bedrock/snapshots/abi/L2ProxyAdmin.json @@ -0,0 +1,361 @@ +[ + { + "inputs": [ + { + "internalType": "address", + "name": "_owner", + "type": "address" + } + ], + "stateMutability": "nonpayable", + "type": "constructor" + }, + { + "inputs": [], + "name": "addressManager", + "outputs": [ + { + "internalType": "contract IAddressManager", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address payable", + "name": "_proxy", + "type": "address" + }, + { + "internalType": "address", + "name": "_newAdmin", + "type": "address" + } + ], + "name": "changeProxyAdmin", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address payable", + "name": "_proxy", + "type": "address" + } + ], + "name": "getProxyAdmin", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_proxy", + "type": "address" + } + ], + "name": "getProxyImplementation", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "implementationName", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "isUpgrading", + "outputs": [ + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "owner", + "outputs": [ + { + "internalType": "address", + "name": 
"", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "proxyType", + "outputs": [ + { + "internalType": "enum ProxyAdmin.ProxyType", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "renounceOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "string", + "name": "_name", + "type": "string" + }, + { + "internalType": "address", + "name": "_address", + "type": "address" + } + ], + "name": "setAddress", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "contract IAddressManager", + "name": "_address", + "type": "address" + } + ], + "name": "setAddressManager", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + }, + { + "internalType": "string", + "name": "_name", + "type": "string" + } + ], + "name": "setImplementationName", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_address", + "type": "address" + }, + { + "internalType": "enum ProxyAdmin.ProxyType", + "name": "_type", + "type": "uint8" + } + ], + "name": "setProxyType", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "bool", + "name": "_upgrading", + "type": "bool" + } + ], + "name": "setUpgrading", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "transferOwnership", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" 
+ }, + { + "inputs": [ + { + "internalType": "address payable", + "name": "_proxy", + "type": "address" + }, + { + "internalType": "address", + "name": "_implementation", + "type": "address" + } + ], + "name": "upgrade", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address payable", + "name": "_proxy", + "type": "address" + }, + { + "internalType": "address", + "name": "_implementation", + "type": "address" + }, + { + "internalType": "bytes", + "name": "_data", + "type": "bytes" + } + ], + "name": "upgradeAndCall", + "outputs": [], + "stateMutability": "payable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_l2ContractsManager", + "type": "address" + } + ], + "name": "upgradePredeploys", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "version", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "previousOwner", + "type": "address" + }, + { + "indexed": true, + "internalType": "address", + "name": "newOwner", + "type": "address" + } + ], + "name": "OwnershipTransferred", + "type": "event" + }, + { + "anonymous": false, + "inputs": [ + { + "indexed": true, + "internalType": "address", + "name": "l2ContractsManager", + "type": "address" + } + ], + "name": "PredeploysUpgraded", + "type": "event" + }, + { + "inputs": [], + "name": "L2ProxyAdmin__Unauthorized", + "type": "error" + }, + { + "inputs": [ + { + "internalType": "bytes", + "name": "data", + "type": "bytes" + } + ], + "name": "L2ProxyAdmin__UpgradeFailed", + "type": "error" + } +] \ No newline at end of file diff --git a/packages/contracts-bedrock/snapshots/semver-lock.json b/packages/contracts-bedrock/snapshots/semver-lock.json index 
57cb13ea024..777cf1ef9d0 100644 --- a/packages/contracts-bedrock/snapshots/semver-lock.json +++ b/packages/contracts-bedrock/snapshots/semver-lock.json @@ -44,21 +44,25 @@ "sourceCodeHash": "0x3b7b7a1023e6e87ce4680eee3cc4eebefc15b5ec80db3d39e824fbdd521762db" }, "src/L1/SuperchainConfig.sol:SuperchainConfig": { - "initCodeHash": "0xfb8c98028f1a0e70bb1afbbc532035ea71b0724883554eeaae62e1910a6c1cd9", - "sourceCodeHash": "0xbf344c4369b8cb00ec7a3108f72795747f3bc59ab5b37ac18cf21e72e2979dbf" + "initCodeHash": "0x7184a7d4ffb0e4624c8699d2573fa05eae028ca311e2f5bdd901e409cf6305ec", + "sourceCodeHash": "0x65c3a0c5d721408a6c8d66b817b4881820ea4119d6452796725fbf92ad9e3c8f" }, "src/L1/SystemConfig.sol:SystemConfig": { "initCodeHash": "0xd4ec112de4cf7173668374479b7405bab9c828e5b32c946ef8ab5cd021f9703b", "sourceCodeHash": "0xb3184aa5d95a82109e7134d1f61941b30e25f655b9849a0e303d04bbce0cde0b" }, "src/L1/opcm/OPContractsManagerV2.sol:OPContractsManagerV2": { - "initCodeHash": "0x5f3548d6d5502669d34ff3104826d8498c3f74be2f6840a6acb9860e266d96a8", - "sourceCodeHash": "0xf7c02dec35e9c34e7e3e8f1fe939f7b84243064b423e38ba82fb06e389732cc7" + "initCodeHash": "0x5cbc998e57035d8658824e16dacaab8c702f9e18f482e16989b9420e5a7e8190", + "sourceCodeHash": "0x11678225efb1fb4593085febd8f438eeb4752c0ab3dfd2ee1c4fe47970dda953" }, "src/L2/BaseFeeVault.sol:BaseFeeVault": { "initCodeHash": "0x838bbd7f381e84e21887f72bd1da605bfc4588b3c39aed96cbce67c09335b3ee", "sourceCodeHash": "0xcb329746df0baddd3dc03c6c88da5d6bdc0f0a96d30e6dc78d0891bb1e935032" }, + "src/L2/ConditionalDeployer.sol:ConditionalDeployer": { + "initCodeHash": "0x580996fd149ac478b9363399eab8ba479ed55a324a215ea8e4841f9e9f033ac5", + "sourceCodeHash": "0x7c83a95f65cfd963af0caf90579e8f8544e3f883a48bc803720ec251c72aeffe" + }, "src/L2/CrossL2Inbox.sol:CrossL2Inbox": { "initCodeHash": "0x56f868e561c4abe539043f98b16aad9305479e68fd03ece2233249b0c73a24ea", "sourceCodeHash": "0x7c6d362a69a480a06a079542a7fd2ce48cb1dd80d6b9043fba60218569371349" @@ -69,7 +73,7 @@ 
}, "src/L2/FeeSplitter.sol:FeeSplitter": { "initCodeHash": "0xdaae3903628f760e36da47c8f8d75d20962d1811fb5129cb09eb01803e67c095", - "sourceCodeHash": "0x95dd8da08e907fa398c98710bb12fda9fb50d9688c5d2144fd9a424c99e672c5" + "sourceCodeHash": "0xdc50fe7643d26950527dac75d69ddbdebf0b5c4f061fe2938eada849e179a217" }, "src/L2/GasPriceOracle.sol:GasPriceOracle": { "initCodeHash": "0xf72c23d9c3775afd7b645fde429d09800622d329116feb5ff9829634655123ca", @@ -89,7 +93,7 @@ }, "src/L2/L1Withdrawer.sol:L1Withdrawer": { "initCodeHash": "0x6efb9055142e90b408c6312074243769df0d365f6f984e226e0320bec55a45b8", - "sourceCodeHash": "0x6a12e541b47b79f19d1061ff7b64ffdcffa1e8d06225cca6798daca53fd96890" + "sourceCodeHash": "0x7e438cbbe9a8248887b8c21f68c811f90a5cae4902cbbf7b0a1f6cd644dc42d9" }, "src/L2/L2ContractsManager.sol:L2ContractsManager": { "initCodeHash": "0x5770f8b4f605f5b13b8f499a7d21f90df2aad86b09268dcb517c14fef33e2f76", @@ -103,6 +107,10 @@ "initCodeHash": "0x863f0f5b410983f3e51cd97c60a3a42915141b7452864d0e176571d640002b81", "sourceCodeHash": "0xc05bfcfadfd09a56cfea68e7c1853faa36d114d9a54cd307348be143e442c35a" }, + "src/L2/L2ProxyAdmin.sol:L2ProxyAdmin": { + "initCodeHash": "0x85b054c8105191d272014459858020a90fcf7db401ef0fb028999f967461d25a", + "sourceCodeHash": "0x0d402be0c35dcdd3f6642c2949932705dd09c8e2d08a06c328ccbf8ed6c65808" + }, "src/L2/L2StandardBridge.sol:L2StandardBridge": { "initCodeHash": "0xba5b288a396b34488ba7be68473305529c7da7c43e5f1cfc48d6a4aecd014103", "sourceCodeHash": "0x9dd26676cd1276c807ffd4747236783c5170d0919c70693e70b7e4c4c2675429" @@ -169,7 +177,7 @@ }, "src/L2/SuperchainRevSharesCalculator.sol:SuperchainRevSharesCalculator": { "initCodeHash": "0xdfff95660d2d470e198054bb1717a30a45a806d2eaa3720fb43acaa3356c9a3e", - "sourceCodeHash": "0x4f494790d6044882ca0150bb28bb4abbf45cd2617bbdae0ee13b0085961ca788" + "sourceCodeHash": "0x741d4fe13a88e0f2667ecb1c0c8a8550059800f8ef63241d6ffd14bf0a53ae5e" }, "src/L2/SuperchainTokenBridge.sol:SuperchainTokenBridge": { "initCodeHash": 
"0xb0d25dc03b9c84b07b263921c2b717e6caad3f4297fa939207e35978d7d25abe", diff --git a/packages/contracts-bedrock/snapshots/storageLayout/ConditionalDeployer.json b/packages/contracts-bedrock/snapshots/storageLayout/ConditionalDeployer.json new file mode 100644 index 00000000000..0637a088a01 --- /dev/null +++ b/packages/contracts-bedrock/snapshots/storageLayout/ConditionalDeployer.json @@ -0,0 +1 @@ +[] \ No newline at end of file diff --git a/packages/contracts-bedrock/snapshots/storageLayout/L2ProxyAdmin.json b/packages/contracts-bedrock/snapshots/storageLayout/L2ProxyAdmin.json new file mode 100644 index 00000000000..a0b6f46bf85 --- /dev/null +++ b/packages/contracts-bedrock/snapshots/storageLayout/L2ProxyAdmin.json @@ -0,0 +1,37 @@ +[ + { + "bytes": "20", + "label": "_owner", + "offset": 0, + "slot": "0", + "type": "address" + }, + { + "bytes": "32", + "label": "proxyType", + "offset": 0, + "slot": "1", + "type": "mapping(address => enum ProxyAdmin.ProxyType)" + }, + { + "bytes": "32", + "label": "implementationName", + "offset": 0, + "slot": "2", + "type": "mapping(address => string)" + }, + { + "bytes": "20", + "label": "addressManager", + "offset": 0, + "slot": "3", + "type": "contract IAddressManager" + }, + { + "bytes": "1", + "label": "upgrading", + "offset": 20, + "slot": "3", + "type": "bool" + } +] \ No newline at end of file diff --git a/packages/contracts-bedrock/src/L1/SuperchainConfig.sol b/packages/contracts-bedrock/src/L1/SuperchainConfig.sol index c0157fef35c..99c7c0dedd5 100644 --- a/packages/contracts-bedrock/src/L1/SuperchainConfig.sol +++ b/packages/contracts-bedrock/src/L1/SuperchainConfig.sol @@ -13,9 +13,6 @@ import { ISemver } from "interfaces/universal/ISemver.sol"; /// @custom:audit none This contracts is not yet audited. /// @title SuperchainConfig /// @notice The SuperchainConfig contract is used to manage configuration of global superchain values. 
-/// @dev WARNING: When upgrading this contract, any active pause states will be lost as the pause state -/// is stored in storage variables that are not preserved during upgrades. Therefore, this contract -/// should not be upgraded while the system is paused. contract SuperchainConfig is ProxyAdminOwnedBase, Initializable, ReinitializableBase, ISemver { /// @notice Thrown when a caller is not the guardian but tries to call a guardian-only function error SuperchainConfig_OnlyGuardian(); @@ -56,8 +53,8 @@ contract SuperchainConfig is ProxyAdminOwnedBase, Initializable, Reinitializable event ConfigUpdate(UpdateType indexed updateType, bytes data); /// @notice Semantic version. - /// @custom:semver 2.4.0 - string public constant version = "2.4.0"; + /// @custom:semver 2.4.1 + string public constant version = "2.4.1"; /// @notice Constructs the SuperchainConfig contract. constructor() ReinitializableBase(2) { diff --git a/packages/contracts-bedrock/src/L1/opcm/OPContractsManagerV2.sol b/packages/contracts-bedrock/src/L1/opcm/OPContractsManagerV2.sol index 24ac4a38b2b..c17aa044d23 100644 --- a/packages/contracts-bedrock/src/L1/opcm/OPContractsManagerV2.sol +++ b/packages/contracts-bedrock/src/L1/opcm/OPContractsManagerV2.sol @@ -147,9 +147,9 @@ contract OPContractsManagerV2 is ISemver, OPContractsManagerUtilsCaller { /// - Major bump: New required sequential upgrade /// - Minor bump: Replacement OPCM for same upgrade /// - Patch bump: Development changes (expected for normal dev work) - /// @custom:semver 7.0.6 + /// @custom:semver 7.0.8 function version() public pure returns (string memory) { - return "7.0.6"; + return "7.0.8"; } /// @param _standardValidator The standard validator for this OPCM release. @@ -181,7 +181,7 @@ contract OPContractsManagerV2 is ISemver, OPContractsManagerUtilsCaller { // If we expand the scope of this function to add other Superchain-wide contracts, we'll // probably want to start following a similar pattern to the chain upgrade flow. 
- // Upgrade the SuperchainConfig if it has changed. + // Upgrade the SuperchainConfig. _upgrade( IProxyAdmin(_inp.superchainConfig.proxyAdmin()), address(_inp.superchainConfig), @@ -643,7 +643,7 @@ contract OPContractsManagerV2 is ISemver, OPContractsManagerUtilsCaller { /// @notice Validates the deployment/upgrade config. /// @param _cfg The full config. - function _assertValidFullConfig(FullConfig memory _cfg) internal pure { + function _assertValidFullConfig(FullConfig memory _cfg, bool _isInitialDeployment) internal pure { // Start validating the dispute game configs. Put allowed game types here. GameType[] memory validGameTypes = new GameType[](3); validGameTypes[0] = GameTypes.CANNON; @@ -667,6 +667,15 @@ contract OPContractsManagerV2 is ISemver, OPContractsManagerUtilsCaller { if (!_cfg.disputeGameConfigs[i].enabled && _cfg.disputeGameConfigs[i].initBond != 0) { revert OPContractsManagerV2_InvalidGameConfigs(); } + + // During initial deployment, only PERMISSIONED_CANNON can be enabled, because no prestate exists for + // permissionless games. + if ( + _isInitialDeployment && (validGameTypes[i].raw() != GameTypes.PERMISSIONED_CANNON.raw()) + && _cfg.disputeGameConfigs[i].enabled + ) { + revert OPContractsManagerV2_InvalidGameConfigs(); + } } // We currently REQUIRE that the PermissionedDisputeGame is enabled. We may be able to @@ -691,7 +700,7 @@ contract OPContractsManagerV2 is ISemver, OPContractsManagerUtilsCaller { returns (ChainContracts memory) { // Validate the config. - _assertValidFullConfig(_cfg); + _assertValidFullConfig(_cfg, _isInitialDeployment); // Load the implementations. 
IOPContractsManagerContainer.Implementations memory impls = implementations(); diff --git a/packages/contracts-bedrock/src/L2/ConditionalDeployer.sol b/packages/contracts-bedrock/src/L2/ConditionalDeployer.sol new file mode 100644 index 00000000000..fce8e86677e --- /dev/null +++ b/packages/contracts-bedrock/src/L2/ConditionalDeployer.sol @@ -0,0 +1,72 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Interfaces +import { ISemver } from "interfaces/universal/ISemver.sol"; + +/// @custom:proxied true +/// @custom:predeploy 0x420000000000000000000000000000000000002C +/// @title ConditionalDeployer +/// @notice ConditionalDeployer is used to deploy implementations for predeploys during network upgrades. +/// It uses Arachnid's DeterministicDeploymentProxy to deploy the implementations. +contract ConditionalDeployer is ISemver { + /// @notice Emitted when an implementation is deployed. + /// @param implementation The address of the deployed implementation. + /// @param salt The salt used for deployment. + event ImplementationDeployed(address indexed implementation, bytes32 salt); + + /// @notice Emitted when deployment is skipped because implementation already exists. + /// @param implementation The address of the existing implementation. + event ImplementationExists(address indexed implementation); + + /// @notice Error thrown when deployment fails. + /// @param data The data returned from the deployment call. + error ConditionalDeployer_DeploymentFailed(bytes data); + + /// @notice Address of the Arachnid DeterministicDeploymentProxy. + address payable internal constant DETERMINISTIC_DEPLOYMENT_PROXY = + payable(0x4e59b44847b379578588920cA78FbF26c0B4956C); + + /// @notice Semantic version. + /// @custom:semver 1.0.0 + string public constant version = "1.0.0"; + + /// @notice Deploys an implementation using CREATE2 if it doesn't already exist. + /// @dev Does not support deployments requiring ETH. 
+ /// @dev Reverts when the deployment call to the DeterministicDeploymentProxy fails. + /// @param _salt The salt to use for CREATE2 deployment. + /// @param _code The initialization code for the contract. + /// @return implementation_ The address of the deployed or existing implementation. + function deploy(bytes32 _salt, bytes memory _code) external returns (address implementation_) { + // Compute the address where the contract will be deployed using CREATE2 formula + bytes32 codeHash = keccak256(_code); + address expectedImplementation = address( + uint160(uint256(keccak256(abi.encodePacked(bytes1(0xff), DETERMINISTIC_DEPLOYMENT_PROXY, _salt, codeHash)))) + ); + + // Check if implementation already exists + if (expectedImplementation.code.length != 0) { + emit ImplementationExists(expectedImplementation); + return expectedImplementation; + } + + // Deploy using Arachnid's DeterministicDeploymentProxy + // Calldata format: salt + initcode + // Returns: raw 20 bytes (deployed address, not ABI-encoded) + (bool success, bytes memory data) = DETERMINISTIC_DEPLOYMENT_PROXY.call(abi.encodePacked(_salt, _code)); + + // Decode the returned address (raw 20 bytes) + implementation_ = address(bytes20(data)); + if (!success || implementation_ != expectedImplementation) { + revert ConditionalDeployer_DeploymentFailed(data); + } + + emit ImplementationDeployed(implementation_, _salt); + } + + /// @notice Returns the address of the Arachnid's DeterministicDeploymentProxy. + /// @return deterministicDeploymentProxy_ The address of the Arachnid's DeterministicDeploymentProxy. 
+ function deterministicDeploymentProxy() external pure returns (address deterministicDeploymentProxy_) { + deterministicDeploymentProxy_ = DETERMINISTIC_DEPLOYMENT_PROXY; + } +} diff --git a/packages/contracts-bedrock/src/L2/FeeSplitter.sol b/packages/contracts-bedrock/src/L2/FeeSplitter.sol index b69b8a2691c..58008ece962 100644 --- a/packages/contracts-bedrock/src/L2/FeeSplitter.sol +++ b/packages/contracts-bedrock/src/L2/FeeSplitter.sol @@ -7,7 +7,7 @@ import { Types } from "src/libraries/Types.sol"; import { SafeCall } from "src/libraries/SafeCall.sol"; // Interfaces -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { ISemver } from "interfaces/universal/ISemver.sol"; import { ISharesCalculator } from "interfaces/L2/ISharesCalculator.sol"; import { IFeeVault } from "interfaces/L2/IFeeVault.sol"; @@ -188,7 +188,7 @@ contract FeeSplitter is ISemver, Initializable { /// @notice Updates the fee disbursement interval. Only callable by the ProxyAdmin owner. /// @param _newFeeDisbursementInterval The new fee disbursement interval in seconds. function setFeeDisbursementInterval(uint128 _newFeeDisbursementInterval) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert FeeSplitter_OnlyProxyAdminOwner(); } if (_newFeeDisbursementInterval == 0) { @@ -205,7 +205,7 @@ contract FeeSplitter is ISemver, Initializable { /// @notice Updates the share calculator contract. Only callable by the ProxyAdmin owner. /// @param _newSharesCalculator The new share calculator contract. 
function setSharesCalculator(ISharesCalculator _newSharesCalculator) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert FeeSplitter_OnlyProxyAdminOwner(); } if (address(_newSharesCalculator) == address(0)) revert FeeSplitter_SharesCalculatorCannotBeZero(); diff --git a/packages/contracts-bedrock/src/L2/FeeVault.sol b/packages/contracts-bedrock/src/L2/FeeVault.sol index 7443d2b57f9..866390b4118 100644 --- a/packages/contracts-bedrock/src/L2/FeeVault.sol +++ b/packages/contracts-bedrock/src/L2/FeeVault.sol @@ -8,7 +8,7 @@ import { Types } from "src/libraries/Types.sol"; // Interfaces import { IL2ToL1MessagePasser } from "interfaces/L2/IL2ToL1MessagePasser.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; // External // import from openzeppelin-contracts-v5 @@ -104,7 +104,7 @@ abstract contract FeeVault is Initializable { /// @dev If integrating the FeeSplitter contract, the minimum withdrawal amount must be set to 0 to /// avoid blocking withdrawals and disbursements for all vaults if one vault doesn't reach the threshold. function setMinWithdrawalAmount(uint256 _newMinWithdrawalAmount) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert FeeVault_OnlyProxyAdminOwner(); } @@ -117,7 +117,7 @@ abstract contract FeeVault is Initializable { /// @notice Updates the recipient of vault fees when they are withdrawn from the vault. /// @param _newRecipient The new recipient address. 
function setRecipient(address _newRecipient) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert FeeVault_OnlyProxyAdminOwner(); } @@ -132,7 +132,7 @@ abstract contract FeeVault is Initializable { /// withdraw them to an address on the same chain. /// @param _newWithdrawalNetwork The new withdrawal network. function setWithdrawalNetwork(Types.WithdrawalNetwork _newWithdrawalNetwork) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert FeeVault_OnlyProxyAdminOwner(); } diff --git a/packages/contracts-bedrock/src/L2/L1Withdrawer.sol b/packages/contracts-bedrock/src/L2/L1Withdrawer.sol index 8b55fdea67e..fbfc6fac6c0 100644 --- a/packages/contracts-bedrock/src/L2/L1Withdrawer.sol +++ b/packages/contracts-bedrock/src/L2/L1Withdrawer.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.25; import { ISemver } from "interfaces/universal/ISemver.sol"; import { IL2CrossDomainMessenger } from "interfaces/L2/IL2CrossDomainMessenger.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { Predeploys } from "src/libraries/Predeploys.sol"; /// @title L1Withdrawer @@ -82,7 +82,7 @@ contract L1Withdrawer is ISemver { /// @notice Updates the minimum withdrawal amount. Only callable by the ProxyAdmin owner. /// @param _newMinWithdrawalAmount The new minimum withdrawal amount. function setMinWithdrawalAmount(uint256 _newMinWithdrawalAmount) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert L1Withdrawer_OnlyProxyAdminOwner(); } uint256 oldMinWithdrawalAmount = minWithdrawalAmount; @@ -95,7 +95,7 @@ contract L1Withdrawer is ISemver { /// when the withdrawal is finalized. 
/// @param _newRecipient The new recipient address. function setRecipient(address _newRecipient) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert L1Withdrawer_OnlyProxyAdminOwner(); } address oldRecipient = recipient; @@ -107,7 +107,7 @@ contract L1Withdrawer is ISemver { /// @param _newWithdrawalGasLimit The new withdrawal gas limit. /// @dev If target on L1 is `FeesDepositor`, the gas limit should be at or above 800k gas. function setWithdrawalGasLimit(uint32 _newWithdrawalGasLimit) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert L1Withdrawer_OnlyProxyAdminOwner(); } uint32 oldWithdrawalGasLimit = withdrawalGasLimit; diff --git a/packages/contracts-bedrock/src/L2/L2ProxyAdmin.sol b/packages/contracts-bedrock/src/L2/L2ProxyAdmin.sol new file mode 100644 index 00000000000..7d7dfe2579a --- /dev/null +++ b/packages/contracts-bedrock/src/L2/L2ProxyAdmin.sol @@ -0,0 +1,52 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Interfaces +import { IL2ContractsManager } from "interfaces/L2/IL2ContractsManager.sol"; +import { ISemver } from "interfaces/universal/ISemver.sol"; + +// Libraries +import { Constants } from "src/libraries/Constants.sol"; + +// Contracts +import { ProxyAdmin } from "src/universal/ProxyAdmin.sol"; + +/// @custom:proxied true +/// @custom:predeploy 0x4200000000000000000000000000000000000018 +/// @title L2ProxyAdmin +/// @notice The L2ProxyAdmin is the administrative contract responsible for managing proxy upgrades +/// for L2 predeploy contracts. +/// @dev It extends the standard ProxyAdmin with an upgradePredeploys() function that orchestates +/// batch upgrades of multiple predeploys by delegating to an L2ContractsManager contract. 
+contract L2ProxyAdmin is ProxyAdmin, ISemver { + /// @notice Emitted when the predeploys are upgraded. + /// @param l2ContractsManager Address of the L2ContractsManager contract. + event PredeploysUpgraded(address indexed l2ContractsManager); + + /// @notice Thrown when the caller is not the depositor account. + error L2ProxyAdmin__Unauthorized(); + + /// @notice Thrown when the upgrade fails. + error L2ProxyAdmin__UpgradeFailed(bytes data); + + /// @notice The semantic version of the contract. + /// @custom:semver 1.0.0 + string public constant version = "1.0.0"; + + /// @notice The constructor for the L2ProxyAdmin contract. + /// @param _owner Address of the initial owner of this contract. + constructor(address _owner) ProxyAdmin(_owner) { } + + /// @notice Upgrades the predeploys via delegatecall to the l2ContractsManager contract. + /// @param _l2ContractsManager Address of the l2ContractsManager contract. + function upgradePredeploys(address _l2ContractsManager) external { + if (msg.sender != Constants.DEPOSITOR_ACCOUNT) revert L2ProxyAdmin__Unauthorized(); + + (bool success, bytes memory data) = + _l2ContractsManager.delegatecall(abi.encodeCall(IL2ContractsManager.upgrade, ())); + + if (!success) revert L2ProxyAdmin__UpgradeFailed(data); + + emit PredeploysUpgraded(_l2ContractsManager); + } +} diff --git a/packages/contracts-bedrock/src/L2/SuperchainRevSharesCalculator.sol b/packages/contracts-bedrock/src/L2/SuperchainRevSharesCalculator.sol index 7270140a34f..a9ba651d4dd 100644 --- a/packages/contracts-bedrock/src/L2/SuperchainRevSharesCalculator.sol +++ b/packages/contracts-bedrock/src/L2/SuperchainRevSharesCalculator.sol @@ -5,7 +5,7 @@ pragma solidity 0.8.25; import { Predeploys } from "src/libraries/Predeploys.sol"; // Interfaces -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { ISemver } from "interfaces/universal/ISemver.sol"; import { ISharesCalculator } 
from "interfaces/L2/ISharesCalculator.sol"; @@ -103,7 +103,7 @@ contract SuperchainRevSharesCalculator is ISemver, ISharesCalculator { /// @notice Sets the share recipient. Only callable by the ProxyAdmin owner. /// @param _newShareRecipient The new share recipient address. function setShareRecipient(address payable _newShareRecipient) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert SharesCalculator_OnlyProxyAdminOwner(); } address oldShareRecipient = shareRecipient; @@ -114,7 +114,7 @@ contract SuperchainRevSharesCalculator is ISemver, ISharesCalculator { /// @notice Sets the remainder recipient. Only callable by the ProxyAdmin owner. /// @param _newRemainderRecipient The new remainder recipient address. function setRemainderRecipient(address payable _newRemainderRecipient) external { - if (msg.sender != IProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { + if (msg.sender != IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()) { revert SharesCalculator_OnlyProxyAdminOwner(); } address oldRemainderRecipient = remainderRecipient; diff --git a/packages/contracts-bedrock/src/libraries/DevFeatures.sol b/packages/contracts-bedrock/src/libraries/DevFeatures.sol index 2a1dc1854c1..4fa5287c1b5 100644 --- a/packages/contracts-bedrock/src/libraries/DevFeatures.sol +++ b/packages/contracts-bedrock/src/libraries/DevFeatures.sol @@ -28,6 +28,9 @@ library DevFeatures { /// @notice The feature that enables the OPContractsManagerV2 contract. bytes32 public constant OPCM_V2 = bytes32(0x0000000000000000000000000000000000000000000000000000000000010000); + /// @notice The feature that enables L2CM. + bytes32 public constant L2CM = bytes32(0x0000000000000000000000000000000000000000000000000000000000100000); + /// @notice Checks if a feature is enabled in a bitmap. 
Note that this function does not check /// that the input feature represents a single feature and the bitwise AND operation /// allows for multiple features to be enabled at once. Users should generally check diff --git a/packages/contracts-bedrock/src/libraries/Predeploys.sol b/packages/contracts-bedrock/src/libraries/Predeploys.sol index 698126c4b35..6bb8245d76b 100644 --- a/packages/contracts-bedrock/src/libraries/Predeploys.sol +++ b/packages/contracts-bedrock/src/libraries/Predeploys.sol @@ -65,7 +65,7 @@ library Predeploys { /// @notice Address of the OptimismMintableERC721Factory predeploy. address internal constant OPTIMISM_MINTABLE_ERC721_FACTORY = 0x4200000000000000000000000000000000000017; - /// @notice Address of the ProxyAdmin predeploy. + /// @notice Address of the L2ProxyAdmin predeploy. address internal constant PROXY_ADMIN = 0x4200000000000000000000000000000000000018; /// @notice Address of the BaseFeeVault predeploy. @@ -126,6 +126,9 @@ library Predeploys { /// @notice Address of the FeeSplitter predeploy. address internal constant FEE_SPLITTER = 0x420000000000000000000000000000000000002B; + /// @notice Address of the ConditionalDeployer predeploy. + address internal constant CONDITIONAL_DEPLOYER = 0x420000000000000000000000000000000000002C; + /// @notice Returns the name of the predeploy at the given address. 
function getName(address _addr) internal pure returns (string memory out_) { require(isPredeployNamespace(_addr), "Predeploys: address must be a predeploy"); @@ -143,7 +146,7 @@ library Predeploys { if (_addr == L1_BLOCK_ATTRIBUTES) return "L1Block"; if (_addr == L2_TO_L1_MESSAGE_PASSER) return "L2ToL1MessagePasser"; if (_addr == OPTIMISM_MINTABLE_ERC721_FACTORY) return "OptimismMintableERC721Factory"; - if (_addr == PROXY_ADMIN) return "ProxyAdmin"; + if (_addr == PROXY_ADMIN) return "L2ProxyAdmin"; if (_addr == BASE_FEE_VAULT) return "BaseFeeVault"; if (_addr == L1_FEE_VAULT) return "L1FeeVault"; if (_addr == OPERATOR_FEE_VAULT) return "OperatorFeeVault"; @@ -161,6 +164,7 @@ library Predeploys { if (_addr == LIQUIDITY_CONTROLLER) return "LiquidityController"; if (_addr == NATIVE_ASSET_LIQUIDITY) return "NativeAssetLiquidity"; if (_addr == FEE_SPLITTER) return "FeeSplitter"; + if (_addr == CONDITIONAL_DEPLOYER) return "ConditionalDeployer"; revert("Predeploys: unnamed predeploy"); } @@ -174,7 +178,8 @@ library Predeploys { address _addr, uint256 _fork, bool _enableCrossL2Inbox, - bool _isCustomGasToken + bool _isCustomGasToken, + bool _useL2CM ) internal pure @@ -190,7 +195,7 @@ library Predeploys { || (_fork >= uint256(Fork.INTEROP) && _enableCrossL2Inbox && _addr == CROSS_L2_INBOX) || (_fork >= uint256(Fork.INTEROP) && _addr == L2_TO_L2_CROSS_DOMAIN_MESSENGER) || (_isCustomGasToken && _addr == LIQUIDITY_CONTROLLER) - || (_isCustomGasToken && _addr == NATIVE_ASSET_LIQUIDITY); + || (_isCustomGasToken && _addr == NATIVE_ASSET_LIQUIDITY) || (_useL2CM && _addr == CONDITIONAL_DEPLOYER); } /// @notice Returns true if the address is in the predeploy namespace. 
diff --git a/packages/contracts-bedrock/test/L1/OPContractsManager.t.sol b/packages/contracts-bedrock/test/L1/OPContractsManager.t.sol index d74c85b2a9a..e175104ef75 100644 --- a/packages/contracts-bedrock/test/L1/OPContractsManager.t.sol +++ b/packages/contracts-bedrock/test/L1/OPContractsManager.t.sol @@ -163,23 +163,16 @@ contract OPContractsManager_Upgrade_Harness is CommonTest { // Artifacts encoded as an address. l2ChainId = uint256(uint160(address(artifacts.mustGetAddress("L2ChainId")))); - delayedWETHPermissionedGameProxy = - IDelayedWETH(payable(artifacts.mustGetAddress("PermissionedDelayedWETHProxy"))); - permissionedDisputeGame = IPermissionedDisputeGame(address(artifacts.mustGetAddress("PermissionedDisputeGame"))); IDisputeGameFactory dgf = IDisputeGameFactory(address(artifacts.mustGetAddress("DisputeGameFactoryProxy"))); - faultDisputeGame = IFaultDisputeGame(address(dgf.gameImpls(GameTypes.CANNON))); - delayedWeth = faultDisputeGame.weth(); // Grab the pre-upgrade state. Use getGameImplPrestate to handle both v1 and v2 // dispute games (v1 stores prestate on game impl, v2 stores it in gameArgs). 
preUpgradeState = PreUpgradeState({ - cannonAbsolutePrestate: DisputeGames.getGameImplPrestate(disputeGameFactory, GameTypes.CANNON), - permissionedAbsolutePrestate: DisputeGames.getGameImplPrestate( - disputeGameFactory, GameTypes.PERMISSIONED_CANNON - ), - cannonKonaAbsolutePrestate: DisputeGames.getGameImplPrestate(disputeGameFactory, GameTypes.CANNON_KONA), - permissionlessWethProxy: delayedWeth, - permissionedCannonWethProxy: delayedWETHPermissionedGameProxy + cannonAbsolutePrestate: DisputeGames.getGameImplPrestate(dgf, GameTypes.CANNON), + permissionedAbsolutePrestate: DisputeGames.getGameImplPrestate(dgf, GameTypes.PERMISSIONED_CANNON), + cannonKonaAbsolutePrestate: DisputeGames.getGameImplPrestate(dgf, GameTypes.CANNON_KONA), + permissionlessWethProxy: DisputeGames.getGameImplDelayedWeth(dgf, GameTypes.CANNON), + permissionedCannonWethProxy: DisputeGames.getGameImplDelayedWeth(dgf, GameTypes.PERMISSIONED_CANNON) }); // Since this superchainConfig is already at the expected reinitializer version... 
@@ -397,11 +390,19 @@ contract OPContractsManager_Upgrade_Harness is CommonTest { vm.assertEq(blockhash(block.number - 1), game.l1Head().raw()); if (gt.raw() == GameTypes.PERMISSIONED_CANNON.raw()) { - vm.assertEq(address(preUpgradeState.permissionedCannonWethProxy), address(game.weth())); + vm.assertEq( + address(preUpgradeState.permissionedCannonWethProxy), + address(game.weth()), + "Incorrect permissioned WETH" + ); vm.assertEq(_challenger, game.challenger()); vm.assertEq(_proposer, game.proposer()); } else { - vm.assertEq(address(preUpgradeState.permissionlessWethProxy), address(game.weth())); + vm.assertEq( + address(preUpgradeState.permissionlessWethProxy), + address(game.weth()), + "Incorrect permissionless WETH" + ); } } @@ -1428,8 +1429,10 @@ contract OPContractsManager_Upgrade_Test is OPContractsManager_Upgrade_Harness { disputeGameFactory, GameTypes.PERMISSIONED_CANNON ), cannonKonaAbsolutePrestate: DisputeGames.getGameImplPrestate(disputeGameFactory, GameTypes.CANNON_KONA), - permissionlessWethProxy: delayedWeth, - permissionedCannonWethProxy: delayedWETHPermissionedGameProxy + permissionlessWethProxy: DisputeGames.getGameImplDelayedWeth(disputeGameFactory, GameTypes.CANNON), + permissionedCannonWethProxy: DisputeGames.getGameImplDelayedWeth( + disputeGameFactory, GameTypes.PERMISSIONED_CANNON + ) }); } @@ -1447,9 +1450,14 @@ contract OPContractsManager_Upgrade_Test is OPContractsManager_Upgrade_Harness { function test_verifyOpcmCorrectness_succeeds() public { skipIfCoverage(); // Coverage changes bytecode and breaks the verification script. - // Set up environment variables with the actual OPCM addresses for tests that need themqq + // Set up environment variables with the actual OPCM addresses for tests that need them. + // These values come from the StandardValidator that was deployed with the OPCM. 
vm.setEnv("EXPECTED_SUPERCHAIN_CONFIG", vm.toString(address(opcm.superchainConfig()))); vm.setEnv("EXPECTED_PROTOCOL_VERSIONS", vm.toString(address(opcm.protocolVersions()))); + IOPContractsManagerStandardValidator validator = opcm.opcmStandardValidator(); + vm.setEnv("EXPECTED_L1_PAO_MULTISIG", vm.toString(validator.l1PAOMultisig())); + vm.setEnv("EXPECTED_CHALLENGER", vm.toString(validator.challenger())); + vm.setEnv("EXPECTED_WITHDRAWAL_DELAY_SECONDS", vm.toString(validator.withdrawalDelaySeconds())); // Run the upgrade test and checks runCurrentUpgrade(upgrader); diff --git a/packages/contracts-bedrock/test/L1/opcm/OPContractsManagerV2.t.sol b/packages/contracts-bedrock/test/L1/opcm/OPContractsManagerV2.t.sol index 9780b948066..e8c184634df 100644 --- a/packages/contracts-bedrock/test/L1/opcm/OPContractsManagerV2.t.sol +++ b/packages/contracts-bedrock/test/L1/opcm/OPContractsManagerV2.t.sol @@ -1030,10 +1030,10 @@ contract OPContractsManagerV2_Deploy_Test is OPContractsManagerV2_TestInit { address initialProposer = DisputeGames.permissionedGameProposer(disputeGameFactory); deployConfig.disputeGameConfigs.push( IOPContractsManagerUtils.DisputeGameConfig({ - enabled: true, - initBond: DEFAULT_DISPUTE_GAME_INIT_BOND, // Standard init bond + enabled: false, + initBond: 0, gameType: GameTypes.CANNON, - gameArgs: abi.encode(IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonPrestate })) + gameArgs: bytes("") }) ); deployConfig.disputeGameConfigs.push( @@ -1052,12 +1052,10 @@ contract OPContractsManagerV2_Deploy_Test is OPContractsManagerV2_TestInit { ); deployConfig.disputeGameConfigs.push( IOPContractsManagerUtils.DisputeGameConfig({ - enabled: true, - initBond: DEFAULT_DISPUTE_GAME_INIT_BOND, // Standard init bond + enabled: false, + initBond: 0, gameType: GameTypes.CANNON_KONA, - gameArgs: abi.encode( - IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonKonaPrestate }) - ) + gameArgs: bytes("") }) ); } @@ -1065,7 
+1063,9 @@ contract OPContractsManagerV2_Deploy_Test is OPContractsManagerV2_TestInit { /// @notice Tests that the deploy function succeeds and passes standard validation. function test_deploy_succeeds() public { // Run the deploy and standard validator checks. - IOPContractsManagerV2.ChainContracts memory cts = runDeployV2(deployConfig); + // We expect PLDG-10 and CKDG-10 validator errors because CANNON and CANNON_KONA are + // disabled during initial deployment (no implementations registered). + IOPContractsManagerV2.ChainContracts memory cts = runDeployV2(deployConfig, bytes(""), "PLDG-10,CKDG-10"); // Verify key contracts are deployed. assertTrue(address(cts.systemConfig) != address(0), "systemConfig not deployed"); @@ -1138,6 +1138,26 @@ contract OPContractsManagerV2_Deploy_Test is OPContractsManagerV2_TestInit { deployConfig, abi.encodeWithSelector(IOPContractsManagerV2.OPContractsManagerV2_InvalidGameConfigs.selector) ); } + + function test_deploy_cannonGameEnabled_reverts() public { + deployConfig.disputeGameConfigs[0].enabled = true; + deployConfig.disputeGameConfigs[0].initBond = 1 ether; + + // nosemgrep: sol-style-use-abi-encodecall + runDeployV2( + deployConfig, abi.encodeWithSelector(IOPContractsManagerV2.OPContractsManagerV2_InvalidGameConfigs.selector) + ); + } + + function test_deploy_cannonKonaGameEnabled_reverts() public { + deployConfig.disputeGameConfigs[2].enabled = true; + deployConfig.disputeGameConfigs[2].initBond = 1 ether; + + // nosemgrep: sol-style-use-abi-encodecall + runDeployV2( + deployConfig, abi.encodeWithSelector(IOPContractsManagerV2.OPContractsManagerV2_InvalidGameConfigs.selector) + ); + } } /// @title OPContractsManagerV2_DevFeatureBitmap_Test @@ -1188,10 +1208,10 @@ contract OPContractsManagerV2_Migrate_Test is OPContractsManagerV2_TestInit { IOPContractsManagerUtils.DisputeGameConfig[] memory dgConfigs = new IOPContractsManagerUtils.DisputeGameConfig[](3); dgConfigs[0] = IOPContractsManagerUtils.DisputeGameConfig({ - 
enabled: true, - initBond: 0.08 ether, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON, - gameArgs: abi.encode(IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonPrestate })) + gameArgs: bytes("") }); dgConfigs[1] = IOPContractsManagerUtils.DisputeGameConfig({ enabled: true, @@ -1206,10 +1226,10 @@ contract OPContractsManagerV2_Migrate_Test is OPContractsManagerV2_TestInit { ) }); dgConfigs[2] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: true, - initBond: 0.08 ether, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON_KONA, - gameArgs: abi.encode(IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonKonaPrestate })) + gameArgs: bytes("") }); // Set up the deploy config using struct literal for compile-time field checking. @@ -1544,10 +1564,10 @@ contract OPContractsManagerV2_FeatBatchUpgrade_Test is OPContractsManagerV2_Test address initialProposer = makeAddr("proposer"); baseConfig.disputeGameConfigs = new IOPContractsManagerUtils.DisputeGameConfig[](3); baseConfig.disputeGameConfigs[0] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: true, - initBond: DEFAULT_DISPUTE_GAME_INIT_BOND, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON, - gameArgs: abi.encode(IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonPrestate })) + gameArgs: bytes("") }); baseConfig.disputeGameConfigs[1] = IOPContractsManagerUtils.DisputeGameConfig({ enabled: true, @@ -1562,10 +1582,10 @@ contract OPContractsManagerV2_FeatBatchUpgrade_Test is OPContractsManagerV2_Test ) }); baseConfig.disputeGameConfigs[2] = IOPContractsManagerUtils.DisputeGameConfig({ - enabled: true, - initBond: DEFAULT_DISPUTE_GAME_INIT_BOND, + enabled: false, + initBond: 0, gameType: GameTypes.CANNON_KONA, - gameArgs: abi.encode(IOPContractsManagerUtils.FaultDisputeGameConfig({ absolutePrestate: cannonKonaPrestate })) + gameArgs: bytes("") }); // 3. Deploy 15 separate chains using opcmV2.deploy(). 
diff --git a/packages/contracts-bedrock/test/L2/ConditionalDeployer.t.sol b/packages/contracts-bedrock/test/L2/ConditionalDeployer.t.sol new file mode 100644 index 00000000000..af24eee2c96 --- /dev/null +++ b/packages/contracts-bedrock/test/L2/ConditionalDeployer.t.sol @@ -0,0 +1,182 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Testing +import { CommonTest } from "test/setup/CommonTest.sol"; + +// Libraries +import { DevFeatures } from "src/libraries/DevFeatures.sol"; +import { Preinstalls } from "src/libraries/Preinstalls.sol"; + +// Contracts +import { ConditionalDeployer } from "src/L2/ConditionalDeployer.sol"; + +/// @title ConditionalDeployer_Harness +/// @notice This contract is deployed by the ConditionalDeployer to test the deployment of an +/// implementation. +contract ConditionalDeployer_Harness { + uint256 public immutable number; + + constructor(uint256 _number) { + number = _number; + } +} + +/// @title ConditionalDeployer_TestInit +/// @notice Reusable test initialization for `ConditionalDeployer` tests. +contract ConditionalDeployer_TestInit is CommonTest { + // Test contracts + bytes public simpleContractCreationCode; + + function setUp() public override { + super.setUp(); + skipIfDevFeatureDisabled(DevFeatures.L2CM); + // Deploy contracts + simpleContractCreationCode = type(ConditionalDeployer_Harness).creationCode; + } +} + +/// @title ConditionalDeployer_Deploy_Test +/// @notice Tests the `deploy` function of the `ConditionalDeployer` contract. +contract ConditionalDeployer_Deploy_Test is ConditionalDeployer_TestInit { + /// @notice Event emitted when an implementation is deployed. + event ImplementationDeployed(address indexed implementation, bytes32 salt); + + /// @notice Event emitted when deployment is skipped because implementation already exists. + event ImplementationExists(address indexed implementation); + + /// @notice Tests that `deploy` succeeds and emits the correct event. 
+ function testFuzz_deploy_succeeds(address _caller, bytes32 _salt, uint256 _number) public { + bytes memory _initCode = abi.encodePacked(simpleContractCreationCode, abi.encode(_number)); + address expectedImplementation = getExpectedImplementation(_initCode, _salt); + + vm.expectEmit(address(conditionalDeployer)); + emit ImplementationDeployed(expectedImplementation, _salt); + + vm.prank(_caller); + address implementation = conditionalDeployer.deploy(_salt, _initCode); + + assertEq(implementation, expectedImplementation); + assertEq(ConditionalDeployer_Harness(implementation).number(), _number); + assert(implementation.code.length != 0); + } + + /// @notice Tests that `deploy` is idempotent and produces the same address when called multiple times. + function testFuzz_deploy_idempotent_succeeds(address _caller, bytes32 _salt, uint256 _number) public { + bytes memory _initCode = abi.encodePacked(simpleContractCreationCode, abi.encode(_number)); + address expectedImplementation = getExpectedImplementation(_initCode, _salt); + + // First Deployment + vm.expectEmit(address(conditionalDeployer)); + emit ImplementationDeployed(expectedImplementation, _salt); + + assertEq(expectedImplementation.code.length, 0); + + vm.prank(_caller); + address implementation1 = conditionalDeployer.deploy(_salt, _initCode); + + // Assert that the implementation was deployed + assertEq(implementation1, expectedImplementation); + assert(implementation1.code.length != 0); + assertEq(ConditionalDeployer_Harness(implementation1).number(), _number); + + // Second Deployment + vm.expectEmit(address(conditionalDeployer)); + emit ImplementationExists(implementation1); + + vm.prank(_caller); + address implementation2 = conditionalDeployer.deploy(_salt, _initCode); + + assertEq(implementation1, implementation2); + } + + /// @notice Tests that `deploy` reverts when the deployment call to the DeterministicDeploymentProxy fails. 
+ /// @dev The deployment call to the DeterministicDeploymentProxy is mocked to revert. + function testFuzz_deploy_deploymentFailed_reverts(address _caller, bytes32 _salt, uint256 _number) public { + bytes memory _initCode = abi.encodePacked(simpleContractCreationCode, abi.encode(_number)); + + // Mock the deployment call to the DeterministicDeploymentProxy to revert + vm.mockCallRevert( + conditionalDeployer.deterministicDeploymentProxy(), + 0, + abi.encodePacked(_salt, _initCode), + bytes("deployment failed") + ); + + vm.prank(_caller); + vm.expectRevert( + abi.encodeWithSelector( + ConditionalDeployer.ConditionalDeployer_DeploymentFailed.selector, bytes("deployment failed") + ) + ); + conditionalDeployer.deploy(_salt, _initCode); + } + + /// @notice Tests that `deploy` reverts when the deployment call to the DeterministicDeploymentProxy returns the + /// wrong address. + /// @dev The deployment call to the DeterministicDeploymentProxy is mocked to return the wrong address. + function testFuzz_deploy_notExpectedAddress_reverts( + address _caller, + bytes32 _salt, + address _notExpectedAddress, + uint256 _number + ) + public + { + bytes memory _initCode = abi.encodePacked(simpleContractCreationCode, abi.encode(_number)); + address expectedImplementation = getExpectedImplementation(_initCode, _salt); + vm.assume(_notExpectedAddress != expectedImplementation); + + vm.mockCall( + conditionalDeployer.deterministicDeploymentProxy(), + 0, + abi.encodePacked(_salt, _initCode), + abi.encodePacked(_notExpectedAddress) + ); + vm.prank(_caller); + vm.expectRevert( + abi.encodeWithSelector( + ConditionalDeployer.ConditionalDeployer_DeploymentFailed.selector, abi.encodePacked(_notExpectedAddress) + ) + ); + conditionalDeployer.deploy(_salt, _initCode); + } + + /// @notice Returns the expected implementation address for the given initialization code and salt. + /// @dev Uses the CREATE2 formula to compute the expected implementation address. 
+ /// @param _initCode The initialization code for the contract. + /// @param _salt The salt to use for deployment. + /// @return expectedImplementation_ The expected implementation address. + function getExpectedImplementation( + bytes memory _initCode, + bytes32 _salt + ) + internal + view + returns (address expectedImplementation_) + { + bytes32 codeHash = keccak256(_initCode); + expectedImplementation_ = address( + uint160( + uint256( + keccak256( + abi.encodePacked( + bytes1(0xff), conditionalDeployer.deterministicDeploymentProxy(), _salt, codeHash + ) + ) + ) + ) + ); + } +} + +/// @title ConditionalDeployer_Uncategorized_Test +/// @notice General tests that are not testing any function directly of the `ConditionalDeployer` +/// contract or are testing multiple functions. +contract ConditionalDeployer_Uncategorized_Test is ConditionalDeployer_TestInit { + /// @notice Tests that the getters return valid values. + function test_getters_succeeds() external view { + assert(bytes(conditionalDeployer.version()).length > 0); + assertEq(conditionalDeployer.deterministicDeploymentProxy(), payable(Preinstalls.DeterministicDeploymentProxy)); + } +} diff --git a/packages/contracts-bedrock/test/L2/FeeSplitter.t.sol b/packages/contracts-bedrock/test/L2/FeeSplitter.t.sol index 35edca9a278..63c51f0c11a 100644 --- a/packages/contracts-bedrock/test/L2/FeeSplitter.t.sol +++ b/packages/contracts-bedrock/test/L2/FeeSplitter.t.sol @@ -17,7 +17,7 @@ import { Types } from "src/libraries/Types.sol"; // Interfaces import { IFeeSplitter } from "interfaces/L2/IFeeSplitter.sol"; import { ISharesCalculator } from "interfaces/L2/ISharesCalculator.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { IFeeVault } from "interfaces/L2/IFeeVault.sol"; /// @title FeeSplitter_TestInit @@ -45,8 +45,8 @@ contract FeeSplitter_TestInit is CommonTest { super.enableRevenueShare(); super.setUp(); - // Get the 
owner from ProxyAdmin - _owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + // Get the owner from L2ProxyAdmin + _owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); // Initialize fee vaults array _feeVaults[0] = Predeploys.SEQUENCER_FEE_WALLET; diff --git a/packages/contracts-bedrock/test/L2/FeeVault.t.sol b/packages/contracts-bedrock/test/L2/FeeVault.t.sol index aa8a572335b..df774026642 100644 --- a/packages/contracts-bedrock/test/L2/FeeVault.t.sol +++ b/packages/contracts-bedrock/test/L2/FeeVault.t.sol @@ -5,7 +5,7 @@ pragma solidity 0.8.15; import { CommonTest } from "test/setup/CommonTest.sol"; // Interfaces -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { IFeeVault } from "interfaces/L2/IFeeVault.sol"; import { IL2ToL1MessagePasser } from "interfaces/L2/IL2ToL1MessagePasser.sol"; import { IL2ToL1MessagePasserCGT } from "interfaces/L2/IL2ToL1MessagePasserCGT.sol"; @@ -30,7 +30,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Helper function to set up L2 withdrawal configuration. 
function _setupL2Withdrawal() internal { // Set the withdrawal network to L2 - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setWithdrawalNetwork(Types.WithdrawalNetwork.L2); } @@ -72,7 +72,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { function testFuzz_withdraw_notEnough_reverts(uint256 _minWithdrawalAmount) external { // Set the minimum withdrawal amount _minWithdrawalAmount = bound(_minWithdrawalAmount, 1, type(uint256).max); - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setMinWithdrawalAmount(_minWithdrawalAmount); // Set the balance to be less than the minimum withdrawal amount @@ -87,15 +87,15 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { skipIfSysFeatureEnabled(Features.CUSTOM_GAS_TOKEN); // Setup L1 withdrawal - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setWithdrawalNetwork(Types.WithdrawalNetwork.L1); // Set recipient - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setRecipient(recipient); // Set minimum withdrawal amount - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setMinWithdrawalAmount(minWithdrawalAmount); // Set the balance to be greater than the minimum withdrawal amount @@ -151,15 +151,15 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { skipIfSysFeatureDisabled(Features.CUSTOM_GAS_TOKEN); // Setup L1 withdrawal - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setWithdrawalNetwork(Types.WithdrawalNetwork.L1); // Set recipient - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + 
vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setRecipient(recipient); // Set minimum withdrawal amount - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); feeVault.setMinWithdrawalAmount(minWithdrawalAmount); // Set the balance to be greater than the minimum withdrawal amount @@ -221,7 +221,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Tests that the owner can successfully set minimum withdrawal amount with fuzz testing. function testFuzz_setMinWithdrawalAmount_succeeds(uint256 _newMinWithdrawalAmount) external { - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.prank(owner); IFeeVault(payable(address(feeVault))).setMinWithdrawalAmount(_newMinWithdrawalAmount); @@ -232,7 +232,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Tests that non-owner cannot set minimum withdrawal amount with fuzz testing. function testFuzz_setMinWithdrawalAmount_onlyOwner_reverts(address _caller, uint256 _newAmount) external { - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.assume(_caller != owner); uint256 initialAmount = feeVault.minWithdrawalAmount(); @@ -247,7 +247,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Tests that the owner can successfully set recipient with fuzz testing. function testFuzz_setRecipient_succeeds(address _newRecipient) external { - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.prank(owner); IFeeVault(payable(address(feeVault))).setRecipient(_newRecipient); @@ -258,7 +258,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Tests that non-owner cannot set recipient with fuzz testing. 
function testFuzz_setRecipient_onlyOwner_reverts(address _caller, address _newRecipient) external { - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.assume(_caller != owner); address initialRecipient = feeVault.recipient(); @@ -277,7 +277,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { _networkValue = uint8(bound(_networkValue, 0, 1)); Types.WithdrawalNetwork newNetwork = Types.WithdrawalNetwork(_networkValue); - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.prank(owner); IFeeVault(payable(address(feeVault))).setWithdrawalNetwork(newNetwork); @@ -288,7 +288,7 @@ abstract contract FeeVault_Uncategorized_Test is CommonTest { /// @notice Tests that non-owner cannot set withdrawal network with fuzz testing. function testFuzz_setWithdrawalNetwork_onlyOwner_reverts(address _caller, uint8 _networkValue) external { - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.assume(_caller != owner); // Bound to valid enum values diff --git a/packages/contracts-bedrock/test/L2/L2ProxyAdmin.t.sol b/packages/contracts-bedrock/test/L2/L2ProxyAdmin.t.sol new file mode 100644 index 00000000000..8a792387725 --- /dev/null +++ b/packages/contracts-bedrock/test/L2/L2ProxyAdmin.t.sol @@ -0,0 +1,112 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Testing +import { CommonTest } from "test/setup/CommonTest.sol"; + +// Libraries +import { Constants } from "src/libraries/Constants.sol"; +import { Predeploys } from "src/libraries/Predeploys.sol"; + +// Interfaces +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; + +// Contracts +import { L2ProxyAdmin } from "src/L2/L2ProxyAdmin.sol"; +import { IL2ContractsManager } from "interfaces/L2/IL2ContractsManager.sol"; + +/// @title L2ProxyAdmin_TestInit +/// 
@notice Reusable test initialization for `L2ProxyAdmin` tests. +abstract contract L2ProxyAdmin_TestInit is CommonTest { + IL2ProxyAdmin public l2ProxyAdmin; + address public owner; + + // Events + event PredeploysUpgraded(address indexed l2ContractsManager); + + /// @notice Test setup. + function setUp() public virtual override { + super.setUp(); + l2ProxyAdmin = IL2ProxyAdmin(Predeploys.PROXY_ADMIN); + owner = l2ProxyAdmin.owner(); + } + + /// @notice Helper function to setup a mock and expect a call to it. + function _mockAndExpect(address _receiver, bytes memory _calldata, bytes memory _returned) internal { + vm.mockCall(_receiver, _calldata, _returned); + vm.expectCall(_receiver, _calldata); + } +} + +/// @title L2ProxyAdmin_Constructor_Test +/// @notice Tests the `constructor` function of the `L2ProxyAdmin` contract. +contract L2ProxyAdmin_Constructor_Test is L2ProxyAdmin_TestInit { + /// @notice Tests that the `constructor` function succeeds. + function test_constructor_succeeds(address _owner) public { + // Deploy the L2ProxyAdmin contract + l2ProxyAdmin = IL2ProxyAdmin(address(new L2ProxyAdmin(_owner))); + // It sets the owner to the correct address + assertEq(l2ProxyAdmin.owner(), _owner); + } +} + +/// @title L2ProxyAdmin_Version_Test +/// @notice Tests the `version` function of the `L2ProxyAdmin` contract. +contract L2ProxyAdmin_Version_Test is L2ProxyAdmin_TestInit { + /// @notice Tests that the `version` function returns a non-empty string. + function test_version_succeeds() public view { + assertGt(bytes(l2ProxyAdmin.version()).length, 0, "Version should be non-empty"); + } +} + +/// @title L2ProxyAdmin_UpgradePredeploys_Test +/// @notice Tests the `upgradePredeploys` function of the `L2ProxyAdmin` contract. +contract L2ProxyAdmin_UpgradePredeploys_Test is L2ProxyAdmin_TestInit { + /// @notice Tests that upgradePredeploys reverts when called by unauthorized caller. 
+ function testFuzz_upgradePredeploys_unauthorizedCaller_reverts( + address _caller, + address _l2ContractsManager + ) + public + { + vm.assume(_caller != Constants.DEPOSITOR_ACCOUNT); + + // Expect the revert with L2ProxyAdmin__Unauthorized + vm.expectRevert(L2ProxyAdmin.L2ProxyAdmin__Unauthorized.selector); + + // Call upgradePredeploys with unauthorized caller + vm.prank(_caller); + l2ProxyAdmin.upgradePredeploys(_l2ContractsManager); + } + + /// @notice Tests that upgradePredeploys succeeds when called by DEPOSITOR_ACCOUNT. + function testFuzz_upgradePredeploys_succeeds(address _l2ContractsManager) public { + assumeAddressIsNot(_l2ContractsManager, AddressType.Precompile, AddressType.ForgeAddress); + + // Mock the delegatecall to return success + _mockAndExpect(_l2ContractsManager, abi.encodeCall(IL2ContractsManager.upgrade, ()), abi.encode()); + + // Expect the PredeploysUpgraded event + vm.expectEmit(address(l2ProxyAdmin)); + emit PredeploysUpgraded(_l2ContractsManager); + + // Call upgradePredeploys with authorized caller + vm.prank(Constants.DEPOSITOR_ACCOUNT); + l2ProxyAdmin.upgradePredeploys(_l2ContractsManager); + } + + /// @notice Tests that upgradePredeploys reverts when delegatecall fails. 
+ function testFuzz_upgradePredeploys_delegatecallFails_reverts(address _l2ContractsManager) public { + assumeAddressIsNot(_l2ContractsManager, AddressType.Precompile, AddressType.ForgeAddress); + + // Mock the delegatecall to return failure + vm.mockCallRevert(_l2ContractsManager, abi.encodeCall(IL2ContractsManager.upgrade, ()), bytes("error")); + + // Expect the revert with L2ProxyAdmin__UpgradeFailed + vm.expectRevert(abi.encodeWithSelector(L2ProxyAdmin.L2ProxyAdmin__UpgradeFailed.selector, bytes("error"))); + + // Call upgradePredeploys with authorized caller + vm.prank(Constants.DEPOSITOR_ACCOUNT); + l2ProxyAdmin.upgradePredeploys(_l2ContractsManager); + } +} diff --git a/packages/contracts-bedrock/test/L2/LegacyFeeSplitter.t.sol b/packages/contracts-bedrock/test/L2/LegacyFeeSplitter.t.sol index a876aa4c2d7..e2cc3fb9144 100644 --- a/packages/contracts-bedrock/test/L2/LegacyFeeSplitter.t.sol +++ b/packages/contracts-bedrock/test/L2/LegacyFeeSplitter.t.sol @@ -3,7 +3,7 @@ pragma solidity 0.8.15; import { FeeSplitter_TestInit } from "test/L2/FeeSplitter.t.sol"; import { LegacyFeeSplitter } from "test/mocks/LegacyFeeSplitter.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { IFeeVault } from "interfaces/L2/IFeeVault.sol"; import { Predeploys } from "src/libraries/Predeploys.sol"; @@ -18,7 +18,7 @@ contract LegacyFeeSplitter_DisburseFees_Test is FeeSplitter_TestInit { legacyFeeSplitter = new LegacyFeeSplitter(); // Setup the legacy splitter as the recipient in the vaults - address owner = IProxyAdmin(Predeploys.PROXY_ADMIN).owner(); + address owner = IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner(); vm.startPrank(owner); IFeeVault(payable(Predeploys.SEQUENCER_FEE_WALLET)).setRecipient(address(legacyFeeSplitter)); diff --git a/packages/contracts-bedrock/test/L2/OptimismMintableERC721.t.sol b/packages/contracts-bedrock/test/L2/OptimismMintableERC721.t.sol index 
d66a702d01a..696cff0842e 100644 --- a/packages/contracts-bedrock/test/L2/OptimismMintableERC721.t.sol +++ b/packages/contracts-bedrock/test/L2/OptimismMintableERC721.t.sol @@ -3,10 +3,12 @@ pragma solidity 0.8.15; import { ERC721, IERC721 } from "@openzeppelin/contracts/token/ERC721/ERC721.sol"; import { IERC721Enumerable } from "@openzeppelin/contracts/token/ERC721/extensions/ERC721Enumerable.sol"; +import { IERC721Metadata } from "@openzeppelin/contracts/token/ERC721/extensions/IERC721Metadata.sol"; import { IERC165 } from "@openzeppelin/contracts/utils/introspection/IERC165.sol"; import { Strings } from "@openzeppelin/contracts/utils/Strings.sol"; import { CommonTest } from "test/setup/CommonTest.sol"; import { OptimismMintableERC721, IOptimismMintableERC721 } from "src/L2/OptimismMintableERC721.sol"; +import { SemverComp } from "src/libraries/SemverComp.sol"; /// @title OptimismMintableERC721_TestInit /// @notice Reusable test initialization for `OptimismMintableERC721` tests. @@ -37,15 +39,18 @@ abstract contract OptimismMintableERC721_TestInit is CommonTest { /// @notice Tests the `constructor` of the `OptimismMintableERC721` contract. contract OptimismMintableERC721_Constructor_Test is OptimismMintableERC721_TestInit { /// @notice Tests that the constructor initializes state variables correctly with valid inputs. 
- function test_constructor_succeeds() external view { - assertEq(L2NFT.name(), "L2NFT"); - assertEq(L2NFT.symbol(), "L2T"); - assertEq(L2NFT.remoteToken(), address(L1NFT)); - assertEq(L2NFT.bridge(), address(l2ERC721Bridge)); - assertEq(L2NFT.remoteChainId(), 1); - assertEq(L2NFT.REMOTE_TOKEN(), address(L1NFT)); - assertEq(L2NFT.BRIDGE(), address(l2ERC721Bridge)); - assertEq(L2NFT.REMOTE_CHAIN_ID(), 1); + function testFuzz_constructor_validParams_succeeds(uint256 _remoteChainId) external { + vm.assume(_remoteChainId != 0); + OptimismMintableERC721 nft = + new OptimismMintableERC721(address(l2ERC721Bridge), _remoteChainId, address(L1NFT), "L2NFT", "L2T"); + assertEq(nft.name(), "L2NFT"); + assertEq(nft.symbol(), "L2T"); + assertEq(nft.remoteToken(), address(L1NFT)); + assertEq(nft.bridge(), address(l2ERC721Bridge)); + assertEq(nft.remoteChainId(), _remoteChainId); + assertEq(nft.REMOTE_TOKEN(), address(L1NFT)); + assertEq(nft.BRIDGE(), address(l2ERC721Bridge)); + assertEq(nft.REMOTE_CHAIN_ID(), _remoteChainId); } /// @notice Tests that the constructor reverts when the bridge address is zero. @@ -72,28 +77,31 @@ contract OptimismMintableERC721_Constructor_Test is OptimismMintableERC721_TestI contract OptimismMintableERC721_SafeMint_Test is OptimismMintableERC721_TestInit { /// @notice Tests that the `safeMint` function successfully mints a token when called by the /// bridge. - function test_safeMint_succeeds() external { + function testFuzz_safeMint_validParams_succeeds(address _to, uint256 _tokenId) external { + vm.assume(_to != address(0)); + vm.assume(_to.code.length == 0); + // Expect a transfer event. vm.expectEmit(true, true, true, true); - emit Transfer(address(0), alice, 1); + emit Transfer(address(0), _to, _tokenId); // Expect a mint event. vm.expectEmit(true, true, true, true); - emit Mint(alice, 1); + emit Mint(_to, _tokenId); // Mint the token. 
vm.prank(address(l2ERC721Bridge)); - L2NFT.safeMint(alice, 1); + L2NFT.safeMint(_to, _tokenId); - // Token should be owned by alice. - assertEq(L2NFT.ownerOf(1), alice); + // Token should be owned by the recipient. + assertEq(L2NFT.ownerOf(_tokenId), _to); } - /// @notice Tests that the `safeMint` function reverts when called by an address other than the bridge. - function test_safeMint_notBridge_reverts() external { - // Try to mint the token. + /// @notice Tests that the `safeMint` function reverts when called by a non-bridge address. + function testFuzz_safeMint_notBridge_reverts(address _caller) external { + vm.assume(_caller != address(l2ERC721Bridge)); vm.expectRevert("OptimismMintableERC721: only bridge can call this function"); - vm.prank(address(alice)); + vm.prank(_caller); L2NFT.safeMint(alice, 1); } } @@ -103,38 +111,39 @@ contract OptimismMintableERC721_SafeMint_Test is OptimismMintableERC721_TestInit contract OptimismMintableERC721_Burn_Test is OptimismMintableERC721_TestInit { /// @notice Tests that the `burn` function successfully burns a token when called by the /// bridge. - function test_burn_succeeds() external { + function testFuzz_burn_validParams_succeeds(uint256 _tokenId) external { // Mint the token first. vm.prank(address(l2ERC721Bridge)); - L2NFT.safeMint(alice, 1); + L2NFT.safeMint(alice, _tokenId); // Expect a transfer event. vm.expectEmit(true, true, true, true); - emit Transfer(alice, address(0), 1); + emit Transfer(alice, address(0), _tokenId); // Expect a burn event. vm.expectEmit(true, true, true, true); - emit Burn(alice, 1); + emit Burn(alice, _tokenId); // Burn the token. vm.prank(address(l2ERC721Bridge)); - L2NFT.burn(alice, 1); + L2NFT.burn(alice, _tokenId); - // Token should be owned by address(0). + // Token should no longer exist. 
vm.expectRevert("ERC721: invalid token ID"); - L2NFT.ownerOf(1); + L2NFT.ownerOf(_tokenId); } - /// @notice Tests that the `burn` function reverts when called by an address other than the - /// bridge. - function test_burn_notBridge_reverts() external { + /// @notice Tests that the `burn` function reverts when called by a non-bridge address. + function testFuzz_burn_notBridge_reverts(address _caller) external { + vm.assume(_caller != address(l2ERC721Bridge)); + // Mint the token first. vm.prank(address(l2ERC721Bridge)); L2NFT.safeMint(alice, 1); // Try to burn the token. vm.expectRevert("OptimismMintableERC721: only bridge can call this function"); - vm.prank(address(alice)); + vm.prank(_caller); L2NFT.burn(alice, 1); } } @@ -144,7 +153,7 @@ contract OptimismMintableERC721_Burn_Test is OptimismMintableERC721_TestInit { contract OptimismMintableERC721_SupportsInterface_Test is OptimismMintableERC721_TestInit { /// @notice Tests that the `supportsInterface` function returns true for /// IOptimismMintableERC721, IERC721Enumerable, IERC721 and IERC165 interfaces. - function test_supportsInterface_succeeds() external view { + function test_supportsInterface_supportedInterfaces_succeeds() external view { // Checks if the contract supports the IOptimismMintableERC721 interface. assertTrue(L2NFT.supportsInterface(type(IOptimismMintableERC721).interfaceId)); // Checks if the contract supports the IERC721Enumerable interface. @@ -154,6 +163,26 @@ contract OptimismMintableERC721_SupportsInterface_Test is OptimismMintableERC721 // Checks if the contract supports the IERC165 interface. assertTrue(L2NFT.supportsInterface(type(IERC165).interfaceId)); } + + /// @notice Tests that the `supportsInterface` function returns false for unsupported + /// interfaces. 
+ function testFuzz_supportsInterface_unsupportedInterface_fails(bytes4 _interfaceId) external view { + vm.assume(_interfaceId != type(IOptimismMintableERC721).interfaceId); + vm.assume(_interfaceId != type(IERC721Enumerable).interfaceId); + vm.assume(_interfaceId != type(IERC721).interfaceId); + vm.assume(_interfaceId != type(IERC721Metadata).interfaceId); + vm.assume(_interfaceId != type(IERC165).interfaceId); + assertFalse(L2NFT.supportsInterface(_interfaceId)); + } +} + +/// @title OptimismMintableERC721_Version_Test +/// @notice Tests the `version` function of the `OptimismMintableERC721` contract. +contract OptimismMintableERC721_Version_Test is OptimismMintableERC721_TestInit { + /// @notice Tests that version returns a valid semver string. + function test_version_validFormat_succeeds() external view { + SemverComp.parse(L2NFT.version()); + } } /// @title OptimismMintableERC721_Uncategorized_Test @@ -161,14 +190,14 @@ contract OptimismMintableERC721_SupportsInterface_Test is OptimismMintableERC721 /// `OptimismMintableERC721` contract. contract OptimismMintableERC721_Uncategorized_Test is OptimismMintableERC721_TestInit { /// @notice Tests that the `tokenURI` function returns the correct URI for a minted token. - function test_tokenURI_succeeds() external { + function testFuzz_tokenURI_validTokenId_succeeds(uint256 _tokenId) external { // Mint the token first. vm.prank(address(l2ERC721Bridge)); - L2NFT.safeMint(alice, 1); + L2NFT.safeMint(alice, _tokenId); // Token URI should be correct. 
assertEq( - L2NFT.tokenURI(1), + L2NFT.tokenURI(_tokenId), string( abi.encodePacked( "ethereum:", @@ -176,7 +205,7 @@ contract OptimismMintableERC721_Uncategorized_Test is OptimismMintableERC721_Tes "@", Strings.toString(1), "/tokenURI?uint256=", - Strings.toString(1) + Strings.toString(_tokenId) ) ) ); diff --git a/packages/contracts-bedrock/test/invariants/CustomGasToken.t.sol b/packages/contracts-bedrock/test/invariants/CustomGasToken.t.sol index 334d8d44a56..13a9897f76d 100644 --- a/packages/contracts-bedrock/test/invariants/CustomGasToken.t.sol +++ b/packages/contracts-bedrock/test/invariants/CustomGasToken.t.sol @@ -14,7 +14,7 @@ import { SafeSend } from "src/universal/SafeSend.sol"; // Contracts import { ILiquidityController } from "interfaces/L2/ILiquidityController.sol"; import { INativeAssetLiquidity } from "interfaces/L2/INativeAssetLiquidity.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; /// @title CGT_Minter /// @notice An actor with the minter role (can mint and burn) @@ -202,7 +202,7 @@ contract CustomGasToken_Invariants_Test is CommonTest { randomActor.initAddresses(address(actor_minter), address(actor_funder)); // Authorize the minter actor (simple access control in unit tests) - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); liquidityController.authorizeMinter(address(actor_minter)); // Create the initial supply diff --git a/packages/contracts-bedrock/test/invariants/FeeSplit.t.sol b/packages/contracts-bedrock/test/invariants/FeeSplit.t.sol index f8d36eb6b19..935bb4ad401 100644 --- a/packages/contracts-bedrock/test/invariants/FeeSplit.t.sol +++ b/packages/contracts-bedrock/test/invariants/FeeSplit.t.sol @@ -5,7 +5,7 @@ import { StdUtils } from "forge-std/StdUtils.sol"; import { Vm } from "forge-std/Vm.sol"; import { CommonTest } from "test/setup/CommonTest.sol"; import { IFeeVault } from 
"interfaces/L2/IFeeVault.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { Predeploys } from "src/libraries/Predeploys.sol"; import { IFeeSplitter } from "interfaces/L2/IFeeSplitter.sol"; import { IL1Withdrawer } from "interfaces/L2/IL1Withdrawer.sol"; @@ -124,7 +124,7 @@ contract FeeSplitter_Preconditions is CommonTest { function setMinAmount(uint256 _minAmount, uint256 _vaultIndex) public { _vaultIndex = bound(_vaultIndex, 0, 3); - vm.prank(IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + vm.prank(IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); if (_vaultIndex == 0) { IFeeVault(payable(Predeploys.SEQUENCER_FEE_WALLET)).setMinWithdrawalAmount(_minAmount); diff --git a/packages/contracts-bedrock/test/libraries/Predeploys.t.sol b/packages/contracts-bedrock/test/libraries/Predeploys.t.sol index 2a025cd5c09..e16c0005772 100644 --- a/packages/contracts-bedrock/test/libraries/Predeploys.t.sol +++ b/packages/contracts-bedrock/test/libraries/Predeploys.t.sol @@ -10,6 +10,7 @@ import { Predeploys } from "src/libraries/Predeploys.sol"; import { ForgeArtifacts } from "scripts/libraries/ForgeArtifacts.sol"; import { Fork } from "scripts/libraries/Config.sol"; import { Features } from "src/libraries/Features.sol"; +import { DevFeatures } from "src/libraries/DevFeatures.sol"; /// @title Predeploys_TestInit /// @notice Reusable test initialization for `Predeploys` tests. @@ -57,7 +58,7 @@ abstract contract Predeploys_TestInit is CommonTest { } /// @notice Internal test function for predeploys validation across different forks. 
- function _test_predeploys(Fork _fork, bool _enableCrossL2Inbox, bool _isCustomGasToken) internal { + function _test_predeploys(Fork _fork, bool _enableCrossL2Inbox, bool _isCustomGasToken, bool _useL2CM) internal { uint256 count = 2048; uint160 prefix = uint160(0x420) << 148; @@ -73,7 +74,7 @@ abstract contract Predeploys_TestInit is CommonTest { } bool isPredeploy = - Predeploys.isSupportedPredeploy(addr, uint256(_fork), _enableCrossL2Inbox, _isCustomGasToken); + Predeploys.isSupportedPredeploy(addr, uint256(_fork), _enableCrossL2Inbox, _isCustomGasToken, _useL2CM); bytes memory code = addr.code; if (isPredeploy) assertTrue(code.length > 0); @@ -159,14 +160,21 @@ contract Predeploys_Uncategorized_Test is Predeploys_TestInit { /// @notice Tests that the predeploy addresses are set correctly. They have code /// and the proxied accounts have the correct admin. function test_predeploys_succeeds() external { - _test_predeploys(Fork.ISTHMUS, false, false); + _test_predeploys(Fork.ISTHMUS, false, false, false); } /// @notice Tests that the predeploy addresses are set correctly. They have code /// and the proxied accounts have the correct admin. Using custom gas token. function test_predeploys_customGasToken_succeeds() external { skipIfSysFeatureDisabled(Features.CUSTOM_GAS_TOKEN); - _test_predeploys(Fork.ISTHMUS, false, true); + _test_predeploys(Fork.ISTHMUS, false, true, false); + } + + /// @notice Tests that the predeploy addresses are set correctly. They have code + /// and the proxied accounts have the correct admin. Using l2cm. + function test_predeploys_l2cm_succeeds() external { + skipIfDevFeatureDisabled(DevFeatures.L2CM); + _test_predeploys(Fork.ISTHMUS, false, false, true); } } @@ -183,12 +191,12 @@ contract Predeploys_UncategorizedInterop_Test is Predeploys_TestInit { /// @notice Tests that the predeploy addresses are set correctly. They have code and the /// proxied accounts have the correct admin. Using interop with inbox. 
function test_predeploysWithInbox_succeeds() external { - _test_predeploys(Fork.INTEROP, true, false); + _test_predeploys(Fork.INTEROP, true, false, false); } /// @notice Tests that the predeploy addresses are set correctly. They have code and the /// proxied accounts have the correct admin. Using interop without inbox. function test_predeploysWithoutInbox_succeeds() external { - _test_predeploys(Fork.INTEROP, false, false); + _test_predeploys(Fork.INTEROP, false, false, false); } } diff --git a/packages/contracts-bedrock/test/opcm/DeployOPChain.t.sol b/packages/contracts-bedrock/test/opcm/DeployOPChain.t.sol index b8f141bae8d..961e05cccd1 100644 --- a/packages/contracts-bedrock/test/opcm/DeployOPChain.t.sol +++ b/packages/contracts-bedrock/test/opcm/DeployOPChain.t.sol @@ -242,70 +242,20 @@ contract DeployOPChain_Test is DeployOPChain_TestBase { _checkDeploymentAssertions(doo); } - function test_run_cannonGameType_succeeds() public { - // Skip test if OPCM v2 is not enabled because OPCM v1 registers PERMISSIONED_CANNON only regardles of the game - // type. 
+ function test_run_cannonGameType_reverts() public { skipIfDevFeatureDisabled(DevFeatures.OPCM_V2); deployOPChainInput.disputeGameType = GameTypes.CANNON; - DeployOPChain.Output memory doo = deployOPChain.run(deployOPChainInput); - - // CANNON should be enabled with init bond - assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON), - deployOPChain.DEFAULT_INIT_BOND(), - "CANNON init bond" - ); - assertNotEq(address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON)), address(0), "CANNON impl"); - - // PERMISSIONED_CANNON must always be enabled - assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.PERMISSIONED_CANNON), - deployOPChain.DEFAULT_INIT_BOND(), - "PERMISSIONED_CANNON init bond" - ); - assertNotEq( - address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.PERMISSIONED_CANNON)), - address(0), - "PERMISSIONED_CANNON impl" - ); - - // CANNON_KONA should not be enabled - assertEq(doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON_KONA), 0, "CANNON_KONA init bond"); + vm.expectRevert("DeployOPChain: only PERMISSIONED_CANNON game type is supported for initial deployment"); + deployOPChain.run(deployOPChainInput); } - function test_run_cannonKonaGameType_succeeds() public { - // Skip test if OPCM v2 is not enabled because OPCM v1 registers PERMISSIONED_CANNON only regardles of the game - // type. 
+ function test_run_cannonKonaGameType_reverts() public { skipIfDevFeatureDisabled(DevFeatures.OPCM_V2); deployOPChainInput.disputeGameType = GameTypes.CANNON_KONA; - DeployOPChain.Output memory doo = deployOPChain.run(deployOPChainInput); - - // CANNON_KONA should be enabled with init bond - assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON_KONA), - deployOPChain.DEFAULT_INIT_BOND(), - "CANNON_KONA init bond" - ); - assertNotEq( - address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON_KONA)), address(0), "CANNON_KONA impl" - ); - - // PERMISSIONED_CANNON must always be enabled in OPCM v2 - assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.PERMISSIONED_CANNON), - deployOPChain.DEFAULT_INIT_BOND(), - "PERMISSIONED_CANNON init bond" - ); - assertNotEq( - address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.PERMISSIONED_CANNON)), - address(0), - "PERMISSIONED_CANNON impl" - ); - - // CANNON should not be enabled - assertEq(doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON), 0, "CANNON init bond"); + vm.expectRevert("DeployOPChain: only PERMISSIONED_CANNON game type is supported for initial deployment"); + deployOPChain.run(deployOPChainInput); } /// @notice Tests that faultDisputeGame is set to address(0) and permissionedDisputeGame is set to the correct @@ -313,28 +263,7 @@ contract DeployOPChain_Test is DeployOPChain_TestBase { function test_run_faultDisputeGamePermissionedCannon_succeeds() public { skipIfDevFeatureDisabled(DevFeatures.OPCM_V2); - _assertDisputeGames(GameTypes.PERMISSIONED_CANNON); - } - - /// @notice Tests that faultDisputeGame is set to address(0) when disputeGameType is GameTypes.CANNON. - function test_run_faultDisputeGameCannon_succeeds() public { - skipIfDevFeatureDisabled(DevFeatures.OPCM_V2); - - _assertDisputeGames(GameTypes.CANNON); - } - - /// @notice Tests that faultDisputeGame is set to address(0) when disputeGameType is GameTypes.CANNON_KONA. 
- function test_run_faultDisputeGameCannonKona_succeeds() public { - skipIfDevFeatureDisabled(DevFeatures.OPCM_V2); - - _assertDisputeGames(GameTypes.CANNON_KONA); - } - - /// @notice Helper function that runs DeployOPChain.run and asserts DeployOPChain.Output.faultDisputeGame is set to - /// address(0) and DeployOPChain.Output.permissionedDisputeGame is set to the correct implementation. - function _assertDisputeGames(GameType _gameType) internal { - deployOPChainInput.disputeGameType = _gameType; - + deployOPChainInput.disputeGameType = GameTypes.PERMISSIONED_CANNON; DeployOPChain.Output memory doo = deployOPChain.run(deployOPChainInput); address expectedPermissioned = address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.PERMISSIONED_CANNON)); @@ -382,25 +311,23 @@ contract DeployOPChain_Test is DeployOPChain_TestBase { ); assertNotEq(address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.PERMISSIONED_CANNON)), address(0)); - // CANNON is only enabled if it's the starting game type - bool cannonEnabled = deployOPChainInput.disputeGameType.raw() == GameTypes.CANNON.raw(); + // CANNON must be disabled for initial deployment + assertEq(doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON), 0, "CANNON init bond should be 0"); assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON), - cannonEnabled ? 
deployOPChain.DEFAULT_INIT_BOND() : 0 + address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON)), + address(0), + "CANNON impl should be the zero address" ); - if (cannonEnabled) { - assertNotEq(address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON)), address(0)); - } - // CANNON_KONA is only enabled if it's the starting game type - bool cannonKonaEnabled = deployOPChainInput.disputeGameType.raw() == GameTypes.CANNON_KONA.raw(); + // CANNON_KONA must be disabled for initial deployment + assertEq( + doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON_KONA), 0, "CANNON_KONA init bond should be 0" + ); assertEq( - doo.disputeGameFactoryProxy.initBonds(GameTypes.CANNON_KONA), - cannonKonaEnabled ? deployOPChain.DEFAULT_INIT_BOND() : 0 + address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON_KONA)), + address(0), + "CANNON_KONA impl should be the zero address" ); - if (cannonKonaEnabled) { - assertNotEq(address(doo.disputeGameFactoryProxy.gameImpls(GameTypes.CANNON_KONA)), address(0)); - } } } } diff --git a/packages/contracts-bedrock/test/opcm/DeploySaferSafes.t.sol b/packages/contracts-bedrock/test/opcm/DeploySaferSafes.t.sol new file mode 100644 index 00000000000..49ad8ebecdb --- /dev/null +++ b/packages/contracts-bedrock/test/opcm/DeploySaferSafes.t.sol @@ -0,0 +1,140 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Testing +import { Test } from "test/setup/Test.sol"; + +// Scripts +import { DeploySaferSafes } from "scripts/deploy/DeploySaferSafes.s.sol"; + +// Interfaces +import { ISaferSafes } from "interfaces/safe/ISaferSafes.sol"; + +/// @title DeploySaferSafes_Test +/// @notice Tests for the DeploySaferSafes script. +contract DeploySaferSafes_Test is Test { + DeploySaferSafes deploySaferSafes; + + /// @notice Sets up the test suite. + function setUp() public { + deploySaferSafes = new DeploySaferSafes(); + } + + /// @notice Tests that the DeploySaferSafes script succeeds. 
+ function test_run_succeeds() public { + DeploySaferSafes.Output memory output = deploySaferSafes.run(); + + // Verify the SaferSafes singleton is deployed. + assertNotEq(address(output.saferSafesSingleton), address(0), "SaferSafes address is zero"); + + // Verify the contract has code. + assertGt(address(output.saferSafesSingleton).code.length, 0, "SaferSafes has no code"); + + // Verify the version is correct. + assertEq(output.saferSafesSingleton.version(), "1.10.1", "SaferSafes version mismatch"); + } + + /// @notice Tests that the deployment is deterministic and reuses addresses. + function test_reuseAddresses_succeeds() public { + DeploySaferSafes.Output memory output1 = deploySaferSafes.run(); + DeploySaferSafes.Output memory output2 = deploySaferSafes.run(); + + // Verify that the same address is reused. + assertEq( + address(output1.saferSafesSingleton), + address(output2.saferSafesSingleton), + "SaferSafes address should be reused" + ); + } + + /// @notice Tests that assertValidOutput succeeds with valid output. + function test_assertValidOutput_succeeds() public { + DeploySaferSafes.Output memory output = deploySaferSafes.run(); + + // This should not revert. + deploySaferSafes.assertValidOutput(output); + } + + /// @notice Tests that assertValidOutput reverts when the address is zero. + function test_assertValidOutput_zeroAddress_reverts() public { + DeploySaferSafes.Output memory output; + output.saferSafesSingleton = ISaferSafes(address(0)); + + vm.expectRevert("DeployUtils: zero address"); + deploySaferSafes.assertValidOutput(output); + } + + /// @notice Tests that assertValidOutput reverts when the contract has no code. 
+ function test_assertValidOutput_noCode_reverts() public { + DeploySaferSafes.Output memory output; + address noCodeAddr = makeAddr("noCode"); + output.saferSafesSingleton = ISaferSafes(noCodeAddr); + + vm.expectRevert(bytes(string.concat("DeployUtils: no code at ", vm.toString(noCodeAddr)))); + deploySaferSafes.assertValidOutput(output); + } + + /// @notice Tests that assertValidOutput reverts when the version is incorrect. + function test_assertValidOutput_wrongVersion_reverts() public { + // Deploy a mock contract with a different version. + MockSaferSafes mockSaferSafes = new MockSaferSafes(); + + DeploySaferSafes.Output memory output; + output.saferSafesSingleton = ISaferSafes(address(mockSaferSafes)); + + vm.expectRevert("DeploySaferSafes: unexpected version"); + deploySaferSafes.assertValidOutput(output); + } + + /// @notice Tests that the deployment uses CREATE2 for deterministic addresses. + function test_deterministicDeployment_succeeds() public { + // First deployment. + DeploySaferSafes.Output memory output1 = deploySaferSafes.run(); + + // The contract should be deployed at a deterministic address. + // Running again should return the same address without redeploying. + DeploySaferSafes.Output memory output2 = deploySaferSafes.run(); + + // Verify that the same address is used. + assertEq( + address(output1.saferSafesSingleton), + address(output2.saferSafesSingleton), + "SaferSafes address should be deterministic" + ); + + // Verify that the contract has code (it wasn't redeployed). + assertGt(address(output2.saferSafesSingleton).code.length, 0, "Contract should have code"); + } + + /// @notice Tests that multiple runs do not redeploy the contract. + function test_multipleRuns_succeeds() public { + DeploySaferSafes.Output memory output1 = deploySaferSafes.run(); + DeploySaferSafes.Output memory output2 = deploySaferSafes.run(); + DeploySaferSafes.Output memory output3 = deploySaferSafes.run(); + + // All deployments should use the same address. 
+ assertEq(address(output1.saferSafesSingleton), address(output2.saferSafesSingleton), "Second run mismatch"); + assertEq(address(output2.saferSafesSingleton), address(output3.saferSafesSingleton), "Third run mismatch"); + } + + /// @notice Tests that the deployed contract has the expected version string format. + function test_versionFormat_succeeds() public { + DeploySaferSafes.Output memory output = deploySaferSafes.run(); + + string memory version = output.saferSafesSingleton.version(); + + // Verify the version is not empty. + assertTrue(bytes(version).length > 0, "Version should not be empty"); + + // Verify the version matches the expected format. + assertEq(version, "1.10.1", "Version should be 1.10.1"); + } +} + +/// @title MockSaferSafes +/// @notice A mock SaferSafes contract with a different version for testing. +contract MockSaferSafes { + function version() external pure returns (string memory) { + return "0.0.0"; + } +} diff --git a/packages/contracts-bedrock/test/scripts/L2Genesis.t.sol b/packages/contracts-bedrock/test/scripts/L2Genesis.t.sol index e157c1cff99..7750eff501e 100644 --- a/packages/contracts-bedrock/test/scripts/L2Genesis.t.sol +++ b/packages/contracts-bedrock/test/scripts/L2Genesis.t.sol @@ -20,7 +20,7 @@ import { IL1FeeVault } from "interfaces/L2/IL1FeeVault.sol"; import { IOperatorFeeVault } from "interfaces/L2/IOperatorFeeVault.sol"; import { IOptimismMintableERC20Factory } from "interfaces/universal/IOptimismMintableERC20Factory.sol"; import { IOptimismMintableERC721Factory } from "interfaces/L2/IOptimismMintableERC721Factory.sol"; -import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; +import { IL2ProxyAdmin } from "interfaces/L2/IL2ProxyAdmin.sol"; import { IGovernanceToken } from "interfaces/governance/IGovernanceToken.sol"; import { IGasPriceOracle } from "interfaces/L2/IGasPriceOracle.sol"; import { IFeeSplitter } from "interfaces/L2/IFeeSplitter.sol"; @@ -43,14 +43,14 @@ abstract contract L2Genesis_TestInit is 
Test { function testProxyAdmin() internal view { // Verify owner in the proxy - assertEq(input.opChainProxyAdminOwner, IProxyAdmin(Predeploys.PROXY_ADMIN).owner()); + assertEq(input.opChainProxyAdminOwner, IL2ProxyAdmin(Predeploys.PROXY_ADMIN).owner()); // Verify owner in the implementation to catch storage shifting issues // The implementation is stored in the code namespace address proxyAdminImpl = Predeploys.predeployToCodeNamespace(Predeploys.PROXY_ADMIN); assertEq( input.opChainProxyAdminOwner, - IProxyAdmin(proxyAdminImpl).owner(), + IL2ProxyAdmin(proxyAdminImpl).owner(), "ProxyAdmin implementation owner should match expected" ); } @@ -71,7 +71,11 @@ abstract contract L2Genesis_TestInit is Test { assertEq(Predeploys.PROXY_ADMIN, EIP1967Helper.getAdmin(addr)); // If it's not a supported predeploy, skip next checks. - if (!Predeploys.isSupportedPredeploy(addr, uint256(LATEST_FORK), true, input.useCustomGasToken)) { + if ( + !Predeploys.isSupportedPredeploy( + addr, uint256(LATEST_FORK), true, input.useCustomGasToken, input.useL2CM + ) + ) { continue; } @@ -260,7 +264,8 @@ contract L2Genesis_Run_Test is L2Genesis_TestInit { gasPayingTokenName: "", gasPayingTokenSymbol: "", nativeAssetLiquidityAmount: type(uint248).max, - liquidityControllerOwner: address(0x000000000000000000000000000000000000000d) + liquidityControllerOwner: address(0x000000000000000000000000000000000000000d), + useL2CM: false }); } @@ -440,4 +445,18 @@ contract L2Genesis_Run_Test is L2Genesis_TestInit { vm.expectRevert("FeeVault: custom gas token and revenue share cannot be enabled together"); genesis.run(input); } + + /// @notice Tests that enabling l2cm succeeds. 
+ function test_run_l2cm_succeeds() external { + input.useL2CM = true; + genesis.run(input); + + testProxyAdmin(); + testPredeploys(); + testVaultsWithRevenueShare(); + testGovernance(); + testFactories(); + testForks(); + testFeeSplitter(); + } } diff --git a/packages/contracts-bedrock/test/scripts/ReadImplementationAddresses.t.sol b/packages/contracts-bedrock/test/scripts/ReadImplementationAddresses.t.sol new file mode 100644 index 00000000000..b5b09edb45a --- /dev/null +++ b/packages/contracts-bedrock/test/scripts/ReadImplementationAddresses.t.sol @@ -0,0 +1,123 @@ +// SPDX-License-Identifier: MIT +pragma solidity 0.8.15; + +// Testing +import { CommonTest } from "test/setup/CommonTest.sol"; + +// Scripts +import { ReadImplementationAddresses } from "scripts/deploy/ReadImplementationAddresses.s.sol"; + +// Interfaces +import { IOPContractsManager } from "interfaces/L1/IOPContractsManager.sol"; +import { IOPContractsManagerV2 } from "interfaces/L1/opcm/IOPContractsManagerV2.sol"; +import { IMIPS64 } from "interfaces/cannon/IMIPS64.sol"; + +// Libraries +import { DevFeatures } from "src/libraries/DevFeatures.sol"; + +/// @title ReadImplementationAddressesTest +/// @notice Tests that ReadImplementationAddresses correctly reads implementation addresses +/// from the deployed contracts. Uses CommonTest to get real deployed contracts. +contract ReadImplementationAddressesTest is CommonTest { + ReadImplementationAddresses script; + + function setUp() public override { + super.setUp(); + script = new ReadImplementationAddresses(); + } + + /// @notice Returns the OPCM instance, handling V1 vs V2 feature flag. + function _opcm() internal view returns (IOPContractsManager) { + return isDevFeatureEnabled(DevFeatures.OPCM_V2) ? IOPContractsManager(address(opcmV2)) : opcm; + } + + /// @notice Builds the input struct from the deployed contracts. 
+ function _buildInput() internal view returns (ReadImplementationAddresses.Input memory input_) { + input_.addressManager = address(addressManager); + input_.l1ERC721BridgeProxy = address(l1ERC721Bridge); + input_.systemConfigProxy = address(systemConfig); + input_.optimismMintableERC20FactoryProxy = address(l1OptimismMintableERC20Factory); + input_.l1StandardBridgeProxy = address(l1StandardBridge); + input_.optimismPortalProxy = address(optimismPortal2); + input_.disputeGameFactoryProxy = address(disputeGameFactory); + input_.opcm = address(_opcm()); + } + + /// @notice Tests that ReadImplementationAddresses.run succeeds and returns correct addresses. + function test_run_succeeds() public { + ReadImplementationAddresses.Input memory input = _buildInput(); + ReadImplementationAddresses.Output memory output = script.run(input); + + // Get expected implementations from OPCM + IOPContractsManager opcm_ = _opcm(); + IOPContractsManager.Implementations memory impls = opcm_.implementations(); + + // Assert implementations from OPCM match output + assertEq(output.delayedWETH, impls.delayedWETHImpl, "DelayedWETH should match"); + assertEq(output.anchorStateRegistry, impls.anchorStateRegistryImpl, "AnchorStateRegistry should match"); + assertEq(output.mipsSingleton, impls.mipsImpl, "MIPS singleton should match"); + assertEq(output.faultDisputeGame, impls.faultDisputeGameImpl, "FaultDisputeGame should match"); + assertEq( + output.permissionedDisputeGame, impls.permissionedDisputeGameImpl, "PermissionedDisputeGame should match" + ); + + // Assert PreimageOracle is read from MIPS + IMIPS64 mips_ = IMIPS64(impls.mipsImpl); + assertEq(output.preimageOracleSingleton, address(mips_.oracle()), "PreimageOracle should match"); + + // Assert OPCM standard validator + assertEq( + output.opcmStandardValidator, address(opcm_.opcmStandardValidator()), "OPCM StandardValidator should match" + ); + + // Assert V1 vs V2 specific fields + if (isDevFeatureEnabled(DevFeatures.OPCM_V2)) { + // 
V2: deployer/upgrader/gameTypeAdder are zero, migrator comes from opcmMigrator() + assertEq(output.opcmDeployer, address(0), "OPCM Deployer should be zero in V2"); + assertEq(output.opcmUpgrader, address(0), "OPCM Upgrader should be zero in V2"); + assertEq(output.opcmGameTypeAdder, address(0), "OPCM GameTypeAdder should be zero in V2"); + assertEq( + output.opcmInteropMigrator, + address(IOPContractsManagerV2(address(opcm_)).opcmMigrator()), + "OPCM InteropMigrator should match" + ); + } else { + // V1: all component addresses come from opcm getters + assertEq(output.opcmDeployer, address(opcm_.opcmDeployer()), "OPCM Deployer should match"); + assertEq(output.opcmUpgrader, address(opcm_.opcmUpgrader()), "OPCM Upgrader should match"); + assertEq(output.opcmGameTypeAdder, address(opcm_.opcmGameTypeAdder()), "OPCM GameTypeAdder should match"); + assertEq( + output.opcmInteropMigrator, address(opcm_.opcmInteropMigrator()), "OPCM InteropMigrator should match" + ); + } + } + + /// @notice Tests that ReadImplementationAddresses.runWithBytes succeeds. + function test_runWithBytes_succeeds() public { + ReadImplementationAddresses.Input memory input = _buildInput(); + bytes memory inputBytes = abi.encode(input); + + bytes memory outputBytes = script.runWithBytes(inputBytes); + ReadImplementationAddresses.Output memory output = abi.decode(outputBytes, (ReadImplementationAddresses.Output)); + + // Get expected implementations from OPCM + IOPContractsManager opcm_ = _opcm(); + IOPContractsManager.Implementations memory impls = opcm_.implementations(); + + // Assert key values match + assertEq(output.delayedWETH, impls.delayedWETHImpl, "DelayedWETH should match"); + assertEq(output.mipsSingleton, impls.mipsImpl, "MIPS singleton should match"); + assertEq( + output.opcmStandardValidator, address(opcm_.opcmStandardValidator()), "OPCM StandardValidator should match" + ); + } + + /// @notice Tests that the script reverts when OPCM address has no code. 
+ function test_run_opcmCodeLengthZero_reverts() public { + ReadImplementationAddresses.Input memory input = _buildInput(); + input.opcm = address(0); + + vm.expectRevert("ReadImplementationAddresses: OPCM address has no code"); + script.run(input); + } +} diff --git a/packages/contracts-bedrock/test/scripts/VerifyOPCM.t.sol b/packages/contracts-bedrock/test/scripts/VerifyOPCM.t.sol index bef1cfa7d96..5f8fb5dbb86 100644 --- a/packages/contracts-bedrock/test/scripts/VerifyOPCM.t.sol +++ b/packages/contracts-bedrock/test/scripts/VerifyOPCM.t.sol @@ -13,9 +13,23 @@ import { VerifyOPCM } from "scripts/deploy/VerifyOPCM.s.sol"; // Interfaces import { IOPContractsManager, IOPContractsManagerUpgrader } from "interfaces/L1/IOPContractsManager.sol"; +import { IOPContractsManagerStandardValidator } from "interfaces/L1/IOPContractsManagerStandardValidator.sol"; import { IOPContractsManagerV2 } from "interfaces/L1/opcm/IOPContractsManagerV2.sol"; +import { IOptimismPortal2 } from "interfaces/L1/IOptimismPortal2.sol"; +import { IAnchorStateRegistry } from "interfaces/dispute/IAnchorStateRegistry.sol"; +import { IMIPS64 } from "interfaces/cannon/IMIPS64.sol"; contract VerifyOPCM_Harness is VerifyOPCM { + bool private _skipSecurityChecks; + + function setSkipSecurityValueChecks(bool _skip) public { + _skipSecurityChecks = _skip; + } + + function skipSecurityValueChecks() public view override returns (bool) { + return _skipSecurityChecks; + } + function loadArtifactInfo(string memory _artifactPath) public view returns (ArtifactInfo memory) { return _loadArtifactInfo(_artifactPath); } @@ -62,6 +76,26 @@ contract VerifyOPCM_Harness is VerifyOPCM { function removeExpectedGetter(string memory _getter) public { expectedGetters[_getter] = ""; } + + function verifyPreimageOracle(IMIPS64 _mips) public view returns (bool) { + return _verifyPreimageOracle(_mips); + } + + function verifyPortalDelays(IOptimismPortal2 _portal) public view returns (bool) { + return _verifyPortalDelays(_portal); 
+ } + + function verifyAnchorStateRegistryDelays(IAnchorStateRegistry _asr) public view returns (bool) { + return _verifyAnchorStateRegistryDelays(_asr); + } + + function verifyStandardValidatorArgs(IOPContractsManager _opcm, address _validator) public returns (bool) { + return _verifyStandardValidatorArgs(_opcm, _validator); + } + + function setValidatorGetterCheck(string memory _getter, string memory _check) public { + validatorGetterChecks[_getter] = _check; + } } /// @title VerifyOPCM_TestInit @@ -73,12 +107,44 @@ abstract contract VerifyOPCM_TestInit is CommonTest { super.setUp(); harness = new VerifyOPCM_Harness(); harness.setUp(); + + // If OPCM V2 is enabled, set up the test environment for OPCM V2. + // nosemgrep: sol-style-vm-env-only-in-config-sol + if (vm.envOr("DEV_FEATURE__OPCM_V2", false)) { + opcm = IOPContractsManager(address(opcmV2)); + } + + // Always set up the environment variables for the test. + setupEnvVars(); + + // Set the OPCM address so that runSingle also runs for V2 OPCM if the dev feature is enabled. + vm.setEnv("OPCM_ADDRESS", vm.toString(address(opcm))); } /// @notice Sets up the environment variables for the VerifyOPCM test. function setupEnvVars() public { - vm.setEnv("EXPECTED_SUPERCHAIN_CONFIG", vm.toString(address(opcm.superchainConfig()))); - vm.setEnv("EXPECTED_PROTOCOL_VERSIONS", vm.toString(address(opcm.protocolVersions()))); + // If OPCM V2 is not enabled, set the environment variables for the old OPCM. + if (!isDevFeatureEnabled(DevFeatures.OPCM_V2)) { + vm.setEnv("EXPECTED_SUPERCHAIN_CONFIG", vm.toString(address(opcm.superchainConfig()))); + vm.setEnv("EXPECTED_PROTOCOL_VERSIONS", vm.toString(address(opcm.protocolVersions()))); + } + + // Grab a reference to the validator. + IOPContractsManagerStandardValidator validator = + IOPContractsManagerStandardValidator(opcm.opcmStandardValidator()); + + // Fetch all of the expected values from existing contracts, this just makes the tests pass + // by default. 
We will override these with bad values during tests to demonstrate that the + // script correctly rejects them. + vm.setEnv("EXPECTED_L1_PAO_MULTISIG", vm.toString(validator.l1PAOMultisig())); + vm.setEnv("EXPECTED_CHALLENGER", vm.toString(validator.challenger())); + vm.setEnv("EXPECTED_WITHDRAWAL_DELAY_SECONDS", vm.toString(validator.withdrawalDelaySeconds())); + vm.setEnv("EXPECTED_SUPERCHAIN_CONFIG", vm.toString(address(optimismPortal2.superchainConfig()))); + vm.setEnv("EXPECTED_PROOF_MATURITY_DELAY_SECONDS", vm.toString(optimismPortal2.proofMaturityDelaySeconds())); + vm.setEnv( + "EXPECTED_DISPUTE_GAME_FINALITY_DELAY_SECONDS", + vm.toString(anchorStateRegistry.disputeGameFinalityDelaySeconds()) + ); } } @@ -87,17 +153,6 @@ abstract contract VerifyOPCM_TestInit is CommonTest { contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { function setUp() public override { super.setUp(); - - // If OPCM V2 is enabled, set up the test environment for OPCM V2. - // nosemgrep: sol-style-vm-env-only-in-config-sol - if (vm.envOr("DEV_FEATURE__OPCM_V2", false)) { - opcm = IOPContractsManager(address(opcmV2)); - } else { - setupEnvVars(); - } - - // Set the OPCM address so that runSingle also runs for V2 OPCM if the dev feature is enabled. - vm.setEnv("OPCM_ADDRESS", vm.toString(address(opcm))); } /// @notice Tests that the script succeeds when no changes are introduced. @@ -159,6 +214,9 @@ contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { // Coverage changes bytecode and causes failures, skip. skipIfCoverage(); + // Skip security value checks since this test deliberately corrupts immutable values. + harness.setSkipSecurityValueChecks(true); + // Grab the list of implementations. VerifyOPCM.OpcmContractRef[] memory refs = harness.getOpcmContractRefs(opcm, "implementations", false); @@ -228,6 +286,9 @@ contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { // Coverage changes bytecode and causes failures, skip. 
skipIfCoverage(); + // Skip security value checks since corrupted bytecode may break contract queries. + harness.setSkipSecurityValueChecks(true); + // Grab the list of implementations. VerifyOPCM.OpcmContractRef[] memory refs = harness.getOpcmContractRefs(opcm, "implementations", false); @@ -575,9 +636,6 @@ contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { // Verify that immutable variables fail validation bool result = harness.verifyOpcmImmutableVariables(opcm); assertFalse(result, "OPCM with invalid immutable variables should fail verification"); - - // Clear mock calls and restore original environment variables to avoid test isolation issues - vm.clearMockedCalls(); } /// @notice Tests that the script fails when OPCM immutable variables are invalid. @@ -589,26 +647,6 @@ contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { // If OPCM V2 is enabled because we do not use environment variables for OPCM V2. skipIfDevFeatureEnabled(DevFeatures.OPCM_V2); - // Set expected addresses via environment variables - address expectedSuperchainConfig = address(0x1111); - address expectedProtocolVersions = address(0x2222); - - // Use vm.mockCall instead of vm.setEnv to avoid global env mutation. We need to ignore - // semgrep here because envAddress has multiple potential signatures so we can't use - // abi.encodeCall. 
- // nosemgrep: sol-style-use-abi-encodecall - vm.mockCall( - address(vm), - abi.encodeWithSignature("envAddress(string)", "EXPECTED_SUPERCHAIN_CONFIG"), - abi.encode(expectedSuperchainConfig) - ); - // nosemgrep: sol-style-use-abi-encodecall - vm.mockCall( - address(vm), - abi.encodeWithSignature("envAddress(string)", "EXPECTED_PROTOCOL_VERSIONS"), - abi.encode(expectedProtocolVersions) - ); - // Test that mocking each individual getter causes verification to fail _assertOnOpcmGetter(IOPContractsManager.superchainConfig.selector); _assertOnOpcmGetter(IOPContractsManager.protocolVersions.selector); @@ -634,3 +672,88 @@ contract VerifyOPCM_Run_Test is VerifyOPCM_TestInit { harness.validateAllGettersAccounted(); } } + +/// @title VerifyOPCM_verifyPortalDelays_Test +/// @notice Tests for the portal delay verification function. +contract VerifyOPCM_verifyPortalDelays_Test is VerifyOPCM_TestInit { + function setUp() public override { + super.setUp(); + vm.setEnv("EXPECTED_PROOF_MATURITY_DELAY_SECONDS", vm.toString(optimismPortal2.proofMaturityDelaySeconds())); + } + + /// @notice Tests that portal delay verification succeeds with correct values. + function test_verifyPortalDelays_matchingDelay_succeeds() public view { + bool result = harness.verifyPortalDelays(optimismPortal2); + assertTrue(result, "Portal delay verification should succeed"); + } + + /// @notice Tests that portal delay verification fails with wrong expected value. + function test_verifyPortalDelays_mismatchedDelay_fails() public { + // Mock the portal to return a different delay than expected. 
+ vm.mockCall( + address(optimismPortal2), + abi.encodeCall(IOptimismPortal2.proofMaturityDelaySeconds, ()), + abi.encode(uint256(12345)) + ); + bool result = harness.verifyPortalDelays(optimismPortal2); + assertFalse(result, "Portal delay verification should fail with wrong expected value"); + } +} + +/// @title VerifyOPCM_verifyAnchorStateRegistryDelays_Test +/// @notice Tests for the anchor state registry delay verification function. +contract VerifyOPCM_verifyAnchorStateRegistryDelays_Test is VerifyOPCM_TestInit { + function setUp() public override { + super.setUp(); + vm.setEnv( + "EXPECTED_DISPUTE_GAME_FINALITY_DELAY_SECONDS", + vm.toString(anchorStateRegistry.disputeGameFinalityDelaySeconds()) + ); + } + + /// @notice Tests that ASR delay verification succeeds with correct values. + function test_verifyAnchorStateRegistryDelays_matchingDelay_succeeds() public view { + bool result = harness.verifyAnchorStateRegistryDelays(anchorStateRegistry); + assertTrue(result, "ASR delay verification should succeed"); + } + + /// @notice Tests that ASR delay verification fails with wrong expected value. + function test_verifyAnchorStateRegistryDelays_mismatchedDelay_fails() public { + // Mock the ASR to return a different delay than expected. + vm.mockCall( + address(anchorStateRegistry), + abi.encodeCall(IAnchorStateRegistry.disputeGameFinalityDelaySeconds, ()), + abi.encode(uint256(99999)) + ); + bool result = harness.verifyAnchorStateRegistryDelays(anchorStateRegistry); + assertFalse(result, "ASR delay verification should fail with wrong expected value"); + } +} + +/// @title VerifyOPCM_verifyPreimageOracle_Test +/// @notice Tests for the PreimageOracle bytecode verification function. +contract VerifyOPCM_verifyPreimageOracle_Test is VerifyOPCM_TestInit { + /// @notice Tests that PreimageOracle verification succeeds when bytecode matches. 
+ function test_verifyPreimageOracle_matchingBytecode_succeeds() public { + skipIfCoverage(); + IMIPS64 mipsImpl = IMIPS64(opcm.implementations().mipsImpl); + bool result = harness.verifyPreimageOracle(mipsImpl); + assertTrue(result, "PreimageOracle verification should succeed"); + } + + /// @notice Tests that PreimageOracle verification fails when bytecode doesn't match. + function test_verifyPreimageOracle_corruptedBytecode_fails() public { + skipIfCoverage(); + IMIPS64 mipsImpl = IMIPS64(opcm.implementations().mipsImpl); + address oracleAddr = address(mipsImpl.oracle()); + + bytes memory corruptedCode = oracleAddr.code; + if (corruptedCode.length > 100) { + corruptedCode[100] = bytes1(uint8(corruptedCode[100]) ^ 0xFF); + } + vm.etch(oracleAddr, corruptedCode); + + bool result = harness.verifyPreimageOracle(mipsImpl); + assertFalse(result, "PreimageOracle verification should fail with corrupted bytecode"); + } +} diff --git a/packages/contracts-bedrock/test/setup/CommonTest.sol b/packages/contracts-bedrock/test/setup/CommonTest.sol index daea37745ae..5bfbcd7f072 100644 --- a/packages/contracts-bedrock/test/setup/CommonTest.sol +++ b/packages/contracts-bedrock/test/setup/CommonTest.sol @@ -105,6 +105,11 @@ abstract contract CommonTest is Test, Setup, Events { deploy.cfg().setOperatorFeeVaultWithdrawalNetwork(1); } + if (Config.devFeatureL2CM()) { + console.log("CommonTest: enabling l2cm"); + deploy.cfg().setUseL2CM(true); + } + if (isForkTest()) { // Skip any test suite which uses a nonstandard configuration. 
if (useAltDAOverride || useInteropOverride) { diff --git a/packages/contracts-bedrock/test/setup/DisputeGames.sol b/packages/contracts-bedrock/test/setup/DisputeGames.sol index 6268a5bc92f..e71b17ca4b7 100644 --- a/packages/contracts-bedrock/test/setup/DisputeGames.sol +++ b/packages/contracts-bedrock/test/setup/DisputeGames.sol @@ -13,6 +13,7 @@ import { LibGameArgs } from "src/dispute/lib/LibGameArgs.sol"; // Interfaces import "../../interfaces/dispute/IDisputeGame.sol"; +import "../../interfaces/dispute/IDelayedWETH.sol"; import "../../interfaces/dispute/IDisputeGameFactory.sol"; import { IFaultDisputeGame } from "../../interfaces/dispute/IFaultDisputeGame.sol"; import { IPermissionedDisputeGame } from "../../interfaces/dispute/IPermissionedDisputeGame.sol"; @@ -154,6 +155,35 @@ library DisputeGames { } } + /// @notice Gets the DelayedWETH for a game type, handling both v1 and v2 dispute games. + /// V1 games store the prestate on the game implementation, v2 games store it in gameArgs. + /// Returns address(0) if no implementation exists for the game type. + /// @param _dgf The dispute game factory. + /// @param _gameType The game type to get the DelayedWETH for. + /// @return delayedWeth_ The delayedWETH address. 
+ function getGameImplDelayedWeth( + IDisputeGameFactory _dgf, + GameType _gameType + ) + internal + view + returns (IDelayedWETH delayedWeth_) + { + // Return zero if no implementation exists for this game type + address gameImpl = address(_dgf.gameImpls(_gameType)); + if (gameImpl == address(0)) { + return IDelayedWETH(payable(address(0))); + } + + (bool gameArgsExist, bytes memory gameArgsData) = _getGameArgs(_dgf, _gameType); + if (gameArgsExist) { + LibGameArgs.GameArgs memory gameArgs = LibGameArgs.decode(gameArgsData); + delayedWeth_ = IDelayedWETH(payable(gameArgs.weth)); + } else { + delayedWeth_ = IFaultDisputeGame(gameImpl).weth(); + } + } + function mockGameImplPrestate(IDisputeGameFactory _dgf, GameType _gameType, bytes32 _prestate) internal { bytes memory value = abi.encodePacked(_prestate); _mockGameArg(_dgf, _gameType, GameArg.PRESTATE, value); diff --git a/packages/contracts-bedrock/test/setup/FeatureFlags.sol b/packages/contracts-bedrock/test/setup/FeatureFlags.sol index 300bfea09d9..b9c22cae022 100644 --- a/packages/contracts-bedrock/test/setup/FeatureFlags.sol +++ b/packages/contracts-bedrock/test/setup/FeatureFlags.sol @@ -45,6 +45,10 @@ abstract contract FeatureFlags { console.log("Setup: DEV_FEATURE__OPCM_V2 is enabled"); devFeatureBitmap |= DevFeatures.OPCM_V2; } + if (Config.devFeatureL2CM()) { + console.log("Setup: DEV_FEATURE__L2CM is enabled"); + devFeatureBitmap |= DevFeatures.L2CM; + } } /// @notice Returns the string name of a feature. 
@@ -55,6 +59,8 @@ abstract contract FeatureFlags { return "DEV_FEATURE__OPTIMISM_PORTAL_INTEROP"; } else if (_feature == DevFeatures.OPCM_V2) { return "DEV_FEATURE__OPCM_V2"; + } else if (_feature == DevFeatures.L2CM) { + return "DEV_FEATURE__L2CM"; } else if (_feature == Features.CUSTOM_GAS_TOKEN) { return "SYS_FEATURE__CUSTOM_GAS_TOKEN"; } else if (_feature == Features.ETH_LOCKBOX) { diff --git a/packages/contracts-bedrock/test/setup/ForkLive.s.sol b/packages/contracts-bedrock/test/setup/ForkLive.s.sol index c59a3feafb6..be371195255 100644 --- a/packages/contracts-bedrock/test/setup/ForkLive.s.sol +++ b/packages/contracts-bedrock/test/setup/ForkLive.s.sol @@ -32,7 +32,6 @@ import { ISystemConfig } from "interfaces/L1/ISystemConfig.sol"; import { IProxyAdmin } from "interfaces/universal/IProxyAdmin.sol"; import { IOPContractsManager } from "interfaces/L1/IOPContractsManager.sol"; import { IAnchorStateRegistry } from "interfaces/dispute/IAnchorStateRegistry.sol"; -import { IFaultDisputeGame } from "interfaces/dispute/IFaultDisputeGame.sol"; import { IETHLockbox } from "interfaces/L1/IETHLockbox.sol"; import { IOptimismPortal2 } from "interfaces/L1/IOptimismPortal2.sol"; import { IOPContractsManagerUpgrader } from "interfaces/L1/IOPContractsManager.sol"; @@ -179,15 +178,22 @@ contract ForkLive is Deployer, StdAssertions, FeatureFlags { IDisputeGameFactory(artifacts.mustGetAddress("DisputeGameFactoryProxy")); // The PermissionedDisputeGame and PermissionedDelayedWETHProxy are not listed in the registry for OP, so we - // look it up onchain - IFaultDisputeGame permissionedDisputeGame = - IFaultDisputeGame(address(disputeGameFactory.gameImpls(GameTypes.PERMISSIONED_CANNON))); - artifacts.save("PermissionedDisputeGame", address(permissionedDisputeGame)); - artifacts.save("PermissionedDelayedWETHProxy", address(permissionedDisputeGame.weth())); - - // The SR seems out-of-date, so pull the DelayedWETH addresses from the PermissionedDisputeGame. 
- artifacts.save("DelayedWETHProxy", address(permissionedDisputeGame.weth())); - artifacts.save("DelayedWETHImpl", EIP1967Helper.getImplementation(address(permissionedDisputeGame.weth()))); + // look it up onchain. + address permissionedGameImpl = address(disputeGameFactory.gameImpls(GameTypes.PERMISSIONED_CANNON)); + artifacts.save("PermissionedDisputeGame", permissionedGameImpl); + + // Get DelayedWETH for PERMISSIONED games + IDelayedWETH permissionedDelayedWeth = + DisputeGames.getGameImplDelayedWeth(disputeGameFactory, GameTypes.PERMISSIONED_CANNON); + artifacts.save("PermissionedDelayedWETHProxy", address(permissionedDelayedWeth)); + + // Get DelayedWETH for PERMISSIONLESS games (CANNON) + IDelayedWETH permissionlessDelayedWeth = + DisputeGames.getGameImplDelayedWeth(disputeGameFactory, GameTypes.CANNON); + + // The SR seems out-of-date, so pull the DelayedWETH addresses from the games. + artifacts.save("DelayedWETHProxy", address(permissionlessDelayedWeth)); + artifacts.save("DelayedWETHImpl", EIP1967Helper.getImplementation(address(permissionlessDelayedWeth))); } /// @notice Calls to the Deploy.s.sol contract etched by Setup.sol to a deterministic address, sets up the diff --git a/packages/contracts-bedrock/test/setup/Setup.sol b/packages/contracts-bedrock/test/setup/Setup.sol index b83c68cf5ff..8e58d04c092 100644 --- a/packages/contracts-bedrock/test/setup/Setup.sol +++ b/packages/contracts-bedrock/test/setup/Setup.sol @@ -70,6 +70,7 @@ import { IFeeSplitter } from "interfaces/L2/IFeeSplitter.sol"; import { IL1Withdrawer } from "interfaces/L2/IL1Withdrawer.sol"; import { ISuperchainRevSharesCalculator } from "interfaces/L2/ISuperchainRevSharesCalculator.sol"; import { IOPContractsManagerV2 } from "interfaces/L1/opcm/IOPContractsManagerV2.sol"; +import { IConditionalDeployer } from "interfaces/L2/IConditionalDeployer.sol"; /// @title Setup /// @dev This contact is responsible for setting up the contracts in state. 
It currently @@ -160,6 +161,7 @@ abstract contract Setup is FeatureFlags { IFeeSplitter feeSplitter = IFeeSplitter(payable(Predeploys.FEE_SPLITTER)); IL1Withdrawer l1Withdrawer; ISuperchainRevSharesCalculator superchainRevSharesCalculator; + IConditionalDeployer conditionalDeployer = IConditionalDeployer(Predeploys.CONDITIONAL_DEPLOYER); /// @notice Indicates whether a test is running against a forked production network. function isForkTest() public view returns (bool) { @@ -363,7 +365,8 @@ abstract contract Setup is FeatureFlags { gasPayingTokenName: deploy.cfg().gasPayingTokenName(), gasPayingTokenSymbol: deploy.cfg().gasPayingTokenSymbol(), nativeAssetLiquidityAmount: deploy.cfg().nativeAssetLiquidityAmount(), - liquidityControllerOwner: deploy.cfg().liquidityControllerOwner() + liquidityControllerOwner: deploy.cfg().liquidityControllerOwner(), + useL2CM: deploy.cfg().useL2CM() }) ); @@ -387,6 +390,7 @@ abstract contract Setup is FeatureFlags { labelPredeploy(Predeploys.SEQUENCER_FEE_WALLET); labelPredeploy(Predeploys.L2_ERC721_BRIDGE); labelPredeploy(Predeploys.OPTIMISM_MINTABLE_ERC721_FACTORY); + labelPredeploy(Predeploys.PROXY_ADMIN); labelPredeploy(Predeploys.BASE_FEE_VAULT); labelPredeploy(Predeploys.L1_FEE_VAULT); labelPredeploy(Predeploys.OPERATOR_FEE_VAULT); @@ -405,6 +409,7 @@ abstract contract Setup is FeatureFlags { labelPredeploy(Predeploys.NATIVE_ASSET_LIQUIDITY); labelPredeploy(Predeploys.LIQUIDITY_CONTROLLER); labelPredeploy(Predeploys.FEE_SPLITTER); + labelPredeploy(Predeploys.CONDITIONAL_DEPLOYER); // L2 Preinstalls labelPreinstall(Preinstalls.MultiCall3); diff --git a/reth b/reth deleted file mode 160000 index 8fa01eb62eb..00000000000 --- a/reth +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 8fa01eb62eb145112c8285f02aee12c2b4e2bd4d diff --git a/op-reth/.cargo/config.toml b/rust/.cargo/config.toml similarity index 100% rename from op-reth/.cargo/config.toml rename to rust/.cargo/config.toml diff --git a/rust/.config/zepter.yaml 
b/rust/.config/zepter.yaml new file mode 100644 index 00000000000..08e1bb53239 --- /dev/null +++ b/rust/.config/zepter.yaml @@ -0,0 +1,39 @@ +version: + format: 1 + # Minimum zepter version that is expected to work. This is just for printing a nice error + # message when someone tries to use an older version. + binary: 0.13.2 + +# The examples in the following comments assume crate `A` to have a dependency on crate `B`. +workflows: + check: + - [ + "lint", + # Check that `A` activates the features of `B`. + "propagate-feature", + # These are the features to check (union of all sub-project features): + "--features=std,serde,arbitrary,test-utils,metrics,op,dev,asm-keccak,jemalloc,jemalloc-prof,tracy-allocator,tracy,serde-bincode-compat,bench,alloy-compat,min-error-logs,min-warn-logs,min-info-logs,min-debug-logs,min-trace-logs,otlp,otlp-logs,js-tracer,portable,keccak-cache-global", + # Do not try to add a new section into `[features]` of `A` only because `B` exposes that feature. There are edge-cases where this is still needed, but we can add them manually. + "--left-side-feature-missing=ignore", + # Ignore the case that `A` is outside of the workspace. Otherwise it will report errors in external dependencies that we have no influence on. + "--left-side-outside-workspace=ignore", + # Only check normal dependencies. + # Propagating to dev-dependencies leads to compilation issues. + "--dep-kinds=normal:check,dev:ignore", + "--show-path", + "--quiet", + ] + default: + # Running `zepter` with no subcommand will check & fix. + - [$check.0, "--fix"] + +# Will be displayed when any workflow fails: +help: + text: | + This workspace uses the Zepter CLI to detect abnormalities in Cargo features, e.g. missing propagation. + + It looks like one more checks failed; please check the console output. + + You can try to automatically address them by installing zepter (`cargo install zepter --locked`) and simply running `zepter` in the workspace root. 
+ links: + - "https://github.com/ggwpez/zepter" diff --git a/kona/.dockerignore b/rust/.dockerignore similarity index 100% rename from kona/.dockerignore rename to rust/.dockerignore diff --git a/rust/.gitignore b/rust/.gitignore new file mode 100644 index 00000000000..f4ceea78560 --- /dev/null +++ b/rust/.gitignore @@ -0,0 +1 @@ +**/target/ diff --git a/rust/Cargo.lock b/rust/Cargo.lock new file mode 100644 index 00000000000..2e8a90552b5 --- /dev/null +++ b/rust/Cargo.lock @@ -0,0 +1,16283 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "addr2line" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aead" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" +dependencies = [ + "crypto-common", + "generic-array", +] + +[[package]] +name = "aes" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "aes-gcm" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" +dependencies = [ + "aead", + "aes", + "cipher", + "ctr", + "ghash", + "subtle", +] + +[[package]] +name = "ahash" +version = "0.8.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" +dependencies = [ + "cfg-if", + "getrandom 0.3.4", + "once_cell", + "version_check", + "zerocopy", +] + +[[package]] +name = "aho-corasick" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd31a130427c27518df266943a5308ed92d4b226cc639f5a8f1002816174301" +dependencies = [ + "memchr", +] + +[[package]] +name = "aligned-vec" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc890384c8602f339876ded803c97ad529f3842aba97f6392b3dba0dd171769b" +dependencies = [ + "equator", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "allocator-api2" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + +[[package]] +name = "alloy-chains" +version = "0.2.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90f374d3c6d729268bbe2d0e0ff992bb97898b2df756691a62ee1d5f0506bc39" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "num_enum", + "proptest", + "serde", + "strum 0.27.2", +] + +[[package]] +name = "alloy-consensus" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed1958f0294ecc05ebe7b3c9a8662a3e221c2523b7f2bcd94c7a651efbd510bf" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-trie", + "alloy-tx-macros", + "arbitrary", + "auto_impl", + "borsh", + 
"c-kzg", + "derive_more", + "either", + "k256", + "once_cell", + "rand 0.8.5", + "secp256k1 0.30.0", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-consensus-any" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f752e99497ddc39e22d547d7dfe516af10c979405a034ed90e69b914b7dddeae" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "arbitrary", + "serde", +] + +[[package]] +name = "alloy-dyn-abi" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14ff5ee5f27aa305bda825c735f686ad71bb65508158f059f513895abe69b8c3" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-type-parser", + "alloy-sol-types", + "derive_more", + "itoa", + "serde", + "serde_json", + "winnow", +] + +[[package]] +name = "alloy-eip2124" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "741bdd7499908b3aa0b159bba11e71c8cddd009a2c2eb7a06e825f1ec87900a5" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "crc", + "rand 0.8.5", + "serde", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-eip2930" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9441120fa82df73e8959ae0e4ab8ade03de2aaae61be313fbf5746277847ce25" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "borsh", + "rand 0.8.5", + "serde", +] + +[[package]] +name = "alloy-eip7702" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2919c5a56a1007492da313e7a3b6d45ef5edc5d33416fdec63c0d7a2702a0d20" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "borsh", + "k256", + "rand 0.8.5", + "serde", + "serde_with", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-eip7928" +version = "0.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3231de68d5d6e75332b7489cfcc7f4dfabeba94d990a10e4b923af0e6623540" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "borsh", + "serde", +] + +[[package]] +name = "alloy-eips" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "813a67f87e56b38554d18b182616ee5006e8e2bf9df96a0df8bf29dff1d52e3f" +dependencies = [ + "alloy-eip2124", + "alloy-eip2930", + "alloy-eip7702", + "alloy-eip7928", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "arbitrary", + "auto_impl", + "borsh", + "c-kzg", + "derive_more", + "either", + "ethereum_ssz", + "ethereum_ssz_derive", + "serde", + "serde_with", + "sha2", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-evm" +version = "0.27.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1582933a9fc27c0953220eb4f18f6492ff577822e9a8d848890ff59f6b4f5beb" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-hardforks", + "alloy-op-hardforks", + "alloy-primitives", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-sol-types", + "auto_impl", + "derive_more", + "op-alloy", + "op-revm", + "revm", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-genesis" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05864eef929c4d28895ae4b4d8ac9c6753c4df66e873b9c8fafc8089b59c1502" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "alloy-trie", + "borsh", + "serde", + "serde_with", +] + +[[package]] +name = "alloy-hardforks" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83ba208044232d14d4adbfa77e57d6329f51bc1acc21f5667bb7db72d88a0831" +dependencies = [ + "alloy-chains", + "alloy-eip2124", + "alloy-primitives", + "auto_impl", + "dyn-clone", + "serde", +] + +[[package]] +name = "alloy-json-abi" +version = "1.5.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8708475665cc00e081c085886e68eada2f64cfa08fc668213a9231655093d4de" +dependencies = [ + "alloy-primitives", + "alloy-sol-type-parser", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-json-rpc" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2dd146b3de349a6ffaa4e4e319ab3a90371fb159fb0bddeb1c7bbe8b1792eff" +dependencies = [ + "alloy-primitives", + "alloy-sol-types", + "http", + "serde", + "serde_json", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "alloy-network" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c12278ffbb8872dfba3b2f17d8ea5e8503c2df5155d9bc5ee342794bde505c3" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + "alloy-json-rpc", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rpc-types-any", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-signer", + "alloy-sol-types", + "async-trait", + "auto_impl", + "derive_more", + "futures-utils-wasm", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-network-primitives" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "833037c04917bc2031541a60e8249e4ab5500e24c637c1c62e95e963a655d66f" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-op-evm" +version = "0.26.3" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-hardforks", + "alloy-op-hardforks", + "alloy-primitives", + "auto_impl", + "op-alloy", + "op-revm", + "revm", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-op-hardforks" +version = "0.4.7" +dependencies = [ + "alloy-chains", + "alloy-hardforks", + "alloy-primitives", + "auto_impl", + "serde", +] + +[[package]] +name = "alloy-primitives" +version = "1.5.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b88cf92ed20685979ed1d8472422f0c6c2d010cec77caf63aaa7669cc1a7bc2" +dependencies = [ + "alloy-rlp", + "arbitrary", + "bytes", + "cfg-if", + "const-hex", + "derive_more", + "fixed-cache", + "foldhash 0.2.0", + "getrandom 0.3.4", + "hashbrown 0.16.1", + "indexmap 2.13.0", + "itoa", + "k256", + "keccak-asm", + "paste", + "proptest", + "proptest-derive 0.6.0", + "rand 0.9.2", + "rapidhash", + "rayon", + "ruint", + "rustc-hash", + "serde", + "sha3", +] + +[[package]] +name = "alloy-provider" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eafa840b0afe01c889a3012bb2fde770a544f74eab2e2870303eb0a5fb869c48" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-network-primitives", + "alloy-primitives", + "alloy-pubsub", + "alloy-rpc-client", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-trace", + "alloy-signer", + "alloy-sol-types", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", + "async-stream", + "async-trait", + "auto_impl", + "dashmap", + "either", + "futures", + "futures-utils-wasm", + "lru 0.16.3", + "parking_lot", + "pin-project", + "reqwest 0.12.28", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-pubsub" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57b3a3b3e4efc9f4d30e3326b6bd6811231d16ef94837e18a802b44ca55119e6" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-transport", + "auto_impl", + "bimap", + "futures", + "parking_lot", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tracing", + "wasmtimer", +] + +[[package]] +name = "alloy-rlp" +version = "0.3.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e93e50f64a77ad9c5470bf2ad0ca02f228da70c792a8f06634801e202579f35e" +dependencies = [ + "alloy-rlp-derive", + "arrayvec", + "bytes", +] + +[[package]] +name = "alloy-rlp-derive" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce8849c74c9ca0f5a03da1c865e3eb6f768df816e67dd3721a398a8a7e398011" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "alloy-rpc-client" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12768ae6303ec764905a8a7cd472aea9072f9f9c980d18151e26913da8ae0123" +dependencies = [ + "alloy-json-rpc", + "alloy-primitives", + "alloy-pubsub", + "alloy-transport", + "alloy-transport-http", + "alloy-transport-ipc", + "alloy-transport-ws", + "futures", + "pin-project", + "reqwest 0.12.28", + "serde", + "serde_json", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-rpc-types" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0622d8bcac2f16727590aa33f4c3f05ea98130e7e4b4924bce8be85da5ad0dae" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-rpc-types-admin" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c38c5ac70457ecc74e87fe1a5a19f936419224ded0eb0636241452412ca92733" +dependencies = [ + "alloy-genesis", + "alloy-primitives", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-rpc-types-anvil" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae8eb0e5d6c48941b61ab76fabab4af66f7d88309a98aa14ad3dec7911c1eba3" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + +[[package]] 
+name = "alloy-rpc-types-any" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1cf5a093e437dfd62df48e480f24e1a3807632358aad6816d7a52875f1c04aa" +dependencies = [ + "alloy-consensus-any", + "alloy-rpc-types-eth", + "alloy-serde", +] + +[[package]] +name = "alloy-rpc-types-beacon" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e07949e912479ef3b848e1cf8db54b534bdd7bc58e6c23f28ea9488960990c8c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "derive_more", + "ethereum_ssz", + "ethereum_ssz_derive", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.18", + "tree_hash", + "tree_hash_derive", +] + +[[package]] +name = "alloy-rpc-types-debug" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "925ff0f48c2169c050f0ae7a82769bdf3f45723d6742ebb6a5efb4ed2f491b26" +dependencies = [ + "alloy-primitives", + "derive_more", + "serde", + "serde_with", +] + +[[package]] +name = "alloy-rpc-types-engine" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "336ef381c7409f23c69f6e79bddc1917b6e832cff23e7a5cf84b9381d53582e6" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "arbitrary", + "derive_more", + "ethereum_ssz", + "ethereum_ssz_derive", + "jsonwebtoken", + "rand 0.8.5", + "serde", + "strum 0.27.2", +] + +[[package]] +name = "alloy-rpc-types-eth" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28e97603095020543a019ab133e0e3dc38cd0819f19f19bdd70c642404a54751" +dependencies = [ + "alloy-consensus", + "alloy-consensus-any", + "alloy-eips", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-sol-types", + "arbitrary", + "itertools 0.14.0", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.18", 
+] + +[[package]] +name = "alloy-rpc-types-mev" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2805153975e25d38e37ee100880e642d5b24e421ed3014a7d2dae1d9be77562e" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-rpc-types-trace" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1aec4e1c66505d067933ea1a949a4fb60a19c4cfc2f109aa65873ea99e62ea8" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-rpc-types-txpool" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25b73c1d6e4f1737a20d246dad5a0abd6c1b76ec4c3d153684ef8c6f1b6bb4f4" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "serde", +] + +[[package]] +name = "alloy-serde" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "946a0d413dbb5cd9adba0de5f8a1a34d5b77deda9b69c1d7feed8fc875a1aa26" +dependencies = [ + "alloy-primitives", + "arbitrary", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-signer" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f7481dc8316768f042495eaf305d450c32defbc9bce09d8bf28afcd956895bb" +dependencies = [ + "alloy-primitives", + "async-trait", + "auto_impl", + "either", + "elliptic-curve", + "k256", + "thiserror 2.0.18", +] + +[[package]] +name = "alloy-signer-local" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1259dac1f534a4c66c1d65237c89915d0010a2a91d6c3b0bada24dc5ee0fb917" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-signer", + "async-trait", + "coins-bip32", + "coins-bip39", + 
"k256", + "rand 0.8.5", + "thiserror 2.0.18", + "zeroize", +] + +[[package]] +name = "alloy-sol-macro" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5fa1ca7e617c634d2bd9fa71f9ec8e47c07106e248b9fcbd3eaddc13cabd625" +dependencies = [ + "alloy-sol-macro-expander", + "alloy-sol-macro-input", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "alloy-sol-macro-expander" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27c00c0c3a75150a9dc7c8c679ca21853a137888b4e1c5569f92d7e2b15b5102" +dependencies = [ + "alloy-sol-macro-input", + "const-hex", + "heck", + "indexmap 2.13.0", + "proc-macro-error2", + "proc-macro2", + "quote", + "sha3", + "syn 2.0.114", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-macro-input" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297db260eb4d67c105f68d6ba11b8874eec681caec5505eab8fbebee97f790bc" +dependencies = [ + "const-hex", + "dunce", + "heck", + "macro-string", + "proc-macro2", + "quote", + "syn 2.0.114", + "syn-solidity", +] + +[[package]] +name = "alloy-sol-type-parser" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b91b13181d3bcd23680fd29d7bc861d1f33fbe90fdd0af67162434aeba902d" +dependencies = [ + "serde", + "winnow", +] + +[[package]] +name = "alloy-sol-types" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc442cc2a75207b708d481314098a0f8b6f7b58e3148dd8d8cc7407b0d6f9385" +dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-macro", + "serde", +] + +[[package]] +name = "alloy-transport" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78f169b85eb9334871db986e7eaf59c58a03d86a30cc68b846573d47ed0656bb" +dependencies = [ + "alloy-json-rpc", + "auto_impl", + "base64 0.22.1", + 
"derive_more", + "futures", + "futures-utils-wasm", + "parking_lot", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tower 0.5.3", + "tracing", + "url", + "wasmtimer", +] + +[[package]] +name = "alloy-transport-http" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "019821102e70603e2c141954418255bec539ef64ac4117f8e84fb493769acf73" +dependencies = [ + "alloy-json-rpc", + "alloy-rpc-types-engine", + "alloy-transport", + "http-body-util", + "hyper", + "hyper-tls", + "hyper-util", + "jsonwebtoken", + "reqwest 0.12.28", + "serde_json", + "tower 0.5.3", + "tracing", + "url", +] + +[[package]] +name = "alloy-transport-ipc" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e574ca2f490fb5961d2cdd78188897392c46615cd88b35c202d34bbc31571a81" +dependencies = [ + "alloy-json-rpc", + "alloy-pubsub", + "alloy-transport", + "bytes", + "futures", + "interprocess", + "pin-project", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "alloy-transport-ws" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b92dea6996269769f74ae56475570e3586910661e037b7b52d50c9641f76c68f" +dependencies = [ + "alloy-pubsub", + "alloy-transport", + "futures", + "http", + "serde_json", + "tokio", + "tokio-tungstenite 0.26.2", + "tracing", + "ws_stream_wasm", +] + +[[package]] +name = "alloy-trie" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "428aa0f0e0658ff091f8f667c406e034b431cb10abd39de4f507520968acc499" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "arrayvec", + "derive_arbitrary", + "derive_more", + "nybbles", + "proptest", + "proptest-derive 0.5.1", + "serde", + "smallvec", + "tracing", +] + +[[package]] +name = "alloy-tx-macros" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"45ceac797eb8a56bdf5ab1fab353072c17d472eab87645ca847afe720db3246d" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ambassador" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8184c5d23ba3829fb1e93388d776c3469cd9f4162af65250490b4f22d3ecf614" +dependencies = [ + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + +[[package]] +name = "anstream" +version = "0.6.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.61.2", +] + +[[package]] +name = "anyhow" +version = "1.0.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" + +[[package]] +name = "aquamarine" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f50776554130342de4836ba542aa85a4ddb361690d7e8df13774d7284c3d5c2" +dependencies = [ + "include_dir", + "itertools 0.10.5", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" +dependencies = [ + "derive_arbitrary", +] + +[[package]] +name = "arbtest" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a3be567977128c0f71ad1462d9624ccda712193d124e944252f0c5789a06d46" +dependencies = [ + "arbitrary", +] + +[[package]] +name = "ark-bls12-381" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3df4dcc01ff89867cd86b0da835f23c3f02738353aaee7dde7495af71363b8d5" +dependencies = [ + "ark-ec", + "ark-ff 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", +] + +[[package]] +name = "ark-bn254" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d69eab57e8d2663efa5c63135b2af4f396d66424f88954c21104125ab6b3e6bc" +dependencies = [ + "ark-ec", + "ark-ff 0.5.0", + "ark-r1cs-std", + "ark-std 0.5.0", +] + +[[package]] +name = "ark-ec" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce" 
+dependencies = [ + "ahash", + "ark-ff 0.5.0", + "ark-poly", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "educe", + "fnv", + "hashbrown 0.15.5", + "itertools 0.13.0", + "num-bigint", + "num-integer", + "num-traits", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b3235cc41ee7a12aaaf2c575a2ad7b46713a8a50bda2fc3b003a04845c05dd6" +dependencies = [ + "ark-ff-asm 0.3.0", + "ark-ff-macros 0.3.0", + "ark-serialize 0.3.0", + "ark-std 0.3.0", + "derivative", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.3.3", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec847af850f44ad29048935519032c33da8aa03340876d351dfab5660d2966ba" +dependencies = [ + "ark-ff-asm 0.4.2", + "ark-ff-macros 0.4.2", + "ark-serialize 0.4.2", + "ark-std 0.4.0", + "derivative", + "digest 0.10.7", + "itertools 0.10.5", + "num-bigint", + "num-traits", + "paste", + "rustc_version 0.4.1", + "zeroize", +] + +[[package]] +name = "ark-ff" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a177aba0ed1e0fbb62aa9f6d0502e9b46dad8c2eab04c14258a1212d2557ea70" +dependencies = [ + "ark-ff-asm 0.5.0", + "ark-ff-macros 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "educe", + "itertools 0.13.0", + "num-bigint", + "num-traits", + "paste", + "zeroize", +] + +[[package]] +name = "ark-ff-asm" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db02d390bf6643fb404d3d22d31aee1c4bc4459600aef9113833d17e786c6e44" +dependencies = [ + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-asm" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" +dependencies = [ + "quote", + "syn 
1.0.109", +] + +[[package]] +name = "ark-ff-asm" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" +dependencies = [ + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ark-ff-macros" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db2fd794a08ccb318058009eefdf15bcaaaaf6f8161eb3345f907222bac38b20" +dependencies = [ + "num-bigint", + "num-traits", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "ark-ff-macros" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09be120733ee33f7693ceaa202ca41accd5653b779563608f1234f78ae07c4b3" +dependencies = [ + "num-bigint", + "num-traits", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ark-poly" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27" +dependencies = [ + "ahash", + "ark-ff 0.5.0", + "ark-serialize 0.5.0", + "ark-std 0.5.0", + "educe", + "fnv", + "hashbrown 0.15.5", +] + +[[package]] +name = "ark-r1cs-std" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941551ef1df4c7a401de7068758db6503598e6f01850bdb2cfdb614a1f9dbea1" +dependencies = [ + "ark-ec", + "ark-ff 0.5.0", + "ark-relations", + "ark-std 0.5.0", + "educe", + "num-bigint", + "num-integer", + "num-traits", + "tracing", +] + +[[package]] +name = "ark-relations" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ec46ddc93e7af44bcab5230937635b06fb5744464dd6a7e7b083e80ebd274384" +dependencies = [ + "ark-ff 0.5.0", + "ark-std 0.5.0", + "tracing", + "tracing-subscriber 0.2.25", +] + +[[package]] +name = "ark-serialize" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6c2b318ee6e10f8c2853e73a83adc0ccb88995aa978d8a3408d492ab2ee671" +dependencies = [ + "ark-std 0.3.0", + "digest 0.9.0", +] + +[[package]] +name = "ark-serialize" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb7b85a02b83d2f22f89bd5cac66c9c89474240cb6207cb1efc16d098e822a5" +dependencies = [ + "ark-std 0.4.0", + "digest 0.10.7", + "num-bigint", +] + +[[package]] +name = "ark-serialize" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f4d068aaf107ebcd7dfb52bc748f8030e0fc930ac8e360146ca54c1203088f7" +dependencies = [ + "ark-serialize-derive", + "ark-std 0.5.0", + "arrayvec", + "digest 0.10.7", + "num-bigint", +] + +[[package]] +name = "ark-serialize-derive" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ark-std" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1df2c09229cbc5a028b1d70e00fdb2acee28b1055dfb5ca73eea49c5a25c4e7c" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "ark-std" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" +dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "ark-std" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "246a225cc6131e9ee4f24619af0f19d67761fff15d7ccc22e42b80846e69449a" 
+dependencies = [ + "num-traits", + "rand 0.8.5", +] + +[[package]] +name = "arrayref" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" + +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +dependencies = [ + "serde", +] + +[[package]] +name = "asn1-rs" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom", + "num-traits", + "rusticata-macros", + "thiserror 2.0.18", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "asn1_der" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "155a5a185e42c6b77ac7b88a15143d930a9e9727a5b7b77eed417404ab15c247" + +[[package]] +name = "assert-json-diff" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e4f2b81832e72834d7518d8487a0396a28cc408186a2e8854c0f98011faf12" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "async-channel" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-compression" +version = "0.4.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d10e4f991a553474232bc0a31799f6d24b034a84c0971d80d2e2f78b2e576e40" +dependencies = [ + "compression-codecs", + "compression-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix 1.1.3", + "slab", + "windows-sys 0.61.2", +] + +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-object-pool" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1ac0219111eb7bb7cb76d4cf2cb50c598e7ae549091d3616f9e95442c18486f" +dependencies = [ + "async-lock", + "event-listener", +] + +[[package]] +name = "async-stream" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" +dependencies = [ + "async-stream-impl", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] 
+name = "async-trait" +version = "0.1.89" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "async_io_stream" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d7b9decdf35d8908a7e3ef02f64c5e9b1695e230154c0e8de3969142d9b94c" +dependencies = [ + "futures", + "pharos", + "rustc_version 0.4.1", +] + +[[package]] +name = "asynchronous-codec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" +dependencies = [ + "bytes", + "futures-sink", + "futures-util", + "memchr", + "pin-project-lite", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "attohttpc" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e2cdb6d5ed835199484bb92bb8b3edd526effe995c61732580439c1a67e2e9" +dependencies = [ + "base64 0.22.1", + "http", + "log", + "url", +] + +[[package]] +name = "aurora-engine-modexp" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "518bc5745a6264b5fd7b09dffb9667e400ee9e2bbe18555fac75e1fe9afa0df9" +dependencies = [ + "hex", + "num", +] + +[[package]] +name = "auto_impl" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffdcb70bdbc4d478427380519163274ac86e52916e10f0a8889adf0f96d3fee7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "aws-lc-rs" +version = "1.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b7b6141e96a8c160799cc2d5adecd5cbbe5054cb8c7c4af53da0f83bb7ad256" +dependencies = [ + "aws-lc-sys", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.37.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c34dda4df7017c8db52132f0f8a2e0f8161649d15723ed63fc00c82d0f2081a" +dependencies = [ + "cc", + "cmake", + "dunce", + "fs_extra", +] + +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "itoa", + "matchit", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper", + "tower 0.5.3", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper", + "tower-layer", + "tower-service", +] + +[[package]] +name = "backoff" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b62ddb9cb1ec0a098ad4bbf9344d0713fa193ae1a80af55febcff2627b6a00c1" +dependencies = [ + "getrandom 0.2.17", + "instant", + "rand 0.8.5", +] + +[[package]] +name = "backon" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" +dependencies = [ + 
"fastrand", + "tokio", +] + +[[package]] +name = "backtrace" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide 0.8.9", + "object", + "rustc-demangle", + "windows-link", +] + +[[package]] +name = "base-x" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base256emoji" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" +dependencies = [ + "const-str", + "match-lookup", +] + +[[package]] +name = "base64" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "base64ct" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" + +[[package]] +name = "bech32" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d86b93f97252c47b41663388e6d155714a9d0c398b99f1005cbc5f978b29f445" + +[[package]] +name = "bimap" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7" + 
+[[package]] +name = "bincode" +version = "1.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad" +dependencies = [ + "serde", +] + +[[package]] +name = "bincode" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740" +dependencies = [ + "bincode_derive", + "serde", + "unty", +] + +[[package]] +name = "bincode_derive" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09" +dependencies = [ + "virtue", +] + +[[package]] +name = "bindgen" +version = "0.71.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f58bf3d7db68cfbac37cfc485a8d711e87e064c3d0fe0435b92f7a407f9d6b3" +dependencies = [ + "bitflags 2.10.0", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.114", +] + +[[package]] +name = "bindgen" +version = "0.72.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "993776b509cfb49c750f11b8f07a46fa23e0a1386ffc01fb1e7d343efc387895" +dependencies = [ + "bitflags 2.10.0", + "cexpr", + "clang-sys", + "itertools 0.13.0", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.114", +] + +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + +[[package]] +name = "bitcoin-io" +version = "0.1.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953" + +[[package]] +name = "bitcoin_hashes" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b" +dependencies = [ + "bitcoin-io", + "hex-conservative", +] + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3" +dependencies = [ + "arbitrary", + "serde_core", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty", + "radium", + "serde", + "tap", + "wyz", +] + +[[package]] +name = "blake2" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "blake3" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2468ef7d57b3fb7e16b576e8377cdbde2320c60e1491e961d11da40fc4f02a2d" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", + "cpufeatures", +] + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "block-padding" +version = "0.3.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93" +dependencies = [ + "generic-array", +] + +[[package]] +name = "blst" +version = "0.3.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcdb4c7013139a150f9fc55d123186dbfaba0d912817466282c73ac49e71fb45" +dependencies = [ + "cc", + "glob", + "threadpool", + "zeroize", +] + +[[package]] +name = "boa_ast" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc119a5ad34c3f459062a96907f53358989b173d104258891bb74f95d93747e8" +dependencies = [ + "bitflags 2.10.0", + "boa_interner", + "boa_macros", + "boa_string", + "indexmap 2.13.0", + "num-bigint", + "rustc-hash", +] + +[[package]] +name = "boa_engine" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e637ec52ea66d76b0ca86180c259d6c7bb6e6a6e14b2f36b85099306d8b00cc3" +dependencies = [ + "aligned-vec", + "arrayvec", + "bitflags 2.10.0", + "boa_ast", + "boa_gc", + "boa_interner", + "boa_macros", + "boa_parser", + "boa_string", + "bytemuck", + "cfg-if", + "cow-utils", + "dashmap", + "dynify", + "fast-float2", + "float16", + "futures-channel", + "futures-concurrency", + "futures-lite", + "hashbrown 0.16.1", + "icu_normalizer", + "indexmap 2.13.0", + "intrusive-collections", + "itertools 0.14.0", + "num-bigint", + "num-integer", + "num-traits", + "num_enum", + "paste", + "portable-atomic", + "rand 0.9.2", + "regress", + "rustc-hash", + "ryu-js", + "serde", + "serde_json", + "small_btree", + "static_assertions", + "tag_ptr", + "tap", + "thin-vec", + "thiserror 2.0.18", + "time", + "xsum", +] + +[[package]] +name = "boa_gc" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1179f690cbfcbe5364cceee5f1cb577265bb6f07b0be6f210aabe270adcf9da" +dependencies = [ + "boa_macros", + "boa_string", + "hashbrown 0.16.1", + "thin-vec", 
+] + +[[package]] +name = "boa_interner" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9626505d33dc63d349662437297df1d3afd9d5fc4a2b3ad34e5e1ce879a78848" +dependencies = [ + "boa_gc", + "boa_macros", + "hashbrown 0.16.1", + "indexmap 2.13.0", + "once_cell", + "phf", + "rustc-hash", + "static_assertions", +] + +[[package]] +name = "boa_macros" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f36418a46544b152632c141b0a0b7a453cd69ca150caeef83aee9e2f4b48b7d" +dependencies = [ + "cfg-if", + "cow-utils", + "proc-macro2", + "quote", + "syn 2.0.114", + "synstructure", +] + +[[package]] +name = "boa_parser" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02f99bf5b684f0de946378fcfe5f38c3a0fbd51cbf83a0f39ff773a0e218541f" +dependencies = [ + "bitflags 2.10.0", + "boa_ast", + "boa_interner", + "boa_macros", + "fast-float2", + "icu_properties", + "num-bigint", + "num-traits", + "regress", + "rustc-hash", +] + +[[package]] +name = "boa_string" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ce9d7aa5563a2e14eab111e2ae1a06a69a812f6c0c3d843196c9d03fbef440" +dependencies = [ + "fast-float2", + "itoa", + "paste", + "rustc-hash", + "ryu-js", + "static_assertions", +] + +[[package]] +name = "borsh" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1da5ab77c1437701eeff7c88d968729e7766172279eab0676857b3d63af7a6f" +dependencies = [ + "borsh-derive", + "cfg_aliases", +] + +[[package]] +name = "borsh-derive" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0686c856aa6aac0c4498f936d7d6a02df690f614c03e4d906d1018062b5c5e2c" +dependencies = [ + "once_cell", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "boyer-moore-magiclen" +version = "0.2.22" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7441b4796eb8a7107d4cd99d829810be75f5573e1081c37faa0e8094169ea0d6" +dependencies = [ + "debug-helper", +] + +[[package]] +name = "brotli" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bs58" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf88ba1141d185c399bee5288d850d63b8369520c1eafc32a0430b5b6c287bf4" +dependencies = [ + "sha2", + "tinyvec", +] + +[[package]] +name = "bstr" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" +dependencies = [ + "memchr", + "regex-automata", + "serde", +] + +[[package]] +name = "buddy_system_allocator" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b672b945a3e4f4f40bfd4cd5ee07df9e796a42254ce7cd6d2599ad969244c44a" +dependencies = [ + "spin 0.10.0", +] + +[[package]] +name = "bumpalo" +version = "3.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" + +[[package]] +name = "byte-slice-cast" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7575182f7272186991736b70173b0ea045398f984bf5ebbb3804736ce1330c9d" + +[[package]] +name = "bytecheck" +version = "0.8.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0caa33a2c0edca0419d15ac723dff03f1956f7978329b1e3b5fdaaaed9d3ca8b" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "rancor", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89385e82b5d1821d2219e0b095efa2cc1f246cbf99080f3be46a1a85c0d392d9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "bytecount" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" + +[[package]] +name = "bytemuck" +version = "1.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" +dependencies = [ + "bytemuck_derive", +] + +[[package]] +name = "bytemuck_derive" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "byteorder" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" +dependencies = [ + "serde", +] + +[[package]] +name = "bzip2-sys" +version = "0.1.13+1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225bff33b2141874fe80d71e07d6eec4f85c5c216453dd96388240f96e1acc14" +dependencies = [ + "cc", + "pkg-config", +] + +[[package]] +name = "c-kzg" +version = "2.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e00bf4b112b07b505472dbefd19e37e53307e2bfed5a79e0cc161d58ccd0e687" +dependencies = [ + "arbitrary", + "blst", + "cc", + "glob", + "hex", + "libc", + "once_cell", + "serde", +] + +[[package]] +name = "camino" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48" +dependencies = [ + "serde_core", +] + +[[package]] +name = "cargo-platform" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87a0c0e6148f11f01f32650a2ea02d532b2ad4e81d8bd41e6e565b5adc5e6082" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "cargo_metadata" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef987d17b0a113becdd19d3d0022d04d7ef41f9efe4f3fb63ac44ba61df3ade9" +dependencies = [ + "camino", + "cargo-platform", + "semver 1.0.27", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "cassowary" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "castaway" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dec551ab6e7578819132c713a93c022a05d60159dc86e7a7050223577484c55a" +dependencies = [ + "rustversion", +] + +[[package]] +name = "cc" +version = "1.2.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29" +dependencies = [ + "find-msvc-tools", + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cesu8" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", +] + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chacha20" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +dependencies = [ + "cfg-if", + "cipher", + "cpufeatures", +] + +[[package]] +name = "chacha20poly1305" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" +dependencies = [ + "aead", + "chacha20", + "cipher", + "poly1305", + "zeroize", +] + +[[package]] +name = "chrono" +version = "0.4.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +dependencies = [ + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "ciborium" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" +dependencies = [ + "ciborium-io", + "ciborium-ll", + "serde", +] + +[[package]] +name = "ciborium-io" +version = "0.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" + +[[package]] +name = "ciborium-ll" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" +dependencies = [ + "ciborium-io", + "half", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", +] + +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "clap" +version = "4.5.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75ca66430e33a14957acc24c5077b503e7d374151b2b4b3a10c83b4ceb4be0e" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.5.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793207c7fa6300a0608d1080b858e5fdbe713cdc1c8db9fb17777d8a13e63df0" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.5.55" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "clap_lex" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" + +[[package]] +name = "cmake" +version = "0.1.57" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d" +dependencies = [ + "cc", +] + +[[package]] +name = "codspeed" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93f4cce9c27c49c4f101fffeebb1826f41a9df2e7498b7cd4d95c0658b796c6c" +dependencies = [ + "colored", + "libc", + "serde", + "serde_json", + "uuid", +] + +[[package]] +name = "codspeed-criterion-compat" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3c23d880a28a2aab52d38ca8481dd7a3187157d0a952196b6db1db3c8499725" +dependencies = [ + "codspeed", + "codspeed-criterion-compat-walltime", + "colored", +] + +[[package]] +name = "codspeed-criterion-compat-walltime" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b0a2f7365e347f4f22a67e9ea689bf7bc89900a354e22e26cf8a531a42c8fbb" +dependencies = [ + "anes", + "cast", + "ciborium", + "clap", + "codspeed", + "criterion-plot", + "is-terminal", + "itertools 0.10.5", + "num-traits", + "once_cell", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "coins-bip32" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2073678591747aed4000dd468b97b14d7007f7936851d3f2f01846899f5ebf08" +dependencies = [ + "bs58", + "coins-core", + "digest 0.10.7", + "hmac", + "k256", + "serde", + "sha2", + "thiserror 1.0.69", +] + +[[package]] +name = "coins-bip39" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74b169b26623ff17e9db37a539fe4f15342080df39f129ef7631df7683d6d9d4" +dependencies = [ + "bitvec", + "coins-bip32", + "hmac", + "once_cell", + "pbkdf2", + "rand 0.8.5", + "sha2", + "thiserror 1.0.69", +] + +[[package]] +name = "coins-core" +version = "0.12.0" +source 
= "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b962ad8545e43a28e14e87377812ba9ae748dd4fd963f4c10e9fcc6d13475b" +dependencies = [ + "base64 0.21.7", + "bech32", + "bs58", + "const-hex", + "digest 0.10.7", + "generic-array", + "ripemd", + "serde", + "sha2", + "sha3", + "thiserror 1.0.69", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "colored" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "117725a109d387c937a1533ce01b450cbde6b88abceea8473c4d7a85853cda3c" +dependencies = [ + "lazy_static", + "windows-sys 0.59.0", +] + +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + +[[package]] +name = "comfy-table" +version = "7.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "958c5d6ecf1f214b4c2bbbbf6ab9523a864bd136dcf71a7e8904799acfe1ad47" +dependencies = [ + "crossterm 0.29.0", + "unicode-segmentation", + "unicode-width 0.2.0", +] + +[[package]] +name = "compact_str" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32" +dependencies = [ + "castaway", + "cfg-if", + "itoa", + "rustversion", + "ryu", + "static_assertions", +] + +[[package]] +name = "compression-codecs" +version = "0.4.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00828ba6fd27b45a448e57dbfe84f1029d4c9f26b368157e9a448a5f49a2ec2a" +dependencies = [ + "brotli", + "compression-core", + "flate2", + "memchr", + "zstd", + "zstd-safe", +] + +[[package]] +name = "compression-core" +version = "0.4.31" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75984efb6ed102a0d42db99afb6c1948f0380d1d91808d5529916e6c08b49d8d" + +[[package]] +name = "concat-kdf" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d72c1252426a83be2092dd5884a5f6e3b8e7180f6891b6263d2c21b92ec8816" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "console" +version = "0.15.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "054ccb5b10f9f2cbf51eb355ca1d05c2d279ce1804688d0db74b4733a5aeafd8" +dependencies = [ + "encode_unicode", + "libc", + "once_cell", + "windows-sys 0.59.0", +] + +[[package]] +name = "const-hex" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" +dependencies = [ + "cfg-if", + "cpufeatures", + "proptest", + "serde_core", +] + +[[package]] +name = "const-oid" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" + +[[package]] +name = "const-str" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" + +[[package]] +name = "const_format" +version = "0.2.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "constant_time_eq" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" + +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + +[[package]] +name = "cow-utils" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "417bef24afe1460300965a25ff4a24b8b45ad011948302ec221e8a0a81eb2c79" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] 
+name = "crc" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d" +dependencies = [ + "crc-catalog", +] + +[[package]] +name = "crc-catalog" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "criterion-plot" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" +dependencies = [ + "cast", + "itertools 0.10.5", +] + +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "crossterm" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6" +dependencies = [ + "bitflags 2.10.0", + "crossterm_winapi", + "mio", + "parking_lot", + "rustix 0.38.44", + "signal-hook", + "signal-hook-mio", + "winapi", +] + +[[package]] +name = "crossterm" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" +dependencies = [ + "bitflags 2.10.0", + "crossterm_winapi", + "document-features", + "parking_lot", + "rustix 1.1.3", + "winapi", +] + +[[package]] +name = "crossterm_winapi" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdd7c62a3665c7f6830a51635d9ac9b23ed385797f70a83bb8bafe9c572ab2b" +dependencies = [ + "winapi", +] + +[[package]] +name = "crunchy" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fbee9c2c2f33933d720630a6a0bac33ba7053db5344fac858d4b8952d77d5" + +[[package]] +name = "crypto-bigint" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "subtle", + "zeroize", +] + +[[package]] +name = "crypto-common" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +dependencies = [ + "generic-array", + "rand_core 0.6.4", + "typenum", +] + +[[package]] +name = "ctr" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" +dependencies = [ + "cipher", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "rustc_version 0.4.1", + "subtle", + "zeroize", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core 0.20.11", + "darling_macro 0.20.11", +] + +[[package]] +name = "darling" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" +dependencies = [ + "darling_core 0.21.3", + "darling_macro 0.21.3", +] + +[[package]] +name = "darling" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d" +dependencies = [ + "darling_core 0.23.0", + "darling_macro 0.23.0", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.114", +] + +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "serde", + "strsim", + "syn 2.0.114", +] + +[[package]] +name = "darling_core" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0" +dependencies = [ + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.114", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core 0.20.11", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "darling_macro" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" +dependencies = [ + "darling_core 0.21.3", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "darling_macro" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d" +dependencies = [ + "darling_core 0.23.0", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + +[[package]] +name = "data-encoding-macro" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8142a83c17aa9461d637e649271eae18bf2edd00e91f2e105df36c3c16355bdb" +dependencies = [ + "data-encoding", + "data-encoding-macro-internal", +] + +[[package]] +name = "data-encoding-macro-internal" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ab67060fc6b8ef687992d439ca0fa36e7ed17e9a0b16b25b601e8757df720de" +dependencies = [ + "data-encoding", + "syn 1.0.109", +] + +[[package]] +name = "debug-helper" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f578e8e2c440e7297e008bb5486a3a8a194775224bbc23729b0dbdfaeebf162e" + +[[package]] +name = "delay_map" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88e365f083a5cb5972d50ce8b1b2c9f125dc5ec0f50c0248cfb568ae59efcf0b" +dependencies = [ + "futures", + "tokio", + "tokio-util", +] + +[[package]] +name = "der" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7c1832837b905bbfb5101e07cc24c8deddf52f93225eee6ead5f4d63d53ddcb" +dependencies = [ + "const-oid", + "zeroize", +] + +[[package]] +name = "der-parser" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom", + "num-bigint", + "num-traits", + "rusticata-macros", +] + +[[package]] +name = "deranged" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +dependencies = [ + "powerfmt", + "serde_core", +] + +[[package]] +name = "derivative" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "derive-where" +version 
= "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "derive_arbitrary" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "derive_builder" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn 2.0.114", +] + +[[package]] +name = "derive_more" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version 0.4.1", + "syn 2.0.114", + "unicode-xid", +] + +[[package]] +name = "diff" +version = "0.1.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" + +[[package]] +name = "digest" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" +dependencies = [ + "generic-array", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "const-oid", + "crypto-common", + "subtle", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys", +] + +[[package]] +name = "dirs-next" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" +dependencies = [ + "cfg-if", + "dirs-sys-next", +] + +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.5.2", + "windows-sys 0.61.2", +] + +[[package]] +name = "dirs-sys-next" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" +dependencies = [ + "libc", + "redox_users 0.4.6", + "winapi", +] + +[[package]] +name = "discv5" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f170f4f6ed0e1df52bf43b403899f0081917ecf1500bfe312505cc3b515a8899" +dependencies = [ + "aes", + "aes-gcm", + "alloy-rlp", + "arrayvec", + "ctr", + "delay_map", + "enr", + 
"fnv", + "futures", + "hashlink 0.9.1", + "hex", + "hkdf", + "lazy_static", + "libp2p-identity", + "lru 0.12.5", + "more-asserts", + "multiaddr", + "parking_lot", + "rand 0.8.5", + "smallvec", + "socket2 0.5.10", + "tokio", + "tracing", + "uint 0.10.0", + "zeroize", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "doctest-file" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aac81fa3e28d21450aa4d2ac065992ba96a1d7303efbce51a95f4fd175b67562" + +[[package]] +name = "document-features" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61" +dependencies = [ + "litrs", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "downcast" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + +[[package]] +name = "dtoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c3cf4824e2d5f025c7b531afcb2325364084a16806f6d47fbc1f5fbd9960590" + +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + +[[package]] +name = "dyn-clone" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" + +[[package]] +name = "dynify" 
+version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81acb15628a3e22358bf73de5e7e62360b8a777dbcb5fc9ac7dfa9ae73723747" +dependencies = [ + "dynify-macros", +] + +[[package]] +name = "dynify-macros" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ec431cd708430d5029356535259c5d645d60edd3d39c54e5eea9782d46caa7d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ecdsa" +version = "0.16.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +dependencies = [ + "der", + "digest 0.10.7", + "elliptic-curve", + "rfc6979", + "serdect", + "signature", + "spki", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "pkcs8", + "serde", + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" +dependencies = [ + "curve25519-dalek", + "ed25519", + "rand_core 0.6.4", + "serde", + "sha2", + "subtle", + "zeroize", +] + +[[package]] +name = "educe" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7bc049e1bd8cdeb31b68bbd586a9464ecf9f3944af3958a7a9d0f8b9799417" +dependencies = [ + "enum-ordinalize", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +dependencies = [ + "serde", +] + +[[package]] +name = "elliptic-curve" +version = "0.13.8" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +dependencies = [ + "base16ct", + "crypto-bigint", + "digest 0.10.7", + "ff", + "generic-array", + "group", + "pkcs8", + "rand_core 0.6.4", + "sec1", + "serdect", + "subtle", + "zeroize", +] + +[[package]] +name = "encode_unicode" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" + +[[package]] +name = "endian-type" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" + +[[package]] +name = "enr" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "851bd664a3d3a3c175cff92b2f0df02df3c541b4895d0ae307611827aae46152" +dependencies = [ + "alloy-rlp", + "base64 0.22.1", + "bytes", + "ed25519-dalek", + "hex", + "k256", + "log", + "rand 0.8.5", + "secp256k1 0.30.0", + "serde", + "sha3", + "zeroize", +] + +[[package]] +name = "enum-as-inner" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "enum-ordinalize" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a1091a7bb1f8f2c4b28f1fe2cef4980ca2d410a3d727d67ecc3178c9b0800f0" +dependencies = [ + "enum-ordinalize-derive", +] + +[[package]] +name = "enum-ordinalize-derive" +version = "4.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "equator" +version = "0.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4711b213838dfee0117e3be6ac926007d7f433d7bbe33595975d4190cb07e6fc" +dependencies = [ + "equator-macro", +] + +[[package]] +name = "equator-macro" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44f23cf4b44bfce11a86ace86f8a73ffdec849c9fd00a386a53d278bd9e81fb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "ethereum_hashing" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c853bd72c9e5787f8aafc3df2907c2ed03cff3150c3acd94e2e53a98ab70a8ab" +dependencies = [ + "cpufeatures", + "ring", + "sha2", +] + +[[package]] +name = "ethereum_serde_utils" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3dc1355dbb41fbbd34ec28d4fb2a57d9a70c67ac3c19f6a5ca4d4a176b9e997a" +dependencies = [ + "alloy-primitives", + "hex", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "ethereum_ssz" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dcddb2554d19cde19b099fadddde576929d7a4d0c1cd3512d1fd95cf174375c" +dependencies = [ + "alloy-primitives", + "ethereum_serde_utils", + "itertools 0.13.0", + "serde", + "serde_derive", + "smallvec", + "typenum", +] + +[[package]] +name = "ethereum_ssz_derive" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a657b6b3b7e153637dc6bdc6566ad9279d9ee11a15b12cfb24a2e04360637e9f" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" +dependencies = [ + "event-listener", + "pin-project-lite", +] + +[[package]] +name = "example-custom-node" +version = "0.0.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-genesis", + "alloy-network", + "alloy-op-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", + "async-trait", + "derive_more", + "eyre", + "jsonrpsee", + "modular-bitfield", + "op-alloy-consensus", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", + "op-revm", + "reth-codecs", + "reth-db-api", + "reth-engine-primitives", + "reth-ethereum", + "reth-network-peers", + "reth-node-builder", + "reth-op", + "reth-optimism-flashblocks", + "reth-optimism-forks", + "reth-payload-builder", + "reth-rpc-api", + "reth-rpc-engine-api", + "revm", + "revm-primitives", + "serde", + "thiserror 2.0.18", +] + +[[package]] +name = "example-discovery" +version = "0.0.0" +dependencies = [ + "anyhow", + "clap", + "discv5", + "kona-cli", + "kona-disc", + "tokio", + "tracing", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "example-engine-api-access" +version = "0.0.0" +dependencies = [ + "reth-db", + "reth-node-builder", + "reth-optimism-chainspec", + "reth-optimism-node", + "tokio", +] + +[[package]] +name = "example-exex-hello-world" +version = "0.0.0" +dependencies = [ + 
"clap", + "eyre", + "futures", + "reth-ethereum", + "reth-op", + "reth-tracing", + "tokio", +] + +[[package]] +name = "example-gossip" +version = "0.0.0" +dependencies = [ + "anyhow", + "async-trait", + "clap", + "discv5", + "kona-cli", + "kona-disc", + "kona-node-service", + "kona-registry", + "libp2p", + "op-alloy-rpc-types-engine", + "tokio", + "tokio-util", + "tracing", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "example-op-db-access" +version = "0.0.0" +dependencies = [ + "eyre", + "reth-op", +] + +[[package]] +name = "execution-fixture" +version = "0.0.0" +dependencies = [ + "anyhow", + "clap", + "kona-cli", + "kona-executor", + "tokio", + "tracing", + "tracing-subscriber 0.3.22", + "url", +] + +[[package]] +name = "eyre" +version = "0.6.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" +dependencies = [ + "indenter", + "once_cell", +] + +[[package]] +name = "fast-float2" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8eb564c5c7423d25c886fb561d1e4ee69f72354d16918afa32c08811f6b6a55" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "fastrlp" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139834ddba373bbdd213dffe02c8d110508dcf1726c2be27e8d1f7d7e1856418" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", +] + +[[package]] +name = "fastrlp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce8dba4714ef14b8274c371879b175aa55b16b30f269663f19d576f380018dc4" +dependencies = [ + "arrayvec", + "auto_impl", + "bytes", +] + +[[package]] +name = "fdlimit" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "e182f7dbc2ef73d9ef67351c5fbbea084729c48362d3ce9dd44c28e32e277fe5" +dependencies = [ + "libc", + "thiserror 1.0.69", +] + +[[package]] +name = "ff" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0b50bfb653653f9ca9095b427bed08ab8d75a137839d9ad64eb11810d5b6393" +dependencies = [ + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "filetime" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" +dependencies = [ + "cfg-if", + "libc", + "libredox", +] + +[[package]] +name = "find-msvc-tools" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" + +[[package]] +name = "fixed-cache" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0aaafa7294e9617eb29e5c684a3af33324ef512a1bf596af2d1938a03798da29" +dependencies = [ + "equivalent", + "typeid", +] + +[[package]] +name = "fixed-hash" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" +dependencies = [ + "byteorder", + "rand 0.8.5", + "rustc-hex", + "static_assertions", +] + +[[package]] +name = "fixed-map" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "86ed19add84e8cb9e8cc5f7074de0324247149ffef0b851e215fb0edc50c229b" +dependencies = [ + "fixed-map-derive", + "serde", +] + +[[package]] +name = "fixed-map-derive" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6dc7a9cb3326bafb80642c5ce99b39a2c0702d4bfa8ee8a3e773791a6cbe2407" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + +[[package]] +name = "flate2" +version = "1.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" +dependencies = [ + "crc32fast", + "miniz_oxide 0.8.9", +] + +[[package]] +name = "float16" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bffafbd079d520191c7c2779ae9cf757601266cf4167d3f659ff09617ff8483" +dependencies = [ + "cfg-if", + "rustc_version 0.2.3", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "fragile" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dd6caf6059519a65843af8fe2a3ae298b14b80179855aeb4adc2c1934ee619" + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "fsevent-sys" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2" +dependencies = [ + "libc", +] + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-bounded" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f328e7fb845fc832912fb6a34f40cf6d1888c92f974d1893a54e97b5ff542e" +dependencies = [ + "futures-timer", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-concurrency" +version = "7.7.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "175cd8cca9e1d45b87f18ffa75088f2099e3c4fe5e2f83e42de112560bea8ea6" +dependencies = [ + "fixedbitset", + "futures-core", + "futures-lite", + "pin-project", + "smallvec", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-executor" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + +[[package]] +name = "futures-io" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" + +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "fastrand", + "futures-core", + "futures-io", + "parking", + "pin-project-lite", +] + +[[package]] +name = "futures-macro" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "futures-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" +dependencies = [ + "futures-io", + "rustls", + "rustls-pki-types", +] + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + 
+[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-timer" +version = "3.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" +dependencies = [ + "gloo-timers", + "send_wrapper 0.4.0", +] + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "futures-utils-wasm" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42012b0f064e01aa58b545fe3727f90f7dd4020f4a3ea735b50344965f5a57e9" + +[[package]] +name = "generator" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52f04ae4152da20c76fe800fa48659201d5cf627c5149ca0b707b69d7eef6cf9" +dependencies = [ + "cc", + "cfg-if", + "libc", + "log", + "rustversion", + "windows-link", + "windows-result 0.4.1", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", + "zeroize", +] + +[[package]] +name = "getrandom" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff2abc00be7fca6ebc474524697ae276ad847ad0a6b3faa4bcb027e9a4614ad0" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.4" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasip2", + "wasm-bindgen", +] + +[[package]] +name = "ghash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0d8a4362ccb29cb0b265253fb0a2728f592895ee6854fd9bc13f2ffda266ff1" +dependencies = [ + "opaque-debug", + "polyval", +] + +[[package]] +name = "gimli" +version = "0.32.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" + +[[package]] +name = "git2" +version = "0.20.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b88256088d75a56f8ecfa070513a775dd9107f6530ef14919dac831af9cfe2b" +dependencies = [ + "bitflags 2.10.0", + "libc", + "libgit2-sys", + "log", + "url", +] + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "gloo-net" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "http", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror 1.0.69", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "gmp-mpfr-sys" +version = "1.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60f8970a75c006bb2f8ae79c6768a116dd215fa8346a87aed99bf9d82ca43394" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "group" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" +dependencies = [ + "ff", + "rand_core 0.6.4", + "subtle", +] + +[[package]] +name = "h2" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f44da3a8150a6703ed5d34e164b875fd14c2cdab9af1252a9a1020bde2bdc54" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap 2.13.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "half" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea2d84b969582b4b1864a92dc5d27cd2b77b622a8d79306834f1be5ba20d84b" +dependencies = [ + "cfg-if", + "crunchy", + "zerocopy", +] + +[[package]] +name = "hash-db" +version = "0.15.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23bd4e7b5eda0d0f3a307e8b381fdc8ba9000f26fbe912250c0a4cc3956364a" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" + +[[package]] +name = "hashbrown" 
+version = "0.14.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.1.5", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", + "rayon", + "serde", + "serde_core", +] + +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + +[[package]] +name = "hashlink" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "hdrhistogram" +version = "7.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" +dependencies = [ + "byteorder", + "num-traits", +] + +[[package]] +name = "headers" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" +dependencies = [ + "base64 0.22.1", + "bytes", + "headers-core", + "http", + "httpdate", + "mime", + "sha1", +] + +[[package]] +name = "headers-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +dependencies = [ + "http", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + +[[package]] +name = "hex" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + +[[package]] +name = "hex-conservative" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "hex_fmt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" + +[[package]] +name = "hickory-proto" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" +dependencies = [ + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna", + "ipnet", + "once_cell", + "rand 0.9.2", + "ring", + "serde", + "socket2 0.5.10", + "thiserror 2.0.18", + "tinyvec", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "hickory-resolver" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a" +dependencies = [ + "cfg-if", + "futures-util", + "hickory-proto", + "ipconfig", + "moka", + "once_cell", + "parking_lot", + "rand 0.9.2", + "resolv-conf", + 
"serde", + "smallvec", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "hkdf" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" +dependencies = [ + "hmac", +] + +[[package]] +name = "hmac" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "http" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a" +dependencies = [ + "bytes", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "httpmock" +version = "0.8.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "511f510e9b1888d67f10bab4397f8b019d2a9b249a2c10acbce2d705b1b32e26" +dependencies = [ + "assert-json-diff", + "async-object-pool", + "async-trait", + "base64 0.22.1", + "bytes", + "crossbeam-utils", + "form_urlencoded", + "futures-timer", + "futures-util", + "headers", + "http", + "http-body-util", + "hyper", + "hyper-util", + "path-tree", + "regex", + "serde", + "serde_json", + "serde_regex", + "similar", + "stringmetrics", + "tabwriter", + "thiserror 2.0.18", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "human_bytes" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f255a4535024abf7640cb288260811fc14794f62b063652ed349f9a6c2348e" + +[[package]] +name = "humantime" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" + +[[package]] +name = "humantime-serde" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" +dependencies = [ + "humantime", + "serde", +] + +[[package]] +name = "hyper" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11" +dependencies = [ + "atomic-waker", + "bytes", + "futures-channel", + "futures-core", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "pin-utils", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http", + "hyper", + "hyper-util", + "log", + "rustls", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + 
"tokio-rustls", + "tower-service", + "webpki-roots 1.0.5", +] + +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.2", + "system-configuration 0.7.0", + "tokio", + "tower-layer", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core 0.62.2", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6" +dependencies = [ + "displaydoc", + "litemap", + "serde", + "tinystr", + "writeable", + "zerovec", +] + +[[package]] +name = "icu_normalizer" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b24a59706036ba941c9476a55cd57b82b77f38a3c667d637ee7cabbc85eaedc" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "utf16_iter", + "write16", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5a97b8ac6235e69506e8dacfb2adf38461d2ce6d3e9bd9c94c4cbc3cd4400a4" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614" +dependencies = [ + "displaydoc", + "icu_locale_core", + "serde", + "stable_deref_trait", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] 
+ +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + "icu_normalizer", + "icu_properties", +] + +[[package]] +name = "if-addrs" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cabb0019d51a643781ff15c9c8a3e5dedc365c47211270f4e8f82812fedd8f0a" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "if-addrs" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf39cc0423ee66021dc5eccface85580e4a001e0c5288bae8bea7ecb69225e90" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "if-watch" +version = "3.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf9d64cfcf380606e64f9a0bcf493616b65331199f984151a6fa11a7b3cde38" +dependencies = [ + "async-io", + "core-foundation 0.9.4", + "fnv", + "futures", + "if-addrs 0.10.2", + "ipnet", + "log", + "netlink-packet-core", + "netlink-packet-route", + "netlink-proto", + "netlink-sys", + "rtnetlink", + "system-configuration 0.6.1", + "tokio", + "windows 0.53.0", +] + +[[package]] +name = "igd-next" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "516893339c97f6011282d5825ac94fc1c7aad5cad26bdc2d0cee068c0bf97f97" +dependencies = [ + "async-trait", + "attohttpc", + "bytes", + "futures", + 
"http", + "http-body-util", + "hyper", + "hyper-util", + "log", + "rand 0.9.2", + "tokio", + "url", + "xmltree", +] + +[[package]] +name = "impl-codec" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba6a270039626615617f3f36d15fc827041df3b78c439da2cadfa47455a77f2f" +dependencies = [ + "parity-scale-codec", +] + +[[package]] +name = "impl-trait-for-tuples" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0eb5a3343abf848c0984fe4604b2b105da9539376e24fc0a3b0007411ae4fd9" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "include_dir" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" +dependencies = [ + "include_dir_macros", +] + +[[package]] +name = "include_dir_macros" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "indenter" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "964de6e86d545b246d84badc0fef527924ace5134f30641c203ef52ba83f58d5" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", + "serde", +] + +[[package]] +name = "indexmap" +version = "2.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7714e70437a7dc3ac8eb7e6f8df75fd8eb422675fc7678aff7364301092b1017" +dependencies = [ + "arbitrary", + "equivalent", + "hashbrown 0.16.1", + "rayon", + "serde", + "serde_core", +] + +[[package]] +name = "indoc" +version = "2.0.7" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706" +dependencies = [ + "rustversion", +] + +[[package]] +name = "inotify" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" +dependencies = [ + "bitflags 2.10.0", + "inotify-sys", + "libc", +] + +[[package]] +name = "inotify-sys" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb" +dependencies = [ + "libc", +] + +[[package]] +name = "inout" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01" +dependencies = [ + "block-padding", + "generic-array", +] + +[[package]] +name = "instability" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "357b7205c6cd18dd2c86ed312d1e70add149aea98e7ef72b9fdf0270e555c11d" +dependencies = [ + "darling 0.23.0", + "indoc", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "interprocess" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d941b405bd2322993887859a8ee6ac9134945a24ec5ec763a8a962fc64dfec2d" +dependencies = [ + "doctest-file", + "futures-core", + "libc", + "recvmsg", + "tokio", + "widestring", + "windows-sys 0.52.0", +] + +[[package]] +name = "intrusive-collections" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "189d0897e4cbe8c75efedf3502c18c887b05046e59d28404d4d8e46cbc4d1e86" 
+dependencies = [ + "memoffset", +] + +[[package]] +name = "ipconfig" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" +dependencies = [ + "socket2 0.5.10", + "widestring", + "windows-sys 0.48.0", + "winreg", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +dependencies = [ + "serde", +] + +[[package]] +name = "iri-string" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is-terminal" +version = "0.4.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285" +dependencies = [ + "either", +] + +[[package]] 
+name = "itoa" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2" + +[[package]] +name = "jemalloc_pprof" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74ff642505c7ce8d31c0d43ec0e235c6fd4585d9b8172d8f9dd04d36590200b5" +dependencies = [ + "anyhow", + "libc", + "mappings", + "once_cell", + "pprof_util", + "tempfile", + "tikv-jemalloc-ctl", + "tokio", + "tracing", +] + +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + +[[package]] +name = "jobserver" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" +dependencies = [ + "getrandom 0.3.4", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "jsonrpsee" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tokio", + 
"tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" +dependencies = [ + "base64 0.22.1", + "futures-channel", + "futures-util", + "gloo-net", + "http", + "jsonrpsee-core", + "pin-project", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier", + "soketto", + "thiserror 2.0.18", + "tokio", + "tokio-rustls", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" +dependencies = [ + "async-trait", + "bytes", + "futures-timer", + "futures-util", + "http", + "http-body", + "http-body-util", + "jsonrpsee-types", + "parking_lot", + "pin-project", + "rand 0.9.2", + "rustc-hash", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" +dependencies = [ + "base64 0.22.1", + "http-body", + "hyper", + "hyper-rustls", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tower 0.5.3", + "url", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da3f8ab5ce1bb124b6d082e62dffe997578ceaf0aeb9f3174a214589dc00f07" +dependencies = [ + "heck", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.26.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" +dependencies = [ + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "route-recognizer", + "serde", + "serde_json", + "soketto", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tower 0.5.3", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" +dependencies = [ + "http", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower 0.5.3", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" +dependencies = [ + "http", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower 0.5.3", + "url", +] + +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem", + "ring", + "serde", + "serde_json", + "simple_asn1", +] + +[[package]] +name = "k256" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6e3919bbaa2945715f0bb6d3934a173d1e9a59ac23767fbaaef277265a7411b" +dependencies = [ + "cfg-if", + "ecdsa", + "elliptic-curve", + "once_cell", 
+ "serdect", + "sha2", + "signature", +] + +[[package]] +name = "keccak" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" +dependencies = [ + "cpufeatures", +] + +[[package]] +name = "keccak-asm" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b646a74e746cd25045aa0fd42f4f7f78aa6d119380182c7e63a5593c4ab8df6f" +dependencies = [ + "digest 0.10.7", + "sha3-asm", +] + +[[package]] +name = "kona-cli" +version = "0.3.2" +dependencies = [ + "alloy-chains", + "alloy-primitives", + "clap", + "kona-genesis", + "kona-registry", + "libc", + "libp2p", + "metrics-exporter-prometheus 0.18.1", + "metrics-process", + "rstest", + "serde", + "thiserror 2.0.18", + "tracing", + "tracing-appender", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "kona-client" +version = "1.0.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "async-trait", + "cfg-if", + "kona-derive", + "kona-driver", + "kona-executor", + "kona-genesis", + "kona-interop", + "kona-mpt", + "kona-preimage", + "kona-proof", + "kona-proof-interop", + "kona-protocol", + "kona-registry", + "kona-std-fpvm", + "kona-std-fpvm-proc", + "lru 0.16.3", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "op-revm", + "revm", + "serde", + "serde_json", + "sha2", + "spin 0.10.0", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-derive" +version = "0.4.5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "async-trait", + "kona-genesis", + "kona-hardforks", + "kona-macros", + "kona-protocol", + "kona-registry", + "metrics", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "proptest", + "serde", + "serde_json", + "spin 0.10.0", + "thiserror 2.0.18", + 
"tokio", + "tracing", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "kona-disc" +version = "0.1.2" +dependencies = [ + "alloy-rlp", + "backon", + "derive_more", + "discv5", + "kona-cli", + "kona-genesis", + "kona-macros", + "kona-peers", + "libp2p", + "metrics", + "rand 0.9.2", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-driver" +version = "0.4.0" +dependencies = [ + "alloy-consensus", + "alloy-evm", + "alloy-primitives", + "alloy-rlp", + "async-trait", + "kona-derive", + "kona-executor", + "kona-genesis", + "kona-protocol", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "spin 0.10.0", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "kona-engine" +version = "0.1.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-transport", + "alloy-transport-http", + "arbitrary", + "async-trait", + "derive_more", + "http", + "http-body-util", + "jsonrpsee-types", + "kona-genesis", + "kona-macros", + "kona-protocol", + "kona-registry", + "metrics", + "metrics-exporter-prometheus 0.18.1", + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-provider", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", + "parking_lot", + "rand 0.9.2", + "rollup-boost", + "rollup-boost-types", + "rstest", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tower 0.5.3", + "tracing", + "url", +] + +[[package]] +name = "kona-executor" +version = "0.4.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-evm", + "alloy-op-hardforks", + "alloy-primitives", + "alloy-provider", + "alloy-rlp", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "alloy-transport", + "alloy-transport-http", + "alloy-trie", + "kona-genesis", + "kona-mpt", + "kona-protocol", + "kona-registry", + 
"op-alloy-consensus", + "op-alloy-rpc-types-engine", + "op-revm", + "rand 0.9.2", + "revm", + "rocksdb", + "rstest", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-genesis" +version = "0.4.5" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-hardforks", + "alloy-op-hardforks", + "alloy-primitives", + "alloy-sol-types", + "arbitrary", + "derive_more", + "op-revm", + "rand 0.9.2", + "serde", + "serde_json", + "serde_repr", + "tabled", + "thiserror 2.0.18", + "toml 0.9.11+spec-1.1.0", +] + +[[package]] +name = "kona-gossip" +version = "0.1.2" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "arbitrary", + "derive_more", + "discv5", + "futures", + "ipnet", + "kona-disc", + "kona-genesis", + "kona-macros", + "kona-peers", + "lazy_static", + "libp2p", + "libp2p-identity", + "libp2p-stream", + "metrics", + "multihash", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "openssl", + "rand 0.9.2", + "serde", + "serde_json", + "serde_repr", + "snap", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-hardforks" +version = "0.4.5" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "kona-protocol", + "op-alloy-consensus", + "op-revm", + "revm", +] + +[[package]] +name = "kona-host" +version = "1.0.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-op-evm", + "alloy-primitives", + "alloy-provider", + "alloy-rlp", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-rpc-types-beacon", + "alloy-serde", + "alloy-transport", + "alloy-transport-http", + "anyhow", + "ark-ff 0.5.0", + "async-trait", + "clap", + "kona-cli", + "kona-client", + "kona-derive", + "kona-driver", + "kona-executor", + "kona-genesis", + "kona-mpt", + "kona-preimage", + "kona-proof", + "kona-proof-interop", + "kona-protocol", + 
"kona-providers-alloy", + "kona-registry", + "kona-std-fpvm", + "op-alloy-network", + "op-alloy-rpc-types-engine", + "proptest", + "revm", + "rocksdb", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tracing", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "kona-interop" +version = "0.4.5" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-serde", + "alloy-sol-types", + "arbitrary", + "async-trait", + "derive_more", + "kona-genesis", + "kona-protocol", + "kona-registry", + "op-alloy-consensus", + "rand 0.9.2", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-macros" +version = "0.1.2" + +[[package]] +name = "kona-mpt" +version = "0.3.0" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-provider", + "alloy-rlp", + "alloy-rpc-types", + "alloy-transport-http", + "alloy-trie", + "codspeed-criterion-compat", + "op-alloy-rpc-types-engine", + "proptest", + "rand 0.9.2", + "reqwest 0.13.1", + "serde", + "thiserror 2.0.18", + "tokio", +] + +[[package]] +name = "kona-node" +version = "1.0.0-rc.1" +dependencies = [ + "alloy-chains", + "alloy-genesis", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-engine", + "alloy-signer", + "alloy-signer-local", + "alloy-transport", + "alloy-transport-http", + "anyhow", + "backon", + "clap", + "derive_more", + "dirs", + "discv5", + "futures", + "http", + "jsonrpsee", + "kona-cli", + "kona-derive", + "kona-disc", + "kona-engine", + "kona-genesis", + "kona-gossip", + "kona-node-service", + "kona-peers", + "kona-protocol", + "kona-providers-alloy", + "kona-registry", + "kona-rpc", + "kona-sources", + "libp2p", + "metrics", + "op-alloy-network", + "op-alloy-provider", + "op-alloy-rpc-types-engine", + "reqwest 0.13.1", + "rollup-boost", + "rstest", + "serde_json", + "strum 0.27.2", + "tabled", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + 
"tracing", + "tracing-subscriber 0.3.22", + "url", + "vergen", + "vergen-git2", +] + +[[package]] +name = "kona-node-service" +version = "0.1.3" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-signer", + "alloy-signer-local", + "alloy-transport", + "alloy-transport-http", + "anyhow", + "arbitrary", + "async-stream", + "async-trait", + "backon", + "derive_more", + "discv5", + "futures", + "http", + "http-body-util", + "jsonrpsee", + "kona-derive", + "kona-disc", + "kona-engine", + "kona-genesis", + "kona-gossip", + "kona-macros", + "kona-peers", + "kona-protocol", + "kona-providers-alloy", + "kona-rpc", + "kona-sources", + "libp2p", + "libp2p-stream", + "metrics", + "mockall", + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-provider", + "op-alloy-rpc-types-engine", + "rand 0.9.2", + "rollup-boost", + "rstest", + "strum 0.27.2", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tower 0.5.3", + "tracing", + "url", +] + +[[package]] +name = "kona-peers" +version = "0.1.2" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "arbtest", + "derive_more", + "dirs", + "discv5", + "kona-genesis", + "kona-registry", + "lazy_static", + "libp2p", + "libp2p-identity", + "multihash", + "secp256k1 0.31.1", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tracing", + "unsigned-varint 0.8.0", + "url", +] + +[[package]] +name = "kona-preimage" +version = "0.3.0" +dependencies = [ + "alloy-primitives", + "async-channel", + "async-trait", + "rkyv", + "serde", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-proof" +version = "0.3.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-trie", + "ark-bls12-381", + "ark-ff 0.5.0", + "async-trait", 
+ "c-kzg", + "kona-derive", + "kona-driver", + "kona-executor", + "kona-genesis", + "kona-mpt", + "kona-preimage", + "kona-protocol", + "kona-registry", + "lazy_static", + "lru 0.16.3", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "op-revm", + "rand 0.9.2", + "rayon", + "rstest", + "serde", + "serde_json", + "spin 0.10.0", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-proof-interop" +version = "0.2.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-op-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "arbitrary", + "async-trait", + "kona-executor", + "kona-genesis", + "kona-interop", + "kona-mpt", + "kona-preimage", + "kona-proof", + "kona-protocol", + "kona-registry", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "op-revm", + "rand 0.9.2", + "revm", + "serde", + "serde_json", + "spin 0.10.0", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "kona-protocol" +version = "0.4.5" +dependencies = [ + "alloc-no-stdlib", + "alloy-consensus", + "alloy-eips", + "alloy-hardforks", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-sol-types", + "ambassador", + "arbitrary", + "async-trait", + "brotli", + "derive_more", + "kona-genesis", + "kona-registry", + "miniz_oxide 0.9.0", + "op-alloy-consensus", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", + "proptest", + "rand 0.9.2", + "rstest", + "serde", + "serde_json", + "spin 0.10.0", + "thiserror 2.0.18", + "tokio", + "tracing", + "tracing-subscriber 0.3.22", + "unsigned-varint 0.8.0", +] + +[[package]] +name = "kona-providers-alloy" +version = "0.3.3" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types-beacon", + "alloy-rpc-types-engine", + "alloy-serde", + "alloy-transport", + "alloy-transport-http", + "async-trait", + "c-kzg", + "http-body-util", + 
"httpmock", + "kona-derive", + "kona-genesis", + "kona-macros", + "kona-protocol", + "lru 0.16.3", + "metrics", + "op-alloy-consensus", + "op-alloy-network", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tower 0.5.3", +] + +[[package]] +name = "kona-providers-local" +version = "0.1.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "async-trait", + "kona-derive", + "kona-genesis", + "kona-macros", + "kona-protocol", + "lru 0.16.3", + "metrics", + "op-alloy-consensus", + "rstest", + "thiserror 2.0.18", + "tokio", +] + +[[package]] +name = "kona-registry" +version = "0.4.5" +dependencies = [ + "alloy-chains", + "alloy-eips", + "alloy-genesis", + "alloy-hardforks", + "alloy-op-hardforks", + "alloy-primitives", + "kona-genesis", + "lazy_static", + "serde", + "serde_json", + "tabled", + "toml 0.9.11+spec-1.1.0", +] + +[[package]] +name = "kona-rpc" +version = "0.3.2" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "async-trait", + "backon", + "derive_more", + "getrandom 0.3.4", + "ipnet", + "jsonrpsee", + "kona-engine", + "kona-genesis", + "kona-gossip", + "kona-macros", + "kona-protocol", + "libp2p", + "metrics", + "op-alloy-consensus", + "op-alloy-rpc-jsonrpsee", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", + "rollup-boost", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-serde" +version = "0.2.2" +dependencies = [ + "alloy-primitives", + "serde", + "serde_json", + "toml 0.9.11+spec-1.1.0", +] + +[[package]] +name = "kona-sources" +version = "0.1.2" +dependencies = [ + "alloy-primitives", + "alloy-rpc-client", + "alloy-signer", + "alloy-signer-local", + "alloy-transport", + "alloy-transport-http", + "derive_more", + "notify", + "op-alloy-rpc-types-engine", + "rustls", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "kona-std-fpvm" 
+version = "0.2.0" +dependencies = [ + "async-trait", + "buddy_system_allocator", + "cfg-if", + "kona-preimage", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "kona-std-fpvm-proc" +version = "0.2.0" +dependencies = [ + "cfg-if", + "kona-std-fpvm", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "kona-supervisor" +version = "0.1.0" +dependencies = [ + "alloy-network", + "alloy-provider", + "alloy-rpc-types-engine", + "anyhow", + "clap", + "glob", + "kona-cli", + "kona-genesis", + "kona-interop", + "kona-protocol", + "kona-registry", + "kona-supervisor-core", + "kona-supervisor-service", + "metrics", + "serde", + "serde_json", + "tempfile", + "tokio", + "tracing", + "tracing-subscriber 0.3.22", + "vergen", + "vergen-git2", +] + +[[package]] +name = "kona-supervisor-core" +version = "0.1.0" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-transport", + "async-trait", + "auto_impl", + "derive_more", + "futures", + "jsonrpsee", + "kona-genesis", + "kona-interop", + "kona-protocol", + "kona-supervisor-metrics", + "kona-supervisor-rpc", + "kona-supervisor-storage", + "kona-supervisor-types", + "metrics", + "mockall", + "op-alloy-consensus", + "op-alloy-rpc-types", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "kona-supervisor-metrics" +version = "0.1.0" + +[[package]] +name = "kona-supervisor-rpc" +version = "0.1.1" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types-engine", + "alloy-serde", + "async-trait", + "derive_more", + "jsonrpsee", + "kona-interop", + "kona-protocol", + "kona-supervisor-types", + "op-alloy-consensus", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", +] + +[[package]] +name = "kona-supervisor-service" +version = "0.1.0" 
+dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-client", + "alloy-rpc-types-eth", + "anyhow", + "async-trait", + "derive_more", + "futures", + "jsonrpsee", + "kona-genesis", + "kona-interop", + "kona-protocol", + "kona-supervisor-core", + "kona-supervisor-metrics", + "kona-supervisor-rpc", + "kona-supervisor-storage", + "kona-supervisor-types", + "mockall", + "thiserror 2.0.18", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "kona-supervisor-storage" +version = "0.1.0" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "bytes", + "derive_more", + "eyre", + "kona-cli", + "kona-interop", + "kona-protocol", + "kona-supervisor-metrics", + "kona-supervisor-types", + "metrics", + "modular-bitfield", + "op-alloy-consensus", + "reth-codecs", + "reth-db", + "reth-db-api", + "reth-primitives-traits", + "serde", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "kona-supervisor-types" +version = "0.1.1" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-serde", + "derive_more", + "kona-interop", + "kona-protocol", + "op-alloy-consensus", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "kqueue" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" +dependencies = [ + "kqueue-sys", + "libc", +] + +[[package]] +name = "kqueue-sys" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" +dependencies = [ + "bitflags 1.3.2", + "libc", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +dependencies = [ + "spin 0.9.8", +] + +[[package]] +name = "libc" +version = "0.2.180" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" + +[[package]] +name = "libgit2-sys" +version = "0.18.3+1.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9b3acc4b91781bb0b3386669d325163746af5f6e4f73e6d2d630e09a35f3487" +dependencies = [ + "cc", + "libc", + "libz-sys", + "pkg-config", +] + +[[package]] +name = "libloading" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" +dependencies = [ + "cfg-if", + "windows-link", +] + +[[package]] +name = "libm" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" + +[[package]] +name = "libp2p" +version = "0.56.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce71348bf5838e46449ae240631117b487073d5f347c06d434caddcb91dceb5a" +dependencies = [ + "bytes", + "either", + "futures", + "futures-timer", + "getrandom 0.2.17", + "libp2p-allow-block-list", + "libp2p-connection-limits", + "libp2p-core", + "libp2p-dns", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-mdns", + "libp2p-metrics", + "libp2p-noise", + "libp2p-ping", + "libp2p-quic", + "libp2p-swarm", + "libp2p-tcp", + "libp2p-upnp", + "libp2p-yamux", + "multiaddr", + "pin-project", + "rw-stream-sink", + "thiserror 2.0.18", +] + +[[package]] +name = "libp2p-allow-block-list" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d16ccf824ee859ca83df301e1c0205270206223fd4b1f2e512a693e1912a8f4a" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", +] + +[[package]] +name = "libp2p-connection-limits" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a18b8b607cf3bfa2f8c57db9c7d8569a315d5cc0a282e6bfd5ebfc0a9840b2a0" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", +] + +[[package]] +name = "libp2p-core" +version = "0.43.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "249128cd37a2199aff30a7675dffa51caf073b51aa612d2f544b19932b9aebca" +dependencies = [ + "either", + "fnv", + "futures", + "futures-timer", + "libp2p-identity", + "multiaddr", + "multihash", + "multistream-select", + "parking_lot", + "pin-project", + "quick-protobuf", + "rand 0.8.5", + "rw-stream-sink", + "thiserror 2.0.18", + "tracing", + "unsigned-varint 0.8.0", + "web-time", +] + +[[package]] +name = "libp2p-dns" +version = "0.44.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b770c1c8476736ca98c578cba4b505104ff8e842c2876b528925f9766379f9a" +dependencies = [ + "async-trait", + "futures", + "hickory-resolver", + "libp2p-core", + "libp2p-identity", + "parking_lot", + "smallvec", + "tracing", +] + +[[package]] +name = "libp2p-gossipsub" +version = "0.49.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7f58e37d8d6848e5c4c9e3c35c6f61133235bff2960c9c00a663b0849301221" +dependencies = [ + "async-channel", + "asynchronous-codec", + "base64 0.22.1", + "byteorder", + "bytes", + "either", + "fnv", + "futures", + "futures-timer", + "getrandom 0.2.17", + "hashlink 0.9.1", + "hex_fmt", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "quick-protobuf", + "quick-protobuf-codec", + "rand 0.8.5", + "regex", + "sha2", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-identify" +version = "0.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ab792a8b68fdef443a62155b01970c81c3aadab5e659621b063ef252a8e65e8" +dependencies = [ + "asynchronous-codec", + "either", + "futures", + "futures-bounded", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "quick-protobuf", + 
"quick-protobuf-codec", + "smallvec", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "libp2p-identity" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c7892c221730ba55f7196e98b0b8ba5e04b4155651736036628e9f73ed6fc3" +dependencies = [ + "asn1_der", + "bs58", + "ed25519-dalek", + "hkdf", + "k256", + "multihash", + "quick-protobuf", + "rand 0.8.5", + "sha2", + "thiserror 2.0.18", + "tracing", + "zeroize", +] + +[[package]] +name = "libp2p-mdns" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c66872d0f1ffcded2788683f76931be1c52e27f343edb93bc6d0bcd8887be443" +dependencies = [ + "futures", + "hickory-proto", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "smallvec", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-metrics" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "805a555148522cb3414493a5153451910cb1a146c53ffbf4385708349baf62b7" +dependencies = [ + "futures", + "libp2p-core", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-ping", + "libp2p-swarm", + "pin-project", + "prometheus-client", + "web-time", +] + +[[package]] +name = "libp2p-noise" +version = "0.46.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc73eacbe6462a0eb92a6527cac6e63f02026e5407f8831bde8293f19217bfbf" +dependencies = [ + "asynchronous-codec", + "bytes", + "futures", + "libp2p-core", + "libp2p-identity", + "multiaddr", + "multihash", + "quick-protobuf", + "rand 0.8.5", + "snow", + "static_assertions", + "thiserror 2.0.18", + "tracing", + "x25519-dalek", + "zeroize", +] + +[[package]] +name = "libp2p-ping" +version = "0.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74bb7fcdfd9fead4144a3859da0b49576f171a8c8c7c0bfc7c541921d25e60d3" +dependencies = [ + "futures", + 
"futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-quic" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8dc448b2de9f4745784e3751fe8bc6c473d01b8317edd5ababcb0dec803d843f" +dependencies = [ + "futures", + "futures-timer", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-tls", + "quinn", + "rand 0.8.5", + "ring", + "rustls", + "socket2 0.5.10", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-stream" +version = "0.4.0-alpha" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6bd8025c80205ec2810cfb28b02f362ab48a01bee32c50ab5f12761e033464" +dependencies = [ + "futures", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "tracing", +] + +[[package]] +name = "libp2p-swarm" +version = "0.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce88c6c4bf746c8482480345ea3edfd08301f49e026889d1cbccfa1808a9ed9e" +dependencies = [ + "either", + "fnv", + "futures", + "futures-timer", + "hashlink 0.10.0", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm-derive", + "multistream-select", + "rand 0.8.5", + "smallvec", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-swarm-derive" +version = "0.35.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd297cf53f0cb3dee4d2620bb319ae47ef27c702684309f682bdb7e55a18ae9c" +dependencies = [ + "heck", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "libp2p-tcp" +version = "0.44.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb6585b9309699f58704ec9ab0bb102eca7a3777170fa91a8678d73ca9cafa93" +dependencies = [ + "futures", + "futures-timer", + "if-watch", + "libc", + "libp2p-core", + "socket2 0.6.2", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-tls" +version = "0.6.2" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96ff65a82e35375cbc31ebb99cacbbf28cb6c4fefe26bf13756ddcf708d40080" +dependencies = [ + "futures", + "futures-rustls", + "libp2p-core", + "libp2p-identity", + "rcgen", + "ring", + "rustls", + "rustls-webpki", + "thiserror 2.0.18", + "x509-parser", + "yasna", +] + +[[package]] +name = "libp2p-upnp" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4757e65fe69399c1a243bbb90ec1ae5a2114b907467bf09f3575e899815bb8d3" +dependencies = [ + "futures", + "futures-timer", + "igd-next", + "libp2p-core", + "libp2p-swarm", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-yamux" +version = "0.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f15df094914eb4af272acf9adaa9e287baa269943f32ea348ba29cfb9bfc60d8" +dependencies = [ + "either", + "futures", + "libp2p-core", + "thiserror 2.0.18", + "tracing", + "yamux 0.12.1", + "yamux 0.13.8", +] + +[[package]] +name = "libproc" +version = "0.14.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a54ad7278b8bc5301d5ffd2a94251c004feb971feba96c971ea4063645990757" +dependencies = [ + "bindgen 0.72.1", + "errno", + "libc", +] + +[[package]] +name = "libredox" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +dependencies = [ + "bitflags 2.10.0", + "libc", + "redox_syscall 0.7.0", +] + +[[package]] +name = "librocksdb-sys" +version = "0.17.3+10.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cef2a00ee60fe526157c9023edab23943fae1ce2ab6f4abb2a807c1746835de9" +dependencies = [ + "bindgen 0.72.1", + "bzip2-sys", + "cc", + "libc", + "libz-sys", + "lz4-sys", + "tikv-jemalloc-sys", + "zstd-sys", +] + +[[package]] +name = "libz-sys" +version = "1.1.23" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linked_hash_set" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "984fb35d06508d1e69fc91050cceba9c0b748f983e6739fa2c7a9237154c52c8" +dependencies = [ + "linked-hash-map", + "serde_core", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + +[[package]] +name = "linux-raw-sys" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" + +[[package]] +name = "litemap" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" + +[[package]] +name = "litrs" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092" + +[[package]] +name = "lock_api" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965" +dependencies = [ + "scopeguard", + "serde", +] + +[[package]] +name = "log" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" + +[[package]] +name = "loom" +version = "0.7.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca" +dependencies = [ + "cfg-if", + "generator", + "scoped-tls", + "tracing", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.5", +] + +[[package]] +name = "lru" +version = "0.16.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" +dependencies = [ + "hashbrown 0.16.1", +] + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "lz4" +version = "1.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a20b523e860d03443e98350ceaac5e71c6ba89aea7d960769ec3ce37f4de5af4" +dependencies = [ + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.11.1+lz4-1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bd8c0d6c6ed0cd30b3652886bb8711dc4bb01d637a68105a3d5158039b418e6" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "lz4_flex" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08ab2867e3eeeca90e844d1940eab391c9dc5228783db2ed999acbc0a9ed375a" + +[[package]] +name = "mach2" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a1b95cd5421ec55b445b5ae102f5ea0e768de1f82bd3001e11f426c269c3aea" +dependencies = [ + "libc", +] + +[[package]] +name = "macro-string" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1b27834086c65ec3f9387b096d66e99f221cf081c2b738042aa252bcd41204e3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "mappings" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db4d277bb50d4508057e7bddd7fcd19ef4a4cc38051b6a5a36868d75ae2cbeb9" +dependencies = [ + "anyhow", + "libc", + "once_cell", + "pprof_util", + "tracing", +] + +[[package]] +name = "match-lookup" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "757aee279b8bdbb9f9e676796fd459e4207a1f986e87886700abf589f5abf771" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "matchers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matchit" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" + +[[package]] +name = "memchr" +version = "2.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" + +[[package]] +name = "memmap2" +version = "0.9.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744133e4a0e0a658e1374cf3bf8e415c4052a15a111acd372764c55b4177d490" +dependencies = [ + "libc", +] + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "metrics" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d5312e9ba3771cfa961b585728215e3d972c950a3eed9252aa093d6301277e8" +dependencies = [ + 
"ahash", + "portable-atomic", +] + +[[package]] +name = "metrics-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37a87f4b19620e4c561f7b48f5e6ca085b1780def671696a6a3d9d0c137360ec" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd7399781913e5393588a8d8c6a2867bf85fb38eaf2502fdce465aad2dc6f034" +dependencies = [ + "base64 0.22.1", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "indexmap 2.13.0", + "ipnet", + "metrics", + "metrics-util 0.19.1", + "quanta", + "thiserror 1.0.69", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-exporter-prometheus" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3589659543c04c7dc5526ec858591015b87cd8746583b51b48ef4353f99dbcda" +dependencies = [ + "base64 0.22.1", + "http-body-util", + "hyper", + "hyper-util", + "indexmap 2.13.0", + "ipnet", + "metrics", + "metrics-util 0.20.1", + "quanta", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "metrics-process" +version = "2.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f615e08e049bd14a44c4425415782efb9bcd479fc1e19ddeb971509074c060d0" +dependencies = [ + "libc", + "libproc", + "mach2", + "metrics", + "once_cell", + "procfs 0.18.0", + "rlimit", + "windows 0.62.2", +] + +[[package]] +name = "metrics-util" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8496cc523d1f94c1385dd8f0f0c2c480b2b8aeccb5b7e4485ad6365523ae376" +dependencies = [ + "aho-corasick", + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "metrics", + "ordered-float", + "quanta", + "radix_trie", + "rand 0.9.2", + "rand_xoshiro", + "sketches-ddsketch", +] + +[[package]] +name = 
"metrics-util" +version = "0.20.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdfb1365fea27e6dd9dc1dbc19f570198bc86914533ad639dae939635f096be4" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", + "hashbrown 0.16.1", + "metrics", + "quanta", + "rand 0.9.2", + "rand_xoshiro", + "sketches-ddsketch", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + +[[package]] +name = "minimal-lexical" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + +[[package]] +name = "miniz_oxide" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5faa9f23e86bd5768d76def086192ff5f869fb088da12a976ea21e9796b975f6" +dependencies = [ + "adler2", + "serde", +] + +[[package]] +name = "mio" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc" +dependencies = [ + "libc", + "log", + "wasi", + "windows-sys 0.61.2", +] + +[[package]] +name = "mockall" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f58d964098a5f9c6b63d0798e5372fd04708193510a7af313c22e9f29b7b620b" +dependencies = [ + "cfg-if", + "downcast", + 
"fragile", + "mockall_derive", + "predicates", + "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca41ce716dda6a9be188b385aa78ee5260fc25cd3802cb2a8afdc6afbe6b6dbf" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "modular-bitfield" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a53d79ba8304ac1c4f9eb3b9d281f21f7be9d4626f72ce7df4ad8fbde4f38a74" +dependencies = [ + "modular-bitfield-impl", + "static_assertions", +] + +[[package]] +name = "modular-bitfield-impl" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a7d5f7076603ebc68de2dc6a650ec331a062a13abaa346975be747bbfa4b789" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "moka" +version = "0.12.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e" +dependencies = [ + "async-lock", + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "event-listener", + "futures-util", + "parking_lot", + "portable-atomic", + "smallvec", + "tagptr", + "uuid", +] + +[[package]] +name = "more-asserts" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fafa6961cabd9c63bcd77a45d7e3b7f3b552b70417831fb0f56db717e72407e" + +[[package]] +name = "multiaddr" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe6351f60b488e04c1d21bc69e56b89cb3f5e8f5d22557d6e8031bdfd79b6961" +dependencies = [ + "arrayref", + "byteorder", + "data-encoding", + "libp2p-identity", + "multibase", + "multihash", + "percent-encoding", + "serde", + "static_assertions", + "unsigned-varint 0.8.0", + "url", +] + +[[package]] +name = "multibase" +version = 
"0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8694bb4835f452b0e3bb06dbebb1d6fc5385b6ca1caf2e55fd165c042390ec77" +dependencies = [ + "base-x", + "base256emoji", + "data-encoding", + "data-encoding-macro", +] + +[[package]] +name = "multihash" +version = "0.19.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b430e7953c29dd6a09afc29ff0bb69c6e306329ee6794700aee27b76a1aea8d" +dependencies = [ + "core2", + "unsigned-varint 0.8.0", +] + +[[package]] +name = "multistream-select" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" +dependencies = [ + "bytes", + "futures", + "log", + "pin-project", + "smallvec", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "munge" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e17401f259eba956ca16491461b6e8f72913a0a114e39736ce404410f915a0c" +dependencies = [ + "munge_macro", +] + +[[package]] +name = "munge_macro" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4568f25ccbd45ab5d5603dc34318c1ec56b117531781260002151b8530a9f931" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe 0.1.6", + "openssl-sys", + "schannel", + "security-framework 2.11.1", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "netlink-packet-core" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72724faf704479d67b388da142b186f916188505e7e0b26719019c525882eda4" +dependencies = [ + "anyhow", + "byteorder", + "netlink-packet-utils", +] + 
+[[package]] +name = "netlink-packet-route" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053998cea5a306971f88580d0829e90f270f940befd7cf928da179d4187a5a66" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "byteorder", + "libc", + "netlink-packet-core", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-utils" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" +dependencies = [ + "anyhow", + "byteorder", + "paste", + "thiserror 1.0.69", +] + +[[package]] +name = "netlink-proto" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72452e012c2f8d612410d89eea01e2d9b56205274abb35d53f60200b2ec41d60" +dependencies = [ + "bytes", + "futures", + "log", + "netlink-packet-core", + "netlink-sys", + "thiserror 2.0.18", +] + +[[package]] +name = "netlink-sys" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6c30ed10fa69cc491d491b85cc971f6bdeb8e7367b7cde2ee6cc878d583fae" +dependencies = [ + "bytes", + "futures-util", + "libc", + "log", + "tokio", +] + +[[package]] +name = "nibble_vec" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" +dependencies = [ + "smallvec", +] + +[[package]] +name = "nix" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +dependencies = [ + "bitflags 1.3.2", + "cfg-if", + "libc", +] + +[[package]] +name = "nohash-hasher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bf50223579dc7cdcfb3bfcacf7069ff68243f8c363f62ffa99cf000a6b9c451" + +[[package]] +name = "nom" +version = "7.1.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" +dependencies = [ + "memchr", + "minimal-lexical", +] + +[[package]] +name = "notify" +version = "8.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" +dependencies = [ + "bitflags 2.10.0", + "fsevent-sys", + "inotify", + "kqueue", + "libc", + "log", + "mio", + "notify-types", + "walkdir", + "windows-sys 0.60.2", +] + +[[package]] +name = "notify-types" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42b8cfee0e339a0337359f3c88165702ac6e600dc01c0cc9579a92d62b08477a" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "ntapi" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c70f219e21142367c70c0b30c6a9e3a14d55b4d12a204d897fbec83a0363f081" +dependencies = [ + "winapi", +] + +[[package]] +name = "nu-ansi-term" +version = "0.50.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7957b9740744892f114936ab4a57b3f487491bbeafaf8083688b16841a4240e5" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits", +] + +[[package]] +name = "num-bigint" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", + "serde", +] + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-conv" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" +dependencies = [ + "num-traits", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", + "libm", +] + +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "num_enum" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1207a7e20ad57b847bbddc6776b968420d38292bbfe2089accff5e19e82454c" +dependencies = [ + "num_enum_derive", + "rustversion", +] + +[[package]] +name = "num_enum_derive" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "num_threads" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9" +dependencies = [ + "libc", +] + +[[package]] +name = "nybbles" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5676b5c379cf5b03da1df2b3061c4a4e2aa691086a56ac923e08c143f53f59" +dependencies = [ + "alloy-rlp", + "arbitrary", + "cfg-if", + "proptest", + "ruint", + "serde", + "smallvec", +] + +[[package]] +name = "object" +version = "0.37.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" +dependencies = [ + "memchr", +] + +[[package]] +name = "oid-registry" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7" +dependencies = [ + "asn1-rs", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" +dependencies = [ + "critical-section", + "portable-atomic", +] + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + +[[package]] +name = "op-alloy" +version = "0.23.1" +dependencies = [ + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-provider", + 
"op-alloy-rpc-jsonrpsee", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", +] + +[[package]] +name = "op-alloy-consensus" +version = "0.23.1" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-network", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-signer", + "arbitrary", + "bincode 2.0.1", + "derive_more", + "rand 0.9.2", + "serde", + "serde_json", + "serde_with", + "thiserror 2.0.18", +] + +[[package]] +name = "op-alloy-flz" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a79f352fc3893dcd670172e615afef993a41798a1d3fc0db88a3e60ef2e70ecc" + +[[package]] +name = "op-alloy-network" +version = "0.23.1" +dependencies = [ + "alloy-consensus", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-eth", + "alloy-signer", + "op-alloy-consensus", + "op-alloy-rpc-types", +] + +[[package]] +name = "op-alloy-provider" +version = "0.23.1" +dependencies = [ + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-engine", + "alloy-transport", + "async-trait", + "op-alloy-rpc-types-engine", +] + +[[package]] +name = "op-alloy-rpc-jsonrpsee" +version = "0.23.1" +dependencies = [ + "alloy-primitives", + "jsonrpsee", +] + +[[package]] +name = "op-alloy-rpc-types" +version = "0.23.1" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-network-primitives", + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-serde", + "arbitrary", + "derive_more", + "jsonrpsee", + "op-alloy-consensus", + "rand 0.9.2", + "serde", + "serde_json", + "similar-asserts", + "thiserror 2.0.18", +] + +[[package]] +name = "op-alloy-rpc-types-engine" +version = "0.23.1" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-serde", + "arbitrary", + "arbtest", + "derive_more", + "ethereum_ssz", + "ethereum_ssz_derive", + "op-alloy-consensus", + "serde", + 
"serde_json", + "sha2", + "snap", + "thiserror 2.0.18", +] + +[[package]] +name = "op-reth" +version = "1.10.2" +dependencies = [ + "clap", + "reth-cli-util", + "reth-optimism-chainspec", + "reth-optimism-cli", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-node", + "reth-optimism-payload-builder", + "reth-optimism-primitives", + "reth-optimism-rpc", + "tracing", +] + +[[package]] +name = "op-revm" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79c92b75162c2ed1661849fa51683b11254a5b661798360a2c24be918edafd40" +dependencies = [ + "auto_impl", + "revm", + "serde", +] + +[[package]] +name = "opaque-debug" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" + +[[package]] +name = "openssl" +version = "0.10.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" +dependencies = [ + "bitflags 2.10.0", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + +[[package]] +name = "openssl-src" +version = "300.5.5+3.5.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f1787d533e03597a7934fd0a765f0d28e94ecc5fb7789f8053b1e699a56f709" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "opentelemetry" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "236e667b670a5cdf90c258f5a55794ec5ac5027e960c224bff8367a59e1e6426" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "opentelemetry" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b84bcd6ae87133e903af7ef497404dda70c60d0ea14895fc8a5e6722754fc2a0" +dependencies = [ + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "opentelemetry-appender-tracing" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef6a1ac5ca3accf562b8c306fa8483c85f4390f768185ab775f242f7fe8fdcc2" +dependencies = [ + "opentelemetry 0.31.0", + "tracing", + "tracing-core", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "opentelemetry-http" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8863faf2910030d139fb48715ad5ff2f35029fc5f244f6d5f689ddcf4d26253" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry 0.28.0", + "reqwest 0.12.28", + "tracing", +] + +[[package]] +name = "opentelemetry-http" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" +dependencies = [ + 
"async-trait", + "bytes", + "http", + "opentelemetry 0.31.0", + "reqwest 0.12.28", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5bef114c6d41bea83d6dc60eb41720eedd0261a67af57b66dd2b84ac46c01d91" +dependencies = [ + "async-trait", + "futures-core", + "http", + "opentelemetry 0.28.0", + "opentelemetry-http 0.28.0", + "opentelemetry-proto 0.28.0", + "opentelemetry_sdk 0.28.0", + "prost 0.13.5", + "reqwest 0.12.28", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tonic 0.12.3", + "tracing", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2366db2dca4d2ad033cad11e6ee42844fd727007af5ad04a1730f4cb8163bf" +dependencies = [ + "http", + "opentelemetry 0.31.0", + "opentelemetry-http 0.31.0", + "opentelemetry-proto 0.31.0", + "opentelemetry_sdk 0.31.0", + "prost 0.14.3", + "reqwest 0.12.28", + "thiserror 2.0.18", + "tokio", + "tonic 0.14.3", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f8870d3024727e99212eb3bb1762ec16e255e3e6f58eeb3dc8db1aa226746d" +dependencies = [ + "base64 0.22.1", + "hex", + "opentelemetry 0.28.0", + "opentelemetry_sdk 0.28.0", + "prost 0.13.5", + "serde", + "tonic 0.12.3", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7175df06de5eaee9909d4805a3d07e28bb752c34cab57fa9cff549da596b30f" +dependencies = [ + "opentelemetry 0.31.0", + "opentelemetry_sdk 0.31.0", + "prost 0.14.3", + "tonic 0.14.3", + "tonic-prost", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e62e29dfe041afb8ed2a6c9737ab57db4907285d999ef8ad3a59092a36bdc846" + 
+[[package]] +name = "opentelemetry_sdk" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84dfad6042089c7fc1f6118b7040dc2eb4ab520abbf410b79dc481032af39570" +dependencies = [ + "async-trait", + "futures-channel", + "futures-executor", + "futures-util", + "glob", + "opentelemetry 0.28.0", + "percent-encoding", + "rand 0.8.5", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e14ae4f5991976fd48df6d843de219ca6d31b01daaab2dad5af2badeded372bd" +dependencies = [ + "futures-channel", + "futures-executor", + "futures-util", + "opentelemetry 0.31.0", + "percent-encoding", + "rand 0.9.2", + "thiserror 2.0.18", +] + +[[package]] +name = "option-ext" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" + +[[package]] +name = "ordered-float" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951" +dependencies = [ + "num-traits", +] + +[[package]] +name = "p256" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9863ad85fa8f4460f9c48cb909d38a0d689dba1f6f6988a5e3e0d31071bcd4b" +dependencies = [ + "ecdsa", + "elliptic-curve", + "primeorder", + "sha2", +] + +[[package]] +name = "page_size" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30d5b2194ed13191c1999ae0704b7839fb18384fa22e49b57eeaa97d79ce40da" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "papergrid" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6978128c8b51d8f4080631ceb2302ab51e32cc6e8615f735ee2f83fd269ae3f1" 
+dependencies = [ + "bytecount", + "fnv", + "unicode-width 0.2.0", +] + +[[package]] +name = "parity-scale-codec" +version = "3.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "799781ae679d79a948e13d4824a40970bfa500058d245760dd857301059810fa" +dependencies = [ + "arbitrary", + "arrayvec", + "bitvec", + "byte-slice-cast", + "bytes", + "const_format", + "impl-trait-for-tuples", + "parity-scale-codec-derive", + "rustversion", + "serde", +] + +[[package]] +name = "parity-scale-codec-derive" +version = "3.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34b4653168b563151153c9e4c08ebed57fb8262bebfa79711552fa983c623e7a" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + +[[package]] +name = "parking_lot" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall 0.5.18", + "smallvec", + "windows-link", +] + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "path-tree" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a97453bc21a968f722df730bfe11bd08745cb50d1300b0df2bda131dece136" +dependencies = [ + "smallvec", +] + +[[package]] +name = 
"pbkdf2" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" +dependencies = [ + "digest 0.10.7", + "hmac", +] + +[[package]] +name = "pem" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" +dependencies = [ + "base64 0.22.1", + "serde_core", +] + +[[package]] +name = "percent-encoding" +version = "2.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + +[[package]] +name = "pest" +version = "2.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" +dependencies = [ + "memchr", + "ucd-trie", +] + +[[package]] +name = "pharos" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e9567389417feee6ce15dd6527a8a1ecac205ef62c2932bcf3d9f6fc5b78b414" +dependencies = [ + "futures", + "rustc_version 0.4.1", +] + +[[package]] +name = "phf" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" +dependencies = [ + "phf_macros", + "phf_shared", + "serde", +] + +[[package]] +name = "phf_generator" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" +dependencies = [ + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_macros" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn 2.0.114", +] + 
+[[package]] +name = "phf_shared" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" +dependencies = [ + "siphasher", +] + +[[package]] +name = "pin-project" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkcs8" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" +dependencies = [ + "der", + "spki", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "plain_hasher" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e19e6491bdde87c2c43d70f4c194bc8a758f2eb732df00f61e43f7362e3b4cc" +dependencies = [ + "crunchy", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix 1.1.3", + "windows-sys 0.61.2", +] + +[[package]] +name = "poly1305" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" +dependencies = [ + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "polyval" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" +dependencies = [ + "cfg-if", + "cpufeatures", + "opaque-debug", + "universal-hash", +] + +[[package]] +name = "portable-atomic" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" + +[[package]] +name = "potential_utf" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" 
+version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "pprof_util" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4429d44e5e2c8a69399fc0070379201eed018e3df61e04eb7432811df073c224" +dependencies = [ + "anyhow", + "backtrace", + "flate2", + "num", + "paste", + "prost 0.13.5", +] + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "predicates" +version = "3.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5d19ee57562043d37e82899fade9a22ebab7be9cef5026b07fda9cdd4293573" +dependencies = [ + "anstyle", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "727e462b119fe9c93fd0eb1429a5f7647394014cf3c04ab2c0350eeb09095ffa" + +[[package]] +name = "predicates-tree" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72dd2d6d381dfb73a193c7fca536518d7caee39fc8503f74e7dc0be0531b425c" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "primeorder" +version = "0.13.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +dependencies = [ + "elliptic-curve", +] + +[[package]] +name = "primitive-types" +version = "0.12.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b34d9fd68ae0b74a41b21c03c2f62847aa0ffea044eee893b4c140b37e244e2" +dependencies = [ + "fixed-hash", + "impl-codec", + "uint 0.9.5", +] + +[[package]] +name = "proc-macro-crate" +version = "3.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +dependencies = [ + "toml_edit 0.23.10+spec-1.0.0", +] + +[[package]] +name = "proc-macro-error-attr2" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" +dependencies = [ + "proc-macro2", + "quote", +] + +[[package]] +name = "proc-macro-error2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" +dependencies = [ + "proc-macro-error-attr2", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "procfs" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc5b72d8145275d844d4b5f6d4e1eef00c8cd889edb6035c21675d1bb1f45c9f" +dependencies = [ + "bitflags 2.10.0", + "chrono", + "flate2", + "hex", + "procfs-core 0.17.0", + "rustix 0.38.44", +] + +[[package]] +name = "procfs" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25485360a54d6861439d60facef26de713b1e126bf015ec8f98239467a2b82f7" +dependencies = [ + "bitflags 2.10.0", + "procfs-core 0.18.0", + "rustix 1.1.3", +] + +[[package]] +name = "procfs-core" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "239df02d8349b06fc07398a3a1697b06418223b1c7725085e801e7c0fc6a12ec" +dependencies = [ + "bitflags 2.10.0", + "chrono", + "hex", +] + +[[package]] +name = "procfs-core" +version = "0.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6401bf7b6af22f78b563665d15a22e9aef27775b79b149a66ca022468a4e405" +dependencies = [ + "bitflags 2.10.0", + "hex", +] + +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf41c1a7c32ed72abe5082fb19505b969095c12da9f5732a4bc9878757fd087c" +dependencies = [ + "dtoa", + "itoa", + "parking_lot", + "prometheus-client-derive-encode", +] + +[[package]] +name = "prometheus-client-derive-encode" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "proptest" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" +dependencies = [ + "bit-set", + "bit-vec", + "bitflags 2.10.0", + "num-traits", + "rand 0.9.2", + "rand_chacha 0.9.0", + "rand_xorshift", + "regex-syntax", + "rusty-fork", + "tempfile", + "unarray", +] + +[[package]] +name = "proptest-arbitrary-interop" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1981e49bd2432249da8b0e11e5557099a8e74690d6b94e721f7dc0bb7f3555f" +dependencies = [ + "arbitrary", + "proptest", +] + +[[package]] +name = "proptest-derive" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "proptest-derive" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "095a99f75c69734802359b682be8daaf8980296731f6470434ea2c652af1dd30" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive 0.13.5", +] + +[[package]] +name = "prost" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568" +dependencies = [ + "bytes", + "prost-derive 0.14.3", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "prost-derive" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b" +dependencies = [ + "anyhow", + "itertools 0.14.0", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "ptr_meta" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b9a0cf95a1196af61d4f1cbdab967179516d9a4a4312af1f31948f8f6224a79" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7347867d0a7e1208d93b46767be83e2b8f978c3dad35f775ac8d8847551d6fe1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "quanta" +version = "0.12.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f3ab5a9d756f0d97bdc89019bd2e4ea098cf9cde50ee7564dde6b81ccc8f06c7" +dependencies = [ + "crossbeam-utils", + "libc", + "once_cell", + "raw-cpuid", + "wasi", + "web-sys", + "winapi", +] + +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + +[[package]] +name = "quick-protobuf" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" +dependencies = [ + "byteorder", +] + +[[package]] +name = "quick-protobuf-codec" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15a0580ab32b169745d7a39db2ba969226ca16738931be152a3209b409de2474" +dependencies = [ + "asynchronous-codec", + "bytes", + "quick-protobuf", + "thiserror 1.0.69", + "unsigned-varint 0.8.0", +] + +[[package]] +name = "quinn" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" +dependencies = [ + "bytes", + "cfg_aliases", + "futures-io", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2 0.6.2", + "thiserror 2.0.18", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +dependencies = [ + "bytes", + "getrandom 0.3.4", + "lru-slab", + "rand 0.9.2", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror 2.0.18", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" 
+dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.6.2", + "tracing", + "windows-sys 0.60.2", +] + +[[package]] +name = "quote" +version = "1.0.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "radix_trie" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c069c179fcdc6a2fe24d8d18305cf085fdbd4f922c041943e203685d6a1c58fd" +dependencies = [ + "endian-type", + "nibble_vec", +] + +[[package]] +name = "rancor" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a063ea72381527c2a0561da9c80000ef822bdd7c3241b1cc1b12100e3df081ee" +dependencies = [ + "ptr_meta", +] + +[[package]] +name = "rand" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" +dependencies = [ + "libc", + "rand_chacha 0.3.1", + "rand_core 0.6.4", + "serde", +] + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha 0.9.0", + "rand_core 0.9.5", + "serde", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + 
"ppv-lite86", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core 0.9.5", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" +dependencies = [ + "getrandom 0.2.17", +] + +[[package]] +name = "rand_core" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76afc826de14238e6e8c374ddcc1fa19e374fd8dd986b0d2af0d02377261d83c" +dependencies = [ + "getrandom 0.3.4", + "serde", +] + +[[package]] +name = "rand_xorshift" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "513962919efc330f829edb2535844d1b912b0fbe2ca165d613e4e8788bb05a5a" +dependencies = [ + "rand_core 0.9.5", +] + +[[package]] +name = "rand_xoshiro" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f703f4665700daf5512dcca5f43afa6af89f09db47fb56be587f80636bda2d41" +dependencies = [ + "rand_core 0.9.5", +] + +[[package]] +name = "rapidhash" +version = "4.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d8b5b858a440a0bc02625b62dd95131b9201aa9f69f411195dd4a7cfb1de3d7" +dependencies = [ + "rand 0.9.2", + "rustversion", +] + +[[package]] +name = "ratatui" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b" +dependencies = [ + "bitflags 2.10.0", + "cassowary", + "compact_str", + "crossterm 0.28.1", + "indoc", + "instability", + "itertools 0.13.0", + "lru 0.12.5", + "paste", + "strum 0.26.3", + "unicode-segmentation", + "unicode-truncate", + "unicode-width 0.2.0", +] + 
+[[package]] +name = "raw-cpuid" +version = "11.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "498cd0dc59d73224351ee52a95fee0f1a617a2eae0e7d9d720cc622c73a54186" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "rayon" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "rcgen" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75e669e5202259b5314d1ea5397316ad400819437857b90861765f24c4cf80a2" +dependencies = [ + "pem", + "ring", + "rustls-pki-types", + "time", + "yasna", +] + +[[package]] +name = "recvmsg" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3edd4d5d42c92f0a659926464d4cce56b562761267ecf0f469d85b7de384175" + +[[package]] +name = "redox_syscall" +version = "0.5.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "redox_syscall" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" +dependencies = [ + "bitflags 2.10.0", +] + +[[package]] +name = "redox_users" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 1.0.69", +] + 
+[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.17", + "libredox", + "thiserror 2.0.18", +] + +[[package]] +name = "ref-cast" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "regex" +version = "1.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" + +[[package]] +name = "regress" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2057b2325e68a893284d1538021ab90279adac1139957ca2a74426c6f118fb48" +dependencies = [ + "hashbrown 0.16.1", + "memchr", +] + +[[package]] +name = "relative-path" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" + +[[package]] +name = "rend" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cadadef317c2f20755a64d7fdc48f9e7178ee6b0e1f7fce33fa60f1d68a276e6" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "reqwest" +version = "0.12.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-native-certs", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-rustls", + "tokio-util", + "tower 0.5.3", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 1.0.5", +] + +[[package]] +name = "reqwest" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04e9018c9d814e5f30cc16a0f03271aeab3571e609612d9fe78c1aa8d11c2f62" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures-core", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "js-sys", + "log", + "percent-encoding", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower 0.5.3", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "resolv-conf" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" + +[[package]] +name = "reth-basic-payload-builder" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "futures-core", + "futures-util", + "metrics", + "reth-chain-state", + "reth-metrics", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-tasks", + "tokio", + "tracing", +] + +[[package]] +name = "reth-chain-state" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-signer", + "alloy-signer-local", + "derive_more", + "metrics", + "parking_lot", + "pin-project", + "rand 0.9.2", + "rayon", + "reth-chainspec", + "reth-errors", + "reth-ethereum-primitives", + "reth-execution-types", + "reth-metrics", + "reth-primitives-traits", + "reth-storage-api", + "reth-trie", + "revm-database", + "revm-state", + "serde", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-chainspec" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-genesis", + "alloy-primitives", + "alloy-trie", + "auto_impl", + "derive_more", + "reth-ethereum-forks", + "reth-network-peers", + "reth-primitives-traits", + "serde_json", +] + +[[package]] +name = "reth-cli" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-genesis", + "clap", + "eyre", + "reth-cli-runner", + "reth-db", + "serde_json", + "shellexpand", +] + +[[package]] +name = "reth-cli-commands" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "backon", + "clap", + "comfy-table", + "crossterm 0.28.1", + "eyre", + "fdlimit", + "futures", + "human_bytes", + "humantime", + "itertools 0.14.0", + "lz4", + "metrics", + "proptest", + "proptest-arbitrary-interop", + "ratatui", + "reqwest 0.12.28", + "reth-chainspec", + "reth-cli", + "reth-cli-runner", + "reth-cli-util", + "reth-codecs", + "reth-config", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-db-common", + "reth-discv4", + "reth-discv5", + "reth-downloaders", + "reth-ecies", + "reth-era", + "reth-era-downloader", + "reth-era-utils", + "reth-eth-wire", + "reth-ethereum-primitives", + "reth-etl", + "reth-evm", + "reth-exex", + "reth-fs-util", + "reth-net-nat", + "reth-network", + "reth-network-p2p", + "reth-network-peers", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-events", + "reth-node-metrics", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-prune-types", + "reth-revm", + "reth-stages", + "reth-stages-types", + "reth-static-file", + "reth-static-file-types", + "reth-storage-api", + "reth-tasks", + "reth-trie", + "reth-trie-common", + "reth-trie-db", + "secp256k1 0.30.0", + "serde", + "serde_json", + "tar", + "tokio", + "tokio-stream", + "toml 0.8.23", + "tracing", + "url", + "zstd", +] + +[[package]] +name = "reth-cli-runner" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "reth-tasks", + "tokio", + "tracing", +] + +[[package]] +name = "reth-cli-util" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "cfg-if", + "eyre", + "libc", + "rand 
0.8.5", + "reth-fs-util", + "reth-tracing", + "secp256k1 0.30.0", + "serde", + "thiserror 2.0.18", + "tikv-jemallocator", + "tracy-client", +] + +[[package]] +name = "reth-codecs" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives", + "alloy-trie", + "arbitrary", + "bytes", + "modular-bitfield", + "op-alloy-consensus", + "reth-codecs-derive", + "reth-zstd-compressors", + "serde", + "visibility", +] + +[[package]] +name = "reth-codecs-derive" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "reth-config" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "eyre", + "humantime-serde", + "reth-network-types", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "serde", + "toml 0.8.23", + "url", +] + +[[package]] +name = "reth-consensus" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "auto_impl", + "reth-execution-types", + "reth-primitives-traits", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-consensus-common" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "reth-chainspec", + "reth-consensus", + "reth-primitives-traits", +] + +[[package]] +name = "reth-consensus-debug-client" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + 
"alloy-eips", + "alloy-json-rpc", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types-engine", + "alloy-transport", + "auto_impl", + "derive_more", + "eyre", + "futures", + "reqwest 0.12.28", + "reth-node-api", + "reth-primitives-traits", + "reth-tracing", + "ringbuffer 0.15.0", + "serde", + "serde_json", + "tokio", +] + +[[package]] +name = "reth-db" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "derive_more", + "eyre", + "metrics", + "page_size", + "parking_lot", + "reth-db-api", + "reth-fs-util", + "reth-libmdbx", + "reth-metrics", + "reth-nippy-jar", + "reth-static-file-types", + "reth-storage-errors", + "reth-tracing", + "rustc-hash", + "strum 0.27.2", + "sysinfo", + "tempfile", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-db-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-genesis", + "alloy-primitives", + "arbitrary", + "arrayvec", + "bytes", + "derive_more", + "metrics", + "modular-bitfield", + "op-alloy-consensus", + "parity-scale-codec", + "proptest", + "reth-codecs", + "reth-db-models", + "reth-ethereum-primitives", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages-types", + "reth-storage-errors", + "reth-trie-common", + "roaring", + "serde", +] + +[[package]] +name = "reth-db-common" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-genesis", + "alloy-primitives", + "boyer-moore-magiclen", + "eyre", + "reth-chainspec", + "reth-codecs", + "reth-config", + "reth-db-api", + "reth-etl", + "reth-execution-errors", + "reth-fs-util", + "reth-node-types", + "reth-primitives-traits", + "reth-provider", + "reth-stages-types", + "reth-static-file-types", + 
"reth-trie", + "reth-trie-db", + "serde", + "serde_json", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "reth-db-models" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "arbitrary", + "bytes", + "modular-bitfield", + "reth-codecs", + "reth-primitives-traits", + "serde", +] + +[[package]] +name = "reth-discv4" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "discv5", + "enr", + "itertools 0.14.0", + "parking_lot", + "rand 0.8.5", + "reth-ethereum-forks", + "reth-net-banlist", + "reth-net-nat", + "reth-network-peers", + "schnellru", + "secp256k1 0.30.0", + "serde", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-discv5" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "derive_more", + "discv5", + "enr", + "futures", + "itertools 0.14.0", + "metrics", + "rand 0.9.2", + "reth-chainspec", + "reth-ethereum-forks", + "reth-metrics", + "reth-network-peers", + "secp256k1 0.30.0", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-dns-discovery" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "data-encoding", + "enr", + "hickory-resolver", + "linked_hash_set", + "parking_lot", + "reth-ethereum-forks", + "reth-network-peers", + "reth-tokio-util", + "schnellru", + "secp256k1 0.30.0", + "serde", + "serde_with", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-downloaders" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "async-compression", + "futures", + "futures-util", + "itertools 0.14.0", + "metrics", + "pin-project", + "rayon", + "reth-config", + "reth-consensus", + "reth-ethereum-primitives", + "reth-metrics", + "reth-network-p2p", + "reth-network-peers", + "reth-primitives-traits", + "reth-provider", + "reth-storage-api", + "reth-tasks", + "reth-testing-utils", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-e2e-test-utils" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-network", + "alloy-primitives", + "alloy-provider", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-signer", + "alloy-signer-local", + "derive_more", + "eyre", + "futures-util", + "jsonrpsee", + "reth-chainspec", + "reth-cli-commands", + "reth-config", + "reth-consensus", + "reth-db", + "reth-db-common", + "reth-engine-local", + "reth-engine-primitives", + "reth-ethereum-primitives", + "reth-network-api", + "reth-network-p2p", + "reth-network-peers", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-ethereum", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-eth-api", + "reth-rpc-server-types", + "reth-stages-types", + "reth-tasks", + "reth-tokio-util", + "reth-tracing", + "revm", + "serde_json", + "tempfile", + "tokio", + "tokio-stream", + "tracing", + "url", +] + +[[package]] +name = "reth-ecies" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "aes", + "alloy-primitives", + "alloy-rlp", + "block-padding", + "byteorder", + "cipher", + "concat-kdf", + "ctr", + "digest 0.10.7", + "futures", + "hmac", + "pin-project", + "rand 0.8.5", + "reth-network-peers", + "secp256k1 0.30.0", + "sha2", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-engine-local" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rpc-types-engine", + "eyre", + "futures-util", + "op-alloy-rpc-types-engine", + "reth-chainspec", + "reth-engine-primitives", + "reth-ethereum-engine-primitives", + "reth-payload-builder", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-storage-api", + "reth-transaction-pool", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-engine-primitives" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "auto_impl", + "futures", + "reth-chain-state", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-trie-common", + "serde", + "thiserror 2.0.18", + "tokio", +] + +[[package]] +name = "reth-engine-service" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "futures", + "pin-project", + "reth-chainspec", + "reth-consensus", + "reth-engine-primitives", + "reth-engine-tree", + "reth-ethereum-primitives", + "reth-evm", + "reth-network-p2p", + "reth-node-types", 
+ "reth-payload-builder", + "reth-provider", + "reth-prune", + "reth-stages-api", + "reth-tasks", + "reth-trie-db", +] + +[[package]] +name = "reth-engine-tree" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eip7928", + "alloy-eips", + "alloy-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "crossbeam-channel", + "dashmap", + "derive_more", + "fixed-cache", + "futures", + "metrics", + "moka", + "parking_lot", + "rayon", + "reth-chain-state", + "reth-chainspec", + "reth-consensus", + "reth-db", + "reth-engine-primitives", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-metrics", + "reth-network-p2p", + "reth-payload-builder", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-prune-types", + "reth-revm", + "reth-stages", + "reth-stages-api", + "reth-static-file", + "reth-tasks", + "reth-tracing", + "reth-trie", + "reth-trie-common", + "reth-trie-db", + "reth-trie-parallel", + "reth-trie-sparse", + "reth-trie-sparse-parallel", + "revm", + "revm-primitives", + "schnellru", + "smallvec", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-engine-util" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-rpc-types-engine", + "eyre", + "futures", + "itertools 0.14.0", + "pin-project", + "reth-chainspec", + "reth-engine-primitives", + "reth-engine-tree", + "reth-errors", + "reth-evm", + "reth-fs-util", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "serde", + "serde_json", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-era" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "ethereum_ssz", + "ethereum_ssz_derive", + "snap", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-era-downloader" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "bytes", + "eyre", + "futures-util", + "reqwest 0.12.28", + "reth-era", + "reth-fs-util", + "sha2", + "tokio", +] + +[[package]] +name = "reth-era-utils" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "eyre", + "futures-util", + "reth-db-api", + "reth-era", + "reth-era-downloader", + "reth-etl", + "reth-fs-util", + "reth-primitives-traits", + "reth-provider", + "reth-stages-types", + "reth-storage-api", + "tokio", + "tracing", +] + +[[package]] +name = "reth-errors" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "reth-consensus", + "reth-execution-errors", + "reth-storage-errors", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-eth-wire" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-chains", + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "bytes", + "derive_more", + "futures", + "pin-project", + "reth-codecs", + "reth-ecies", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-metrics", + "reth-network-peers", + "reth-primitives-traits", + "serde", + "snap", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-eth-wire-types" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-hardforks", + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "bytes", + "derive_more", + "proptest", + "proptest-arbitrary-interop", + "reth-chainspec", + "reth-codecs-derive", + "reth-ethereum-primitives", + "reth-primitives-traits", + "serde", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-ethereum" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "reth-chainspec", + "reth-cli-util", + "reth-codecs", + "reth-consensus", + "reth-consensus-common", + "reth-db", + "reth-engine-local", + "reth-eth-wire", + "reth-ethereum-cli", + "reth-ethereum-consensus", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-exex", + "reth-network", + "reth-network-api", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-ethereum", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-eth-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie", + "reth-trie-db", +] + +[[package]] +name = "reth-ethereum-cli" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "clap", + "eyre", + "reth-chainspec", + "reth-cli", + "reth-cli-commands", + "reth-cli-runner", + "reth-db", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-node-ethereum", + "reth-node-metrics", + "reth-rpc-server-types", + "reth-tracing", + "tracing", +] + +[[package]] +name = "reth-ethereum-consensus" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" 
+dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-execution-types", + "reth-primitives-traits", + "tracing", +] + +[[package]] +name = "reth-ethereum-engine-primitives" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "reth-engine-primitives", + "reth-ethereum-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "serde", + "sha2", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-ethereum-forks" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eip2124", + "alloy-hardforks", + "alloy-primitives", + "arbitrary", + "auto_impl", + "once_cell", + "rustc-hash", +] + +[[package]] +name = "reth-ethereum-payload-builder" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "reth-basic-payload-builder", + "reth-chainspec", + "reth-consensus-common", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-payload-validator", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-transaction-pool", + "revm", + "tracing", +] + +[[package]] +name = "reth-ethereum-primitives" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-serde", + "arbitrary", 
+ "modular-bitfield", + "reth-codecs", + "reth-primitives-traits", + "reth-zstd-compressors", + "serde", + "serde_with", +] + +[[package]] +name = "reth-etl" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "rayon", + "reth-db-api", + "tempfile", +] + +[[package]] +name = "reth-evm" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives", + "auto_impl", + "derive_more", + "futures-util", + "metrics", + "rayon", + "reth-execution-errors", + "reth-execution-types", + "reth-metrics", + "reth-primitives-traits", + "reth-storage-api", + "reth-storage-errors", + "reth-trie-common", + "revm", +] + +[[package]] +name = "reth-evm-ethereum" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives", + "alloy-rpc-types-engine", + "derive_more", + "parking_lot", + "reth-chainspec", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-primitives-traits", + "reth-storage-errors", + "revm", +] + +[[package]] +name = "reth-execution-errors" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-evm", + "alloy-primitives", + "alloy-rlp", + "nybbles", + "reth-storage-errors", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-execution-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives", + "derive_more", + "reth-ethereum-primitives", + 
"reth-primitives-traits", + "reth-trie-common", + "revm", + "serde", + "serde_with", +] + +[[package]] +name = "reth-exex" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "eyre", + "futures", + "itertools 0.14.0", + "metrics", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-config", + "reth-ethereum-primitives", + "reth-evm", + "reth-exex-types", + "reth-fs-util", + "reth-metrics", + "reth-node-api", + "reth-node-core", + "reth-payload-builder", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-revm", + "reth-stages-api", + "reth-tasks", + "reth-tracing", + "rmp-serde", + "thiserror 2.0.18", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-exex-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "reth-chain-state", + "reth-execution-types", + "reth-primitives-traits", + "serde", + "serde_with", +] + +[[package]] +name = "reth-fs-util" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-invalid-block-hooks" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-debug", + "eyre", + "futures", + "jsonrpsee", + "pretty_assertions", + "reth-engine-primitives", + "reth-evm", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc-api", + "reth-tracing", + "reth-trie", + "revm", + "revm-bytecode", + "revm-database", + "serde", + "serde_json", +] + +[[package]] 
+name = "reth-ipc" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "bytes", + "futures", + "futures-util", + "interprocess", + "jsonrpsee", + "pin-project", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tower 0.5.3", + "tracing", +] + +[[package]] +name = "reth-libmdbx" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "bitflags 2.10.0", + "byteorder", + "dashmap", + "derive_more", + "parking_lot", + "reth-mdbx-sys", + "smallvec", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "reth-mdbx-sys" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "bindgen 0.71.1", + "cc", +] + +[[package]] +name = "reth-metrics" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "futures", + "metrics", + "metrics-derive", + "tokio", + "tokio-util", +] + +[[package]] +name = "reth-net-banlist" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "ipnet", +] + +[[package]] +name = "reth-net-nat" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "futures-util", + "if-addrs 0.14.0", + "reqwest 0.12.28", + "serde_with", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-network" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "aquamarine", + "auto_impl", + 
"derive_more", + "discv5", + "enr", + "futures", + "itertools 0.14.0", + "metrics", + "parking_lot", + "pin-project", + "rand 0.8.5", + "rand 0.9.2", + "rayon", + "reth-chainspec", + "reth-consensus", + "reth-discv4", + "reth-discv5", + "reth-dns-discovery", + "reth-ecies", + "reth-eth-wire", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-evm-ethereum", + "reth-fs-util", + "reth-metrics", + "reth-net-banlist", + "reth-network-api", + "reth-network-p2p", + "reth-network-peers", + "reth-network-types", + "reth-primitives-traits", + "reth-storage-api", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", + "rustc-hash", + "schnellru", + "secp256k1 0.30.0", + "serde", + "smallvec", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-network-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rpc-types-admin", + "alloy-rpc-types-eth", + "auto_impl", + "derive_more", + "enr", + "futures", + "reth-eth-wire-types", + "reth-ethereum-forks", + "reth-network-p2p", + "reth-network-peers", + "reth-network-types", + "reth-tokio-util", + "serde", + "thiserror 2.0.18", + "tokio", + "tokio-stream", +] + +[[package]] +name = "reth-network-p2p" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "auto_impl", + "derive_more", + "futures", + "parking_lot", + "reth-consensus", + "reth-eth-wire-types", + "reth-ethereum-primitives", + "reth-network-peers", + "reth-network-types", + "reth-primitives-traits", + "reth-storage-errors", + "tokio", + "tracing", +] + +[[package]] +name = "reth-network-peers" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "enr", + "secp256k1 0.30.0", + "serde_with", + "thiserror 2.0.18", + "tokio", + "url", +] + +[[package]] +name = "reth-network-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eip2124", + "humantime-serde", + "reth-net-banlist", + "reth-network-peers", + "serde", + "serde_json", + "tracing", +] + +[[package]] +name = "reth-nippy-jar" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "anyhow", + "bincode 1.3.3", + "derive_more", + "lz4_flex", + "memmap2", + "reth-fs-util", + "serde", + "thiserror 2.0.18", + "tracing", + "zstd", +] + +[[package]] +name = "reth-node-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-rpc-types-engine", + "eyre", + "reth-basic-payload-builder", + "reth-consensus", + "reth-db-api", + "reth-engine-primitives", + "reth-evm", + "reth-network-api", + "reth-node-core", + "reth-node-types", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-provider", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", +] + +[[package]] +name = "reth-node-builder" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-provider", + "alloy-rpc-types", + "alloy-rpc-types-engine", + "aquamarine", + "eyre", + "fdlimit", + "futures", + "jsonrpsee", + "parking_lot", + "rayon", + "reth-basic-payload-builder", + "reth-chain-state", + "reth-chainspec", + "reth-config", + "reth-consensus", + 
"reth-consensus-debug-client", + "reth-db", + "reth-db-api", + "reth-db-common", + "reth-downloaders", + "reth-engine-local", + "reth-engine-primitives", + "reth-engine-service", + "reth-engine-tree", + "reth-engine-util", + "reth-evm", + "reth-exex", + "reth-fs-util", + "reth-invalid-block-hooks", + "reth-network", + "reth-network-api", + "reth-network-p2p", + "reth-node-api", + "reth-node-core", + "reth-node-ethstats", + "reth-node-events", + "reth-node-metrics", + "reth-payload-builder", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-engine-api", + "reth-rpc-eth-types", + "reth-rpc-layer", + "reth-stages", + "reth-static-file", + "reth-tasks", + "reth-tokio-util", + "reth-tracing", + "reth-transaction-pool", + "reth-trie-db", + "secp256k1 0.30.0", + "serde_json", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-node-core" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "clap", + "derive_more", + "dirs-next", + "eyre", + "futures", + "humantime", + "ipnet", + "rand 0.9.2", + "reth-chainspec", + "reth-cli-util", + "reth-config", + "reth-consensus", + "reth-db", + "reth-discv4", + "reth-discv5", + "reth-engine-local", + "reth-engine-primitives", + "reth-ethereum-forks", + "reth-net-banlist", + "reth-net-nat", + "reth-network", + "reth-network-p2p", + "reth-network-peers", + "reth-primitives-traits", + "reth-prune-types", + "reth-rpc-convert", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-stages-types", + "reth-storage-api", + "reth-storage-errors", + "reth-tracing", + "reth-tracing-otlp", + "reth-transaction-pool", + "secp256k1 0.30.0", + "serde", + "shellexpand", + "strum 0.27.2", + "thiserror 2.0.18", + "toml 0.8.23", + "tracing", + "url", + "vergen", + 
"vergen-git2", +] + +[[package]] +name = "reth-node-ethereum" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-network", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "eyre", + "reth-chainspec", + "reth-engine-local", + "reth-engine-primitives", + "reth-ethereum-consensus", + "reth-ethereum-engine-primitives", + "reth-ethereum-payload-builder", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-network", + "reth-node-api", + "reth-node-builder", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-tracing", + "reth-transaction-pool", + "revm", + "tokio", +] + +[[package]] +name = "reth-node-ethstats" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "chrono", + "futures-util", + "reth-chain-state", + "reth-network-api", + "reth-primitives-traits", + "reth-storage-api", + "reth-transaction-pool", + "serde", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tokio-tungstenite 0.26.2", + "tracing", + "url", +] + +[[package]] +name = "reth-node-events" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "derive_more", + "futures", + "humantime", + "pin-project", + "reth-engine-primitives", + "reth-network-api", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages", + "reth-static-file-types", + "reth-storage-api", + "tokio", + "tracing", +] + +[[package]] +name = "reth-node-metrics" +version = "1.10.2" 
+source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "bytes", + "eyre", + "http", + "http-body-util", + "jemalloc_pprof", + "jsonrpsee-server", + "mappings", + "metrics", + "metrics-exporter-prometheus 0.18.1", + "metrics-process", + "metrics-util 0.20.1", + "pprof_util", + "procfs 0.17.0", + "reqwest 0.12.28", + "reth-fs-util", + "reth-metrics", + "reth-tasks", + "tempfile", + "tikv-jemalloc-ctl", + "tokio", + "tower 0.5.3", + "tracing", +] + +[[package]] +name = "reth-node-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "reth-chainspec", + "reth-db-api", + "reth-engine-primitives", + "reth-payload-primitives", + "reth-primitives-traits", +] + +[[package]] +name = "reth-op" +version = "1.10.2" +dependencies = [ + "alloy-primitives", + "reth-chainspec", + "reth-cli-util", + "reth-codecs", + "reth-consensus", + "reth-consensus-common", + "reth-db", + "reth-engine-local", + "reth-eth-wire", + "reth-evm", + "reth-exex", + "reth-network", + "reth-network-api", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-optimism-chainspec", + "reth-optimism-cli", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-node", + "reth-optimism-primitives", + "reth-optimism-rpc", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-builder", + "reth-rpc-eth-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie", + "reth-trie-db", +] + +[[package]] +name = "reth-optimism-chainspec" +version = "1.10.2" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-hardforks", + "alloy-op-hardforks", + "alloy-primitives", + "derive_more", + "miniz_oxide 0.9.0", + "op-alloy-consensus", + "op-alloy-rpc-types", + "paste", + "reth-chainspec", + 
"reth-ethereum-forks", + "reth-network-peers", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "serde", + "serde_json", + "tar-no-std", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-optimism-cli" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "clap", + "derive_more", + "eyre", + "futures-util", + "op-alloy-consensus", + "proptest", + "reth-chainspec", + "reth-cli", + "reth-cli-commands", + "reth-cli-runner", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-db-common", + "reth-downloaders", + "reth-execution-types", + "reth-fs-util", + "reth-node-builder", + "reth-node-core", + "reth-node-events", + "reth-node-metrics", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-node", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-rpc-server-types", + "reth-stages", + "reth-static-file", + "reth-static-file-types", + "reth-tracing", + "serde", + "tempfile", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "reth-optimism-consensus" +version = "1.10.2" +dependencies = [ + "alloy-chains", + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-trie", + "op-alloy-consensus", + "reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-db-common", + "reth-execution-types", + "reth-optimism-chainspec", + "reth-optimism-forks", + "reth-optimism-node", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "reth-trie-common", + "revm", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "reth-optimism-evm" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-genesis", + "alloy-op-evm", + "alloy-primitives", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + 
"op-revm", + "reth-chainspec", + "reth-evm", + "reth-execution-errors", + "reth-execution-types", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-eth-api", + "reth-storage-errors", + "revm", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-optimism-flashblocks" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "brotli", + "derive_more", + "eyre", + "futures-util", + "metrics", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "reth-chain-state", + "reth-engine-primitives", + "reth-errors", + "reth-evm", + "reth-execution-types", + "reth-metrics", + "reth-optimism-payload-builder", + "reth-optimism-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-eth-types", + "reth-storage-api", + "reth-tasks", + "ringbuffer 0.16.0", + "serde_json", + "test-case", + "tokio", + "tokio-tungstenite 0.28.0", + "tracing", + "url", +] + +[[package]] +name = "reth-optimism-forks" +version = "1.10.2" +dependencies = [ + "alloy-op-hardforks", + "alloy-primitives", + "once_cell", + "reth-ethereum-forks", +] + +[[package]] +name = "reth-optimism-node" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-genesis", + "alloy-network", + "alloy-op-hardforks", + "alloy-primitives", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "clap", + "eyre", + "futures", + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-rpc-types-engine", + "op-revm", + "reth-chainspec", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-e2e-test-utils", + "reth-engine-local", + "reth-evm", + "reth-network", + "reth-node-api", + "reth-node-builder", + "reth-node-core", + "reth-optimism-chainspec", + "reth-optimism-consensus", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-node", + "reth-optimism-payload-builder", + 
"reth-optimism-primitives", + "reth-optimism-rpc", + "reth-optimism-storage", + "reth-optimism-txpool", + "reth-payload-builder", + "reth-payload-util", + "reth-primitives-traits", + "reth-provider", + "reth-revm", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-engine-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-stages-types", + "reth-tasks", + "reth-tracing", + "reth-transaction-pool", + "reth-trie-common", + "reth-trie-db", + "revm", + "serde", + "serde_json", + "tokio", + "url", +] + +[[package]] +name = "reth-optimism-payload-builder" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "derive_more", + "either", + "op-alloy-consensus", + "op-alloy-rpc-types-engine", + "reth-basic-payload-builder", + "reth-chainspec", + "reth-evm", + "reth-execution-types", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-optimism-txpool", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-payload-util", + "reth-payload-validator", + "reth-primitives-traits", + "reth-revm", + "reth-storage-api", + "reth-transaction-pool", + "revm", + "serde", + "sha2", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "reth-optimism-primitives" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "arbitrary", + "bincode 2.0.1", + "bytes", + "modular-bitfield", + "op-alloy-consensus", + "proptest", + "proptest-arbitrary-interop", + "rand 0.8.5", + "rand 0.9.2", + "reth-codecs", + "reth-primitives-traits", + "reth-zstd-compressors", + "rstest", + "secp256k1 0.31.1", + "serde", + "serde_json", + "serde_with", +] + +[[package]] +name = "reth-optimism-rpc" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-op-hardforks", + "alloy-primitives", + 
"alloy-rpc-client", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-transport", + "alloy-transport-http", + "async-trait", + "derive_more", + "eyre", + "futures", + "jsonrpsee", + "jsonrpsee-core", + "jsonrpsee-types", + "metrics", + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-rpc-jsonrpsee", + "op-alloy-rpc-types", + "op-alloy-rpc-types-engine", + "op-revm", + "reth-chain-state", + "reth-chainspec", + "reth-evm", + "reth-metrics", + "reth-node-api", + "reth-node-builder", + "reth-optimism-chainspec", + "reth-optimism-evm", + "reth-optimism-flashblocks", + "reth-optimism-forks", + "reth-optimism-payload-builder", + "reth-optimism-primitives", + "reth-optimism-txpool", + "reth-primitives-traits", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-engine-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "revm", + "serde_json", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tracing", +] + +[[package]] +name = "reth-optimism-storage" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "reth-codecs", + "reth-optimism-primitives", + "reth-prune-types", + "reth-stages-types", + "reth-storage-api", +] + +[[package]] +name = "reth-optimism-txpool" +version = "1.10.2" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-json-rpc", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types-eth", + "alloy-serde", + "c-kzg", + "derive_more", + "futures-util", + "metrics", + "op-alloy-consensus", + "op-alloy-flz", + "op-alloy-rpc-types", + "op-revm", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-evm", + "reth-metrics", + "reth-optimism-chainspec", + "reth-optimism-evm", + "reth-optimism-forks", + "reth-optimism-primitives", + "reth-primitives-traits", + "reth-provider", + "reth-storage-api", + "reth-transaction-pool", + "serde", + "thiserror 2.0.18", + "tokio", + 
"tracing", +] + +[[package]] +name = "reth-payload-builder" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rpc-types", + "futures-util", + "metrics", + "reth-chain-state", + "reth-ethereum-engine-primitives", + "reth-metrics", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-payload-builder-primitives" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "pin-project", + "reth-payload-primitives", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-payload-primitives" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "auto_impl", + "either", + "op-alloy-rpc-types-engine", + "reth-chain-state", + "reth-chainspec", + "reth-errors", + "reth-execution-types", + "reth-primitives-traits", + "reth-trie-common", + "serde", + "thiserror 2.0.18", + "tokio", +] + +[[package]] +name = "reth-payload-util" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "reth-transaction-pool", +] + +[[package]] +name = "reth-payload-validator" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-rpc-types-engine", + "reth-primitives-traits", +] + +[[package]] +name = "reth-primitives" +version = "1.10.2" +source = 
"git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "once_cell", + "reth-ethereum-forks", + "reth-ethereum-primitives", + "reth-primitives-traits", + "reth-static-file-types", +] + +[[package]] +name = "reth-primitives-traits" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-trie", + "arbitrary", + "auto_impl", + "byteorder", + "bytes", + "derive_more", + "modular-bitfield", + "once_cell", + "op-alloy-consensus", + "proptest", + "proptest-arbitrary-interop", + "rayon", + "reth-codecs", + "revm-bytecode", + "revm-primitives", + "revm-state", + "secp256k1 0.30.0", + "serde", + "serde_with", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-provider" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "dashmap", + "eyre", + "itertools 0.14.0", + "metrics", + "notify", + "parking_lot", + "rayon", + "reth-chain-state", + "reth-chainspec", + "reth-codecs", + "reth-db", + "reth-db-api", + "reth-errors", + "reth-ethereum-engine-primitives", + "reth-ethereum-primitives", + "reth-execution-types", + "reth-metrics", + "reth-nippy-jar", + "reth-node-types", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "reth-trie-db", + "revm-database", + "revm-state", + "rocksdb", + "strum 0.27.2", + "tokio", + "tracing", +] + +[[package]] +name = "reth-prune" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" 
+dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "itertools 0.14.0", + "metrics", + "rayon", + "reth-config", + "reth-db-api", + "reth-errors", + "reth-exex-types", + "reth-metrics", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "reth-storage-api", + "reth-tokio-util", + "rustc-hash", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-prune-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "arbitrary", + "derive_more", + "modular-bitfield", + "reth-codecs", + "serde", + "strum 0.27.2", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "reth-revm" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "reth-primitives-traits", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "revm", +] + +[[package]] +name = "reth-rpc" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-dyn-abi", + "alloy-eip7928", + "alloy-eips", + "alloy-evm", + "alloy-genesis", + "alloy-network", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-client", + "alloy-rpc-types", + "alloy-rpc-types-admin", + "alloy-rpc-types-beacon", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-serde", + "alloy-signer", + "alloy-signer-local", + "async-trait", + "derive_more", + "dyn-clone", + "futures", + "http", + "http-body", + "hyper", + "itertools 0.14.0", + "jsonrpsee", + "jsonrpsee-types", + "jsonwebtoken", + "parking_lot", + "pin-project", + "reth-chain-state", + 
"reth-chainspec", + "reth-consensus", + "reth-consensus-common", + "reth-engine-primitives", + "reth-errors", + "reth-ethereum-engine-primitives", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-execution-types", + "reth-metrics", + "reth-network-api", + "reth-network-peers", + "reth-network-types", + "reth-node-api", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-api", + "reth-rpc-convert", + "reth-rpc-engine-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie-common", + "revm", + "revm-inspectors", + "revm-primitives", + "serde", + "serde_json", + "sha2", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tracing", + "tracing-futures", +] + +[[package]] +name = "reth-rpc-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eip7928", + "alloy-eips", + "alloy-genesis", + "alloy-json-rpc", + "alloy-primitives", + "alloy-rpc-types", + "alloy-rpc-types-admin", + "alloy-rpc-types-anvil", + "alloy-rpc-types-beacon", + "alloy-rpc-types-debug", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-rpc-types-trace", + "alloy-rpc-types-txpool", + "alloy-serde", + "jsonrpsee", + "reth-chain-state", + "reth-engine-primitives", + "reth-network-peers", + "reth-rpc-eth-api", + "reth-trie-common", + "serde_json", +] + +[[package]] +name = "reth-rpc-builder" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-network", + "alloy-provider", + "dyn-clone", + "http", + "jsonrpsee", + "metrics", + "pin-project", + "reth-chain-state", + "reth-chainspec", + "reth-consensus", + "reth-engine-primitives", + "reth-evm", + "reth-ipc", + "reth-metrics", + "reth-network-api", + "reth-node-core", + 
"reth-primitives-traits", + "reth-rpc", + "reth-rpc-api", + "reth-rpc-eth-api", + "reth-rpc-eth-types", + "reth-rpc-layer", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-tokio-util", + "reth-transaction-pool", + "serde", + "thiserror 2.0.18", + "tokio", + "tokio-util", + "tower 0.5.3", + "tower-http", + "tracing", +] + +[[package]] +name = "reth-rpc-convert" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-evm", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-signer", + "auto_impl", + "dyn-clone", + "jsonrpsee-types", + "op-alloy-consensus", + "op-alloy-network", + "op-alloy-rpc-types", + "reth-ethereum-primitives", + "reth-evm", + "reth-primitives-traits", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-rpc-engine-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "async-trait", + "jsonrpsee-core", + "jsonrpsee-types", + "metrics", + "reth-chainspec", + "reth-engine-primitives", + "reth-metrics", + "reth-network-api", + "reth-payload-builder", + "reth-payload-builder-primitives", + "reth-payload-primitives", + "reth-primitives-traits", + "reth-rpc-api", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "serde", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-rpc-eth-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-dyn-abi", + "alloy-eips", + "alloy-evm", + "alloy-json-rpc", + "alloy-network", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-rpc-types-mev", + "alloy-serde", + "async-trait", + "auto_impl", 
+ "dyn-clone", + "futures", + "jsonrpsee", + "jsonrpsee-types", + "parking_lot", + "reth-chain-state", + "reth-chainspec", + "reth-errors", + "reth-evm", + "reth-network-api", + "reth-node-api", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-convert", + "reth-rpc-eth-types", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie-common", + "revm", + "revm-inspectors", + "tokio", + "tracing", +] + +[[package]] +name = "reth-rpc-eth-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-evm", + "alloy-network", + "alloy-primitives", + "alloy-rpc-client", + "alloy-rpc-types-eth", + "alloy-sol-types", + "alloy-transport", + "derive_more", + "futures", + "itertools 0.14.0", + "jsonrpsee-core", + "jsonrpsee-types", + "metrics", + "rand 0.9.2", + "reqwest 0.12.28", + "reth-chain-state", + "reth-chainspec", + "reth-errors", + "reth-ethereum-primitives", + "reth-evm", + "reth-execution-types", + "reth-metrics", + "reth-primitives-traits", + "reth-revm", + "reth-rpc-convert", + "reth-rpc-server-types", + "reth-storage-api", + "reth-tasks", + "reth-transaction-pool", + "reth-trie", + "revm", + "revm-inspectors", + "schnellru", + "serde", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", + "url", +] + +[[package]] +name = "reth-rpc-layer" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-rpc-types-engine", + "http", + "jsonrpsee-http-client", + "pin-project", + "tower 0.5.3", + "tower-http", + "tracing", +] + +[[package]] +name = "reth-rpc-server-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + 
"jsonrpsee-core", + "jsonrpsee-types", + "reth-errors", + "reth-network-api", + "serde", + "strum 0.27.2", +] + +[[package]] +name = "reth-stages" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "bincode 1.3.3", + "eyre", + "futures-util", + "itertools 0.14.0", + "num-traits", + "rayon", + "reqwest 0.12.28", + "reth-chainspec", + "reth-codecs", + "reth-config", + "reth-consensus", + "reth-db", + "reth-db-api", + "reth-era", + "reth-era-downloader", + "reth-era-utils", + "reth-ethereum-primitives", + "reth-etl", + "reth-evm", + "reth-execution-types", + "reth-exex", + "reth-fs-util", + "reth-network-p2p", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-prune-types", + "reth-revm", + "reth-stages-api", + "reth-static-file-types", + "reth-storage-api", + "reth-storage-errors", + "reth-testing-utils", + "reth-trie", + "reth-trie-db", + "tempfile", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-stages-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "aquamarine", + "auto_impl", + "futures-util", + "metrics", + "reth-consensus", + "reth-errors", + "reth-metrics", + "reth-network-p2p", + "reth-primitives-traits", + "reth-provider", + "reth-prune", + "reth-stages-types", + "reth-static-file", + "reth-static-file-types", + "reth-tokio-util", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-stages-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "arbitrary", + "bytes", + "modular-bitfield", + "reth-codecs", + "reth-trie-common", + "serde", +] + +[[package]] +name = 
"reth-static-file" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "parking_lot", + "rayon", + "reth-codecs", + "reth-db-api", + "reth-primitives-traits", + "reth-provider", + "reth-prune-types", + "reth-stages-types", + "reth-static-file-types", + "reth-storage-errors", + "reth-tokio-util", + "tracing", +] + +[[package]] +name = "reth-static-file-types" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "clap", + "derive_more", + "fixed-map", + "serde", + "strum 0.27.2", +] + +[[package]] +name = "reth-storage-api" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rpc-types-engine", + "auto_impl", + "reth-chainspec", + "reth-db-api", + "reth-db-models", + "reth-ethereum-primitives", + "reth-execution-types", + "reth-primitives-traits", + "reth-prune-types", + "reth-stages-types", + "reth-storage-errors", + "reth-trie-common", + "revm-database", + "serde_json", +] + +[[package]] +name = "reth-storage-errors" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "derive_more", + "reth-primitives-traits", + "reth-prune-types", + "reth-static-file-types", + "revm-database-interface", + "revm-state", + "thiserror 2.0.18", +] + +[[package]] +name = "reth-tasks" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "auto_impl", + "dyn-clone", + "futures-util", + "metrics", + "pin-project", + "rayon", + "reth-metrics", + "thiserror 2.0.18", + 
"tokio", + "tracing", + "tracing-futures", +] + +[[package]] +name = "reth-testing-utils" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-genesis", + "alloy-primitives", + "rand 0.8.5", + "rand 0.9.2", + "reth-ethereum-primitives", + "reth-primitives-traits", + "secp256k1 0.30.0", +] + +[[package]] +name = "reth-tokio-util" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-tracing" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "clap", + "eyre", + "reth-tracing-otlp", + "rolling-file", + "tracing", + "tracing-appender", + "tracing-journald", + "tracing-logfmt", + "tracing-samply", + "tracing-subscriber 0.3.22", + "tracing-tracy", + "tracy-client", +] + +[[package]] +name = "reth-tracing-otlp" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "clap", + "eyre", + "opentelemetry 0.31.0", + "opentelemetry-appender-tracing", + "opentelemetry-otlp 0.31.0", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk 0.31.0", + "tracing", + "tracing-opentelemetry 0.32.1", + "tracing-subscriber 0.3.22", + "url", +] + +[[package]] +name = "reth-transaction-pool" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "aquamarine", + "auto_impl", + "bitflags 2.10.0", + "futures-util", + "metrics", + "parking_lot", + "paste", + "pin-project", + "proptest", + "proptest-arbitrary-interop", + "rand 0.9.2", + 
"reth-chain-state", + "reth-chainspec", + "reth-eth-wire-types", + "reth-ethereum-primitives", + "reth-evm", + "reth-evm-ethereum", + "reth-execution-types", + "reth-fs-util", + "reth-metrics", + "reth-primitives-traits", + "reth-storage-api", + "reth-tasks", + "revm", + "revm-interpreter", + "revm-primitives", + "rustc-hash", + "schnellru", + "serde", + "serde_json", + "smallvec", + "thiserror 2.0.18", + "tokio", + "tokio-stream", + "tracing", +] + +[[package]] +name = "reth-trie" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-eips", + "alloy-primitives", + "alloy-rlp", + "alloy-trie", + "auto_impl", + "itertools 0.14.0", + "metrics", + "parking_lot", + "reth-execution-errors", + "reth-metrics", + "reth-primitives-traits", + "reth-stages-types", + "reth-storage-errors", + "reth-trie-common", + "reth-trie-sparse", + "revm-database", + "tracing", + "triehash", +] + +[[package]] +name = "reth-trie-common" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-consensus", + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-eth", + "alloy-serde", + "alloy-trie", + "arbitrary", + "arrayvec", + "bytes", + "derive_more", + "hash-db", + "itertools 0.14.0", + "nybbles", + "plain_hasher", + "rayon", + "reth-codecs", + "reth-primitives-traits", + "revm-database", + "serde", + "serde_with", +] + +[[package]] +name = "reth-trie-db" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "metrics", + "parking_lot", + "reth-db-api", + "reth-execution-errors", + "reth-metrics", + "reth-primitives-traits", + "reth-stages-types", + "reth-storage-api", + "reth-storage-errors", + "reth-trie", + "reth-trie-common", + "tracing", +] + +[[package]] +name 
= "reth-trie-parallel" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "crossbeam-channel", + "dashmap", + "derive_more", + "itertools 0.14.0", + "metrics", + "rayon", + "reth-execution-errors", + "reth-metrics", + "reth-primitives-traits", + "reth-provider", + "reth-storage-errors", + "reth-trie", + "reth-trie-common", + "reth-trie-sparse", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "reth-trie-sparse" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-trie", + "auto_impl", + "metrics", + "rayon", + "reth-execution-errors", + "reth-metrics", + "reth-primitives-traits", + "reth-trie-common", + "smallvec", + "tracing", +] + +[[package]] +name = "reth-trie-sparse-parallel" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-trie", + "metrics", + "rayon", + "reth-execution-errors", + "reth-metrics", + "reth-trie-common", + "reth-trie-sparse", + "smallvec", + "tracing", +] + +[[package]] +name = "reth-zstd-compressors" +version = "1.10.2" +source = "git+https://github.com/paradigmxyz/reth?rev=b3d532ce#b3d532ce9d09b925ebbce7bdf213d04c402e124c" +dependencies = [ + "zstd", +] + +[[package]] +name = "revm" +version = "34.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2aabdebaa535b3575231a88d72b642897ae8106cf6b0d12eafc6bfdf50abfc7" +dependencies = [ + "revm-bytecode", + "revm-context", + "revm-context-interface", + "revm-database", + "revm-database-interface", + "revm-handler", + "revm-inspector", + "revm-interpreter", + "revm-precompile", + "revm-primitives", + "revm-state", +] + +[[package]] +name = 
"revm-bytecode" +version = "8.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d1e5c1eaa44d39d537f668bc5c3409dc01e5c8be954da6c83370bbdf006457" +dependencies = [ + "bitvec", + "phf", + "revm-primitives", + "serde", +] + +[[package]] +name = "revm-context" +version = "13.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "892ff3e6a566cf8d72ffb627fdced3becebbd9ba64089c25975b9b028af326a5" +dependencies = [ + "bitvec", + "cfg-if", + "derive-where", + "revm-bytecode", + "revm-context-interface", + "revm-database-interface", + "revm-primitives", + "revm-state", + "serde", +] + +[[package]] +name = "revm-context-interface" +version = "14.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57f61cc6d23678c4840af895b19f8acfbbd546142ec8028b6526c53cc1c16c98" +dependencies = [ + "alloy-eip2930", + "alloy-eip7702", + "auto_impl", + "either", + "revm-database-interface", + "revm-primitives", + "revm-state", + "serde", +] + +[[package]] +name = "revm-database" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "529528d0b05fe646be86223032c3e77aa8b05caa2a35447d538c55965956a511" +dependencies = [ + "alloy-eips", + "revm-bytecode", + "revm-database-interface", + "revm-primitives", + "revm-state", + "serde", +] + +[[package]] +name = "revm-database-interface" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7bf93ac5b91347c057610c0d96e923db8c62807e03f036762d03e981feddc1d" +dependencies = [ + "auto_impl", + "either", + "revm-primitives", + "revm-state", + "serde", + "thiserror 2.0.18", +] + +[[package]] +name = "revm-handler" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cd0e43e815a85eded249df886c4badec869195e70cdd808a13cfca2794622d2" +dependencies = [ + "auto_impl", + "derive-where", + "revm-bytecode", + "revm-context", + 
"revm-context-interface", + "revm-database-interface", + "revm-interpreter", + "revm-precompile", + "revm-primitives", + "revm-state", + "serde", +] + +[[package]] +name = "revm-inspector" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f3ccad59db91ef93696536a0dbaf2f6f17cfe20d4d8843ae118edb7e97947ef" +dependencies = [ + "auto_impl", + "either", + "revm-context", + "revm-database-interface", + "revm-handler", + "revm-interpreter", + "revm-primitives", + "revm-state", + "serde", + "serde_json", +] + +[[package]] +name = "revm-inspectors" +version = "0.34.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e435414e9de50a1b930da602067c76365fea2fea11e80ceb50783c94ddd127f" +dependencies = [ + "alloy-primitives", + "alloy-rpc-types-eth", + "alloy-rpc-types-trace", + "alloy-sol-types", + "anstyle", + "boa_engine", + "boa_gc", + "colorchoice", + "revm", + "serde", + "serde_json", + "thiserror 2.0.18", +] + +[[package]] +name = "revm-interpreter" +version = "32.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11406408597bc249392d39295831c4b641b3a6f5c471a7c41104a7a1e3564c07" +dependencies = [ + "revm-bytecode", + "revm-context-interface", + "revm-primitives", + "revm-state", + "serde", +] + +[[package]] +name = "revm-precompile" +version = "32.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50c1285c848d240678bf69cb0f6179ff5a4aee6fc8e921d89708087197a0aff3" +dependencies = [ + "ark-bls12-381", + "ark-bn254", + "ark-ec", + "ark-ff 0.5.0", + "ark-serialize 0.5.0", + "arrayref", + "aurora-engine-modexp", + "blst", + "c-kzg", + "cfg-if", + "gmp-mpfr-sys", + "k256", + "p256", + "revm-primitives", + "ripemd", + "secp256k1 0.31.1", + "sha2", +] + +[[package]] +name = "revm-primitives" +version = "22.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ba580c56a8ec824a64f8a1683577876c2e1dbe5247044199e9b881421ad5dcf9" +dependencies = [ + "alloy-primitives", + "num_enum", + "once_cell", + "serde", +] + +[[package]] +name = "revm-state" +version = "9.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "311720d4f0f239b041375e7ddafdbd20032a33b7bae718562ea188e188ed9fd3" +dependencies = [ + "alloy-eip7928", + "bitflags 2.10.0", + "revm-bytecode", + "revm-primitives", + "serde", +] + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.17", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "ringbuffer" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3df6368f71f205ff9c33c076d170dd56ebf68e8161c733c0caa07a7a5509ed53" + +[[package]] +name = "ringbuffer" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57b0b88a509053cbfd535726dcaaceee631313cef981266119527a1d110f6d2b" + +[[package]] +name = "ripemd" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f" +dependencies = [ + "digest 0.10.7", +] + +[[package]] +name = "rkyv" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360b333c61ae24e5af3ae7c8660bd6b21ccd8200dbbc5d33c2454421e85b9c69" +dependencies = [ + "bytecheck", + "bytes", + "hashbrown 0.16.1", + "indexmap 2.13.0", + "munge", + "ptr_meta", + "rancor", + "rend", + "rkyv_derive", + 
"tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02f8cdd12b307ab69fe0acf4cd2249c7460eb89dce64a0febadf934ebb6a9e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "rlimit" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7043b63bd0cd1aaa628e476b80e6d4023a3b50eb32789f2728908107bd0c793a" +dependencies = [ + "libc", +] + +[[package]] +name = "rlp" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb919243f34364b6bd2fc10ef797edbfa75f33c252e7998527479c6d6b47e1ec" +dependencies = [ + "bytes", + "rustc-hex", +] + +[[package]] +name = "rmp" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ba8be72d372b2c9b35542551678538b562e7cf86c3315773cae48dfbfe7790c" +dependencies = [ + "num-traits", +] + +[[package]] +name = "rmp-serde" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72f81bee8c8ef9b577d1681a70ebbc962c232461e397b22c208c43c04b67a155" +dependencies = [ + "rmp", + "serde", +] + +[[package]] +name = "roaring" +version = "0.10.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e8d2cfa184d94d0726d650a9f4a1be7f9b76ac9fdb954219878dc00c1c1e7b" +dependencies = [ + "bytemuck", + "byteorder", +] + +[[package]] +name = "rocksdb" +version = "0.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddb7af00d2b17dbd07d82c0063e25411959748ff03e8d4f96134c2ff41fce34f" +dependencies = [ + "libc", + "librocksdb-sys", +] + +[[package]] +name = "rolling-file" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8395b4f860856b740f20a296ea2cd4d823e81a2658cf05ef61be22916026a906" +dependencies = [ + "chrono", +] + +[[package]] +name = 
"rollup-boost" +version = "0.7.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d1d7c635dec67c86346eb871e8a22dd1596c33d4a96a9a4926b4d2fd703b63" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", + "backoff", + "blake3", + "bytes", + "clap", + "dashmap", + "dotenvy", + "ed25519-dalek", + "eyre", + "futures", + "hex", + "http", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-util", + "jsonrpsee", + "lru 0.16.3", + "metrics", + "metrics-derive", + "metrics-exporter-prometheus 0.16.2", + "metrics-util 0.19.1", + "moka", + "op-alloy-rpc-types-engine", + "opentelemetry 0.28.0", + "opentelemetry-otlp 0.28.0", + "opentelemetry_sdk 0.28.0", + "parking_lot", + "paste", + "rollup-boost-types", + "rustls", + "serde", + "serde_json", + "sha2", + "thiserror 2.0.18", + "tokio", + "tokio-tungstenite 0.26.2", + "tokio-util", + "tower 0.5.3", + "tower-http", + "tracing", + "tracing-opentelemetry 0.29.0", + "tracing-subscriber 0.3.22", + "url", + "uuid", + "vergen", + "vergen-git2", +] + +[[package]] +name = "rollup-boost-types" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "756f32c7f241ab6d91d823e94d20f6e0729bfcaec3b545bd30f33b24e50f5821" +dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-rpc-types-engine", + "alloy-rpc-types-eth", + "alloy-serde", + "blake3", + "ed25519-dalek", + "futures", + "moka", + "op-alloy-rpc-types-engine", + "serde", + "serde_json", + "thiserror 2.0.18", + "tracing", +] + +[[package]] +name = "route-recognizer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" + +[[package]] +name = "rstest" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5a3193c063baaa2a95a33f03035c8a72b83d97a54916055ba22d35ed3839d49" 
+dependencies = [ + "futures-timer", + "futures-util", + "rstest_macros", +] + +[[package]] +name = "rstest_macros" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c845311f0ff7951c5506121a9ad75aec44d083c31583b2ea5a30bcb0b0abba0" +dependencies = [ + "cfg-if", + "glob", + "proc-macro-crate", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version 0.4.1", + "syn 2.0.114", + "unicode-ident", +] + +[[package]] +name = "rtnetlink" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a552eb82d19f38c3beed3f786bd23aa434ceb9ac43ab44419ca6d67a7e186c0" +dependencies = [ + "futures", + "log", + "netlink-packet-core", + "netlink-packet-route", + "netlink-packet-utils", + "netlink-proto", + "netlink-sys", + "nix", + "thiserror 1.0.69", + "tokio", +] + +[[package]] +name = "ruint" +version = "1.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" +dependencies = [ + "alloy-rlp", + "arbitrary", + "ark-ff 0.3.0", + "ark-ff 0.4.2", + "ark-ff 0.5.0", + "bytes", + "fastrlp 0.3.1", + "fastrlp 0.4.0", + "num-bigint", + "num-integer", + "num-traits", + "parity-scale-codec", + "primitive-types", + "proptest", + "rand 0.8.5", + "rand 0.9.2", + "rlp", + "ruint-macro", + "serde_core", + "valuable", + "zeroize", +] + +[[package]] +name = "ruint-macro" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48fd7bd8a6377e15ad9d42a8ec25371b94ddc67abe7c8b9127bec79bebaaae18" + +[[package]] +name = "rustc-demangle" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50b8869d9fc858ce7266cce0194bd74df58b9d0e3f6df3a9fc8eb470d95c09d" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +dependencies = [ + "rand 0.8.5", +] + +[[package]] +name = "rustc-hex" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e75f6a532d0fd9f7f13144f392b6ad56a32696bfcd9c78f797f16bbb6f072d6" + +[[package]] +name = "rustc_version" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a" +dependencies = [ + "semver 0.9.0", +] + +[[package]] +name = "rustc_version" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee" +dependencies = [ + "semver 0.11.0", +] + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver 1.0.27", +] + +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom", +] + +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" +dependencies = [ + "bitflags 2.10.0", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustix" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +dependencies = [ + "bitflags 2.10.0", + "errno", + "libc", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.2", +] + +[[package]] +name = "rustls" +version = "0.23.36" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +dependencies = [ + "aws-lc-rs", + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" +dependencies = [ + "openssl-probe 0.2.1", + "rustls-pki-types", + "schannel", + "security-framework 3.5.1", +] + +[[package]] +name = "rustls-pki-types" +version = "1.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-platform-verifier" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework 3.5.1", + "security-framework-sys", + "webpki-root-certs 0.26.11", + "windows-sys 0.59.0", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + +[[package]] +name = "rustls-webpki" +version = "0.103.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "rusty-fork" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc6bf79ff24e648f6da1f8d1f011e9cac26491b619e6b9280f2b47f1774e6ee2" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + +[[package]] +name = "rw-stream-sink" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" +dependencies = [ + "futures", + "pin-project", + "static_assertions", +] + +[[package]] +name = "ryu" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" + +[[package]] +name = "ryu-js" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd29631678d6fb0903b69223673e122c32e9ae559d0960a38d574695ebc0ea15" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "schannel" +version = "0.1.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a2b42f36aa1cd011945615b92222f6bf73c599a102a300334cd7f8dbeec726cc" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schnellru" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "356285bbf17bea63d9e52e96bd18f039672ac92b55b8cb997d6162a2a37d1649" +dependencies = [ + "ahash", + "cfg-if", + "hashbrown 0.13.2", +] + +[[package]] +name = "scoped-tls" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294" + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array", + "pkcs8", + "serdect", + "subtle", + "zeroize", +] + +[[package]] +name = "secp256k1" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b50c5943d326858130af85e049f2661ba3c78b26589b8ab98e65e80ae44a1252" +dependencies = [ + "bitcoin_hashes", + "rand 0.8.5", + "secp256k1-sys 0.10.1", + "serde", +] + +[[package]] +name = "secp256k1" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c3c81b43dc2d8877c216a3fccf76677ee1ebccd429566d3e67447290d0c42b2" +dependencies = [ + "bitcoin_hashes", + "rand 0.9.2", + "secp256k1-sys 0.11.0", + "serde", +] + +[[package]] +name = "secp256k1-sys" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9" +dependencies = [ + "cc", +] + +[[package]] +name = "secp256k1-sys" +version = "0.11.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb913707158fadaf0d8702c2db0e857de66eb003ccfdda5924b5f5ac98efb38" +dependencies = [ + "cc", +] + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.10.1", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d7eb9ef2c18661902cc47e535f9bc51b78acd254da71d375c2f6720d9a40403" +dependencies = [ + "semver-parser 0.7.0", +] + +[[package]] +name = "semver" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6" +dependencies = [ + "semver-parser 0.10.3", +] + +[[package]] +name = "semver" +version = "1.0.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" +dependencies = [ + "serde", + "serde_core", +] + +[[package]] +name = "semver-parser" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" + +[[package]] +name = "semver-parser" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9900206b54a3527fdc7b8a938bffd94a568bac4f4aa8113b209df75a09c0dec2" +dependencies = [ + "pest", +] + +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "serde_json" +version = "1.0.149" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" +dependencies = [ + "indexmap 2.13.0", + "itoa", + "memchr", + "serde", + "serde_core", + "zmij", +] + +[[package]] +name = "serde_regex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8136f1a4ea815d7eac4101cfd0b16dc0cb5e1fe1b8609dfd728058656b7badf" +dependencies = [ + 
"regex", + "serde", +] + +[[package]] +name = "serde_repr" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_with" +version = "3.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" +dependencies = [ + "base64 0.22.1", + "chrono", + "hex", + "indexmap 1.9.3", + "indexmap 2.13.0", + "schemars 0.9.0", + "schemars 1.2.1", + "serde_core", + "serde_json", + "serde_with_macros", + "time", +] + +[[package]] +name = "serde_with_macros" +version = "3.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" +dependencies = [ + "darling 0.21.3", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "serdect" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a84f14a19e9a014bb9f4512488d9829a68e04ecabffb0f9904cd1ace94598177" +dependencies = [ + "base16ct", + "serde", +] + 
+[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha2" +version = "0.10.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest 0.10.7", +] + +[[package]] +name = "sha3" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" +dependencies = [ + "digest 0.10.7", + "keccak", +] + +[[package]] +name = "sha3-asm" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b31139435f327c93c6038ed350ae4588e2c70a13d50599509fee6349967ba35a" +dependencies = [ + "cc", + "cfg-if", +] + +[[package]] +name = "sharded-slab" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "shellexpand" +version = "3.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b1fdf65dd6331831494dd616b30351c38e96e45921a27745cf98490458b90bb" +dependencies = [ + "dirs", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook" +version = "0.3.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d881a16cf4426aa584979d30bd82cb33429027e42122b169753d6ef1085ed6e2" +dependencies = [ + "libc", + "signal-hook-registry", +] + +[[package]] +name = "signal-hook-mio" +version = "0.2.5" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc" +dependencies = [ + "libc", + "mio", + "signal-hook", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" +dependencies = [ + "errno", + "libc", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +dependencies = [ + "digest 0.10.7", + "rand_core 0.6.4", +] + +[[package]] +name = "simd-adler32" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2" + +[[package]] +name = "simdutf8" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e" + +[[package]] +name = "similar" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" +dependencies = [ + "bstr", + "unicode-segmentation", +] + +[[package]] +name = "similar-asserts" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5b441962c817e33508847a22bd82f03a30cff43642dc2fae8b050566121eb9a" +dependencies = [ + "console", + "serde", + "similar", +] + +[[package]] +name = "simple_asn1" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" +dependencies = [ + "num-bigint", + "num-traits", + "thiserror 2.0.18", + "time", +] + +[[package]] +name = "siphasher" +version = "1.0.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + +[[package]] +name = "sketches-ddsketch" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" + +[[package]] +name = "slab" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" + +[[package]] +name = "small_btree" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ba60d2df92ba73864714808ca68c059734853e6ab722b40e1cf543ebb3a057a" +dependencies = [ + "arrayvec", +] + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" +dependencies = [ + "arbitrary", + "serde", +] + +[[package]] +name = "snap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b6b67fb9a61334225b5b790716f609cd58395f895b3fe8b328786812a40bc3b" + +[[package]] +name = "snow" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "850948bee068e713b8ab860fe1adc4d109676ab4c3b621fd8147f06b261f2f85" +dependencies = [ + "aes-gcm", + "blake2", + "chacha20poly1305", + "curve25519-dalek", + "rand_core 0.6.4", + "ring", + "rustc_version 0.4.1", + "sha2", + "subtle", +] + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "soketto" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" +dependencies = [ + "base64 0.22.1", + "bytes", + "futures", + "http", + "httparse", + "log", + "rand 0.8.5", + "sha1", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "spin" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591" +dependencies = [ + "lock_api", +] + +[[package]] +name = "spki" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +dependencies = [ + "base64ct", + "der", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" + +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + +[[package]] +name = "stringmetrics" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b3c8667cd96245cbb600b8dec5680a7319edd719c5aa2b5d23c6bff94f39765" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.26.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +dependencies = [ + "strum_macros 0.26.4", +] + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros 0.27.2", +] + +[[package]] +name = "strum_macros" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.114", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.114" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn-solidity" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2379beea9476b89d0237078be761cf8e012d92d5ae4ae0c9a329f974838870fc" +dependencies = [ + "paste", + "proc-macro2", + "quote", + "syn 2.0.114", +] 
+ +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "sysinfo" +version = "0.33.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fc858248ea01b66f19d8e8a6d55f41deaf91e9d495246fd01368d99935c6c01" +dependencies = [ + "core-foundation-sys", + "libc", + "memchr", + "ntapi", + "windows 0.57.0", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tabled" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e39a2ee1fbcd360805a771e1b300f78cc88fec7b8d3e2f71cd37bbf23e725c7d" +dependencies = [ + "papergrid", + "tabled_derive", + "testing_table", +] + +[[package]] +name = 
"tabled_derive" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ea5d1b13ca6cff1f9231ffd62f15eefd72543dab5e468735f1a456728a02846" +dependencies = [ + "heck", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "tabwriter" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fce91f2f0ec87dff7e6bcbbeb267439aa1188703003c6055193c821487400432" +dependencies = [ + "unicode-width 0.2.0", +] + +[[package]] +name = "tag_ptr" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0e973b34477b7823833469eb0f5a3a60370fef7a453e02d751b59180d0a5a05" + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tar" +version = "0.4.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d863878d212c87a19c1a610eb53bb01fe12951c0501cf5a0d65f724914a667a" +dependencies = [ + "filetime", + "libc", + "xattr", +] + +[[package]] +name = "tar-no-std" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "715f9a4586706a61c571cb5ee1c3ac2bbb2cf63e15bce772307b95befef5f5ee" +dependencies = [ + "bitflags 2.10.0", + "log", + "num-traits", +] + +[[package]] +name = "tempfile" +version = "3.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +dependencies = [ + "fastrand", + "getrandom 0.3.4", + "once_cell", + "rustix 1.1.3", + "windows-sys 0.61.2", +] + +[[package]] +name = "termtree" +version = "0.5.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" + +[[package]] +name = "test-case" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb2550dd13afcd286853192af8601920d959b14c401fcece38071d53bf0768a8" +dependencies = [ + "test-case-macros", +] + +[[package]] +name = "test-case-core" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adcb7fd841cd518e279be3d5a3eb0636409487998a4aff22f3de87b81e88384f" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "test-case-macros" +version = "3.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", + "test-case-core", +] + +[[package]] +name = "testing_table" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f8daae29995a24f65619e19d8d31dea5b389f3d853d8bf297bbf607cd0014cc" +dependencies = [ + "unicode-width 0.2.0", +] + +[[package]] +name = "thin-vec" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "144f754d318415ac792f9d69fc87abbbfc043ce2ef041c60f16ad828f638717d" + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" +dependencies = [ + "thiserror-impl 2.0.18", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "thread_local" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f60246a4944f24f6e018aa17cdeffb7818b76356965d03b07d6a9886e8962185" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "threadpool" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" +dependencies = [ + "num_cpus", +] + +[[package]] +name = "tikv-jemalloc-ctl" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "661f1f6a57b3a36dc9174a2c10f19513b4866816e13425d3e418b11cc37bc24c" +dependencies = [ + "libc", + "paste", + "tikv-jemalloc-sys", +] + +[[package]] +name = "tikv-jemalloc-sys" +version = "0.6.1+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd8aa5b2ab86a2cefa406d889139c162cbb230092f7d1d7cbc1716405d852a3b" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "tikv-jemallocator" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0359b4327f954e0567e69fb191cf1436617748813819c94b8cd4a431422d053a" +dependencies = [ + "libc", + "tikv-jemalloc-sys", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" +dependencies = [ + "deranged", + 
"itoa", + "js-sys", + "libc", + "num-conv", + "num_threads", + "powerfmt", + "serde_core", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" + +[[package]] +name = "time-macros" +version = "0.2.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" +dependencies = [ + "displaydoc", + "serde_core", + "zerovec", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "tinyvec" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.49.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +dependencies = [ + "bytes", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "socket2 0.6.2", + "tokio-macros", + "windows-sys 0.61.2", +] + +[[package]] +name = "tokio-macros" +version = "2.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32da49809aab5c3bc678af03902d4ccddea2a87d028d86392a4b1560c6906c70" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", + "tokio-util", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +dependencies = [ + "futures-util", + "log", + "native-tls", + "rustls", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tungstenite 0.26.2", + "webpki-roots 0.26.11", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "rustls", + "rustls-native-certs", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tungstenite 0.28.0", +] + +[[package]] +name = "tokio-util" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" 
+dependencies = [ + "bytes", + "futures-core", + "futures-io", + "futures-sink", + "pin-project-lite", + "slab", + "tokio", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_edit 0.22.27", +] + +[[package]] +name = "toml" +version = "0.9.11+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" +dependencies = [ + "indexmap 2.13.0", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.5+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" +dependencies = [ + "serde_core", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap 2.13.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.11", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_edit" +version = "0.23.10+spec-1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +dependencies = [ + "indexmap 2.13.0", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = 
"1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +dependencies = [ + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum", + "base64 0.22.1", + "bytes", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost 0.13.5", + "socket2 0.5.10", + "tokio", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a" +dependencies = [ + "async-trait", + "base64 0.22.1", + "bytes", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "sync_wrapper", + "tokio", + "tokio-stream", + "tower 0.5.3", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tonic-prost" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0" +dependencies = [ + "bytes", + "prost 0.14.3", + "tonic 0.14.3", +] + +[[package]] +name = "tower" +version = 
"0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "indexmap 1.9.3", + "pin-project", + "pin-project-lite", + "rand 0.8.5", + "slab", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebe5ef63511595f1344e2d5cfa636d973292adc0eec1f0ad45fae9f0851ab1d4" +dependencies = [ + "futures-core", + "futures-util", + "hdrhistogram", + "indexmap 2.13.0", + "pin-project-lite", + "slab", + "sync_wrapper", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-http" +version = "0.6.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" +dependencies = [ + "async-compression", + "base64 0.22.1", + "bitflags 2.10.0", + "bytes", + "futures-core", + "futures-util", + "http", + "http-body", + "http-body-util", + "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", + "pin-project-lite", + "tokio", + "tokio-util", + "tower 0.5.3", + "tower-layer", + "tower-service", + "tracing", + "uuid", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" 
+dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-appender" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" +dependencies = [ + "crossbeam-channel", + "thiserror 2.0.18", + "time", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "tracing-core" +version = "0.1.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-journald" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d3a81ed245bfb62592b1e2bc153e77656d94ee6a0497683a65a12ccaf2438d0" +dependencies = [ + "libc", + "tracing-core", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-logfmt" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b1f47d22deb79c3f59fcf2a1f00f60cbdc05462bf17d1cd356c1fefa3f444bd" +dependencies = [ + "time", + "tracing", + 
"tracing-core", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "721f2d2569dce9f3dfbbddee5906941e953bfcdf736a62da3377f5751650cc36" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry 0.28.0", + "opentelemetry_sdk 0.28.0", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber 0.3.22", + "web-time", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.32.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ac28f2d093c6c477eaa76b23525478f38de514fa9aeb1285738d4b97a9552fc" +dependencies = [ + "js-sys", + "opentelemetry 0.31.0", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber 0.3.22", + "web-time", +] + +[[package]] +name = "tracing-samply" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c175f7ecc002b6ef04776a39f440503e4e788790ddbdbfac8259b7a069526334" +dependencies = [ + "cfg-if", + "itoa", + "libc", + "mach2", + "memmap2", + "smallvec", + "tracing-core", + "tracing-subscriber 0.3.22", +] + +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.2.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e0d2eaa99c3c2e41547cfa109e910a68ea03823cccad4a0525dcbc9b01e8c71" +dependencies = [ + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f30143827ddab0d256fd843b7a66d164e9f271cfa0dde49142c5ca0ca291f1e" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex-automata", + 
"serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "tracing-tracy" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eaa1852afa96e0fe9e44caa53dc0bd2d9d05e0f2611ce09f97f8677af56e4ba" +dependencies = [ + "tracing-core", + "tracing-subscriber 0.3.22", + "tracy-client", +] + +[[package]] +name = "tracy-client" +version = "0.18.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4f6fc3baeac5d86ab90c772e9e30620fc653bf1864295029921a15ef478e6a5" +dependencies = [ + "loom", + "once_cell", + "rustc-demangle", + "tracy-client-sys", +] + +[[package]] +name = "tracy-client-sys" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5f7c95348f20c1c913d72157b3c6dee6ea3e30b3d19502c5a7f6d3f160dacbf" +dependencies = [ + "cc", + "windows-targets 0.52.6", +] + +[[package]] +name = "tree_hash" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee44f4cef85f88b4dea21c0b1f58320bdf35715cf56d840969487cff00613321" +dependencies = [ + "alloy-primitives", + "ethereum_hashing", + "ethereum_ssz", + "smallvec", + "typenum", +] + +[[package]] +name = "tree_hash_derive" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bee2ea1551f90040ab0e34b6fb7f2fa3bad8acc925837ac654f2c78a13e3089" +dependencies = [ + "darling 0.20.11", + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "triehash" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1631b201eb031b563d2e85ca18ec8092508e262a3196ce9bd10a67ec87b9f5c" +dependencies = [ + "hash-db", + "rlp", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "tungstenite" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +dependencies = [ + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "native-tls", + "rand 0.9.2", + "rustls", + "rustls-pki-types", + "sha1", + "thiserror 2.0.18", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand 0.9.2", + "rustls", + "rustls-pki-types", + "sha1", + "thiserror 2.0.18", + "utf-8", +] + +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + +[[package]] +name = "typenum" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "ucd-trie" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" + +[[package]] +name = "uint" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76f64bba2c53b04fcab63c01a7d7427eadc821e3bc48c34dc9ba29c501164b52" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] +name = "uint" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + +[[package]] 
+name = "unarray" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" + +[[package]] +name = "unicase" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" + +[[package]] +name = "unicode-ident" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" + +[[package]] +name = "unicode-segmentation" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" + +[[package]] +name = "unicode-truncate" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" +dependencies = [ + "itertools 0.13.0", + "unicode-segmentation", + "unicode-width 0.1.14", +] + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "unicode-width" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "universal-hash" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" +dependencies = [ + "crypto-common", + "subtle", +] + +[[package]] +name = "unsigned-varint" 
+version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" + +[[package]] +name = "unsigned-varint" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb066959b24b5196ae73cb057f45598450d2c5f71460e98c49b738086eff9c06" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "unty" +version = "0.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" + +[[package]] +name = "url" +version = "2.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", + "serde", + "serde_derive", +] + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf16_iter" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + +[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "uuid" +version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f" +dependencies = [ + "getrandom 0.3.4", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "valuable" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "vergen" +version = "9.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b849a1f6d8639e8de261e81ee0fc881e3e3620db1af9f2e0da015d4382ceaf75" +dependencies = [ + "anyhow", + "cargo_metadata", + "derive_builder", + "regex", + "rustversion", + "time", + "vergen-lib", +] + +[[package]] +name = "vergen-git2" +version = "9.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51ab55ddf1188c8d679f349775362b0fa9e90bd7a4ac69838b2a087623f0d57" +dependencies = [ + "anyhow", + "derive_builder", + "git2", + "rustversion", + "time", + "vergen", + "vergen-lib", +] + +[[package]] +name = "vergen-lib" +version = "9.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b34a29ba7e9c59e62f229ae1932fb1b8fb8a6fdcc99215a641913f5f5a59a569" +dependencies = [ + "anyhow", + "derive_builder", + "rustversion", +] + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "virtue" +version = "0.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" + +[[package]] +name = "visibility" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasip2" +version = "1.0.2+wasi-0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9517f9239f02c069db75e65f174b3da828fe5f5b945c4dd26bd25d89c03ebcf5" +dependencies = [ + "wit-bindgen", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" +dependencies = [ + "cfg-if", + "futures-util", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version 
= "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +dependencies = [ + "bumpalo", + "proc-macro2", + "quote", + "syn 2.0.114", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.108" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "wasm-streams" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" +dependencies = [ + "futures-util", + "js-sys", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wasmtimer" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c598d6b99ea013e35844697fc4670d08339d5cda15588f193c6beedd12f644b" +dependencies = [ + "futures", + "js-sys", + "parking_lot", + "pin-utils", + "slab", + "wasm-bindgen", +] + +[[package]] +name = "web-sys" +version = "0.3.85" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-root-certs" +version = "0.26.11" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" +dependencies = [ + "webpki-root-certs 1.0.5", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "webpki-roots" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" +dependencies = [ + "webpki-roots 1.0.5", +] + +[[package]] +name = "webpki-roots" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "widestring" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" +dependencies = [ + "windows-sys 0.61.2", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efc5cf48f83140dcaab716eeaea345f9e93d0018fb81162753a3f76c3397b538" +dependencies = [ + "windows-core 0.53.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" +dependencies = [ + "windows-core 0.57.0", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" +dependencies = [ + "windows-collections", + "windows-core 0.62.2", + "windows-future", + "windows-numerics", +] + +[[package]] +name = "windows-collections" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" +dependencies = [ + "windows-core 0.62.2", +] + +[[package]] +name = "windows-core" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dcc5b895a6377f1ab9fa55acedab1fd5ac0db66ad1e6c7f47e28a22e446a5dd" +dependencies = [ + "windows-result 0.1.2", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" +dependencies = [ + "windows-implement 0.57.0", + "windows-interface 0.57.0", + "windows-result 0.1.2", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement 0.60.2", + "windows-interface 0.59.3", + "windows-link", + "windows-result 0.4.1", + "windows-strings", +] + +[[package]] +name = "windows-future" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" +dependencies = [ + "windows-core 0.62.2", + "windows-link", + "windows-threading", +] + +[[package]] +name = "windows-implement" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "windows-interface" +version = "0.57.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-numerics" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" +dependencies = [ + "windows-core 0.62.2", + "windows-link", +] + +[[package]] +name = "windows-registry" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" +dependencies = [ + "windows-link", + "windows-result 0.4.1", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-result" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + 
"windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", +] + +[[package]] +name = "windows-threading" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = 
"windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" + +[[package]] +name = "winnow" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +dependencies = [ + "memchr", +] + +[[package]] +name = "winreg" +version = "0.50.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "wit-bindgen" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" + +[[package]] +name = "write16" +version = "1.0.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + +[[package]] +name = "writeable" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" + +[[package]] +name = "ws_stream_wasm" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c173014acad22e83f16403ee360115b38846fe754e735c5d9d3803fe70c6abc" +dependencies = [ + "async_io_stream", + "futures", + "js-sys", + "log", + "pharos", + "rustc_version 0.4.1", + "send_wrapper 0.6.0", + "thiserror 2.0.18", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "x25519-dalek" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7e468321c81fb07fa7f4c636c3972b9100f0346e5b6a9f2bd0603a52f7ed277" +dependencies = [ + "curve25519-dalek", + "rand_core 0.6.4", + "serde", + "zeroize", +] + +[[package]] +name = "x509-parser" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4569f339c0c402346d4a75a9e39cf8dad310e287eef1ff56d4c68e5067f53460" +dependencies = [ + "asn1-rs", + "data-encoding", + "der-parser", + "lazy_static", + "nom", + "oid-registry", + "rusticata-macros", + "thiserror 2.0.18", + "time", +] + +[[package]] +name = "xattr" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156" +dependencies = [ + "libc", + "rustix 1.1.3", +] + +[[package]] +name = "xml-rs" +version = "0.8.28" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" + +[[package]] +name = "xmltree" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" +dependencies = [ + "xml-rs", +] + +[[package]] +name = "xsum" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0637d3a5566a82fa5214bae89087bc8c9fb94cd8e8a3c07feb691bb8d9c632db" + +[[package]] +name = "yamux" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed0164ae619f2dc144909a9f082187ebb5893693d8c0196e8085283ccd4b776" +dependencies = [ + "futures", + "log", + "nohash-hasher", + "parking_lot", + "pin-project", + "rand 0.8.5", + "static_assertions", +] + +[[package]] +name = "yamux" +version = "0.13.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "deab71f2e20691b4728b349c6cee8fc7223880fa67b6b4f92225ec32225447e5" +dependencies = [ + "futures", + "log", + "nohash-hasher", + "parking_lot", + "pin-project", + "rand 0.9.2", + "static_assertions", + "web-time", +] + +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "time", +] + +[[package]] +name = "yoke" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" +dependencies = [ + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", + "synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7456cf00f0685ad319c5b1693f291a650eaf345e941d082fc4e03df8a03996ac" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1328722bbf2115db7e19d69ebcc15e795719e2d66b60827c6a69a117365e37a0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" +dependencies = [ + "zeroize_derive", +] + +[[package]] +name = "zeroize_derive" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "zerotrie" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = 
"zerovec" +version = "0.11.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" +dependencies = [ + "serde", + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.114", +] + +[[package]] +name = "zmij" +version = "1.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff05f8caa9038894637571ae6b9e29466c1f4f829d26c9b28f869a29cbe3445" + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.16+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/rust/Cargo.toml b/rust/Cargo.toml new file mode 100644 index 00000000000..798261865bd --- /dev/null +++ b/rust/Cargo.toml @@ -0,0 +1,666 @@ +[workspace.package] +edition = "2024" +license = "MIT OR Apache-2.0" +rust-version = "1.88" +authors = ["Op Stack Contributors"] +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["**/target"] +keywords = ["ethereum", "optimism", "crypto"] +categories = ["cryptography", "cryptography::cryptocurrencies"] + +[workspace] 
+resolver = "2" +members = [ + # Kona + "kona/bin/*", + "kona/crates/proof/*", + "kona/crates/node/*", + "kona/crates/supervisor/*", + "kona/crates/protocol/*", + "kona/crates/providers/*", + "kona/crates/utilities/*", + "kona/examples/*", + + # Op-Alloy + "op-alloy/crates/*", + + # Op-Reth + "op-reth/bin/", + "op-reth/crates/chainspec/", + "op-reth/crates/cli/", + "op-reth/crates/consensus/", + "op-reth/crates/evm/", + "op-reth/crates/flashblocks/", + "op-reth/crates/hardforks/", + "op-reth/crates/node/", + "op-reth/crates/payload/", + "op-reth/crates/primitives/", + "op-reth/crates/reth/", + "op-reth/crates/rpc/", + "op-reth/crates/storage/", + "op-reth/crates/txpool/", + "op-reth/examples/*", + + # Alloy OP EVM + "alloy-op-evm/", + + # Alloy OP Hardforks + "alloy-op-hardforks/", +] +default-members = [ + "kona/bin/host", + "kona/bin/client", + "kona/bin/node", + "kona/bin/supervisor", + "op-reth/bin/", +] + +# ==================== WORKSPACE METADATA ==================== +[workspace.metadata.cargo-udeps.ignore] +normal = ["rustls-platform-verifier"] + +[workspace.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +# ==================== WORKSPACE LINTS ==================== +[workspace.lints.rust] +missing-debug-implementations = "warn" +missing-docs = "warn" +unreachable-pub = "warn" +unused-must-use = "deny" +rust-2018-idioms = { level = "deny", priority = -1 } +unnameable-types = "warn" +rust-2024-incompatible-pat = "warn" + +[workspace.lints.rustdoc] +all = "warn" + +[workspace.lints.clippy] +# warn-level lints +all = { level = "warn", priority = -1 } +borrow_as_ptr = "warn" +branches_sharing_code = "warn" +clear_with_drain = "warn" +cloned_instead_of_copied = "warn" +collection_is_never_read = "warn" +dbg_macro = "warn" +derive_partial_eq_without_eq = "warn" +doc_markdown = "warn" +empty_line_after_doc_comments = "warn" +empty_line_after_outer_attr = "warn" +enum_glob_use = "warn" +equatable_if_let = "warn" +explicit_into_iter_loop = 
"warn" +explicit_iter_loop = "warn" +flat_map_option = "warn" +from_iter_instead_of_collect = "warn" +if_not_else = "warn" +if_then_some_else_none = "warn" +implicit_clone = "warn" +imprecise_flops = "warn" +iter_on_empty_collections = "warn" +iter_on_single_items = "warn" +iter_with_drain = "warn" +iter_without_into_iter = "warn" +large_stack_frames = "warn" +manual_assert = "warn" +manual_clamp = "warn" +manual_is_variant_and = "warn" +manual_string_new = "warn" +match_same_arms = "warn" +missing-const-for-fn = "warn" +mutex_integer = "warn" +naive_bytecount = "warn" +needless_bitwise_bool = "warn" +needless_continue = "warn" +needless_for_each = "warn" +needless_pass_by_ref_mut = "warn" +nonstandard_macro_braces = "warn" +option_as_ref_cloned = "warn" +option-if-let-else = "warn" +or_fun_call = "warn" +path_buf_push_overwrite = "warn" +read_zero_byte_vec = "warn" +redundant-clone = "warn" +redundant_else = "warn" +single_char_pattern = "warn" +string_lit_as_bytes = "warn" +string_lit_chars_any = "warn" +suboptimal_flops = "warn" +suspicious_operation_groupings = "warn" +trailing_empty_array = "warn" +trait_duplication_in_bounds = "warn" +transmute_undefined_repr = "warn" +trivial_regex = "warn" +tuple_array_conversions = "warn" +type_repetition_in_bounds = "warn" +uninhabited_references = "warn" +unnecessary_self_imports = "warn" +unnecessary_struct_initialization = "warn" +unnested_or_patterns = "warn" +unused_peekable = "warn" +unused_rounding = "warn" +use-self = "warn" +useless_let_if_seq = "warn" +while_float = "warn" +zero_sized_map_values = "warn" + +# allow-level lints +as_ptr_cast_mut = "allow" +cognitive_complexity = "allow" +debug_assert_with_mut_call = "allow" +fallible_impl_from = "allow" +future_not_send = "allow" +needless_collect = "allow" +non_send_fields_in_send_ty = "allow" +redundant_pub_crate = "allow" +result_large_err = "allow" +significant_drop_in_scrutinee = "allow" +significant_drop_tightening = "allow" +too_long_first_doc_paragraph = 
"allow" + +# ==================== PROFILES ==================== +[profile.dev] +opt-level = 1 +overflow-checks = false +debug = "line-tables-only" +split-debuginfo = "unpacked" + +[profile.dev.package] +proptest.opt-level = 3 +rand_chacha.opt-level = 3 +rand_xorshift.opt-level = 3 +unarray.opt-level = 3 + +[profile.bench] +debug = true + +[profile.dev-client] +inherits = "dev" +panic = "abort" + +[profile.release] +opt-level = 3 +lto = "thin" +debug = "none" +strip = "symbols" +panic = "unwind" +codegen-units = 16 + +[profile.release-client-lto] +inherits = "release" +panic = "abort" +codegen-units = 1 +lto = "fat" +strip = "none" # cannon load-elf requires the symbol section + +[profile.release-perf] +inherits = "release" +lto = "fat" +codegen-units = 1 + +[profile.hivetests] +inherits = "test" +opt-level = 3 +lto = "thin" + +[profile.profiling] +inherits = "release" +debug = "full" +strip = "none" + +[profile.maxperf] +inherits = "release" +lto = "fat" +codegen-units = 1 + +[profile.reproducible] +inherits = "release" +panic = "abort" +codegen-units = 1 +incremental = false + +# ==================== WORKSPACE DEPENDENCIES ==================== +[workspace.dependencies] +# ==================== KONA INTERNAL CRATES ==================== +# Binaries +kona-host = { path = "kona/bin/host", version = "1.0.2", default-features = false } +kona-client = { path = "kona/bin/client", version = "1.0.2", default-features = false } + +# Protocol +kona-comp = { path = "kona/crates/batcher/comp", version = "0.4.5", default-features = false } +kona-derive = { path = "kona/crates/protocol/derive", version = "0.4.5", default-features = false } +kona-interop = { path = "kona/crates/protocol/interop", version = "0.4.5", default-features = false } +kona-genesis = { path = "kona/crates/protocol/genesis", version = "0.4.5", default-features = false } +kona-protocol = { path = "kona/crates/protocol/protocol", version = "0.4.5", default-features = false } +kona-registry = { path = 
"kona/crates/protocol/registry", version = "0.4.5", default-features = false } +kona-hardforks = { path = "kona/crates/protocol/hardforks", version = "0.4.5", default-features = false } + +# Node +kona-rpc = { path = "kona/crates/node/rpc", version = "0.3.2", default-features = false } +kona-peers = { path = "kona/crates/node/peers", version = "0.1.2", default-features = false } +kona-engine = { path = "kona/crates/node/engine", version = "0.1.2", default-features = false } +kona-sources = { path = "kona/crates/node/sources", version = "0.1.2", default-features = false } +kona-node-service = { path = "kona/crates/node/service", version = "0.1.3", default-features = false } +kona-disc = { path = "kona/crates/node/disc", version = "0.1.2", default-features = false } +kona-gossip = { path = "kona/crates/node/gossip", version = "0.1.2", default-features = false } + +# Supervisor +kona-supervisor-rpc = { path = "kona/crates/supervisor/rpc", version = "0.1.1", default-features = false } +kona-supervisor-core = { path = "kona/crates/supervisor/core", version = "0.1.0", default-features = false } +kona-supervisor-service = { path = "kona/crates/supervisor/service", version = "0.1.0", default-features = false } +kona-supervisor-types = { path = "kona/crates/supervisor/types", version = "0.1.1", default-features = false } +kona-supervisor-storage = { path = "kona/crates/supervisor/storage", version = "0.1.0", default-features = false } +kona-supervisor-metrics = { path = "kona/crates/supervisor/metrics", version = "0.1.0", default-features = false } + +# Providers +kona-providers-alloy = { path = "kona/crates/providers/providers-alloy", version = "0.3.3", default-features = false } +kona-providers-local = { path = "kona/crates/providers/providers-local", version = "0.1.0", default-features = false } + +# Proof +kona-driver = { path = "kona/crates/proof/driver", version = "0.4.0", default-features = false } +kona-mpt = { path = "kona/crates/proof/mpt", version = "0.3.0", 
default-features = false } +kona-proof = { path = "kona/crates/proof/proof", version = "0.3.0", default-features = false } +kona-executor = { path = "kona/crates/proof/executor", version = "0.4.0", default-features = false } +kona-std-fpvm = { path = "kona/crates/proof/std-fpvm", version = "0.2.0", default-features = false } +kona-preimage = { path = "kona/crates/proof/preimage", version = "0.3.0", default-features = false } +kona-std-fpvm-proc = { path = "kona/crates/proof/std-fpvm-proc", version = "0.2.0", default-features = false } +kona-proof-interop = { path = "kona/crates/proof/proof-interop", version = "0.2.0", default-features = false } + +# Utilities +kona-cli = { path = "kona/crates/utilities/cli", version = "0.3.2", default-features = false } +kona-serde = { path = "kona/crates/utilities/serde", version = "0.2.2", default-features = false } +kona-macros = { path = "kona/crates/utilities/macros", version = "0.1.2", default-features = false } + +# ==================== OP-RETH INTERNAL CRATES ==================== +op-reth = { path = "op-reth/bin/" } +reth-optimism-chainspec = { path = "op-reth/crates/chainspec/", default-features = false } +reth-optimism-cli = { path = "op-reth/crates/cli/", default-features = false } +reth-optimism-consensus = { path = "op-reth/crates/consensus/", default-features = false } +reth-optimism-evm = { path = "op-reth/crates/evm/", default-features = false } +reth-optimism-flashblocks = { path = "op-reth/crates/flashblocks/" } +reth-optimism-forks = { path = "op-reth/crates/hardforks/", default-features = false } +reth-optimism-node = { path = "op-reth/crates/node/" } +reth-optimism-payload-builder = { path = "op-reth/crates/payload/" } +reth-optimism-primitives = { path = "op-reth/crates/primitives/", default-features = false } +reth-op = { path = "op-reth/crates/reth/", default-features = false } +reth-optimism-rpc = { path = "op-reth/crates/rpc/" } +reth-optimism-storage = { path = "op-reth/crates/storage/" } 
+reth-optimism-txpool = { path = "op-reth/crates/txpool/" } + +# ==================== OP-ALLOY INTERNAL CRATES ==================== +op-alloy-consensus = { version = "0.23.1", path = "op-alloy/crates/consensus", default-features = false } +op-alloy-network = { version = "0.23.1", path = "op-alloy/crates/network", default-features = false } +op-alloy-provider = { version = "0.23.1", path = "op-alloy/crates/provider", default-features = false } +op-alloy-rpc-types = { version = "0.23.1", path = "op-alloy/crates/rpc-types", default-features = false } +op-alloy-rpc-types-engine = { version = "0.23.1", path = "op-alloy/crates/rpc-types-engine", default-features = false } +op-alloy-rpc-jsonrpsee = { version = "0.23.1", path = "op-alloy/crates/rpc-jsonrpsee", default-features = false } + +# ==================== ALLOY-OP-EVM / ALLOY-OP-HARDFORKS ==================== +alloy-op-evm = { version = "0.26.3", path = "alloy-op-evm/", default-features = false } +alloy-op-hardforks = { version = "0.4.7", path = "alloy-op-hardforks/", default-features = false } + +# ==================== RETH CRATES (from git rev b3d532ce / main) ==================== +reth = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-basic-payload-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-chain-state = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-chainspec = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-cli = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-cli-commands = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-cli-runner = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-cli-util = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-codecs = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-consensus = { git = "https://github.com/paradigmxyz/reth", rev 
= "b3d532ce", default-features = false } +reth-consensus-common = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-db = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-db-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-db-common = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-downloaders = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-e2e-test-utils = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-engine-local = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-engine-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-eth-wire = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-ethereum = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-ethereum-cli = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-ethereum-consensus = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-ethereum-forks = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-ethereum-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-evm = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-evm-ethereum = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-exex = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-execution-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-execution-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-fs-util = { git = 
"https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-metrics = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-network = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-network-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-network-peers = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-node-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-node-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-node-core = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-node-ethereum = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-node-events = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-node-metrics = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-payload-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-payload-builder-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-payload-primitives = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-payload-util = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-payload-validator = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-primitives-traits = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-provider = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-prune = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-prune-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-revm = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-rpc = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-rpc-api = { git = 
"https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-rpc-builder = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-rpc-engine-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-rpc-eth-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-rpc-eth-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-rpc-server-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-stages = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-stages-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-static-file = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-static-file-types = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-storage-api = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-storage-errors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-tasks = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-tracing = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-transaction-pool = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-trie = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-trie-common = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } +reth-trie-db = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce" } +reth-zstd-compressors = { git = "https://github.com/paradigmxyz/reth", rev = "b3d532ce", default-features = false } + +# ==================== REVM (latest: op-reth versions) ==================== +revm = { version = "34.0.0", default-features = false } +revm-bytecode = { version = "8.0.0", 
default-features = false } +revm-database = { version = "10.0.0", default-features = false } +revm-state = { version = "9.0.0", default-features = false } +revm-primitives = { version = "22.0.0", default-features = false } +revm-interpreter = { version = "32.0.0", default-features = false } +revm-database-interface = { version = "9.0.0", default-features = false } +op-revm = { version = "15.0.0", default-features = false } +revm-inspectors = "0.34.1" + +# ==================== ALLOY ==================== +alloy-chains = { version = "0.2.30", default-features = false } +alloy-dyn-abi = "1.5.4" +alloy-eip2124 = { version = "0.2.0", default-features = false } +alloy-eip7928 = { version = "0.3.2", default-features = false } +alloy-evm = { version = "0.27.0", default-features = false } +alloy-primitives = { version = "1.5.4", default-features = false, features = [ + "map-foldhash", +] } +alloy-rlp = { version = "0.3.12", default-features = false, features = [ + "core-net", +] } +alloy-sol-macro = "1.5.4" +alloy-sol-types = { version = "1.5.4", default-features = false } +alloy-trie = { version = "0.9.3", default-features = false } + +alloy-hardforks = { version = "0.4.7", default-features = false } + +alloy-consensus = { version = "1.5.2", default-features = false } +alloy-contract = { version = "1.5.2", default-features = false } +alloy-eips = { version = "1.5.2", default-features = false } +alloy-genesis = { version = "1.5.2", default-features = false } +alloy-json-rpc = { version = "1.5.2", default-features = false } +alloy-network = { version = "1.5.2", default-features = false } +alloy-network-primitives = { version = "1.5.2", default-features = false } +alloy-node-bindings = { version = "1.5.2", default-features = false } +alloy-provider = { version = "1.5.2", features = [ + "reqwest", + "debug-api", +], default-features = false } +alloy-pubsub = { version = "1.5.2", default-features = false } +alloy-rpc-client = { version = "1.5.2", default-features = false } 
+alloy-rpc-types = { version = "1.5.2", features = [ + "eth", +], default-features = false } +alloy-rpc-types-admin = { version = "1.5.2", default-features = false } +alloy-rpc-types-anvil = { version = "1.5.2", default-features = false } +alloy-rpc-types-beacon = { version = "1.5.2", default-features = false } +alloy-rpc-types-debug = { version = "1.5.2", default-features = false } +alloy-rpc-types-engine = { version = "1.5.2", default-features = false } +alloy-rpc-types-eth = { version = "1.5.2", default-features = false } +alloy-rpc-types-mev = { version = "1.5.2", default-features = false } +alloy-rpc-types-trace = { version = "1.5.2", default-features = false } +alloy-rpc-types-txpool = { version = "1.5.2", default-features = false } +alloy-serde = { version = "1.5.2", default-features = false } +alloy-signer = { version = "1.5.2", default-features = false } +alloy-signer-local = { version = "1.5.2", default-features = false } +alloy-transport = { version = "1.5.2" } +alloy-transport-http = { version = "1.5.2", features = [ + "reqwest-rustls-tls", +], default-features = false } +alloy-transport-ipc = { version = "1.5.2", default-features = false } +alloy-transport-ws = { version = "1.5.2", default-features = false } + +# ==================== OP-ALLOY (from crates.io) ==================== +op-alloy = { version = "0.23.1", path = "op-alloy/crates/op-alloy", default-features = false } +op-alloy-flz = { version = "0.13.1", default-features = false } + +# ==================== ASYNC ==================== +async-channel = "2.5" +async-compression = { version = "0.4", default-features = false } +async-stream = "0.3.6" +async-trait = "0.1.89" +futures = "0.3.31" +futures-core = "0.3" +futures-util = { version = "0.3.31", default-features = false } +hyper = "1.8" +hyper-util = "0.1.19" +pin-project = "1.1.10" +tracing-futures = "0.2" +tower = "0.5.3" +tower-http = "0.6" + +# ==================== TOKIO ==================== +tokio = { version = "1.49.0", default-features = 
false } +tokio-stream = "0.1.18" +tokio-tungstenite = "0.28.0" +tokio-util = { version = "0.7.18", features = ["codec"] } + +# ==================== RPC ==================== +jsonrpsee = { version = "0.26.0", features = [ + "jsonrpsee-core", + "client-core", + "server-core", + "macros", +] } +jsonrpsee-core = "0.26.0" +jsonrpsee-server = "0.26.0" +jsonrpsee-http-client = "0.26.0" +jsonrpsee-types = "0.26.0" + +# ==================== CRYPTO ==================== +c-kzg = { version = "2.1.5", default-features = false } +enr = { version = "0.13", default-features = false } +k256 = { version = "0.13", default-features = false, features = ["ecdsa"] } +sha2 = { version = "0.10.9", default-features = false } +secp256k1 = { version = "0.31.1", default-features = false } +ark-ff = { version = "0.5.0", default-features = false } +ark-bls12-381 = { version = "0.5.0", default-features = false } + +# ==================== SERIALIZATION ==================== +bincode = { version = "2.0.1", features = ["serde"] } +ethereum_ssz = "0.9.1" +ethereum_ssz_derive = "0.9.1" +rkyv = "0.8.14" +serde_repr = "0.1.20" +serde_with = { version = "3.16", default-features = false, features = [ + "macros", +] } +toml = { version = "0.9.11", default-features = false } +serde = { version = "1.0.228", default-features = false, features = [ + "derive", + "alloc", +] } +serde_json = { version = "1.0.149", default-features = false, features = [ + "alloc", +] } + +# ==================== METRICS ==================== +metrics = { version = "0.24.3", default-features = false } +metrics-derive = "0.1" +metrics-exporter-prometheus = { version = "0.18.1", default-features = false } +metrics-process = "2.4.2" +metrics-util = { default-features = false, version = "0.20.1" } +prometheus = { version = "0.14.0", default-features = false } + +# ==================== TRACING ==================== +tracing = { version = "0.1.44", default-features = false } +tracing-appender = "0.2.4" +tracing-loki = "0.2.6" 
+tracing-subscriber = { version = "0.3.22", default-features = false } + +# ==================== TESTING ==================== +arbitrary = { version = "1.4.2", features = ["derive"] } +arbtest = "0.3.2" +assert_matches = "1.5.0" +criterion = { package = "codspeed-criterion-compat", version = "2.10" } +httpmock = "0.8.2" +insta = "1.46" +mockall = "0.14.0" +pprof = "0.15.0" +proptest = "1.9.0" +proptest-derive = "0.7" +proptest-arbitrary-interop = "0.1.0" +rstest = "0.26.1" +similar-asserts = { version = "1.7.0", features = ["serde"] } +tempfile = "3.24.0" +test-case = "3" +test-fuzz = "7.2.5" + +# ==================== COMPRESSION ==================== +alloc-no-stdlib = "2.0.4" +brotli = { version = "8.0.2", default-features = false } +getrandom = "0.3.4" +lz4 = "1.28.1" +lz4_flex = { version = "0.12", default-features = false } +miniz_oxide = "0.9.0" +snap = "1.1.1" +tar-no-std = { version = "0.4.2", default-features = false } +zstd = "0.13" + +# ==================== NETWORKING ==================== +discv5 = "0.10.2" +ipnet = "2.11.0" +libp2p = "0.56.0" +libp2p-stream = "0.4.0-alpha" +libp2p-identity = "0.2.13" +openssl = "0.10.75" + +# ==================== MISC ==================== +ambassador = "0.5.0" +anyhow = { version = "1.0.100", default-features = false } +aquamarine = "0.6" +arrayvec = { version = "0.7.6", default-features = false } +auto_impl = "1.3.0" +backon = { version = "1.6.0", default-features = false, features = [ + "std-blocking-sleep", + "tokio-sleep", +] } +bitflags = "2.10" +boyer-moore-magiclen = "0.2.22" +buddy_system_allocator = "0.12.0" +bytes = { version = "1.11.0", default-features = false } +cfg-if = "1.0.4" +chrono = "0.4.43" +clap = "4.5.56" +color-eyre = "0.6.5" +crossterm = "0.29.0" +dashmap = "6.1" +derive_more = { version = "2.1.1", default-features = false, features = [ + "full", +] } +dirs = "6.0.0" +dirs-next = "2.0.0" +dyn-clone = "1.0.20" +either = { version = "1.15.0", default-features = false } +eyre = "0.6.12" +fdlimit = 
"0.3.0" +fixed-map = { version = "0.9", default-features = false } +glob = "0.3.3" +http = "1.4.0" +http-body = "1.0" +http-body-util = "0.1.3" +humantime = "2.3" +humantime-serde = "1.1" +itertools = { version = "0.14", default-features = false } +jsonwebtoken = "10" +lazy_static = { version = "1.5.0", default-features = false } +libc = "0.2" +linked_hash_set = "0.1" +lru = "0.16.3" +mini-moka = "0.10" +modular-bitfield = "0.11.2" +moka = "0.12" +multihash = "0.19.3" +notify = { version = "8.2", default-features = false, features = [ + "macos_fsevent", +] } +nybbles = { version = "0.4.7", default-features = false } +once_cell = { version = "1.21", default-features = false, features = [ + "critical-section", +] } +parking_lot = "0.12.5" +paste = "1.0" +proc-macro2 = "1.0" +quote = "1.0" +rand = { version = "0.9.2", default-features = false } +rand_08 = { package = "rand", version = "0.8" } +ratatui = "0.30.0" +rayon = "1.11.0" +reqwest = { version = "0.13.1", default-features = false } +ringbuffer = "0.16.0" +rollup-boost = "0.7.13" +rollup-boost-types = "0.1.0" +rustc-hash = { version = "2.1", default-features = false } +rustls = { version = "0.23", default-features = false } +rustls-pemfile = { version = "2.2", default-features = false } +schnellru = "0.2" +shellexpand = "3.1.1" +shlex = "1.3" +smallvec = "1" +spin = "0.10.0" +strum = { version = "0.27", default-features = false } +strum_macros = "0.27" +syn = "2.0" +tabled = { version = "0.20.0", default-features = false } +tar = "0.4.44" +thiserror = { version = "2.0.18", default-features = false } +unsigned-varint = "0.8.0" +url = { version = "2.5.8", default-features = false } +vergen = "9.1.0" +vergen-git2 = "9.1.0" +byteorder = "1" + +# allocators +jemalloc_pprof = { version = "0.8", default-features = false } +tikv-jemalloc-ctl = "0.6" +tikv-jemallocator = "0.6" +tracy-client = "0.18.4" +snmalloc-rs = { version = "0.3.8", features = ["build_cc"] } + +# K/V database +rocksdb = { version = "0.24.0", 
default-features = false } + +[patch.crates-io] +# Duplicated by: reth-payload-primitives, reth-engine-local (reth git), rollup-boost, +# rollup-boost-types (crates.io) +op-alloy-rpc-types-engine = { path = "op-alloy/crates/rpc-types-engine" } +# Duplicated by: reth-codecs, reth-db-api, reth-primitives-traits, reth-rpc-convert (reth git) +op-alloy-consensus = { path = "op-alloy/crates/consensus" } +# Duplicated by: reth-rpc-convert (reth git) +op-alloy-network = { path = "op-alloy/crates/network" } +# Duplicated by: reth-rpc-convert (reth git) +op-alloy-rpc-types = { path = "op-alloy/crates/rpc-types" } +# Duplicated by: alloy-evm (crates.io) +op-alloy = { path = "op-alloy/crates/op-alloy" } +# Duplicated by: alloy-evm (crates.io) +alloy-op-hardforks = { path = "alloy-op-hardforks/" } diff --git a/alloy-op-evm/.config/nextest.toml b/rust/alloy-op-evm/.config/nextest.toml similarity index 100% rename from alloy-op-evm/.config/nextest.toml rename to rust/alloy-op-evm/.config/nextest.toml diff --git a/alloy-op-evm/.config/zepter.yaml b/rust/alloy-op-evm/.config/zepter.yaml similarity index 100% rename from alloy-op-evm/.config/zepter.yaml rename to rust/alloy-op-evm/.config/zepter.yaml diff --git a/alloy-op-evm/.gitignore b/rust/alloy-op-evm/.gitignore similarity index 100% rename from alloy-op-evm/.gitignore rename to rust/alloy-op-evm/.gitignore diff --git a/rust/alloy-op-evm/Cargo.toml b/rust/alloy-op-evm/Cargo.toml new file mode 100644 index 00000000000..593a6ff2a56 --- /dev/null +++ b/rust/alloy-op-evm/Cargo.toml @@ -0,0 +1,49 @@ +[package] +name = "alloy-op-evm" +description = "OP EVM implementation" + +version = "0.26.3" +edition = "2021" +rust-version = "1.88" +authors = ["Alloy Contributors", "OpLabsPBC"] +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" + +[lints] +workspace = true + +[dependencies] +alloy-evm = { workspace = true, features = ["op"] } + 
+alloy-eips = { workspace = true } +alloy-consensus = { workspace = true } +alloy-primitives = { workspace = true } + +alloy-op-hardforks.workspace = true +op-alloy = { workspace = true, features = ["consensus"] } + +revm = { workspace = true } +op-revm = { workspace = true } + +thiserror = { workspace = true } + +auto_impl = { workspace = true } + +[dev-dependencies] +alloy-hardforks = { workspace = true } + +[features] +default = ["std"] +std = [ + "alloy-primitives/std", + "revm/std", + "alloy-evm/std", + "op-revm/std", + "alloy-consensus/std", + "alloy-eips/std", + "op-alloy/std", + "thiserror/std" +] +gmp = ["alloy-evm/gmp"] +asm-keccak = ["alloy-evm/asm-keccak", "alloy-primitives/asm-keccak", "revm/asm-keccak"] diff --git a/alloy-op-evm/LICENSE-APACHE b/rust/alloy-op-evm/LICENSE-APACHE similarity index 100% rename from alloy-op-evm/LICENSE-APACHE rename to rust/alloy-op-evm/LICENSE-APACHE diff --git a/alloy-op-evm/LICENSE-MIT b/rust/alloy-op-evm/LICENSE-MIT similarity index 100% rename from alloy-op-evm/LICENSE-MIT rename to rust/alloy-op-evm/LICENSE-MIT diff --git a/alloy-op-evm/README.md b/rust/alloy-op-evm/README.md similarity index 100% rename from alloy-op-evm/README.md rename to rust/alloy-op-evm/README.md diff --git a/alloy-op-evm/cliff.toml b/rust/alloy-op-evm/cliff.toml similarity index 100% rename from alloy-op-evm/cliff.toml rename to rust/alloy-op-evm/cliff.toml diff --git a/rust/alloy-op-evm/justfile b/rust/alloy-op-evm/justfile new file mode 100644 index 00000000000..686cc2ed3c7 --- /dev/null +++ b/rust/alloy-op-evm/justfile @@ -0,0 +1,7 @@ +# default recipe to display help information +default: + @just --list + +# Run cargo tests +test *args='': + cargo nextest run --workspace {{args}} diff --git a/alloy-op-evm/release.toml b/rust/alloy-op-evm/release.toml similarity index 100% rename from alloy-op-evm/release.toml rename to rust/alloy-op-evm/release.toml diff --git a/alloy-op-evm/scripts/changelog.sh b/rust/alloy-op-evm/scripts/changelog.sh 
similarity index 100% rename from alloy-op-evm/scripts/changelog.sh rename to rust/alloy-op-evm/scripts/changelog.sh diff --git a/alloy-op-evm/scripts/check_no_std.sh b/rust/alloy-op-evm/scripts/check_no_std.sh similarity index 100% rename from alloy-op-evm/scripts/check_no_std.sh rename to rust/alloy-op-evm/scripts/check_no_std.sh diff --git a/rust/alloy-op-evm/src/block/canyon.rs b/rust/alloy-op-evm/src/block/canyon.rs new file mode 100644 index 00000000000..6e7b5702c9f --- /dev/null +++ b/rust/alloy-op-evm/src/block/canyon.rs @@ -0,0 +1,52 @@ +use alloy_evm::Database; +use alloy_op_hardforks::OpHardforks; +use alloy_primitives::{Address, B256, Bytes, address, b256, hex}; +use revm::{DatabaseCommit, primitives::HashMap, state::Bytecode}; + +/// The address of the create2 deployer +const CREATE_2_DEPLOYER_ADDR: Address = address!("0x13b0D85CcB8bf860b6b79AF3029fCA081AE9beF2"); + +/// The codehash of the create2 deployer contract. +const CREATE_2_DEPLOYER_CODEHASH: B256 = + b256!("0xb0550b5b431e30d38000efb7107aaa0ade03d48a7198a140edda9d27134468b2"); + +/// The raw bytecode of the create2 deployer contract. 
+const CREATE_2_DEPLOYER_BYTECODE: [u8; 1584] = hex!( + "6080604052600436106100435760003560e01c8063076c37b21461004f578063481286e61461007157806356299481146100ba57806366cfa057146100da57600080fd5b3661004a57005b600080fd5b34801561005b57600080fd5b5061006f61006a366004610327565b6100fa565b005b34801561007d57600080fd5b5061009161008c366004610327565b61014a565b60405173ffffffffffffffffffffffffffffffffffffffff909116815260200160405180910390f35b3480156100c657600080fd5b506100916100d5366004610349565b61015d565b3480156100e657600080fd5b5061006f6100f53660046103ca565b610172565b61014582826040518060200161010f9061031a565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe082820381018352601f90910116604052610183565b505050565b600061015683836102e7565b9392505050565b600061016a8484846102f0565b949350505050565b61017d838383610183565b50505050565b6000834710156101f4576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601d60248201527f437265617465323a20696e73756666696369656e742062616c616e636500000060448201526064015b60405180910390fd5b815160000361025f576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820181905260248201527f437265617465323a2062797465636f6465206c656e677468206973207a65726f60448201526064016101eb565b8282516020840186f5905073ffffffffffffffffffffffffffffffffffffffff8116610156576040517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601960248201527f437265617465323a204661696c6564206f6e206465706c6f790000000000000060448201526064016101eb565b60006101568383305b6000604051836040820152846020820152828152600b8101905060ff815360559020949350505050565b61014e806104ad83390190565b6000806040838503121561033a57600080fd5b50508035926020909101359150565b60008060006060848603121561035e57600080fd5b8335925060208401359150604084013573ffffffffffffffffffffffffffffffffffffffff8116811461039057600080fd5b809150509250925092565b7f4e487b7100000000000000000000000000000000000000000000000000000000600052604160045260246000fd5b6
000806000606084860312156103df57600080fd5b8335925060208401359150604084013567ffffffffffffffff8082111561040557600080fd5b818601915086601f83011261041957600080fd5b81358181111561042b5761042b61039b565b604051601f82017fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe0908116603f011681019083821181831017156104715761047161039b565b8160405282815289602084870101111561048a57600080fd5b826020860160208301376000602084830101528095505050505050925092509256fe608060405234801561001057600080fd5b5061012e806100206000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c8063249cb3fa14602d575b600080fd5b603c603836600460b1565b604e565b60405190815260200160405180910390f35b60008281526020818152604080832073ffffffffffffffffffffffffffffffffffffffff8516845290915281205460ff16608857600060aa565b7fa2ef4600d742022d532d4747cb3547474667d6f13804902513b2ec01c848f4b45b9392505050565b6000806040838503121560c357600080fd5b82359150602083013573ffffffffffffffffffffffffffffffffffffffff8116811460ed57600080fd5b80915050925092905056fea26469706673582212205ffd4e6cede7d06a5daf93d48d0541fc68189eeb16608c1999a82063b666eb1164736f6c63430008130033a2646970667358221220fdc4a0fe96e3b21c108ca155438d37c9143fb01278a3c1d274948bad89c564ba64736f6c63430008130033" +); + +/// The Canyon hardfork issues an irregular state transition that force-deploys the create2 +/// deployer contract. This is done by directly setting the code of the create2 deployer account +/// prior to executing any transactions on the timestamp activation of the fork. +pub(crate) fn ensure_create2_deployer( + chain_spec: impl OpHardforks, + timestamp: u64, + db: &mut DB, +) -> Result<(), DB::Error> +where + DB: Database + DatabaseCommit, +{ + // If the canyon hardfork is active at the current timestamp, and it was not active at the + // previous block timestamp (heuristically, block time is not perfectly constant at 2s), and the + // chain is an optimism chain, then we need to force-deploy the create2 deployer contract. 
+ if chain_spec.is_canyon_active_at_timestamp(timestamp) && + !chain_spec.is_canyon_active_at_timestamp(timestamp.saturating_sub(2)) + { + // Load the create2 deployer account from the cache. + let mut acc_info = db.basic(CREATE_2_DEPLOYER_ADDR)?.unwrap_or_default(); + + // Update the account info with the create2 deployer codehash and bytecode. + acc_info.code_hash = CREATE_2_DEPLOYER_CODEHASH; + acc_info.code = Some(Bytecode::new_raw(Bytes::from_static(&CREATE_2_DEPLOYER_BYTECODE))); + + // Convert the cache account back into a revm account and mark it as touched. + let mut revm_acc: revm::state::Account = acc_info.into(); + revm_acc.mark_touch(); + + // Commit the create2 deployer account to the database. + db.commit(HashMap::from_iter([(CREATE_2_DEPLOYER_ADDR, revm_acc)])); + return Ok(()); + } + + Ok(()) +} diff --git a/rust/alloy-op-evm/src/block/mod.rs b/rust/alloy-op-evm/src/block/mod.rs new file mode 100644 index 00000000000..1c1f4852767 --- /dev/null +++ b/rust/alloy-op-evm/src/block/mod.rs @@ -0,0 +1,759 @@ +//! Block executor for Optimism. 
+ +use crate::OpEvmFactory; +use alloc::{borrow::Cow, boxed::Box, vec::Vec}; +use alloy_consensus::{Eip658Value, Header, Transaction, TransactionEnvelope, TxReceipt}; +use alloy_eips::{Encodable2718, Typed2718}; +use alloy_evm::{ + Database, Evm, EvmFactory, FromRecoveredTx, FromTxWithEncoded, RecoveredTx, + block::{ + BlockExecutionError, BlockExecutionResult, BlockExecutor, BlockExecutorFactory, + BlockExecutorFor, BlockValidationError, ExecutableTx, OnStateHook, + StateChangePostBlockSource, StateChangeSource, StateDB, SystemCaller, TxResult, + state_changes::{balance_increment_state, post_block_balance_increments}, + }, + eth::{EthTxResult, receipt_builder::ReceiptBuilderCtx}, +}; +use alloy_op_hardforks::{OpChainHardforks, OpHardforks}; +use alloy_primitives::{Address, B256, Bytes}; +use canyon::ensure_create2_deployer; +use op_alloy::consensus::OpDepositReceipt; +use op_revm::{ + L1BlockInfo, OpTransaction, constants::L1_BLOCK_CONTRACT, estimate_tx_compressed_size, + transaction::deposit::DEPOSIT_TRANSACTION_TYPE, +}; +pub use receipt_builder::OpAlloyReceiptBuilder; +use receipt_builder::OpReceiptBuilder; +use revm::{ + Database as _, DatabaseCommit, Inspector, + context::{Block, result::ResultAndState}, + database::{DatabaseCommitExt, State}, +}; + +mod canyon; +pub mod receipt_builder; + +/// Trait for OP transaction environments. Allows to recover the transaction encoded bytes if +/// they're available. +pub trait OpTxEnv { + /// Returns the encoded bytes of the transaction. + fn encoded_bytes(&self) -> Option<&Bytes>; +} + +impl OpTxEnv for OpTransaction { + fn encoded_bytes(&self) -> Option<&Bytes> { + self.enveloped_tx.as_ref() + } +} + +/// Context for OP block execution. +#[derive(Debug, Default, Clone)] +pub struct OpBlockExecutionCtx { + /// Parent block hash. + pub parent_hash: B256, + /// Parent beacon block root. + pub parent_beacon_block_root: Option, + /// The block's extra data. 
+ pub extra_data: Bytes, +} + +/// The result of executing an OP transaction. +#[derive(Debug)] +pub struct OpTxResult { + /// The inner result of the transaction execution. + pub inner: EthTxResult, + /// Whether the transaction is a deposit transaction. + pub is_deposit: bool, + /// The sender of the transaction. + pub sender: Address, +} + +impl TxResult for OpTxResult { + type HaltReason = H; + + fn result(&self) -> &ResultAndState { + &self.inner.result + } +} + +/// Block executor for Optimism. +#[derive(Debug)] +pub struct OpBlockExecutor { + /// Spec. + pub spec: Spec, + /// Receipt builder. + pub receipt_builder: R, + /// Context for block execution. + pub ctx: OpBlockExecutionCtx, + /// The EVM used by executor. + pub evm: Evm, + /// Receipts of executed transactions. + pub receipts: Vec, + /// Total gas used by executed transactions. + pub gas_used: u64, + /// Da footprint. + /// + /// This is only set for blocks post-Jovian activation. + /// See [DA footprint block limit spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/exec-engine.md#da-footprint-block-limit) + pub da_footprint_used: u64, + /// Whether Regolith hardfork is active. + pub is_regolith: bool, + /// Utility to call system smart contracts. + pub system_caller: SystemCaller, +} + +impl OpBlockExecutor +where + E: Evm, + R: OpReceiptBuilder, + Spec: OpHardforks + Clone, +{ + /// Creates a new [`OpBlockExecutor`]. + pub fn new(evm: E, ctx: OpBlockExecutionCtx, spec: Spec, receipt_builder: R) -> Self { + Self { + is_regolith: spec + .is_regolith_active_at_timestamp(evm.block().timestamp().saturating_to()), + evm, + system_caller: SystemCaller::new(spec.clone()), + spec, + receipt_builder, + receipts: Vec::new(), + gas_used: 0, + da_footprint_used: 0, + ctx, + } + } +} + +/// Custom errors that can occur during OP block execution. +#[derive(Debug, thiserror::Error)] +pub enum OpBlockExecutionError { + /// Failed to load cache account. 
+ #[error("failed to load cache account")] + LoadCacheAccount, + + /// Failed to get Jovian da footprint gas scalar from database. + #[error("failed to get da footprint gas scalar from database: {_0}")] + GetJovianDaFootprintScalar(Box), + + /// Transaction DA footprint exceeds available block DA footprint. + #[error( + "transaction DA footprint exceeds available block DA footprint. transaction_da_footprint: {transaction_da_footprint}, available_block_da_footprint: {available_block_da_footprint}" + )] + TransactionDaFootprintAboveGasLimit { + /// The DA footprint of the transaction to execute. + transaction_da_footprint: u64, + /// The available block DA footprint. + available_block_da_footprint: u64, + }, +} + +impl OpBlockExecutor +where + E: Evm< + DB: Database + DatabaseCommit + StateDB, + Tx: FromRecoveredTx + FromTxWithEncoded + OpTxEnv, + >, + R: OpReceiptBuilder, + Spec: OpHardforks, +{ + fn jovian_da_footprint_estimation( + &mut self, + tx_env: &E::Tx, + tx: impl RecoveredTx, + ) -> Result { + // Try to use the enveloped tx if it exists, otherwise use the encoded 2718 bytes + let encoded = tx_env + .encoded_bytes() + .map_or_else( + || estimate_tx_compressed_size(tx.tx().encoded_2718().as_ref()), + |encoded| estimate_tx_compressed_size(encoded), + ) + .saturating_div(1_000_000); + + // Load the L1 block contract into the cache. If the L1 block contract is not pre-loaded the + // database will panic when trying to fetch the DA footprint gas scalar. + self.evm.db_mut().basic(L1_BLOCK_CONTRACT).map_err(BlockExecutionError::other)?; + + let da_footprint_gas_scalar = L1BlockInfo::fetch_da_footprint_gas_scalar(self.evm.db_mut()) + .map_err(BlockExecutionError::other)? 
+ .into(); + + Ok(encoded.saturating_mul(da_footprint_gas_scalar)) + } +} + +impl BlockExecutor for OpBlockExecutor +where + E: Evm< + DB: Database + DatabaseCommit + StateDB, + Tx: FromRecoveredTx + FromTxWithEncoded + OpTxEnv, + >, + R: OpReceiptBuilder, + Spec: OpHardforks, +{ + type Transaction = R::Transaction; + type Receipt = R::Receipt; + type Evm = E; + type Result = OpTxResult::TxType>; + + fn apply_pre_execution_changes(&mut self) -> Result<(), BlockExecutionError> { + // Set state clear flag if the block is after the Spurious Dragon hardfork. + let state_clear_flag = + self.spec.is_spurious_dragon_active_at_block(self.evm.block().number().saturating_to()); + self.evm.db_mut().set_state_clear_flag(state_clear_flag); + + self.system_caller.apply_blockhashes_contract_call(self.ctx.parent_hash, &mut self.evm)?; + self.system_caller + .apply_beacon_root_contract_call(self.ctx.parent_beacon_block_root, &mut self.evm)?; + + // Ensure that the create2deployer is force-deployed at the canyon transition. Optimism + // blocks will always have at least a single transaction in them (the L1 info transaction), + // so we can safely assume that this will always be triggered upon the transition and that + // the above check for empty blocks will never be hit on OP chains. + ensure_create2_deployer( + &self.spec, + self.evm.block().timestamp().saturating_to(), + self.evm.db_mut(), + ) + .map_err(BlockExecutionError::other)?; + + Ok(()) + } + + fn execute_transaction_without_commit( + &mut self, + tx: impl ExecutableTx, + ) -> Result { + let (tx_env, tx) = tx.into_parts(); + let is_deposit = tx.tx().ty() == DEPOSIT_TRANSACTION_TYPE; + + // The sum of the transaction's gas limit, Tg, and the gas utilized in this block prior, + // must be no greater than the block's gasLimit. 
+ let block_available_gas = self.evm.block().gas_limit() - self.gas_used; + if tx.tx().gas_limit() > block_available_gas && (self.is_regolith || !is_deposit) { + return Err(BlockValidationError::TransactionGasLimitMoreThanAvailableBlockGas { + transaction_gas_limit: tx.tx().gas_limit(), + block_available_gas, + } + .into()); + } + + let da_footprint_used = if self + .spec + .is_jovian_active_at_timestamp(self.evm.block().timestamp().saturating_to()) && + !is_deposit + { + let da_footprint_available = self.evm.block().gas_limit() - self.da_footprint_used; + + let tx_da_footprint = self.jovian_da_footprint_estimation(&tx_env, &tx)?; + + if tx_da_footprint > da_footprint_available { + return Err(BlockExecutionError::Validation(BlockValidationError::Other( + Box::new(OpBlockExecutionError::TransactionDaFootprintAboveGasLimit { + transaction_da_footprint: tx_da_footprint, + available_block_da_footprint: da_footprint_available, + }), + ))); + } + + tx_da_footprint + } else { + 0 + }; + + // Execute transaction and return the result + let result = self.evm.transact(tx_env).map_err(|err| { + let hash = tx.tx().trie_hash(); + BlockExecutionError::evm(err, hash) + })?; + + Ok(OpTxResult { + inner: EthTxResult { + result, + blob_gas_used: da_footprint_used, + tx_type: tx.tx().tx_type(), + }, + is_deposit, + sender: *tx.signer(), + }) + } + + fn commit_transaction(&mut self, output: Self::Result) -> Result { + let OpTxResult { + inner: EthTxResult { result: ResultAndState { result, state }, blob_gas_used, tx_type }, + is_deposit, + sender, + } = output; + + // Fetch the depositor account from the database for the deposit nonce. + // Note that this *only* needs to be done post-regolith hardfork, as deposit nonces + // were not introduced in Bedrock. In addition, regular transactions don't have deposit + // nonces, so we don't need to touch the DB for those. 
+ let depositor = (self.is_regolith && is_deposit) + .then(|| self.evm.db_mut().basic(sender).map(|acc| acc.unwrap_or_default())) + .transpose() + .map_err(BlockExecutionError::other)?; + + self.system_caller.on_state(StateChangeSource::Transaction(self.receipts.len()), &state); + + let gas_used = result.gas_used(); + + // append gas used + self.gas_used += gas_used; + + // Update DA footprint if Jovian is active + if self.spec.is_jovian_active_at_timestamp(self.evm.block().timestamp().saturating_to()) && + !is_deposit + { + // Add to DA footprint used + self.da_footprint_used = self.da_footprint_used.saturating_add(blob_gas_used); + } + + self.receipts.push( + match self.receipt_builder.build_receipt(ReceiptBuilderCtx { + tx_type, + result, + cumulative_gas_used: self.gas_used, + evm: &self.evm, + state: &state, + }) { + Ok(receipt) => receipt, + Err(ctx) => { + let receipt = alloy_consensus::Receipt { + // Success flag was added in `EIP-658: Embedding transaction status code + // in receipts`. + status: Eip658Value::Eip658(ctx.result.is_success()), + cumulative_gas_used: self.gas_used, + logs: ctx.result.into_logs(), + }; + + self.receipt_builder.build_deposit_receipt(OpDepositReceipt { + inner: receipt, + deposit_nonce: depositor.map(|account| account.nonce), + // The deposit receipt version was introduced in Canyon to indicate an + // update to how receipt hashes should be computed + // when set. The state transition process ensures + // this is only set for post-Canyon deposit + // transactions. + deposit_receipt_version: (is_deposit && + self.spec.is_canyon_active_at_timestamp( + self.evm.block().timestamp().saturating_to(), + )) + .then_some(1), + }) + } + }, + ); + + self.evm.db_mut().commit(state); + + Ok(gas_used) + } + + fn finish( + mut self, + ) -> Result<(Self::Evm, BlockExecutionResult), BlockExecutionError> { + let balance_increments = + post_block_balance_increments::

(&self.spec, self.evm.block(), &[], None); + // increment balances + self.evm + .db_mut() + .increment_balances(balance_increments.clone()) + .map_err(|_| BlockValidationError::IncrementBalanceFailed)?; + // call state hook with changes due to balance increments. + self.system_caller.try_on_state_with(|| { + balance_increment_state(&balance_increments, self.evm.db_mut()).map(|state| { + ( + StateChangeSource::PostBlock(StateChangePostBlockSource::BalanceIncrements), + Cow::Owned(state), + ) + }) + })?; + + let legacy_gas_used = + self.receipts.last().map(|r| r.cumulative_gas_used()).unwrap_or_default(); + + Ok(( + self.evm, + BlockExecutionResult { + receipts: self.receipts, + requests: Default::default(), + gas_used: legacy_gas_used, + blob_gas_used: self.da_footprint_used, + }, + )) + } + + fn set_state_hook(&mut self, hook: Option>) { + self.system_caller.with_state_hook(hook); + } + + fn evm_mut(&mut self) -> &mut Self::Evm { + &mut self.evm + } + + fn evm(&self) -> &Self::Evm { + &self.evm + } + + fn receipts(&self) -> &[Self::Receipt] { + &self.receipts + } +} + +/// Ethereum block executor factory. +#[derive(Debug, Clone, Default, Copy)] +pub struct OpBlockExecutorFactory< + R = OpAlloyReceiptBuilder, + Spec = OpChainHardforks, + EvmFactory = OpEvmFactory, +> { + /// Receipt builder. + receipt_builder: R, + /// Chain specification. + spec: Spec, + /// EVM factory. + evm_factory: EvmFactory, +} + +impl OpBlockExecutorFactory { + /// Creates a new [`OpBlockExecutorFactory`] with the given spec, [`EvmFactory`], and + /// [`OpReceiptBuilder`]. + pub const fn new(receipt_builder: R, spec: Spec, evm_factory: EvmFactory) -> Self { + Self { receipt_builder, spec, evm_factory } + } + + /// Exposes the receipt builder. + pub const fn receipt_builder(&self) -> &R { + &self.receipt_builder + } + + /// Exposes the chain specification. + pub const fn spec(&self) -> &Spec { + &self.spec + } + + /// Exposes the EVM factory. 
+ pub const fn evm_factory(&self) -> &EvmFactory { + &self.evm_factory + } +} + +impl BlockExecutorFactory for OpBlockExecutorFactory +where + R: OpReceiptBuilder, + Spec: OpHardforks, + EvmF: EvmFactory< + Tx: FromRecoveredTx + FromTxWithEncoded + OpTxEnv, + >, + Self: 'static, +{ + type EvmFactory = EvmF; + type ExecutionCtx<'a> = OpBlockExecutionCtx; + type Transaction = R::Transaction; + type Receipt = R::Receipt; + + fn evm_factory(&self) -> &Self::EvmFactory { + &self.evm_factory + } + + fn create_executor<'a, DB, I>( + &'a self, + evm: EvmF::Evm<&'a mut State, I>, + ctx: Self::ExecutionCtx<'a>, + ) -> impl BlockExecutorFor<'a, Self, DB, I> + where + DB: Database + 'a, + I: Inspector>> + 'a, + { + OpBlockExecutor::new(evm, ctx, &self.spec, &self.receipt_builder) + } +} + +#[cfg(test)] +mod tests { + use alloc::{string::ToString, vec}; + use alloy_consensus::{SignableTransaction, TxLegacy, transaction::Recovered}; + use alloy_eips::eip2718::WithEncoded; + use alloy_evm::{EvmEnv, ToTxEnv}; + use alloy_hardforks::ForkCondition; + use alloy_op_hardforks::OpHardfork; + use alloy_primitives::{Address, Signature, U256, uint}; + use op_alloy::consensus::OpTxEnvelope; + use op_revm::{ + DefaultOp, L1BlockInfo, OpBuilder, OpSpecId, + constants::{ + BASE_FEE_SCALAR_OFFSET, ECOTONE_L1_BLOB_BASE_FEE_SLOT, ECOTONE_L1_FEE_SCALARS_SLOT, + L1_BASE_FEE_SLOT, L1_BLOCK_CONTRACT, OPERATOR_FEE_SCALARS_SLOT, + }, + }; + use revm::{ + Context, + context::BlockEnv, + database::{CacheDB, EmptyDB, InMemoryDB}, + inspector::NoOpInspector, + primitives::HashMap, + state::AccountInfo, + }; + + use crate::OpEvm; + + use super::*; + + #[test] + fn test_with_encoded() { + let executor_factory = OpBlockExecutorFactory::new( + OpAlloyReceiptBuilder::default(), + OpChainHardforks::op_mainnet(), + OpEvmFactory::default(), + ); + let mut db = State::builder().with_database(CacheDB::::default()).build(); + let evm = executor_factory.evm_factory.create_evm(&mut db, EvmEnv::default()); + let mut 
executor = executor_factory.create_executor(evm, OpBlockExecutionCtx::default()); + let tx = Recovered::new_unchecked( + OpTxEnvelope::Legacy(TxLegacy::default().into_signed(Signature::new( + Default::default(), + Default::default(), + Default::default(), + ))), + Address::ZERO, + ); + let tx_with_encoded = WithEncoded::new(tx.encoded_2718().into(), tx.clone()); + + // make sure we can use both `WithEncoded` and transaction itself as inputs. + let _ = executor.execute_transaction(&tx); + let _ = executor.execute_transaction(&tx_with_encoded); + } + + fn prepare_jovian_db(da_footprint_gas_scalar: u16) -> State { + const L1_BASE_FEE: U256 = uint!(1_U256); + const L1_BLOB_BASE_FEE: U256 = uint!(2_U256); + const L1_BASE_FEE_SCALAR: u64 = 3; + const L1_BLOB_BASE_FEE_SCALAR: u64 = 4; + const L1_FEE_SCALARS: U256 = U256::from_limbs([ + 0, + (L1_BASE_FEE_SCALAR << (64 - BASE_FEE_SCALAR_OFFSET * 2)) | L1_BLOB_BASE_FEE_SCALAR, + 0, + 0, + ]); + const OPERATOR_FEE_SCALAR: u8 = 5; + const OPERATOR_FEE_CONST: u8 = 6; + let da_footprint_gas_scalar_bytes = da_footprint_gas_scalar.to_be_bytes(); + let mut operator_fee_and_da_footprint = [0u8; 32]; + operator_fee_and_da_footprint[31] = OPERATOR_FEE_CONST; + operator_fee_and_da_footprint[23] = OPERATOR_FEE_SCALAR; + operator_fee_and_da_footprint[19] = da_footprint_gas_scalar_bytes[1]; + operator_fee_and_da_footprint[18] = da_footprint_gas_scalar_bytes[0]; + let operator_fee_and_da_footprint_u256 = U256::from_be_bytes(operator_fee_and_da_footprint); + + let mut db = State::builder().with_database(InMemoryDB::default()).build(); + + db.insert_account_with_storage( + L1_BLOCK_CONTRACT, + Default::default(), + HashMap::from_iter([ + (L1_BASE_FEE_SLOT, L1_BASE_FEE), + (ECOTONE_L1_FEE_SCALARS_SLOT, L1_FEE_SCALARS), + (ECOTONE_L1_BLOB_BASE_FEE_SLOT, L1_BLOB_BASE_FEE), + (OPERATOR_FEE_SCALARS_SLOT, operator_fee_and_da_footprint_u256), + ]), + ); + + db.insert_account( + Address::ZERO, + AccountInfo { balance: U256::from(400_000_000), 
..Default::default() }, + ); + + db + } + + fn build_executor<'a>( + db: &'a mut State, + receipt_builder: &'a OpAlloyReceiptBuilder, + op_chain_hardforks: &'a OpChainHardforks, + gas_limit: u64, + jovian_timestamp: u64, + ) -> OpBlockExecutor< + OpEvm<&'a mut State, NoOpInspector>, + &'a OpAlloyReceiptBuilder, + &'a OpChainHardforks, + > { + let ctx = Context::op() + .with_db(db) + .with_chain(L1BlockInfo { + operator_fee_scalar: Some(U256::from(2)), + operator_fee_constant: Some(U256::from(50)), + ..Default::default() + }) + .with_block(BlockEnv { + timestamp: U256::from(jovian_timestamp), + gas_limit, + ..Default::default() + }) + .modify_cfg_chained(|cfg| cfg.spec = OpSpecId::JOVIAN); + + let evm = OpEvm::new(ctx.build_op_with_inspector(NoOpInspector {}), true); + + OpBlockExecutor::new( + evm, + OpBlockExecutionCtx::default(), + op_chain_hardforks, + receipt_builder, + ) + } + + #[test] + fn test_jovian_da_footprint_estimation() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 7; + const GAS_LIMIT: u64 = 100_000; + const JOVIAN_TIMESTAMP: u64 = 1746806402; + + let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); + let op_chain_hardforks = OpChainHardforks::new( + OpHardfork::op_mainnet() + .into_iter() + .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), + ); + + let receipt_builder = OpAlloyReceiptBuilder::default(); + let mut executor = build_executor( + &mut db, + &receipt_builder, + &op_chain_hardforks, + GAS_LIMIT, + JOVIAN_TIMESTAMP, + ); + + let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; + + let tx = Recovered::new_unchecked( + OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( + Default::default(), + Default::default(), + Default::default(), + ))), + Address::ZERO, + ); + let tx_env = tx.to_tx_env(); + + assert!(executor.da_footprint_used == 0); + + let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); + + // make sure we can use both `WithEncoded` and 
transaction itself as inputs. + let res = executor.execute_transaction(&tx); + assert!(res.is_ok()); + + assert!(executor.da_footprint_used == expected_da_footprint); + } + + #[test] + fn test_jovian_da_footprint_estimation_out_of_gas() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 7; + const JOVIAN_TIMESTAMP: u64 = 1746806402; + const GAS_LIMIT: u64 = 100; + + let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); + let op_chain_hardforks = OpChainHardforks::new( + OpHardfork::op_mainnet() + .into_iter() + .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), + ); + + let receipt_builder = OpAlloyReceiptBuilder::default(); + let mut executor = build_executor( + &mut db, + &receipt_builder, + &op_chain_hardforks, + GAS_LIMIT, + JOVIAN_TIMESTAMP, + ); + + let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; + + let tx = Recovered::new_unchecked( + OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( + Default::default(), + Default::default(), + Default::default(), + ))), + Address::ZERO, + ); + let tx_env = tx.to_tx_env(); + + assert!(executor.da_footprint_used == 0); + + let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); + + // make sure we can use both `WithEncoded` and transaction itself as inputs. 
+ let res = executor.execute_transaction(&tx); + assert!(res.is_err()); + let err = res.unwrap_err(); + match err { + BlockExecutionError::Validation(BlockValidationError::Other(err)) => { + assert_eq!( + err.to_string(), + OpBlockExecutionError::TransactionDaFootprintAboveGasLimit { + transaction_da_footprint: expected_da_footprint, + available_block_da_footprint: GAS_LIMIT, + } + .to_string(), + ); + } + _ => panic!("expected TransactionDaFootprintAboveGasLimit error"), + } + } + + #[test] + fn test_jovian_da_footprint_estimation_maxed_out_da_footprint() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 2000; + const JOVIAN_TIMESTAMP: u64 = 1746806402; + const GAS_LIMIT: u64 = 200_000; + + let mut db = prepare_jovian_db(DA_FOOTPRINT_GAS_SCALAR); + let op_chain_hardforks = OpChainHardforks::new( + OpHardfork::op_mainnet() + .into_iter() + .chain(vec![(OpHardfork::Jovian, ForkCondition::Timestamp(JOVIAN_TIMESTAMP))]), + ); + + let receipt_builder = OpAlloyReceiptBuilder::default(); + let mut executor = build_executor( + &mut db, + &receipt_builder, + &op_chain_hardforks, + GAS_LIMIT, + JOVIAN_TIMESTAMP, + ); + + let tx_inner = TxLegacy { gas_limit: GAS_LIMIT, ..Default::default() }; + + let tx = Recovered::new_unchecked( + OpTxEnvelope::Legacy(tx_inner.into_signed(Signature::new( + Default::default(), + Default::default(), + Default::default(), + ))), + Address::ZERO, + ); + let tx_env = tx.to_tx_env(); + + assert!(executor.da_footprint_used == 0); + + let expected_da_footprint = executor.jovian_da_footprint_estimation(&tx_env, &tx).unwrap(); + + // make sure we can use both `WithEncoded` and transaction itself as inputs. + let gas_used_tx = executor.execute_transaction(&tx).expect("failed to execute transaction"); + + // The gas used when executing the transaction should be the legacy value... 
+ assert!(gas_used_tx < expected_da_footprint); + + // The gas used when finishing the executor should be the DA footprint since this is higher + // than the legacy gas used and jovian is active... + let (_, result) = executor.finish().expect("failed to finish executor"); + assert_eq!(result.blob_gas_used, expected_da_footprint); + assert_eq!(result.gas_used, gas_used_tx); + assert!(result.blob_gas_used > result.gas_used); + } +} diff --git a/alloy-op-evm/src/block/receipt_builder.rs b/rust/alloy-op-evm/src/block/receipt_builder.rs similarity index 97% rename from alloy-op-evm/src/block/receipt_builder.rs rename to rust/alloy-op-evm/src/block/receipt_builder.rs index add624c7a87..7fd04314c7f 100644 --- a/alloy-op-evm/src/block/receipt_builder.rs +++ b/rust/alloy-op-evm/src/block/receipt_builder.rs @@ -2,7 +2,7 @@ //! [`super::OpBlockExecutor`]. use alloy_consensus::{Eip658Value, TransactionEnvelope}; -use alloy_evm::{eth::receipt_builder::ReceiptBuilderCtx, Evm}; +use alloy_evm::{Evm, eth::receipt_builder::ReceiptBuilderCtx}; use core::fmt::Debug; use op_alloy::consensus::{OpDepositReceipt, OpReceiptEnvelope, OpTxEnvelope, OpTxType}; diff --git a/rust/alloy-op-evm/src/lib.rs b/rust/alloy-op-evm/src/lib.rs new file mode 100644 index 00000000000..b9880781d4c --- /dev/null +++ b/rust/alloy-op-evm/src/lib.rs @@ -0,0 +1,356 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/alloy.jpg", + html_favicon_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/favicon.ico" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +pub use alloy_evm::op::{spec, spec_by_timestamp_after_bedrock}; + +use alloy_evm::{Database, Evm, EvmEnv, EvmFactory, precompiles::PrecompilesMap}; +use alloy_primitives::{Address, Bytes}; +use core::{ + fmt::Debug, + ops::{Deref, DerefMut}, +}; 
+use op_revm::{ + DefaultOp, OpBuilder, OpContext, OpHaltReason, OpSpecId, OpTransaction, OpTransactionError, + precompiles::OpPrecompiles, +}; +use revm::{ + Context, ExecuteEvm, InspectEvm, Inspector, SystemCallEvm, + context::{BlockEnv, TxEnv}, + context_interface::result::{EVMError, ResultAndState}, + handler::{PrecompileProvider, instructions::EthInstructions}, + inspector::NoOpInspector, + interpreter::{InterpreterResult, interpreter::EthInterpreter}, +}; + +pub mod block; +pub use block::{OpBlockExecutionCtx, OpBlockExecutor, OpBlockExecutorFactory}; + +/// OP EVM implementation. +/// +/// This is a wrapper type around the `revm` evm with optional [`Inspector`] (tracing) +/// support. [`Inspector`] support is configurable at runtime because it's part of the underlying +/// [`OpEvm`](op_revm::OpEvm) type. +#[allow(missing_debug_implementations)] // missing revm::OpContext Debug impl +pub struct OpEvm { + inner: op_revm::OpEvm, I, EthInstructions>, P>, + inspect: bool, +} + +impl OpEvm { + /// Provides a reference to the EVM context. + pub const fn ctx(&self) -> &OpContext { + &self.inner.0.ctx + } + + /// Provides a mutable reference to the EVM context. + pub const fn ctx_mut(&mut self) -> &mut OpContext { + &mut self.inner.0.ctx + } +} + +impl OpEvm { + /// Creates a new OP EVM instance. + /// + /// The `inspect` argument determines whether the configured [`Inspector`] of the given + /// [`OpEvm`](op_revm::OpEvm) should be invoked on [`Evm::transact`]. 
+ pub const fn new( + evm: op_revm::OpEvm, I, EthInstructions>, P>, + inspect: bool, + ) -> Self { + Self { inner: evm, inspect } + } +} + +impl Deref for OpEvm { + type Target = OpContext; + + #[inline] + fn deref(&self) -> &Self::Target { + self.ctx() + } +} + +impl DerefMut for OpEvm { + #[inline] + fn deref_mut(&mut self) -> &mut Self::Target { + self.ctx_mut() + } +} + +impl Evm for OpEvm +where + DB: Database, + I: Inspector>, + P: PrecompileProvider, Output = InterpreterResult>, +{ + type DB = DB; + type Tx = OpTransaction; + type Error = EVMError; + type HaltReason = OpHaltReason; + type Spec = OpSpecId; + type BlockEnv = BlockEnv; + type Precompiles = P; + type Inspector = I; + + fn block(&self) -> &BlockEnv { + &self.block + } + + fn chain_id(&self) -> u64 { + self.cfg.chain_id + } + + fn transact_raw( + &mut self, + tx: Self::Tx, + ) -> Result, Self::Error> { + if self.inspect { self.inner.inspect_tx(tx) } else { self.inner.transact(tx) } + } + + fn transact_system_call( + &mut self, + caller: Address, + contract: Address, + data: Bytes, + ) -> Result, Self::Error> { + self.inner.system_call_with_caller(caller, contract, data) + } + + fn finish(self) -> (Self::DB, EvmEnv) { + let Context { block: block_env, cfg: cfg_env, journaled_state, .. } = self.inner.0.ctx; + + (journaled_state.database, EvmEnv { block_env, cfg_env }) + } + + fn set_inspector_enabled(&mut self, enabled: bool) { + self.inspect = enabled; + } + + fn components(&self) -> (&Self::DB, &Self::Inspector, &Self::Precompiles) { + ( + &self.inner.0.ctx.journaled_state.database, + &self.inner.0.inspector, + &self.inner.0.precompiles, + ) + } + + fn components_mut(&mut self) -> (&mut Self::DB, &mut Self::Inspector, &mut Self::Precompiles) { + ( + &mut self.inner.0.ctx.journaled_state.database, + &mut self.inner.0.inspector, + &mut self.inner.0.precompiles, + ) + } +} + +/// Factory producing [`OpEvm`]s. 
+#[derive(Debug, Default, Clone, Copy)] +#[non_exhaustive] +pub struct OpEvmFactory; + +impl EvmFactory for OpEvmFactory { + type Evm>> = OpEvm; + type Context = OpContext; + type Tx = OpTransaction; + type Error = + EVMError; + type HaltReason = OpHaltReason; + type Spec = OpSpecId; + type BlockEnv = BlockEnv; + type Precompiles = PrecompilesMap; + + fn create_evm( + &self, + db: DB, + input: EvmEnv, + ) -> Self::Evm { + let spec_id = input.cfg_env.spec; + OpEvm { + inner: Context::op() + .with_db(db) + .with_block(input.block_env) + .with_cfg(input.cfg_env) + .build_op_with_inspector(NoOpInspector {}) + .with_precompiles(PrecompilesMap::from_static( + OpPrecompiles::new_with_spec(spec_id).precompiles(), + )), + inspect: false, + } + } + + fn create_evm_with_inspector>>( + &self, + db: DB, + input: EvmEnv, + inspector: I, + ) -> Self::Evm { + let spec_id = input.cfg_env.spec; + OpEvm { + inner: Context::op() + .with_db(db) + .with_block(input.block_env) + .with_cfg(input.cfg_env) + .build_op_with_inspector(inspector) + .with_precompiles(PrecompilesMap::from_static( + OpPrecompiles::new_with_spec(spec_id).precompiles(), + )), + inspect: true, + } + } +} + +#[cfg(test)] +mod tests { + use alloc::{string::ToString, vec}; + use alloy_evm::{ + EvmInternals, + precompiles::{Precompile, PrecompileInput}, + }; + use alloy_primitives::U256; + use op_revm::precompiles::{bls12_381, bn254_pair}; + use revm::{context::CfgEnv, database::EmptyDB, precompile::PrecompileError}; + + use super::*; + + #[test] + fn test_precompiles_jovian_fail() { + let mut evm = OpEvmFactory::default().create_evm( + EmptyDB::default(), + EvmEnv::new(CfgEnv::new_with_spec(OpSpecId::JOVIAN), BlockEnv::default()), + ); + + let (precompiles, ctx) = (&mut evm.inner.0.precompiles, &mut evm.inner.0.ctx); + + let jovian_precompile = precompiles.get(bn254_pair::JOVIAN.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bn254_pair::JOVIAN_MAX_INPUT_SIZE + 1], + gas: 
u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_err()); + assert!(matches!(result.unwrap_err(), PrecompileError::Bn254PairLength)); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G1_MSM.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_G1_MSM_MAX_INPUT_SIZE + 1], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("G1MSM input length too long")); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G2_MSM.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_G2_MSM_MAX_INPUT_SIZE + 1], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("G2MSM input length too long")); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_PAIRING.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_PAIRING_MAX_INPUT_SIZE + 1], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_err()); + assert!(result.unwrap_err().to_string().contains("Pairing input length too long")); + } + + #[test] + fn test_precompiles_jovian() { + let mut evm = OpEvmFactory::default().create_evm( + 
EmptyDB::default(), + EvmEnv::new(CfgEnv::new_with_spec(OpSpecId::JOVIAN), BlockEnv::default()), + ); + let (precompiles, ctx) = (&mut evm.inner.0.precompiles, &mut evm.inner.0.ctx); + let jovian_precompile = precompiles.get(bn254_pair::JOVIAN.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bn254_pair::JOVIAN_MAX_INPUT_SIZE], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_ok()); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G1_MSM.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_G1_MSM_MAX_INPUT_SIZE], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_ok()); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_G2_MSM.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_G2_MSM_MAX_INPUT_SIZE], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_ok()); + + let jovian_precompile = precompiles.get(bls12_381::JOVIAN_PAIRING.address()).unwrap(); + let result = jovian_precompile.call(PrecompileInput { + data: &vec![0; bls12_381::JOVIAN_PAIRING_MAX_INPUT_SIZE], + gas: u64::MAX, + caller: Address::ZERO, + value: U256::ZERO, + is_static: false, + target_address: Address::ZERO, + bytecode_address: Address::ZERO, + internals: EvmInternals::from_context(ctx), + }); + + assert!(result.is_ok()); + } +} diff --git a/alloy-op-hardforks/.config/zepter.yaml 
b/rust/alloy-op-hardforks/.config/zepter.yaml similarity index 100% rename from alloy-op-hardforks/.config/zepter.yaml rename to rust/alloy-op-hardforks/.config/zepter.yaml diff --git a/alloy-op-hardforks/.gitignore b/rust/alloy-op-hardforks/.gitignore similarity index 100% rename from alloy-op-hardforks/.gitignore rename to rust/alloy-op-hardforks/.gitignore diff --git a/rust/alloy-op-hardforks/Cargo.toml b/rust/alloy-op-hardforks/Cargo.toml new file mode 100644 index 00000000000..1181af60687 --- /dev/null +++ b/rust/alloy-op-hardforks/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "alloy-op-hardforks" +description = "Bindings for named OP hardforks" + +version = "0.4.7" +edition = "2024" +rust-version = "1.88" +authors = ["Alloy Contributors", "OpLabsPBC"] +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" + +[lints] +workspace = true + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[dependencies] +alloy-hardforks = { workspace = true } + +# ethereum +alloy-chains = { workspace = true } +alloy-primitives = { workspace = true } + +# misc +auto_impl = { workspace = true } +serde = { workspace = true, optional = true } + +[features] +serde = [ + "dep:serde", + "alloy-hardforks/serde", + "alloy-chains/serde", + "alloy-primitives/serde" +] diff --git a/alloy-op-hardforks/LICENSE-APACHE b/rust/alloy-op-hardforks/LICENSE-APACHE similarity index 100% rename from alloy-op-hardforks/LICENSE-APACHE rename to rust/alloy-op-hardforks/LICENSE-APACHE diff --git a/alloy-op-hardforks/LICENSE-MIT b/rust/alloy-op-hardforks/LICENSE-MIT similarity index 100% rename from alloy-op-hardforks/LICENSE-MIT rename to rust/alloy-op-hardforks/LICENSE-MIT diff --git a/alloy-op-hardforks/README.md b/rust/alloy-op-hardforks/README.md similarity index 100% rename from alloy-op-hardforks/README.md rename to rust/alloy-op-hardforks/README.md diff --git 
a/alloy-op-hardforks/dprint.json b/rust/alloy-op-hardforks/dprint.json similarity index 100% rename from alloy-op-hardforks/dprint.json rename to rust/alloy-op-hardforks/dprint.json diff --git a/rust/alloy-op-hardforks/justfile b/rust/alloy-op-hardforks/justfile new file mode 100644 index 00000000000..c984998c91c --- /dev/null +++ b/rust/alloy-op-hardforks/justfile @@ -0,0 +1,7 @@ +# default recipe to display help information +default: + @just --list + +# Run cargo tests +test *args='': + cargo test --workspace {{args}} diff --git a/rust/alloy-op-hardforks/src/base/mainnet.rs b/rust/alloy-op-hardforks/src/base/mainnet.rs new file mode 100644 index 00000000000..62556094e16 --- /dev/null +++ b/rust/alloy-op-hardforks/src/base/mainnet.rs @@ -0,0 +1,22 @@ +//! Base Mainnet hardfork starting points + +use crate::optimism::mainnet::*; + +/// Bedrock base hardfork activation block is 0. +pub const BASE_MAINNET_BEDROCK_BLOCK: u64 = 0; +/// Regolith base hardfork activation timestamp is 0. +pub const BASE_MAINNET_REGOLITH_TIMESTAMP: u64 = OP_MAINNET_REGOLITH_TIMESTAMP; +/// Canyon base hardfork activation timestamp is 1704992401. +pub const BASE_MAINNET_CANYON_TIMESTAMP: u64 = OP_MAINNET_CANYON_TIMESTAMP; +/// Ecotone base hardfork activation timestamp is 1710374401. +pub const BASE_MAINNET_ECOTONE_TIMESTAMP: u64 = OP_MAINNET_ECOTONE_TIMESTAMP; +/// Fjord base hardfork activation timestamp is 1720627201. +pub const BASE_MAINNET_FJORD_TIMESTAMP: u64 = OP_MAINNET_FJORD_TIMESTAMP; +/// Granite base hardfork activation timestamp is 1726070401. +pub const BASE_MAINNET_GRANITE_TIMESTAMP: u64 = OP_MAINNET_GRANITE_TIMESTAMP; +/// Holocene base hardfork activation timestamp is 1736445601. +pub const BASE_MAINNET_HOLOCENE_TIMESTAMP: u64 = OP_MAINNET_HOLOCENE_TIMESTAMP; +/// Isthmus base hardfork activation timestamp is 1746806401. +pub const BASE_MAINNET_ISTHMUS_TIMESTAMP: u64 = OP_MAINNET_ISTHMUS_TIMESTAMP; +/// Jovian base hardfork activation timestamp is `1_763_481_601`. 
+pub const BASE_MAINNET_JOVIAN_TIMESTAMP: u64 = OP_MAINNET_JOVIAN_TIMESTAMP; diff --git a/alloy-op-hardforks/src/base/mod.rs b/rust/alloy-op-hardforks/src/base/mod.rs similarity index 100% rename from alloy-op-hardforks/src/base/mod.rs rename to rust/alloy-op-hardforks/src/base/mod.rs diff --git a/rust/alloy-op-hardforks/src/base/sepolia.rs b/rust/alloy-op-hardforks/src/base/sepolia.rs new file mode 100644 index 00000000000..28ed1687c90 --- /dev/null +++ b/rust/alloy-op-hardforks/src/base/sepolia.rs @@ -0,0 +1,22 @@ +//! Base Sepolia hardfork starting points + +use crate::optimism::sepolia::*; + +/// Bedrock base sepolia hardfork activation block is 0. +pub const BASE_SEPOLIA_BEDROCK_BLOCK: u64 = OP_SEPOLIA_BEDROCK_BLOCK; +/// Regolith base sepolia hardfork activation timestamp is 0. +pub const BASE_SEPOLIA_REGOLITH_TIMESTAMP: u64 = OP_SEPOLIA_REGOLITH_TIMESTAMP; +/// Canyon base sepolia hardfork activation timestamp is 1699981200. +pub const BASE_SEPOLIA_CANYON_TIMESTAMP: u64 = OP_SEPOLIA_CANYON_TIMESTAMP; +/// Ecotone base sepolia hardfork activation timestamp is 1708534800. +pub const BASE_SEPOLIA_ECOTONE_TIMESTAMP: u64 = OP_SEPOLIA_ECOTONE_TIMESTAMP; +/// Fjord base sepolia hardfork activation timestamp is 1716998400. +pub const BASE_SEPOLIA_FJORD_TIMESTAMP: u64 = OP_SEPOLIA_FJORD_TIMESTAMP; +/// Granite base sepolia hardfork activation timestamp is 1723478400. +pub const BASE_SEPOLIA_GRANITE_TIMESTAMP: u64 = OP_SEPOLIA_GRANITE_TIMESTAMP; +/// Holocene base sepolia hardfork activation timestamp is 1732633200. +pub const BASE_SEPOLIA_HOLOCENE_TIMESTAMP: u64 = OP_SEPOLIA_HOLOCENE_TIMESTAMP; +/// Isthmus base sepolia hardfork activation timestamp is 1744905600. +pub const BASE_SEPOLIA_ISTHMUS_TIMESTAMP: u64 = OP_SEPOLIA_ISTHMUS_TIMESTAMP; +/// Jovian base sepolia hardfork activation timestamp is `1_762_963_201`. 
+pub const BASE_SEPOLIA_JOVIAN_TIMESTAMP: u64 = OP_SEPOLIA_JOVIAN_TIMESTAMP; diff --git a/rust/alloy-op-hardforks/src/lib.rs b/rust/alloy-op-hardforks/src/lib.rs new file mode 100644 index 00000000000..c4dffbcde53 --- /dev/null +++ b/rust/alloy-op-hardforks/src/lib.rs @@ -0,0 +1,674 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/alloy.jpg", + html_favicon_url = "https://raw.githubusercontent.com/alloy-rs/core/main/assets/favicon.ico" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![no_std] + +extern crate alloc; +use alloc::vec::Vec; +use alloy_chains::{Chain, NamedChain}; +use alloy_hardforks::{EthereumHardfork, hardfork}; +pub use alloy_hardforks::{EthereumHardforks, ForkCondition}; +use alloy_primitives::U256; +use core::ops::Index; + +pub mod optimism; +pub use optimism::{mainnet as op_mainnet, mainnet::*, sepolia as op_sepolia, sepolia::*}; + +pub mod base; +pub use base::{mainnet as base_mainnet, mainnet::*, sepolia as base_sepolia, sepolia::*}; + +hardfork!( + /// The name of an optimism hardfork. + /// + /// When building a list of hardforks for a chain, it's still expected to zip with + /// [`EthereumHardfork`]. + #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + #[derive(Default)] + OpHardfork { + /// Bedrock: . + Bedrock, + /// Regolith: . + Regolith, + /// . + Canyon, + /// Ecotone: . + Ecotone, + /// Fjord: + Fjord, + /// Granite: + Granite, + /// Holocene: + Holocene, + /// Isthmus: + #[default] + Isthmus, + /// Jovian: + Jovian, + /// TODO: add interop hardfork overview when available + Interop, + } +); + +impl OpHardfork { + /// Reverse lookup to find the hardfork given a chain ID and block timestamp. + /// Returns the active hardfork at the given timestamp for the specified OP chain. 
+ pub fn from_chain_and_timestamp(chain: Chain, timestamp: u64) -> Option<Self> { + let named = chain.named()?; + + match named { + NamedChain::Optimism => Some(match timestamp { + _i if timestamp < OP_MAINNET_CANYON_TIMESTAMP => Self::Regolith, + _i if timestamp < OP_MAINNET_ECOTONE_TIMESTAMP => Self::Canyon, + _i if timestamp < OP_MAINNET_FJORD_TIMESTAMP => Self::Ecotone, + _i if timestamp < OP_MAINNET_GRANITE_TIMESTAMP => Self::Fjord, + _i if timestamp < OP_MAINNET_HOLOCENE_TIMESTAMP => Self::Granite, + _i if timestamp < OP_MAINNET_ISTHMUS_TIMESTAMP => Self::Holocene, + _i if timestamp < OP_MAINNET_JOVIAN_TIMESTAMP => Self::Isthmus, + _ => Self::Jovian, + }), + NamedChain::OptimismSepolia => Some(match timestamp { + _i if timestamp < OP_SEPOLIA_CANYON_TIMESTAMP => Self::Regolith, + _i if timestamp < OP_SEPOLIA_ECOTONE_TIMESTAMP => Self::Canyon, + _i if timestamp < OP_SEPOLIA_FJORD_TIMESTAMP => Self::Ecotone, + _i if timestamp < OP_SEPOLIA_GRANITE_TIMESTAMP => Self::Fjord, + _i if timestamp < OP_SEPOLIA_HOLOCENE_TIMESTAMP => Self::Granite, + _i if timestamp < OP_SEPOLIA_ISTHMUS_TIMESTAMP => Self::Holocene, + _i if timestamp < OP_SEPOLIA_JOVIAN_TIMESTAMP => Self::Isthmus, + _ => Self::Jovian, + }), + NamedChain::Base => Some(match timestamp { + _i if timestamp < BASE_MAINNET_CANYON_TIMESTAMP => Self::Regolith, + _i if timestamp < BASE_MAINNET_ECOTONE_TIMESTAMP => Self::Canyon, + _i if timestamp < BASE_MAINNET_FJORD_TIMESTAMP => Self::Ecotone, + _i if timestamp < BASE_MAINNET_GRANITE_TIMESTAMP => Self::Fjord, + _i if timestamp < BASE_MAINNET_HOLOCENE_TIMESTAMP => Self::Granite, + _i if timestamp < BASE_MAINNET_ISTHMUS_TIMESTAMP => Self::Holocene, + _i if timestamp < BASE_MAINNET_JOVIAN_TIMESTAMP => Self::Isthmus, + _ => Self::Jovian, + }), + NamedChain::BaseSepolia => Some(match timestamp { + _i if timestamp < BASE_SEPOLIA_CANYON_TIMESTAMP => Self::Regolith, + _i if timestamp < BASE_SEPOLIA_ECOTONE_TIMESTAMP => Self::Canyon, + _i if timestamp < 
BASE_SEPOLIA_FJORD_TIMESTAMP => Self::Ecotone, + _i if timestamp < BASE_SEPOLIA_GRANITE_TIMESTAMP => Self::Fjord, + _i if timestamp < BASE_SEPOLIA_HOLOCENE_TIMESTAMP => Self::Granite, + _i if timestamp < BASE_SEPOLIA_ISTHMUS_TIMESTAMP => Self::Holocene, + _i if timestamp < BASE_SEPOLIA_JOVIAN_TIMESTAMP => Self::Isthmus, + _ => Self::Jovian, + }), + _ => None, + } + } + + /// Optimism mainnet list of hardforks. + pub const fn op_mainnet() -> [(Self, ForkCondition); 9] { + [ + (Self::Bedrock, ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK)), + (Self::Regolith, ForkCondition::Timestamp(OP_MAINNET_REGOLITH_TIMESTAMP)), + (Self::Canyon, ForkCondition::Timestamp(OP_MAINNET_CANYON_TIMESTAMP)), + (Self::Ecotone, ForkCondition::Timestamp(OP_MAINNET_ECOTONE_TIMESTAMP)), + (Self::Fjord, ForkCondition::Timestamp(OP_MAINNET_FJORD_TIMESTAMP)), + (Self::Granite, ForkCondition::Timestamp(OP_MAINNET_GRANITE_TIMESTAMP)), + (Self::Holocene, ForkCondition::Timestamp(OP_MAINNET_HOLOCENE_TIMESTAMP)), + (Self::Isthmus, ForkCondition::Timestamp(OP_MAINNET_ISTHMUS_TIMESTAMP)), + (Self::Jovian, ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)), + ] + } + + /// Optimism Sepolia list of hardforks. + pub const fn op_sepolia() -> [(Self, ForkCondition); 9] { + [ + (Self::Bedrock, ForkCondition::Block(OP_SEPOLIA_BEDROCK_BLOCK)), + (Self::Regolith, ForkCondition::Timestamp(OP_SEPOLIA_REGOLITH_TIMESTAMP)), + (Self::Canyon, ForkCondition::Timestamp(OP_SEPOLIA_CANYON_TIMESTAMP)), + (Self::Ecotone, ForkCondition::Timestamp(OP_SEPOLIA_ECOTONE_TIMESTAMP)), + (Self::Fjord, ForkCondition::Timestamp(OP_SEPOLIA_FJORD_TIMESTAMP)), + (Self::Granite, ForkCondition::Timestamp(OP_SEPOLIA_GRANITE_TIMESTAMP)), + (Self::Holocene, ForkCondition::Timestamp(OP_SEPOLIA_HOLOCENE_TIMESTAMP)), + (Self::Isthmus, ForkCondition::Timestamp(OP_SEPOLIA_ISTHMUS_TIMESTAMP)), + (Self::Jovian, ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)), + ] + } + + /// Base mainnet list of hardforks. 
+ pub const fn base_mainnet() -> [(Self, ForkCondition); 9] { + [ + (Self::Bedrock, ForkCondition::Block(BASE_MAINNET_BEDROCK_BLOCK)), + (Self::Regolith, ForkCondition::Timestamp(BASE_MAINNET_REGOLITH_TIMESTAMP)), + (Self::Canyon, ForkCondition::Timestamp(BASE_MAINNET_CANYON_TIMESTAMP)), + (Self::Ecotone, ForkCondition::Timestamp(BASE_MAINNET_ECOTONE_TIMESTAMP)), + (Self::Fjord, ForkCondition::Timestamp(BASE_MAINNET_FJORD_TIMESTAMP)), + (Self::Granite, ForkCondition::Timestamp(BASE_MAINNET_GRANITE_TIMESTAMP)), + (Self::Holocene, ForkCondition::Timestamp(BASE_MAINNET_HOLOCENE_TIMESTAMP)), + (Self::Isthmus, ForkCondition::Timestamp(BASE_MAINNET_ISTHMUS_TIMESTAMP)), + (Self::Jovian, ForkCondition::Timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP)), + ] + } + + /// Base Sepolia list of hardforks. + pub const fn base_sepolia() -> [(Self, ForkCondition); 9] { + [ + (Self::Bedrock, ForkCondition::Block(BASE_SEPOLIA_BEDROCK_BLOCK)), + (Self::Regolith, ForkCondition::Timestamp(BASE_SEPOLIA_REGOLITH_TIMESTAMP)), + (Self::Canyon, ForkCondition::Timestamp(BASE_SEPOLIA_CANYON_TIMESTAMP)), + (Self::Ecotone, ForkCondition::Timestamp(BASE_SEPOLIA_ECOTONE_TIMESTAMP)), + (Self::Fjord, ForkCondition::Timestamp(BASE_SEPOLIA_FJORD_TIMESTAMP)), + (Self::Granite, ForkCondition::Timestamp(BASE_SEPOLIA_GRANITE_TIMESTAMP)), + (Self::Holocene, ForkCondition::Timestamp(BASE_SEPOLIA_HOLOCENE_TIMESTAMP)), + (Self::Isthmus, ForkCondition::Timestamp(BASE_SEPOLIA_ISTHMUS_TIMESTAMP)), + (Self::Jovian, ForkCondition::Timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP)), + ] + } + + /// Devnet list of hardforks. 
+ pub const fn devnet() -> [(Self, ForkCondition); 9] { + [ + (Self::Bedrock, ForkCondition::ZERO_BLOCK), + (Self::Regolith, ForkCondition::ZERO_TIMESTAMP), + (Self::Canyon, ForkCondition::ZERO_TIMESTAMP), + (Self::Ecotone, ForkCondition::ZERO_TIMESTAMP), + (Self::Fjord, ForkCondition::ZERO_TIMESTAMP), + (Self::Granite, ForkCondition::ZERO_TIMESTAMP), + (Self::Holocene, ForkCondition::ZERO_TIMESTAMP), + (Self::Isthmus, ForkCondition::ZERO_TIMESTAMP), + (Self::Jovian, ForkCondition::Timestamp(1762185600)), + ] + } + + /// Returns index of `self` in sorted canonical array. + pub const fn idx(&self) -> usize { + *self as usize + } +} + +/// Extends [`EthereumHardforks`] with optimism helper methods. +#[auto_impl::auto_impl(&, Arc)] +pub trait OpHardforks: EthereumHardforks { + /// Retrieves [`ForkCondition`] by an [`OpHardfork`]. If `fork` is not present, returns + /// [`ForkCondition::Never`]. + fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition; + + /// Convenience method to check if [`OpHardfork::Bedrock`] is active at a given block + /// number. + fn is_bedrock_active_at_block(&self, block_number: u64) -> bool { + self.op_fork_activation(OpHardfork::Bedrock).active_at_block(block_number) + } + + /// Returns `true` if [`Regolith`](OpHardfork::Regolith) is active at given block + /// timestamp. + fn is_regolith_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Regolith).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Canyon`](OpHardfork::Canyon) is active at given block timestamp. + fn is_canyon_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Canyon).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Ecotone`](OpHardfork::Ecotone) is active at given block timestamp. 
+ fn is_ecotone_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Ecotone).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Fjord`](OpHardfork::Fjord) is active at given block timestamp. + fn is_fjord_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Fjord).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Granite`](OpHardfork::Granite) is active at given block timestamp. + fn is_granite_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Granite).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Holocene`](OpHardfork::Holocene) is active at given block + /// timestamp. + fn is_holocene_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Holocene).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Isthmus`](OpHardfork::Isthmus) is active at given block + /// timestamp. + fn is_isthmus_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Isthmus).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Jovian`](OpHardfork::Jovian) is active at given block + /// timestamp. + fn is_jovian_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Jovian).active_at_timestamp(timestamp) + } + + /// Returns `true` if [`Interop`](OpHardfork::Interop) is active at given block + /// timestamp. + fn is_interop_active_at_timestamp(&self, timestamp: u64) -> bool { + self.op_fork_activation(OpHardfork::Interop).active_at_timestamp(timestamp) + } +} + +/// A type allowing to configure activation [`ForkCondition`]s for a given list of +/// [`OpHardfork`]s. +/// +/// Zips together [`EthereumHardfork`]s and [`OpHardfork`]s. Optimism hard forks, at least, +/// whenever Ethereum hard forks. 
When Ethereum hard forks, a new [`OpHardfork`] piggybacks on top +/// of the new [`EthereumHardfork`] to include (or to noop) the L1 changes on L2. +/// +/// Optimism can also hard fork independently of Ethereum. The relation between Ethereum and +/// Optimism hard forks is described by predicate [`EthereumHardfork`] `=>` [`OpHardfork`], since +/// an OP chain can undergo an [`OpHardfork`] without an [`EthereumHardfork`], but not the other +/// way around. +#[derive(Debug, Clone)] +pub struct OpChainHardforks { + /// Ordered list of OP hardfork activations. + forks: Vec<(OpHardfork, ForkCondition)>, +} + +impl OpChainHardforks { + /// Creates a new [`OpChainHardforks`] with the given list of forks. The input list is sorted + /// w.r.t. the hardcoded canonicity of [`OpHardfork`]s. + pub fn new(forks: impl IntoIterator<Item = (OpHardfork, ForkCondition)>) -> Self { + let mut forks = forks.into_iter().collect::<Vec<_>>(); + forks.sort(); + Self { forks } + } + + /// Creates a new [`OpChainHardforks`] with OP mainnet configuration. + pub fn op_mainnet() -> Self { + Self::new(OpHardfork::op_mainnet()) + } + + /// Creates a new [`OpChainHardforks`] with OP Sepolia configuration. + pub fn op_sepolia() -> Self { + Self::new(OpHardfork::op_sepolia()) + } + + /// Creates a new [`OpChainHardforks`] with Base mainnet configuration. + pub fn base_mainnet() -> Self { + Self::new(OpHardfork::base_mainnet()) + } + + /// Creates a new [`OpChainHardforks`] with Base Sepolia configuration. + pub fn base_sepolia() -> Self { + Self::new(OpHardfork::base_sepolia()) + } + + /// Creates a new [`OpChainHardforks`] with devnet configuration. + pub fn devnet() -> Self { + Self::new(OpHardfork::devnet()) + } + + /// Returns `true` if this is an OP mainnet instance. 
+ pub fn is_op_mainnet(&self) -> bool { + self[OpHardfork::Bedrock] == ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK) + } +} + +impl EthereumHardforks for OpChainHardforks { + fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { + use EthereumHardfork::{Cancun, Prague, Shanghai}; + use OpHardfork::{Canyon, Ecotone, Isthmus}; + + if self.forks.is_empty() { + return ForkCondition::Never; + } + + let forks_len = self.forks.len(); + // check index out of bounds + match fork { + Shanghai if forks_len <= Canyon.idx() => ForkCondition::Never, + Cancun if forks_len <= Ecotone.idx() => ForkCondition::Never, + Prague if forks_len <= Isthmus.idx() => ForkCondition::Never, + _ => self[fork], + } + } +} + +impl OpHardforks for OpChainHardforks { + fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { + // check index out of bounds + if self.forks.len() <= fork.idx() { + return ForkCondition::Never; + } + self[fork] + } +} + +impl Index<OpHardfork> for OpChainHardforks { + type Output = ForkCondition; + + fn index(&self, hf: OpHardfork) -> &Self::Output { + use OpHardfork::{ + Bedrock, Canyon, Ecotone, Fjord, Granite, Holocene, Interop, Isthmus, Jovian, Regolith, + }; + + match hf { + Bedrock => &self.forks[Bedrock.idx()].1, + Regolith => &self.forks[Regolith.idx()].1, + Canyon => &self.forks[Canyon.idx()].1, + Ecotone => &self.forks[Ecotone.idx()].1, + Fjord => &self.forks[Fjord.idx()].1, + Granite => &self.forks[Granite.idx()].1, + Holocene => &self.forks[Holocene.idx()].1, + Isthmus => &self.forks[Isthmus.idx()].1, + Jovian => &self.forks[Jovian.idx()].1, + Interop => &self.forks[Interop.idx()].1, + } + } +} + +impl Index<EthereumHardfork> for OpChainHardforks { + type Output = ForkCondition; + + fn index(&self, hf: EthereumHardfork) -> &Self::Output { + use EthereumHardfork::{ + Amsterdam, ArrowGlacier, Berlin, Bpo1, Bpo2, Bpo3, Bpo4, Bpo5, Byzantium, Cancun, + Constantinople, Dao, Frontier, GrayGlacier, Homestead, Istanbul, London, MuirGlacier, + Osaka, Paris, 
Petersburg, Prague, Shanghai, SpuriousDragon, Tangerine, + }; + use OpHardfork::{Bedrock, Canyon, Ecotone, Isthmus}; + + match hf { + // Dao Hardfork is not needed for OpChainHardforks + Dao | Osaka | Bpo1 | Bpo2 | Bpo3 | Bpo4 | Bpo5 | Amsterdam => &ForkCondition::Never, + Berlin if self.is_op_mainnet() => &ForkCondition::Block(OP_MAINNET_BERLIN_BLOCK), + Frontier | Homestead | Tangerine | SpuriousDragon | Byzantium | Constantinople | + Petersburg | Istanbul | MuirGlacier | Berlin => &ForkCondition::ZERO_BLOCK, + London | ArrowGlacier | GrayGlacier => &self[Bedrock], + Paris if self.is_op_mainnet() => &ForkCondition::TTD { + activation_block_number: OP_MAINNET_BEDROCK_BLOCK, + fork_block: Some(OP_MAINNET_BEDROCK_BLOCK), + total_difficulty: U256::ZERO, + }, + Paris => &ForkCondition::TTD { + activation_block_number: 0, + fork_block: Some(0), + total_difficulty: U256::ZERO, + }, + Shanghai => &self[Canyon], + Cancun => &self[Ecotone], + Prague => &self[Isthmus], + _ => unreachable!(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use core::str::FromStr; + + extern crate alloc; + + #[test] + fn check_op_hardfork_from_str() { + let hardfork_str = [ + "beDrOck", "rEgOlITH", "cAnYoN", "eCoToNe", "FJorD", "GRaNiTe", "hOlOcEnE", "isthMUS", + "jOvIaN", "inTerOP", + ]; + let expected_hardforks = [ + OpHardfork::Bedrock, + OpHardfork::Regolith, + OpHardfork::Canyon, + OpHardfork::Ecotone, + OpHardfork::Fjord, + OpHardfork::Granite, + OpHardfork::Holocene, + OpHardfork::Isthmus, + OpHardfork::Jovian, + OpHardfork::Interop, + ]; + + let hardforks: alloc::vec::Vec<OpHardfork> = + hardfork_str.iter().map(|h| OpHardfork::from_str(h).unwrap()).collect(); + + assert_eq!(hardforks, expected_hardforks); + } + + #[test] + fn check_nonexistent_hardfork_from_str() { + assert!(OpHardfork::from_str("not a hardfork").is_err()); + } + + #[test] + fn op_mainnet_fork_conditions() { + use OpHardfork::*; + + let op_mainnet_forks = OpChainHardforks::op_mainnet(); + 
assert_eq!(op_mainnet_forks[Bedrock], ForkCondition::Block(OP_MAINNET_BEDROCK_BLOCK)); + assert_eq!( + op_mainnet_forks[Regolith], + ForkCondition::Timestamp(OP_MAINNET_REGOLITH_TIMESTAMP) + ); + assert_eq!(op_mainnet_forks[Canyon], ForkCondition::Timestamp(OP_MAINNET_CANYON_TIMESTAMP)); + assert_eq!( + op_mainnet_forks[Ecotone], + ForkCondition::Timestamp(OP_MAINNET_ECOTONE_TIMESTAMP) + ); + assert_eq!(op_mainnet_forks[Fjord], ForkCondition::Timestamp(OP_MAINNET_FJORD_TIMESTAMP)); + assert_eq!( + op_mainnet_forks[Granite], + ForkCondition::Timestamp(OP_MAINNET_GRANITE_TIMESTAMP) + ); + assert_eq!( + op_mainnet_forks[Holocene], + ForkCondition::Timestamp(OP_MAINNET_HOLOCENE_TIMESTAMP) + ); + assert_eq!( + op_mainnet_forks[Isthmus], + ForkCondition::Timestamp(OP_MAINNET_ISTHMUS_TIMESTAMP) + ); + assert_eq!(op_mainnet_forks[Jovian], ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)); + assert_eq!(op_mainnet_forks.op_fork_activation(Interop), ForkCondition::Never); + } + + #[test] + fn op_sepolia_fork_conditions() { + use OpHardfork::*; + + let op_sepolia_forks = OpChainHardforks::op_sepolia(); + assert_eq!(op_sepolia_forks[Bedrock], ForkCondition::Block(OP_SEPOLIA_BEDROCK_BLOCK)); + assert_eq!( + op_sepolia_forks[Regolith], + ForkCondition::Timestamp(OP_SEPOLIA_REGOLITH_TIMESTAMP) + ); + assert_eq!(op_sepolia_forks[Canyon], ForkCondition::Timestamp(OP_SEPOLIA_CANYON_TIMESTAMP)); + assert_eq!( + op_sepolia_forks[Ecotone], + ForkCondition::Timestamp(OP_SEPOLIA_ECOTONE_TIMESTAMP) + ); + assert_eq!(op_sepolia_forks[Fjord], ForkCondition::Timestamp(OP_SEPOLIA_FJORD_TIMESTAMP)); + assert_eq!( + op_sepolia_forks[Granite], + ForkCondition::Timestamp(OP_SEPOLIA_GRANITE_TIMESTAMP) + ); + assert_eq!( + op_sepolia_forks[Holocene], + ForkCondition::Timestamp(OP_SEPOLIA_HOLOCENE_TIMESTAMP) + ); + assert_eq!( + op_sepolia_forks[Isthmus], + ForkCondition::Timestamp(OP_SEPOLIA_ISTHMUS_TIMESTAMP) + ); + assert_eq!(op_sepolia_forks[Jovian], 
ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)); + assert_eq!(op_sepolia_forks.op_fork_activation(Interop), ForkCondition::Never); + } + + #[test] + fn base_mainnet_fork_conditions() { + use OpHardfork::*; + + let base_mainnet_forks = OpChainHardforks::base_mainnet(); + assert_eq!(base_mainnet_forks[Bedrock], ForkCondition::Block(BASE_MAINNET_BEDROCK_BLOCK)); + assert_eq!( + base_mainnet_forks[Regolith], + ForkCondition::Timestamp(BASE_MAINNET_REGOLITH_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Canyon], + ForkCondition::Timestamp(BASE_MAINNET_CANYON_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Ecotone], + ForkCondition::Timestamp(BASE_MAINNET_ECOTONE_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Fjord], + ForkCondition::Timestamp(BASE_MAINNET_FJORD_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Granite], + ForkCondition::Timestamp(BASE_MAINNET_GRANITE_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Holocene], + ForkCondition::Timestamp(BASE_MAINNET_HOLOCENE_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Isthmus], + ForkCondition::Timestamp(BASE_MAINNET_ISTHMUS_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Jovian], + ForkCondition::Timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP) + ); + assert_eq!( + base_mainnet_forks[Jovian], + ForkCondition::Timestamp(OP_MAINNET_JOVIAN_TIMESTAMP) + ); + assert_eq!(base_mainnet_forks.op_fork_activation(Interop), ForkCondition::Never); + } + + #[test] + fn base_sepolia_fork_conditions() { + use OpHardfork::*; + + let base_sepolia_forks = OpChainHardforks::base_sepolia(); + assert_eq!(base_sepolia_forks[Bedrock], ForkCondition::Block(BASE_SEPOLIA_BEDROCK_BLOCK)); + assert_eq!( + base_sepolia_forks[Regolith], + ForkCondition::Timestamp(BASE_SEPOLIA_REGOLITH_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Canyon], + ForkCondition::Timestamp(BASE_SEPOLIA_CANYON_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Ecotone], + ForkCondition::Timestamp(BASE_SEPOLIA_ECOTONE_TIMESTAMP) + ); + assert_eq!( + 
base_sepolia_forks[Fjord], + ForkCondition::Timestamp(BASE_SEPOLIA_FJORD_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Granite], + ForkCondition::Timestamp(BASE_SEPOLIA_GRANITE_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Holocene], + ForkCondition::Timestamp(BASE_SEPOLIA_HOLOCENE_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Isthmus], + ForkCondition::Timestamp(BASE_SEPOLIA_ISTHMUS_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks.op_fork_activation(Jovian), + ForkCondition::Timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP) + ); + assert_eq!( + base_sepolia_forks[Jovian], + ForkCondition::Timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP) + ); + assert_eq!(base_sepolia_forks.op_fork_activation(Interop), ForkCondition::Never); + } + + #[test] + fn is_jovian_active_at_timestamp() { + let op_mainnet_forks = OpChainHardforks::op_mainnet(); + assert!(op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP)); + assert!(!op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP - 1)); + assert!(op_mainnet_forks.is_jovian_active_at_timestamp(OP_MAINNET_JOVIAN_TIMESTAMP + 1000)); + + let op_sepolia_forks = OpChainHardforks::op_sepolia(); + assert!(op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP)); + assert!(!op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP - 1)); + assert!(op_sepolia_forks.is_jovian_active_at_timestamp(OP_SEPOLIA_JOVIAN_TIMESTAMP + 1000)); + + let base_mainnet_forks = OpChainHardforks::base_mainnet(); + assert!(base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP)); + assert!( + !base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP - 1) + ); + assert!( + base_mainnet_forks.is_jovian_active_at_timestamp(BASE_MAINNET_JOVIAN_TIMESTAMP + 1000) + ); + + let base_sepolia_forks = OpChainHardforks::base_sepolia(); + assert!(base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP)); + assert!( + 
!base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP - 1) + ); + assert!( + base_sepolia_forks.is_jovian_active_at_timestamp(BASE_SEPOLIA_JOVIAN_TIMESTAMP + 1000) + ); + } + + #[test] + fn test_reverse_lookup_op_chains() { + // Test key hardforks across all OP stack chains + let test_cases = [ + // (chain_id, timestamp, expected) - focusing on major transitions + // OP Mainnet + (Chain::optimism_mainnet(), OP_MAINNET_CANYON_TIMESTAMP, OpHardfork::Canyon), + (Chain::optimism_mainnet(), OP_MAINNET_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), + (Chain::optimism_mainnet(), OP_MAINNET_GRANITE_TIMESTAMP, OpHardfork::Granite), + (Chain::optimism_mainnet(), OP_MAINNET_CANYON_TIMESTAMP - 1, OpHardfork::Regolith), + (Chain::optimism_mainnet(), OP_MAINNET_ISTHMUS_TIMESTAMP + 1000, OpHardfork::Isthmus), + (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP, OpHardfork::Jovian), + (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP - 1, OpHardfork::Isthmus), + (Chain::optimism_mainnet(), OP_MAINNET_JOVIAN_TIMESTAMP + 1000, OpHardfork::Jovian), + // OP Sepolia + (Chain::optimism_sepolia(), OP_SEPOLIA_CANYON_TIMESTAMP, OpHardfork::Canyon), + (Chain::optimism_sepolia(), OP_SEPOLIA_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), + (Chain::optimism_sepolia(), OP_SEPOLIA_CANYON_TIMESTAMP - 1, OpHardfork::Regolith), + (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP, OpHardfork::Jovian), + (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP - 1, OpHardfork::Isthmus), + (Chain::optimism_sepolia(), OP_SEPOLIA_JOVIAN_TIMESTAMP + 1000, OpHardfork::Jovian), + // Base Mainnet + (Chain::base_mainnet(), BASE_MAINNET_CANYON_TIMESTAMP, OpHardfork::Canyon), + (Chain::base_mainnet(), BASE_MAINNET_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), + (Chain::base_mainnet(), BASE_MAINNET_JOVIAN_TIMESTAMP, OpHardfork::Jovian), + // Base Sepolia + (Chain::base_sepolia(), BASE_SEPOLIA_CANYON_TIMESTAMP, OpHardfork::Canyon), + (Chain::base_sepolia(), 
BASE_SEPOLIA_ECOTONE_TIMESTAMP, OpHardfork::Ecotone), + (Chain::base_sepolia(), BASE_SEPOLIA_JOVIAN_TIMESTAMP, OpHardfork::Jovian), + ]; + + for (chain_id, timestamp, expected) in test_cases { + assert_eq!( + OpHardfork::from_chain_and_timestamp(chain_id, timestamp), + Some(expected), + "chain {chain_id} at timestamp {timestamp}" + ); + } + + // Edge cases + assert_eq!(OpHardfork::from_chain_and_timestamp(Chain::from_id(999999), 1000000), None); + } + + // https://github.com/alloy-rs/hardforks/issues/63 + #[test] + fn test_ethereum_fork_activation_consistency() { + let op_mainnet_forks = OpChainHardforks::op_mainnet(); + for ethereum_hardfork in EthereumHardfork::VARIANTS { + let _ = op_mainnet_forks.ethereum_fork_activation(*ethereum_hardfork); + } + for op_hardfork in OpHardfork::VARIANTS { + let _ = op_mainnet_forks.op_fork_activation(*op_hardfork); + } + } +} diff --git a/rust/alloy-op-hardforks/src/optimism/mainnet.rs b/rust/alloy-op-hardforks/src/optimism/mainnet.rs new file mode 100644 index 00000000000..a7fb0ef1119 --- /dev/null +++ b/rust/alloy-op-hardforks/src/optimism/mainnet.rs @@ -0,0 +1,24 @@ +//! Optimism Mainnet hardfork starting points + +//------------------------ OVM chain ------------------------// +/// Berlin hardfork activation block is 3950000. +pub const OP_MAINNET_BERLIN_BLOCK: u64 = 3_950_000; +//------------------------ EVM chain ------------------------// +/// Bedrock hardfork activation block is 105235063. +pub const OP_MAINNET_BEDROCK_BLOCK: u64 = 105_235_063; +/// Regolith hardfork activation timestamp is 0. +pub const OP_MAINNET_REGOLITH_TIMESTAMP: u64 = 0; +/// Canyon hardfork activation timestamp is 1704992401. +pub const OP_MAINNET_CANYON_TIMESTAMP: u64 = 1_704_992_401; +/// Ecotone hardfork activation timestamp is 1710374401. +pub const OP_MAINNET_ECOTONE_TIMESTAMP: u64 = 1_710_374_401; +/// Fjord hardfork activation timestamp is 1720627201. 
+pub const OP_MAINNET_FJORD_TIMESTAMP: u64 = 1_720_627_201; +/// Granite hardfork activation timestamp is 1726070401. +pub const OP_MAINNET_GRANITE_TIMESTAMP: u64 = 1_726_070_401; +/// Holocene hardfork activation timestamp is 1736445601. +pub const OP_MAINNET_HOLOCENE_TIMESTAMP: u64 = 1_736_445_601; +/// Isthmus hardfork activation timestamp is 1746806401. +pub const OP_MAINNET_ISTHMUS_TIMESTAMP: u64 = 1_746_806_401; +/// Jovian hardfork activation timestamp is `1_764_691_201` # Tue 2 Dec 2025 16:00:01 UTC +pub const OP_MAINNET_JOVIAN_TIMESTAMP: u64 = 1_764_691_201; diff --git a/alloy-op-hardforks/src/optimism/mod.rs b/rust/alloy-op-hardforks/src/optimism/mod.rs similarity index 100% rename from alloy-op-hardforks/src/optimism/mod.rs rename to rust/alloy-op-hardforks/src/optimism/mod.rs diff --git a/rust/alloy-op-hardforks/src/optimism/sepolia.rs b/rust/alloy-op-hardforks/src/optimism/sepolia.rs new file mode 100644 index 00000000000..6414a510b7b --- /dev/null +++ b/rust/alloy-op-hardforks/src/optimism/sepolia.rs @@ -0,0 +1,20 @@ +//! Optimism Sepolia hardfork starting points + +/// Bedrock sepolia hardfork activation block is 0. +pub const OP_SEPOLIA_BEDROCK_BLOCK: u64 = 0; +/// Regolith sepolia hardfork activation timestamp is 0. +pub const OP_SEPOLIA_REGOLITH_TIMESTAMP: u64 = 0; +/// Canyon sepolia hardfork activation timestamp is 1699981200. +pub const OP_SEPOLIA_CANYON_TIMESTAMP: u64 = 1_699_981_200; +/// Ecotone sepolia hardfork activation timestamp is 1708534800. +pub const OP_SEPOLIA_ECOTONE_TIMESTAMP: u64 = 1_708_534_800; +/// Fjord sepolia hardfork activation timestamp is 1716998400. +pub const OP_SEPOLIA_FJORD_TIMESTAMP: u64 = 1_716_998_400; +/// Granite sepolia hardfork activation timestamp is 1723478400. +pub const OP_SEPOLIA_GRANITE_TIMESTAMP: u64 = 1_723_478_400; +/// Holocene sepolia hardfork activation timestamp is 1732633200. 
+pub const OP_SEPOLIA_HOLOCENE_TIMESTAMP: u64 = 1_732_633_200; +/// Isthmus sepolia hardfork activation timestamp is 1744905600. +pub const OP_SEPOLIA_ISTHMUS_TIMESTAMP: u64 = 1_744_905_600; +/// Jovian sepolia hardfork activation timestamp is `1_763_568_001` # Wed 19 Nov 2025 16:00:01 UTC. +pub const OP_SEPOLIA_JOVIAN_TIMESTAMP: u64 = 1_763_568_001; diff --git a/rust/clippy.toml b/rust/clippy.toml new file mode 100644 index 00000000000..1e75cb34f32 --- /dev/null +++ b/rust/clippy.toml @@ -0,0 +1,18 @@ +msrv = "1.88" +too-large-for-stack = 128 +doc-valid-idents = [ + "P2P", + "ExEx", + "ExExes", + "IPv4", + "IPv6", + "KiB", + "MiB", + "GiB", + "TiB", + "PiB", + "EiB", + "WAL", + "MessagePack", +] +allow-dbg-in-tests = true diff --git a/rust/deny.toml b/rust/deny.toml new file mode 100644 index 00000000000..461d1eda193 --- /dev/null +++ b/rust/deny.toml @@ -0,0 +1,114 @@ +[graph] +targets = [] +all-features = false +no-default-features = false + +[output] +feature-depth = 1 + +# This section is considered when running `cargo deny check advisories` +[advisories] +yanked = "warn" +ignore = [ + # paste crate is no longer maintained. + "RUSTSEC-2024-0436", + # https://rustsec.org/advisories/RUSTSEC-2024-0384 used by sse example + "RUSTSEC-2024-0384", + "RUSTSEC-2025-0012", + # bincode is unmaintained but still functional; transitive dep from reth-nippy-jar and test-fuzz. + "RUSTSEC-2025-0141", +] + +# This section is considered when running `cargo deny check bans`. 
+[bans] +multiple-versions = "warn" +wildcards = "allow" +highlight = "all" +workspace-default-features = "allow" +external-default-features = "allow" +allow = [] +deny = [{ name = "openssl", wrappers = ["kona-gossip", "native-tls"] }] +skip = [] +skip-tree = [] + +[licenses] +version = 2 +confidence-threshold = 0.8 + +allow = [ + "MIT", + "MIT-0", + "Apache-2.0", + "Apache-2.0 WITH LLVM-exception", + "BSD-2-Clause", + "BSD-3-Clause", + "BSL-1.0", + "0BSD", + "CC0-1.0", + "ISC", + "Unlicense", + "Unicode-3.0", + "Zlib", + "LicenseRef-rustls-webpki", + "CDLA-Permissive-2.0", + "MPL-2.0", +] + +exceptions = [ + # CC0 is a permissive license but somewhat unclear status for source code + # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal + { allow = ["CC0-1.0"], name = "secp256k1" }, + { allow = ["CC0-1.0"], name = "aurora-engine-modexp" }, + { allow = ["CC0-1.0"], name = "secp256k1-sys" }, + { allow = ["CC0-1.0"], name = "tiny-keccak" }, + { allow = ["CC0-1.0"], name = "notify" }, + # aws-lc-sys includes OpenSSL in its composite license expression + { allow = ["OpenSSL"], name = "aws-lc-sys" }, + # gmp feature (optional, LGPL-licensed) + { allow = ["LGPL-3.0-or-later"], crate = "rug" }, + { allow = ["LGPL-3.0-or-later"], crate = "gmp-mpfr-sys" }, + # Grandfathered MPL-2.0 exceptions from op-reth + { allow = ["MPL-2.0"], name = "option-ext" }, + { allow = ["MPL-2.0"], name = "webpki-root-certs" }, +] + +[[licenses.clarify]] +name = "ring" +expression = "LicenseRef-ring" +license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] + +[[licenses.clarify]] +name = "rustls-webpki" +expression = "LicenseRef-rustls-webpki" +license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] + +[[licenses.clarify]] +name = "webpki" +expression = "LicenseRef-webpki" +license-files = [{ path = "LICENSE", hash = 0x001c7e6c }] + +[licenses.private] +ignore = false +registries = [] + +[sources] +allow-git = [ + "https://github.com/paradigmxyz/reth", + 
"https://github.com/alloy-rs/alloy", + "https://github.com/alloy-rs/hardforks", + "https://github.com/alloy-rs/evm", + "https://github.com/bluealloy/revm", + "https://github.com/paritytech/jsonrpsee", + "https://github.com/paradigmxyz/jsonrpsee", + "https://github.com/paradigmxyz/revm-inspectors", + "https://github.com/foundry-rs/block-explorers", + "https://github.com/flashbots/rollup-boost", +] +unknown-registry = "warn" +unknown-git = "deny" +allow-registry = ["https://github.com/rust-lang/crates.io-index"] + +[sources.allow-org] +github = [] +gitlab = [] +bitbucket = [] diff --git a/kona/docs/docs/components/CodeGroup.tsx b/rust/docs/docs/components/CodeGroup.tsx similarity index 100% rename from kona/docs/docs/components/CodeGroup.tsx rename to rust/docs/docs/components/CodeGroup.tsx diff --git a/kona/docs/docs/components/SdkShowcase.tsx b/rust/docs/docs/components/SdkShowcase.tsx similarity index 93% rename from kona/docs/docs/components/SdkShowcase.tsx rename to rust/docs/docs/components/SdkShowcase.tsx index 667c5ef928f..3b5be6c29e2 100644 --- a/kona/docs/docs/components/SdkShowcase.tsx +++ b/rust/docs/docs/components/SdkShowcase.tsx @@ -12,13 +12,13 @@ const projects: SdkProject[] = [ name: 'Kona Client', description: 'Fault proof program for rollup state transitions', linesOfCode: '~3K LoC', - githubUrl: 'https://github.com/op-rs/kona/tree/main/bin/client' + githubUrl: 'https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/bin/client' }, { name: 'Kona Node', description: 'Modular OP Stack rollup node implementation', linesOfCode: '~8K LoC', - githubUrl: 'https://github.com/op-rs/kona/tree/main/bin/node' + githubUrl: 'https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/bin/node' }, { name: 'OP Succinct', diff --git a/kona/docs/docs/components/TrustedBy.tsx b/rust/docs/docs/components/TrustedBy.tsx similarity index 100% rename from kona/docs/docs/components/TrustedBy.tsx rename to rust/docs/docs/components/TrustedBy.tsx diff 
--git a/rust/docs/docs/pages/index.mdx b/rust/docs/docs/pages/index.mdx new file mode 100644 index 00000000000..888d0283dda --- /dev/null +++ b/rust/docs/docs/pages/index.mdx @@ -0,0 +1,115 @@ +--- +layout: landing +--- + +import { HomePage } from 'vocs/components' +import { SdkShowcase } from '../components/SdkShowcase' +import { CodeGroup, Code } from '../components/CodeGroup' + +
+
+
+
+

+ OP Stack Rust +

+

+ Rust implementations for the OP Stack +

+

+ A unified documentation site for OP Stack Rust components: Kona (rollup node & fault proofs), + op-reth (execution client), and op-alloy (types & providers). + Built by OP Labs. +

+
+
+
+ + + + +
+
+
+ + + + + + +
+
+

Built with Kona SDK

+

+ Production implementations using Kona's modular architecture +

+ +
+
diff --git a/rust/docs/docs/pages/kona/glossary.mdx b/rust/docs/docs/pages/kona/glossary.mdx new file mode 100644 index 00000000000..e9d99d19e08 --- /dev/null +++ b/rust/docs/docs/pages/kona/glossary.mdx @@ -0,0 +1,41 @@ +# Glossary + +*This document contains definitions for terms used throughout the Kona book.* + +#### Fault Proof VM +A `Fault Proof VM` is a virtual machine, commonly supporting a subset of the Linux kernel's syscalls and a modified subset of an existing reduced instruction set architecture, +that is designed to execute verifiable programs. + +Full specification for the `cannon` & `cannon-rs` FPVMs, as an example, is available in the [Optimism Monorepo][cannon-specs]. + +#### Fault Proof Program +A `Fault Proof Program` is a program, commonly written in a general-purpose language such as Golang, C, or Rust, that may be compiled down +to a compatible `Fault Proof VM` target and provably executed on that target VM. + +Examples of `Fault Proof Programs` include the [OP Program][op-program], which runs on top of [`cannon`][cannon], [`cannon-rs`][cannon-rs], and +[`asterisc`][asterisc] to verify a claim about the state of an [OP Stack][op-stack] layer two. + +#### Preimage ABI +The `Preimage ABI` is a specification for a synchronous communication protocol between a `client` and a `host` that is used to request and read data from the `host`'s +datastore. Full specifications for the `Preimage ABI` are available in the [Optimism Monorepo][preimage-specs]. 
+ +[op-stack]: https://github.com/ethereum-optimism/optimism +[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program +[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon +[cannon-rs]: https://github.com/op-rs/cannon-rs +[asterisc]: https://github.com/ethereum-optimism/asterisc +[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html +[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program +[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle +[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine +[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction +[op-succinct]: https://github.com/succinctlabs/op-succinct +[revm]: https://github.com/bluealloy/revm + +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md + +[op-labs]: https://github.com/ethereum-optimism +[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/intro/contributing.mdx b/rust/docs/docs/pages/kona/intro/contributing.mdx similarity index 88% rename from kona/docs/docs/pages/intro/contributing.mdx rename to rust/docs/docs/pages/kona/intro/contributing.mdx index dc81eca93fd..817d0926749 100644 --- a/kona/docs/docs/pages/intro/contributing.mdx +++ b/rust/docs/docs/pages/kona/intro/contributing.mdx @@ -17,7 +17,7 @@ Before working with this repository locally, you'll need to install several depe ## Pull Request Process -1. Before anything, [create an issue](https://github.com/op-rs/kona/issues/new) to discuss the change you're +1. 
Before anything, [create an issue](https://github.com/ethereum-optimism/optimism/issues/new) to discuss the change you're wanting to make, if it is significant or changes functionality. Feel free to skip this step for trivial changes. 1. Once your change is implemented, ensure that all checks are passing before creating a PR. The full CI pipeline can be run locally via the `justfile`s in the repository. @@ -31,7 +31,7 @@ Before working with this repository locally, you'll need to install several depe Need support or have questions? Open a github issue: -- **GitHub Issues**: [Open an issue](https://github.com/op-rs/kona/issues/new) for bugs or feature requests +- **GitHub Issues**: [Open an issue](https://github.com/ethereum-optimism/optimism/issues/new) for bugs or feature requests ### Crates diff --git a/kona/docs/docs/pages/intro/lore.mdx b/rust/docs/docs/pages/kona/intro/lore.mdx similarity index 100% rename from kona/docs/docs/pages/intro/lore.mdx rename to rust/docs/docs/pages/kona/intro/lore.mdx diff --git a/rust/docs/docs/pages/kona/intro/overview.mdx b/rust/docs/docs/pages/kona/intro/overview.mdx new file mode 100644 index 00000000000..b6ab34d44d3 --- /dev/null +++ b/rust/docs/docs/pages/kona/intro/overview.mdx @@ -0,0 +1,93 @@ +import { Callout } from 'vocs/components' + +# Kona [Documentation for Kona users and developers] + +Kona is an implementation of the [OP Stack][op-stack] written in Rust, +designed to be modular and extensible. `no_std` support is prioritized +to provide the building blocks for fault proofs. + + +Kona is in active development and should be considered experimental. + + + +These docs may contain inaccuracies as it evolves. + +Please [open an issue][new-issue] if you find any errors or have any suggestions +for improvements, and also feel free to [contribute][contributing] to the project! 
+ + + +## Introduction + +Originally a suite of portable implementations of the OP Stack rollup state transition, +Kona has been extended to be _the monorepo_ for OP Stack +types, components, and services built in Rust. Kona provides an ecosystem of extensible, low-level +crates that compose into components and services required for the OP Stack. + +Protocol crates are `no_std` compatible for use within the Fault Proof. Types defined in these +libraries are shared by other components of the OP Stack including the rollup node. + +Proof crates are available for developing verifiable Rust programs targeting +[Fault Proof VMs](/kona/glossary#fault-proof-vm). +These libraries provide tooling and abstractions around low-level syscalls, memory management, +and other common structures that authors of verifiable programs will need to interact with. +It also provides build pipelines for compiling `no_std` Rust programs to a format that can be +executed by supported Fault Proof VM targets. + +Kona is built and maintained by open source contributors and is licensed under the MIT License. + +## Goals of Kona + +**1. Composability** + +Kona provides a common set of tools and abstractions for developing verifiable Rust programs +on top of several supported Fault Proof VM targets. This is done to ensure that programs +written for one supported FPVM can be easily ported to another supported FPVM, and that the +ecosystem of programs built on top of these targets can be easily shared and reused. + +**2. Safety** + +Through standardization of these low-level system interfaces and build pipelines, Kona seeks +to increase coverage over the low-level operations that are required to build on top of a FPVM. + +**3. Developer Experience** + +Building on top of custom Rust targets can be difficult, especially when the target is +nascent and tooling is not yet mature. 
Kona seeks to improve this experience by standardizing +and streamlining the process of developing and compiling verifiable Rust programs, targeted +at supported FPVMs. + +**4. Performance** + +Kona is opinionated in that it favors `no_std` Rust programs for embedded FPVM development, +for both performance and portability. In contrast with alternative approaches, such as the +[`op-program`][op-program] using the Golang `MIPS64` target, `no_std` Rust programs produce +much smaller binaries, resulting in fewer instructions that need to be executed on the FPVM. +In addition, this offers developers more low-level control over interactions with the FPVM +kernel, which can be useful for optimizing performance-critical code. + +## Contributing + +Contributors are welcome! Please see the [contributing guide][contributing] for more information. + +[op-stack]: https://github.com/ethereum-optimism/optimism +[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program +[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon +[cannon-rs]: https://github.com/op-rs/cannon-rs +[asterisc]: https://github.com/ethereum-optimism/asterisc +[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html +[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program +[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle +[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine +[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction +[op-succinct]: https://github.com/succinctlabs/op-succinct +[revm]: https://github.com/bluealloy/revm + +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new 
+[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md + +[op-labs]: https://github.com/ethereum-optimism +[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/intro/why.mdx b/rust/docs/docs/pages/kona/intro/why.mdx similarity index 100% rename from kona/docs/docs/pages/intro/why.mdx rename to rust/docs/docs/pages/kona/intro/why.mdx diff --git a/kona/docs/docs/pages/node/configuration.mdx b/rust/docs/docs/pages/kona/node/configuration.mdx similarity index 100% rename from kona/docs/docs/pages/node/configuration.mdx rename to rust/docs/docs/pages/kona/node/configuration.mdx diff --git a/kona/docs/docs/pages/node/design/derivation.mdx b/rust/docs/docs/pages/kona/node/design/derivation.mdx similarity index 96% rename from kona/docs/docs/pages/node/design/derivation.mdx rename to rust/docs/docs/pages/kona/node/design/derivation.mdx index 14e65a3b164..6da55d5b3d1 100644 --- a/kona/docs/docs/pages/node/design/derivation.mdx +++ b/rust/docs/docs/pages/kona/node/design/derivation.mdx @@ -224,11 +224,11 @@ These metrics help operators monitor the health and progress of the derivation p ## Related Documentation For more details on the underlying derivation pipeline implementation, see: -- [Derivation Pipeline Introduction](/sdk/protocol/derive/intro) -- [Custom Providers](/sdk/protocol/derive/providers) -- [Stage Swapping](/sdk/protocol/derive/stages) -- [Pipeline Signaling](/sdk/protocol/derive/signaling) +- [Derivation Pipeline Introduction](/kona/sdk/protocol/derive/intro) +- [Custom Providers](/kona/sdk/protocol/derive/providers) +- [Stage Swapping](/kona/sdk/protocol/derive/stages) +- [Pipeline Signaling](/kona/sdk/protocol/derive/signaling) [kd]: https://crates.io/crates/kona-derive -[na]: /node/design/intro#node-actors -[rc]: /sdk/protocol/genesis/rollup-config +[na]: /kona/node/design/intro#node-actors +[rc]: /kona/sdk/protocol/genesis/rollup-config diff --git 
a/kona/docs/docs/pages/node/design/engine.mdx b/rust/docs/docs/pages/kona/node/design/engine.mdx similarity index 100% rename from kona/docs/docs/pages/node/design/engine.mdx rename to rust/docs/docs/pages/kona/node/design/engine.mdx diff --git a/rust/docs/docs/pages/kona/node/design/intro.mdx b/rust/docs/docs/pages/kona/node/design/intro.mdx new file mode 100644 index 00000000000..50c3da4e71f --- /dev/null +++ b/rust/docs/docs/pages/kona/node/design/intro.mdx @@ -0,0 +1,78 @@ +# Node Design Overview + +The entry-point for the `kona-node` is the [`RollupNodeService`][trait] +trait which encapsulates the core wiring for the node. The default +implementation of the trait [`start` method][start] handles connecting +all the different components of the node, running each in a spawned +thread. As such, each node component is considered an actor. + +The [`RollupNodeService`][trait] abstracts individual actors through +the [`NodeActor` trait][actor]. With the `NodeActor` trait, the +`RollupNodeService` builds the actor and then starts it. + +Kona provides implementations for all `NodeActor`s required +to run a `RollupNodeService`. Actors are defined in the +[actors][actors] module of the `kona-node-service` crate. + +The `kona-node` is an implementation of the `RollupNodeService` +that lives in the [standard][standard] module. + + +### Actors + +The architecture of `kona-node` is a web of actors that share +state through message passing, using channels, rather than using +shared memory. + +The [`RollupNodeService`][trait] defines the set of required +actors using associated types. These are subject to change, +but are currently defined as follows. + +- **Derivation Actor**: Orchestrates the derivation pipeline, + deriving L2 payload attributes from l1 blocks. Payload + attributes prepared this way are forwarded to the Engine + Actor to be executed. The [derivation][derivation] docs + dive deeper into how the derivation actor works. 
+- **Engine Actor**: Brokers the connection to the execution + layer client (or "execution engine"). The engine actor + turns messages from other actors into engine "tasks" + that are executed in a round-robin against the EL client. + The [engine][engine] docs expand on this. +- **Network Actor**: Manages the P2P Network for the rollup + node. The P2P stack consists of `discv5` peer discovery + and block gossip through libp2p. Visit the [network][p2p] + docs for more detail. +- **Supervisor Actor (beta)**: The supervisor actor is an + interop feature that allows the `kona-node` to be + "managed" (or "indexed") by the supervisor - a new + component in the OP Stack. A detailed overview of + interop and the supervisor's role is provided in the + [supervisor][supervisor] docs. +- **Runtime Actor**: Loads runtime values from the contracts + on the L1 chain for the OP Stack. This is a very + light-weight actor described in [runtime][runtime] docs. +- **Sequencer Actor**: The sequencer actor extends the + `kona-node` to be run as a sequencer. Sequencing is + peripheral to the basic rollup node operation. See + the [sequencer][sequencer] docs. +- **RPC Actor**: The RPC actor spins up and serves an + RPC server that exposes the rpc methods required by + the [OP Stack Specs][specs]. 
+ + + + +[p2p]: ./p2p +[engine]: ./engine +[derivation]: ./derivation +[supervisor]: #TODO +[runtime]: #TODO +[sequencer]: ./sequencer + +[specs]: https://specs.optimism.io/protocol/rollup-node.html + +[standard]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/node/service/src/service/standard/node.rs +[actors]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/node/service/src/actors +[actor]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/node/service/src/actors/traits.rs#L19 +[start]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/node/service/src/service/core.rs#L161-L162 +[trait]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/node/service/src/service/core.rs#L56 diff --git a/kona/docs/docs/pages/node/design/p2p.mdx b/rust/docs/docs/pages/kona/node/design/p2p.mdx similarity index 100% rename from kona/docs/docs/pages/node/design/p2p.mdx rename to rust/docs/docs/pages/kona/node/design/p2p.mdx diff --git a/kona/docs/docs/pages/node/design/sequencer.mdx b/rust/docs/docs/pages/kona/node/design/sequencer.mdx similarity index 100% rename from kona/docs/docs/pages/node/design/sequencer.mdx rename to rust/docs/docs/pages/kona/node/design/sequencer.mdx diff --git a/rust/docs/docs/pages/kona/node/faq/overview.mdx b/rust/docs/docs/pages/kona/node/faq/overview.mdx new file mode 100644 index 00000000000..b3c662040f2 --- /dev/null +++ b/rust/docs/docs/pages/kona/node/faq/overview.mdx @@ -0,0 +1,6 @@ +# FAQ + +1. [Ports](/kona/node/faq/ports) - Detailed account of ports used by the `kona-node` for P2P communication, JSON-RPC APIs, and the Engine API for execution layer communication. + +2. [Profiling](/kona/node/faq/profiling) - Profile performance of the Kona node including CPU profiling and memory analysis. 
+ diff --git a/kona/docs/docs/pages/node/faq/ports.mdx b/rust/docs/docs/pages/kona/node/faq/ports.mdx similarity index 100% rename from kona/docs/docs/pages/node/faq/ports.mdx rename to rust/docs/docs/pages/kona/node/faq/ports.mdx diff --git a/kona/docs/docs/pages/node/faq/profiling.mdx b/rust/docs/docs/pages/kona/node/faq/profiling.mdx similarity index 100% rename from kona/docs/docs/pages/node/faq/profiling.mdx rename to rust/docs/docs/pages/kona/node/faq/profiling.mdx diff --git a/rust/docs/docs/pages/kona/node/install/binaries.mdx b/rust/docs/docs/pages/kona/node/install/binaries.mdx new file mode 100644 index 00000000000..a35f8f6f65a --- /dev/null +++ b/rust/docs/docs/pages/kona/node/install/binaries.mdx @@ -0,0 +1,3 @@ +# Kona Binaries + +Download the latest pre-built binaries from the [GitHub releases page](https://github.com/ethereum-optimism/optimism/releases). diff --git a/rust/docs/docs/pages/kona/node/install/docker.mdx b/rust/docs/docs/pages/kona/node/install/docker.mdx new file mode 100644 index 00000000000..696f4190f38 --- /dev/null +++ b/rust/docs/docs/pages/kona/node/install/docker.mdx @@ -0,0 +1,64 @@ +import { Callout } from 'vocs/components' + +# Docker + +There are two ways to obtain a Kona Docker image: + +1. [GitHub](#github) +2. [Building it from source](#building-the-docker-image) + +Once you have obtained the Docker image, you can run the node. + +Jump ahead to [Run a Node using Docker page](/kona/node/run/docker). + + +## GitHub + +Kona docker images are published with every release on GitHub Container Registry. + +You can obtain the latest `kona-node` image with: + +```bash +docker pull ghcr.io/op-rs/kona/kona-node +``` + + +Specify a specific version (e.g. v0.1.0) like so. 
+ +```bash +docker pull ghcr.io/op-rs/kona/kona-node:v0.1.0 +``` + + +You can test the image with: + +```bash +docker run --rm ghcr.io/op-rs/kona/kona-node --version +``` + +If you can see the [latest release](https://github.com/ethereum-optimism/optimism/releases) version, +then you've successfully installed Kona via Docker. + + +## Building the Docker image + +To build the image from source, navigate to the root of the repository and run: + +```bash +just build-local kona-node +``` + + +This will create an image with the tag `kona:local`. To specify a custom +tag, just pass it in after `kona-node` in the command above, like so: + +```bash +just build-local kona-node my-custom-tag +``` + + +The build will likely take several minutes. Once it's built, test it with: + +```bash +docker run kona:local --version +``` diff --git a/rust/docs/docs/pages/kona/node/install/overview.mdx b/rust/docs/docs/pages/kona/node/install/overview.mdx new file mode 100644 index 00000000000..89eb9838b25 --- /dev/null +++ b/rust/docs/docs/pages/kona/node/install/overview.mdx @@ -0,0 +1,56 @@ +--- +description: Installation instructions for Kona. +--- + +## Prerequisites + +Before installing Kona, ensure you have the following prerequisites: + +- **Rust toolchain** (MSRV: 1.82) +- **`just`** command runner +- **Docker** (optional, for containerized builds) + +### Installing Rust + +If you don't have Rust installed, you can install it using [rustup](https://rustup.rs/): + +```bash +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh +``` + +Rustup is an easy way to update the Rust compiler, and works on all platforms. + +:::tip + +- During installation, when prompted, enter `1` for the default installation. +- After Rust installation completes, try running `cargo version` . If it cannot + be found, run `source $HOME/.cargo/env`. After that, running `cargo version` should return the version, for example `cargo 1.68.2`. 
+- It's generally advisable to append `source $HOME/.cargo/env` to `~/.bashrc`. + +::: + +The Minimum Supported Rust Version (MSRV) of this project is 1.82.0. If you +already have a version of Rust installed, you can check your version by running +`rustc --version`. To update your version of Rust, run `rustup update`. + + +### Installing Just + +`just` is a command runner that Kona uses for build tasks. Install it with: + +```bash +cargo install just +``` + +## Installation Methods + +There are three ways to obtain Kona: + +- [Docker images](/kona/node/install/docker) +- [Pre-built binaries](/kona/node/install/binaries) +- [Building from source](/kona/node/install/source) + +:::note +If you have Docker installed, we recommend using the [Docker recipe](/kona/node/run/docker) configuration +that will have kona-node, op-reth, Prometheus and Grafana running and syncing with just one command. +::: diff --git a/kona/docs/docs/pages/node/install/source.mdx b/rust/docs/docs/pages/kona/node/install/source.mdx similarity index 80% rename from kona/docs/docs/pages/node/install/source.mdx rename to rust/docs/docs/pages/kona/node/install/source.mdx index 00d290fd43c..ef3f09f57c1 100644 --- a/kona/docs/docs/pages/node/install/source.mdx +++ b/rust/docs/docs/pages/kona/node/install/source.mdx @@ -4,15 +4,15 @@ Building from source requires that the Rust toolchain is installed, as well as the `just` command runner. -Visit the [Prerequisites](/node/install/overview) for details on installing Rust and `just`. +Visit the [Prerequisites](/kona/node/install/overview) for details on installing Rust and `just`. ::: First clone the repository: ```bash -git clone https://github.com/op-rs/kona.git -cd kona +git clone https://github.com/ethereum-optimism/optimism.git +cd optimism/rust/kona ``` Then, install the `kona-node` binary into your PATH directly via: @@ -65,19 +65,18 @@ If you have installed Rust using rustup, simply run `rustup update`. 
If you can't install the latest version of Rust you can instead compile using the Minimum Supported Rust Version (MSRV) which is listed under -the `rust-version` key in kona's [Cargo.toml](https://github.com/op-rs/kona/blob/main/Cargo.toml). +the `rust-version` key in kona's [Cargo.toml](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/Cargo.toml). If compilation fails with `(signal: 9, SIGKILL: kill)`, this could mean your machine ran out of memory during compilation. If you are on Docker, consider increasing the memory of the container, or use a [pre-built -binary](/node/install/binaries). +binary](/kona/node/install/binaries). If compilation fails with `error: linking with cc failed: exit code: 1`, try running `cargo clean`. - ## Next Steps -- Read the [Overview](/intro/overview) to understand Kona's architecture -- Check out the [Binaries](/node/run/binary) documentation -- Explore the [Examples](/sdk/examples/intro) +- Read the [Overview](/kona/intro/overview) to understand Kona's architecture +- Check out the [Binaries](/kona/node/run/binary) documentation +- Explore the [Examples](/kona/sdk/examples/intro) diff --git a/kona/docs/docs/pages/node/monitoring.mdx b/rust/docs/docs/pages/kona/node/monitoring.mdx similarity index 88% rename from kona/docs/docs/pages/node/monitoring.mdx rename to rust/docs/docs/pages/kona/node/monitoring.mdx index 3abf2aeb47d..a84af332d1f 100644 --- a/kona/docs/docs/pages/node/monitoring.mdx +++ b/rust/docs/docs/pages/kona/node/monitoring.mdx @@ -37,4 +37,4 @@ dashboard][dashboard] in the textbox > `Load`. 
[setup]: https://reth.rs/run/monitoring#prometheus--grafana -[dashboard]: https://github.com/op-rs/kona/blob/f86052b5dacec7da46b12441aafab2867069f7e7/docker/recipes/kona-node/grafana/dashboards/overview.json +[dashboard]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/docker/recipes/kona-node/grafana/dashboards/overview.json diff --git a/kona/docs/docs/pages/node/requirements.mdx b/rust/docs/docs/pages/kona/node/requirements.mdx similarity index 100% rename from kona/docs/docs/pages/node/requirements.mdx rename to rust/docs/docs/pages/kona/node/requirements.mdx diff --git a/kona/docs/docs/pages/node/rpc/admin.mdx b/rust/docs/docs/pages/kona/node/rpc/admin.mdx similarity index 100% rename from kona/docs/docs/pages/node/rpc/admin.mdx rename to rust/docs/docs/pages/kona/node/rpc/admin.mdx diff --git a/rust/docs/docs/pages/kona/node/rpc/overview.mdx b/rust/docs/docs/pages/kona/node/rpc/overview.mdx new file mode 100644 index 00000000000..ca13d931514 --- /dev/null +++ b/rust/docs/docs/pages/kona/node/rpc/overview.mdx @@ -0,0 +1,25 @@ +# JSON-RPC + +The `kona-node` supports JSON-RPC for interacting with the node. + +By default, `kona-node` exposes an HTTP JSON-RPC server. A WebSocket JSON-RPC +endpoint is also available and can be enabled with the `--rpc.ws-enabled` flag +or the `KONA_NODE_RPC_WS_ENABLED` environment variable. IPC transport is not +supported. + +### Namespaces + +JSON-RPC methods are grouped into namespaces, which are listed below: + +| Namespace | Description | Sensitive | +| ---------------------------- | ------------------------------------------------------------------------------------------------------ | --------- | +| [`p2p`](/kona/node/rpc/p2p) | The `p2p` API allows you to configure the p2p stack. | Maybe | +| [`rollup`](/kona/node/rpc/rollup) | The `rollup` API provides OP Stack specific rpc methods. | No | +| [`admin`](/kona/node/rpc/admin) | The `admin` API allows you to configure your node. 
| **Yes** | + + +### Interacting with the RPC + +Kona enables these RPC methods by default. + +You can interact with the RPC using any JSON-RPC client, such as `curl`, `httpie`, or a custom client in your preferred programming language. diff --git a/kona/docs/docs/pages/node/rpc/p2p.mdx b/rust/docs/docs/pages/kona/node/rpc/p2p.mdx similarity index 100% rename from kona/docs/docs/pages/node/rpc/p2p.mdx rename to rust/docs/docs/pages/kona/node/rpc/p2p.mdx diff --git a/kona/docs/docs/pages/node/rpc/rollup.mdx b/rust/docs/docs/pages/kona/node/rpc/rollup.mdx similarity index 100% rename from kona/docs/docs/pages/node/rpc/rollup.mdx rename to rust/docs/docs/pages/kona/node/rpc/rollup.mdx diff --git a/kona/docs/docs/pages/node/run/binary.mdx b/rust/docs/docs/pages/kona/node/run/binary.mdx similarity index 95% rename from kona/docs/docs/pages/node/run/binary.mdx rename to rust/docs/docs/pages/kona/node/run/binary.mdx index b236822f831..e09294a680a 100644 --- a/kona/docs/docs/pages/node/run/binary.mdx +++ b/rust/docs/docs/pages/kona/node/run/binary.mdx @@ -5,7 +5,7 @@ import { Callout } from 'vocs/components' :::note If you haven't already built the `kona-node` binary, head over to the -[Installation](/node/install/overview) guide. +[Installation](/kona/node/install/overview) guide. ::: @@ -29,12 +29,12 @@ cli flag. More on that in the This tutorial walks through running the `kona-node` as a binary. To use docker, head over to the -[Docker Guide](/node/run/docker) which uses a `docker-compose` +[Docker Guide](/kona/node/run/docker) which uses a `docker-compose` setup provided by `kona`. The `docker-compose` setup automatically bootstraps the `kona-node` with `op-reth`, provisioning grafana dashboards and a default Prometheus configuration. It is encouraged to follow the -[Docker Guide](/node/run/docker) to avoid misconfigurations. +[Docker Guide](/kona/node/run/docker) to avoid misconfigurations. The `kona-node` requires a few CLI flags. 
@@ -215,7 +215,7 @@ engine actor to insert into the chain. ### Configuring a Dockerfile To learn more about running a `kona-node` using docker, check -out the [docker guide](/node/run/docker). +out the [docker guide](/kona/node/run/docker). [tracing-env]: https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#method.from_default_env @@ -227,6 +227,6 @@ out the [docker guide](/node/run/docker). [op-reth]: https://github.com/paradigmxyz/reth/blob/main/crates/optimism/bin/src/main.rs [op-geth]: https://github.com/ethereum-optimism/op-geth -[buildx]: https://github.com/op-rs/kona/tree/main/docker +[buildx]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/docker [packages]: https://github.com/orgs/op-rs/packages?repo_name=kona -[pdocs]: https://github.com/op-rs/kona/pkgs/container/kona%2Fkona-node/446969659?tag=latest +[pdocs]: https://github.com/ethereum-optimism/optimism/pkgs/container/kona%2Fkona-node/446969659?tag=latest diff --git a/rust/docs/docs/pages/kona/node/run/docker.mdx b/rust/docs/docs/pages/kona/node/run/docker.mdx new file mode 100644 index 00000000000..f29e6fca5ec --- /dev/null +++ b/rust/docs/docs/pages/kona/node/run/docker.mdx @@ -0,0 +1,177 @@ +# Docker Guide + +:::info + +This guide uses Kona's pre-packaged docker config. + +For detailed usage of the `kona-node` binary, head +over to [the binary guide](/kona/node/run/binary). + +::: + +Kona provides a [`kona-node` docker recipe][recipe] +with detailed instructions for running a complete node setup. + +## Quick Start + +The easiest way to run `kona-node` with Docker is using the provided recipe: + +1. **Navigate to the recipe directory:** + ```bash + cd docker/recipes/kona-node + ``` + +2. **Configure environment variables:** + Edit `cfg.env` to set your L1 RPC endpoints: + ```bash + L1_PROVIDER_RPC=https://your-l1-rpc-endpoint + L1_BEACON_API=https://your-l1-beacon-endpoint + ``` + +3. 
**Start the services:** + ```bash + just up + ``` + +This will start: +- `kona-node` - The OP Stack node implementation +- `op-reth` - Execution layer client +- `prometheus` - Metrics collection +- `grafana` - Monitoring dashboards (accessible at http://localhost:3000) + +## Docker Compose + +In the [provided docker compose][compose], there are a few services +aside from the `kona-node` and `op-reth`. These are `prometheus` +and `grafana` which automatically come provisioned with dashboards +for monitoring and insight into the `kona-node` and `op-reth` services. +For more detail into how Prometheus and Grafana work, head over to the +[Monitoring][monitoring] docs. + +The `docker-compose.yaml` uses published images from GitHub Container Registry: + +- **`op-reth`**: ghcr.io/paradigmxyz/op-reth:latest +- **`kona-node`**: ghcr.io/op-rs/kona/kona-node:latest + +### Service Configuration + +#### kona-node Service + +The `kona-node` service is configured with the following key settings: + +- **Ports**: + - `5060` - RPC endpoint + - `9223` - P2P discovery (TCP/UDP) + - `9002` - Metrics +- **Environment**: L1 RPC and Beacon API endpoints are required +- **Volumes**: Persistent data storage and JWT token for engine API authentication + +#### op-reth Service + +The `op-reth` service provides the execution layer: + +- **Ports**: + - `8545` - HTTP RPC + - `8551` - Engine API (authenticated) + - `30303` - P2P discovery + - `9001` - Metrics +- **Configuration**: Pre-configured for OP Sepolia testnet + +## Configuration + +### Network Selection + +By default, the recipe is configured for **OP Sepolia**. To sync a different OP Stack chain: + +1. Set appropriate L1 endpoints for your target network in `cfg.env` +2. Modify the docker-compose.yaml: + - Update `op-reth --chain` parameter + - Update `op-reth --rollup.sequencer-http` endpoint + - Update `kona-node --chain` parameter + +### RPC Trust Configuration + +By default, `kona-node` trusts RPC providers (both L1 and L2). 
When using public or untrusted RPC endpoints, you should disable trust to enable block hash verification: + +```bash +# In cfg.env or as environment variables: +KONA_NODE_L1_TRUST_RPC=false +KONA_NODE_L2_TRUST_RPC=false +``` + +Or modify the docker-compose.yaml command: +```yaml +kona-node: + command: | + node + --chain op-sepolia + --l1-eth-rpc ${L1_PROVIDER_RPC} + --l1-beacon ${L1_BEACON_API} + --l1-trust-rpc false # Add this for untrusted L1 RPCs + --l2-engine-rpc ws://op-reth:8551 + --l2-trust-rpc false # Add this for untrusted L2 RPCs +``` + +See the [configuration guide](/kona/node/configuration#rpc-trust-configuration) for more details on RPC trust settings. + +### Port Configuration + +All host ports can be customized via environment variables in `cfg.env`: + +```bash +# Kona Node ports +KONA_NODE_RPC_PORT=5060 +KONA_NODE_DISCOVERY_PORT=9223 +KONA_NODE_METRICS_PORT=9002 + +# OP Reth ports +OP_RETH_RPC_PORT=8545 +OP_RETH_ENGINE_PORT=8551 +OP_RETH_METRICS_PORT=9001 +OP_RETH_DISCOVERY_PORT=30303 + +# Monitoring +PROMETHEUS_PORT=9090 +``` + +### Logging + +Adjust log levels by setting the `RUST_LOG` environment variable: + +```bash +export RUST_LOG=engine_builder=trace,runtime=debug +``` + +## Management Commands + +The recipe includes convenient Just commands: + +```bash +# Start all services +just up + +# Stop all services +just down + +# Restart all services +just restart + +# Generate JWT token (if needed) +./generate-jwt.sh +``` + +## Using Local Images + +To use locally built images instead of published ones: + +1. **Build the kona-node image:** + ```bash + just build-local kona-node + ``` + +2. **Update docker-compose.yaml** to use `kona-node:local` instead of the published image. 
+ +[monitoring]: ../monitoring.mdx + +[recipe]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/docker/recipes/kona-node/README.md +[compose]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/docker/recipes/kona-node/docker-compose.yaml diff --git a/kona/docs/docs/pages/node/run/mechanics.mdx b/rust/docs/docs/pages/kona/node/run/mechanics.mdx similarity index 93% rename from kona/docs/docs/pages/node/run/mechanics.mdx rename to rust/docs/docs/pages/kona/node/run/mechanics.mdx index e53a5828456..d706022f33c 100644 --- a/kona/docs/docs/pages/node/run/mechanics.mdx +++ b/rust/docs/docs/pages/kona/node/run/mechanics.mdx @@ -40,7 +40,7 @@ There are a few core architectural pieces of the `kona-node`. - **P2P Networking:** Enables block gossip and peer discovery. For an in-depth breakdown of these three pillars and a detailed design -of the `kona-node`, visit the [Node Design section](/node/design/intro). +of the `kona-node`, visit the [Node Design section](/kona/node/design/intro). Additionally, an RPC server exposes essential methods, including the [L2 Output RPC method][l2o-rpc]. @@ -210,16 +210,16 @@ your own builder pattern. 
[cli-docs]: ../configuration.mdx [subcommands]: ../subcommands.mdx -[service]: https://github.com/op-rs/kona/tree/main/crates/node/service +[service]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/node/service [node-actor]: https://docs.rs/kona-node-service/latest/kona_node_service/trait.NodeActor.html -[kona]: https://github.com/op-rs/kona +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona [packages]: https://github.com/orgs/op-rs/packages?repo_name=kona [rollup-node]: https://specs.optimism.io/protocol/rollup-node.html -[package]: https://github.com/op-rs/kona/pkgs/container/kona%2Fkona-node -[pdocs]: https://github.com/op-rs/kona/pkgs/container/kona%2Fkona-node/446969659?tag=latest +[package]: https://github.com/ethereum-optimism/optimism/pkgs/container/kona%2Fkona-node +[pdocs]: https://github.com/ethereum-optimism/optimism/pkgs/container/kona%2Fkona-node/446969659?tag=latest -[buildx]: https://github.com/op-rs/kona/tree/main/docker +[buildx]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/docker [engine-api]: https://github.com/ethereum/execution-apis/blob/main/src/engine/common.md [l2o-rpc]: https://specs.optimism.io/protocol/rollup-node.html#l2-output-rpc-method diff --git a/rust/docs/docs/pages/kona/node/run/overview.mdx b/rust/docs/docs/pages/kona/node/run/overview.mdx new file mode 100644 index 00000000000..c9d76ca5975 --- /dev/null +++ b/rust/docs/docs/pages/kona/node/run/overview.mdx @@ -0,0 +1,38 @@ +# Run a Node + +Now that you have [installed the `kona-node`](/kona/node/install/overview), +it's time to run it. + +In this section, we'll guide you through running the kona-node on +various networks and with different configurations. + + +## Supported Networks + +Kona uses the [superchain-registry][scr] to dynamically load +chain configurations for the specified network. As such, Kona +can only support networks that are defined this way. 
+ +[scr]: https://github.com/ethereum-optimism/superchain-registry + +To view available networks, the `kona-node` binary provides +a `registry` subcommand that lists all available networks: + +```bash +kona-node registry +``` + +:::tip +Want to add support for a new network? +Feel free to [add a chain](https://github.com/ethereum-optimism/superchain-registry/blob/main/docs/ops.md#adding-a-chain) +to the superchain-registry! +::: + + +## Configuration & Monitoring + +Learn how to configure and monitor your node: + +- **[Configuration](/kona/node/configuration)** - Configure your node +- **[Monitoring](/kona/node/monitoring)** - Set up logs, metrics, and observability + diff --git a/kona/docs/docs/pages/node/subcommands.mdx b/rust/docs/docs/pages/kona/node/subcommands.mdx similarity index 100% rename from kona/docs/docs/pages/node/subcommands.mdx rename to rust/docs/docs/pages/kona/node/subcommands.mdx diff --git a/rust/docs/docs/pages/kona/rfc/active/intro.mdx b/rust/docs/docs/pages/kona/rfc/active/intro.mdx new file mode 100644 index 00000000000..0836df34385 --- /dev/null +++ b/rust/docs/docs/pages/kona/rfc/active/intro.mdx @@ -0,0 +1,8 @@ +# Request For Comment [RFC] + +Documents in this section are in the request-for-comment stage. + +To comment on these documents, [open an issue in the kona repository](https://github.com/ethereum-optimism/optimism/issues/new) +and provide detail on the changes you're requesting. + +Once the document has been reviewed, it will be moved to the archives.
diff --git a/kona/docs/docs/pages/rfc/archived/monorepo.mdx b/rust/docs/docs/pages/kona/rfc/archived/monorepo.mdx similarity index 100% rename from kona/docs/docs/pages/rfc/archived/monorepo.mdx rename to rust/docs/docs/pages/kona/rfc/archived/monorepo.mdx diff --git a/kona/docs/docs/pages/rfc/archived/umbrellas.mdx b/rust/docs/docs/pages/kona/rfc/archived/umbrellas.mdx similarity index 98% rename from kona/docs/docs/pages/rfc/archived/umbrellas.mdx rename to rust/docs/docs/pages/kona/rfc/archived/umbrellas.mdx index ae0401b6fd3..010dc7e61b5 100644 --- a/kona/docs/docs/pages/rfc/archived/umbrellas.mdx +++ b/rust/docs/docs/pages/kona/rfc/archived/umbrellas.mdx @@ -12,7 +12,7 @@ crates contained in that directory. #### Repository Structure -Kona now has a [monorepo](/rfc/archived/monorepo) structure that merged +Kona now has a [monorepo](/kona/rfc/archived/monorepo) structure that merged `maili` and `hilo` crates into `kona`. This introduces a number of higher-level directories that hold a variety of crates themselves. As of the time at which this document was written the `kona` repository loosely looks like the following. diff --git a/rust/docs/docs/pages/kona/run.mdx b/rust/docs/docs/pages/kona/run.mdx new file mode 100644 index 00000000000..6469bcae282 --- /dev/null +++ b/rust/docs/docs/pages/kona/run.mdx @@ -0,0 +1,56 @@ +import { Callout } from 'vocs/components' + +# Run a Node + + +This tutorial walks through running the `kona-node` as +a binary. To use docker, head over to the +[Docker Guide](/kona/node/run/docker) which uses a `docker-compose` +setup provided by `kona`. The `docker-compose` setup +automatically bootstraps the `kona-node` with `op-reth`, +provisioning grafana dashboards and a default Prometheus +data source. + + +## Prerequisites + +In order to follow this tutorial, you'll need: + +1. An L1 archive node (e.g., `geth`) with enough history for the rollup network you want to run. +2. A `kona-node` binary.
See [installation](/kona/node/install/binaries) for instructions. +3. A rollup configuration file. See [rollup configuration](/kona/sdk/protocol/genesis/rollup-config) for more information. + +## Quick Start + +The fastest way to get started is to use one of the pre-built configurations: + +```bash +# Download a rollup configuration +curl -o rollup.json https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/configs/base-mainnet.json + +# Run the node +kona-node --rollup-config rollup.json --l1-rpc-url http://localhost:8545 +``` + +## Configuration + +The `kona-node` can be configured using command-line flags or environment variables. For a complete list of options, run: + +```bash +kona-node --help +``` + +### Key Configuration Options + +- `--rollup-config`: Path to the rollup configuration file +- `--l1-rpc-url`: L1 RPC endpoint URL +- `--l2-rpc-url`: L2 RPC endpoint URL (optional) +- `--data-dir`: Directory for storing node data +- `--log-level`: Logging level (debug, info, warn, error) + +## Next Steps + +- [Docker Support](/kona/node/run/docker) - Run with Docker +- [Monitoring](/kona/node/monitoring) - Set up monitoring and metrics +- [CLI Reference](/kona/node/configuration) - Complete CLI documentation +- [Subcommands](/kona/node/subcommands) - Available subcommands diff --git a/kona/docs/docs/pages/sdk/examples/custom-derivation-pipeline.mdx b/rust/docs/docs/pages/kona/sdk/examples/custom-derivation-pipeline.mdx similarity index 92% rename from kona/docs/docs/pages/sdk/examples/custom-derivation-pipeline.mdx rename to rust/docs/docs/pages/kona/sdk/examples/custom-derivation-pipeline.mdx index decb017e9c6..06a6e218f80 100644 --- a/kona/docs/docs/pages/sdk/examples/custom-derivation-pipeline.mdx +++ b/rust/docs/docs/pages/kona/sdk/examples/custom-derivation-pipeline.mdx @@ -193,7 +193,7 @@ mod tests { ## Related Resources -- [kona-derive](https://github.com/op-rs/kona/tree/main/crates/protocol/derive) - Core derivation pipeline -- 
[Pipeline Traits](https://github.com/op-rs/kona/tree/main/crates/protocol/derive/src/traits) - Trait definitions -- [Stage Examples](https://github.com/op-rs/kona/tree/main/crates/protocol/derive/src/stages) - Built-in stages +- [kona-derive](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/protocol/derive) - Core derivation pipeline +- [Pipeline Traits](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/protocol/derive/src/traits) - Trait definitions +- [Stage Examples](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/protocol/derive/src/stages) - Built-in stages - [OP Stack Derivation Spec](https://specs.optimism.io/protocol/derivation.html) - Protocol specification \ No newline at end of file diff --git a/kona/docs/docs/pages/sdk/examples/executor-test-fixtures.mdx b/rust/docs/docs/pages/kona/sdk/examples/executor-test-fixtures.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/examples/executor-test-fixtures.mdx rename to rust/docs/docs/pages/kona/sdk/examples/executor-test-fixtures.mdx diff --git a/kona/docs/docs/pages/sdk/examples/frames-to-batch.mdx b/rust/docs/docs/pages/kona/sdk/examples/frames-to-batch.mdx similarity index 82% rename from kona/docs/docs/pages/sdk/examples/frames-to-batch.mdx rename to rust/docs/docs/pages/kona/sdk/examples/frames-to-batch.mdx index 453223c0bcd..4def514c4ef 100644 --- a/kona/docs/docs/pages/sdk/examples/frames-to-batch.mdx +++ b/rust/docs/docs/pages/kona/sdk/examples/frames-to-batch.mdx @@ -1,9 +1,5 @@ # Transform Frames into a Batch -:::info -This example performs the reverse transformation as the [batch-to-frames][batch-to-frames] example. -::: - This example walks through transforming [`Frame`][frame]s into the [`Batch`][batch] types. :::danger @@ -12,7 +8,6 @@ more are not covered by this example. This example solely demonstrates the most way to transform individual [`Frame`][frame]s into a [`Batch`][batch] type. 
::: - ## Walkthrough The high level transformation is the following. @@ -27,7 +22,6 @@ the first step is to decode the frame data into [`Frame`][frame]s using the [`Channel`][channel] can be constructed using the [`ChannelId`][channel-id] of the first frame. - :::info [`Frame`][frame]s may also be added to a [`Channel`][channel] once decoded with the [`Channel::add_frame`][add-frame] method. @@ -42,10 +36,9 @@ which hardforks are activated (using the `RollupConfig`). For the sake of this e brotli bytes can then be passed right into [`Batch::decode`][decode-batch] to wind up with the example's desired [`Batch`][batch]. +## Running this example -## Running this example: - -- Clone the examples repository: `git clone git@github.com:op-rs/kona.git` +- Clone the examples repository: `git clone git@github.com:ethereum-optimism/optimism.git` - Run: `cargo run --example frames_to_batch` ```rust @@ -155,8 +148,6 @@ fn example_transactions() -> Vec { } ``` - -[batch-to-frames]: /sdk/examples/batch-to-frames [frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html [batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html [channel]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html @@ -169,25 +160,3 @@ fn example_transactions() -> Vec { [decode-batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html#method.decode [fjord]: https://specs.optimism.io/protocol/fjord/overview.html [channel-id]: https://docs.rs/kona-protocol/latest/kona_protocol/type.ChannelId.html - - -[op-stack]: https://github.com/ethereum-optimism/optimism -[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program -[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon -[cannon-rs]: https://github.com/op-rs/cannon-rs -[asterisc]: https://github.com/ethereum-optimism/asterisc -[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html -[fpp-specs]: 
https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program -[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle -[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine -[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction -[op-succinct]: https://github.com/succinctlabs/op-succinct -[revm]: https://github.com/bluealloy/revm - -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md - -[op-labs]: https://github.com/ethereum-optimism -[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/rust/docs/docs/pages/kona/sdk/examples/intro.mdx b/rust/docs/docs/pages/kona/sdk/examples/intro.mdx new file mode 100644 index 00000000000..496497492d6 --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/examples/intro.mdx @@ -0,0 +1,10 @@ +# Examples + +Examples for working with `kona` crates. 
+ +- [Load a Rollup Config for a Chain ID](/kona/sdk/examples/load-a-rollup-config) +- [Transform Frames to a Batch](/kona/sdk/examples/frames-to-batch) +- [Create a new L1BlockInfoTx Hardfork Variant](/kona/sdk/examples/new-l1-block-info-tx-hardfork) +- [Create a new `kona-executor` test fixture](/kona/sdk/examples/executor-test-fixtures) +- [Configuring P2P Network Peer Scoring](/kona/sdk/examples/p2p-peer-scoring) +- [Custom Derivation Pipeline with New Stage](/kona/sdk/examples/custom-derivation-pipeline) diff --git a/kona/docs/docs/pages/sdk/examples/load-a-rollup-config.mdx b/rust/docs/docs/pages/kona/sdk/examples/load-a-rollup-config.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/examples/load-a-rollup-config.mdx rename to rust/docs/docs/pages/kona/sdk/examples/load-a-rollup-config.mdx diff --git a/kona/docs/docs/pages/sdk/examples/new-l1-block-info-tx-hardfork.mdx b/rust/docs/docs/pages/kona/sdk/examples/new-l1-block-info-tx-hardfork.mdx similarity index 94% rename from kona/docs/docs/pages/sdk/examples/new-l1-block-info-tx-hardfork.mdx rename to rust/docs/docs/pages/kona/sdk/examples/new-l1-block-info-tx-hardfork.mdx index ac211939421..4c14f028a01 100644 --- a/kona/docs/docs/pages/sdk/examples/new-l1-block-info-tx-hardfork.mdx +++ b/rust/docs/docs/pages/kona/sdk/examples/new-l1-block-info-tx-hardfork.mdx @@ -108,8 +108,8 @@ Some new error variants to the [`BlockInfoError`][bie] are needed as well. 
[pr-diff]: https://github.com/alloy-rs/op-alloy/pull/130/files [decode-calldata]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.L1BlockInfoTx.html#method.decode_calldata [try-new]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.L1BlockInfoTx.html#method.try_new -[ecotone]: https://github.com/op-rs/kona/blob/main/crates/protocol/hardforks/src/ecotone.rs -[info-mod]: https://github.com/op-rs/kona/blob/main/crates/protocol/protocol/src/info/mod.rs +[ecotone]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/protocol/hardforks/src/ecotone.rs +[info-mod]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/crates/protocol/protocol/src/info/mod.rs [genesis]: https://docs.rs/kona-genesis/latest/kona_genesis/index.html [rc]: https://docs.rs/kona-genesis/latest/kona_genesis/struct.RollupConfig.html [hfc]: https://docs.rs/kona-genesis/latest/kona_genesis/struct.HardForkConfig.html diff --git a/kona/docs/docs/pages/sdk/examples/p2p-peer-scoring.mdx b/rust/docs/docs/pages/kona/sdk/examples/p2p-peer-scoring.mdx similarity index 96% rename from kona/docs/docs/pages/sdk/examples/p2p-peer-scoring.mdx rename to rust/docs/docs/pages/kona/sdk/examples/p2p-peer-scoring.mdx index ca2bb3eb28f..0d6b22b9a61 100644 --- a/kona/docs/docs/pages/sdk/examples/p2p-peer-scoring.mdx +++ b/rust/docs/docs/pages/kona/sdk/examples/p2p-peer-scoring.mdx @@ -345,8 +345,8 @@ assert_eq!(*PEER_SCORE_INSPECT_FREQUENCY, Duration::from_secs(15)); ## Related Resources -- [kona-peers](https://github.com/op-rs/kona/tree/main/crates/node/peers) - Peer management and scoring -- [kona-gossip](https://github.com/op-rs/kona/tree/main/crates/node/gossip) - Gossipsub implementation -- [kona-node-service](https://github.com/op-rs/kona/tree/main/crates/node/service) - Network service implementation +- [kona-peers](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/node/peers) - Peer management and scoring +- 
[kona-gossip](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/node/gossip) - Gossipsub implementation +- [kona-node-service](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/crates/node/service) - Network service implementation - [libp2p gossipsub](https://docs.rs/libp2p-gossipsub) - Underlying gossipsub protocol - [OP Stack P2P Spec](https://specs.optimism.io/protocol/rollup-node-p2p.html) - Protocol specification \ No newline at end of file diff --git a/kona/docs/docs/pages/sdk/fpp-dev/env.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/env.mdx similarity index 90% rename from kona/docs/docs/pages/sdk/fpp-dev/env.mdx rename to rust/docs/docs/pages/kona/sdk/fpp-dev/env.mdx index 3bfb61f33d3..61b382380dd 100644 --- a/kona/docs/docs/pages/sdk/fpp-dev/env.mdx +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/env.mdx @@ -1,6 +1,6 @@ # Environment -Before kicking off the development of your own [Fault Proof Program](/glossary#fault-proof-program), +Before kicking off the development of your own [Fault Proof Program](/kona/glossary#fault-proof-program), it's important to understand the environment that your program will be running in. The FPP runs on top of a custom FPVM target, which is typically a VM with a modified subset of an existing reduced instruction set architecture and a subset of Linux syscalls. The FPVM is designed to @@ -10,7 +10,7 @@ communication with the `host` (the FPVM), and other implementation-specific feat ## Host and Client Communication While the program is running on top of the FPVM, it is considered to be in the `client` role, while the VM is in the `host` role. The only way for the `client` and `host` -to communicate with one another is synchronously through the [Preimage ABI](/glossary#preimage-abi) ([specification][preimage-specs]). +to communicate with one another is synchronously through the [Preimage ABI](/kona/glossary#preimage-abi) ([specification][preimage-specs]). 
In order for the `client` to read from the `host`, the `read` and `write` syscalls are modified within the FPVM to allow the `client` to request preparation of and read foreign data. @@ -43,7 +43,7 @@ which has the objective of verifying claims about the state of an [OP Stack][op- ![op-program-architecture](/op-program-fpp.svg) -In this program, execution and derivation of the L2 chain is performed within it, and ultimately the claimed state of the L2 chain is verified in the [prologue](/sdk/fpp-dev/prologue) stage. +In this program, execution and derivation of the L2 chain is performed within it, and ultimately the claimed state of the L2 chain is verified in the [prologue](/kona/sdk/fpp-dev/prologue) stage. It communicates with the `host` for two reasons: @@ -73,10 +73,10 @@ Other programs (`clients`) may have different requirements for communication wit [op-succinct]: https://github.com/succinctlabs/op-succinct [revm]: https://github.com/bluealloy/revm -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/fpp-dev/epilogue.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/epilogue.mdx similarity index 79% rename from kona/docs/docs/pages/sdk/fpp-dev/epilogue.mdx rename to rust/docs/docs/pages/kona/sdk/fpp-dev/epilogue.mdx index 326a7af4dd2..253f4c7fe5d 100644 --- a/kona/docs/docs/pages/sdk/fpp-dev/epilogue.mdx +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/epilogue.mdx @@ 
-1,8 +1,8 @@ # Epilogue The epilogue stage of the program is intended to perform the final validation on the outputs from the -[execution phase](/sdk/fpp-dev/execution). In most programs, this entails comparing the outputs of the execution phase -to portions of the bootstrap data made available during the [prologue phase](/sdk/fpp-dev/prologue). +[execution phase](/kona/sdk/fpp-dev/execution). In most programs, this entails comparing the outputs of the execution phase +to portions of the bootstrap data made available during the [prologue phase](/kona/sdk/fpp-dev/prologue). Generally, this phase should consist almost entirely of validation steps. diff --git a/rust/docs/docs/pages/kona/sdk/fpp-dev/execution.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/execution.mdx new file mode 100644 index 00000000000..0bf5d36fbbc --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/execution.mdx @@ -0,0 +1,21 @@ +# Execution + +The execution phase of the program is commonly the heaviest portion of the fault proof program, where the computation +that is being verified is performed. + +This phase consumes the outputs of the [prologue phase](/kona/sdk/fpp-dev/prologue), and performs the bulk of the verifiable +computation. After execution has concluded, the outputs are passed along to the [epilogue phase](/kona/sdk/fpp-dev/epilogue) for +final verification. + +## Example + +At a high-level, in the `kona-client` program, the execution phase: + +1. Derives the inputs to the L2 derivation pipeline by unrolling the L1 head hash fetched in the prologue. +1. Passes the inputs to the L2 derivation pipeline, producing the L2 execution payloads required to reproduce + the L2 safe chain at the claimed height. +1. Executes the payloads produced by the L2 derivation pipeline, producing the [L2 output root][l2-output-root] at the + L2 claim height.
+ + +[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction diff --git a/rust/docs/docs/pages/kona/sdk/fpp-dev/intro.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/intro.mdx new file mode 100644 index 00000000000..f89d3af068c --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/intro.mdx @@ -0,0 +1,21 @@ +# Fault Proof Program Development + +This chapter provides an overview of [Fault Proof Program](/kona/glossary#fault-proof-program) development +on top of the custom FPVM targets supported by [Kona][kona]. + +At a high level, a Fault Proof Program is not much different from a regular `no_std` Rust program. A custom entrypoint is provided, and the program +is compiled down to a custom target, which is then executed on the FPVM. + +Fault Proof Programs are structured with 3 stages: +1. **Prologue**: The bootstrapping stage, where the program is loaded into memory and the initial state is set up. During this phase, the program's initial + state is written to the FPVM's memory, and the program's entrypoint is set. +1. **Execution**: The main execution stage, where the program is executed on the FPVM. During this phase, the program's entrypoint is called, and the + program is executed until it exits. +1. **Epilogue**: The finalization stage, where the program's final state is read from the FPVM's memory. During this phase, the program's final state is + inspected and properties of the state transition are verified. + +The following sections will provide a more in-depth overview of each of these stages, as well as the tools and abstractions provided by Kona for +developing your own Fault Proof Programs. 
+ + +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona diff --git a/kona/docs/docs/pages/sdk/fpp-dev/io.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/io.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/fpp-dev/io.mdx rename to rust/docs/docs/pages/kona/sdk/fpp-dev/io.mdx diff --git a/kona/docs/docs/pages/sdk/fpp-dev/prologue.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/prologue.mdx similarity index 79% rename from kona/docs/docs/pages/sdk/fpp-dev/prologue.mdx rename to rust/docs/docs/pages/kona/sdk/fpp-dev/prologue.mdx index 2e92f399f1a..a05a4e94ac8 100644 --- a/kona/docs/docs/pages/sdk/fpp-dev/prologue.mdx +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/prologue.mdx @@ -1,10 +1,10 @@ # Prologue The prologue stage of the program is commonly responsible for bootstrapping the program with inputs from an external -source, pulled in through the [Host-Client communication](/sdk/fpp-dev/env#host---client-communication) implementation. +source, pulled in through the [Host-Client communication](/kona/sdk/fpp-dev/env#host---client-communication) implementation. As a rule of thumb, the prologue implementation should be kept minimal, and should not do much more than establish -the inputs for the [execution phase](/sdk/fpp-dev/execution). +the inputs for the [execution phase](/kona/sdk/fpp-dev/execution). 
## Example @@ -33,10 +33,10 @@ As an example, the prologue stage of the `kona-client` program runs through seve [op-succinct]: https://github.com/succinctlabs/op-succinct [revm]: https://github.com/bluealloy/revm -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/fpp-dev/targets.mdx b/rust/docs/docs/pages/kona/sdk/fpp-dev/targets.mdx similarity index 91% rename from kona/docs/docs/pages/sdk/fpp-dev/targets.mdx rename to rust/docs/docs/pages/kona/sdk/fpp-dev/targets.mdx index 82383499f78..33fd1120bff 100644 --- a/kona/docs/docs/pages/sdk/fpp-dev/targets.mdx +++ b/rust/docs/docs/pages/kona/sdk/fpp-dev/targets.mdx @@ -58,10 +58,10 @@ Syscalls supported by `cannon` can be found within the `cannon` specification [h [op-succinct]: https://github.com/succinctlabs/op-succinct [revm]: https://github.com/bluealloy/revm -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: 
https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/rust/docs/docs/pages/kona/sdk/overview.mdx b/rust/docs/docs/pages/kona/sdk/overview.mdx new file mode 100644 index 00000000000..33259ecaef5 --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/overview.mdx @@ -0,0 +1,34 @@ +# Kona as a Library + +Kona is designed as a modular, library-first OP Stack implementation in Rust. This design philosophy allows developers to integrate Kona components into their applications and build custom solutions on top of the OP Stack. + +## Library Structure + +Kona is organized as a collection of focused crates that can be used independently or together: + +- **Protocol Libraries**: Core protocol logic and data structures +- **Node Components**: Modular node architecture for building custom rollup nodes +- **Proof System**: Fault proof generation and verification +- **Utilities**: Common utilities and helper functions + +## Key Benefits + +- **Modularity**: Use only the components you need +- **Performance**: Rust's zero-cost abstractions and memory safety +- **Extensibility**: Easy to extend and customize for specific use cases +- **Reliability**: Strong typing and comprehensive testing + +## Getting Started + +To use Kona as a library, add the relevant crates to your `Cargo.toml`: + +```toml +[dependencies] +kona-derive = "0.1" +kona-protocol = "0.1" +kona-node = "0.1" +``` + +## Examples + +See the [Examples](/kona/sdk/examples/intro) section for practical usage examples and integration patterns. 
diff --git a/kona/docs/docs/pages/sdk/proof/custom-backend.mdx b/rust/docs/docs/pages/kona/sdk/proof/custom-backend.mdx similarity index 93% rename from kona/docs/docs/pages/sdk/proof/custom-backend.mdx rename to rust/docs/docs/pages/kona/sdk/proof/custom-backend.mdx index 2ab904e7afb..d0375302ee1 100644 --- a/kona/docs/docs/pages/sdk/proof/custom-backend.mdx +++ b/rust/docs/docs/pages/kona/sdk/proof/custom-backend.mdx @@ -26,7 +26,7 @@ verifiable manner. ## Backend Traits -Covered in the [FPVM Backend](/sdk/proof/fpvm-backend) section of the book, `kona-client` ships with an implementation of +Covered in the [FPVM Backend](/kona/sdk/proof/fpvm-backend) section of the book, `kona-client` ships with an implementation of `kona-derive` and `kona-executor`'s data source traits which pull in data over the [PreimageOracle ABI][preimage-specs]. However, running `kona-client` on top of a different verifiable environment, i.e. a zkVM or TEE, is also possible @@ -85,7 +85,7 @@ let pipeline = PipelineBuilder::new() ``` From here, a custom derivation driver is needed to produce the desired execution payload(s). An example of this for -`kona-client` can be found in the [single proof implementation](https://github.com/op-rs/kona/blob/main/bin/client/src/single.rs#L98). +`kona-client` can be found in the [single proof implementation](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/bin/client/src/single.rs#L98). ### `kona-mpt` / `kona-executor` sources @@ -113,7 +113,7 @@ let header = executor.execute_payload(...).expect("Failed execution"); ### Bringing it Together Once your custom backend traits for both `kona-derive` and `kona-executor` have been implemented, -your final binary may look something like [that of `kona-client`'s](https://github.com/op-rs/kona/blob/main/bin/client/src/kona.rs). +your final binary may look something like [that of `kona-client`'s](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/bin/client/src/kona.rs). 
Alternatively, if you're looking to prove a wider range of blocks, [`op-succinct`'s `range` program](https://github.com/succinctlabs/op-succinct/tree/main/programs/range) offers a good example of running the pipeline and executor across a string of contiguous blocks. @@ -130,10 +130,10 @@ offers a good example of running the pipeline and executor across a string of co [op-succinct]: https://github.com/succinctlabs/op-succinct [revm]: https://github.com/bluealloy/revm -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/proof/exec-ext.mdx b/rust/docs/docs/pages/kona/sdk/proof/exec-ext.mdx similarity index 94% rename from kona/docs/docs/pages/sdk/proof/exec-ext.mdx rename to rust/docs/docs/pages/kona/sdk/proof/exec-ext.mdx index f806cf12791..07118c7fbdd 100644 --- a/kona/docs/docs/pages/sdk/proof/exec-ext.mdx +++ b/rust/docs/docs/pages/kona/sdk/proof/exec-ext.mdx @@ -185,7 +185,7 @@ Prior to the integration of `alloy-evm`, `kona-executor` used a builder pattern - **Performance**: Reduced indirection compared to the handler approach - **Maintainability**: Cleaner separation of concerns between execution and customization -For more complex customizations involving multiple precompiles, custom opcodes, or specialized execution logic, refer to the [`FpvmOpEvmFactory`](https://github.com/op-rs/kona/blob/main/bin/client/src/fpvm_evm/factory.rs) implementation in the 
`kona-client` for a comprehensive example. +For more complex customizations involving multiple precompiles, custom opcodes, or specialized execution logic, refer to the [`FpvmOpEvmFactory`](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/bin/client/src/fpvm_evm/factory.rs) implementation in the `kona-client` for a comprehensive example. [op-stack]: https://github.com/ethereum-optimism/optimism [op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program @@ -202,10 +202,10 @@ For more complex customizations involving multiple precompiles, custom opcodes, [alloy-evm]: https://github.com/alloy-rs/evm [evm-factory]: https://docs.rs/alloy-evm/latest/alloy_evm/eth/struct.EthEvmFactory.html -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/proof/fpvm-backend.mdx b/rust/docs/docs/pages/kona/sdk/proof/fpvm-backend.mdx similarity index 88% rename from kona/docs/docs/pages/sdk/proof/fpvm-backend.mdx rename to rust/docs/docs/pages/kona/sdk/proof/fpvm-backend.mdx index dab1ecfc4ad..9262396cf81 100644 --- a/kona/docs/docs/pages/sdk/proof/fpvm-backend.mdx +++ b/rust/docs/docs/pages/kona/sdk/proof/fpvm-backend.mdx @@ -1,17 +1,17 @@ # FPVM Backend -> 📖 Before reading this section of the book, it is advised to read the [Fault Proof Program Environment](/sdk/fpp-dev/env) +> 📖 Before reading this section of the book, it is advised to read 
the [Fault Proof Program Environment](/kona/sdk/fpp-dev/env) > section to familiarize yourself with the PreimageOracle IO pattern. Kona is effectively split into three parts: - OP Stack state transition logic (`kona-derive`, `kona-executor`, `kona-mpt`) - OP Stack state transition proof SDK (`kona-preimage`, `kona-proof`) -- [Fault Proof VM](/glossary#fault-proof-vm) IO and utilities +- [Fault Proof VM](/kona/glossary#fault-proof-vm) IO and utilities (`kona-std-fpvm`, `kona-std-fpvm-proc`) This section of the book focuses on the usage of `kona-std-fpvm` and `kona-preimage` to facilitate communication between host and client -for programs running on top of the [FPVM targets](/sdk/fpp-dev/env). +for programs running on top of the [FPVM targets](/kona/sdk/fpp-dev/env). ## Host and Client Communication API @@ -51,14 +51,14 @@ io::exit(0); With this library, you can implement a custom communication protocol between the host and client, or extend the existing [PreimageOracle ABI][preimage-specs]. However, for most developers, we recommend sticking with `kona-preimage` -when developing programs that target the [FPVMs](/sdk/fpp-dev/env), barring needs like printing directly to +when developing programs that target the [FPVMs](/kona/sdk/fpp-dev/env), barring needs like printing directly to `stdout`. ### `kona-preimage` `kona-preimage` is an implementation of the [PreimageOracle ABI][preimage-specs]. This crate enables synchronous communication between the host and client program, described in -[Host - Client Communication](/sdk/fpp-dev/env#host---client-communication) in the FPP Dev environment section of the +[Host - Client Communication](/kona/sdk/fpp-dev/env#host---client-communication) in the FPP Dev environment section of the book. 
The crate is built around the [`Channel`](https://docs.rs/kona-preimage/latest/kona_preimage/trait.Channel.html) trait, @@ -98,9 +98,9 @@ of the consumer are not covered by the to-[spec][preimage-specs] implementations Finally, in `kona-proof`, implementations of data source traits from `kona-derive` and `kona-executor` are provided to pull in untyped data from the host by `PreimageKey`. These data source traits are covered in more detail within -the [Custom Backend](/sdk/proof/custom-backend) section, but we'll quickly gloss over them here to build intuition. +the [Custom Backend](/kona/sdk/proof/custom-backend) section, but we'll quickly gloss over them here to build intuition. -Let's take, for example, [`OracleL1ChainProvider`](https://github.com/op-rs/kona/blob/40a8d7ec3def4a1eeb26492a1e4338d8b032e428/bin/client/src/l1/chain_provider.rs#L16-L23). +Let's take, for example, [`OracleL1ChainProvider`](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/bin/client/src/l1/chain_provider.rs#L16-L23). The [`ChainProvider`](https://docs.rs/kona-derive/latest/kona_derive/trait.ChainProvider.html) trait in `kona-derive` defines a simple interface for fetching information about the L1 chain. In the `OracleL1ChainProvider`, this information is pulled in over the [PreimageOracle ABI][preimage-specs]. There are many other examples of these data source traits, @@ -150,10 +150,10 @@ it to the user. 
[op-succinct]: https://github.com/succinctlabs/op-succinct [revm]: https://github.com/bluealloy/revm -[kona]: https://github.com/op-rs/kona -[issues]: https://github.com/op-rs/kona/issues -[new-issue]: https://github.com/op-rs/kona/issues/new -[contributing]: https://github.com/op-rs/kona/tree/main/CONTRIBUTING.md +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md [op-labs]: https://github.com/ethereum-optimism [bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/rust/docs/docs/pages/kona/sdk/proof/intro.mdx b/rust/docs/docs/pages/kona/sdk/proof/intro.mdx new file mode 100644 index 00000000000..b47934b105b --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/proof/intro.mdx @@ -0,0 +1,61 @@ +# Kona Proof SDK + +Welcome to the Kona Proof SDK, a powerful set of libraries designed +from first principles to build proofs with the OP Stack STF on top +of the OP Stack's FPVMs and other verifiable backends like [SP-1][sp-1], +[Risc0][rzero], [Intel TDX][tdx], and [AMD SEV-SNP][sev-snp]. At its +core, Kona is built on the principles of modularity, extensibility, +and developer empowerment. + +## A Foundation of Flexibility + +The kona repository is more than a fault proof program for the OP Stack +— it's an ecosystem of interoperable components, each crafted with +reusability and extensibility as primary goals. While we provide +[Fault Proof VM](/kona/glossary#fault-proof-vm) and "online" backends +for key components like `kona-derive` and `kona-executor`, the true +power of `kona` lies in its adaptability. + +## Extend Without Forking + +One of Kona's standout features is its ability to support custom +features and data sources without requiring you to fork the entire +project. 
Through careful use of Rust's powerful trait system and +abstract interfaces, we've created a framework that allows you to +plug in your own features and ideas seamlessly. + +## What You'll Learn + +In this section of the developer book, we'll dive deep into the Kona SDK, covering: +* **Building on the FPVM Backend**: Learn how to leverage the Fault Proof VM tooling to create your own fault proof programs. +* **Creating Custom Backends**: Discover the process of designing and implementing your own backend to run `kona-client` or a variation of it on different targets. +* **Extending Core Components**: Explore techniques for creating new constructs that integrate smoothly with crates like `kona-derive` and `kona-executor`. + +Whether you're looking to use Kona as-is, extend its functionality, or create entirely new programs based on its libraries, +this guide is intended to provide you with the knowledge and tools you need to succeed. + +[sp-1]: https://github.com/succinctlabs/sp1 +[rzero]: https://github.com/risc0/risc0 +[tdx]: https://www.intel.com/content/www/us/en/developer/tools/trust-domain-extensions/documentation.html +[sev-snp]: https://www.amd.com/en/developer/sev.html + +[op-stack]: https://github.com/ethereum-optimism/optimism +[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program +[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon +[cannon-rs]: https://github.com/op-rs/cannon-rs +[asterisc]: https://github.com/ethereum-optimism/asterisc +[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html +[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program +[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle +[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine +[l2-output-root]: 
https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction +[op-succinct]: https://github.com/succinctlabs/op-succinct +[revm]: https://github.com/bluealloy/revm + +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md + +[op-labs]: https://github.com/ethereum-optimism +[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/rust/docs/docs/pages/kona/sdk/protocol/derive/intro.mdx b/rust/docs/docs/pages/kona/sdk/protocol/derive/intro.mdx new file mode 100644 index 00000000000..029e65f2dbc --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/protocol/derive/intro.mdx @@ -0,0 +1,312 @@ +import { Callout } from 'vocs/components' + +# The `kona-derive` Derivation Pipeline + +[`kona-derive`][kd] defines an entirely trait-abstracted, `no_std` derivation +pipeline for the OP Stack. It can be used through the [`Pipeline`][p] trait, +which is implemented for the concrete [`DerivationPipeline`][dp] object. + +This document dives into the inner workings of the derivation pipeline, its +stages, and how to build and interface with Kona's pipeline. Other documents +in this section will provide a comprehensive overview of Derivation Pipeline +extensibility including trait-abstracted providers, custom stages, signaling, +and hardfork activation including multiplexed stages. + +- [Swapping out a stage](/kona/sdk/protocol/derive/stages) +- [Defining a custom Provider](/kona/sdk/protocol/derive/providers) +- [Extending Pipeline Signals](/kona/sdk/protocol/derive/signaling) +- [Implementing Hardfork Activations](/kona/sdk/protocol/hardforks) + + +## What is a Derivation Pipeline? 
+ +Simply put, an OP Stack Derivation Pipeline transforms data on L1 into L2 +payload attributes that can be executed to produce the canonical L2 block. + +Within a pipeline, there are a set of stages that break up this transformation +further. When composed, these stages operate over the input data, sequentially +producing payload attributes. + +In [`kona-derive`][kd], stages are architected using composition - each sequential +stage owns the previous one, forming a stack. For example, let's define stage A +as the first stage, accepting raw L1 input data, and stage C produces the pipeline +output - payload attributes. Stage B "owns" stage A, and stage C then owns stage B. +Using this example, the [`DerivationPipeline`][dp] type in [`kona-derive`][kd] only +holds stage C, since ownership of the other stages is nested within stage C. + + +In a future architecture of the derivation pipeline, stages could be made +standalone such that communication between stages happens through channels. +In a multi-threaded, non-fault-proof environment, these stages can then +run in parallel since stage ownership is decoupled. + + + +## Kona's Derivation Pipeline + +The top-level stage in [`kona-derive`][kd] that produces +[`OpAttributesWithParent`][attributes] is the [`AttributesQueue`][attributes-queue]. + +Post-Holocene (the Holocene hardfork), the following stages are composed by +the [`DerivationPipeline`][dp]. +- [`AttributesQueue`][attributes-queue] + - [`BatchProvider`][batch-provider] + - [`BatchStream`][batch-stream] + - [`ChannelReader`][channel-reader] + - [`ChannelProvider`][channel-provider] + - [`FrameQueue`][frame-queue] + - [`L1Retrieval`][retrieval] + - [`IndexedTraversal` or `PollingTraversal`][traversal] + +Notice, from top to bottom, each stage owns the stage nested below it. 
+Where the [`IndexedTraversal` or `PollingTraversal`][traversal] stage iterates over L1 data, the +[`AttributesQueue`][attributes-queue] stage produces +[`OpAttributesWithParent`][attributes], creating a function that transforms +L1 data into payload attributes. + + +## The [`Pipeline`][p] interface + +Now that we've broken down the stages inside the [`DerivationPipeline`][dp] +type, let's move up another level to break down how the [`DerivationPipeline`][dp] +type functions itself. At the highest level, [`kona-derive`][kd] defines the +interface for working with the pipeline through the [`Pipeline`][p] trait. + +[`Pipeline`][p] provides two core methods. +- `peek() -> Option<&OpAttributesWithParent>` +- `async step() -> StepResult` + +Functionally, a pipeline can be "stepped" on, which attempts to derive +payload attributes from input data. Steps do not guarantee that payload attributes +are produced, they only attempt to advance the stages within the pipeline. + +The `peek()` method provides a way to check if attributes are prepared. +Beyond `peek()` returning `Option::Some(&OpAttributesWithParent)`, the [`Pipeline`][p] +extends the [Iterator][iterator] trait, providing a way to consume the generated payload +attributes. + + +## Constructing a Derivation Pipeline + +[`kona-derive`][kd] provides a [`PipelineBuilder`][builder] to abstract the complexity +of generics away from the downstream consumers. Below we provide an example for using +the [`PipelineBuilder`][builder] to instantiate a [`DerivationPipeline`][dp]. + +```rust,ignore +// Imports +use std::sync::Arc; +use kona_protocol::BlockInfo; +use kona_genesis::RollupConfig; +use kona_providers_alloy::*; + +// Use a default rollup config. 
+let rollup_config = Arc::new(RollupConfig::default()); + +// Providers are instantiated with localhost urls (`127.0.0.1`) +let chain_provider = + AlloyChainProvider::new_http("http://127.0.0.1:8545".try_into().unwrap()); +let l2_chain_provider = AlloyL2ChainProvider::new_http( + "http://127.0.0.1:9545".try_into().unwrap(), + rollup_config.clone(), +); +let beacon_client = OnlineBeaconClient::new_http("http://127.0.0.1:5555".into()); +let blob_provider = OnlineBlobProvider::new(beacon_client, None, None); +let blob_provider = OnlineBlobProviderWithFallback::new(blob_provider, None); +let dap_source = + EthereumDataSource::new(chain_provider.clone(), blob_provider, &rollup_config); +let builder = StatefulAttributesBuilder::new( + rollup_config.clone(), + l2_chain_provider.clone(), + chain_provider.clone(), +); + +// This is the starting L1 block for the pipeline. +// +// To get the starting L1 block for a given L2 block, +// use the `AlloyL2ChainProvider::l2_block_info_by_number` +// method to get the `L2BlockInfo.l1_origin`. This l1_origin +// is the origin that can be passed here. +let origin = BlockInfo::default(); + +// Build the pipeline using the `PipelineBuilder`. +// Alternatively, use the `new_online_pipeline` helper +// method provided by the `kona-derive-alloy` crate. +let pipeline = PipelineBuilder::new() + .rollup_config(rollup_config.clone()) + .dap_source(dap_source) + .l2_chain_provider(l2_chain_provider) + .chain_provider(chain_provider) + .builder(builder) + .origin(origin) + .build(); + +assert_eq!(pipeline.rollup_config, rollup_config); +assert_eq!(pipeline.origin(), Some(origin)); +``` + + +## Producing Payload Attributes + +Since the [`Pipeline`][p] trait extends the [`Iterator`][iterator] trait, +producing [`OpAttributesWithParent`][attributes] is as simple as calling the +[`Iterator::next()`][next] method on the [`DerivationPipeline`][dp]. + +Extending the example from above, producing the attributes is shown below. 
+ +```rust +// Import the iterator trait to show where `.next` is sourced. +use core::iter::Iterator; + +// ... +// example from above constructing the pipeline +// ... + +let attributes = pipeline.next(); + +// Since we haven't stepped on the pipeline, +// there shouldn't be any payload attributes prepared. +assert!(attributes.is_none()); +``` + +As demonstrated, the pipeline won't have any payload attributes +without having been "stepped" on. Naively, we can continuously +step on the pipeline until attributes are ready, and then consume them. + +```rust +// Import the iterator trait to show where `.next` is sourced. +use core::iter::Iterator; + +// ... +// example from constructing the pipeline +// ... + +// Continuously step on the pipeline until attributes are prepared. +let l2_safe_head = L2BlockInfo::default(); +loop { + if matches!(pipeline.step(l2_safe_head).await, StepResult::PreparedAttributes) { + // The pipeline has successfully prepared payload attributes, break the loop. + break; + } +} + +// Since the loop is only broken once attributes are prepared, +// this must be `Option::Some`. +let attributes = pipeline.next().expect("Must contain payload attributes"); + +// The parent of the prepared payload attributes should be +// the l2 safe head that we "stepped on". +assert_eq!(attributes.parent, l2_safe_head); +``` + +Importantly, the above is not sufficient logic to produce payload attributes and drive +the derivation pipeline. There are multiple different `StepResult`s to handle when +stepping on the pipeline, including advancing the origin, re-orgs, and pipeline resets. +In the next section, pipeline resets are outlined. + +For an up-to-date driver that runs the derivation pipeline as part of the fault proof +program, reference kona's [client driver][driver]. + + +## Resets + +When stepping on the [`DerivationPipeline`][dp] produces a reset error, the driver +of the pipeline must perform a reset on the pipeline. 
This is done by sending a "signal" +through the [`DerivationPipeline`][dp]. Below demonstrates this. + +```rust +// Import the iterator trait to show where `.next` is sourced. +use core::iter::Iterator; + +// ... +// example from constructing the pipeline +// ... + +// Continuously step on the pipeline until attributes are prepared. +let l2_safe_head = L2BlockInfo::default(); +loop { + match pipeline.step(l2_safe_head).await { + StepResult::StepFailed(e) | StepResult::OriginAdvanceErr(e) => { + match e { + PipelineErrorKind::Reset(e) => { + // Get the system config from the provider. + let system_config = l2_chain_provider + .system_config_by_number( + l2_safe_head.block_info.number, + rollup_config.clone(), + ) + .await?; + // Reset the pipeline to the initial L2 safe head and L1 origin. + self.pipeline + .signal( + ResetSignal { + l2_safe_head: l2_safe_head, + l1_origin: pipeline + .origin() + .ok_or_else(|| anyhow!("Missing L1 origin"))?, + system_config: Some(system_config), + } + .signal(), + ) + .await?; + // ... + } + _ => { /* Handling left to the driver */ } + } + } + _ => { /* Handling left to the driver */ } + } +} +``` + + +## Learn More + +[`kona-derive`][kd] is one implementation of the OP Stack derivation pipeline. + +To learn more, it is highly encouraged to read the ["first" derivation pipeline][op-dp] +written in [golang][go]. It is often colloquially referred to as the "reference" +implementation and provides the basis for how much of Kona's derivation pipeline +was built. + + +## Provenance + +> The lore do be bountiful. +> +> - Bard XVIII of the Logic Gates + +The kona project spawned out of the need to build a secondary fault proof for the OP Stack. +Initially, we sought to re-use [magi][magi]'s derivation pipeline, but the ethereum-rust +ecosystem moves quickly and [magi][magi] was behind by a generation of types - using +[ethers-rs] instead of new [alloy][alloy] types. 
Additionally, [magi][magi]'s derivation +pipeline was not `no_std` compatible - a hard requirement for running a rust fault proof +program on top of the RISCV or MIPS ISAs. + +So, [@clabby][clabby] and [@refcell][refcell] stood up [kona][kona] in a few months. + + +[driver]: https://docs.rs/kona-driver/latest/kona_driver/struct.Driver.html +[next]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html#tymethod.next +[builder]: https://docs.rs/kona-derive/latest/kona_derive/struct.PipelineBuilder.html +[alloy]: https://github.com/alloy-rs/alloy +[ethers-rs]: https://github.com/gakonst/ethers-rs +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[clabby]: https://github.com/clabby +[refcell]: https://github.com/refcell +[go]: https://go.dev/ +[magi]: https://github.com/a16z/magi +[kd]: https://crates.io/crates/kona-derive +[iterator]: https://doc.rust-lang.org/nightly/core/iter/trait.Iterator.html +[p]: https://docs.rs/kona-derive/latest/kona_derive/trait.Pipeline.html +[op-dp]: https://github.com/ethereum-optimism/optimism/tree/develop/op-node/rollup/derive +[dp]: https://docs.rs/kona-derive/latest/kona_derive/struct.DerivationPipeline.html +[attributes]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.OpAttributesWithParent.html + +[attributes-queue]: https://docs.rs/kona-derive/latest/kona_derive/struct.AttributesQueue.html +[batch-provider]: https://docs.rs/kona-derive/latest/kona_derive/struct.BatchProvider.html +[batch-stream]: https://docs.rs/kona-derive/latest/kona_derive/struct.BatchStream.html +[channel-reader]: https://docs.rs/kona-derive/latest/kona_derive/struct.ChannelReader.html +[channel-provider]: https://docs.rs/kona-derive/latest/kona_derive/struct.ChannelProvider.html +[frame-queue]: https://docs.rs/kona-derive/latest/kona_derive/struct.FrameQueue.html +[retrieval]: https://docs.rs/kona-derive/latest/kona_derive/struct.L1Retrieval.html +[traversal]: 
https://docs.rs/kona-derive/latest/kona_derive/struct.IndexedTraversal.html diff --git a/kona/docs/docs/pages/sdk/protocol/derive/providers.mdx b/rust/docs/docs/pages/kona/sdk/protocol/derive/providers.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/derive/providers.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/derive/providers.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/derive/signaling.mdx b/rust/docs/docs/pages/kona/sdk/protocol/derive/signaling.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/derive/signaling.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/derive/signaling.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/derive/stages.mdx b/rust/docs/docs/pages/kona/sdk/protocol/derive/stages.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/derive/stages.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/derive/stages.mdx diff --git a/rust/docs/docs/pages/kona/sdk/protocol/genesis/intro.mdx b/rust/docs/docs/pages/kona/sdk/protocol/genesis/intro.mdx new file mode 100644 index 00000000000..5fe03ee701f --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/protocol/genesis/intro.mdx @@ -0,0 +1,30 @@ +# Genesis + +The genesis crate contains types related to chain genesis. + +This section contains in-depth sections on building with [`kona-genesis`][genesis] crate types. 
+ +- [The Rollup Config](./rollup-config.mdx) +- [The System Config](./system-config.mdx) + + +[op-stack]: https://github.com/ethereum-optimism/optimism +[op-program]: https://github.com/ethereum-optimism/optimism/tree/develop/op-program +[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon +[cannon-rs]: https://github.com/op-rs/cannon-rs +[asterisc]: https://github.com/ethereum-optimism/asterisc +[fp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html +[fpp-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-program +[preimage-specs]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle +[cannon-specs]: https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#cannon-fault-proof-virtual-machine +[l2-output-root]: https://specs.optimism.io/protocol/proposals.html#l2-output-commitment-construction +[op-succinct]: https://github.com/succinctlabs/op-succinct +[revm]: https://github.com/bluealloy/revm + +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[issues]: https://github.com/ethereum-optimism/optimism/issues +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new +[contributing]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md + +[op-labs]: https://github.com/ethereum-optimism +[bad-boi-labs]: https://github.com/BadBoiLabs diff --git a/kona/docs/docs/pages/sdk/protocol/genesis/rollup-config.mdx b/rust/docs/docs/pages/kona/sdk/protocol/genesis/rollup-config.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/genesis/rollup-config.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/genesis/rollup-config.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/genesis/system-config.mdx b/rust/docs/docs/pages/kona/sdk/protocol/genesis/system-config.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/genesis/system-config.mdx rename to 
rust/docs/docs/pages/kona/sdk/protocol/genesis/system-config.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/hardforks.mdx b/rust/docs/docs/pages/kona/sdk/protocol/hardforks.mdx similarity index 90% rename from kona/docs/docs/pages/sdk/protocol/hardforks.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/hardforks.mdx index 0607994fcc7..f1b520c6bab 100644 --- a/kona/docs/docs/pages/sdk/protocol/hardforks.mdx +++ b/rust/docs/docs/pages/kona/sdk/protocol/hardforks.mdx @@ -1,7 +1,5 @@ # Hardforks -kona-hardforks crate - Hardforks are consensus layer types of the OP Stack. `kona-hardforks` most directly exports the [`Hardforks`][hardforks] type that provides diff --git a/rust/docs/docs/pages/kona/sdk/protocol/interop.mdx b/rust/docs/docs/pages/kona/sdk/protocol/interop.mdx new file mode 100644 index 00000000000..c01c1af35ed --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/protocol/interop.mdx @@ -0,0 +1,3 @@ +# Interop + +`kona-interop` provides core types for the interop protocol. diff --git a/rust/docs/docs/pages/kona/sdk/protocol/intro.mdx b/rust/docs/docs/pages/kona/sdk/protocol/intro.mdx new file mode 100644 index 00000000000..876f4d5f53a --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/protocol/intro.mdx @@ -0,0 +1,28 @@ +# Kona Protocol Libraries + +The Kona monorepo contains a set of protocol crates that are designed +to be `no_std` compatible for Kona's fault proof sdk. Protocol crates +are built on [alloy][alloy] and [op-alloy][op-alloy] types. + +The following protocol crates are published to [crates.io][crates]. + +At the lowest level, `kona-genesis` and `kona-hardforks` expose +core genesis and hardfork types. + +`kona-protocol` sits just above `kona-genesis`, composing genesis types +into other core protocol types, as well as many independent protocol types. + +More recently, the `kona-interop` crate was introduced that contains types +specific to [Interop][interop]. + +`kona-registry` contains bindings to the [superchain-registry][scr]. 
+The registry is available in a `no_std` environment +but requires `serde` to read serialized configs at compile time. `kona-registry` uses +types defined in `kona-genesis` to deserialize the superchain registry configs at compile time. + + +[crates]: https://crates.io +[alloy]: https://github.com/alloy-rs/alloy +[op-alloy]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy +[interop]: https://specs.optimism.io/interop/overview.html +[scr]: https://github.com/ethereum-optimism/superchain-registry diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/batches.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/batches.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/protocol/batches.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/protocol/batches.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/block-info.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/block-info.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/protocol/block-info.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/protocol/block-info.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/channels.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/channels.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/protocol/channels.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/protocol/channels.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/frames.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/frames.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/protocol/frames.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/protocol/frames.mdx diff --git a/rust/docs/docs/pages/kona/sdk/protocol/protocol/intro.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/intro.mdx new file mode 100644 index 00000000000..64094305772 --- /dev/null +++ b/rust/docs/docs/pages/kona/sdk/protocol/protocol/intro.mdx @@ -0,0 +1,63 @@ +# Protocol + +The 
[`kona-protocol`][protocol] crate contains types, constants, and methods +specific to Optimism derivation and batch-submission. + +[`kona-protocol`][protocol] supports `no_std`. + +## Background + +Protocol types are primarily used for L2 chain derivation. This section will +break down L2 chain derivation as it relates to types defined in +`kona-protocol` - that is, from the raw L2 chain data posted to L1, to the +[`Batch`][batch] type. And since the [`Batch`][batch] type naively breaks up +into the payload attributes, once executed, it becomes the canonical L2 block! +Note though, this provides an incredibly simplified introduction. It is advised +to reference [the specs][s] for the most up-to-date information regarding +derivation. + +The L2 chain is derived from data posted to the L1 chain - either as calldata +or blob data. Data is iteratively pulled from each L1 block and translated +into the first type defined by `kona-protocol`: the [`Frame`][frame] type. + +[`Frame`][frame]s are [parsed][parsed] from the raw data. Each [`Frame`][frame] +is a part of a [`Channel`][channel], the next type one level up in deriving +L2 blocks. [`Channel`][channel]s have IDs that frames reference. [`Frame`][frame]s +are [added][added] iteratively to the [`Channel`][channel]. Once a +[`Channel`][channel] [is ready][ready], it can be used to read a [`Batch`][batch]. + +Since a [`Channel`][channel] stitches together frames, it contains the raw frame +data. In order to turn this [`Channel`][channel] data into a [`Batch`][batch], +it needs to be decompressed using the respective (de)compression algorithm +(see [the channel specs][channel-specs] for more detail on this). Once +decompressed, the raw data can be [decoded][decoded] into the [`Batch`][batch] +type. 
+ + +## Sections + +#### Core Derivation Types (discussed above) + +- [Frames](./frames.mdx) +- [Channels](./channels.mdx) +- [Batches](./batches.mdx) + +#### Other Critical Protocol Types + +- [BlockInfo](./block-info.mdx) +- [L2BlockInfo](./l2-block-info.mdx) + + + +[decoded]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html#method.decode +[batch]: https://docs.rs/kona-protocol/latest/kona_protocol/enum.Batch.html +[ready]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.is_ready +[added]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html#method.add_frame +[channel]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Channel.html +[frame]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html +[parsed]: https://docs.rs/kona-protocol/latest/kona_protocol/struct.Frame.html#method.parse_frames + +[protocol]: https://crates.io/crates/kona-protocol +[s]: https://specs.optimism.io/protocol/derivation.html#overview +[lcd]: https://specs.optimism.io/protocol/derivation.html#overview +[channel-specs]: https://specs.optimism.io/protocol/derivation.html#channel-format diff --git a/kona/docs/docs/pages/sdk/protocol/protocol/l2-block-info.mdx b/rust/docs/docs/pages/kona/sdk/protocol/protocol/l2-block-info.mdx similarity index 100% rename from kona/docs/docs/pages/sdk/protocol/protocol/l2-block-info.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/protocol/l2-block-info.mdx diff --git a/kona/docs/docs/pages/sdk/protocol/registry.mdx b/rust/docs/docs/pages/kona/sdk/protocol/registry.mdx similarity index 91% rename from kona/docs/docs/pages/sdk/protocol/registry.mdx rename to rust/docs/docs/pages/kona/sdk/protocol/registry.mdx index e355197523d..9e815770ba7 100644 --- a/kona/docs/docs/pages/sdk/protocol/registry.mdx +++ b/rust/docs/docs/pages/kona/sdk/protocol/registry.mdx @@ -1,7 +1,5 @@ # Registry -kona-registry crate - [`kona-registry`][sc] is a `no_std` crate that exports rust type 
definitions for chains in the [`superchain-registry`][osr]. These are lazily evaluated statics that provide `ChainConfig`s, `RollupConfig`s, and `Chain` objects for all chains with static definitions @@ -56,7 +54,7 @@ println!("OP Mainnet Chain Config: {:?}", op_chain_config); [serde]: https://crates.io/crates/serde [alloy]: https://github.com/alloy-rs/alloy -[op-alloy]: https://github.com/alloy-rs/op-alloy +[op-alloy]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy [op-superchain]: https://docs.optimism.io/stack/explainer [osr]: https://github.com/ethereum-optimism/superchain-registry diff --git a/rust/docs/docs/pages/op-alloy/building/consensus.mdx b/rust/docs/docs/pages/op-alloy/building/consensus.mdx new file mode 100644 index 00000000000..7484725c490 --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/building/consensus.mdx @@ -0,0 +1,66 @@ +# Consensus + +The `op-alloy-consensus` crate provides an Optimism consensus interface. +It contains constants, types, and functions for implementing Optimism EL +consensus and communication. This includes an extended `OpTxEnvelope` type +with [deposit transactions](https://specs.optimism.io/protocol/deposits.html), and receipts containing OP Stack +specific fields (`deposit_nonce` + `deposit_receipt_version`). + +In general a type belongs in this crate if it exists in the +`alloy-consensus` crate, but was modified from the base Ethereum protocol +in the OP Stack. For consensus types that are not modified by the OP Stack, +the `alloy-consensus` types should be used instead. + + +## Block + +[`op-alloy-consensus`](https://crates.io/crates/op-alloy-consensus) exports an Optimism block type, [`OpBlock`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/type.OpBlock.html). + +This type simply re-uses the `alloy-consensus` block type, with `OpTxEnvelope` +as the type of transactions in the block. 
+ + +## Transactions + +Optimism extends the Ethereum [EIP-2718](https://eips.ethereum.org/EIPS/eip-2718) transaction envelope to include a +deposit variant. + +### [`OpTxEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxEnvelope.html) + +The [`OpTxEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxEnvelope.html) type is based on [Alloy](https://github.com/alloy-rs/alloy)'s +[`TxEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxType.html) type. + +Optimism modifies the `TxEnvelope` to the following. +- Legacy +- EIP-2930 +- EIP-1559 +- EIP-7702 +- Deposit + +Deposit is a custom transaction type that is either an L1 attributes +deposit transaction or a user-submitted deposit transaction. Read more +about deposit transactions in [the specs](https://specs.optimism.io). + +### Transaction Types ([`OpTxType`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxType.html)) + +The [`OpTxType`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxType.html) enumerates the transaction types using their byte identifier, +represents as a `u8` in rust. + + +## Receipt Types + +Just like [`op-alloy-consensus`](https://crates.io/crates/op-alloy-consensus) defines transaction types, +it also defines associated receipt types. + +[`OpReceiptEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/enum.OpReceiptEnvelope.html) defines an [Eip-2718](https://eips.ethereum.org/EIPS/eip-2718) receipt envelope type +modified for the OP Stack. It contains the following variants - mapping +directly to the `OpTxEnvelope` variants defined above. 
+ +- Legacy +- EIP-2930 +- EIP-1559 +- EIP-7702 +- Deposit + +There is also an [`OpDepositReceipt`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/struct.OpDepositReceipt.html) type, extending the alloy receipt +type with a deposit nonce and deposit receipt version. diff --git a/rust/docs/docs/pages/op-alloy/building/engine.mdx b/rust/docs/docs/pages/op-alloy/building/engine.mdx new file mode 100644 index 00000000000..f1cdde805af --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/building/engine.mdx @@ -0,0 +1,13 @@ +# RPC Engine Types + +The [`op-alloy-rpc-types-engine`](https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/) crate provides Optimism types for interfacing +with the Engine API in the OP Stack. + +Optimism defines a custom payload attributes type called [`OpPayloadAttributes`](https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/struct.OpPayloadAttributes.html). +`OpPayloadAttributes` extends alloy's [`PayloadAttributes`](https://docs.rs/alloy-rpc-types-engine/latest/alloy_rpc_types_engine/payload/struct.PayloadAttributes.html) with a few fields: transactions, +a flag for enabling the tx pool, the gas limit, and EIP 1559 parameters. + +Optimism also returns a custom type for the `engine_getPayload` request for both V3 and +V4 payload envelopes. These are the [`OpExecutionPayloadEnvelopeV3`](https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/payload/v3/struct.OpExecutionPayloadEnvelopeV3.html) and +[`OpExecutionPayloadEnvelopeV4`](https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/payload/v4/struct.OpExecutionPayloadEnvelopeV4.html) types, which both wrap payload envelope types +from [`alloy-rpc-types-engine`](https://crates.io/crates/alloy-rpc-types-engine). 
diff --git a/rust/docs/docs/pages/op-alloy/building/index.mdx b/rust/docs/docs/pages/op-alloy/building/index.mdx new file mode 100644 index 00000000000..1286a3c89ca --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/building/index.mdx @@ -0,0 +1,10 @@ +# Building + +This section offers in-depth documentation into the various `op-alloy` crates. +Some of the primary crates and their types are listed below. + +- [`op-alloy-consensus`](https://crates.io/crates/op-alloy-consensus) provides [`OpBlock`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/type.OpBlock.html), + [`OpTxEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/transaction/enum.OpTxEnvelope.html), [`OpReceiptEnvelope`](https://docs.rs/op-alloy-consensus/latest/op_alloy_consensus/enum.OpReceiptEnvelope.html), + and more. +- [`op-alloy-rpc-types-engine`](https://crates.io/crates/op-alloy-rpc-types-engine) provides the + [`OpPayloadAttributes`](https://docs.rs/op-alloy-rpc-types-engine/latest/op_alloy_rpc_types_engine/struct.OpPayloadAttributes.html). diff --git a/rust/docs/docs/pages/op-alloy/glossary.mdx b/rust/docs/docs/pages/op-alloy/glossary.mdx new file mode 100644 index 00000000000..37a95d32e3f --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/glossary.mdx @@ -0,0 +1,3 @@ +# Glossary + +*This document contains definitions for terms used throughout the op-alloy book.* diff --git a/rust/docs/docs/pages/op-alloy/intro.mdx b/rust/docs/docs/pages/op-alloy/intro.mdx new file mode 100644 index 00000000000..7f47b654c69 --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/intro.mdx @@ -0,0 +1,36 @@ +# op-alloy + +Welcome to the hands-on guide for getting started with `op-alloy`! + +`op-alloy` connects applications to the OP Stack, leveraging high +performance types, traits, and middleware from [Alloy](https://github.com/alloy-rs/alloy). + +> Development Status +> +> `op-alloy` is in active development, and is not yet ready for use in production. 
+> During development, this book will evolve quickly and may contain inaccuracies. +> +> Please [open an issue](https://github.com/ethereum-optimism/optimism/issues/new) if you find any errors or have any suggestions for +> improvements, and also feel free to [contribute](https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona/CONTRIBUTING.md) to the project! + +## Sections + +### [Getting Started](/op-alloy/starting) + +To get started with op-alloy, add its crates as a dependency and take your first steps. + +### [Building with op-alloy](/op-alloy/building) + +Walk through types and functionality available in different `op-alloy` crates. + +### [Contributing](https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy/CONTRIBUTING.md) + +Contributors are welcome! It is built and maintained by Alloy contributors, +members of [OP Labs](https://github.com/ethereum-optimism), and the broader open source community. + +`op-alloy` follows and expands the OP Stack standards set in the [specs](https://specs.optimism.io). + +### Licensing + +`op-alloy` is licensed under the combined Apache 2.0 and MIT License, along +with a SNAPPY license for snappy encoding use. diff --git a/rust/docs/docs/pages/op-alloy/starting.mdx b/rust/docs/docs/pages/op-alloy/starting.mdx new file mode 100644 index 00000000000..8bfc4187564 --- /dev/null +++ b/rust/docs/docs/pages/op-alloy/starting.mdx @@ -0,0 +1,80 @@ +# Installation + +[op-alloy](https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy) consists of a number of crates that provide a range of functionality +essential for interfacing with any OP Stack chain. + +The most succinct way to work with `op-alloy` is to add the [`op-alloy`](https://crates.io/crates/op-alloy) crate +with the `full` feature flag from the command-line using Cargo. + +```txt +cargo add op-alloy --features full +``` + +Alternatively, you can add the following to your `Cargo.toml` file. 
+ +```txt +op-alloy = { version = "0.5", features = ["full"] } +``` + +For more fine-grained control over the features you wish to include, you can add the individual +crates to your `Cargo.toml` file, or use the `op-alloy` crate with the features you need. + +After `op-alloy` is added as a dependency, crates re-exported by `op-alloy` are now available. + +```rust +use op_alloy::{ + genesis::{RollupConfig, SystemConfig}, + consensus::OpBlock, + protocol::BlockInfo, + network::Optimism, + provider::ext::engine::OpEngineApi, + rpc_types::OpTransactionReceipt, + rpc_jsonrpsee::traits::RollupNode, + rpc_types_engine::OpAttributesWithParent, +}; +``` + +## Features + +The [`op-alloy`](https://crates.io/crates/op-alloy) defines many [feature flags](https://docs.rs/crate/op-alloy/latest/features) including the following. + +Default +- `std` +- `k256` +- `serde` + +Full enables the most commonly used crates. +- `full` + +The `k256` feature flag enables the `k256` feature on the `op-alloy-consensus` crate. +- `k256` + +Arbitrary enables arbitrary features on crates, deriving the `Arbitrary` trait on types. +- `arbitrary` + +Serde derives serde's Serialize and Deserialize traits on types. +- `serde` + +Additionally, individual crates can be enabled using their shorthand names. +For example, the `consensus` feature flag provides the `op-alloy-consensus` re-export +so `op-alloy-consensus` types can be used from `op-alloy` through `op_alloy::consensus::InsertTypeHere`. 
+ +## Crates + +- [`op-alloy-network`](https://crates.io/crates/op-alloy-network) +- [`op-alloy-provider`](https://crates.io/crates/op-alloy-provider) +- [`op-alloy-consensus`](https://crates.io/crates/op-alloy-consensus) (supports `no_std`) +- [`op-alloy-rpc-jsonrpsee`](https://crates.io/crates/op-alloy-rpc-jsonrpsee) +- [`op-alloy-rpc-types`](https://crates.io/crates/op-alloy-rpc-types) (supports `no_std`) +- [`op-alloy-rpc-types-engine`](https://crates.io/crates/op-alloy-rpc-types-engine) (supports `no_std`) + +## `no_std` + +As noted above, the following crates are `no_std` compatible. + +- [`op-alloy-consensus`](https://crates.io/crates/op-alloy-consensus) +- [`op-alloy-rpc-types-engine`](https://crates.io/crates/op-alloy-rpc-types-engine) +- [`op-alloy-rpc-types`](https://crates.io/crates/op-alloy-rpc-types) + +To add `no_std` support to a crate, ensure the [check_no_std](https://github.com/ethereum-optimism/optimism/blob/develop/rust/op-alloy/scripts/check_no_std.sh) +script is updated to include this crate once `no_std` compatible. diff --git a/rust/docs/docs/pages/op-reth/cli/cli.mdx b/rust/docs/docs/pages/op-reth/cli/cli.mdx new file mode 100644 index 00000000000..ba486b26a21 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/cli.mdx @@ -0,0 +1,5 @@ +# CLI Reference + +The op-reth node is operated via the CLI by running the `op-reth node` command. To stop it, press `ctrl-c`. You may need to wait a bit as op-reth tears down existing p2p connections or performs other cleanup tasks. + +However, op-reth has more commands — see the [op-reth CLI reference](/op-reth/cli/op-reth). 
diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth.mdx new file mode 100644 index 00000000000..572335c21b1 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth.mdx @@ -0,0 +1,174 @@ +# op-reth + +Reth + +```bash +$ op-reth --help +``` +```txt +Usage: op-reth [OPTIONS] + +Commands: + node Start the node + init Initialize the database from a genesis file + init-state Initialize the database from a state dump file + import-op This syncs RLP encoded OP blocks below Bedrock from a file, without executing + import-receipts-op This imports RLP encoded receipts from a file + dump-genesis Dumps genesis block JSON configuration to stdout + db Database debugging utilities + stage Manipulate individual stages + p2p P2P Debugging utilities + config Write config to stdout + prune Prune according to the configuration without any limits + re-execute Re-execute blocks in parallel to verify historical sync correctness + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + + -V, --version + Print version + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/config.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/config.mdx new file mode 100644 index 00000000000..62389fe994e --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/config.mdx @@ -0,0 +1,162 @@ +# op-reth config + +Write config to stdout + +```bash +$ op-reth config --help +``` +```txt +Usage: op-reth config [OPTIONS] + +Options: + --config + The path to the configuration file to use. 
+ + --default + Show the default config + + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db.mdx new file mode 100644 index 00000000000..d8a816e23ae --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db.mdx @@ -0,0 +1,341 @@ +# op-reth db + +Database debugging utilities + +```bash +$ op-reth db --help +``` +```txt +Usage: op-reth db [OPTIONS] + +Commands: + stats Lists all the tables, their entry count and their size + list Lists the contents of a table + checksum Calculates the content checksum of a table or static file segment + diff Create a diff between two database tables or two entire databases + get Gets the content of a table for the given key + drop Deletes all database entries + clear Deletes all table entries + repair-trie Verifies trie consistency and outputs any inconsistencies + static-file-header Reads and displays the static file segment header + version Lists current and local database versions + path Returns the full database path + settings Manage storage settings + account-storage Gets storage size information for an account + help Print this message or the help of the given 
subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. 
+ + [default: false] + [possible values: true, false] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/account-storage.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/account-storage.mdx new file mode 100644 index 00000000000..f71f08c74f1 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/account-storage.mdx @@ -0,0 +1,170 @@ +# op-reth db account-storage + +Gets storage size information for an account + +```bash +$ op-reth db account-storage --help +``` +```txt +Usage: op-reth db account-storage [OPTIONS]
+ +Arguments: +
+ The account address to check storage for + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) 
+ + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum.mdx new file mode 100644 index 00000000000..8027558cfbc --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum.mdx @@ -0,0 +1,171 @@ +# op-reth db checksum + +Calculates the content checksum of a table or static file segment + +```bash +$ op-reth db checksum --help +``` +```txt +Usage: op-reth db checksum [OPTIONS] + +Commands: + mdbx Calculates the checksum of a database table + static-file Calculates the checksum of a static file segment + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/mdbx.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/mdbx.mdx new file mode 100644 index 00000000000..aa34fef6941 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/mdbx.mdx @@ -0,0 +1,179 @@ +# op-reth db checksum mdbx + +Calculates the checksum of a database table + +```bash +$ op-reth db checksum mdbx --help +``` +```txt +Usage: op-reth db checksum mdbx [OPTIONS] + +Arguments: +
+ The table name + +Options: + --start-key + The start of the range to checksum + + --end-key + The end of the range to checksum + + --limit + The maximum number of records that are queried and used to compute the checksum + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. 
This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/static-file.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/static-file.mdx new file mode 100644 index 00000000000..a9939730d1a --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/checksum/static-file.mdx @@ -0,0 +1,187 @@ +# op-reth db checksum static-file + +Calculates the checksum of a static file segment + +```bash +$ op-reth db checksum static-file --help +``` +```txt +Usage: op-reth db checksum static-file [OPTIONS] + +Arguments: + + The static file segment + + Possible values: + - headers: Static File segment responsible for the `CanonicalHeaders`, `Headers`, `HeaderTerminalDifficulties` tables + - transactions: Static File segment responsible for the `Transactions` table + - receipts: Static File segment responsible for the `Receipts` table + - transaction-senders: Static File segment responsible for the `TransactionSenders` table + - account-change-sets: Static File segment responsible for the `AccountChangeSets` table + - storage-change-sets: Static File segment responsible for the `StorageChangeSets` table + +Options: + --start-block + The block number to start from (inclusive) + + --end-block + The block number to end at (inclusive) + + --limit + The maximum number of rows to checksum + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear.mdx new file mode 100644 index 00000000000..1139958cbcb --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear.mdx @@ -0,0 +1,171 @@ +# op-reth db clear + +Deletes all table entries + +```bash +$ op-reth db clear --help +``` +```txt +Usage: op-reth db clear [OPTIONS] + +Commands: + mdbx Deletes all database table entries + static-file Deletes all static file segment entries + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/mdbx.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/mdbx.mdx new file mode 100644 index 00000000000..b9f6537b3dd --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/mdbx.mdx @@ -0,0 +1,170 @@ +# op-reth db clear mdbx + +Deletes all database table entries + +```bash +$ op-reth db clear mdbx --help +``` +```txt +Usage: op-reth db clear mdbx [OPTIONS]
+ +Arguments: +
+ + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/static-file.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/static-file.mdx new file mode 100644 index 00000000000..a0ceb8ee50c --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/clear/static-file.mdx @@ -0,0 +1,176 @@ +# op-reth db clear static-file + +Deletes all static file segment entries + +```bash +$ op-reth db clear static-file --help +``` +```txt +Usage: op-reth db clear static-file [OPTIONS] + +Arguments: + + Possible values: + - headers: Static File segment responsible for the `CanonicalHeaders`, `Headers`, `HeaderTerminalDifficulties` tables + - transactions: Static File segment responsible for the `Transactions` table + - receipts: Static File segment responsible for the `Receipts` table + - transaction-senders: Static File segment responsible for the `TransactionSenders` table + - account-change-sets: Static File segment responsible for the `AccountChangeSets` table + - storage-change-sets: Static File segment responsible for the `StorageChangeSets` table + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/diff.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/diff.mdx new file mode 100644 index 00000000000..1df107888f5 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/diff.mdx @@ -0,0 +1,222 @@ +# op-reth db diff + +Create a diff between two database tables or two entire databases + +```bash +$ op-reth db diff --help +``` +```txt +Usage: op-reth db diff [OPTIONS] --secondary-datadir --output + +Options: + --secondary-datadir + The path to the data dir for all reth files and subdirectories. 
+ + -h, --help + Print help (see a summary with '-h') + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + + --table
+ The table name to diff. If not specified, all tables are diffed. + + --output + The output directory for the diff report. + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) 
+ + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/drop.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/drop.mdx new file mode 100644 index 00000000000..ec387677f79 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/drop.mdx @@ -0,0 +1,169 @@ +# op-reth db drop + +Deletes all database entries + +```bash +$ op-reth db drop --help +``` +```txt +Usage: op-reth db drop [OPTIONS] + +Options: + -f, --force + Bypasses the interactive confirmation and drops the database directly + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. 
This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/get.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get.mdx new file mode 100644 index 00000000000..15b7ea8a287 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get.mdx @@ -0,0 +1,171 @@ +# op-reth db get + +Gets the content of a table for the given key + +```bash +$ op-reth db get --help +``` +```txt +Usage: op-reth db get [OPTIONS] + +Commands: + mdbx Gets the content of a database table for the given key + static-file Gets the content of a static file segment for the given key + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/mdbx.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/mdbx.mdx new file mode 100644 index 00000000000..e4f002625c4 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/mdbx.mdx @@ -0,0 +1,185 @@ +# op-reth db get mdbx + +Gets the content of a database table for the given key + +```bash +$ op-reth db get mdbx --help +``` +```txt +Usage: op-reth db get mdbx [OPTIONS]
[SUBKEY] [END_KEY] [END_SUBKEY] + +Arguments: +
+ + + + The key to get content for + + [SUBKEY] + The subkey to get content for + + [END_KEY] + Optional end key for range query (exclusive upper bound) + + [END_SUBKEY] + Optional end subkey for range query (exclusive upper bound) + +Options: + --raw + Output bytes instead of human-readable decoded value + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. 
This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/static-file.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/static-file.mdx new file mode 100644 index 00000000000..af64e2e00a2 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/get/static-file.mdx @@ -0,0 +1,185 @@ +# op-reth db get static-file + +Gets the content of a static file segment for the given key + +```bash +$ op-reth db get static-file --help +``` +```txt +Usage: op-reth db get static-file [OPTIONS] [SUBKEY] + +Arguments: + + Possible values: + - headers: Static File segment responsible for the `CanonicalHeaders`, `Headers`, `HeaderTerminalDifficulties` tables + - transactions: Static File segment responsible for the `Transactions` table + - receipts: Static File segment responsible for the `Receipts` table + - transaction-senders: Static File segment responsible for the `TransactionSenders` table + - account-change-sets: Static File segment responsible for the `AccountChangeSets` table + - storage-change-sets: Static File segment responsible for the `StorageChangeSets` table + + + The key to get content for + + [SUBKEY] + The subkey to get content for, for example address in changeset + +Options: + --raw + Output bytes instead of human-readable decoded value + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/list.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/list.mdx new file mode 100644 index 00000000000..1b926b1acab --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/list.mdx @@ -0,0 +1,212 @@ +# op-reth db list + +Lists the contents of a table + +```bash +$ op-reth db list --help +``` +```txt +Usage: op-reth db list [OPTIONS]
+ +Arguments: +
+ The table name + +Options: + -s, --skip + Skip first N entries + + [default: 0] + + -r, --reverse + Reverse the order of the entries. If enabled last table entries are read + + -l, --len + How many items to take from the walker + + [default: 5] + + --search + Search parameter for both keys and values. Prefix it with `0x` to search for binary data, and text otherwise. + + ATTENTION! For compressed tables (`Transactions` and `Receipts`), there might be missing results since the search uses the raw uncompressed value from the database. + + --min-row-size + Minimum size of row in bytes + + [default: 0] + + --min-key-size + Minimum size of key in bytes + + [default: 0] + + --min-value-size + Minimum size of value in bytes + + [default: 0] + + -c, --count + Returns the number of rows found + + -j, --json + Dump as JSON instead of using TUI + + --raw + Output bytes instead of human-readable decoded value + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/path.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/path.mdx new file mode 100644 index 00000000000..b500e225e1e --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/path.mdx @@ -0,0 +1,166 @@ +# op-reth db path + +Returns the full database path + +```bash +$ op-reth db path --help +``` +```txt +Usage: op-reth db path [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/repair-trie.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/repair-trie.mdx new file mode 100644 index 00000000000..67d5b8cee0b --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/repair-trie.mdx @@ -0,0 +1,174 @@ +# op-reth db repair-trie + +Verifies trie consistency and outputs any inconsistencies + +```bash +$ op-reth db repair-trie --help +``` +```txt +Usage: op-reth db repair-trie [OPTIONS] + +Options: + --dry-run + Only show inconsistencies without making any repairs + + --metrics + Enable Prometheus metrics. + + The metrics will be served at the given interface and port. 
+ + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings.mdx new file mode 100644 index 00000000000..95ff5e0df67 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings.mdx @@ -0,0 +1,171 @@ +# op-reth db settings + +Manage storage settings + +```bash +$ op-reth db settings --help +``` +```txt +Usage: op-reth db settings [OPTIONS] + +Commands: + get Get current storage settings from database + set Set storage settings in database + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/get.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/get.mdx new file mode 100644 index 00000000000..c4e54307302 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/get.mdx @@ -0,0 +1,166 @@ +# op-reth db settings get + +Get current storage settings from database + +```bash +$ op-reth db settings get --help +``` +```txt +Usage: op-reth db settings get [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set.mdx new file mode 100644 index 00000000000..8aa2ae3cdca --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set.mdx @@ -0,0 +1,176 @@ +# op-reth db settings set + +Set storage settings in database + +```bash +$ op-reth db settings set --help +``` +```txt +Usage: op-reth db settings set [OPTIONS] + +Commands: + receipts Store receipts in static files instead of the database + transaction_senders Store transaction senders in static files instead of the database + account_changesets Store account changesets in static files instead of the database + storages_history Store storage history in rocksdb instead of MDBX + transaction_hash_numbers Store transaction hash to number mapping in rocksdb instead of MDBX + account_history Store account history in rocksdb instead of MDBX + storage_changesets Store storage changesets in static files instead of the database + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_changesets.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_changesets.mdx new file mode 100644 index 00000000000..40520075834 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_changesets.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set account_changesets + +Store account changesets in static files instead of the database + +```bash +$ op-reth db settings set account_changesets --help +``` +```txt +Usage: op-reth db settings set account_changesets [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_history.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_history.mdx new file mode 100644 index 00000000000..641475ab142 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/account_history.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set account_history + +Store account history in rocksdb instead of MDBX + +```bash +$ op-reth db settings set account_history --help +``` +```txt +Usage: op-reth db settings set account_history [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts.mdx new file mode 100644 index 00000000000..2cbd8647ba2 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set receipts + +Store receipts in static files instead of the database + +```bash +$ op-reth db settings set receipts --help +``` +```txt +Usage: op-reth db settings set receipts [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts_in_static_files.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts_in_static_files.mdx new file mode 100644 index 00000000000..490ee06ce94 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/receipts_in_static_files.mdx @@ -0,0 +1,152 @@ +# op-reth db settings set receipts_in_static_files + +Store receipts in static files instead of the database + +```bash +$ op-reth db settings set receipts_in_static_files --help +``` +```txt +Usage: op-reth db settings set receipts_in_static_files [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces. + + - `http`: expects endpoint path to end with `/v1/traces` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storage_changesets.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storage_changesets.mdx new file mode 100644 index 00000000000..d84b848a6b6 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storage_changesets.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set storage_changesets + +Store storage changesets in static files instead of the database + +```bash +$ op-reth db settings set storage_changesets --help +``` +```txt +Usage: op-reth db settings set storage_changesets [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storages_history.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storages_history.mdx new file mode 100644 index 00000000000..bef26be1a49 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/storages_history.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set storages_history + +Store storage history in rocksdb instead of MDBX + +```bash +$ op-reth db settings set storages_history --help +``` +```txt +Usage: op-reth db settings set storages_history [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_hash_numbers.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_hash_numbers.mdx new file mode 100644 index 00000000000..b7f7dda97f1 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_hash_numbers.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set transaction_hash_numbers + +Store transaction hash to number mapping in rocksdb instead of MDBX + +```bash +$ op-reth db settings set transaction_hash_numbers --help +``` +```txt +Usage: op-reth db settings set transaction_hash_numbers [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders.mdx new file mode 100644 index 00000000000..f95ddf7b811 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders.mdx @@ -0,0 +1,170 @@ +# op-reth db settings set transaction_senders + +Store transaction senders in static files instead of the database + +```bash +$ op-reth db settings set transaction_senders --help +``` +```txt +Usage: op-reth db settings set transaction_senders [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders_in_static_files.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders_in_static_files.mdx new file mode 100644 index 00000000000..1947c57293b --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/settings/set/transaction_senders_in_static_files.mdx @@ -0,0 +1,152 @@ +# op-reth db settings set transaction_senders_in_static_files + +Store transaction senders in static files instead of the database + +```bash +$ op-reth db settings set transaction_senders_in_static_files --help +``` +```txt +Usage: op-reth db settings set transaction_senders_in_static_files [OPTIONS] + +Arguments: + + [possible values: true, false] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces. + + - `http`: expects endpoint path to end with `/v1/traces` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header.mdx new file mode 100644 index 00000000000..5483848ff01 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header.mdx @@ -0,0 +1,171 @@ +# op-reth db static-file-header + +Reads and displays the static file segment header + +```bash +$ op-reth db static-file-header --help +``` +```txt +Usage: op-reth db static-file-header [OPTIONS] + +Commands: + block Query by segment and block number + path Query by path to static file + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/block.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/block.mdx new file mode 100644 index 00000000000..bdf56a9804d --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/block.mdx @@ -0,0 +1,181 @@ +# op-reth db static-file-header block + +Query by segment and block number + +```bash +$ op-reth db static-file-header block --help +``` +```txt +Usage: op-reth db static-file-header block [OPTIONS] + +Arguments: + + Static file segment + + Possible values: + - headers: Static File segment responsible for the `CanonicalHeaders`, `Headers`, `HeaderTerminalDifficulties` tables + - transactions: Static File segment responsible for the `Transactions` table + - receipts: Static File segment responsible for the `Receipts` table + - transaction-senders: Static File segment responsible for the `TransactionSenders` table + - account-change-sets: Static File segment responsible for the `AccountChangeSets` table + - storage-change-sets: Static File segment responsible for the `StorageChangeSets` table + + + Block number to query + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/path.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/path.mdx new file mode 100644 index 00000000000..7f237185cec --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/static-file-header/path.mdx @@ -0,0 +1,170 @@ +# op-reth db static-file-header path + +Query by path to static file + +```bash +$ op-reth db static-file-header path --help +``` +```txt +Usage: op-reth db static-file-header path [OPTIONS] + +Arguments: + + Path to the static file + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/stats.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/stats.mdx new file mode 100644 index 00000000000..1a2ed7d4a93 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/stats.mdx @@ -0,0 +1,182 @@ +# op-reth db stats + +Lists all the tables, their entry count and their size + +```bash +$ op-reth db stats --help +``` +```txt +Usage: op-reth db stats [OPTIONS] + +Options: + --skip-consistency-checks + Skip consistency checks for static files + + --detailed-sizes + Show only the total size for static files + + --detailed-segments + Show detailed information per static file segment + + --checksum + Show a checksum of each table in the database. + + WARNING: this option will take a long time to run, as it needs to traverse and hash the entire database. + + For individual table checksums, use the `reth db checksum` command. + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/db/version.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/db/version.mdx new file mode 100644 index 00000000000..a33bf8ef3bd --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/db/version.mdx @@ -0,0 +1,166 @@ +# op-reth db version + +Lists current and local database versions + +```bash +$ op-reth db version --help +``` +```txt +Usage: op-reth db version [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/dump-genesis.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/dump-genesis.mdx new file mode 100644 index 00000000000..2486fb79aea --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/dump-genesis.mdx @@ -0,0 +1,165 @@ +# op-reth dump-genesis + +Dumps genesis block JSON configuration to stdout + +```bash +$ op-reth dump-genesis --help +``` +```txt +Usage: op-reth dump-genesis [OPTIONS] + +Options: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/import-op.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/import-op.mdx new file mode 100644 index 00000000000..c5affadf9f5 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/import-op.mdx @@ -0,0 +1,334 @@ +# op-reth import-op + +This syncs RLP encoded OP blocks below Bedrock from a file, without executing + +```bash +$ op-reth import-op --help +``` +```txt +Usage: op-reth import-op [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. 
+ + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --chunk-len + Chunk byte length to read from file. + + + The path to a block file for import. 
+ + The online stages (headers and bodies) are replaced by a file import, after which the + remaining stages are executed. + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/import-receipts-op.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/import-receipts-op.mdx new file mode 100644 index 00000000000..398086f9dc6 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/import-receipts-op.mdx @@ -0,0 +1,334 @@ +# op-reth import-receipts-op + +This imports RLP encoded receipts from a file + +```bash +$ op-reth import-receipts-op --help +``` +```txt +Usage: op-reth import-receipts-op [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. 
+ + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. 
If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. 
+ + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. 
+ + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --chunk-len + Chunk byte length to read from file. + + + The path to a receipts file for import. File must use `OpGethReceiptFileCodec` (used for + exporting OP chain segment below Bedrock block via testinprod/op-geth). + + + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/init-state.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/init-state.mdx new file mode 100644 index 00000000000..3e3e1ba019e --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/init-state.mdx @@ -0,0 +1,364 @@ +# op-reth init-state + +Initialize the database from a state dump file + +```bash +$ op-reth init-state --help +``` +```txt +Usage: op-reth init-state [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. 
+ + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --without-evm + Specifies whether to initialize the state without relying on EVM historical data. 
+ + When enabled, and before inserting the state, it creates a dummy chain up to the last EVM block specified. It then, appends the first block provided block. + + - **Note**: **Do not** import receipts and blocks beforehand, or this will fail or be ignored. + + --header + Header file containing the header in an RLP encoded format. + + --header-hash + Hash of the header. + + --without-ovm + Specifies whether to initialize the state without relying on OVM or EVM historical data. + + When enabled, and before inserting the state, it creates a dummy chain up to the last OVM block (#105235062) (14GB / 90 seconds). It then, appends the Bedrock block. This is hardcoded for OP mainnet, for other OP chains you will need to pass in a header. + + - **Note**: **Do not** import receipts and blocks beforehand, or this will fail or be ignored. + + + JSONL file with state dump. + + Must contain accounts in following format, additional account fields are ignored. Must + also contain { "root": \ } as first line. + { + "balance": "\", + "nonce": \, + "code": "\", + "storage": { + "\": "\", + .. + }, + "address": "\", + } + + Allows init at a non-genesis block. Caution! Blocks must be manually imported up until + and including the non-genesis block to init chain at. See 'import' command. + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) 
+ + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/init.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/init.mdx new file mode 100644 index 00000000000..9a0930b4fe3 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/init.mdx @@ -0,0 +1,325 @@ +# op-reth init + +Initialize the database from a genesis file + +```bash +$ op-reth init --help +``` +```txt +Usage: op-reth init [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). 
+ + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. 
+ + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. 
+ + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/node.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/node.mdx new file mode 100644 index 00000000000..98205ad008e --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/node.mdx @@ -0,0 +1,1274 @@ +# op-reth node + +Start the node + +```bash +$ op-reth node --help +``` +```txt +Usage: op-reth node [OPTIONS] + +Options: + --config + The path to the configuration file to use. + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + + --instance + Add a new instance of a node. + + Configures the ports of the node to avoid conflicts with the defaults. This is useful for running multiple nodes on the same machine. + + Max number of instances is 200. It is chosen in a way so that it's not possible to have port numbers that conflict with each other. + + Changes to the following port numbers: - `DISCOVERY_PORT`: default + `instance` - 1 - `AUTH_PORT`: default + `instance` * 100 - 100 - `HTTP_RPC_PORT`: default - `instance` + 1 - `WS_RPC_PORT`: default + `instance` * 2 - 2 - `IPC_PATH`: default + `-instance` + + --with-unused-ports + Sets all ports to unused, allowing the OS to choose random unused ports when sockets are bound. + + Mutually exclusive with `--instance`. + + -h, --help + Print help (see a summary with '-h') + +Metrics: + --metrics + Enable Prometheus metrics. + + The metrics will be served at the given interface and port. + + --metrics.prometheus.push.url + URL for pushing Prometheus metrics to a push gateway. 
+ + If set, the node will periodically push metrics to the specified push gateway URL. + + --metrics.prometheus.push.interval + Interval in seconds for pushing metrics to push gateway. + + Default: 5 seconds + + [default: 5] + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + +Networking: + -d, --disable-discovery + Disable the discovery service + + --disable-dns-discovery + Disable the DNS discovery + + --disable-discv4-discovery + Disable Discv4 discovery + + --enable-discv5-discovery + Enable Discv5 discovery + + --disable-nat + Disable Nat discovery + + --discovery.addr + The UDP address to use for devp2p peer discovery version 4 + + [default: 0.0.0.0] + + --discovery.port + The UDP port to use for devp2p peer discovery version 4 + + [default: 30303] + + --discovery.v5.addr + The UDP IPv4 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv4 + + --discovery.v5.addr.ipv6 + The UDP IPv6 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv6 + + --discovery.v5.port + The UDP IPv4 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv4, or `--discovery.v5.addr` is set + + [default: 9200] + + --discovery.v5.port.ipv6 + The UDP IPv6 port to use for devp2p peer discovery version 5. 
Not used unless `--addr` is IPv6, or `--discovery.addr.ipv6` is set + + [default: 9200] + + --discovery.v5.lookup-interval + The interval in seconds at which to carry out periodic lookup queries, for the whole run of the program + + [default: 20] + + --discovery.v5.bootstrap.lookup-interval + The interval in seconds at which to carry out boost lookup queries, for a fixed number of times, at bootstrap + + [default: 5] + + --discovery.v5.bootstrap.lookup-countdown + The number of times to carry out boost lookup queries at bootstrap + + [default: 200] + + --trusted-peers + Comma separated enode URLs of trusted peers for P2P connections. + + --trusted-peers enode://abcd@192.168.0.1:30303 + + --trusted-only + Connect to or accept from trusted peers only + + --bootnodes + Comma separated enode URLs for P2P discovery bootstrap. + + Will fall back to a network-specific default if not specified. + + --dns-retries + Amount of DNS resolution requests retries to perform when peering + + [default: 0] + + --peers-file + The path to the known peers file. Connected peers are dumped to this file on nodes + shutdown, and read on startup. Cannot be used with `--no-persist-peers`. + + --identity + Custom node identity + + [default: reth/-/] + + --p2p-secret-key + Secret key to use for this node. + + This will also deterministically set the peer ID. If not specified, it will be set in the data dir for the chain being used. + + --p2p-secret-key-hex + Hex encoded secret key to use for this node. + + This will also deterministically set the peer ID. Cannot be used together with `--p2p-secret-key`. + + --no-persist-peers + Do not persist peers. + + --nat + NAT resolution method (any|none|upnp|publicip|extip:\) + + [default: any] + + --addr + Network listening address + + [default: 0.0.0.0] + + --port + Network listening port + + [default: 30303] + + --max-outbound-peers + Maximum number of outbound peers. default: 100 + + --max-inbound-peers + Maximum number of inbound peers. 
default: 30 + + --max-peers + Maximum number of total peers (inbound + outbound). + + Splits peers using approximately 2:1 inbound:outbound ratio. Cannot be used together with `--max-outbound-peers` or `--max-inbound-peers`. + + --max-tx-reqs + Max concurrent `GetPooledTransactions` requests. + + [default: 130] + + --max-tx-reqs-peer + Max concurrent `GetPooledTransactions` requests per peer. + + [default: 1] + + --max-seen-tx-history + Max number of seen transactions to remember per peer. + + Default is 320 transaction hashes. + + [default: 320] + + --max-pending-imports + Max number of transactions to import concurrently. + + [default: 4096] + + --pooled-tx-response-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions + to pack in one response. + Spec'd at 2MiB. + + [default: 2097152] + + --pooled-tx-pack-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions to + request in one request. + + Since `RLPx` protocol version 68, the byte size of a transaction is shared as metadata in a + transaction announcement (see `RLPx` specs). This allows a node to request a specific size + response. + + By default, nodes request only 128 KiB worth of transactions, but should a peer request + more, up to 2 MiB, a node will answer with more than 128 KiB. + + Default is 128 KiB. + + [default: 131072] + + --max-tx-pending-fetch + Max capacity of cache of hashes for transactions pending fetch. + + [default: 25600] + + --net-if.experimental + Name of network interface used to communicate with peers. + + If flag is set, but no value is passed, the default interface for docker `eth0` is tried. + + --tx-propagation-policy + Transaction Propagation Policy + + The policy determines which peers transactions are gossiped to. + + [default: All] + + --tx-ingress-policy + Transaction ingress policy + + Determines which peers' transactions are accepted over P2P. 
+ + [default: All] + + --disable-tx-gossip + Disable transaction pool gossip + + Disables gossiping of transactions in the mempool to peers. This can be omitted for personal nodes, though providers should always opt to enable this flag. + + --tx-propagation-mode + Sets the transaction propagation mode by determining how new pending transactions are propagated to other peers in full. + + Examples: sqrt, all, max:10 + + [default: sqrt] + + --required-block-hashes + Comma separated list of required block hashes or block number=hash pairs. Peers that don't have these blocks will be filtered out. Format: hash or `block_number=hash` (e.g., 23115201=0x1234...) + + --network-id + Optional network ID to override the chain specification's network ID for P2P connections + + --netrestrict + Restrict network communication to the given IP networks (CIDR masks). + + Comma separated list of CIDR network specifications. Only peers with IP addresses within these ranges will be allowed to connect. + + Example: --netrestrict "192.168.0.0/16,10.0.0.0/8" + +RPC: + --http + Enable the HTTP-RPC server + + --http.addr + Http server address to listen on + + [default: 127.0.0.1] + + --http.port + Http server port to listen on + + [default: 8545] + + --http.disable-compression + Disable compression for HTTP responses + + --http.api + Rpc Modules to be configured for the HTTP server + + [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots, flashbots, miner, mev, testing] + + --http.corsdomain + Http Corsdomain to allow request from + + --ws + Enable the WS-RPC server + + --ws.addr + Ws server address to listen on + + [default: 127.0.0.1] + + --ws.port + Ws server port to listen on + + [default: 8546] + + --ws.origins + Origins from which to accept `WebSocket` requests + + --ws.api + Rpc Modules to be configured for the WS server + + [possible values: admin, debug, eth, net, trace, txpool, web3, rpc, reth, ots, flashbots, miner, mev, testing] + + --ipcdisable + Disable 
the IPC-RPC server + + --ipcpath + Filename for IPC socket/pipe within the datadir + + [default: .ipc] + + --ipc.permissions + Set the permissions for the IPC socket file, in octal format. + + If not specified, the permissions will be set by the system's umask. + + --authrpc.addr + Auth server address to listen on + + [default: 127.0.0.1] + + --authrpc.port + Auth server port to listen on + + [default: 8551] + + --authrpc.jwtsecret + Path to a JWT secret to use for the authenticated engine-API RPC server. + + This will enforce JWT authentication for all requests coming from the consensus layer. + + If no path is provided, a secret will be generated and stored in the datadir under `//jwt.hex`. For mainnet this would be `~/.reth/mainnet/jwt.hex` by default. + + --auth-ipc + Enable auth engine API over IPC + + --auth-ipc.path + Filename for auth IPC socket/pipe within the datadir + + [default: _engine_api.ipc] + + --disable-auth-server + Disable the auth/engine API server. + + This will prevent the authenticated engine-API server from starting. Use this if you're running a node that doesn't need to serve engine API requests. + + --rpc.jwtsecret + Hex encoded JWT secret to authenticate the regular RPC server(s), see `--http.api` and `--ws.api`. + + This is __not__ used for the authenticated engine-API RPC server, see `--authrpc.jwtsecret`. + + --rpc.max-request-size + Set the maximum RPC request payload size for both HTTP and WS in megabytes + + [default: 15] + + --rpc.max-response-size + Set the maximum RPC response payload size for both HTTP and WS in megabytes + + [default: 160] + [aliases: --rpc.returndata.limit] + + --rpc.max-subscriptions-per-connection + Set the maximum concurrent subscriptions per connection + + [default: 1024] + + --rpc.max-connections + Maximum number of RPC server connections + + [default: 500] + + --rpc.max-tracing-requests + Maximum number of concurrent tracing requests. 
+ + By default this chooses a sensible value based on the number of available cores. Tracing requests are generally CPU bound. Choosing a value that is higher than the available CPU cores can have a negative impact on the performance of the node and affect the node's ability to maintain sync. + + [default: ] + + --rpc.max-blocking-io-requests + Maximum number of concurrent blocking IO requests. + + Blocking IO requests include `eth_call`, `eth_estimateGas`, and similar methods that require EVM execution. These are spawned as blocking tasks to avoid blocking the async runtime. + + [default: 256] + + --rpc.max-trace-filter-blocks + Maximum number of blocks for `trace_filter` requests + + [default: 100] + + --rpc.max-blocks-per-filter + Maximum number of blocks that could be scanned per filter request. (0 = entire chain) + + [default: 100000] + + --rpc.max-logs-per-response + Maximum number of logs that can be returned in a single response. (0 = no limit) + + [default: 20000] + + --rpc.gascap + Maximum gas limit for `eth_call` and call tracing RPC methods + + [default: 50000000] + + --rpc.evm-memory-limit + Maximum memory the EVM can allocate per RPC request + + [default: 4294967295] + + --rpc.txfeecap + Maximum eth transaction fee (in ether) that can be sent via the RPC APIs (0 = no cap) + + [default: 1.0] + + --rpc.max-simulate-blocks + Maximum number of blocks for `eth_simulateV1` call + + [default: 256] + + --rpc.eth-proof-window + The maximum proof window for historical proof generation. This value allows for generating historical proofs up to configured number of blocks from current tip (up to `tip - window`) + + [default: 0] + + --rpc.proof-permits + Maximum number of concurrent getproof requests + + [default: 25] + + --rpc.pending-block + Configures the pending block behavior for RPC responses. + + Options: full (include all transactions), empty (header only), none (disable pending blocks). 
+ + [default: full] + + --rpc.forwarder + Endpoint to forward transactions to + + --builder.disallow + Path to file containing disallowed addresses, json-encoded list of strings. Block validation API will reject blocks containing transactions from these addresses + +RPC State Cache: + --rpc-cache.max-blocks + Max number of blocks in cache + + [default: 5000] + + --rpc-cache.max-receipts + Max number receipts in cache + + [default: 2000] + + --rpc-cache.max-headers + Max number of headers in cache + + [default: 1000] + + --rpc-cache.max-concurrent-db-requests + Max number of concurrent database requests + + [default: 512] + +Gas Price Oracle: + --gpo.blocks + Number of recent blocks to check for gas price + + [default: 20] + + --gpo.ignoreprice + Gas Price below which gpo will ignore transactions + + [default: 2] + + --gpo.maxprice + Maximum transaction priority fee(or gasprice before London Fork) to be recommended by gpo + + [default: 500000000000] + + --gpo.percentile + The percentile of gas prices to use for the estimate + + [default: 60] + + --gpo.default-suggested-fee + The default gas price to use if there are no blocks to use + + --rpc.send-raw-transaction-sync-timeout + Timeout for `send_raw_transaction_sync` RPC method + + [default: 30s] + + --testing.skip-invalid-transactions + Skip invalid transactions in `testing_buildBlockV1` instead of failing. + + When enabled, transactions that fail execution will be skipped, and all subsequent transactions from the same sender will also be skipped. 
+ +TxPool: + --txpool.pending-max-count + Max number of transaction in the pending sub-pool + + [default: 10000] + + --txpool.pending-max-size + Max size of the pending sub-pool in megabytes + + [default: 20] + + --txpool.basefee-max-count + Max number of transaction in the basefee sub-pool + + [default: 10000] + + --txpool.basefee-max-size + Max size of the basefee sub-pool in megabytes + + [default: 20] + + --txpool.queued-max-count + Max number of transaction in the queued sub-pool + + [default: 10000] + + --txpool.queued-max-size + Max size of the queued sub-pool in megabytes + + [default: 20] + + --txpool.blobpool-max-count + Max number of transaction in the blobpool + + [default: 10000] + + --txpool.blobpool-max-size + Max size of the blobpool in megabytes + + [default: 20] + + --txpool.blob-cache-size + Max number of entries for the in memory cache of the blob store + + --txpool.disable-blobs-support + Disable EIP-4844 blob transaction support + + --txpool.max-account-slots + Max number of executable transaction slots guaranteed per account + + [default: 16] + + --txpool.pricebump + Price bump (in %) for the transaction pool underpriced check + + [default: 10] + + --txpool.minimal-protocol-fee + Minimum base fee required by the protocol + + [default: 7] + + --txpool.minimum-priority-fee + Minimum priority fee required for transaction acceptance into the pool. Transactions with priority fee below this value will be rejected + + --txpool.gas-limit + The default enforced gas limit for transactions entering the pool + + [default: 30000000] + + --txpool.max-tx-gas + Maximum gas limit for individual transactions. 
Transactions exceeding this limit will be rejected by the transaction pool + + --blobpool.pricebump + Price bump percentage to replace an already existing blob transaction + + [default: 100] + + --txpool.max-tx-input-bytes + Max size in bytes of a single transaction allowed to enter the pool + + [default: 131072] + + --txpool.max-cached-entries + The maximum number of blobs to keep in the in memory blob cache + + [default: 100] + + --txpool.nolocals + Flag to disable local transaction exemptions + + --txpool.locals + Flag to allow certain addresses as local + + --txpool.no-local-transactions-propagation + Flag to toggle local transaction propagation + + --txpool.additional-validation-tasks + Number of additional transaction validation tasks to spawn + + [default: 1] + + --txpool.max-pending-txns + Maximum number of pending transactions from the network to buffer + + [default: 2048] + + --txpool.max-new-txns + Maximum number of new transactions to buffer + + [default: 1024] + + --txpool.max-new-pending-txs-notifications + How many new pending transactions to buffer and send to in progress pending transaction iterators + + [default: 200] + + --txpool.lifetime + Maximum amount of time non-executable transaction are queued + + [default: 10800] + + --txpool.transactions-backup + Path to store the local transaction backup at, to survive node restarts + + --txpool.disable-transactions-backup + Disables transaction backup to disk on node shutdown + + --txpool.max-batch-size + Max batch size for transaction pool insertions + + [default: 1] + +Builder: + --builder.extradata + Block extra data set by the payload builder + + [default: reth//] + + --builder.gaslimit + Target gas limit for built blocks + + --builder.interval + The interval at which the job should build a new payload after the last. 
+ + Interval is specified in seconds or in milliseconds if the value ends with `ms`: * `50ms` -> 50 milliseconds * `1` -> 1 second + + [default: 1] + + --builder.deadline + The deadline for when the payload builder job should resolve + + [default: 12] + + --builder.max-tasks + Maximum number of tasks to spawn for building a payload + + [default: 3] + + --builder.max-blobs + Maximum number of blobs to include per block + +Debug: + --debug.terminate + Flag indicating whether the node should be terminated after the pipeline sync + + --debug.tip + Set the chain tip manually for testing purposes. + + NOTE: This is a temporary flag + + --debug.max-block + Runs the sync only up to the specified block + + --debug.etherscan [] + Runs a fake consensus client that advances the chain using recent block hashes on Etherscan. If specified, requires an `ETHERSCAN_API_KEY` environment variable + + --debug.rpc-consensus-url + Runs a fake consensus client using blocks fetched from an RPC endpoint. Supports both HTTP and `WebSocket` endpoints - `WebSocket` endpoints will use subscriptions, while HTTP endpoints will poll for new blocks + + --debug.skip-fcu + If provided, the engine will skip `n` consecutive FCUs + + --debug.skip-new-payload + If provided, the engine will skip `n` consecutive new payloads + + --debug.reorg-frequency + If provided, the chain will be reorged at specified frequency + + --debug.reorg-depth + The reorg depth for chain reorgs + + --debug.engine-api-store + The path to store engine API messages at. If specified, all of the intercepted engine API messages will be written to specified location + + --debug.invalid-block-hook + Determines which type of invalid block hook to install + + Example: `witness,prestate` + + [default: witness] + [possible values: witness, pre-state, opcode] + + --debug.healthy-node-rpc-url + The RPC URL of a healthy node to use for comparing invalid block hook results against. 
+ + Debug setting that enables execution witness comparison for troubleshooting bad blocks. + When enabled, the node will collect execution witnesses from the specified source and + compare them against local execution when a bad block is encountered, helping identify + discrepancies in state execution. + + --ethstats + The URL of the ethstats server to connect to. Example: `nodename:secret@host:port` + + --debug.startup-sync-state-idle + Set the node to idle state when the backfill is not running. + + This makes the `eth_syncing` RPC return "Idle" when the node has just started or finished the backfill, but did not yet receive any new blocks. + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. 
+ + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Dev testnet: + --dev + Start the node in dev mode + + This mode uses a local proof-of-authority consensus engine with either fixed block times + or automatically mined blocks. + Disables network discovery and enables local http server. + Prefunds 20 accounts derived by mnemonic "test test test test test test test test test test + test junk" with 10 000 ETH each. + + --dev.block-max-transactions + How many transactions to mine per block + + --dev.block-time + Interval between blocks. + + Parses strings using [`humantime::parse_duration`] + --dev.block-time 12s + + --dev.mnemonic + Derive dev accounts from a fixed mnemonic instead of random ones. + + [default: "test test test test test test test test test test test junk"] + +Pruning: + --full + Run full node. Only the most recent [`MINIMUM_PRUNING_DISTANCE`] block states are stored + + --minimal + Run minimal storage mode with maximum pruning and smaller static files. + + This mode configures the node to use minimal disk space by: - Fully pruning sender recovery, transaction lookup, receipts - Leaving 10,064 blocks for account, storage history and block bodies - Using 10,000 blocks per static file segment + + --prune.block-interval + Minimum pruning interval measured in blocks + + --prune.sender-recovery.full + Prunes all sender recovery data + + --prune.sender-recovery.distance + Prune sender recovery data before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.sender-recovery.before + Prune sender recovery data before the specified block number. 
The specified block number is not pruned + + --prune.transaction-lookup.full + Prunes all transaction lookup data + + --prune.transaction-lookup.distance + Prune transaction lookup data before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.transaction-lookup.before + Prune transaction lookup data before the specified block number. The specified block number is not pruned + + --prune.receipts.full + Prunes all receipt data + + --prune.receipts.pre-merge + Prune receipts before the merge block + + --prune.receipts.distance + Prune receipts before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.receipts.before + Prune receipts before the specified block number. The specified block number is not pruned + + --prune.receiptslogfilter + Configure receipts log filter. Format: <`address`>:<`prune_mode`>... where <`prune_mode`> can be 'full', 'distance:<`blocks`>', or 'before:<`block_number`>' + + --prune.account-history.full + Prunes all account history + + --prune.account-history.distance + Prune account before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.account-history.before + Prune account history before the specified block number. The specified block number is not pruned + + --prune.storage-history.full + Prunes all storage history data + + --prune.storage-history.distance + Prune storage history before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.storage-history.before + Prune storage history before the specified block number. The specified block number is not pruned + + --prune.bodies.pre-merge + Prune bodies before the merge block + + --prune.bodies.distance + Prune bodies before the `head-N` block number. In other words, keep last N + 1 blocks + + --prune.bodies.before + Prune storage history before the specified block number. 
The specified block number is not pruned + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + +Engine: + --engine.persistence-threshold + Configure persistence threshold for the engine. This determines how many canonical blocks must be in-memory, ahead of the last persisted block, before flushing canonical blocks to disk again. + + To persist blocks as fast as the node receives them, set this value to zero. This will cause more frequent DB writes. 
+ + [default: 2] + + --engine.memory-block-buffer-target + Configure the target number of blocks to keep in memory + + [default: 0] + + --engine.legacy-state-root + Enable legacy state root + + --engine.disable-state-cache + Disable state cache + + --engine.disable-prewarming + Disable parallel prewarming + + --engine.disable-parallel-sparse-trie + Disable the parallel sparse trie in the engine + + --engine.state-provider-metrics + Enable state provider latency metrics. This allows the engine to collect and report stats about how long state provider calls took during execution, but this does introduce slight overhead to state provider calls + + --engine.cross-block-cache-size + Configure the size of cross-block cache in megabytes + + [default: 4096] + + --engine.state-root-task-compare-updates + Enable comparing trie updates from the state root task to the trie updates from the regular state root calculation + + --engine.accept-execution-requests-hash + Enables accepting requests hash instead of an array of requests in `engine_newPayloadV4` + + --engine.multiproof-chunking + Whether multiproof task should chunk proof targets + + --engine.multiproof-chunk-size + Multiproof task chunk size for proof targets + + [default: 60] + + --engine.reserved-cpu-cores + Configure the number of reserved CPU cores for non-reth processes + + [default: 1] + + --engine.disable-precompile-cache + Disable precompile cache + + --engine.state-root-fallback + Enable state root fallback, useful for testing + + --engine.always-process-payload-attributes-on-canonical-head + Always process payload attributes and begin a payload build process even if `forkchoiceState.headBlockHash` is already the canonical head or an ancestor. See `TreeConfig::always_process_payload_attributes_on_canonical_head` for more details. + + Note: This is a no-op on OP Stack. + + --engine.allow-unwind-canonical-header + Allow unwinding canonical header to ancestor during forkchoice updates. 
See `TreeConfig::unwind_canonical_header` for more details + + --engine.storage-worker-count + Configure the number of storage proof workers in the Tokio blocking pool. If not specified, defaults to 2x available parallelism, clamped between 2 and 64 + + --engine.account-worker-count + Configure the number of account proof workers in the Tokio blocking pool. If not specified, defaults to the same count as storage workers + + --engine.enable-proof-v2 + Enable V2 storage proofs for state root calculations + + --engine.disable-cache-metrics + Disable cache metrics recording, which can take up to 50ms with large cached state + +ERA: + --era.enable + Enable import from ERA1 files + + --era.path + The path to a directory for import. + + The ERA1 files are read from the local directory parsing headers and bodies. + + --era.url + The URL to a remote host where the ERA1 files are hosted. + + The ERA1 files are read from the remote host using HTTP GET requests parsing headers + and bodies. + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. 
Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +Rollup: + --rollup.sequencer + Endpoint for the sequencer mempool (can be both HTTP and WS) + + [aliases: --rollup.sequencer-http, --rollup.sequencer-ws] + + --rollup.disable-tx-pool-gossip + Disable transaction pool gossip + + --rollup.compute-pending-block + By default the pending block equals the latest block to save resources and not leak txs from the tx-pool, this flag enables computing of the pending block from the tx-pool instead. + + If `compute_pending_block` is not enabled, the payload builder will use the payload attributes from the latest block. Note that this flag is not yet functional. 
+ + --rollup.discovery.v4 + enables discovery v4 if provided + + --rollup.enable-tx-conditional + Enable transaction conditional support on sequencer + + --rollup.supervisor-http + HTTP endpoint for the supervisor + + [default: http://localhost:1337/] + + --rollup.supervisor-safety-level + Safety level for the supervisor + + [default: CrossUnsafe] + + --rollup.sequencer-headers + Optional headers to use when connecting to the sequencer + + --rollup.historicalrpc + RPC endpoint for historical data + + --min-suggested-priority-fee + Minimum suggested priority fee (tip) in wei, default `1_000_000` + + [default: 1000000] + + --flashblocks-url + A URL pointing to a secure websocket subscription that streams out flashblocks. + + If given, the flashblocks are received to build pending block. All request with "pending" block tag will use the pending state based on flashblocks. + + --flashblock-consensus + Enable flashblock consensus client to drive the chain forward + + When enabled, the flashblock consensus client will process flashblock sequences and submit them to the engine API to advance the chain. Requires `flashblocks_url` to be set. + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) 
+ + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p.mdx new file mode 100644 index 00000000000..3b4efdbd6f0 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p.mdx @@ -0,0 +1,163 @@ +# op-reth p2p + +P2P Debugging utilities + +```bash +$ op-reth p2p --help +``` +```txt +Usage: op-reth p2p [OPTIONS] + +Commands: + header Download block header + body Download block body + rlpx RLPx commands + bootnode Bootnode command + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/body.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/body.mdx new file mode 100644 index 00000000000..7fb5e5fa61c --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/body.mdx @@ -0,0 +1,413 @@ +# op-reth p2p body + +Download block body + +```bash +$ op-reth p2p body --help +``` +```txt +Usage: op-reth p2p body [OPTIONS] + +Options: + --retries + The number of retries per request + + [default: 5] + + -h, --help + Print help (see a summary with '-h') + +Networking: + -d, --disable-discovery + Disable the discovery service + + --disable-dns-discovery + Disable the DNS discovery + + --disable-discv4-discovery + Disable Discv4 discovery + + --enable-discv5-discovery + Enable Discv5 discovery + + --disable-nat + Disable Nat discovery + + --discovery.addr + The UDP address to use for devp2p peer discovery version 4 + + [default: 0.0.0.0] + + --discovery.port + The UDP port to use for devp2p peer discovery version 4 + + [default: 30303] + + --discovery.v5.addr + The UDP IPv4 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv4 + + --discovery.v5.addr.ipv6 + The UDP IPv6 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv6 + + --discovery.v5.port + The UDP IPv4 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv4, or `--discovery.v5.addr` is set + + [default: 9200] + + --discovery.v5.port.ipv6 + The UDP IPv6 port to use for devp2p peer discovery version 5. 
Not used unless `--addr` is IPv6, or `--discovery.addr.ipv6` is set + + [default: 9200] + + --discovery.v5.lookup-interval + The interval in seconds at which to carry out periodic lookup queries, for the whole run of the program + + [default: 20] + + --discovery.v5.bootstrap.lookup-interval + The interval in seconds at which to carry out boost lookup queries, for a fixed number of times, at bootstrap + + [default: 5] + + --discovery.v5.bootstrap.lookup-countdown + The number of times to carry out boost lookup queries at bootstrap + + [default: 200] + + --trusted-peers + Comma separated enode URLs of trusted peers for P2P connections. + + --trusted-peers enode://abcd@192.168.0.1:30303 + + --trusted-only + Connect to or accept from trusted peers only + + --bootnodes + Comma separated enode URLs for P2P discovery bootstrap. + + Will fall back to a network-specific default if not specified. + + --dns-retries + Amount of DNS resolution requests retries to perform when peering + + [default: 0] + + --peers-file + The path to the known peers file. Connected peers are dumped to this file on nodes + shutdown, and read on startup. Cannot be used with `--no-persist-peers`. + + --identity + Custom node identity + + [default: reth/-/] + + --p2p-secret-key + Secret key to use for this node. + + This will also deterministically set the peer ID. If not specified, it will be set in the data dir for the chain being used. + + --p2p-secret-key-hex + Hex encoded secret key to use for this node. + + This will also deterministically set the peer ID. Cannot be used together with `--p2p-secret-key`. + + --no-persist-peers + Do not persist peers. + + --nat + NAT resolution method (any|none|upnp|publicip|extip:\) + + [default: any] + + --addr + Network listening address + + [default: 0.0.0.0] + + --port + Network listening port + + [default: 30303] + + --max-outbound-peers + Maximum number of outbound peers. default: 100 + + --max-inbound-peers + Maximum number of inbound peers. 
default: 30 + + --max-peers + Maximum number of total peers (inbound + outbound). + + Splits peers using approximately 2:1 inbound:outbound ratio. Cannot be used together with `--max-outbound-peers` or `--max-inbound-peers`. + + --max-tx-reqs + Max concurrent `GetPooledTransactions` requests. + + [default: 130] + + --max-tx-reqs-peer + Max concurrent `GetPooledTransactions` requests per peer. + + [default: 1] + + --max-seen-tx-history + Max number of seen transactions to remember per peer. + + Default is 320 transaction hashes. + + [default: 320] + + --max-pending-imports + Max number of transactions to import concurrently. + + [default: 4096] + + --pooled-tx-response-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions + to pack in one response. + Spec'd at 2MiB. + + [default: 2097152] + + --pooled-tx-pack-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions to + request in one request. + + Since `RLPx` protocol version 68, the byte size of a transaction is shared as metadata in a + transaction announcement (see `RLPx` specs). This allows a node to request a specific size + response. + + By default, nodes request only 128 KiB worth of transactions, but should a peer request + more, up to 2 MiB, a node will answer with more than 128 KiB. + + Default is 128 KiB. + + [default: 131072] + + --max-tx-pending-fetch + Max capacity of cache of hashes for transactions pending fetch. + + [default: 25600] + + --net-if.experimental + Name of network interface used to communicate with peers. + + If flag is set, but no value is passed, the default interface for docker `eth0` is tried. + + --tx-propagation-policy + Transaction Propagation Policy + + The policy determines which peers transactions are gossiped to. + + [default: All] + + --tx-ingress-policy + Transaction ingress policy + + Determines which peers' transactions are accepted over P2P. 
+ + [default: All] + + --disable-tx-gossip + Disable transaction pool gossip + + Disables gossiping of transactions in the mempool to peers. This can be omitted for personal nodes, though providers should always opt to enable this flag. + + --tx-propagation-mode + Sets the transaction propagation mode by determining how new pending transactions are propagated to other peers in full. + + Examples: sqrt, all, max:10 + + [default: sqrt] + + --required-block-hashes + Comma separated list of required block hashes or block number=hash pairs. Peers that don't have these blocks will be filtered out. Format: hash or `block_number=hash` (e.g., 23115201=0x1234...) + + --network-id + Optional network ID to override the chain specification's network ID for P2P connections + + --netrestrict + Restrict network communication to the given IP networks (CIDR masks). + + Comma separated list of CIDR network specifications. Only peers with IP addresses within these ranges will be allowed to connect. + + Example: --netrestrict "192.168.0.0/16,10.0.0.0/8" + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use. + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + + + The block number or hash + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/bootnode.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/bootnode.mdx new file mode 100644 index 00000000000..387eef511b5 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/bootnode.mdx @@ -0,0 +1,174 @@ +# op-reth p2p bootnode + +Bootnode command + +```bash +$ op-reth p2p bootnode --help +``` +```txt +Usage: op-reth p2p bootnode [OPTIONS] + +Options: + --addr + Listen address for the bootnode (default: "0.0.0.0:30301") + + [default: 0.0.0.0:30301] + + --p2p-secret-key + Secret key to use for the bootnode. + + This will also deterministically set the peer ID. 
If a path is provided but no key exists at that path, a new random secret will be generated and stored there. If no path is specified, a new ephemeral random secret will be used. + + --nat + NAT resolution method (any|none|upnp|publicip|extip:\) + + [default: any] + + --v5 + Run a v5 topic discovery bootnode + + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/header.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/header.mdx new file mode 100644 index 00000000000..9ede2d8eb71 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/header.mdx @@ -0,0 +1,413 @@ +# op-reth p2p header + +Download block header + +```bash +$ op-reth p2p header --help +``` +```txt +Usage: op-reth p2p header [OPTIONS] + +Options: + --retries + The number of retries per request + + [default: 5] + + -h, --help + Print help (see a summary with '-h') + +Networking: + -d, --disable-discovery + Disable the discovery service + + --disable-dns-discovery + Disable the DNS discovery + + --disable-discv4-discovery + Disable Discv4 discovery + + --enable-discv5-discovery + Enable Discv5 discovery + + --disable-nat + Disable Nat discovery + + --discovery.addr + The UDP address to use for devp2p peer discovery version 4 + + [default: 0.0.0.0] + + --discovery.port + The UDP port to use for devp2p peer discovery version 4 + + [default: 30303] + + --discovery.v5.addr + The UDP IPv4 address to use for devp2p peer discovery version 5. 
Overwritten by `RLPx` address, if it's also IPv4 + + --discovery.v5.addr.ipv6 + The UDP IPv6 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv6 + + --discovery.v5.port + The UDP IPv4 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv4, or `--discovery.v5.addr` is set + + [default: 9200] + + --discovery.v5.port.ipv6 + The UDP IPv6 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv6, or `--discovery.addr.ipv6` is set + + [default: 9200] + + --discovery.v5.lookup-interval + The interval in seconds at which to carry out periodic lookup queries, for the whole run of the program + + [default: 20] + + --discovery.v5.bootstrap.lookup-interval + The interval in seconds at which to carry out boost lookup queries, for a fixed number of times, at bootstrap + + [default: 5] + + --discovery.v5.bootstrap.lookup-countdown + The number of times to carry out boost lookup queries at bootstrap + + [default: 200] + + --trusted-peers + Comma separated enode URLs of trusted peers for P2P connections. + + --trusted-peers enode://abcd@192.168.0.1:30303 + + --trusted-only + Connect to or accept from trusted peers only + + --bootnodes + Comma separated enode URLs for P2P discovery bootstrap. + + Will fall back to a network-specific default if not specified. + + --dns-retries + Amount of DNS resolution requests retries to perform when peering + + [default: 0] + + --peers-file + The path to the known peers file. Connected peers are dumped to this file on nodes + shutdown, and read on startup. Cannot be used with `--no-persist-peers`. + + --identity + Custom node identity + + [default: reth/-/] + + --p2p-secret-key + Secret key to use for this node. + + This will also deterministically set the peer ID. If not specified, it will be set in the data dir for the chain being used. + + --p2p-secret-key-hex + Hex encoded secret key to use for this node. 
+ + This will also deterministically set the peer ID. Cannot be used together with `--p2p-secret-key`. + + --no-persist-peers + Do not persist peers. + + --nat + NAT resolution method (any|none|upnp|publicip|extip:\) + + [default: any] + + --addr + Network listening address + + [default: 0.0.0.0] + + --port + Network listening port + + [default: 30303] + + --max-outbound-peers + Maximum number of outbound peers. default: 100 + + --max-inbound-peers + Maximum number of inbound peers. default: 30 + + --max-peers + Maximum number of total peers (inbound + outbound). + + Splits peers using approximately 2:1 inbound:outbound ratio. Cannot be used together with `--max-outbound-peers` or `--max-inbound-peers`. + + --max-tx-reqs + Max concurrent `GetPooledTransactions` requests. + + [default: 130] + + --max-tx-reqs-peer + Max concurrent `GetPooledTransactions` requests per peer. + + [default: 1] + + --max-seen-tx-history + Max number of seen transactions to remember per peer. + + Default is 320 transaction hashes. + + [default: 320] + + --max-pending-imports + Max number of transactions to import concurrently. + + [default: 4096] + + --pooled-tx-response-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions + to pack in one response. + Spec'd at 2MiB. + + [default: 2097152] + + --pooled-tx-pack-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions to + request in one request. + + Since `RLPx` protocol version 68, the byte size of a transaction is shared as metadata in a + transaction announcement (see `RLPx` specs). This allows a node to request a specific size + response. + + By default, nodes request only 128 KiB worth of transactions, but should a peer request + more, up to 2 MiB, a node will answer with more than 128 KiB. + + Default is 128 KiB. + + [default: 131072] + + --max-tx-pending-fetch + Max capacity of cache of hashes for transactions pending fetch. 
+ + [default: 25600] + + --net-if.experimental + Name of network interface used to communicate with peers. + + If flag is set, but no value is passed, the default interface for docker `eth0` is tried. + + --tx-propagation-policy + Transaction Propagation Policy + + The policy determines which peers transactions are gossiped to. + + [default: All] + + --tx-ingress-policy + Transaction ingress policy + + Determines which peers' transactions are accepted over P2P. + + [default: All] + + --disable-tx-gossip + Disable transaction pool gossip + + Disables gossiping of transactions in the mempool to peers. This can be omitted for personal nodes, though providers should always opt to enable this flag. + + --tx-propagation-mode + Sets the transaction propagation mode by determining how new pending transactions are propagated to other peers in full. + + Examples: sqrt, all, max:10 + + [default: sqrt] + + --required-block-hashes + Comma separated list of required block hashes or block number=hash pairs. Peers that don't have these blocks will be filtered out. Format: hash or `block_number=hash` (e.g., 23115201=0x1234...) + + --network-id + Optional network ID to override the chain specification's network ID for P2P connections + + --netrestrict + Restrict network communication to the given IP networks (CIDR masks). + + Comma separated list of CIDR network specifications. Only peers with IP addresses within these ranges will be allowed to connect. + + Example: --netrestrict "192.168.0.0/16,10.0.0.0/8" + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. 
+ + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use. + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + + + The header number or hash + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) 
+ + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx.mdx new file mode 100644 index 00000000000..5b33e8b850e --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx.mdx @@ -0,0 +1,160 @@ +# op-reth p2p rlpx + +RLPx commands + +```bash +$ op-reth p2p rlpx --help +``` +```txt +Usage: op-reth p2p rlpx [OPTIONS] + +Commands: + ping ping node + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0.
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx/ping.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx/ping.mdx new file mode 100644 index 00000000000..e91e437e943 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/p2p/rlpx/ping.mdx @@ -0,0 +1,160 @@ +# op-reth p2p rlpx ping + +ping node + +```bash +$ op-reth p2p rlpx ping --help +``` +```txt +Usage: op-reth p2p rlpx ping [OPTIONS] + +Arguments: + + The node to ping + +Options: + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/prune.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/prune.mdx new file mode 100644 index 00000000000..603af5d99e4 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/prune.mdx @@ -0,0 +1,325 @@ +# op-reth prune + +Prune according to the configuration without any limits + +```bash +$ op-reth prune --help +``` +```txt +Usage: op-reth prune [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories.
+ + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. 
+ + [default: false] + [possible values: true, false] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/re-execute.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/re-execute.mdx new file mode 100644 index 00000000000..c185b91027d --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/re-execute.mdx @@ -0,0 +1,339 @@ +# op-reth re-execute + +Re-execute blocks in parallel to verify historical sync correctness + +```bash +$ op-reth re-execute --help +``` +```txt +Usage: op-reth re-execute [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in.
+ + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. 
If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. 
+ + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. 
+ + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --from + The height to start at + + [default: 1] + + --to + The height to end at. Defaults to the latest block + + --num-tasks + Number of tasks to run in parallel. Defaults to the number of available CPUs + + --skip-invalid-blocks + Continues with execution when an invalid block is encountered and collects these blocks + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to DEBUG if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0.
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage.mdx new file mode 100644 index 00000000000..f1669000555 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage.mdx @@ -0,0 +1,163 @@ +# op-reth stage + +Manipulate individual stages + +```bash +$ op-reth stage --help +``` +```txt +Usage: op-reth stage [OPTIONS] + +Commands: + run Run a single stage + drop Drop a stage's tables from the database + dump Dumps a stage from a range into a new database + unwind Unwinds a certain block range, deleting it from the database + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/drop.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/drop.mdx new file mode 100644 index 00000000000..d5034f0d4b8 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/drop.mdx @@ -0,0 +1,339 @@ +# op-reth stage drop + +Drop a stage's tables from the database + +```bash +$ op-reth stage drop --help +``` +```txt +Usage: op-reth stage drop [OPTIONS] + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. 
+ + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. 
+ + [default: false] + [possible values: true, false] + + + Possible values: + - headers: The headers stage within the pipeline + - bodies: The bodies stage within the pipeline + - senders: The senders stage within the pipeline + - execution: The execution stage within the pipeline + - account-hashing: The account hashing stage within the pipeline + - storage-hashing: The storage hashing stage within the pipeline + - hashing: The account and storage hashing stages within the pipeline + - merkle: The merkle stage within the pipeline + - tx-lookup: The transaction lookup stage within the pipeline + - account-history: The account history stage within the pipeline + - storage-history: The storage history stage within the pipeline + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump.mdx new file mode 100644 index 00000000000..9150154c31f --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump.mdx @@ -0,0 +1,332 @@ +# op-reth stage dump + +Dumps a stage from a range into a new database + +```bash +$ op-reth stage dump --help +``` +```txt +Usage: op-reth stage dump [OPTIONS] + +Commands: + execution Execution stage + storage-hashing `StorageHashing` stage + account-hashing `AccountHashing` stage + merkle Merkle stage + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). 
+ + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. 
+ + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. 
+ + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/account-hashing.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/account-hashing.mdx new file mode 100644 index 00000000000..05bedbe5b09 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/account-hashing.mdx @@ -0,0 +1,178 @@ +# op-reth stage dump account-hashing + +`AccountHashing` stage + +```bash +$ op-reth stage dump account-hashing --help +``` +```txt +Usage: op-reth stage dump account-hashing [OPTIONS] --output-datadir --from --to + +Options: + --output-datadir + The path to the new datadir folder. + + -f, --from + From which block + + -t, --to + To which block + + -d, --dry-run + If passed, it will dry-run a stage execution from the newly created database right after dumping + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/execution.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/execution.mdx new file mode 100644 index 00000000000..95cf1f8e64d --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/execution.mdx @@ -0,0 +1,178 @@ +# op-reth stage dump execution + +Execution stage + +```bash +$ op-reth stage dump execution --help +``` +```txt +Usage: op-reth stage dump execution [OPTIONS] --output-datadir --from --to + +Options: + --output-datadir + The path to the new datadir folder. 
+ + -f, --from + From which block + + -t, --to + To which block + + -d, --dry-run + If passed, it will dry-run a stage execution from the newly created database right after dumping + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. 
This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/merkle.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/merkle.mdx new file mode 100644 index 00000000000..523e60bbe92 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/merkle.mdx @@ -0,0 +1,178 @@ +# op-reth stage dump merkle + +Merkle stage + +```bash +$ op-reth stage dump merkle --help +``` +```txt +Usage: op-reth stage dump merkle [OPTIONS] --output-datadir --from --to + +Options: + --output-datadir + The path to the new datadir folder. + + -f, --from + From which block + + -t, --to + To which block + + -d, --dry-run + If passed, it will dry-run a stage execution from the newly created database right after dumping + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + 
Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/storage-hashing.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/storage-hashing.mdx new file mode 100644 index 00000000000..2e10a26adee --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/dump/storage-hashing.mdx @@ -0,0 +1,178 @@ +# op-reth stage dump storage-hashing + +`StorageHashing` stage + +```bash +$ op-reth stage dump storage-hashing --help +``` +```txt +Usage: op-reth stage dump storage-hashing [OPTIONS] --output-datadir --from --to + +Options: + --output-datadir + The path to the new datadir folder. + + -f, --from + From which block + + -t, --to + To which block + + -d, --dry-run + If passed, it will dry-run a stage execution from the newly created database right after dumping + + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/run.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/run.mdx new file mode 100644 index 00000000000..75b39f76c77 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/run.mdx @@ -0,0 +1,586 @@ +# op-reth stage run + +Run a single stage. + +```bash +$ op-reth stage run --help +``` +```txt +Usage: op-reth stage run [OPTIONS] --from --to + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. 
+ + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. 
assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). + + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. 
+ + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. 
+ + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --metrics + Enable Prometheus metrics. + + The metrics will be served at the given interface and port. 
+ + --from + The height to start at + + -t, --to + The end of the stage + + --batch-size + Batch size for stage execution and unwind + + -s, --skip-unwind + Normally, running the stage requires unwinding for stages that already have been run, in order to not rewrite to the same database slots. + + You can optionally skip the unwinding phase if you're syncing a block range that has not been synced before. + + -c, --commit + Commits the changes in the database. WARNING: potentially destructive. + + Useful when you want to run diagnostics on the database. + + NOTE: This flag is currently required for the headers, bodies, and execution stages because they use static files and must commit to properly unwind and run. + + --checkpoints + Save stage checkpoints + + + The name of the stage to run + + Possible values: + - headers: The headers stage within the pipeline + - bodies: The bodies stage within the pipeline + - senders: The senders stage within the pipeline + - execution: The execution stage within the pipeline + - account-hashing: The account hashing stage within the pipeline + - storage-hashing: The storage hashing stage within the pipeline + - hashing: The account and storage hashing stages within the pipeline + - merkle: The merkle stage within the pipeline + - tx-lookup: The transaction lookup stage within the pipeline + - account-history: The account history stage within the pipeline + - storage-history: The storage history stage within the pipeline + +Networking: + -d, --disable-discovery + Disable the discovery service + + --disable-dns-discovery + Disable the DNS discovery + + --disable-discv4-discovery + Disable Discv4 discovery + + --enable-discv5-discovery + Enable Discv5 discovery + + --disable-nat + Disable Nat discovery + + --discovery.addr + The UDP address to use for devp2p peer discovery version 4 + + [default: 0.0.0.0] + + --discovery.port + The UDP port to use for devp2p peer discovery version 4 + + [default: 30303] + + --discovery.v5.addr + The 
UDP IPv4 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv4 + + --discovery.v5.addr.ipv6 + The UDP IPv6 address to use for devp2p peer discovery version 5. Overwritten by `RLPx` address, if it's also IPv6 + + --discovery.v5.port + The UDP IPv4 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv4, or `--discovery.v5.addr` is set + + [default: 9200] + + --discovery.v5.port.ipv6 + The UDP IPv6 port to use for devp2p peer discovery version 5. Not used unless `--addr` is IPv6, or `--discovery.addr.ipv6` is set + + [default: 9200] + + --discovery.v5.lookup-interval + The interval in seconds at which to carry out periodic lookup queries, for the whole run of the program + + [default: 20] + + --discovery.v5.bootstrap.lookup-interval + The interval in seconds at which to carry out boost lookup queries, for a fixed number of times, at bootstrap + + [default: 5] + + --discovery.v5.bootstrap.lookup-countdown + The number of times to carry out boost lookup queries at bootstrap + + [default: 200] + + --trusted-peers + Comma separated enode URLs of trusted peers for P2P connections. + + --trusted-peers enode://abcd@192.168.0.1:30303 + + --trusted-only + Connect to or accept from trusted peers only + + --bootnodes + Comma separated enode URLs for P2P discovery bootstrap. + + Will fall back to a network-specific default if not specified. + + --dns-retries + Amount of DNS resolution requests retries to perform when peering + + [default: 0] + + --peers-file + The path to the known peers file. Connected peers are dumped to this file on nodes + shutdown, and read on startup. Cannot be used with `--no-persist-peers`. + + --identity + Custom node identity + + [default: reth/-/] + + --p2p-secret-key + Secret key to use for this node. + + This will also deterministically set the peer ID. If not specified, it will be set in the data dir for the chain being used. 
+ + --p2p-secret-key-hex + Hex encoded secret key to use for this node. + + This will also deterministically set the peer ID. Cannot be used together with `--p2p-secret-key`. + + --no-persist-peers + Do not persist peers. + + --nat + NAT resolution method (any|none|upnp|publicip|extip:\) + + [default: any] + + --addr + Network listening address + + [default: 0.0.0.0] + + --port + Network listening port + + [default: 30303] + + --max-outbound-peers + Maximum number of outbound peers. default: 100 + + --max-inbound-peers + Maximum number of inbound peers. default: 30 + + --max-peers + Maximum number of total peers (inbound + outbound). + + Splits peers using approximately 2:1 inbound:outbound ratio. Cannot be used together with `--max-outbound-peers` or `--max-inbound-peers`. + + --max-tx-reqs + Max concurrent `GetPooledTransactions` requests. + + [default: 130] + + --max-tx-reqs-peer + Max concurrent `GetPooledTransactions` requests per peer. + + [default: 1] + + --max-seen-tx-history + Max number of seen transactions to remember per peer. + + Default is 320 transaction hashes. + + [default: 320] + + --max-pending-imports + Max number of transactions to import concurrently. + + [default: 4096] + + --pooled-tx-response-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions + to pack in one response. + Spec'd at 2MiB. + + [default: 2097152] + + --pooled-tx-pack-soft-limit + Experimental, for usage in research. Sets the max accumulated byte size of transactions to + request in one request. + + Since `RLPx` protocol version 68, the byte size of a transaction is shared as metadata in a + transaction announcement (see `RLPx` specs). This allows a node to request a specific size + response. + + By default, nodes request only 128 KiB worth of transactions, but should a peer request + more, up to 2 MiB, a node will answer with more than 128 KiB. + + Default is 128 KiB. 
+ + [default: 131072] + + --max-tx-pending-fetch + Max capacity of cache of hashes for transactions pending fetch. + + [default: 25600] + + --net-if.experimental + Name of network interface used to communicate with peers. + + If flag is set, but no value is passed, the default interface for docker `eth0` is tried. + + --tx-propagation-policy + Transaction Propagation Policy + + The policy determines which peers transactions are gossiped to. + + [default: All] + + --tx-ingress-policy + Transaction ingress policy + + Determines which peers' transactions are accepted over P2P. + + [default: All] + + --disable-tx-gossip + Disable transaction pool gossip + + Disables gossiping of transactions in the mempool to peers. This can be omitted for personal nodes, though providers should always opt to enable this flag. + + --tx-propagation-mode + Sets the transaction propagation mode by determining how new pending transactions are propagated to other peers in full. + + Examples: sqrt, all, max:10 + + [default: sqrt] + + --required-block-hashes + Comma separated list of required block hashes or block number=hash pairs. Peers that don't have these blocks will be filtered out. Format: hash or `block_number=hash` (e.g., 23115201=0x1234...) + + --network-id + Optional network ID to override the chain specification's network ID for P2P connections + + --netrestrict + Restrict network communication to the given IP networks (CIDR masks). + + Comma separated list of CIDR network specifications. Only peers with IP addresses within these ranges will be allowed to connect. + + Example: --netrestrict "192.168.0.0/16,10.0.0.0/8" + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. 
This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind.mdx new file mode 100644 index 00000000000..37852456cfd --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind.mdx @@ -0,0 +1,333 @@ +# op-reth stage unwind + +Unwinds a certain block range, deleting it from the database + +```bash +$ op-reth stage unwind --help +``` +```txt +Usage: op-reth stage unwind [OPTIONS] + +Commands: + to-block Unwinds the database from the latest block, until the given block number or hash has been reached, that block is not included + num-blocks Unwinds the database from the latest block, until the given number of blocks have been reached + help Print this message or the help of the given subcommand(s) + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --datadir + The path to the data dir for all reth files and subdirectories. + + Defaults to the OS-specific data directory: + + - Linux: `$XDG_DATA_HOME/reth/` or `$HOME/.local/share/reth/` + - Windows: `{FOLDERID_RoamingAppData}/reth/` + - macOS: `$HOME/Library/Application Support/reth/` + + [default: default] + + --datadir.static-files + The absolute path to store static files in. + + --datadir.rocksdb + The absolute path to store `RocksDB` database in. + + --datadir.pprof-dumps + The absolute path to store pprof dumps in. + + --config + The path to the configuration file to use + + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Database: + --db.log-level + Database logging level. Levels higher than "notice" require a debug build + + Possible values: + - fatal: Enables logging for critical conditions, i.e. assertion failures + - error: Enables logging for error conditions + - warn: Enables logging for warning conditions + - notice: Enables logging for normal but significant condition + - verbose: Enables logging for verbose informational + - debug: Enables logging for debug-level messages + - trace: Enables logging for trace debug-level messages + - extra: Enables logging for extra debug-level messages + + --db.exclusive + Open environment in exclusive/monopolistic mode. Makes it possible to open a database on an NFS volume + + [possible values: true, false] + + --db.max-size + Maximum database size (e.g., 4TB, 8TB). + + This sets the "map size" of the database. If the database grows beyond this limit, the node will stop with an "environment map size limit reached" error. + + The default value is 8TB. + + --db.page-size + Database page size (e.g., 4KB, 8KB, 16KB). 
+ + Specifies the page size used by the MDBX database. + + The page size determines the maximum database size. MDBX supports up to 2^31 pages, so with the default 4KB page size, the maximum database size is 8TB. To allow larger databases, increase this value to 8KB or higher. + + WARNING: This setting is only configurable at database creation; changing it later requires re-syncing. + + --db.growth-step + Database growth step (e.g., 4GB, 4KB) + + --db.read-transaction-timeout + Read transaction timeout in seconds, 0 means no timeout + + --db.max-readers + Maximum number of readers allowed to access the database concurrently + + --db.sync-mode + Controls how aggressively the database synchronizes data to disk + +Static Files: + --static-files.blocks-per-file.headers + Number of blocks per file for the headers segment + + --static-files.blocks-per-file.transactions + Number of blocks per file for the transactions segment + + --static-files.blocks-per-file.receipts + Number of blocks per file for the receipts segment + + --static-files.blocks-per-file.transaction-senders + Number of blocks per file for the transaction senders segment + + --static-files.blocks-per-file.account-change-sets + Number of blocks per file for the account changesets segment + + --static-files.blocks-per-file.storage-change-sets + Number of blocks per file for the storage changesets segment + + --static-files.receipts + Store receipts in static files instead of the database. + + When enabled, receipts will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.transaction-senders + Store transaction senders in static files instead of the database. + + When enabled, transaction senders will be written to static files on disk instead of the database. 
+ + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.account-change-sets + Store account changesets in static files. + + When enabled, account changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + + --static-files.storage-change-sets + Store storage changesets in static files. + + When enabled, storage changesets will be written to static files on disk instead of the database. + + Note: This setting can only be configured at genesis initialization. Once the node has been initialized, changing this flag requires re-syncing from scratch. + + [default: false] + [possible values: true, false] + +RocksDB: + --rocksdb.all + Route all supported tables to `RocksDB` instead of MDBX. + + This enables `RocksDB` for `tx-hash`, `storages-history`, and `account-history` tables. Cannot be combined with individual flags set to false. + + --rocksdb.tx-hash + Route tx hash -> number table to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.storages-history + Route storages history tables to `RocksDB` instead of MDBX. + + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --rocksdb.account-history + Route account history tables to `RocksDB` instead of MDBX. 
+ + This is a genesis-initialization-only flag: changing it after genesis requires a re-sync. Defaults to `true` when the `edge` feature is enabled, `false` otherwise. + + [default: false] + [possible values: true, false] + + --offline + If this is enabled, then all stages except headers, bodies, and sender recovery will be unwound + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. 
If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. 
+ + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/num-blocks.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/num-blocks.mdx new file mode 100644 index 00000000000..b8434741573 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/num-blocks.mdx @@ -0,0 +1,170 @@ +# op-reth stage unwind num-blocks + +Unwinds the database from the latest block, until the given number of blocks have been reached + +```bash +$ op-reth stage unwind num-blocks --help +``` +```txt +Usage: op-reth stage unwind num-blocks [OPTIONS] + +Arguments: + + + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. 
+ + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. 
This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. + + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. 
+ + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/to-block.mdx b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/to-block.mdx new file mode 100644 index 00000000000..363c6adcf37 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/cli/op-reth/stage/unwind/to-block.mdx @@ -0,0 +1,170 @@ +# op-reth stage unwind to-block + +Unwinds the database from the latest block, until the given block number or hash has been reached, that block is not included + +```bash +$ op-reth stage unwind to-block --help +``` +```txt +Usage: op-reth stage unwind to-block [OPTIONS] + +Arguments: + + + +Options: + -h, --help + Print help (see a summary with '-h') + +Datadir: + --chain + The chain this node is running. + Possible values are either a built-in chain or the path to a chain specification file. + + Built-in chains: + optimism, optimism_sepolia, optimism-sepolia, base, base_sepolia, base-sepolia, arena-z, arena-z-sepolia, automata, base-devnet-0-sepolia-dev-0, bob, boba-sepolia, boba, camp-sepolia, celo, creator-chain-testnet-sepolia, cyber, cyber-sepolia, ethernity, ethernity-sepolia, fraxtal, funki, funki-sepolia, hashkeychain, ink, ink-sepolia, lisk, lisk-sepolia, lyra, metal, metal-sepolia, mint, mode, mode-sepolia, oplabs-devnet-0-sepolia-dev-0, orderly, ozean-sepolia, pivotal-sepolia, polynomial, race, race-sepolia, radius_testnet-sepolia, redstone, rehearsal-0-bn-0-rehearsal-0-bn, rehearsal-0-bn-1-rehearsal-0-bn, settlus-mainnet, settlus-sepolia-sepolia, shape, shape-sepolia, silent-data-mainnet, snax, soneium, soneium-minato-sepolia, sseed, swan, swell, tbn, tbn-sepolia, unichain, unichain-sepolia, worldchain, worldchain-sepolia, xterio-eth, zora, zora-sepolia, dev + + [default: optimism] + +Logging: + --log.stdout.format + The format to use for logs written to stdout + + Possible values: + - json: Represents JSON formatting for logs. 
This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.stdout.filter + The filter to use for logs written to stdout + + [default: ] + + --log.file.format + The format to use for logs written to the log file + + Possible values: + - json: Represents JSON formatting for logs. This format outputs log records as JSON objects, making it suitable for structured logging + - log-fmt: Represents logfmt (key=value) formatting for logs. This format is concise and human-readable, typically used in command-line applications + - terminal: Represents terminal-friendly formatting for logs + + [default: terminal] + + --log.file.filter + The filter to use for logs written to the log file + + [default: debug] + + --log.file.directory + The path to put log files in + + [default: /logs] + + --log.file.name + The prefix name of the log files + + [default: reth.log] + + --log.file.max-size + The maximum size (in MB) of one log file + + [default: 200] + + --log.file.max-files + The maximum amount of log files that will be stored. If set to 0, background file logging is disabled + + [default: 5] + + --log.journald + Write logs to journald + + --log.journald.filter + The filter to use for logs written to journald + + [default: error] + + --color + Sets whether or not the formatter emits ANSI terminal escape codes for colors and other text formatting + + Possible values: + - always: Colors on + - auto: Auto-detect + - never: Colors off + + [default: always] + + --logs-otlp[=] + Enable `Opentelemetry` logs export to an OTLP endpoint. 
+ + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/logs` - gRPC: `http://localhost:4317` + + Example: --logs-otlp=http://collector:4318/v1/logs + + [env: OTEL_EXPORTER_OTLP_LOGS_ENDPOINT=] + + --logs-otlp.filter + Set a filter directive for the OTLP logs exporter. This controls the verbosity of logs sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --logs-otlp.filter=info,reth=debug + + Defaults to INFO if not specified. + + [default: info] + +Display: + -v, --verbosity... + Set the minimum log level. + + -v Errors + -vv Warnings + -vvv Info + -vvvv Debug + -vvvvv Traces (warning: very verbose!) + + -q, --quiet + Silence all log output + +Tracing: + --tracing-otlp[=] + Enable `Opentelemetry` tracing export to an OTLP endpoint. + + If no value provided, defaults based on protocol: - HTTP: `http://localhost:4318/v1/traces` - gRPC: `http://localhost:4317` + + Example: --tracing-otlp=http://collector:4318/v1/traces + + [env: OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=] + + --tracing-otlp-protocol + OTLP transport protocol to use for exporting traces and logs. + + - `http`: expects endpoint path to end with `/v1/traces` or `/v1/logs` - `grpc`: expects endpoint without a path + + Defaults to HTTP if not specified. + + Possible values: + - http: HTTP/Protobuf transport, port 4318, requires `/v1/traces` path + - grpc: gRPC transport, port 4317 + + [env: OTEL_EXPORTER_OTLP_PROTOCOL=] + [default: http] + + --tracing-otlp.filter + Set a filter directive for the OTLP tracer. This controls the verbosity of spans and events sent to the OTLP endpoint. It follows the same syntax as the `RUST_LOG` environment variable. + + Example: --tracing-otlp.filter=info,reth=debug,hyper_util=off + + Defaults to TRACE if not specified. + + [default: debug] + + --tracing-otlp.sample-ratio + Trace sampling ratio to control the percentage of traces to export. 
+ + Valid range: 0.0 to 1.0 - 1.0, default: Sample all traces - 0.01: Sample 1% of traces - 0.0: Disable sampling + + Example: --tracing-otlp.sample-ratio=0.0. + + [env: OTEL_TRACES_SAMPLER_ARG=] +``` \ No newline at end of file diff --git a/rust/docs/docs/pages/op-reth/index.mdx b/rust/docs/docs/pages/op-reth/index.mdx new file mode 100644 index 00000000000..fd1ae9f1233 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/index.mdx @@ -0,0 +1,19 @@ +--- +description: op-reth is the OP Stack execution client built on reth. +--- + +# op-reth + +`op-reth` is the [OP Stack](https://docs.optimism.io/) execution client built on [reth](https://github.com/paradigmxyz/reth). It provides a high-performance execution layer for Optimism and all OP Stack chains. + +## Features + +- **Full OP Stack support** — Deposit transactions, L2-specific fee handling, and all OP Stack protocol changes. +- **Superchain Registry** — Built-in support for all chains in the [superchain registry](https://github.com/ethereum-optimism/superchain-registry). Use `--chain unichain`, `--chain base`, etc. +- **High performance** — Built on reth's modular architecture with parallelized execution and efficient storage. + +## Getting started + +- [Running op-reth on OP Stack chains](/op-reth/run/opstack) — Install and run op-reth with a rollup node. +- [Sync OP Mainnet](/op-reth/run/faq/sync-op-mainnet) — Import Bedrock state and sync OP Mainnet from scratch. +- [CLI Reference](/op-reth/cli/op-reth) — Full command-line reference for op-reth. diff --git a/rust/docs/docs/pages/op-reth/run/faq/sync-op-mainnet.mdx b/rust/docs/docs/pages/op-reth/run/faq/sync-op-mainnet.mdx new file mode 100644 index 00000000000..ed857da7c41 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/run/faq/sync-op-mainnet.mdx @@ -0,0 +1,107 @@ +--- +description: Syncing Reth with OP Mainnet and Bedrock state. +--- + +# Sync OP Mainnet + +To sync OP mainnet, Bedrock state needs to be imported as a starting point. 
There are currently two ways: + +- Minimal bootstrap **(recommended)**: only the state snapshot at the Bedrock block is imported without any OVM historical data. +- Full bootstrap **(not recommended)**: state, blocks and receipts are imported. + +## Minimal bootstrap (recommended) + +**The state snapshot at Bedrock block is required.** It can be exported from [op-geth](https://github.com/testinprod-io/op-erigon/blob/pcw109550/bedrock-db-migration/bedrock-migration.md#export-state) (**.jsonl**) or downloaded directly from [here](https://mega.nz/file/GdZ1xbAT#a9cBv3AqzsTGXYgX7nZc_3fl--tcBmOAIwIA5ND6kwc). + +### 1. Download and decompress + +After you have downloaded the state file, ensure the state file is decompressed into **.jsonl** format: + +```sh +$ unzstd /path/to/world_trie_state.jsonl.zstd +``` + +### 2. Import the state + +Import the state snapshot: + +```sh +$ op-reth init-state --without-ovm --chain optimism --datadir op-mainnet world_trie_state.jsonl +``` + +### 3. Sync from Bedrock to tip + +Running the node with `--debug.tip ` syncs the node without help from CL until a fixed tip. The +block hash can be taken from the latest block on [https://optimistic.etherscan.io](https://optimistic.etherscan.io). + +E.g., sync the node to a recent finalized block (e.g. 125200000) to catch up close to the tip, before pairing with op-node. + +```sh +$ op-reth node --chain optimism --datadir op-mainnet --debug.tip 0x098f87b75c8b861c775984f9d5dbe7b70cbbbc30fc15adb03a5044de0144f2d0 # block #125200000 +``` + +## Full bootstrap (not recommended) + +**Not recommended for now**: [storage consistency issue](https://github.com/paradigmxyz/reth/pull/11099) tldr: sudden crash may break the node. + +### Import state + +To sync OP mainnet, the Bedrock datadir needs to be imported to use as a starting point. +Blocks lower than the OP mainnet Bedrock fork are built on the OVM and cannot be executed on the EVM. 
+For this reason, the chain segment from genesis until Bedrock must be manually imported to circumvent +execution in reth's sync pipeline. + +Importing the OP mainnet Bedrock datadir requires exported data: + +- Blocks [and receipts] below Bedrock +- State snapshot at first Bedrock block + +### Manual Export Steps + +The `op-geth` Bedrock datadir can be downloaded from [https://datadirs.optimism.io](https://datadirs.optimism.io). + +To export the OVM chain from `op-geth`, clone the `testinprod-io/op-geth` repo and checkout +[testinprod-io/op-geth#1](https://github.com/testinprod-io/op-geth/pull/1). Commands to export blocks, receipts and state dump can be +found in `op-geth/migrate.sh`. + +### Manual Import Steps + +#### 1. Import Blocks + +Imports a `.rlp` file of blocks. + +Import of >100 million OVM blocks, from genesis to Bedrock, completes in 45 minutes. + +```bash +$ op-reth import-op --chain optimism <exported-blocks> +``` + +#### 2. Import Receipts + +This step is optional. To run a full node, skip this step. If, however, receipts are to be imported, the +corresponding transactions must already be imported (see [step 1](#1-import-blocks)). + +Imports a `.rlp` file of receipts that has been exported with the command specified in +[testinprod-io/op-geth#1](https://github.com/testinprod-io/op-geth/pull/1) (command for exporting receipts uses custom RLP-encoding). + +Import of >100 million OVM receipts, from genesis to Bedrock, completes in 30 minutes. + +```bash +$ op-reth import-receipts-op --chain optimism <exported-receipts> +``` + +#### 3. Import State + +Imports a `.jsonl` state dump. The block at which the state dump is made must be the latest block in +reth's database. This should be block 105 235 063, the first Bedrock block (see [step 1](#1-import-blocks)). + +Import of >4 million OP mainnet accounts at Bedrock, completes in 10 minutes. + +```bash +$ op-reth init-state --chain optimism <state-dump> +``` + +### Start with op-node + +Use `op-node` to track the tip. 
Start `op-node` with `--syncmode=execution-layer` and `--l2.enginekind=reth`. If `op-node`'s RPC +connection to L1 is over localhost, `--l1.trustrpc` can be set to improve performance. diff --git a/rust/docs/docs/pages/op-reth/run/opstack.mdx b/rust/docs/docs/pages/op-reth/run/opstack.mdx new file mode 100644 index 00000000000..d05017bf490 --- /dev/null +++ b/rust/docs/docs/pages/op-reth/run/opstack.mdx @@ -0,0 +1,116 @@ +--- +description: Running op-reth on Optimism and OP Stack chains. +--- + +# Running op-reth on OP Stack chains + +`op-reth` is the OP Stack execution client built on [reth][reth]. It supports OP Stack chains out of the box. Optimism has a small diff from the [L1 EELS][l1-el-spec], +comprising the following key changes: + +1. A new transaction type, [`0x7E (Deposit)`][deposit-spec], which is used to deposit funds from L1 to L2. +1. Modifications to the `PayloadAttributes` that allow the [sequencer][sequencer] to submit transactions to the EL through the Engine API. Payloads will be built with deposit transactions at the top of the block, + with the first deposit transaction always being the "L1 Info Transaction." +1. [EIP-1559](https://eips.ethereum.org/EIPS/eip-1559) denominator and elasticity parameters have been adjusted to account for the lower block time (2s) on L2. Otherwise, the 1559 formula remains the same. +1. Network fees are distributed to the various [fee vaults][l2-el-spec]. +1. ... and some other minor changes. + +For a more in-depth list of changes and their rationale, as well as specifics about the OP Stack specification such as transaction ordering and more, see the documented [`op-geth` diff][op-geth-forkdiff], +the [L2 EL specification][l2-el-spec], and the [OP Stack specification][op-stack-spec]. + +### Superchain Registry + +Since 1.4.0, op-reth has built-in support for all chains in the [superchain registry][superchain-registry]. All superchains are supported by the `--chain` argument, e.g. 
`--chain unichain` or `--chain unichain-sepolia`. + +## Running on Optimism + +You will need three things to run `op-reth`: + +1. An archival L1 node, synced to the settlement layer of the OP Stack chain you want to sync (e.g. `reth`, `geth`, `besu`, `nethermind`, etc.) +1. A rollup node (e.g. `op-node`, `magi`, `hildr`, etc.) +1. An instance of `op-reth`. + +For this example, we'll start a `Base Mainnet` node. + +### Installing `op-reth` + +To install `op-reth` via the `Makefile` in the workspace root: + +```sh +git clone https://github.com/ethereum-optimism/optimism.git && \ + cd optimism/op-reth && \ + make install +``` + +This will install the `op-reth` binary to `~/.cargo/bin/op-reth`. + +### Installing a Rollup Node + +Next, you'll need to install a [Rollup Node][rollup-node-spec], which is the equivalent of the Consensus Client on the OP Stack. Available options include: + +1. [`op-node`][op-node] +1. [`magi`][magi] +1. [`hildr`][hildr] + +For the sake of this tutorial, we'll use the reference implementation of the Rollup Node maintained by OP Labs, the `op-node`. The `op-node` can be built from source, or pulled from a [Docker image available on Google Cloud][op-node-docker]. + +### Running `op-reth` + +op-reth supports additional OP Stack specific CLI arguments: + +1. `--rollup.sequencer <uri>` - The sequencer endpoint to connect to. Transactions sent to the `op-reth` EL are also forwarded to this sequencer endpoint for inclusion, as the sequencer is the entity that builds blocks on OP Stack chains. Aliases: `--rollup.sequencer-http`, `--rollup.sequencer-ws`. +1. `--rollup.disable-tx-pool-gossip` - Disables gossiping of transactions in the mempool to peers. This can be omitted for personal nodes, though providers should always opt to enable this flag. +1. `--rollup.discovery.v4` - Enables the discovery v4 protocol for peer discovery. 
By default, op-reth, similar to op-geth, has discovery v5 enabled and discovery v4 disabled, whereas regular reth has discovery v4 enabled and discovery v5 disabled. +1. `--rollup.compute-pending-block` - Enables computing of the pending block from the tx-pool instead of using the latest block. By default the pending block equals the latest block to save resources and not leak txs from the tx-pool. +1. `--rollup.enable-tx-conditional` - Enable transaction conditional support on sequencer. +1. `--rollup.supervisor-http <uri>` - HTTP endpoint for the interop supervisor. +1. `--rollup.supervisor-safety-level <level>` - Safety level for the supervisor (default: `CrossUnsafe`). +1. `--rollup.sequencer-headers <headers>` - Optional headers to use when connecting to the sequencer. Requires `--rollup.sequencer`. +1. `--rollup.historicalrpc <uri>` - RPC endpoint for historical data. Alias: `--rollup.historical-rpc`. +1. `--min-suggested-priority-fee <wei>` - Minimum suggested priority fee (tip) in wei (default: `1000000`). +1. `--flashblocks-url <url>` - A URL pointing to a secure websocket subscription that streams out flashblocks. If given, the received flashblocks are used to build the pending block. +1. `--flashblock-consensus` - Enable flashblock consensus client to drive the chain forward. Requires `--flashblocks-url`. + +First, ensure that your L1 archival node is running and synced to tip. Also make sure that the beacon node / consensus layer client is running and has http APIs enabled. 
Then, start `op-reth` with the `--rollup.sequencer` flag set to the `Base Mainnet` sequencer endpoint: + +```sh +op-reth node \ + --chain base \ + --rollup.sequencer https://mainnet-sequencer.base.org \ + --http \ + --ws \ + --authrpc.port 9551 \ + --authrpc.jwtsecret /path/to/jwt.hex +``` + +Then, once `op-reth` has been started, start up the `op-node`: + +```sh +op-node \ + --network="base-mainnet" \ + --l1= \ + --l2=http://localhost:9551 \ + --l2.jwt-secret=/path/to/jwt.hex \ + --rpc.addr=0.0.0.0 \ + --rpc.port=7000 \ + --l1.beacon= \ + --syncmode=execution-layer \ + --l2.enginekind=reth +``` + +Consider adding the `--l1.trustrpc` flag to improve performance, if the connection to l1 is over localhost. + +[l1-el-spec]: https://github.com/ethereum/execution-specs +[rollup-node-spec]: https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/rollup-node.md +[op-geth-forkdiff]: https://op-geth.optimism.io +[sequencer]: https://github.com/ethereum-optimism/specs/blob/main/specs/background.md#sequencers +[op-stack-spec]: https://github.com/ethereum-optimism/specs/blob/main/specs +[l2-el-spec]: https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/exec-engine.md +[deposit-spec]: https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/deposits.md +[derivation-spec]: https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/derivation.md +[superchain-registry]: https://github.com/ethereum-optimism/superchain-registry +[op-node-docker]: https://console.cloud.google.com/artifacts/docker/oplabs-tools-artifacts/us/images/op-node +[reth]: https://github.com/paradigmxyz/reth +[optimism]: https://github.com/ethereum-optimism/optimism +[op-node]: https://github.com/ethereum-optimism/optimism/tree/develop/op-node +[magi]: https://github.com/a16z/magi +[hildr]: https://github.com/optimism-java/hildr diff --git a/kona/assets/banner.png b/rust/docs/docs/public/banner.png similarity index 100% rename from kona/assets/banner.png rename to 
rust/docs/docs/public/banner.png diff --git a/kona/assets/favicon.ico b/rust/docs/docs/public/favicon.ico similarity index 100% rename from kona/assets/favicon.ico rename to rust/docs/docs/public/favicon.ico diff --git a/kona/docs/docs/public/logo.png b/rust/docs/docs/public/logo.png similarity index 100% rename from kona/docs/docs/public/logo.png rename to rust/docs/docs/public/logo.png diff --git a/kona/docs/docs/public/op-program-fpp.svg b/rust/docs/docs/public/op-program-fpp.svg similarity index 100% rename from kona/docs/docs/public/op-program-fpp.svg rename to rust/docs/docs/public/op-program-fpp.svg diff --git a/kona/docs/docs/styles.css b/rust/docs/docs/styles.css similarity index 100% rename from kona/docs/docs/styles.css rename to rust/docs/docs/styles.css diff --git a/rust/docs/justfile b/rust/docs/justfile new file mode 100644 index 00000000000..0150a6f1204 --- /dev/null +++ b/rust/docs/justfile @@ -0,0 +1,15 @@ +# Install documentation dependencies +docs-install: + bun install + +# Start the documentation development server +docs-dev: docs-install + bun run dev + +# Build the documentation for production +docs-build: docs-install + bun run build + +# Preview the built documentation +docs-preview: docs-build + bun run preview diff --git a/rust/docs/package-lock.json b/rust/docs/package-lock.json new file mode 100644 index 00000000000..b1886b90ce8 --- /dev/null +++ b/rust/docs/package-lock.json @@ -0,0 +1,10036 @@ +{ + "name": "op-stack-rust-docs", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "op-stack-rust-docs", + "version": "0.0.0", + "dependencies": { + "react": "19.2.1", + "react-dom": "19.2.1", + "vocs": "1.2.1" + }, + "devDependencies": { + "@types/node": "latest", + "@types/react": "latest", + "tailwindcss": "^4.1.11", + "typescript": "latest" + } + }, + "node_modules/@antfu/install-pkg": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/@antfu/install-pkg/-/install-pkg-1.1.0.tgz", + "integrity": "sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==", + "license": "MIT", + "dependencies": { + "package-manager-detector": "^1.3.0", + "tinyexec": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + 
"integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + 
"resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.28.6.tgz", + "integrity": "sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==", + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": 
"^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": 
"7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@braintree/sanitize-url": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-7.1.2.tgz", + "integrity": "sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==", + "license": "MIT" + }, + "node_modules/@chevrotain/cst-dts-gen": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/cst-dts-gen/-/cst-dts-gen-11.0.3.tgz", + "integrity": "sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/gast": "11.0.3", + "@chevrotain/types": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/@chevrotain/cst-dts-gen/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/@chevrotain/gast": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/gast/-/gast-11.0.3.tgz", + "integrity": "sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/types": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/@chevrotain/gast/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": 
"sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/@chevrotain/regexp-to-ast": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/regexp-to-ast/-/regexp-to-ast-11.0.3.tgz", + "integrity": "sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==", + "license": "Apache-2.0" + }, + "node_modules/@chevrotain/types": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/types/-/types-11.0.3.tgz", + "integrity": "sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==", + "license": "Apache-2.0" + }, + "node_modules/@chevrotain/utils": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/@chevrotain/utils/-/utils-11.0.3.tgz", + "integrity": "sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==", + "license": "Apache-2.0" + }, + "node_modules/@clack/core": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.3.5.tgz", + "integrity": "sha512-5cfhQNH+1VQ2xLQlmzXMqUoiaH0lRBq9/CLW9lTyMbuKLC3+xEK01tHVvyut++mLOn5urSHmkm6I0Lg9MaJSTQ==", + "license": "MIT", + "dependencies": { + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@clack/prompts": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.7.0.tgz", + "integrity": "sha512-0MhX9/B4iL6Re04jPrttDm+BsP8y6mS7byuv0BvXgdXhbV5PdlsHt55dvNsuBCPZ7xq1oTAOOuotR9NFbQyMSA==", + "bundleDependencies": [ + "is-unicode-supported" + ], + "license": "MIT", + "dependencies": { + "@clack/core": "^0.3.3", + "is-unicode-supported": "*", + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@clack/prompts/node_modules/is-unicode-supported": { + "version": "1.3.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + 
"url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@emotion/hash": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@emotion/hash/-/hash-0.9.2.tgz", + "integrity": "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g==", + "license": "MIT" + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.2.tgz", + "integrity": "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.2.tgz", + "integrity": "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.2.tgz", + "integrity": "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.2.tgz", + "integrity": "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.27.2", + 
"resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.2.tgz", + "integrity": "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.2.tgz", + "integrity": "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.2.tgz", + "integrity": "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.2.tgz", + "integrity": "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.2.tgz", + "integrity": "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.27.2", + 
"resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.2.tgz", + "integrity": "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.2.tgz", + "integrity": "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.2.tgz", + "integrity": "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.2.tgz", + "integrity": "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==", + "cpu": [ + "mips64el" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.2.tgz", + "integrity": "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + 
"version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.2.tgz", + "integrity": "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.2.tgz", + "integrity": "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.2.tgz", + "integrity": "sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.2.tgz", + "integrity": "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.2.tgz", + "integrity": "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + 
"version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.2.tgz", + "integrity": "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.2.tgz", + "integrity": "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.2.tgz", + "integrity": "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.2.tgz", + "integrity": "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.2.tgz", + "integrity": "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/win32-ia32": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.2.tgz", + "integrity": "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.2.tgz", + "integrity": "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@floating-ui/core": { + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.4.tgz", + "integrity": "sha512-C3HlIdsBxszvm5McXlB8PeOEWfBhcGBTZGkGlWc2U0KFY5IwG5OQEuQ8rq52DZmcHDlPLd+YFBK+cZcytwIFWg==", + "license": "MIT", + "dependencies": { + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/dom": { + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.5.tgz", + "integrity": "sha512-N0bD2kIPInNHUHehXhMke1rBGs1dwqvC9O9KYMyyjK7iXt7GAhnro7UlcuYcGdS/yYOlq0MAVgrow8IbWJwyqg==", + "license": "MIT", + "dependencies": { + "@floating-ui/core": "^1.7.4", + "@floating-ui/utils": "^0.2.10" + } + }, + "node_modules/@floating-ui/react": { + "version": "0.27.17", + "resolved": "https://registry.npmjs.org/@floating-ui/react/-/react-0.27.17.tgz", + "integrity": "sha512-LGVZKHwmWGg6MRHjLLgsfyaX2y2aCNgnD1zT/E6B+/h+vxg+nIJUqHPAlTzsHDyqdgEpJ1Np5kxWuFEErXzoGg==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.1.7", + "@floating-ui/utils": "^0.2.10", + "tabbable": "^6.0.0" + }, + "peerDependencies": { + "react": ">=17.0.0", + "react-dom": ">=17.0.0" + } + }, + 
"node_modules/@floating-ui/react-dom": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@floating-ui/react-dom/-/react-dom-2.1.7.tgz", + "integrity": "sha512-0tLRojf/1Go2JgEVm+3Frg9A3IW8bJgKgdO0BN5RkF//ufuz2joZM63Npau2ff3J6lUVYgDSNzNkR+aH3IVfjg==", + "license": "MIT", + "dependencies": { + "@floating-ui/dom": "^1.7.5" + }, + "peerDependencies": { + "react": ">=16.8.0", + "react-dom": ">=16.8.0" + } + }, + "node_modules/@floating-ui/utils": { + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.10.tgz", + "integrity": "sha512-aGTxbpbg8/b5JfU1HXSrbH3wXZuLPJcNEcZQFMxLs3oSzgtVu6nFPkbbGGUvBcUjKV2YyB9Wxxabo+HEH9tcRQ==", + "license": "MIT" + }, + "node_modules/@fortawesome/fontawesome-free": { + "version": "6.7.2", + "resolved": "https://registry.npmjs.org/@fortawesome/fontawesome-free/-/fontawesome-free-6.7.2.tgz", + "integrity": "sha512-JUOtgFW6k9u4Y+xeIaEiLr3+cjoUPiAuLXoyKOJSia6Duzb7pq+A76P9ZdPDoAoxHdHzq6gE9/jKBGXlZT8FbA==", + "license": "(CC-BY-4.0 AND OFL-1.1 AND MIT)", + "engines": { + "node": ">=6" + } + }, + "node_modules/@hono/node-server": { + "version": "1.19.9", + "resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz", + "integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==", + "license": "MIT", + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, + "node_modules/@iconify/types": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@iconify/types/-/types-2.0.0.tgz", + "integrity": "sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==", + "license": "MIT" + }, + "node_modules/@iconify/utils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@iconify/utils/-/utils-3.1.0.tgz", + "integrity": "sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==", + "license": "MIT", + 
"dependencies": { + "@antfu/install-pkg": "^1.1.0", + "@iconify/types": "^2.0.0", + "mlly": "^1.8.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@mdx-js/mdx": { 
+ "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@mdx-js/mdx/-/mdx-3.1.1.tgz", + "integrity": "sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdx": "^2.0.0", + "acorn": "^8.0.0", + "collapse-white-space": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "estree-util-scope": "^1.0.0", + "estree-walker": "^3.0.0", + "hast-util-to-jsx-runtime": "^2.0.0", + "markdown-extensions": "^2.0.0", + "recma-build-jsx": "^1.0.0", + "recma-jsx": "^1.0.0", + "recma-stringify": "^1.0.0", + "rehype-recma": "^1.0.0", + "remark-mdx": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-rehype": "^11.0.0", + "source-map": "^0.7.0", + "unified": "^11.0.0", + "unist-util-position-from-estree": "^2.0.0", + "unist-util-stringify-position": "^4.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/@mdx-js/react": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@mdx-js/react/-/react-3.1.1.tgz", + "integrity": "sha512-f++rKLQgUVYDAtECQ6fn/is15GkEH9+nZPM3MS0RcxVqoTfawHvDlSCH7JbMhAM6uJ32v3eXLvLmLvjGu7PTQw==", + "license": "MIT", + "dependencies": { + "@types/mdx": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "@types/react": ">=16", + "react": ">=16" + } + }, + "node_modules/@mdx-js/rollup": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@mdx-js/rollup/-/rollup-3.1.1.tgz", + "integrity": "sha512-v8satFmBB+DqDzYohnm1u2JOvxx6Hl3pUvqzJvfs2Zk/ngZ1aRUhsWpXvwPkNeGN9c2NCm/38H29ZqXQUjf8dw==", + "license": "MIT", + "dependencies": { + "@mdx-js/mdx": "^3.0.0", + "@rollup/pluginutils": "^5.0.0", + "source-map": "^0.7.0", + "vfile": 
"^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "rollup": ">=2" + } + }, + "node_modules/@mermaid-js/parser": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/@mermaid-js/parser/-/parser-0.6.3.tgz", + "integrity": "sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==", + "license": "MIT", + "dependencies": { + "langium": "3.3.1" + } + }, + "node_modules/@noble/hashes": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@noble/hashes/-/hashes-1.8.0.tgz", + "integrity": "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==", + "license": "MIT", + "engines": { + "node": "^14.21.3 || >=16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@radix-ui/colors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@radix-ui/colors/-/colors-3.0.0.tgz", + "integrity": "sha512-FUOsGBkHrYJwCSEtWRCIfQbZG7q1e6DgxCIOe1SUQzDe/7rXXeA47s8yCn6fuTNQAj1Zq4oTFi9Yjp3wzElcxg==", + "license": "MIT" + }, + "node_modules/@radix-ui/number": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/number/-/number-1.1.1.tgz", + "integrity": "sha512-MkKCwxlXTgz6CFoJx3pCwn07GKp36+aZyu/u2Ln2VrA5DcdyCZkASEDBTd8x5whTQQL5CiYf4prXKLcgQdv29g==", + "license": "MIT" + }, + "node_modules/@radix-ui/primitive": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.3.tgz", + "integrity": "sha512-JTF99U/6XIjCBo0wqkU5sK10glYe27MRRsfwoiq5zzOEZLHU3A3KCMa5X/azekYRCJ0HlwI0crAXS/5dEHTzDg==", + "license": "MIT" + }, + "node_modules/@radix-ui/react-accessible-icon": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-accessible-icon/-/react-accessible-icon-1.1.7.tgz", + "integrity": 
"sha512-XM+E4WXl0OqUJFovy6GjmxxFyx9opfCAIUku4dlKRd5YEPqt4kALOkQOp0Of6reHuUkJuiPBEc5k0o4z4lTC8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-accordion": { + "version": "1.2.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-1.2.12.tgz", + "integrity": "sha512-T4nygeh9YE9dLRPhAHSeOZi7HBXo+0kYIPJXayZfvWOWA0+n3dESrZbjfDPUABkUNym6Hd+f2IR113To8D2GPA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collapsible": "1.1.12", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-alert-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.15.tgz", + "integrity": "sha512-oTVLkEw5GpdRe29BqJ0LSDFWI3qu0vR1M0mUkOQWDIUnY/QIkLpgDMWuKxP94c2NAC2LGcgVhG1ImF3jkZ5wXw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + 
"@radix-ui/react-dialog": "1.1.15", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-arrow": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.7.tgz", + "integrity": "sha512-F+M1tLhO+mlQaOWspE8Wstg+z6PwxwRd8oQ8IXceWz92kfAmalTRf0EjrouQeo7QssEPfCn05B4Ihs1K9WQ/7w==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-aspect-ratio": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-aspect-ratio/-/react-aspect-ratio-1.1.7.tgz", + "integrity": "sha512-Yq6lvO9HQyPwev1onK1daHCHqXVLzPhSVjmsNjCa2Zcxy2f7uJD2itDtxknv6FzAKCwD1qQkeVDmX/cev13n/g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-avatar": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-avatar/-/react-avatar-1.1.10.tgz", + 
"integrity": "sha512-V8piFfWapM5OmNCXTzVQY+E1rDa53zY+MQ4Y7356v4fFz6vqCyUtIz2rUD44ZEdwg78/jKmMJHj07+C/Z/rcog==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-is-hydrated": "0.1.0", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-checkbox": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.3.3.tgz", + "integrity": "sha512-wBbpv+NQftHDdG86Qc0pIyXk5IR3tM8Vd0nWLKDcX8nNn4nXFOFwsKuqw2okA/1D/mpaAkmuyndrPJTYDNZtFw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collapsible": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collapsible/-/react-collapsible-1.1.12.tgz", + "integrity": "sha512-Uu+mSh4agx2ib1uIGPP4/CKNULyajb3p92LsVXmH2EHVMTfZWpll88XJ0j4W0z3f8NK1eYl1+Mf/szHPmcHzyA==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-collection": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.7.tgz", + "integrity": "sha512-Fh9rGN0MoI4ZFUNyfFVNU4y9LUz93u9/0K+yLgA2bwRojxM8JU1DyvvMBabnZPBgMWREAJvU2jjVzq+LrFUglw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-compose-refs": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.2.tgz", + "integrity": "sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-context": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.2.tgz", + "integrity": "sha512-jCi/QKUM2r1Ju5a3J64TH2A5SpKAgh0LpknyqdQ4m6DCV0xJ2HG1xARRwNGPQfi1SLdLWZ1OJz6F4OMBBNiGJA==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-context-menu": { + "version": "2.2.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-context-menu/-/react-context-menu-2.2.16.tgz", + "integrity": "sha512-O8morBEW+HsVG28gYDZPTrT9UUovQUlJue5YO836tiTJhuIWBm/zQHc7j388sHWtdH/xUZurK9olD2+pcqx5ww==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dialog": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dialog/-/react-dialog-1.1.15.tgz", + "integrity": "sha512-TCglVRtzlffRNxRMEyR36DGBLJpeusFcgMVD9PZEzAKnUs1lKCgX5u9BmC2Yg+LL9MgZDugFFs1Vl+Jp4t/PGw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + 
"@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-direction": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-direction/-/react-direction-1.1.1.tgz", + "integrity": "sha512-1UEWRX6jnOA2y4H5WczZ44gOOjTEmlqv1uNW4GAJEO5+bauCBhv8snY65Iw5/VOS/ghKN9gr2KjnLKxrsvoMVw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-dismissable-layer": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dismissable-layer/-/react-dismissable-layer-1.1.11.tgz", + "integrity": "sha512-Nqcp+t5cTB8BinFkZgXiMJniQH0PsUt2k51FUhbdfeKvc4ACcG2uQniY/8+h1Yv6Kza4Q7lD7PQV0z0oicE0Mg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-escape-keydown": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + 
"node_modules/@radix-ui/react-dropdown-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-dropdown-menu/-/react-dropdown-menu-2.1.16.tgz", + "integrity": "sha512-1PLGQEynI/3OX/ftV54COn+3Sud/Mn8vALg2rWnBLnRaGtJDduNW/22XjlGgPdpcIbiQxjKtb7BkcjP00nqfJw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-guards": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-guards/-/react-focus-guards-1.1.3.tgz", + "integrity": "sha512-0rFg/Rj2Q62NCm62jZw0QX7a3sz6QCQU0LpZdNrJX8byRGaGVTqbrW9jAoIAHyMQqsNpeZ81YgSizOt5WXq0Pw==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-focus-scope": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-focus-scope/-/react-focus-scope-1.1.7.tgz", + "integrity": "sha512-t2ODlkXBQyn7jkl6TNaw/MtVEVvIGelJDCG41Okq/KwUsJBwQ4XVZsHAVUkK4mBv3ewiAS3PGuUWuY2BoK4ZUw==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 
|| ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-form": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-form/-/react-form-0.1.8.tgz", + "integrity": "sha512-QM70k4Zwjttifr5a4sZFts9fn8FzHYvQ5PiB19O2HsYibaHSVt9fH9rzB0XZo/YcM+b7t/p7lYCT/F5eOeF5yQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-label": "2.1.7", + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-form/node_modules/@radix-ui/react-label": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", + "integrity": "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-hover-card": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-hover-card/-/react-hover-card-1.1.15.tgz", + 
"integrity": "sha512-qgTkjNT1CfKMoP0rcasmlH2r1DAiYicWsDsufxl940sT2wHNEWWv6FMWIQXWhVdmC1d/HYfbhQx60KYyAtKxjg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-icons": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-icons/-/react-icons-1.3.2.tgz", + "integrity": "sha512-fyQIhGDhzfc9pK2kH6Pl9c4BDJGfMkPqkyIgYDthyNYoNg3wVhoJMMh19WS4Up/1KMPFVpNsT2q3WmXn2N1m6g==", + "license": "MIT", + "peerDependencies": { + "react": "^16.x || ^17.x || ^18.x || ^19.0.0 || ^19.0.0-rc" + } + }, + "node_modules/@radix-ui/react-id": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-id/-/react-id-1.1.1.tgz", + "integrity": "sha512-kGkGegYIdQsOb4XjsfM97rXsiHaBwco+hFI66oO4s9LU+PLAC5oJ7khdOVFxkhsmlbpUqDAvXw11CluXP+jkHg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.8.tgz", + "integrity": 
"sha512-FmXs37I6hSBVDlO4y764TNz1rLgKwjJMQ0EGte6F3Cb3f4bIuHB/iLa/8I9VKkmOy+gNHq8rql3j686ACVV21A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label/node_modules/@radix-ui/react-primitive": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.4.tgz", + "integrity": "sha512-9hQc4+GNVtJAIEPEqlYqW5RiYdrr8ea5XQ0ZOnD6fgru+83kqT15mq2OCcbe8KnjRZl5vF3ks69AKz3kh1jrhg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.4" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-label/node_modules/@radix-ui/react-slot": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.4.tgz", + "integrity": "sha512-Jl+bCv8HxKnlTLVrcDE8zTMJ09R9/ukw4qBs/oZClOfoQk/cOTbDn+NceXfV7j09YPVQUryJPHurafcSg6EVKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menu": { + "version": "2.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menu/-/react-menu-2.1.16.tgz", + "integrity": 
"sha512-72F2T+PLlphrqLcAotYPp0uJMr5SjP5SL01wfEspJbru5Zs5vQaSHb4VB3ZMJPimgHHCHG7gMOeOB9H3Hdmtxg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-menubar": { + "version": "1.1.16", + "resolved": "https://registry.npmjs.org/@radix-ui/react-menubar/-/react-menubar-1.1.16.tgz", + "integrity": "sha512-EB1FktTz5xRRi2Er974AUQZWg2yVBb1yjip38/lgwtCVRd3a+maUoGHN/xs9Yv8SY8QwbSEb+YrxGadVWbEutA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || 
^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-navigation-menu": { + "version": "1.2.14", + "resolved": "https://registry.npmjs.org/@radix-ui/react-navigation-menu/-/react-navigation-menu-1.2.14.tgz", + "integrity": "sha512-YB9mTFQvCOAQMHU+C/jVl96WmuWeltyUEpRJJky51huhds5W2FQr1J8D/16sQlf0ozxkPK8uF3niQMdUwZPv5w==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-one-time-password-field": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-one-time-password-field/-/react-one-time-password-field-0.1.8.tgz", + "integrity": "sha512-ycS4rbwURavDPVjCb5iS3aG4lURFDILi6sKI/WITUMZ13gMmn/xGjpLoqBAalhJaDk8I3UbCM5GzKHrnzwHbvg==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + 
"@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-is-hydrated": "0.1.0", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-password-toggle-field": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-password-toggle-field/-/react-password-toggle-field-0.1.3.tgz", + "integrity": "sha512-/UuCrDBWravcaMix4TdT+qlNdVwOM1Nck9kWx/vafXsdfj1ChfhOdfi3cy9SGBpWgTXwYCuboT/oYpJy3clqfw==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-is-hydrated": "0.1.0" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popover": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popover/-/react-popover-1.1.15.tgz", + "integrity": 
"sha512-kr0X2+6Yy/vJzLYJUPCZEc8SfQcf+1COFoAqauJm74umQhta9M7lNJHP7QQS3vkvcGLQUbWpMzwrXYwrYztHKA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-popper": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-popper/-/react-popper-1.2.8.tgz", + "integrity": "sha512-0NJQ4LFFUuWkE7Oxf0htBKS6zLkkjBH+hM1uk7Ng705ReR8m/uelduy1DBo0PyBXPKVnBA6YBlU94MBGXrSBCw==", + "license": "MIT", + "dependencies": { + "@floating-ui/react-dom": "^2.0.0", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-rect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + 
"peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-portal": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/@radix-ui/react-portal/-/react-portal-1.1.9.tgz", + "integrity": "sha512-bpIxvq03if6UNwXZ+HTK71JLh4APvnXntDc6XOX8UVq4XQOVl7lwok0AvIl+b8zgCw3fSaVTZMpAPPagXbKmHQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-presence": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@radix-ui/react-presence/-/react-presence-1.1.5.tgz", + "integrity": "sha512-/jfEwNDdQVBCNvjkGit4h6pMOzq8bHkopq458dPt2lMjx+eBQUohZNG9A7DtO/O5ukSbxuaNGXMjHicgwy6rQQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-primitive": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.1.3.tgz", + "integrity": "sha512-m9gTwRkhy2lvCPe6QJp4d3G1TYEUHn/FzJUtq9MjH46an1wJU+GdoGC5VLof8RX8Ft/DlpshApkhswDLZzHIcQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-slot": "1.2.3" + }, + "peerDependencies": { + 
"@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-progress": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-progress/-/react-progress-1.1.7.tgz", + "integrity": "sha512-vPdg/tF6YC/ynuBIJlk1mm7Le0VgW6ub6J2UWnTQ7/D23KXcPI1qy+0vBkgKgd38RCMJavBXpB83HPNFMTb0Fg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-radio-group": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-radio-group/-/react-radio-group-1.3.8.tgz", + "integrity": "sha512-VBKYIYImA5zsxACdisNQ3BjCBfmbGH3kQlnFVqlWU4tXwjy7cGX8ta80BcrO+WJXIn5iBylEH3K6ZTlee//lgQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + 
"peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-roving-focus": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-roving-focus/-/react-roving-focus-1.1.11.tgz", + "integrity": "sha512-7A6S9jSgm/S+7MdtNDSb+IU859vQqJ/QAtcYQcfFC6W8RS4IxIZDldLR0xqCFZ6DCyrQLjLPsxtTNch5jVA4lA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-scroll-area": { + "version": "1.2.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-scroll-area/-/react-scroll-area-1.2.10.tgz", + "integrity": "sha512-tAXIa1g3sM5CGpVT0uIbUx/U3Gs5N8T52IICuCtObaos1S8fzsrPXG5WObkQN3S6NVl6wKgPhAIiBGbWnvc97A==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + 
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-select": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-select/-/react-select-2.2.6.tgz", + "integrity": "sha512-I30RydO+bnn2PQztvo25tswPH+wFBjehVGtmagkU78yMdwTwVf12wnAOF+AeP8S2N8xD+5UPbGhkUfPyvT+mwQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3", + "aria-hidden": "^1.2.4", + "react-remove-scroll": "^2.6.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-separator": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-separator/-/react-separator-1.1.7.tgz", + "integrity": "sha512-0HEb8R9E8A+jZjvmFCy/J4xhbXy3TV+9XSnGJ3KvTtjlIUy/YQ/p6UYZvi7YbeoeXdyU9+Y3scizK6hkY37baA==", + "license": "MIT", + 
"dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slider": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slider/-/react-slider-1.3.6.tgz", + "integrity": "sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw==", + "license": "MIT", + "dependencies": { + "@radix-ui/number": "1.1.1", + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-slot": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz", + "integrity": "sha512-aeNmHnBxbi2St0au6VBVC7JXFlhLlOnvIIlePNniyUNAClzmtAUEY8/pBiK3iHjufOlwA+c20/8jngo7xcrg8A==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-compose-refs": "1.1.2" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + 
"@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-switch": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz", + "integrity": "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-previous": "1.1.1", + "@radix-ui/react-use-size": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tabs": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.13.tgz", + "integrity": "sha512-7xdcatg7/U+7+Udyoj2zodtI9H/IIopqo+YOIcZOq1nJwXWBZ9p8xiu5llXlekDbZkca79a/fozEYQXIA4sW6A==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toast": { + 
"version": "1.2.15", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toast/-/react-toast-1.2.15.tgz", + "integrity": "sha512-3OSz3TacUWy4WtOXV38DggwxoqJK4+eDkNMl5Z/MJZaoUPaP4/9lf81xXMe1I2ReTAptverZUpbPY4wWwWyL5g==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toggle": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toggle/-/react-toggle-1.1.10.tgz", + "integrity": "sha512-lS1odchhFTeZv3xwHH31YPObmJn8gOg7Lq12inrr0+BH/l3Tsq32VfjqH1oh80ARM3mlkfMic15n0kg4sD1poQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toggle-group": { + "version": "1.1.11", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-toggle-group/-/react-toggle-group-1.1.11.tgz", + "integrity": "sha512-5umnS0T8JQzQT6HbPyO7Hh9dgd82NmS36DQr+X/YJ9ctFNCiiQd6IJAYYZ33LUwm8M+taCz5t2ui29fHZc4Y6Q==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-toggle": "1.1.10", + "@radix-ui/react-use-controllable-state": "1.2.2" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-toolbar": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/@radix-ui/react-toolbar/-/react-toolbar-1.1.11.tgz", + "integrity": "sha512-4ol06/1bLoFu1nwUqzdD4Y5RZ9oDdKeiHIsntug54Hcr1pgaHiPqHFEaXI1IFP/EsOfROQZ8Mig9VTIRza6Tjg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-separator": "1.1.7", + "@radix-ui/react-toggle-group": "1.1.11" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-tooltip": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@radix-ui/react-tooltip/-/react-tooltip-1.2.8.tgz", + "integrity": 
"sha512-tY7sVt1yL9ozIxvmbtN5qtmH2krXcBCfjEiCgKGLqunJHvgvZG2Pcl2oQ3kbcZARb1BGEHdkLzcYGO8ynVlieg==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-id": "1.1.1", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-callback-ref": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz", + "integrity": "sha512-FkBMwD+qbGQeMu1cOHnuGB6x4yzPjho8ap5WtbEJ26umhgqVXbhekKUQO+hZEL1vU92a3wHwdp0HAcqAUF5iDg==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-controllable-state": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-controllable-state/-/react-use-controllable-state-1.2.2.tgz", + "integrity": "sha512-BjasUjixPFdS+NKkypcyyN5Pmg83Olst0+c6vGov0diwTEo6mgdqVR6hxcEgFuh4QrAs7Rc+9KuGJ9TVCj0Zzg==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || 
^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-effect-event": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-effect-event/-/react-use-effect-event-0.0.2.tgz", + "integrity": "sha512-Qp8WbZOBe+blgpuUT+lw2xheLP8q0oatc9UpmiemEICxGvFLYmHm9QowVZGHtJlGbS6A6yJ3iViad/2cVjnOiA==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-escape-keydown": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-escape-keydown/-/react-use-escape-keydown-1.1.1.tgz", + "integrity": "sha512-Il0+boE7w/XebUHyBjroE+DbByORGR9KKmITzbR7MyQ4akpORYP/ZmbhAr0DG7RmmBqoOnZdy2QlvajJ2QA59g==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-callback-ref": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-is-hydrated": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-is-hydrated/-/react-use-is-hydrated-0.1.0.tgz", + "integrity": "sha512-U+UORVEq+cTnRIaostJv9AGdV3G6Y+zbVd+12e18jQ5A3c0xL03IhnHuiU4UV69wolOQp5GfR58NW/EgdQhwOA==", + "license": "MIT", + "dependencies": { + "use-sync-external-store": "^1.5.0" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-layout-effect": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/@radix-ui/react-use-layout-effect/-/react-use-layout-effect-1.1.1.tgz", + "integrity": "sha512-RbJRS4UWQFkzHTTwVymMTUv8EqYhOp8dOOviLj2ugtTiXRaRQS7GLGxZTLL1jWhMeoSCf5zmcZkqTl9IiYfXcQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-previous": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-previous/-/react-use-previous-1.1.1.tgz", + "integrity": "sha512-2dHfToCj/pzca2Ck724OZ5L0EVrr3eHRNsG/b3xQJLA2hZpVCS99bLAX+hm1IHXDEnzU6by5z/5MIY794/a8NQ==", + "license": "MIT", + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-rect/-/react-use-rect-1.1.1.tgz", + "integrity": "sha512-QTYuDesS0VtuHNNvMh+CjlKJ4LJickCMUAqjlE3+j8w+RlRpwyX3apEQKGFzbZGdo7XNG1tXa+bQqIE7HIXT2w==", + "license": "MIT", + "dependencies": { + "@radix-ui/rect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/@radix-ui/react-use-size": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/react-use-size/-/react-use-size-1.1.1.tgz", + "integrity": "sha512-ewrXRDTAqAXlkl6t/fkXWNAhFX9I+CkKlw6zjEwk86RSPKwZr3xpBRso655aqYafwtnbpHLj6toFzmd6xdVptQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-use-layout-effect": "1.1.1" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + 
"optional": true + } + } + }, + "node_modules/@radix-ui/react-visually-hidden": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@radix-ui/react-visually-hidden/-/react-visually-hidden-1.2.3.tgz", + "integrity": "sha512-pzJq12tEaaIhqjbzpCuv/OypJY/BPavOofm+dbab+MHLajy277+1lLm6JFcGgF5eskJ6mquGirhXY2GD/8u8Ug==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/@radix-ui/rect": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@radix-ui/rect/-/rect-1.1.1.tgz", + "integrity": "sha512-HPwpGIzkl28mWyZqG52jiqDJ12waP11Pa1lGoiyUkIEuMLBP0oeK/C89esbXrxsky5we7dfd8U58nm0SgAWpVw==", + "license": "MIT" + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-rc.2", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-rc.2.tgz", + "integrity": "sha512-izyXV/v+cHiRfozX62W9htOAvwMo4/bXKDrQ+vom1L1qRuexPock/7VZDAhnpHCLNejd3NJ6hiab+tO0D44Rgw==", + "license": "MIT" + }, + "node_modules/@rollup/pluginutils": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/@rollup/pluginutils/-/pluginutils-5.3.0.tgz", + "integrity": "sha512-5EdhGZtnu3V88ces7s53hhfK5KSASnJZv8Lulpc04cWO3REESroJXg73DFsOmgbU2BhwV0E20bu2IDZb3VKW4Q==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^2.0.2", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "rollup": "^1.20.0||^2.0.0||^3.0.0||^4.0.0" + }, + "peerDependenciesMeta": { + "rollup": { + "optional": true + } + } + }, + "node_modules/@rollup/pluginutils/node_modules/estree-walker": { + "version": "2.0.2", + 
"resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": 
"sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": 
"sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": 
"sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": 
"sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": 
"sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@shikijs/core": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.29.2.tgz", + "integrity": "sha512-vju0lY9r27jJfOY4Z7+Rt/nIOjzJpZ3y+nYpqtUZInVoXQ/TJZcfGnNOGnKjFdVZb8qexiCuSlZRKcGfhhTTZQ==", + "license": "MIT", + "dependencies": { + "@shikijs/engine-javascript": "1.29.2", + "@shikijs/engine-oniguruma": "1.29.2", + "@shikijs/types": "1.29.2", + "@shikijs/vscode-textmate": "^10.0.1", + "@types/hast": "^3.0.4", + "hast-util-to-html": "^9.0.4" + } + }, + "node_modules/@shikijs/engine-javascript": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/engine-javascript/-/engine-javascript-1.29.2.tgz", + "integrity": "sha512-iNEZv4IrLYPv64Q6k7EPpOCE/nuvGiKl7zxdq0WFuRPF5PAE9PRo2JGq/d8crLusM59BRemJ4eOqrFrC4wiQ+A==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2", + "@shikijs/vscode-textmate": "^10.0.1", + "oniguruma-to-es": "^2.2.0" + } + }, + "node_modules/@shikijs/engine-oniguruma": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/engine-oniguruma/-/engine-oniguruma-1.29.2.tgz", + "integrity": "sha512-7iiOx3SG8+g1MnlzZVDYiaeHe7Ez2Kf2HrJzdmGwkRisT7r4rak0e655AcM/tF9JG/kg5fMNYlLLKglbN7gBqA==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2", + "@shikijs/vscode-textmate": "^10.0.1" + } + }, + "node_modules/@shikijs/langs": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/langs/-/langs-1.29.2.tgz", + "integrity": "sha512-FIBA7N3LZ+223U7cJDUYd5shmciFQlYkFXlkKVaHsCPgfVLiO+e12FmQE6Tf9vuyEsFe3dIl8qGWKXgEHL9wmQ==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2" + } + }, + "node_modules/@shikijs/rehype": { + "version": "1.29.2", + "resolved": 
"https://registry.npmjs.org/@shikijs/rehype/-/rehype-1.29.2.tgz", + "integrity": "sha512-sxi53HZe5XDz0s2UqF+BVN/kgHPMS9l6dcacM4Ra3ZDzCJa5rDGJ+Ukpk4LxdD1+MITBM6hoLbPfGv9StV8a5Q==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2", + "@types/hast": "^3.0.4", + "hast-util-to-string": "^3.0.1", + "shiki": "1.29.2", + "unified": "^11.0.5", + "unist-util-visit": "^5.0.0" + } + }, + "node_modules/@shikijs/themes": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/themes/-/themes-1.29.2.tgz", + "integrity": "sha512-i9TNZlsq4uoyqSbluIcZkmPL9Bfi3djVxRnofUHwvx/h6SRW3cwgBC5SML7vsDcWyukY0eCzVN980rqP6qNl9g==", + "license": "MIT", + "dependencies": { + "@shikijs/types": "1.29.2" + } + }, + "node_modules/@shikijs/transformers": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.29.2.tgz", + "integrity": "sha512-NHQuA+gM7zGuxGWP9/Ub4vpbwrYCrho9nQCLcCPfOe3Yc7LOYwmSuhElI688oiqIXk9dlZwDiyAG9vPBTuPJMA==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "1.29.2", + "@shikijs/types": "1.29.2" + } + }, + "node_modules/@shikijs/twoslash": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/twoslash/-/twoslash-1.29.2.tgz", + "integrity": "sha512-2S04ppAEa477tiaLfGEn1QJWbZUmbk8UoPbAEw4PifsrxkBXtAtOflIZJNtuCwz8ptc/TPxy7CO7gW4Uoi6o/g==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "1.29.2", + "@shikijs/types": "1.29.2", + "twoslash": "^0.2.12" + } + }, + "node_modules/@shikijs/twoslash/node_modules/twoslash": { + "version": "0.2.12", + "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.2.12.tgz", + "integrity": "sha512-tEHPASMqi7kqwfJbkk7hc/4EhlrKCSLcur+TcvYki3vhIfaRMXnXjaYFgXpoZRbT6GdprD4tGuVBEmTpUgLBsw==", + "license": "MIT", + "dependencies": { + "@typescript/vfs": "^1.6.0", + "twoslash-protocol": "0.2.12" + }, + "peerDependencies": { + "typescript": "*" + } + }, + "node_modules/@shikijs/twoslash/node_modules/twoslash-protocol": { 
+ "version": "0.2.12", + "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.2.12.tgz", + "integrity": "sha512-5qZLXVYfZ9ABdjqbvPc4RWMr7PrpPaaDSeaYY55vl/w1j6H6kzsWK/urAEIXlzYlyrFmyz1UbwIt+AA0ck+wbg==", + "license": "MIT" + }, + "node_modules/@shikijs/types": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/@shikijs/types/-/types-1.29.2.tgz", + "integrity": "sha512-VJjK0eIijTZf0QSTODEXCqinjBn0joAHQ+aPSBzrv4O2d/QSbsMw+ZeSRx03kV34Hy7NzUvV/7NqfYGRLrASmw==", + "license": "MIT", + "dependencies": { + "@shikijs/vscode-textmate": "^10.0.1", + "@types/hast": "^3.0.4" + } + }, + "node_modules/@shikijs/vscode-textmate": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/@shikijs/vscode-textmate/-/vscode-textmate-10.0.2.tgz", + "integrity": "sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==", + "license": "MIT" + }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "license": "MIT" + }, + "node_modules/@tailwindcss/node": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.15.tgz", + "integrity": "sha512-HF4+7QxATZWY3Jr8OlZrBSXmwT3Watj0OogeDvdUY/ByXJHQ+LBtqA2brDb3sBxYslIFx6UP94BJ4X6a4L9Bmw==", + "license": "MIT", + "dependencies": { + "@jridgewell/remapping": "^2.3.4", + "enhanced-resolve": "^5.18.3", + "jiti": "^2.6.0", + "lightningcss": "1.30.2", + "magic-string": "^0.30.19", + "source-map-js": "^1.2.1", + "tailwindcss": "4.1.15" + } + }, + "node_modules/@tailwindcss/node/node_modules/tailwindcss": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.15.tgz", + "integrity": "sha512-k2WLnWkYFkdpRv+Oby3EBXIyQC8/s1HOFMBUViwtAh6Z5uAozeUSMQlIsn/c6Q2iJzqG6aJT3wdPaRNj70iYxQ==", + 
"license": "MIT" + }, + "node_modules/@tailwindcss/oxide": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.15.tgz", + "integrity": "sha512-krhX+UOOgnsUuks2SR7hFafXmLQrKxB4YyRTERuCE59JlYL+FawgaAlSkOYmDRJdf1Q+IFNDMl9iRnBW7QBDfQ==", + "license": "MIT", + "engines": { + "node": ">= 10" + }, + "optionalDependencies": { + "@tailwindcss/oxide-android-arm64": "4.1.15", + "@tailwindcss/oxide-darwin-arm64": "4.1.15", + "@tailwindcss/oxide-darwin-x64": "4.1.15", + "@tailwindcss/oxide-freebsd-x64": "4.1.15", + "@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.15", + "@tailwindcss/oxide-linux-arm64-gnu": "4.1.15", + "@tailwindcss/oxide-linux-arm64-musl": "4.1.15", + "@tailwindcss/oxide-linux-x64-gnu": "4.1.15", + "@tailwindcss/oxide-linux-x64-musl": "4.1.15", + "@tailwindcss/oxide-wasm32-wasi": "4.1.15", + "@tailwindcss/oxide-win32-arm64-msvc": "4.1.15", + "@tailwindcss/oxide-win32-x64-msvc": "4.1.15" + } + }, + "node_modules/@tailwindcss/oxide-android-arm64": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-android-arm64/-/oxide-android-arm64-4.1.15.tgz", + "integrity": "sha512-TkUkUgAw8At4cBjCeVCRMc/guVLKOU1D+sBPrHt5uVcGhlbVKxrCaCW9OKUIBv1oWkjh4GbunD/u/Mf0ql6kEA==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-arm64": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.15.tgz", + "integrity": "sha512-xt5XEJpn2piMSfvd1UFN6jrWXyaKCwikP4Pidcf+yfHTSzSpYhG3dcMktjNkQO3JiLCp+0bG0HoWGvz97K162w==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-darwin-x64": { + "version": "4.1.15", + "resolved": 
"https://registry.npmjs.org/@tailwindcss/oxide-darwin-x64/-/oxide-darwin-x64-4.1.15.tgz", + "integrity": "sha512-TnWaxP6Bx2CojZEXAV2M01Yl13nYPpp0EtGpUrY+LMciKfIXiLL2r/SiSRpagE5Fp2gX+rflp/Os1VJDAyqymg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-freebsd-x64": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-freebsd-x64/-/oxide-freebsd-x64-4.1.15.tgz", + "integrity": "sha512-quISQDWqiB6Cqhjc3iWptXVZHNVENsWoI77L1qgGEHNIdLDLFnw3/AfY7DidAiiCIkGX/MjIdB3bbBZR/G2aJg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm-gnueabihf": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm-gnueabihf/-/oxide-linux-arm-gnueabihf-4.1.15.tgz", + "integrity": "sha512-ObG76+vPlab65xzVUQbExmDU9FIeYLQ5k2LrQdR2Ud6hboR+ZobXpDoKEYXf/uOezOfIYmy2Ta3w0ejkTg9yxg==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-gnu": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-gnu/-/oxide-linux-arm64-gnu-4.1.15.tgz", + "integrity": "sha512-4WbBacRmk43pkb8/xts3wnOZMDKsPFyEH/oisCm2q3aLZND25ufvJKcDUpAu0cS+CBOL05dYa8D4U5OWECuH/Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-arm64-musl": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-arm64-musl/-/oxide-linux-arm64-musl-4.1.15.tgz", + "integrity": "sha512-AbvmEiteEj1nf42nE8skdHv73NoR+EwXVSgPY6l39X12Ex8pzOwwfi3Kc8GAmjsnsaDEbk+aj9NyL3UeyHcTLg==", + "cpu": [ + "arm64" + ], + "license": 
"MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-gnu": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-gnu/-/oxide-linux-x64-gnu-4.1.15.tgz", + "integrity": "sha512-+rzMVlvVgrXtFiS+ES78yWgKqpThgV19ISKD58Ck+YO5pO5KjyxLt7AWKsWMbY0R9yBDC82w6QVGz837AKQcHg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-linux-x64-musl": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-linux-x64-musl/-/oxide-linux-x64-musl-4.1.15.tgz", + "integrity": "sha512-fPdEy7a8eQN9qOIK3Em9D3TO1z41JScJn8yxl/76mp4sAXFDfV4YXxsiptJcOwy6bGR+70ZSwFIZhTXzQeqwQg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-wasm32-wasi": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-wasm32-wasi/-/oxide-wasm32-wasi-4.1.15.tgz", + "integrity": "sha512-sJ4yd6iXXdlgIMfIBXuVGp/NvmviEoMVWMOAGxtxhzLPp9LOj5k0pMEMZdjeMCl4C6Up+RM8T3Zgk+BMQ0bGcQ==", + "bundleDependencies": [ + "@napi-rs/wasm-runtime", + "@emnapi/core", + "@emnapi/runtime", + "@tybys/wasm-util", + "@emnapi/wasi-threads", + "tslib" + ], + "cpu": [ + "wasm32" + ], + "license": "MIT", + "optional": true, + "dependencies": { + "@emnapi/core": "^1.5.0", + "@emnapi/runtime": "^1.5.0", + "@emnapi/wasi-threads": "^1.1.0", + "@napi-rs/wasm-runtime": "^1.0.7", + "@tybys/wasm-util": "^0.10.1", + "tslib": "^2.4.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@tailwindcss/oxide-win32-arm64-msvc": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-arm64-msvc/-/oxide-win32-arm64-msvc-4.1.15.tgz", + "integrity": 
"sha512-sJGE5faXnNQ1iXeqmRin7Ds/ru2fgCiaQZQQz3ZGIDtvbkeV85rAZ0QJFMDg0FrqsffZG96H1U9AQlNBRLsHVg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/oxide-win32-x64-msvc": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/oxide-win32-x64-msvc/-/oxide-win32-x64-msvc-4.1.15.tgz", + "integrity": "sha512-NLeHE7jUV6HcFKS504bpOohyi01zPXi2PXmjFfkzTph8xRxDdxkRsXm/xDO5uV5K3brrE1cCwbUYmFUSHR3u1w==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 10" + } + }, + "node_modules/@tailwindcss/vite": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/@tailwindcss/vite/-/vite-4.1.15.tgz", + "integrity": "sha512-B6s60MZRTUil+xKoZoGe6i0Iar5VuW+pmcGlda2FX+guDuQ1G1sjiIy1W0frneVpeL/ZjZ4KEgWZHNrIm++2qA==", + "license": "MIT", + "dependencies": { + "@tailwindcss/node": "4.1.15", + "@tailwindcss/oxide": "4.1.15", + "tailwindcss": "4.1.15" + }, + "peerDependencies": { + "vite": "^5.2.0 || ^6 || ^7" + } + }, + "node_modules/@tailwindcss/vite/node_modules/tailwindcss": { + "version": "4.1.15", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.15.tgz", + "integrity": "sha512-k2WLnWkYFkdpRv+Oby3EBXIyQC8/s1HOFMBUViwtAh6Z5uAozeUSMQlIsn/c6Q2iJzqG6aJT3wdPaRNj70iYxQ==", + "license": "MIT" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": 
"https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/d3": { + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/@types/d3/-/d3-7.4.3.tgz", + "integrity": "sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==", + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/d3-axis": "*", + "@types/d3-brush": "*", + "@types/d3-chord": "*", + "@types/d3-color": "*", + "@types/d3-contour": "*", + "@types/d3-delaunay": "*", + "@types/d3-dispatch": "*", + "@types/d3-drag": "*", + "@types/d3-dsv": "*", + "@types/d3-ease": "*", + "@types/d3-fetch": "*", + "@types/d3-force": "*", + "@types/d3-format": "*", + "@types/d3-geo": "*", + "@types/d3-hierarchy": "*", + "@types/d3-interpolate": "*", + "@types/d3-path": "*", + "@types/d3-polygon": "*", + "@types/d3-quadtree": "*", + "@types/d3-random": "*", + "@types/d3-scale": "*", + "@types/d3-scale-chromatic": "*", + "@types/d3-selection": "*", + "@types/d3-shape": "*", + "@types/d3-time": "*", + 
"@types/d3-time-format": "*", + "@types/d3-timer": "*", + "@types/d3-transition": "*", + "@types/d3-zoom": "*" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": "MIT" + }, + "node_modules/@types/d3-axis": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-axis/-/d3-axis-3.0.6.tgz", + "integrity": "sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-brush": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-brush/-/d3-brush-3.0.6.tgz", + "integrity": "sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-chord": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-chord/-/d3-chord-3.0.6.tgz", + "integrity": "sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-contour": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/@types/d3-contour/-/d3-contour-3.0.6.tgz", + "integrity": "sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==", + "license": "MIT", + "dependencies": { + "@types/d3-array": "*", + "@types/geojson": "*" + } + }, + 
"node_modules/@types/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==", + "license": "MIT" + }, + "node_modules/@types/d3-dispatch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dispatch/-/d3-dispatch-3.0.7.tgz", + "integrity": "sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==", + "license": "MIT" + }, + "node_modules/@types/d3-drag": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-drag/-/d3-drag-3.0.7.tgz", + "integrity": "sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-dsv": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-dsv/-/d3-dsv-3.0.7.tgz", + "integrity": "sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-fetch": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@types/d3-fetch/-/d3-fetch-3.0.7.tgz", + "integrity": "sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==", + "license": "MIT", + "dependencies": { + "@types/d3-dsv": "*" + } + }, + "node_modules/@types/d3-force": { + "version": "3.0.10", + "resolved": "https://registry.npmjs.org/@types/d3-force/-/d3-force-3.0.10.tgz", + "integrity": 
"sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==", + "license": "MIT" + }, + "node_modules/@types/d3-format": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-format/-/d3-format-3.0.4.tgz", + "integrity": "sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==", + "license": "MIT" + }, + "node_modules/@types/d3-geo": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-geo/-/d3-geo-3.1.0.tgz", + "integrity": "sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==", + "license": "MIT", + "dependencies": { + "@types/geojson": "*" + } + }, + "node_modules/@types/d3-hierarchy": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/@types/d3-hierarchy/-/d3-hierarchy-3.1.7.tgz", + "integrity": "sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-polygon": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-polygon/-/d3-polygon-3.0.2.tgz", + "integrity": "sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==", + "license": "MIT" + }, + "node_modules/@types/d3-quadtree": { + "version": "3.0.6", + "resolved": 
"https://registry.npmjs.org/@types/d3-quadtree/-/d3-quadtree-3.0.6.tgz", + "integrity": "sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==", + "license": "MIT" + }, + "node_modules/@types/d3-random": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-random/-/d3-random-3.0.3.tgz", + "integrity": "sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==", + "license": "MIT" + }, + "node_modules/@types/d3-selection": { + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/@types/d3-selection/-/d3-selection-3.0.11.tgz", + "integrity": "sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==", + "license": "MIT" + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + 
"license": "MIT" + }, + "node_modules/@types/d3-time-format": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time-format/-/d3-time-format-4.0.3.tgz", + "integrity": "sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/d3-transition": { + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-transition/-/d3-transition-3.0.9.tgz", + "integrity": "sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==", + "license": "MIT", + "dependencies": { + "@types/d3-selection": "*" + } + }, + "node_modules/@types/d3-zoom": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-zoom/-/d3-zoom-3.0.8.tgz", + "integrity": "sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==", + "license": "MIT", + "dependencies": { + "@types/d3-interpolate": "*", + "@types/d3-selection": "*" + } + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "license": "MIT", + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "license": "MIT" + }, + "node_modules/@types/estree-jsx": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/@types/estree-jsx/-/estree-jsx-1.0.5.tgz", + "integrity": "sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==", + "license": "MIT", + "dependencies": { + "@types/estree": "*" + } + }, + "node_modules/@types/geojson": { + "version": "7946.0.16", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.16.tgz", + "integrity": "sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==", + "license": "MIT" + }, + "node_modules/@types/hast": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz", + "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mdast": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz", + "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==", + "license": "MIT", + "dependencies": { + "@types/unist": "*" + } + }, + "node_modules/@types/mdx": { + "version": "2.0.13", + "resolved": "https://registry.npmjs.org/@types/mdx/-/mdx-2.0.13.tgz", + "integrity": "sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==", + "license": "MIT" + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "25.2.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.2.1.tgz", + "integrity": "sha512-CPrnr8voK8vC6eEtyRzvMpgp3VyVRhgclonE7qYi6P9sXwYb59ucfrnmFBTaP0yUi8Gk4yZg/LlTJULGxvTNsg==", + "license": "MIT", + "dependencies": { + "undici-types": "~7.16.0" 
+ } + }, + "node_modules/@types/react": { + "version": "19.2.13", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.13.tgz", + "integrity": "sha512-KkiJeU6VbYbUOp5ITMIc7kBfqlYkKA5KhEHVrGMmUUMt7NeaZg65ojdPk+FtNrBAOXNVM5QM72jnADjM+XVRAQ==", + "license": "MIT", + "dependencies": { + "csstype": "^3.2.2" + } + }, + "node_modules/@types/trusted-types": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.7.tgz", + "integrity": "sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==", + "license": "MIT", + "optional": true + }, + "node_modules/@types/unist": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", + "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==", + "license": "MIT" + }, + "node_modules/@typescript/vfs": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/@typescript/vfs/-/vfs-1.6.2.tgz", + "integrity": "sha512-hoBwJwcbKHmvd2QVebiytN1aELvpk9B74B4L1mFm/XT1Q/VOYAWl2vQ9AWRFtQq8zmz6enTpfTV8WRc4ATjW/g==", + "license": "MIT", + "dependencies": { + "debug": "^4.1.1" + }, + "peerDependencies": { + "typescript": "*" + } + }, + "node_modules/@ungap/structured-clone": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz", + "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==", + "license": "ISC" + }, + "node_modules/@vanilla-extract/babel-plugin-debug-ids": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@vanilla-extract/babel-plugin-debug-ids/-/babel-plugin-debug-ids-1.2.2.tgz", + "integrity": "sha512-MeDWGICAF9zA/OZLOKwhoRlsUW+fiMwnfuOAqFVohL31Agj7Q/RBWAYweqjHLgFBCsdnr6XIfwjJnmb2znEWxw==", + "license": "MIT", + "dependencies": { + "@babel/core": "^7.23.9" + } + }, + 
"node_modules/@vanilla-extract/compiler": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@vanilla-extract/compiler/-/compiler-0.3.4.tgz", + "integrity": "sha512-W9HXf9EAccpE1vEIATvSoBVj/bQnmHfYHfDJjUN8dcOHW6oMcnoGTqweDM9I66BHqlNH4d0IsaeZKSViOv7K4w==", + "license": "MIT", + "dependencies": { + "@vanilla-extract/css": "^1.18.0", + "@vanilla-extract/integration": "^8.0.7", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0", + "vite-node": "^3.2.2" + } + }, + "node_modules/@vanilla-extract/css": { + "version": "1.18.0", + "resolved": "https://registry.npmjs.org/@vanilla-extract/css/-/css-1.18.0.tgz", + "integrity": "sha512-/p0dwOjr0o8gE5BRQ5O9P0u/2DjUd6Zfga2JGmE4KaY7ZITWMszTzk4x4CPlM5cKkRr2ZGzbE6XkuPNfp9shSQ==", + "license": "MIT", + "dependencies": { + "@emotion/hash": "^0.9.0", + "@vanilla-extract/private": "^1.0.9", + "css-what": "^6.1.0", + "cssesc": "^3.0.0", + "csstype": "^3.2.3", + "dedent": "^1.5.3", + "deep-object-diff": "^1.1.9", + "deepmerge": "^4.2.2", + "lru-cache": "^10.4.3", + "media-query-parser": "^2.0.2", + "modern-ahocorasick": "^1.0.0", + "picocolors": "^1.0.0" + } + }, + "node_modules/@vanilla-extract/dynamic": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@vanilla-extract/dynamic/-/dynamic-2.1.5.tgz", + "integrity": "sha512-QGIFGb1qyXQkbzx6X6i3+3LMc/iv/ZMBttMBL+Wm/DetQd36KsKsFg5CtH3qy+1hCA/5w93mEIIAiL4fkM8ycw==", + "license": "MIT", + "dependencies": { + "@vanilla-extract/private": "^1.0.9" + } + }, + "node_modules/@vanilla-extract/integration": { + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/@vanilla-extract/integration/-/integration-8.0.7.tgz", + "integrity": "sha512-ILob4F9cEHXpbWAVt3Y2iaQJpqYq/c/5TJC8Fz58C2XmX3QW2Y589krvViiyJhQfydCGK3EbwPQhVFjQaBeKfg==", + "license": "MIT", + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/plugin-syntax-typescript": "^7.23.3", + "@vanilla-extract/babel-plugin-debug-ids": "^1.2.2", + "@vanilla-extract/css": "^1.18.0", + "dedent": "^1.5.3", + "esbuild": 
"npm:esbuild@>=0.17.6 <0.28.0", + "eval": "0.1.8", + "find-up": "^5.0.0", + "javascript-stringify": "^2.0.1", + "mlly": "^1.4.2" + } + }, + "node_modules/@vanilla-extract/private": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@vanilla-extract/private/-/private-1.0.9.tgz", + "integrity": "sha512-gT2jbfZuaaCLrAxwXbRgIhGhcXbRZCG3v4TTUnjw0EJ7ArdBRxkq4msNJkbuRkCgfIK5ATmprB5t9ljvLeFDEA==", + "license": "MIT" + }, + "node_modules/@vanilla-extract/vite-plugin": { + "version": "5.1.4", + "resolved": "https://registry.npmjs.org/@vanilla-extract/vite-plugin/-/vite-plugin-5.1.4.tgz", + "integrity": "sha512-fTYNKUK3n4ApkUf2FEcO7mpqNKEHf9kDGg8DXlkqHtPxgwPhjuaajmDfQCSBsNgnA2SLI+CB5EO6kLQuKsw2Rw==", + "license": "MIT", + "dependencies": { + "@vanilla-extract/compiler": "^0.3.4", + "@vanilla-extract/integration": "^8.0.7" + }, + "peerDependencies": { + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-5.1.3.tgz", + "integrity": "sha512-NVUnA6gQCl8jfoYqKqQU5Clv0aPw14KkZYCsX6T9Lfu9slI0LOU10OTwFHS/WmptsMMpshNd/1tuWsHQ2Uk+cg==", + "license": "MIT", + "dependencies": { + "@babel/core": "^7.29.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-rc.2", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.18.0" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + 
"resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ansi-regex": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz", + "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/aria-hidden": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/aria-hidden/-/aria-hidden-1.2.6.tgz", + "integrity": "sha512-ik3ZgC9dY/lYVVM++OISsaYDeg1tb0VtP5uL3ouh1koGOaUMDPpbFIei4JkFimWUFPn90sbMNMXQAIVOlnYKJA==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/astring": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/astring/-/astring-1.9.0.tgz", + "integrity": "sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==", + "license": "MIT", + "bin": { + "astring": "bin/astring" + } + }, + "node_modules/autoprefixer": { + "version": "10.4.24", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz", + "integrity": "sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001766", + "fraction.js": "^5.3.4", + "picocolors": 
"^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/bail": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz", + "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/baseline-browser-mapping": { + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + "integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/bcp-47-match": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/bcp-47-match/-/bcp-47-match-2.0.3.tgz", + "integrity": "sha512-JtTezzbAibu8G0R9op9zb3vcWZd9JF6M0xOYGPn0fNCd7wOpRB1mU2mH9T8gaBGbAAyIIVgB2G7xG0GP98zMAQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/bl": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", + "integrity": 
"sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", + "license": "MIT", + "dependencies": { + "buffer": "^6.0.3", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "license": "ISC" + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.2.1" + } + }, + "node_modules/bytes": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cac": { + "version": "6.7.14", + "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", + "integrity": "sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001768", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001768.tgz", + "integrity": "sha512-qY3aDRZC5nWPgHUgIB84WL+nySuo19wk0VJpp/XI9T34lrvkyhRvNVOFJOp2kxClQhiFBu+TaUSudf6oa3vkSA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/ccount": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", + "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chalk": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", + "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": 
"sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-html4": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz", + "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-entities-legacy": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz", + "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/character-reference-invalid": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/character-reference-invalid/-/character-reference-invalid-2.0.1.tgz", + "integrity": "sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/chevrotain": { + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/chevrotain/-/chevrotain-11.0.3.tgz", + "integrity": "sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==", + "license": "Apache-2.0", + "dependencies": { + "@chevrotain/cst-dts-gen": "11.0.3", + "@chevrotain/gast": "11.0.3", + "@chevrotain/regexp-to-ast": "11.0.3", + "@chevrotain/types": "11.0.3", + "@chevrotain/utils": "11.0.3", + "lodash-es": "4.17.21" + } + }, + "node_modules/chevrotain-allstar": { + "version": "0.3.1", + "resolved": 
"https://registry.npmjs.org/chevrotain-allstar/-/chevrotain-allstar-0.3.1.tgz", + "integrity": "sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==", + "license": "MIT", + "dependencies": { + "lodash-es": "^4.17.21" + }, + "peerDependencies": { + "chevrotain": "^11.0.0" + } + }, + "node_modules/chevrotain/node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/chroma-js": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/chroma-js/-/chroma-js-3.2.0.tgz", + "integrity": "sha512-os/OippSlX1RlWWr+QDPcGUZs0uoqr32urfxESG9U93lhUfbnlyckte84Q8P1UQY/qth983AS1JONKmLS4T0nw==", + "license": "(BSD-3-Clause AND Apache-2.0)" + }, + "node_modules/cli-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", + "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", + "license": "MIT", + "dependencies": { + "restore-cursor": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz", + "integrity": "sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==", + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", 
+ "engines": { + "node": ">=6" + } + }, + "node_modules/collapse-white-space": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-2.1.0.tgz", + "integrity": "sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/comma-separated-tokens": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz", + "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/compressible": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", + "license": "MIT", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/compression": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/compression/-/compression-1.8.1.tgz", + "integrity": "sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "compressible": "~2.0.18", + "debug": "2.6.9", + "negotiator": "~0.6.4", + "on-headers": "~1.1.0", + "safe-buffer": "5.2.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + 
"node_modules/compression/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/compression/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/confbox": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/confbox/-/confbox-0.1.8.tgz", + "integrity": "sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==", + "license": "MIT" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "license": "MIT" + }, + "node_modules/cookie": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-1.1.1.tgz", + "integrity": "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "license": "MIT", + "dependencies": { + "layout-base": "^1.0.0" + } + }, + "node_modules/create-vocs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/create-vocs/-/create-vocs-1.0.0.tgz", + "integrity": 
"sha512-Lv1Bd3WZEgwG4nrogkM54m8viW+TWPlGivLyEi7aNb3cuKPsEfMDZ/kTbo87fzOGtsZ2yh7scO54ZmVhhgBgTw==", + "dependencies": { + "@clack/prompts": "^0.7.0", + "cac": "^6.7.14", + "detect-package-manager": "^3.0.2", + "fs-extra": "^11.3.0", + "picocolors": "^1.1.1" + }, + "bin": { + "create-vocs": "_lib/bin.js" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz", + "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==", + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-selector-parser": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/css-selector-parser/-/css-selector-parser-3.3.0.tgz", + "integrity": "sha512-Y2asgMGFqJKF4fq4xHDSlFYIkeVfRsm69lQC1q9kbEsH5XtnINTMrweLkjYMeaUgiXBy/uvKeO/a1JHTNnmB2g==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ], + "license": "MIT" + }, + "node_modules/css-what": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", + "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", + "license": "BSD-2-Clause", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": 
"https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" + }, + "node_modules/cytoscape": { + "version": "3.33.1", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.33.1.tgz", + "integrity": "sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "license": "MIT", + "dependencies": { + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", + "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "license": "MIT", + "dependencies": { + "cose-base": "^2.2.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/cose-base": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", + "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", + "license": "MIT", + "dependencies": { + "layout-base": "^2.0.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/layout-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", + "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", + "license": "MIT" + }, + "node_modules/d3": { + "version": "7.9.0", + 
"resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "license": "ISC", + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + 
"version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "license": "ISC", + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "license": "ISC", + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "license": "ISC", + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + 
}, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "license": "ISC", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "license": "ISC", + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", + 
"engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + 
"node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "license": "BSD-3-Clause", + "dependencies": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "license": "BSD-3-Clause", + "dependencies": { + "internmap": "^1.0.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "license": "BSD-3-Clause" + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "license": "BSD-3-Clause", + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", + "license": "ISC" + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": 
"sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dagre-d3-es": { + "version": "7.0.13", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.13.tgz", + "integrity": "sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==", + "license": "MIT", + "dependencies": { + "d3": "^7.9.0", + "lodash-es": "^4.17.21" + } + }, + "node_modules/dayjs": { + "version": "1.11.19", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.19.tgz", + "integrity": 
"sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==", + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.3.0.tgz", + "integrity": "sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==", + "license": "MIT", + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/dedent": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.1.tgz", + "integrity": "sha512-9JmrhGZpOlEgOLdQgSm0zxFaYoQon408V1v49aqTWuXENVlnCuY9JBZcXZiCsZQWDjTm5Qf/nIvAy77mXDAjEg==", + "license": "MIT", + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-object-diff": { + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/deep-object-diff/-/deep-object-diff-1.1.9.tgz", + "integrity": "sha512-Rn+RuwkmkDwCi2/oXOFS9Gsr5lJZu/yTGpK7wAaAIE75CC+LCGEZHpY6VQJa/RoJcrmaA/docWJZvYohlNkWPA==", + "license": "MIT" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "license": "MIT", + "engines": { + "node": 
">=0.10.0" + } + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "license": "ISC", + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, + "node_modules/detect-node-es": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/detect-node-es/-/detect-node-es-1.1.0.tgz", + "integrity": "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ==", + "license": "MIT" + }, + "node_modules/detect-package-manager": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/detect-package-manager/-/detect-package-manager-3.0.2.tgz", + "integrity": "sha512-8JFjJHutStYrfWwzfretQoyNGoZVW1Fsrp4JO9spa7h/fBfwgTMEIy4/LBzRDGsxwVPHU0q+T9YvwLDJoOApLQ==", + "license": "MIT", + "dependencies": { + "execa": "^5.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/direction": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/direction/-/direction-2.0.1.tgz", + "integrity": "sha512-9S6m9Sukh1cZNknO1CWAr2QAWsbKLafQiyM5gZ7VgXHeuaoUwffKN4q6NC4A/Mf9iiPlOXQEKW/Mv/mh9/3YFA==", + "license": "MIT", + "bin": { + "direction": "cli.js" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/dompurify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz", + "integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==", + "license": "(MPL-2.0 OR Apache-2.0)", + "optionalDependencies": { + "@types/trusted-types": "^2.0.7" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "license": "MIT" + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", + "license": "MIT" + }, + 
"node_modules/electron-to-chromium": { + "version": "1.5.286", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", + "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==", + "license": "ISC" + }, + "node_modules/emoji-regex": { + "version": "10.6.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz", + "integrity": "sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A==", + "license": "MIT" + }, + "node_modules/emoji-regex-xs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex-xs/-/emoji-regex-xs-1.0.0.tgz", + "integrity": "sha512-LRlerrMYoIDrT6jgpeZ2YYl/L8EulRTt5hQcYjy5AInh7HWXKimpqx68aknBFpGL2+/IcogTcaydJEgaTmOpDg==", + "license": "MIT" + }, + "node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/enhanced-resolve": { + "version": "5.19.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.19.0.tgz", + "integrity": "sha512-phv3E1Xl4tQOShqSte26C7Fl84EwUdZsyOuSSk9qtAGyyQs2s3jJzComh+Abf4g187lUUAvH+H26omrqia2aGg==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.3.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/entities": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/es-module-lexer": { + "version": 
"1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": "sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", + "license": "MIT" + }, + "node_modules/esast-util-from-estree": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/esast-util-from-estree/-/esast-util-from-estree-2.0.0.tgz", + "integrity": "sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "devlop": "^1.0.0", + "estree-util-visit": "^2.0.0", + "unist-util-position-from-estree": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/esast-util-from-js": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/esast-util-from-js/-/esast-util-from-js-2.0.1.tgz", + "integrity": "sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "acorn": "^8.0.0", + "esast-util-from-estree": "^2.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/esbuild": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.2.tgz", + "integrity": "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.27.2", + "@esbuild/android-arm": "0.27.2", + "@esbuild/android-arm64": "0.27.2", + "@esbuild/android-x64": "0.27.2", + "@esbuild/darwin-arm64": "0.27.2", + "@esbuild/darwin-x64": "0.27.2", + "@esbuild/freebsd-arm64": "0.27.2", + "@esbuild/freebsd-x64": 
"0.27.2", + "@esbuild/linux-arm": "0.27.2", + "@esbuild/linux-arm64": "0.27.2", + "@esbuild/linux-ia32": "0.27.2", + "@esbuild/linux-loong64": "0.27.2", + "@esbuild/linux-mips64el": "0.27.2", + "@esbuild/linux-ppc64": "0.27.2", + "@esbuild/linux-riscv64": "0.27.2", + "@esbuild/linux-s390x": "0.27.2", + "@esbuild/linux-x64": "0.27.2", + "@esbuild/netbsd-arm64": "0.27.2", + "@esbuild/netbsd-x64": "0.27.2", + "@esbuild/openbsd-arm64": "0.27.2", + "@esbuild/openbsd-x64": "0.27.2", + "@esbuild/openharmony-arm64": "0.27.2", + "@esbuild/sunos-x64": "0.27.2", + "@esbuild/win32-arm64": "0.27.2", + "@esbuild/win32-ia32": "0.27.2", + "@esbuild/win32-x64": "0.27.2" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/estree-util-attach-comments": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz", + "integrity": "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==", + "license": "MIT", + "dependencies": { + 
"@types/estree": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-build-jsx": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz", + "integrity": "sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "estree-walker": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "license": "MIT", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-scope": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/estree-util-scope/-/estree-util-scope-1.0.0.tgz", + "integrity": "sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-to-js": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz", + "integrity": "sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "astring": "^1.8.0", + "source-map": "^0.7.0" + }, + "funding": { + "type": "opencollective", + 
"url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-util-value-to-estree": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.5.0.tgz", + "integrity": "sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" + } + }, + "node_modules/estree-util-visit": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", + "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eval": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", + "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", + "dependencies": { + "@types/node": "*", + "require-like": ">= 0.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "license": "MIT" + }, + "node_modules/fault": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", + "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", + "license": "MIT", + "dependencies": { + "format": "^0.2.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + 
"node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/format": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", + "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-extra": { + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", + "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": 
"https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-nonce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-nonce/-/get-nonce-1.0.1.tgz", + "integrity": "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/github-slugger": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz", + "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==", + "license": "ISC" + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "license": "ISC" + }, + "node_modules/hachure-fill": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/hachure-fill/-/hachure-fill-0.5.2.tgz", + "integrity": "sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==", + "license": "MIT" + }, + "node_modules/hast-util-classnames": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-classnames/-/hast-util-classnames-3.0.0.tgz", + "integrity": 
"sha512-tI3JjoGDEBVorMAWK4jNRsfLMYmih1BUOG3VV36pH36njs1IEl7xkNrVTD2mD2yYHmQCa5R/fj61a8IAF4bRaQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-dom": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/hast-util-from-dom/-/hast-util-from-dom-5.0.1.tgz", + "integrity": "sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==", + "license": "ISC", + "dependencies": { + "@types/hast": "^3.0.0", + "hastscript": "^9.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-dom/node_modules/hastscript": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-html": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.3.tgz", + "integrity": "sha512-CUSRHXyKjzHov8yKsQjGOElXy/3EKpyX56ELnkHH34vDVw1N1XSQ1ZcAvTyAPtGqLTuKP/uxM+aLkSPqF/EtMw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "devlop": "^1.1.0", + "hast-util-from-parse5": "^8.0.0", + "parse5": "^7.0.0", + "vfile": "^6.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + 
"node_modules/hast-util-from-html-isomorphic": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hast-util-from-html-isomorphic/-/hast-util-from-html-isomorphic-2.0.0.tgz", + "integrity": "sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-from-dom": "^5.0.0", + "hast-util-from-html": "^2.0.0", + "unist-util-remove-position": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5": { + "version": "8.0.3", + "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.3.tgz", + "integrity": "sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "hastscript": "^9.0.0", + "property-information": "^7.0.0", + "vfile": "^6.0.0", + "vfile-location": "^5.0.0", + "web-namespaces": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-from-parse5/node_modules/hastscript": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.1.tgz", + "integrity": "sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-has-property": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-has-property/-/hast-util-has-property-3.0.0.tgz", + 
"integrity": "sha512-MNilsvEKLFpV604hwfhVStK0usFY/QmM5zX16bo7EjnAEGofr5YyI37kzopBlZJkHD4t887i+q/C8/tr5Q94cA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-heading-rank": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-heading-rank/-/hast-util-heading-rank-3.0.0.tgz", + "integrity": "sha512-EJKb8oMUXVHcWZTDepnr+WNbfnXKFNf9duMesmr4S8SXTJBJ9M4Yok08pu9vxdJwdlGRhVumk9mEhkEvKGifwA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-is-element": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz", + "integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-parse-selector": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz", + "integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-select": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/hast-util-select/-/hast-util-select-6.0.4.tgz", + "integrity": "sha512-RqGS1ZgI0MwxLaKLDxjprynNzINEkRHY2i8ln4DDjgv9ZhcYVIHN9rlpiYsqtFwrgpYU361SyWDQcGNIBVu3lw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + 
"bcp-47-match": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "css-selector-parser": "^3.0.0", + "devlop": "^1.0.0", + "direction": "^2.0.0", + "hast-util-has-property": "^3.0.0", + "hast-util-to-string": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "nth-check": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-estree": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/hast-util-to-estree/-/hast-util-to-estree-3.1.3.tgz", + "integrity": "sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-attach-comments": "^3.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-html": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz", + "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-whitespace": "^3.0.0", + "html-void-elements": "^3.0.0", + "mdast-util-to-hast": "^13.0.0", + 
"property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "stringify-entities": "^4.0.0", + "zwitch": "^2.0.4" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-jsx-runtime": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/hast-util-to-jsx-runtime/-/hast-util-to-jsx-runtime-2.3.6.tgz", + "integrity": "sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "hast-util-whitespace": "^3.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "property-information": "^7.0.0", + "space-separated-tokens": "^2.0.0", + "style-to-js": "^1.0.0", + "unist-util-position": "^5.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-string": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/hast-util-to-string/-/hast-util-to-string-3.0.1.tgz", + "integrity": "sha512-XelQVTDWvqcl3axRfI0xSeoVKzyIFPwsAGSLIsKdJKQMXDYJS4WYrBNF/8J7RdhIcFI2BOHgAifggsvsxp/3+A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-to-text": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz", + "integrity": "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/unist": "^3.0.0", + "hast-util-is-element": "^3.0.0", 
+ "unist-util-find-after": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hast-util-whitespace": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz", + "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-8.0.0.tgz", + "integrity": "sha512-dMOtzCEd3ABUeSIISmrETiKuyydk1w0pa+gE/uormcTpSYuaNJPbX1NU3JLyscSLjwAQM8bWMhhIlnCqnRvDTw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "comma-separated-tokens": "^2.0.0", + "hast-util-parse-selector": "^4.0.0", + "property-information": "^6.0.0", + "space-separated-tokens": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/hastscript/node_modules/property-information": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz", + "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/hono": { + "version": "4.11.7", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz", + "integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==", + "license": "MIT", + "engines": { + "node": ">=16.9.0" + } + }, + "node_modules/html-void-elements": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz", + "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/http-errors": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz", + "integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==", + "license": "MIT", + "dependencies": { + "depd": "~2.0.0", + "inherits": "~2.0.4", + "setprototypeof": "~1.2.0", + "statuses": "~2.0.2", + "toidentifier": "~1.0.1" + }, + "engines": { + "node": ">= 0.8" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": 
"https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "license": "ISC" + }, + "node_modules/inline-style-parser": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.2.7.tgz", + "integrity": "sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==", + "license": "MIT" + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-alphabetical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphabetical/-/is-alphabetical-2.0.1.tgz", + "integrity": "sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-alphanumerical": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-alphanumerical/-/is-alphanumerical-2.0.1.tgz", + "integrity": "sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==", + "license": "MIT", + "dependencies": { + "is-alphabetical": "^2.0.0", + "is-decimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-decimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-decimal/-/is-decimal-2.0.1.tgz", + "integrity": 
"sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-hexadecimal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-hexadecimal/-/is-hexadecimal-2.0.1.tgz", + "integrity": "sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/is-interactive": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", + "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-plain-obj": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz", + "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", + "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", 
+ "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "license": "ISC" + }, + "node_modules/javascript-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.1.0.tgz", + "integrity": "sha512-JVAfqNPTvNq3sB/VHQJAFxN/sPgKnsKrCwyRt15zwNCdrMMJDdcEOdubuy+DuJYYdm0ox1J4uzEuYKkN+9yhVg==", + "license": "MIT" + }, + "node_modules/jiti": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.6.1.tgz", + "integrity": "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==", + "license": "MIT", + "bin": { + "jiti": "lib/jiti-cli.mjs" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonfile": { + "version": "6.2.0", + "resolved": 
"https://registry.npmjs.org/jsonfile/-/jsonfile-6.2.0.tgz", + "integrity": "sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==", + "license": "MIT", + "dependencies": { + "universalify": "^2.0.0" + }, + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/katex": { + "version": "0.16.28", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.28.tgz", + "integrity": "sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "license": "MIT", + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" + }, + "node_modules/langium": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/langium/-/langium-3.3.1.tgz", + "integrity": "sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==", + "license": "MIT", + "dependencies": { + "chevrotain": "~11.0.3", + "chevrotain-allstar": "~0.3.0", + "vscode-languageserver": "~9.0.1", + "vscode-languageserver-textdocument": "~1.0.11", + "vscode-uri": "~3.0.8" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": 
"sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "license": "MIT" + }, + "node_modules/lightningcss": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.2.tgz", + "integrity": "sha512-utfs7Pr5uJyyvDETitgsaqSyjCb2qNRAtuqUeWIAKztsOYdcACf2KtARYXg2pSvhkt+9NfoaNY7fxjl6nuMjIQ==", + "license": "MPL-2.0", + "dependencies": { + "detect-libc": "^2.0.3" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + }, + "optionalDependencies": { + "lightningcss-android-arm64": "1.30.2", + "lightningcss-darwin-arm64": "1.30.2", + "lightningcss-darwin-x64": "1.30.2", + "lightningcss-freebsd-x64": "1.30.2", + "lightningcss-linux-arm-gnueabihf": "1.30.2", + "lightningcss-linux-arm64-gnu": "1.30.2", + "lightningcss-linux-arm64-musl": "1.30.2", + "lightningcss-linux-x64-gnu": "1.30.2", + "lightningcss-linux-x64-musl": "1.30.2", + "lightningcss-win32-arm64-msvc": "1.30.2", + "lightningcss-win32-x64-msvc": "1.30.2" + } + }, + "node_modules/lightningcss-android-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-android-arm64/-/lightningcss-android-arm64-1.30.2.tgz", + "integrity": "sha512-BH9sEdOCahSgmkVhBLeU7Hc9DWeZ1Eb6wNS6Da8igvUwAe0sqROHddIlvU06q3WyXVEOYDZ6ykBZQnjTbmo4+A==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-arm64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.2.tgz", + "integrity": "sha512-ylTcDJBN3Hp21TdhRT5zBOIi73P6/W0qwvlFEk22fkdXchtNTOU4Qc37SkzV+EKYxLouZ6M4LG9NfZ1qkhhBWA==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + 
"darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-darwin-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-darwin-x64/-/lightningcss-darwin-x64-1.30.2.tgz", + "integrity": "sha512-oBZgKchomuDYxr7ilwLcyms6BCyLn0z8J0+ZZmfpjwg9fRVZIR5/GMXd7r9RH94iDhld3UmSjBM6nXWM2TfZTQ==", + "cpu": [ + "x64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-freebsd-x64": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-freebsd-x64/-/lightningcss-freebsd-x64-1.30.2.tgz", + "integrity": "sha512-c2bH6xTrf4BDpK8MoGG4Bd6zAMZDAXS569UxCAGcA7IKbHNMlhGQ89eRmvpIUGfKWNVdbhSbkQaWhEoMGmGslA==", + "cpu": [ + "x64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm-gnueabihf": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm-gnueabihf/-/lightningcss-linux-arm-gnueabihf-1.30.2.tgz", + "integrity": "sha512-eVdpxh4wYcm0PofJIZVuYuLiqBIakQ9uFZmipf6LF/HRj5Bgm0eb3qL/mr1smyXIS1twwOxNWndd8z0E374hiA==", + "cpu": [ + "arm" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-gnu/-/lightningcss-linux-arm64-gnu-1.30.2.tgz", + "integrity": 
"sha512-UK65WJAbwIJbiBFXpxrbTNArtfuznvxAJw4Q2ZGlU8kPeDIWEX1dg3rn2veBVUylA2Ezg89ktszWbaQnxD/e3A==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-arm64-musl": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-arm64-musl/-/lightningcss-linux-arm64-musl-1.30.2.tgz", + "integrity": "sha512-5Vh9dGeblpTxWHpOx8iauV02popZDsCYMPIgiuw97OJ5uaDsL86cnqSFs5LZkG3ghHoX5isLgWzMs+eD1YzrnA==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-gnu": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-gnu/-/lightningcss-linux-x64-gnu-1.30.2.tgz", + "integrity": "sha512-Cfd46gdmj1vQ+lR6VRTTadNHu6ALuw2pKR9lYq4FnhvgBc4zWY1EtZcAc6EffShbb1MFrIPfLDXD6Xprbnni4w==", + "cpu": [ + "x64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-linux-x64-musl": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-linux-x64-musl/-/lightningcss-linux-x64-musl-1.30.2.tgz", + "integrity": "sha512-XJaLUUFXb6/QG2lGIW6aIk6jKdtjtcffUT0NKvIqhSBY3hh9Ch+1LCeH80dR9q9LBjG3ewbDjnumefsLsP6aiA==", + "cpu": [ + "x64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-arm64-msvc": { + "version": 
"1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-arm64-msvc/-/lightningcss-win32-arm64-msvc-1.30.2.tgz", + "integrity": "sha512-FZn+vaj7zLv//D/192WFFVA0RgHawIcHqLX9xuWiQt7P0PtdFEVaxgF9rjM/IRYHQXNnk61/H/gb2Ei+kUQ4xQ==", + "cpu": [ + "arm64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/lightningcss-win32-x64-msvc": { + "version": "1.30.2", + "resolved": "https://registry.npmjs.org/lightningcss-win32-x64-msvc/-/lightningcss-win32-x64-msvc-1.30.2.tgz", + "integrity": "sha512-5g1yc73p+iAkid5phb4oVFMB45417DkRevRbt/El/gKXJk4jid+vPFF/AXbxn05Aky8PapwzZrdJShv5C0avjw==", + "cpu": [ + "x64" + ], + "license": "MPL-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/parcel" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash-es": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.23.tgz", + "integrity": "sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==", + "license": "MIT" + }, + "node_modules/log-symbols": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", + "integrity": "sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==", + "license": "MIT", + "dependencies": { + 
"chalk": "^5.0.0", + "is-unicode-supported": "^1.1.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "license": "ISC" + }, + "node_modules/magic-string": { + "version": "0.30.21", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.21.tgz", + "integrity": "sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==", + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.5" + } + }, + "node_modules/mark.js": { + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==", + "license": "MIT" + }, + "node_modules/markdown-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz", + "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/markdown-table": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz", + "integrity": 
"sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/marked": { + "version": "16.4.2", + "resolved": "https://registry.npmjs.org/marked/-/marked-16.4.2.tgz", + "integrity": "sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==", + "license": "MIT", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 20" + } + }, + "node_modules/mdast-util-directive": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.1.0.tgz", + "integrity": "sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz", + "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz", + 
"integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-frontmatter": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", + "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "escape-string-regexp": "^5.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-extension-frontmatter": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz", + "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==", + "license": "MIT", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz", + "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": 
"sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-gfm-task-list-item": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", + "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", + "license": "MIT", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.1.tgz", + "integrity": "sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", 
+ "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdx-jsx": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.2.0.tgz", + "integrity": "sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "license": "MIT", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": 
"https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-hast": { + "version": "13.2.1", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.1.tgz", + "integrity": "sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "@ungap/structured-clone": "^1.0.0", + "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz", + "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/media-query-parser": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/media-query-parser/-/media-query-parser-2.0.2.tgz", + "integrity": "sha512-1N4qp+jE0pL5Xv4uEcwVUhIkwdUO3S/9gML90nqKA7v7FcOS5vUtatfzok9S9U1EJU8dHWlcv95WLnKmmxZI9w==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.12.5" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "license": "MIT" + }, + "node_modules/mermaid": { + "version": "11.12.2", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-11.12.2.tgz", + "integrity": "sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==", + "license": "MIT", + "dependencies": { + "@braintree/sanitize-url": "^7.1.1", + "@iconify/utils": "^3.0.1", + "@mermaid-js/parser": "^0.6.3", + "@types/d3": "^7.4.3", + "cytoscape": "^3.29.3", + "cytoscape-cose-bilkent": "^4.1.0", + "cytoscape-fcose": "^2.2.0", + "d3": "^7.9.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.13", + "dayjs": "^1.11.18", + "dompurify": "^3.2.5", + "katex": "^0.16.22", + "khroma": "^2.1.0", + "lodash-es": "^4.17.21", + "marked": "^16.2.1", + "roughjs": "^4.6.6", + "stylis": "^4.3.6", + "ts-dedent": "^2.2.0", + "uuid": "^11.1.0" + } + }, + "node_modules/mermaid-isomorphic": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/mermaid-isomorphic/-/mermaid-isomorphic-3.0.4.tgz", + "integrity": "sha512-XQTy7H1XwHK3DPEHf+ZNWiqUEd9BwX3Xws38R9Fj2gx718srmgjlZoUzHr+Tca+O+dqJOJsAJaKzCoP65QDfDg==", + "license": "MIT", + "dependencies": { + "@fortawesome/fontawesome-free": "^6.0.0", + "mermaid": "^11.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" + }, + "peerDependencies": { + "playwright": "1" + }, + "peerDependenciesMeta": { + "playwright": { + "optional": true + } + } + }, + "node_modules/micromark": { + "version": "4.0.2", + 
"resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz", + "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz", + "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-destination": "^2.0.0", + "micromark-factory-label": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-title": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-chunked": "^2.0.0", + 
"micromark-util-classify-character": "^2.0.0", + "micromark-util-html-tag-name": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-subtokenize": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-directive": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/micromark-extension-directive/-/micromark-extension-directive-3.0.2.tgz", + "integrity": "sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-factory-whitespace": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "parse-entities": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-frontmatter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-frontmatter/-/micromark-extension-frontmatter-2.0.0.tgz", + "integrity": "sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==", + "license": "MIT", + "dependencies": { + "fault": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz", + "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==", + "license": "MIT", + "dependencies": { + "micromark-extension-gfm-autolink-literal": "^2.0.0", + 
"micromark-extension-gfm-footnote": "^2.0.0", + "micromark-extension-gfm-strikethrough": "^2.0.0", + "micromark-extension-gfm-table": "^2.0.0", + "micromark-extension-gfm-tagfilter": "^2.0.0", + "micromark-extension-gfm-task-list-item": "^2.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-autolink-literal": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz", + "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==", + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-footnote": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz", + "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-strikethrough": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz", + "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-classify-character": "^2.0.0", + "micromark-util-resolve-all": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-table": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz", + "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-tagfilter": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz", + "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==", + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-gfm-task-list-item": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz", + "integrity": 
"sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==", + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-expression": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-expression/-/micromark-extension-mdx-expression-3.0.1.tgz", + "integrity": "sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "micromark-factory-mdx-expression": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-extension-mdx-jsx": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-jsx/-/micromark-extension-mdx-jsx-3.0.2.tgz", + "integrity": "sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "micromark-factory-mdx-expression": "^2.0.0", + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdx-md": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdx-md/-/micromark-extension-mdx-md-2.0.0.tgz", + "integrity": "sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==", + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs/-/micromark-extension-mdxjs-3.0.0.tgz", + "integrity": "sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==", + "license": "MIT", + "dependencies": { + "acorn": "^8.0.0", + "acorn-jsx": "^5.0.0", + "micromark-extension-mdx-expression": "^3.0.0", + "micromark-extension-mdx-jsx": "^3.0.0", + "micromark-extension-mdx-md": "^2.0.0", + "micromark-extension-mdxjs-esm": "^3.0.0", + "micromark-util-combine-extensions": "^2.0.0", + "micromark-util-types": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-extension-mdxjs-esm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/micromark-extension-mdxjs-esm/-/micromark-extension-mdxjs-esm-3.0.0.tgz", + "integrity": "sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + "micromark-core-commonmark": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-position-from-estree": "^2.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/micromark-factory-destination": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz", + "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz", + "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-mdx-expression": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-factory-mdx-expression/-/micromark-factory-mdx-expression-2.0.3.tgz", + "integrity": "sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "devlop": "^1.0.0", + 
"micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-events-to-acorn": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-position-from-estree": "^2.0.0", + "vfile-message": "^4.0.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz", + "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz", + "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz", + "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-factory-space": "^2.0.0", + "micromark-util-character": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz", + "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz", + "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz", + "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + 
"micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz", + "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-chunked": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz", + "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-decode-string": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz", + "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^2.0.0", + 
"micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz", + "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-events-to-acorn": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/micromark-util-events-to-acorn/-/micromark-util-events-to-acorn-2.0.3.tgz", + "integrity": "sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "estree-util-visit": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "vfile-message": "^4.0.0" + } + }, + "node_modules/micromark-util-html-tag-name": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz", + "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz", + "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz", + "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz", + "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^2.0.0", + "micromark-util-encode": "^2.0.0", + "micromark-util-symbol": "^2.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz", + "integrity": 
"sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "devlop": "^1.0.0", + "micromark-util-chunked": "^2.0.0", + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz", + "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/micromark-util-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz", + "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/mime-db": { + "version": "1.54.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.54.0.tgz", + "integrity": 
"sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/mini-svg-data-uri": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/mini-svg-data-uri/-/mini-svg-data-uri-1.4.4.tgz", + "integrity": "sha512-r9deDe9p5FJUPZAk3A59wGH7Ii9YrjjWw0jmw/liSbHl2CHiyXj6FcDXDu2K3TjVAXqiJdaw3xxwlZZr9E6nHg==", + "license": "MIT", + "bin": { + "mini-svg-data-uri": "cli.js" + } + }, + "node_modules/minisearch": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-7.2.0.tgz", + "integrity": "sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==", + "license": "MIT" + }, + "node_modules/mlly": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/mlly/-/mlly-1.8.0.tgz", + "integrity": "sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==", + "license": "MIT", + "dependencies": { + "acorn": "^8.15.0", + "pathe": "^2.0.3", + "pkg-types": "^1.3.1", + "ufo": "^1.6.1" + } + }, + "node_modules/modern-ahocorasick": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/modern-ahocorasick/-/modern-ahocorasick-1.1.0.tgz", + "integrity": "sha512-sEKPVl2rM+MNVkGQt3ChdmD8YsigmXdn5NifZn6jiwn9LRJpWm8F3guhaqrJT/JOat6pwpbXEk6kv+b9DMIjsQ==", + "license": "MIT" + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/nanoid": { + 
"version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/negotiator": { + "version": "0.6.4", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", + "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "license": "MIT" + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "license": "BSD-2-Clause", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": 
"sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/oniguruma-to-es": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/oniguruma-to-es/-/oniguruma-to-es-2.3.0.tgz", + "integrity": "sha512-bwALDxriqfKGfUufKGGepCzu9x7nJQuoRoAFp4AnwehhC2crqrDIAP/uN2qdlsAvSMpeRC3+Yzhqc7hLmle5+g==", + "license": "MIT", + "dependencies": { + "emoji-regex-xs": "^1.0.0", + "regex": "^5.1.1", + "regex-recursion": "^5.1.1" + } + }, + "node_modules/ora": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/ora/-/ora-7.0.1.tgz", + "integrity": "sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==", + "license": "MIT", + "dependencies": { + "chalk": "^5.3.0", + "cli-cursor": "^4.0.0", + "cli-spinners": "^2.9.0", + "is-interactive": "^2.0.0", + "is-unicode-supported": "^1.3.0", + "log-symbols": "^5.1.0", + "stdin-discarder": "^0.1.0", + "string-width": "^6.1.0", + "strip-ansi": "^7.1.0" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": 
"5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", + "license": "MIT", + "dependencies": { + "yocto-queue": "^1.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate/node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/package-manager-detector": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/package-manager-detector/-/package-manager-detector-1.6.0.tgz", + "integrity": "sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==", + "license": "MIT" + }, + "node_modules/parse-entities": { + 
"version": "4.0.2", + "resolved": "https://registry.npmjs.org/parse-entities/-/parse-entities-4.0.2.tgz", + "integrity": "sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0", + "character-entities-legacy": "^3.0.0", + "character-reference-invalid": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "is-alphanumerical": "^2.0.0", + "is-decimal": "^2.0.0", + "is-hexadecimal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/parse-entities/node_modules/@types/unist": { + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz", + "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==", + "license": "MIT" + }, + "node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "license": "MIT", + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-data-parser": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/path-data-parser/-/path-data-parser-0.1.0.tgz", + "integrity": "sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==", + "license": "MIT" + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/pathe": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pkg-types": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/pkg-types/-/pkg-types-1.3.1.tgz", + "integrity": "sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==", + "license": "MIT", + "dependencies": { + "confbox": "^0.1.8", + "mlly": "^1.7.4", + "pathe": "^2.0.1" + } + }, + "node_modules/playwright": { + "version": "1.58.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.58.1.tgz", + "integrity": 
"sha512-+2uTZHxSCcxjvGc5C891LrS1/NlxglGxzrC4seZiVjcYVQfUa87wBL6rTDqzGjuoWNjnBzRqKmF6zRYGMvQUaQ==", + "license": "Apache-2.0", + "dependencies": { + "playwright-core": "1.58.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.58.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.58.1.tgz", + "integrity": "sha512-bcWzOaTxcW+VOOGBCQgnaKToLJ65d6AqfLVKEWvexyS3AS6rbXl+xdpYRMGSRBClPvyj44njOWoxjNdL/H9UNg==", + "license": "Apache-2.0", + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/points-on-curve": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/points-on-curve/-/points-on-curve-0.2.0.tgz", + "integrity": "sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==", + "license": "MIT" + }, + "node_modules/points-on-path": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/points-on-path/-/points-on-path-0.2.1.tgz", + "integrity": "sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==", + "license": "MIT", + "dependencies": { + "path-data-parser": "0.1.0", + "points-on-curve": "0.2.0" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || 
^12 || >=14" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "license": "MIT" + }, + "node_modules/property-information": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz", + "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/radix-ui": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/radix-ui/-/radix-ui-1.4.3.tgz", + "integrity": "sha512-aWizCQiyeAenIdUbqEpXgRA1ya65P13NKn/W8rWkcN0OPkRDxdBVLWnIEDsS2RpwCK2nobI7oMUSmexzTDyAmA==", + "license": "MIT", + "dependencies": { + "@radix-ui/primitive": "1.1.3", + "@radix-ui/react-accessible-icon": "1.1.7", + "@radix-ui/react-accordion": "1.2.12", + "@radix-ui/react-alert-dialog": "1.1.15", + "@radix-ui/react-arrow": "1.1.7", + "@radix-ui/react-aspect-ratio": "1.1.7", + "@radix-ui/react-avatar": "1.1.10", + "@radix-ui/react-checkbox": "1.3.3", + "@radix-ui/react-collapsible": "1.1.12", + "@radix-ui/react-collection": "1.1.7", + "@radix-ui/react-compose-refs": "1.1.2", + "@radix-ui/react-context": "1.1.2", + "@radix-ui/react-context-menu": "2.2.16", + "@radix-ui/react-dialog": "1.1.15", + "@radix-ui/react-direction": "1.1.1", + "@radix-ui/react-dismissable-layer": "1.1.11", + "@radix-ui/react-dropdown-menu": "2.1.16", + "@radix-ui/react-focus-guards": "1.1.3", + "@radix-ui/react-focus-scope": "1.1.7", + "@radix-ui/react-form": "0.1.8", + "@radix-ui/react-hover-card": "1.1.15", + "@radix-ui/react-label": "2.1.7", + "@radix-ui/react-menu": "2.1.16", + "@radix-ui/react-menubar": "1.1.16", + "@radix-ui/react-navigation-menu": 
"1.2.14", + "@radix-ui/react-one-time-password-field": "0.1.8", + "@radix-ui/react-password-toggle-field": "0.1.3", + "@radix-ui/react-popover": "1.1.15", + "@radix-ui/react-popper": "1.2.8", + "@radix-ui/react-portal": "1.1.9", + "@radix-ui/react-presence": "1.1.5", + "@radix-ui/react-primitive": "2.1.3", + "@radix-ui/react-progress": "1.1.7", + "@radix-ui/react-radio-group": "1.3.8", + "@radix-ui/react-roving-focus": "1.1.11", + "@radix-ui/react-scroll-area": "1.2.10", + "@radix-ui/react-select": "2.2.6", + "@radix-ui/react-separator": "1.1.7", + "@radix-ui/react-slider": "1.3.6", + "@radix-ui/react-slot": "1.2.3", + "@radix-ui/react-switch": "1.2.6", + "@radix-ui/react-tabs": "1.1.13", + "@radix-ui/react-toast": "1.2.15", + "@radix-ui/react-toggle": "1.1.10", + "@radix-ui/react-toggle-group": "1.1.11", + "@radix-ui/react-toolbar": "1.1.11", + "@radix-ui/react-tooltip": "1.2.8", + "@radix-ui/react-use-callback-ref": "1.1.1", + "@radix-ui/react-use-controllable-state": "1.2.2", + "@radix-ui/react-use-effect-event": "0.0.2", + "@radix-ui/react-use-escape-keydown": "1.1.1", + "@radix-ui/react-use-is-hydrated": "0.1.0", + "@radix-ui/react-use-layout-effect": "1.1.1", + "@radix-ui/react-use-size": "1.1.1", + "@radix-ui/react-visually-hidden": "1.2.3" + }, + "peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/radix-ui/node_modules/@radix-ui/react-label": { + "version": "2.1.7", + "resolved": "https://registry.npmjs.org/@radix-ui/react-label/-/react-label-2.1.7.tgz", + "integrity": "sha512-YT1GqPSL8kJn20djelMX7/cTRp/Y9w5IZHvfxQTVHrOqa2yMl7i/UfMqKRU5V7mEyKTrUVgJXhNQPVCG8PBLoQ==", + "license": "MIT", + "dependencies": { + "@radix-ui/react-primitive": "2.1.3" + }, + 
"peerDependencies": { + "@types/react": "*", + "@types/react-dom": "*", + "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc", + "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "@types/react-dom": { + "optional": true + } + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/react": { + "version": "19.2.1", + "resolved": "https://registry.npmjs.org/react/-/react-19.2.1.tgz", + "integrity": "sha512-DGrYcCWK7tvYMnWh79yrPHt+vdx9tY+1gPZa7nJQtO/p8bLTDaHp4dzwEhQB7pZ4Xe3ok4XKuEPrVuc+wlpkmw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "19.2.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.1.tgz", + "integrity": "sha512-ibrK8llX2a4eOskq1mXKu/TGZj9qzomO+sNfO98M6d9zIPOEhlBkMkBUBLd1vgS0gQsLDBzA+8jJBVXDnfHmJg==", + "license": "MIT", + "dependencies": { + "scheduler": "^0.27.0" + }, + "peerDependencies": { + "react": "^19.2.1" + } + }, + "node_modules/react-intersection-observer": { + "version": "9.16.0", + "resolved": "https://registry.npmjs.org/react-intersection-observer/-/react-intersection-observer-9.16.0.tgz", + "integrity": "sha512-w9nJSEp+DrW9KmQmeWHQyfaP6b03v+TdXynaoA964Wxt7mdR3An11z4NNCQgL4gKSK7y1ver2Fq+JKH6CWEzUA==", + "license": "MIT", + "peerDependencies": { + "react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "react-dom": { + "optional": true + } + } + }, + "node_modules/react-refresh": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.18.0.tgz", + "integrity": 
"sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-remove-scroll": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/react-remove-scroll/-/react-remove-scroll-2.7.2.tgz", + "integrity": "sha512-Iqb9NjCCTt6Hf+vOdNIZGdTiH1QSqr27H/Ek9sv/a97gfueI/5h1s3yRi1nngzMUaOOToin5dI1dXKdXiF+u0Q==", + "license": "MIT", + "dependencies": { + "react-remove-scroll-bar": "^2.3.7", + "react-style-singleton": "^2.2.3", + "tslib": "^2.1.0", + "use-callback-ref": "^1.3.3", + "use-sidecar": "^1.1.3" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-remove-scroll-bar": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/react-remove-scroll-bar/-/react-remove-scroll-bar-2.3.8.tgz", + "integrity": "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q==", + "license": "MIT", + "dependencies": { + "react-style-singleton": "^2.2.2", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/react-router": { + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.13.0.tgz", + "integrity": "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw==", + "license": "MIT", + "dependencies": { + "cookie": "^1.0.1", + "set-cookie-parser": "^2.6.0" + }, + "engines": { + "node": ">=20.0.0" + }, + "peerDependencies": { + "react": ">=18", + "react-dom": ">=18" + }, + "peerDependenciesMeta": { + "react-dom": { + 
"optional": true + } + } + }, + "node_modules/react-style-singleton": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/react-style-singleton/-/react-style-singleton-2.2.3.tgz", + "integrity": "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ==", + "license": "MIT", + "dependencies": { + "get-nonce": "^1.0.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/recma-build-jsx": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/recma-build-jsx/-/recma-build-jsx-1.0.0.tgz", + "integrity": "sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-util-build-jsx": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/recma-jsx": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/recma-jsx/-/recma-jsx-1.0.1.tgz", + "integrity": "sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==", + "license": "MIT", + "dependencies": { + "acorn-jsx": "^5.0.0", + "estree-util-to-js": "^2.0.0", + "recma-parse": "^1.0.0", + "recma-stringify": "^1.0.0", + "unified": "^11.0.0" + }, + "funding": { + 
"type": "opencollective", + "url": "https://opencollective.com/unified" + }, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/recma-parse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/recma-parse/-/recma-parse-1.0.0.tgz", + "integrity": "sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "esast-util-from-js": "^2.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/recma-stringify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/recma-stringify/-/recma-stringify-1.0.0.tgz", + "integrity": "sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "estree-util-to-js": "^2.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/regex": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/regex/-/regex-5.1.1.tgz", + "integrity": "sha512-dN5I359AVGPnwzJm2jN1k0W9LPZ+ePvoOeVMMfqIMFz53sSwXkxaJoxr50ptnsC771lK95BnTrVSZxq0b9yCGw==", + "license": "MIT", + "dependencies": { + "regex-utilities": "^2.3.0" + } + }, + "node_modules/regex-recursion": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/regex-recursion/-/regex-recursion-5.1.1.tgz", + "integrity": "sha512-ae7SBCbzVNrIjgSbh7wMznPcQel1DNlDtzensnFxpiNpXt1U2ju/bHugH422r+4LAVS1FpW1YCwilmnNsjum9w==", + "license": "MIT", + "dependencies": { + "regex": "^5.1.1", + "regex-utilities": "^2.3.0" + } + }, + "node_modules/regex-utilities": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/regex-utilities/-/regex-utilities-2.3.0.tgz", + "integrity": 
"sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==", + "license": "MIT" + }, + "node_modules/rehype-autolink-headings": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/rehype-autolink-headings/-/rehype-autolink-headings-7.1.0.tgz", + "integrity": "sha512-rItO/pSdvnvsP4QRB1pmPiNHUskikqtPojZKJPPPAVx9Hj8i8TwMBhofrrAYRhYOOBZH9tgmG5lPqDLuIWPWmw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@ungap/structured-clone": "^1.0.0", + "hast-util-heading-rank": "^3.0.0", + "hast-util-is-element": "^3.0.0", + "unified": "^11.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-class-names": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/rehype-class-names/-/rehype-class-names-2.0.0.tgz", + "integrity": "sha512-jldCIiAEvXKdq8hqr5f5PzNdIDkvHC6zfKhwta9oRoMu7bn0W7qLES/JrrjBvr9rKz3nJ8x4vY1EWI+dhjHVZQ==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-classnames": "^3.0.0", + "hast-util-select": "^6.0.0", + "unified": "^11.0.4" + } + }, + "node_modules/rehype-mermaid": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/rehype-mermaid/-/rehype-mermaid-3.0.0.tgz", + "integrity": "sha512-fxrD5E4Fa1WXUjmjNDvLOMT4XB1WaxcfycFIWiYU0yEMQhcTDElc9aDFnbDFRLxG1Cfo1I3mfD5kg4sjlWaB+Q==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "hast-util-from-html-isomorphic": "^2.0.0", + "hast-util-to-text": "^4.0.0", + "mermaid-isomorphic": "^3.0.0", + "mini-svg-data-uri": "^1.0.0", + "space-separated-tokens": "^2.0.0", + "unified": "^11.0.0", + "unist-util-visit-parents": "^6.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" + }, + "peerDependencies": { + "playwright": "1" + }, + "peerDependenciesMeta": { + "playwright": { + "optional": true + } + } + }, + 
"node_modules/rehype-recma": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/rehype-recma/-/rehype-recma-1.0.0.tgz", + "integrity": "sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "hast-util-to-estree": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/rehype-slug": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/rehype-slug/-/rehype-slug-6.0.0.tgz", + "integrity": "sha512-lWyvf/jwu+oS5+hL5eClVd3hNdmwM1kAC0BUvEGD19pajQMIzcNUd/k9GsfQ+FfECvX+JE+e9/btsKH0EjJT6A==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "github-slugger": "^2.0.0", + "hast-util-heading-rank": "^3.0.0", + "hast-util-to-string": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-directive": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/remark-directive/-/remark-directive-3.0.1.tgz", + "integrity": "sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-directive": "^3.0.0", + "micromark-extension-directive": "^3.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-frontmatter": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/remark-frontmatter/-/remark-frontmatter-5.0.0.tgz", + "integrity": "sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-frontmatter": "^2.0.0", + "micromark-extension-frontmatter": "^2.0.0", + 
"unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-gfm": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz", + "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-gfm": "^3.0.0", + "micromark-extension-gfm": "^3.0.0", + "remark-parse": "^11.0.0", + "remark-stringify": "^11.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-mdx": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/remark-mdx/-/remark-mdx-3.1.1.tgz", + "integrity": "sha512-Pjj2IYlUY3+D8x00UJsIOg5BEvfMyeI+2uLPn9VO9Wg4MEtN/VTIq2NEJQfde9PnX15KgtHyl9S0BcTnWrIuWg==", + "license": "MIT", + "dependencies": { + "mdast-util-mdx": "^3.0.0", + "micromark-extension-mdxjs": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-mdx-frontmatter": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/remark-mdx-frontmatter/-/remark-mdx-frontmatter-5.2.0.tgz", + "integrity": "sha512-U/hjUYTkQqNjjMRYyilJgLXSPF65qbLPdoESOkXyrwz2tVyhAnm4GUKhfXqOOS9W34M3545xEMq+aMpHgVjEeQ==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "estree-util-value-to-estree": "^3.0.0", + "toml": "^3.0.0", + "unified": "^11.0.0", + "unist-util-mdx-define": "^1.0.0", + "yaml": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" + } + }, + "node_modules/remark-parse": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz", + "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==", + "license": "MIT", + 
"dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-rehype": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz", + "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==", + "license": "MIT", + "dependencies": { + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "mdast-util-to-hast": "^13.0.0", + "unified": "^11.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/remark-stringify": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz", + "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==", + "license": "MIT", + "dependencies": { + "@types/mdast": "^4.0.0", + "mdast-util-to-markdown": "^2.0.0", + "unified": "^11.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/require-like": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/require-like/-/require-like-0.1.2.tgz", + "integrity": "sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==", + "engines": { + "node": "*" + } + }, + "node_modules/restore-cursor": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", + "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", + "license": "MIT", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + 
"url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "license": "Unlicense" + }, + "node_modules/rollup": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + "@rollup/rollup-win32-x64-gnu": "4.57.1", + "@rollup/rollup-win32-x64-msvc": "4.57.1", + "fsevents": "~2.3.2" + } + }, + 
"node_modules/roughjs": { + "version": "4.6.6", + "resolved": "https://registry.npmjs.org/roughjs/-/roughjs-4.6.6.tgz", + "integrity": "sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==", + "license": "MIT", + "dependencies": { + "hachure-fill": "^0.5.2", + "path-data-parser": "^0.1.0", + "points-on-curve": "^0.2.0", + "points-on-path": "^0.2.1" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "license": "BSD-3-Clause" + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "license": "MIT" + }, + "node_modules/scheduler": { + "version": "0.27.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.27.0.tgz", + "integrity": "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q==", + "license": "MIT" + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": 
"bin/semver.js" + } + }, + "node_modules/send": { + "version": "0.19.2", + "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", + "integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==", + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "~0.5.2", + "http-errors": "~2.0.1", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "~2.4.1", + "range-parser": "~1.2.1", + "statuses": "~2.0.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "license": "MIT" + }, + "node_modules/serve-static": { + "version": "1.16.3", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz", + "integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==", + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "~0.19.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-cookie-parser": { + "version": "2.7.2", + "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.2.tgz", + "integrity": "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw==", + "license": "MIT" + }, + "node_modules/setprototypeof": { + 
"version": "1.2.0", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", + "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", + "license": "ISC" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shiki": { + "version": "1.29.2", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.29.2.tgz", + "integrity": "sha512-njXuliz/cP+67jU2hukkxCNuH1yUi4QfdZZY+sMr5PPrIyXSu5iTb/qYC4BiWWB0vZ+7TbdvYUCeL23zpwCfbg==", + "license": "MIT", + "dependencies": { + "@shikijs/core": "1.29.2", + "@shikijs/engine-javascript": "1.29.2", + "@shikijs/engine-oniguruma": "1.29.2", + "@shikijs/langs": "1.29.2", + "@shikijs/themes": "1.29.2", + "@shikijs/types": "1.29.2", + "@shikijs/vscode-textmate": "^10.0.1", + "@types/hast": "^3.0.4" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", 
+ "license": "MIT" + }, + "node_modules/source-map": { + "version": "0.7.6", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.6.tgz", + "integrity": "sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==", + "license": "BSD-3-Clause", + "engines": { + "node": ">= 12" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/space-separated-tokens": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz", + "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/statuses": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz", + "integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/stdin-discarder": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz", + "integrity": "sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==", + "license": "MIT", + "dependencies": { + "bl": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": 
"sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-6.1.0.tgz", + "integrity": "sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==", + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^10.2.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stringify-entities": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz", + "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==", + "license": "MIT", + "dependencies": { + "character-entities-html4": "^2.0.0", + "character-entities-legacy": "^3.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", + "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/style-to-js": { + "version": "1.1.21", + "resolved": 
"https://registry.npmjs.org/style-to-js/-/style-to-js-1.1.21.tgz", + "integrity": "sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==", + "license": "MIT", + "dependencies": { + "style-to-object": "1.0.14" + } + }, + "node_modules/style-to-object": { + "version": "1.0.14", + "resolved": "https://registry.npmjs.org/style-to-object/-/style-to-object-1.0.14.tgz", + "integrity": "sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==", + "license": "MIT", + "dependencies": { + "inline-style-parser": "0.2.7" + } + }, + "node_modules/stylis": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.6.tgz", + "integrity": "sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==", + "license": "MIT" + }, + "node_modules/tabbable": { + "version": "6.4.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.4.0.tgz", + "integrity": "sha512-05PUHKSNE8ou2dwIxTngl4EzcnsCDZGJ/iCLtDflR/SHB/ny14rXc+qU5P4mG9JkusiV7EivzY9Mhm55AzAvCg==", + "license": "MIT" + }, + "node_modules/tailwindcss": { + "version": "4.1.18", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.18.tgz", + "integrity": "sha512-4+Z+0yiYyEtUVCScyfHCxOYP06L5Ne+JiHhY2IjR2KWMIWhJOYZKLSGZaP5HkZ8+bY0cxfzwDE5uOmzFXyIwxw==", + "dev": true, + "license": "MIT" + }, + "node_modules/tapable": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz", + "integrity": "sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==", + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/tinyexec": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.2.tgz", + "integrity": 
"sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==", + "license": "MIT", + "engines": { + "node": ">=18" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", + "license": "MIT" + }, + "node_modules/trim-lines": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz", + "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/trough": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", + "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": 
"https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "license": "MIT", + "engines": { + "node": ">=6.10" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" + }, + "node_modules/twoslash": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/twoslash/-/twoslash-0.3.6.tgz", + "integrity": "sha512-VuI5OKl+MaUO9UIW3rXKoPgHI3X40ZgB/j12VY6h98Ae1mCBihjPvhOPeJWlxCYcmSbmeZt5ZKkK0dsVtp+6pA==", + "license": "MIT", + "dependencies": { + "@typescript/vfs": "^1.6.2", + "twoslash-protocol": "0.3.6" + }, + "peerDependencies": { + "typescript": "^5.5.0" + } + }, + "node_modules/twoslash-protocol": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/twoslash-protocol/-/twoslash-protocol-0.3.6.tgz", + "integrity": "sha512-FHGsJ9Q+EsNr5bEbgG3hnbkvEBdW5STgPU824AHUjB4kw0Dn4p8tABT7Ncg1Ie6V0+mDg3Qpy41VafZXcQhWMA==", + "license": "MIT" + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ua-parser-js": { + "version": "1.0.41", + "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-1.0.41.tgz", + "integrity": "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + 
"type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "license": "MIT", + "bin": { + "ua-parser-js": "script/cli.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ufo": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/ufo/-/ufo-1.6.3.tgz", + "integrity": "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==", + "license": "MIT" + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "license": "MIT" + }, + "node_modules/unified": { + "version": "11.0.5", + "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz", + "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "bail": "^2.0.0", + "devlop": "^1.0.0", + "extend": "^3.0.0", + "is-plain-obj": "^4.0.0", + "trough": "^2.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-find-after": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz", + "integrity": "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-is": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.1.tgz", + "integrity": "sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==", + "license": 
"MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-mdx-define": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/unist-util-mdx-define/-/unist-util-mdx-define-1.1.2.tgz", + "integrity": "sha512-9ncH7i7TN5Xn7/tzX5bE3rXgz1X/u877gYVAUB3mLeTKYJmQHmqKTDBi6BTGXV7AeolBCI9ErcVsOt2qryoD0g==", + "license": "MIT", + "dependencies": { + "@types/estree": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "estree-util-scope": "^1.0.0", + "estree-walker": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" + } + }, + "node_modules/unist-util-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz", + "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-position-from-estree": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unist-util-position-from-estree/-/unist-util-position-from-estree-2.0.0.tgz", + "integrity": "sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-remove-position": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unist-util-remove-position/-/unist-util-remove-position-5.0.0.tgz", + "integrity": "sha512-Hp5Kh3wLxv0PHj9m2yZhhLt58KzPtEYKQQ4yxfYFEO7EvHwzyDYnduhHnY1mDxoqr7VUwVuHXk9RXKIiYS1N8Q==", + "license": 
"MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-visit": "^5.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-stringify-position": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz", + "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.1.0.tgz", + "integrity": "sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/unist-util-visit-parents": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.2.tgz", + "integrity": "sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-is": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/universalify": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", + "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", + "license": "MIT", + "engines": { + "node": ">= 10.0.0" + } + }, + 
"node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/use-callback-ref": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/use-callback-ref/-/use-callback-ref-1.3.3.tgz", + "integrity": "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg==", + "license": "MIT", + "dependencies": { + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sidecar": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/use-sidecar/-/use-sidecar-1.1.3.tgz", + "integrity": "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ==", + "license": "MIT", + "dependencies": { + "detect-node-es": "^1.1.0", + "tslib": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "@types/react": "*", + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + } + } + }, + "node_modules/use-sync-external-store": { + "version": "1.6.0", + "resolved": 
"https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz", + "integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==", + "license": "MIT", + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, + "node_modules/uuid": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.1.0.tgz", + "integrity": "sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/esm/bin/uuid" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vfile": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz", + "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "vfile-message": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-location": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-5.0.3.tgz", + "integrity": "sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==", + "license": "MIT", + 
"dependencies": { + "@types/unist": "^3.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-matter": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/vfile-matter/-/vfile-matter-5.0.1.tgz", + "integrity": "sha512-o6roP82AiX0XfkyTHyRCMXgHfltUNlXSEqCIS80f+mbAyiQBE2fxtDVMtseyytGx75sihiJFo/zR6r/4LTs2Cw==", + "license": "MIT", + "dependencies": { + "vfile": "^6.0.0", + "yaml": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vfile-message": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz", + "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==", + "license": "MIT", + "dependencies": { + "@types/unist": "^3.0.0", + "unist-util-stringify-position": "^4.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/vite": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.1.tgz", + "integrity": "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==", + "license": "MIT", + "dependencies": { + "esbuild": "^0.27.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.15" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^20.19.0 || >=22.12.0", + "jiti": ">=1.21.0", + "less": "^4.0.0", + "lightningcss": "^1.21.0", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", + "terser": "^5.16.0", + "tsx": "^4.8.1", + "yaml": 
"^2.4.2" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "jiti": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/vite-node": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", + "license": "MIT", + "dependencies": { + "cac": "^6.7.14", + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" + }, + "bin": { + "vite-node": "vite-node.mjs" + }, + "engines": { + "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + }, + "funding": { + "url": "https://opencollective.com/vitest" + } + }, + "node_modules/vite/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/vocs": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/vocs/-/vocs-1.2.1.tgz", + "integrity": "sha512-rQ5aoD68+UJQeJ9G/nPcqcwhbBpMFZnHJ9ZkIsRHaeqBdiA4S86ufplJRKxmX56XZLEpY+wlU+TGz8Qsxtb8Sw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/wevm" + } + ], + "license": "MIT", + "dependencies": { + "@floating-ui/react": "^0.27.16", + "@hono/node-server": "^1.19.5", + "@mdx-js/mdx": "^3.1.1", + "@mdx-js/react": "^3.1.1", + "@mdx-js/rollup": 
"^3.1.1", + "@noble/hashes": "^1.7.1", + "@radix-ui/colors": "^3.0.0", + "@radix-ui/react-accordion": "^1.2.3", + "@radix-ui/react-dialog": "^1.1.6", + "@radix-ui/react-icons": "^1.3.2", + "@radix-ui/react-label": "^2.1.2", + "@radix-ui/react-navigation-menu": "^1.2.5", + "@radix-ui/react-popover": "^1.1.6", + "@radix-ui/react-tabs": "^1.1.3", + "@shikijs/rehype": "^1", + "@shikijs/transformers": "^1", + "@shikijs/twoslash": "^1", + "@tailwindcss/vite": "4.1.15", + "@vanilla-extract/css": "^1.17.4", + "@vanilla-extract/dynamic": "^2.1.5", + "@vanilla-extract/vite-plugin": "^5.1.1", + "@vitejs/plugin-react": "^5.0.4", + "autoprefixer": "^10.4.21", + "cac": "^6.7.14", + "chroma-js": "^3.1.2", + "clsx": "^2.1.1", + "compression": "^1.8.1", + "create-vocs": "^1.0.0-alpha.5", + "cross-spawn": "^7.0.6", + "fs-extra": "^11.3.2", + "hastscript": "^8.0.0", + "hono": "^4.10.3", + "mark.js": "^8.11.1", + "mdast-util-directive": "^3.1.0", + "mdast-util-from-markdown": "^2.0.2", + "mdast-util-frontmatter": "^2.0.1", + "mdast-util-gfm": "^3.1.0", + "mdast-util-mdx": "^3.0.0", + "mdast-util-mdx-jsx": "^3.2.0", + "mdast-util-to-hast": "^13.2.0", + "mdast-util-to-markdown": "^2.1.2", + "minisearch": "^7.2.0", + "nuqs": "^2.7.2", + "ora": "^7.0.1", + "p-limit": "^5.0.0", + "picomatch": "^4.0.3", + "playwright": "^1.52.0", + "postcss": "^8.5.2", + "radix-ui": "^1.1.3", + "react-intersection-observer": "^9.15.1", + "react-router": "^7.9.4", + "rehype-autolink-headings": "^7.1.0", + "rehype-class-names": "^2.0.0", + "rehype-mermaid": "^3.0.0", + "rehype-slug": "^6.0.0", + "remark-directive": "^3.0.1", + "remark-frontmatter": "^5.0.0", + "remark-gfm": "^4.0.1", + "remark-mdx": "^3.1.1", + "remark-mdx-frontmatter": "^5.2.0", + "remark-parse": "^11.0.0", + "serve-static": "^1.16.2", + "shiki": "^1", + "toml": "^3.0.0", + "twoslash": "~0.3.4", + "ua-parser-js": "^1.0.40", + "unified": "^11.0.5", + "unist-util-visit": "^5.0.0", + "vfile-matter": "^5.0.1", + "vite": "^7.1.11", + "yaml": 
"^2.8.1" + }, + "bin": { + "vocs": "_lib/cli/index.js" + }, + "engines": { + "node": ">=22" + }, + "peerDependencies": { + "react": "^19", + "react-dom": "^19" + } + }, + "node_modules/vocs/node_modules/nuqs": { + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/nuqs/-/nuqs-2.8.8.tgz", + "integrity": "sha512-LF5sw9nWpHyPWzMMu9oho3r9C5DvkpmBIg4LQN78sexIzGaeRx8DWr0uy3YiFx5i2QGZN1Qqcb+OAtEVRa2bnA==", + "license": "MIT", + "dependencies": { + "@standard-schema/spec": "1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/franky47" + }, + "peerDependencies": { + "@remix-run/react": ">=2", + "@tanstack/react-router": "^1", + "next": ">=14.2.0", + "react": ">=18.2.0 || ^19.0.0-0", + "react-router": "^5 || ^6 || ^7", + "react-router-dom": "^5 || ^6 || ^7" + }, + "peerDependenciesMeta": { + "@remix-run/react": { + "optional": true + }, + "@tanstack/react-router": { + "optional": true + }, + "next": { + "optional": true + }, + "react-router": { + "optional": true + }, + "react-router-dom": { + "optional": true + } + } + }, + "node_modules/vscode-jsonrpc": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", + "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageserver": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/vscode-languageserver/-/vscode-languageserver-9.0.1.tgz", + "integrity": "sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==", + "license": "MIT", + "dependencies": { + "vscode-languageserver-protocol": "3.17.5" + }, + "bin": { + "installServerIntoExtension": "bin/installServerIntoExtension" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.5", + "resolved": 
"https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", + "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "license": "MIT", + "dependencies": { + "vscode-jsonrpc": "8.2.0", + "vscode-languageserver-types": "3.17.5" + } + }, + "node_modules/vscode-languageserver-textdocument": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.12.tgz", + "integrity": "sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==", + "license": "MIT" + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", + "license": "MIT" + }, + "node_modules/vscode-uri": { + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.8.tgz", + "integrity": "sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==", + "license": "MIT" + }, + "node_modules/web-namespaces": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", + "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { 
+ "node": ">= 8" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "license": "ISC" + }, + "node_modules/yaml": { + "version": "2.8.2", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.2.tgz", + "integrity": "sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==", + "license": "ISC", + "bin": { + "yaml": "bin.mjs" + }, + "engines": { + "node": ">= 14.6" + }, + "funding": { + "url": "https://github.com/sponsors/eemeli" + } + }, + "node_modules/yocto-queue": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.2.tgz", + "integrity": "sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==", + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zwitch": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", + "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + } + } +} diff --git a/kona/docs/package.json b/rust/docs/package.json similarity index 92% rename from kona/docs/package.json rename to rust/docs/package.json index d4dc2e78d7a..482e9a21334 100644 --- a/kona/docs/package.json +++ b/rust/docs/package.json @@ -1,5 +1,5 @@ { - "name": "kona-docs", + "name": "op-stack-rust-docs", "private": true, "version": "0.0.0", "type": "module", diff --git a/rust/docs/sidebar-cli-op-reth.ts b/rust/docs/sidebar-cli-op-reth.ts new file mode 100644 index 00000000000..775b163a625 --- /dev/null +++ b/rust/docs/sidebar-cli-op-reth.ts @@ -0,0 +1,273 @@ +import { 
SidebarItem } from "vocs"; + +export const opRethCliSidebar: SidebarItem = { + text: "op-reth", + link: "/op-reth/cli/op-reth", + collapsed: false, + items: [ + { + text: "op-reth node", + link: "/op-reth/cli/op-reth/node" + }, + { + text: "op-reth init", + link: "/op-reth/cli/op-reth/init" + }, + { + text: "op-reth init-state", + link: "/op-reth/cli/op-reth/init-state" + }, + { + text: "op-reth import-op", + link: "/op-reth/cli/op-reth/import-op" + }, + { + text: "op-reth import-receipts-op", + link: "/op-reth/cli/op-reth/import-receipts-op" + }, + { + text: "op-reth dump-genesis", + link: "/op-reth/cli/op-reth/dump-genesis" + }, + { + text: "op-reth db", + link: "/op-reth/cli/op-reth/db", + collapsed: true, + items: [ + { + text: "op-reth db stats", + link: "/op-reth/cli/op-reth/db/stats" + }, + { + text: "op-reth db list", + link: "/op-reth/cli/op-reth/db/list" + }, + { + text: "op-reth db checksum", + link: "/op-reth/cli/op-reth/db/checksum", + collapsed: true, + items: [ + { + text: "op-reth db checksum mdbx", + link: "/op-reth/cli/op-reth/db/checksum/mdbx" + }, + { + text: "op-reth db checksum static-file", + link: "/op-reth/cli/op-reth/db/checksum/static-file" + } + ] + }, + { + text: "op-reth db diff", + link: "/op-reth/cli/op-reth/db/diff" + }, + { + text: "op-reth db get", + link: "/op-reth/cli/op-reth/db/get", + collapsed: true, + items: [ + { + text: "op-reth db get mdbx", + link: "/op-reth/cli/op-reth/db/get/mdbx" + }, + { + text: "op-reth db get static-file", + link: "/op-reth/cli/op-reth/db/get/static-file" + } + ] + }, + { + text: "op-reth db drop", + link: "/op-reth/cli/op-reth/db/drop" + }, + { + text: "op-reth db clear", + link: "/op-reth/cli/op-reth/db/clear", + collapsed: true, + items: [ + { + text: "op-reth db clear mdbx", + link: "/op-reth/cli/op-reth/db/clear/mdbx" + }, + { + text: "op-reth db clear static-file", + link: "/op-reth/cli/op-reth/db/clear/static-file" + } + ] + }, + { + text: "op-reth db repair-trie", + link: 
"/op-reth/cli/op-reth/db/repair-trie" + }, + { + text: "op-reth db static-file-header", + link: "/op-reth/cli/op-reth/db/static-file-header", + collapsed: true, + items: [ + { + text: "op-reth db static-file-header block", + link: "/op-reth/cli/op-reth/db/static-file-header/block" + }, + { + text: "op-reth db static-file-header path", + link: "/op-reth/cli/op-reth/db/static-file-header/path" + } + ] + }, + { + text: "op-reth db version", + link: "/op-reth/cli/op-reth/db/version" + }, + { + text: "op-reth db path", + link: "/op-reth/cli/op-reth/db/path" + }, + { + text: "op-reth db settings", + link: "/op-reth/cli/op-reth/db/settings", + collapsed: true, + items: [ + { + text: "op-reth db settings get", + link: "/op-reth/cli/op-reth/db/settings/get" + }, + { + text: "op-reth db settings set", + link: "/op-reth/cli/op-reth/db/settings/set", + collapsed: true, + items: [ + { + text: "op-reth db settings set receipts", + link: "/op-reth/cli/op-reth/db/settings/set/receipts" + }, + { + text: "op-reth db settings set transaction_senders", + link: "/op-reth/cli/op-reth/db/settings/set/transaction_senders" + }, + { + text: "op-reth db settings set account_changesets", + link: "/op-reth/cli/op-reth/db/settings/set/account_changesets" + }, + { + text: "op-reth db settings set storages_history", + link: "/op-reth/cli/op-reth/db/settings/set/storages_history" + }, + { + text: "op-reth db settings set transaction_hash_numbers", + link: "/op-reth/cli/op-reth/db/settings/set/transaction_hash_numbers" + }, + { + text: "op-reth db settings set account_history", + link: "/op-reth/cli/op-reth/db/settings/set/account_history" + }, + { + text: "op-reth db settings set storage_changesets", + link: "/op-reth/cli/op-reth/db/settings/set/storage_changesets" + } + ] + } + ] + }, + { + text: "op-reth db account-storage", + link: "/op-reth/cli/op-reth/db/account-storage" + } + ] + }, + { + text: "op-reth stage", + link: "/op-reth/cli/op-reth/stage", + collapsed: true, + items: [ + { + text: 
"op-reth stage run", + link: "/op-reth/cli/op-reth/stage/run" + }, + { + text: "op-reth stage drop", + link: "/op-reth/cli/op-reth/stage/drop" + }, + { + text: "op-reth stage dump", + link: "/op-reth/cli/op-reth/stage/dump", + collapsed: true, + items: [ + { + text: "op-reth stage dump execution", + link: "/op-reth/cli/op-reth/stage/dump/execution" + }, + { + text: "op-reth stage dump storage-hashing", + link: "/op-reth/cli/op-reth/stage/dump/storage-hashing" + }, + { + text: "op-reth stage dump account-hashing", + link: "/op-reth/cli/op-reth/stage/dump/account-hashing" + }, + { + text: "op-reth stage dump merkle", + link: "/op-reth/cli/op-reth/stage/dump/merkle" + } + ] + }, + { + text: "op-reth stage unwind", + link: "/op-reth/cli/op-reth/stage/unwind", + collapsed: true, + items: [ + { + text: "op-reth stage unwind to-block", + link: "/op-reth/cli/op-reth/stage/unwind/to-block" + }, + { + text: "op-reth stage unwind num-blocks", + link: "/op-reth/cli/op-reth/stage/unwind/num-blocks" + } + ] + } + ] + }, + { + text: "op-reth p2p", + link: "/op-reth/cli/op-reth/p2p", + collapsed: true, + items: [ + { + text: "op-reth p2p header", + link: "/op-reth/cli/op-reth/p2p/header" + }, + { + text: "op-reth p2p body", + link: "/op-reth/cli/op-reth/p2p/body" + }, + { + text: "op-reth p2p rlpx", + link: "/op-reth/cli/op-reth/p2p/rlpx", + collapsed: true, + items: [ + { + text: "op-reth p2p rlpx ping", + link: "/op-reth/cli/op-reth/p2p/rlpx/ping" + } + ] + }, + { + text: "op-reth p2p bootnode", + link: "/op-reth/cli/op-reth/p2p/bootnode" + } + ] + }, + { + text: "op-reth config", + link: "/op-reth/cli/op-reth/config" + }, + { + text: "op-reth prune", + link: "/op-reth/cli/op-reth/prune" + }, + { + text: "op-reth re-execute", + link: "/op-reth/cli/op-reth/re-execute" + } + ] +}; diff --git a/rust/docs/sidebar-kona.ts b/rust/docs/sidebar-kona.ts new file mode 100644 index 00000000000..8d82d55c0e3 --- /dev/null +++ b/rust/docs/sidebar-kona.ts @@ -0,0 +1,213 @@ +import { 
SidebarItem } from "vocs"; + +export const konaSidebar: SidebarItem[] = [ + { + text: "Introduction", + items: [ + { text: "Overview", link: "/kona/intro/overview" }, + { text: "Why Kona?", link: "/kona/intro/why" }, + { text: "Contributing", link: "/kona/intro/contributing" }, + { text: "Kona Lore", link: "/kona/intro/lore" } + ] + }, + { + text: "Kona for Node Operators", + items: [ + { text: "System Requirements", link: "/kona/node/requirements" }, + { + text: "Installation", + collapsed: true, + items: [ + { + text: "Prerequisites", + link: "/kona/node/install/overview" + }, + { + text: "Pre-Built Binaries", + link: "/kona/node/install/binaries" + }, + { + text: "Docker", + link: "/kona/node/install/docker" + }, + { + text: "Build from Source", + link: "/kona/node/install/source" + } + ] + }, + { + text: "Run a Node", + items: [ + { + text: "Overview", + link: "/kona/node/run/overview", + }, + { + text: "Binary", + link: "/kona/node/run/binary", + }, + { + text: "Docker", + link: "/kona/node/run/docker", + }, + { + text: "How it Works", + link: "/kona/node/run/mechanics", + } + ] + }, + { + text: "JSON-RPC Reference", + items: [ + { + text: "Overview", + link: "/kona/node/rpc/overview", + }, + { + text: "p2p", + link: "/kona/node/rpc/p2p", + }, + { + text: "rollup", + link: "/kona/node/rpc/rollup", + }, + { + text: "admin", + link: "/kona/node/rpc/admin", + } + ] + }, + { text: "Configuration", link: "/kona/node/configuration" }, + { text: "Kurtosis Integration", link: "/kona/kurtosis/overview" }, + { text: "Monitoring", link: "/kona/node/monitoring" }, + { text: "Subcommands", link: "/kona/node/subcommands" }, + { + text: "FAQ", + link: "/kona/node/faq/overview", + collapsed: true, + items: [ + { + text: "Ports", + link: "/kona/node/faq/ports" + }, + { + text: "Profiling", + link: "/kona/node/faq/profiling" + } + ] + } + ] + }, + { + text: "Kona as a Library", + items: [ + { text: "Overview", link: "/kona/sdk/overview" }, + { + text: "Node SDK", + items: [ + { 
text: "Introduction", link: "/kona/node/design/intro" }, + { text: "Derivation", link: "/kona/node/design/derivation" }, + { text: "Engine", link: "/kona/node/design/engine" }, + { text: "P2P", link: "/kona/node/design/p2p" }, + { text: "Sequencer", link: "/kona/node/design/sequencer" } + ] + }, + { + text: "Proof SDK", + items: [ + { text: "Introduction", link: "/kona/sdk/proof/intro" }, + { text: "FPVM Backend", link: "/kona/sdk/proof/fpvm-backend" }, + { text: "Custom Backend", link: "/kona/sdk/proof/custom-backend" }, + { text: "kona-executor Extensions", link: "/kona/sdk/proof/exec-ext" } + ] + }, + { + text: "Fault Proof Program Development", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/fpp-dev/intro" }, + { text: "Environment", link: "/kona/sdk/fpp-dev/env" }, + { text: "Supported Targets", link: "/kona/sdk/fpp-dev/targets" }, + { text: "Prologue", link: "/kona/sdk/fpp-dev/prologue" }, + { text: "Execution", link: "/kona/sdk/fpp-dev/execution" }, + { text: "Epilogue", link: "/kona/sdk/fpp-dev/epilogue" } + ] + }, + { + text: "Protocol Libraries", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/protocol/intro" }, + { text: "Registry", link: "/kona/sdk/protocol/registry" }, + { text: "Interop", link: "/kona/sdk/protocol/interop" }, + { text: "Hardforks", link: "/kona/sdk/protocol/hardforks" }, + { + text: "Derivation", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/protocol/derive/intro" }, + { text: "Custom Providers", link: "/kona/sdk/protocol/derive/providers" }, + { text: "Stage Swapping", link: "/kona/sdk/protocol/derive/stages" }, + { text: "Signaling", link: "/kona/sdk/protocol/derive/signaling" } + ] + }, + { + text: "Genesis", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/protocol/genesis/intro" }, + { text: "Rollup Config", link: "/kona/sdk/protocol/genesis/rollup-config" }, + { text: "System Config", link: 
"/kona/sdk/protocol/genesis/system-config" } + ] + }, + { + text: "Protocol", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/protocol/protocol/intro" }, + { text: "BlockInfo", link: "/kona/sdk/protocol/protocol/block-info" }, + { text: "L2BlockInfo", link: "/kona/sdk/protocol/protocol/l2-block-info" }, + { text: "Frames", link: "/kona/sdk/protocol/protocol/frames" }, + { text: "Channels", link: "/kona/sdk/protocol/protocol/channels" }, + { text: "Batches", link: "/kona/sdk/protocol/protocol/batches" } + ] + } + ] + }, + { + text: "Examples", + collapsed: true, + items: [ + { text: "Introduction", link: "/kona/sdk/examples/intro" }, + { text: "Load a Rollup Config", link: "/kona/sdk/examples/load-a-rollup-config" }, + { text: "Transform Frames to a Batch", link: "/kona/sdk/examples/frames-to-batch" }, + { text: "Transform a Batch into Frames", link: "/kona/sdk/examples/batch-to-frames" }, + { text: "Create a new L1BlockInfoTx Hardfork Variant", link: "/kona/sdk/examples/new-l1-block-info-tx-hardfork" }, + { text: "Create a new kona-executor test fixture", link: "/kona/sdk/examples/executor-test-fixtures" }, + { text: "Configuring P2P Network Peer Scoring", link: "/kona/sdk/examples/p2p-peer-scoring" }, + { text: "Custom Derivation Pipeline with New Stage", link: "/kona/sdk/examples/custom-derivation-pipeline" }, + { text: "Testing Kona Sequencing with Kurtosis", link: "/kona/sdk/examples/kurtosis-sequencing-test" } + ] + } + ] + }, + { + text: "RFC", + link: "/kona/rfc/active/intro", + items: [ + { + text: "Active RFCs", + items: [ ] + }, + { + text: "Archived RFCs", + collapsed: true, + items: [ + { text: "Umbrellas", link: "/kona/rfc/archived/umbrellas" }, + { text: "Monorepo", link: "/kona/rfc/archived/monorepo" } + ] + } + ] + } +]; diff --git a/rust/docs/sidebar-op-alloy.ts b/rust/docs/sidebar-op-alloy.ts new file mode 100644 index 00000000000..adb3d94a322 --- /dev/null +++ b/rust/docs/sidebar-op-alloy.ts @@ -0,0 +1,43 @@ +import { 
SidebarItem } from "vocs"; + +export const opAlloySidebar: SidebarItem[] = [ + { + text: "Introduction", + items: [ + { + text: "Overview", + link: "/op-alloy/intro" + }, + { + text: "Getting Started", + link: "/op-alloy/starting" + } + ] + }, + { + text: "Building", + items: [ + { + text: "Overview", + link: "/op-alloy/building" + }, + { + text: "Consensus", + link: "/op-alloy/building/consensus" + }, + { + text: "Engine RPC Types", + link: "/op-alloy/building/engine" + } + ] + }, + { + text: "Reference", + items: [ + { + text: "Glossary", + link: "/op-alloy/glossary" + } + ] + } +]; diff --git a/rust/docs/sidebar-op-reth.ts b/rust/docs/sidebar-op-reth.ts new file mode 100644 index 00000000000..c11642058d5 --- /dev/null +++ b/rust/docs/sidebar-op-reth.ts @@ -0,0 +1,41 @@ +import { SidebarItem } from "vocs"; +import { opRethCliSidebar } from "./sidebar-cli-op-reth"; + +export const opRethSidebar: SidebarItem[] = [ + { + text: "Introduction", + items: [ + { + text: "Overview", + link: "/op-reth/" + } + ] + }, + { + text: "Running op-reth", + items: [ + { + text: "OP Stack", + link: "/op-reth/run/opstack" + }, + { + text: "FAQ", + collapsed: true, + items: [ + { + text: "Sync OP Mainnet", + link: "/op-reth/run/faq/sync-op-mainnet" + } + ] + } + ] + }, + { + text: "CLI Reference", + link: "/op-reth/cli/op-reth", + collapsed: false, + items: [ + opRethCliSidebar + ] + }, +]; diff --git a/rust/docs/sidebar.ts b/rust/docs/sidebar.ts new file mode 100644 index 00000000000..d16b2c74358 --- /dev/null +++ b/rust/docs/sidebar.ts @@ -0,0 +1,10 @@ +import { SidebarItem } from "vocs"; +import { konaSidebar } from "./sidebar-kona"; +import { opRethSidebar } from "./sidebar-op-reth"; +import { opAlloySidebar } from "./sidebar-op-alloy"; + +export const sidebar = { + "/kona/": konaSidebar, + "/op-reth/": opRethSidebar, + "/op-alloy/": opAlloySidebar, +} satisfies Record; diff --git a/kona/docs/tsconfig.json b/rust/docs/tsconfig.json similarity index 100% rename from 
kona/docs/tsconfig.json rename to rust/docs/tsconfig.json diff --git a/rust/docs/vocs.config.ts b/rust/docs/vocs.config.ts new file mode 100644 index 00000000000..dfdd26063d4 --- /dev/null +++ b/rust/docs/vocs.config.ts @@ -0,0 +1,71 @@ +import { defineConfig } from 'vocs' +import { sidebar } from './sidebar' + +export default defineConfig({ + title: 'OP Stack Rust', + description: 'Rust implementations for the OP Stack: Kona, op-reth, and op-alloy', + logoUrl: '/logo.png', + iconUrl: '/logo.png', + sidebar, + search: { + fuzzy: true + }, + topNav: [ + { + text: 'Kona', + items: [ + { text: 'Overview', link: '/kona/intro/overview' }, + { text: 'Run a Node', link: '/kona/node/run/overview' }, + { text: 'SDK', link: '/kona/sdk/overview' }, + { text: 'Rustdocs', link: 'https://docs.rs/kona-node/latest/' }, + ] + }, + { + text: 'op-reth', + items: [ + { text: 'Overview', link: '/op-reth/' }, + { text: 'Run', link: '/op-reth/run/opstack' }, + { text: 'CLI Reference', link: '/op-reth/cli/op-reth' }, + ] + }, + { + text: 'op-alloy', + items: [ + { text: 'Overview', link: '/op-alloy/intro' }, + { text: 'Getting Started', link: '/op-alloy/starting' }, + { text: 'Building', link: '/op-alloy/building' }, + ] + }, + { text: 'GitHub', link: 'https://github.com/ethereum-optimism/optimism/tree/develop/rust' }, + ], + socials: [ + { + icon: 'github', + link: 'https://github.com/ethereum-optimism/optimism', + }, + ], + theme: { + accentColor: { + light: '#ff0420', + dark: '#ff0420', + } + }, + editLink: { + pattern: "https://github.com/ethereum-optimism/optimism/edit/develop/rust/docs/:path", + }, + sponsors: [ + { + name: 'Supporters', + height: 120, + items: [ + [ + { + name: 'OP Labs', + link: 'https://oplabs.co', + image: 'https://avatars.githubusercontent.com/u/109625874?s=200&v=4', + } + ] + ] + } + ] +}) diff --git a/rust/justfile b/rust/justfile new file mode 100644 index 00000000000..76ca9da3835 --- /dev/null +++ b/rust/justfile @@ -0,0 +1,201 @@ +set positional-arguments 
+ +# Aliases +alias t := test +alias l := lint +alias f := fmt-fix +alias b := build + +# default recipe to display help information +default: + @just --list + +############################### Build ############################### + +# Build the workspace +build *args='': + cargo build --workspace {{args}} + +# Build the workspace in release mode +build-release *args='': + cargo build --workspace --release {{args}} + +# Build the rollup node +build-node: + cargo build --release --bin kona-node + +# Build the supervisor +build-supervisor: + cargo build --release --bin kona-supervisor + +# Build op-reth +build-op-reth: + cargo build --release --bin op-reth + +############################### Test ################################ + +# Run all tests (unit + doc tests) +test: test-unit test-docs + +# Run unit tests (excluding online tests) +test-unit *args="-E '!test(test_online)'": + cargo nextest run --workspace --all-features {{args}} + +# Run online tests only +test-online: + cargo nextest run --workspace --all-features -E 'test(test_online)' + +# Run doc tests +test-docs: + cargo test --doc --workspace --locked + +############################### Lint ################################ + +# Run all lints +lint: fmt-check lint-clippy lint-docs + +# Check formatting (requires nightly) +fmt-check: + cargo +nightly fmt --all -- --check + +# Fix formatting (requires nightly) +fmt-fix: + cargo +nightly fmt --all + +# Run clippy +lint-clippy: + cargo clippy --workspace --all-features --all-targets -- -D warnings + +# Lint Rust documentation +lint-docs: + RUSTDOCFLAGS="-D warnings" cargo doc --workspace --no-deps --document-private-items + +############################ no_std ################################# + +# Check no_std compatibility for proof, protocol, alloy, and op-alloy crates +check-no-std: + #!/usr/bin/env bash + set -euo pipefail + + no_std_packages=( + # proof crates + kona-executor + kona-mpt + kona-preimage + kona-proof + kona-proof-interop + + # protocol 
crates + kona-genesis + kona-hardforks + kona-registry + kona-protocol + kona-derive + kona-driver + kona-interop + + # utilities + kona-serde + + # alloy + alloy-op-evm + alloy-op-hardforks + + # op-alloy + op-alloy + op-alloy-consensus + op-alloy-rpc-types + op-alloy-rpc-types-engine + ) + + # We need to install the riscv32imac-unknown-none-elf target before starting to build the no-std crates. + rustup target add riscv32imac-unknown-none-elf + + for package in "${no_std_packages[@]}"; do + echo "Checking no_std build for: $package" + cargo build -p "$package" --target riscv32imac-unknown-none-elf --no-default-features + echo "Successfully checked no_std build for: $package" + done + +########################### Benchmarks ############################## + +# Run benchmarks (compile only) +bench: + cargo bench --no-run --workspace --features test-utils --exclude example-gossip --exclude example-discovery + +########################## Misc tools ############################### + +# Check for unused dependencies (requires nightly + cargo-udeps) +check-udeps: + cargo +nightly udeps --release --workspace --all-features --all-targets + +# Run cargo hack for feature powerset checking +hack partition="": + #!/usr/bin/env bash + set -euo pipefail + cargo hack check --feature-powerset --depth 2 --no-dev-deps {{ if partition != "" { "--partition " + partition } else { "" } }} + +######################### Documentation ################################ + +DOCS_DIR := justfile_directory() / "docs" + +# Start the documentation development server +docs-dev: + cd "{{DOCS_DIR}}" && just docs-dev + +# Build the documentation for production +docs-build: + cd "{{DOCS_DIR}}" && just docs-build + +# Preview the built documentation +docs-preview: + cd "{{DOCS_DIR}}" && just docs-preview + +######################### Kona Prestates ############################## + +KONA_DIR := justfile_directory() / "kona" + +# Build all kona prestates +build-kona-prestates: build-kona-cannon-prestate 
build-kona-interop-prestate + +build-kona-cannon-prestate: + @just build-kona-prestate kona-client prestate-artifacts-cannon + +build-kona-interop-prestate: + @just build-kona-prestate kona-client-int prestate-artifacts-cannon-interop + +build-kona-prestate VARIANT OUTPUT_DIR: + #!/usr/bin/env bash + set -euo pipefail + + echo "Building prestate for {{VARIANT}}..." + cd "{{KONA_DIR}}/docker/fpvm-prestates" + just cannon {{VARIANT}} "{{KONA_DIR}}/{{OUTPUT_DIR}}" + + cd "{{KONA_DIR}}" + + # Copy with hash-based name for challenger lookup + HASH=$(jq -r .pre "{{OUTPUT_DIR}}/prestate-proof.json") + cp "{{OUTPUT_DIR}}/prestate.bin.gz" "{{OUTPUT_DIR}}/${HASH}.bin.gz" + echo "Prestate for {{VARIANT}}: ${HASH}" + +build-kona-reproducible-prestate: + @just build-kona-prestates + +output-kona-prestate-hash: + @echo "-------------------- Kona Prestates --------------------" + @echo "" + @echo "Cannon Absolute prestate hash:" + @jq -r .pre {{KONA_DIR}}/prestate-artifacts-cannon/prestate-proof.json + @echo "" + @echo "Cannon Interop Absolute prestate hash:" + @jq -r .pre {{KONA_DIR}}/prestate-artifacts-cannon-interop/prestate-proof.json + @echo "" + +reproducible-kona-prestate: build-kona-reproducible-prestate output-kona-prestate-hash + +clean-kona-prestates: + #!/usr/bin/env bash + set -euo pipefail + rm -rf "{{KONA_DIR}}/build" + rm -rf "{{KONA_DIR}}/prestate-artifacts-cannon" "{{KONA_DIR}}/prestate-artifacts-cannon-interop" diff --git a/kona/.config/changelog.sh b/rust/kona/.config/changelog.sh similarity index 100% rename from kona/.config/changelog.sh rename to rust/kona/.config/changelog.sh diff --git a/kona/.config/nextest.toml b/rust/kona/.config/nextest.toml similarity index 100% rename from kona/.config/nextest.toml rename to rust/kona/.config/nextest.toml diff --git a/kona/.config/zepter.yaml b/rust/kona/.config/zepter.yaml similarity index 100% rename from kona/.config/zepter.yaml rename to rust/kona/.config/zepter.yaml diff --git a/rust/kona/.dockerignore 
b/rust/kona/.dockerignore new file mode 100644 index 00000000000..98a662e11a9 --- /dev/null +++ b/rust/kona/.dockerignore @@ -0,0 +1,7 @@ +target/ +book/ +assets/ +monorepo/ +.config/ +.github/ +tests/ diff --git a/kona/.gitignore b/rust/kona/.gitignore similarity index 100% rename from kona/.gitignore rename to rust/kona/.gitignore diff --git a/kona/CHANGELOG.md b/rust/kona/CHANGELOG.md similarity index 100% rename from kona/CHANGELOG.md rename to rust/kona/CHANGELOG.md diff --git a/kona/CLAUDE.md b/rust/kona/CLAUDE.md similarity index 100% rename from kona/CLAUDE.md rename to rust/kona/CLAUDE.md diff --git a/rust/kona/CONTRIBUTING.md b/rust/kona/CONTRIBUTING.md new file mode 100644 index 00000000000..15a04a29a1a --- /dev/null +++ b/rust/kona/CONTRIBUTING.md @@ -0,0 +1,31 @@ +# Contributing + +Thank you for wanting to contribute! Before contributing to this repository, +please read through this document and discuss the change you wish to make via issue. + +## Dependencies + +Before working with this repository locally, you'll need to install the following dependencies. + +- [just][just] for our command-runner scripts. +- The [Rust toolchain][rust] + +## Pull Request Process + +1. Before anything, [create an issue][create-an-issue] to discuss the change you're + wanting to make, if it is significant or changes functionality. Feel free to skip this step for trivial changes. +1. Once your change is implemented, ensure that all checks are passing before creating a PR. The full CI pipeline can + be run locally via the `justfile`s in the repository. +1. Make sure to update any documentation that has gone stale as a result of the change, in the `README` files, the [book][book], + and in rustdoc comments. +1. Once you have sign-off from a maintainer, you may merge your pull request yourself if you have permissions to do so. + If not, the maintainer who approves your pull request will add it to the merge queue. 
+ + + +[just]: https://github.com/casey/just +[rust]: https://rustup.rs/ + +[book]: https://rollup.yoga + +[create-an-issue]: https://github.com/ethereum-optimism/optimism/issues/new diff --git a/kona/LICENSE.md b/rust/kona/LICENSE.md similarity index 100% rename from kona/LICENSE.md rename to rust/kona/LICENSE.md diff --git a/rust/kona/README.md b/rust/kona/README.md new file mode 100644 index 00000000000..ea3bd328e9f --- /dev/null +++ b/rust/kona/README.md @@ -0,0 +1,173 @@ +

+Kona +

+ +

+ The Monorepo for OP Stack Types, Components, and Services built in Rust. +

+ +

+ What's Kona? • + Overview • + MSRV • + Contributing • + Credits • + License +

+ +# 🚧 Important information + +We have taken the decision to move `Kona` to `https://github.com/ethereum-optimism/optimism`. Once `https://github.com/ethereum-optimism/optimism/pull/18569` gets merged, `op-rs` operations are going to be transferred to `ethereum-optimism/optimism`. + +This repository will get archived mid January 2026. + + +The commit/contribution history will be maintained, thanks for contributing to `ethereum-optimism/optimism` and we can't wait to see you in `https://github.com/ethereum-optimism/optimism`! + + +## What's Kona? + +Originally a suite of portable implementations of the OP Stack rollup state transition, +Kona has been extended to be _the monorepo_ for OP Stack +types, components, and services built in Rust. Kona provides an ecosystem of extensible, low-level +crates that compose into components and services required for the OP Stack. + +The [docs][site] contains a more in-depth overview of the project, contributor guidelines, tutorials for +getting started with building your own programs, and a reference for the libraries and tools provided by Kona. + +## Overview + +> [!NOTE] +> +> Ethereum (Alloy) types modified for the OP Stack live in [op-alloy](https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy). + +**Binaries** + +- [`client`](./bin/client): The bare-metal program that executes the state transition, to be run on a prover. +- [`host`](./bin/host): The host program that runs natively alongside the prover, serving as the [Preimage Oracle][g-preimage-oracle] server. +- [`node`](./bin/node): [WIP] A [Rollup Node][rollup-node-spec] implementation, backed by [`kona-derive`](./crates/protocol/derive). Supports flexible chain ID specification via `--l2-chain-id` using either numeric IDs (`10`) or chain names (`optimism`). +- [`supervisor`](./bin/supervisor): [WIP] A [Supervisor][supervisor-spec] implementation. + +**Protocol** + +- [`genesis`](./crates/protocol/genesis): Genesis types for OP Stack chains. 
+- [`protocol`](./crates/protocol/protocol): Core protocol types used across OP Stack rust crates. +- [`derive`](./crates/protocol/derive): `no_std` compatible implementation of the [derivation pipeline][g-derivation-pipeline]. +- [`driver`](./crates/proof/driver): Stateful derivation pipeline driver. +- [`interop`](./crates/protocol/interop): Core functionality and primitives for the [Interop feature](https://specs.optimism.io/interop/overview.html) of the OP Stack. +- [`registry`](./crates/protocol/registry): Rust bindings for the [superchain-registry][superchain-registry]. +- [`comp`](./crates/batcher/comp): Compression types for the OP Stack. +- [`hardforks`](./crates/protocol/hardforks): Consensus layer hardfork types for the OP Stack including network upgrade transactions. + +**Proof** + +- [`mpt`](./crates/proof/mpt): Utilities for interacting with the Merkle Patricia Trie in the client program. +- [`executor`](./crates/proof/executor): `no_std` stateless block executor for the [OP Stack][op-stack]. +- [`proof`](./crates/proof/proof): High level OP Stack state transition proof SDK. +- [`proof-interop`](./crates/proof/proof-interop): Extension of `kona-proof` with interop support. +- [`preimage`](./crates/proof/preimage): High level interfaces to the [`PreimageOracle`][fpp-specs] ABI. +- [`std-fpvm`](./crates/proof/std-fpvm): Platform specific [Fault Proof VM][g-fault-proof-vm] kernel APIs. +- [`std-fpvm-proc`](./crates/proof/std-fpvm-proc): Proc macro for [Fault Proof Program][fpp-specs] entrypoints. + +**Node** + +- [`service`](./crates/node/service): The OP Stack rollup node service. +- [`engine`](./crates/node/engine): An extensible implementation of the [OP Stack][op-stack] rollup node engine client +- [`rpc`](./crates/node/rpc): OP Stack RPC types and extensions. +- [`gossip`](./crates/node/gossip): OP Stack P2P Networking - Gossip. +- [`disc`](./crates/node/disc): OP Stack P2P Networking - Discovery. 
+- [`peers`](./crates/node/peers): Networking Utilities ported from reth.
+- [`sources`](./crates/node/sources): Data source types and utilities for the kona-node.
+
+**Providers**
+
+- [`providers-alloy`](./crates/providers/providers-alloy): Provider implementations for `kona-derive` backed by [Alloy][alloy].
+
+**Utilities**
+
+- [`serde`](./crates/utilities/serde): Serialization helpers.
+- [`cli`](./crates/utilities/cli): Standard CLI utilities, used across `kona`'s binaries.
+- [`macros`](./crates/utilities/macros): Utility macros.
+
+### Proof
+
+Built on top of these libraries, this repository also features a [proof program][fpp-specs]
+designed to deterministically execute the rollup state transition in order to verify an
+[L2 output root][g-output-root] from the L1 inputs it was [derived from][g-derivation-pipeline].
+
+Kona's libraries were built with alternative backend support and extensibility in mind — the repository features
+a fault proof virtual machine backend for use in the governance-approved OP Stack, but it's portable across
+provers! Kona is also used by:
+
+- [`op-succinct`][op-succinct]
+- [`kailua`][kailua]
+
+To build your own backend for kona, or build a new application on top of its libraries,
+see the [SDK section of the docs](https://rollup.yoga/node/design/intro).
+
+## MSRV
+
+The current MSRV (minimum supported Rust version) is `1.88`.
+
+The MSRV is not increased automatically, and will be updated
+only as part of a patch (pre-1.0) or minor (post-1.0) release.
+
+
+## Crate Releases
+
+`kona` releases are done using the [`cargo-release`](https://crates.io/crates/cargo-release) crate.
+A detailed guide is available in [./RELEASES.md](./RELEASES.md).
+
+
+## Contributing
+
+`kona` is built by open source contributors like you — thank you for improving the project!
+
+A [contributing guide][contributing] is available that sets guidelines for contributing.
+ +Pull requests will not be merged unless CI passes, so please ensure that your contribution +follows the linting rules and passes clippy. + + +## Credits + +`kona` is inspired by the work of several teams, namely [OP Labs][op-labs] and other contributors' work on the +[Optimism monorepo][op-go-monorepo] and [BadBoiLabs][bad-boi-labs]'s work on [Cannon-rs][badboi-cannon-rs]. + +`kona` is also built on rust types in [alloy][alloy], [op-alloy][op-alloy], and [maili][maili]. + +## License + +Licensed under the [MIT license.](https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/LICENSE.md) + +> [!NOTE] +> +> Contributions intentionally submitted for inclusion in these crates by you +> shall be licensed as above, without any additional terms or conditions. + + + + +[alloy]: https://github.com/alloy-rs/alloy +[maili]: https://github.com/op-rs/maili +[op-alloy]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy +[contributing]: https://rollup.yoga/intro/contributing +[op-stack]: https://github.com/ethereum-optimism/optimism +[superchain-registry]: https://github.com/ethereum-optimism/superchain-registry +[op-go-monorepo]: https://github.com/ethereum-optimism/optimism/tree/develop +[cannon]: https://github.com/ethereum-optimism/optimism/tree/develop/cannon +[cannon-rs]: https://github.com/op-rs/cannon-rs +[rollup-node-spec]: https://specs.optimism.io/protocol/rollup-node.html +[supervisor-spec]: https://specs.optimism.io/interop/supervisor.html +[badboi-cannon-rs]: https://github.com/BadBoiLabs/cannon-rs +[asterisc]: https://github.com/ethereum-optimism/asterisc +[fpp-specs]: https://specs.optimism.io/fault-proof/index.html +[site]: https://rollup.yoga +[op-succinct]: https://github.com/succinctlabs/op-succinct +[kailua]: https://github.com/risc0/kailua +[op-labs]: https://github.com/ethereum-optimism +[bad-boi-labs]: https://github.com/BadBoiLabs +[g-output-root]: https://specs.optimism.io/glossary.html#l2-output-root 
+[g-derivation-pipeline]: https://specs.optimism.io/protocol/derivation.html#l2-chain-derivation-pipeline +[g-fault-proof-vm]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-vm +[g-preimage-oracle]: https://specs.optimism.io/fault-proof/index.html#pre-image-oracle diff --git a/kona/RELEASES.md b/rust/kona/RELEASES.md similarity index 100% rename from kona/RELEASES.md rename to rust/kona/RELEASES.md diff --git a/kona/SECURITY.md b/rust/kona/SECURITY.md similarity index 100% rename from kona/SECURITY.md rename to rust/kona/SECURITY.md diff --git a/kona/docs/docs/public/banner.png b/rust/kona/assets/banner.png similarity index 100% rename from kona/docs/docs/public/banner.png rename to rust/kona/assets/banner.png diff --git a/kona/docs/docs/public/favicon.ico b/rust/kona/assets/favicon.ico similarity index 100% rename from kona/docs/docs/public/favicon.ico rename to rust/kona/assets/favicon.ico diff --git a/kona/assets/square.png b/rust/kona/assets/square.png similarity index 100% rename from kona/assets/square.png rename to rust/kona/assets/square.png diff --git a/kona/bin/client/Cargo.toml b/rust/kona/bin/client/Cargo.toml similarity index 100% rename from kona/bin/client/Cargo.toml rename to rust/kona/bin/client/Cargo.toml diff --git a/kona/bin/client/README.md b/rust/kona/bin/client/README.md similarity index 100% rename from kona/bin/client/README.md rename to rust/kona/bin/client/README.md diff --git a/rust/kona/bin/client/justfile b/rust/kona/bin/client/justfile new file mode 100644 index 00000000000..1d2a2656699 --- /dev/null +++ b/rust/kona/bin/client/justfile @@ -0,0 +1,297 @@ +set fallback := true + +KONA_CLIENT_ROOT := source_directory() + +# default recipe to display help information +default: + @just --list + +# Run the client program on asterisc with the host in detached server mode. 
+run-client-asterisc block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc verbosity='': + #!/usr/bin/env bash + + L1_NODE_ADDRESS="{{l1_rpc}}" + L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" + L2_NODE_ADDRESS="{{l2_rpc}}" + OP_NODE_ADDRESS="{{rollup_node_rpc}}" + + HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/release/kona-host" + CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/riscv64imac-unknown-none-elf/release-client-lto/kona-client" + STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." + + # Get output root for block + CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) + + # Get the info for the previous block + AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) + AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) + L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) + L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) + L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Building client program for RISC-V target..." + just build-asterisc-client + + echo "Loading client program into Asterisc state format..." + asterisc load-elf --path=$CLIENT_BIN_PATH + + echo "Building host program for native target..." 
+ cargo build --bin kona-host --release + + echo "Running asterisc" + asterisc run \ + --info-at '%10000000' \ + --proof-at never \ + --input $STATE_PATH \ + -- \ + $HOST_BIN_PATH \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l2-chain-id $L2_CHAIN_ID \ + --l1-node-address $L1_NODE_ADDRESS \ + --l1-beacon-address $L1_BEACON_ADDRESS \ + --l2-node-address $L2_NODE_ADDRESS \ + --server \ + --data-dir ./data \ + {{verbosity}} + +# Run the client program natively with the host program attached. +run-client-native block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollup_config_path='' verbosity='': + #!/usr/bin/env bash + set -o errexit -o nounset -o pipefail + + L1_NODE_ADDRESS="{{l1_rpc}}" + L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" + L2_NODE_ADDRESS="{{l2_rpc}}" + OP_NODE_ADDRESS="{{rollup_node_rpc}}" + + L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) + if [ -z "{{rollup_config_path}}" ]; then + CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--l2-chain-id $L2_CHAIN_ID" + else + CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--rollup-config-path $(realpath {{rollup_config_path}})" + fi + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." 
+ + # Get output root for block + CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) + + # Get the info for the previous block + AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) + AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) + L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) + L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Running host program with native client program..." + cargo r --bin kona-host --release -- \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l1-node-address $L1_NODE_ADDRESS \ + --l1-beacon-address $L1_BEACON_ADDRESS \ + --l2-node-address $L2_NODE_ADDRESS \ + --native \ + --data-dir ./data \ + $CHAIN_ID_OR_ROLLUP_CONFIG_ARG \ + {{verbosity}} + +# Run the client program natively with the host program attached, in offline mode. +run-client-native-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': + #!/usr/bin/env bash + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} + AGREED_L2_OUTPUT_ROOT={{l2_output_root}} + AGREED_L2_HEAD_HASH={{l2_head}} + L1_HEAD={{l1_head}} + L2_CHAIN_ID={{l2_chain_id}} + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Running host program with native client program..." 
+ cargo r --bin kona-host -- \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l2-chain-id $L2_CHAIN_ID \ + --native \ + --data-dir ./data \ + {{verbosity}} + +# Run the client program on asterisc with the host program detached, in offline mode. +run-client-asterisc-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': + #!/usr/bin/env bash + + HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/debug/kona-host" + CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/riscv64imac-unknown-none-elf/release-client-lto/kona-client" + STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} + AGREED_L2_OUTPUT_ROOT={{l2_output_root}} + AGREED_L2_HEAD_HASH={{l2_head}} + L1_HEAD={{l1_head}} + L2_CHAIN_ID={{l2_chain_id}} + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Building client program for RISC-V target..." + just build-asterisc-client + + echo "Loading client program into Asterisc state format..." + asterisc load-elf --path=$CLIENT_BIN_PATH + + echo "Building host program for native target..." + cargo build --bin kona-host + + echo "Running asterisc" + asterisc run \ + --info-at '%10000000' \ + --proof-at never \ + --input $STATE_PATH \ + -- \ + $HOST_BIN_PATH \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l2-chain-id $L2_CHAIN_ID \ + --server \ + --data-dir ./data \ + {{verbosity}} + +# Run the client program on cannon with the host in detached server mode. 
+run-client-cannon block_number l1_rpc l1_beacon_rpc l2_rpc rollup_node_rpc rollup_config_path='' verbosity='': + #!/usr/bin/env bash + set -o errexit -o nounset -o pipefail + + HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/release/kona-host" + CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/mips64-unknown-none/release-client-lto/kona-client" + STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" + + L1_NODE_ADDRESS="{{l1_rpc}}" + L1_BEACON_ADDRESS="{{l1_beacon_rpc}}" + L2_NODE_ADDRESS="{{l2_rpc}}" + OP_NODE_ADDRESS="{{rollup_node_rpc}}" + + L2_CHAIN_ID=$(cast chain-id --rpc-url $L2_NODE_ADDRESS) + if [ -z "{{rollup_config_path}}" ]; then + CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--l2-chain-id $L2_CHAIN_ID" + else + CHAIN_ID_OR_ROLLUP_CONFIG_ARG="--rollup-config-path $(realpath {{rollup_config_path}})" + fi + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + echo "Fetching configuration for block #$CLAIMED_L2_BLOCK_NUMBER..." + + # Get output root for block + CLAIMED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $CLAIMED_L2_BLOCK_NUMBER) | jq -r .outputRoot) + + # Get the info for the previous block + AGREED_L2_OUTPUT_ROOT=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .outputRoot) + AGREED_L2_HEAD_HASH=$(cast block --rpc-url $L2_NODE_ADDRESS $((CLAIMED_L2_BLOCK_NUMBER - 1)) --json | jq -r .hash) + L1_ORIGIN_NUM=$(cast rpc --rpc-url $OP_NODE_ADDRESS "optimism_outputAtBlock" $(cast 2h $((CLAIMED_L2_BLOCK_NUMBER - 1))) | jq -r .blockRef.l1origin.number) + L1_HEAD=$(cast block --rpc-url $L1_NODE_ADDRESS $((L1_ORIGIN_NUM + 30)) --json | jq -r .hash) + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Building client program for MIPS64 target..." + just build-cannon-client + + echo "Loading client program into Cannon state format..." 
+ cannon load-elf --path=$CLIENT_BIN_PATH --type multithreaded64-5 + + echo "Building host program for native target..." + cargo build --bin kona-host --release + + echo "Running cannon" + cannon run \ + --info-at '%10000000' \ + --proof-at never \ + --input $STATE_PATH \ + -- \ + $HOST_BIN_PATH \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l2-chain-id $L2_CHAIN_ID \ + --l1-node-address $L1_NODE_ADDRESS \ + --l1-beacon-address $L1_BEACON_ADDRESS \ + --l2-node-address $L2_NODE_ADDRESS \ + --server \ + --data-dir ./data \ + {{verbosity}} + +# Run the client program on cannon with the host program detached, in offline mode. +run-client-cannon-offline block_number l2_claim l2_output_root l2_head l1_head l2_chain_id verbosity='': + #!/usr/bin/env bash + + HOST_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/debug/kona-host" + CLIENT_BIN_PATH="{{KONA_CLIENT_ROOT}}/../../../target/mips64-unknown-none/release-client-lto/kona-client" + STATE_PATH="{{KONA_CLIENT_ROOT}}/../../state.bin.gz" + + CLAIMED_L2_BLOCK_NUMBER={{block_number}} + CLAIMED_L2_OUTPUT_ROOT={{l2_claim}} + AGREED_L2_OUTPUT_ROOT={{l2_output_root}} + AGREED_L2_HEAD_HASH={{l2_head}} + L1_HEAD={{l1_head}} + L2_CHAIN_ID={{l2_chain_id}} + + # Move to the kona root + cd {{KONA_CLIENT_ROOT}}/../.. + + echo "Building client program for MIPS64 target..." + just build-cannon-client + + echo "Loading client program into Cannon state format..." + cannon load-elf --path=$CLIENT_BIN_PATH --type multithreaded64-5 + + echo "Building host program for native target..." 
+ cargo build --bin kona-host + + echo "Running cannon" + cannon run \ + --info-at '%10000000' \ + --proof-at never \ + --input $STATE_PATH \ + -- \ + $HOST_BIN_PATH \ + single \ + --l1-head $L1_HEAD \ + --agreed-l2-head-hash $AGREED_L2_HEAD_HASH \ + --claimed-l2-output-root $CLAIMED_L2_OUTPUT_ROOT \ + --agreed-l2-output-root $AGREED_L2_OUTPUT_ROOT \ + --claimed-l2-block-number $CLAIMED_L2_BLOCK_NUMBER \ + --l2-chain-id $L2_CHAIN_ID \ + --server \ + --data-dir ./data \ + {{verbosity}} diff --git a/kona/bin/client/src/fpvm_evm/factory.rs b/rust/kona/bin/client/src/fpvm_evm/factory.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/factory.rs rename to rust/kona/bin/client/src/fpvm_evm/factory.rs diff --git a/kona/bin/client/src/fpvm_evm/mod.rs b/rust/kona/bin/client/src/fpvm_evm/mod.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/mod.rs rename to rust/kona/bin/client/src/fpvm_evm/mod.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_add.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_add.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_add.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_add.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_msm.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_msm.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_msm.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g1_msm.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_add.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_add.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_add.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_add.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_msm.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_msm.rs similarity index 100% rename from 
kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_msm.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_g2_msm.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp2.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp2.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp2.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_map_fp2.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bls12_pair.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_pair.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bls12_pair.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bls12_pair.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/bn128_pair.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/bn128_pair.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/bn128_pair.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/bn128_pair.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/ecrecover.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/ecrecover.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/ecrecover.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/ecrecover.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/kzg_point_eval.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/kzg_point_eval.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/kzg_point_eval.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/kzg_point_eval.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/mod.rs 
b/rust/kona/bin/client/src/fpvm_evm/precompiles/mod.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/mod.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/mod.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/provider.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/provider.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/provider.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/provider.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/test_utils.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/test_utils.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/test_utils.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/test_utils.rs diff --git a/kona/bin/client/src/fpvm_evm/precompiles/utils.rs b/rust/kona/bin/client/src/fpvm_evm/precompiles/utils.rs similarity index 100% rename from kona/bin/client/src/fpvm_evm/precompiles/utils.rs rename to rust/kona/bin/client/src/fpvm_evm/precompiles/utils.rs diff --git a/kona/bin/client/src/interop/consolidate.rs b/rust/kona/bin/client/src/interop/consolidate.rs similarity index 100% rename from kona/bin/client/src/interop/consolidate.rs rename to rust/kona/bin/client/src/interop/consolidate.rs diff --git a/kona/bin/client/src/interop/mod.rs b/rust/kona/bin/client/src/interop/mod.rs similarity index 100% rename from kona/bin/client/src/interop/mod.rs rename to rust/kona/bin/client/src/interop/mod.rs diff --git a/kona/bin/client/src/interop/transition.rs b/rust/kona/bin/client/src/interop/transition.rs similarity index 88% rename from kona/bin/client/src/interop/transition.rs rename to rust/kona/bin/client/src/interop/transition.rs index 43103f84c4e..86b6e4ba267 100644 --- a/kona/bin/client/src/interop/transition.rs +++ b/rust/kona/bin/client/src/interop/transition.rs @@ -40,15 +40,15 @@ where FromTxWithEncoded + FromRecoveredTx + OpTxEnv, { // Check if we can short-circuit the transition, if we are 
within padding. - if let PreState::TransitionState(ref transition_state) = boot.agreed_pre_state { - if transition_state.step >= transition_state.pre_state.output_roots.len() as u64 { - info!( - target: "interop_client", - "No derivation/execution required, transition state is already saturated." - ); + if let PreState::TransitionState(ref transition_state) = boot.agreed_pre_state && + transition_state.step >= transition_state.pre_state.output_roots.len() as u64 + { + info!( + target: "interop_client", + "No derivation/execution required, transition state is already saturated." + ); - return transition_and_check(boot.agreed_pre_state, None, boot.claimed_post_state); - } + return transition_and_check(boot.agreed_pre_state, None, boot.claimed_post_state); } // Fetch the L2 block hash of the current safe head. @@ -133,6 +133,19 @@ where // L2 block. match driver.advance_to_target(rollup_config.as_ref(), Some(disputed_l2_block_number)).await { Ok((safe_head, output_root)) => { + // If derivation didn't reach the target, L1 data was insufficient. + if safe_head.block_info.number < disputed_l2_block_number { + warn!( + target: "interop_client", + "Exhausted data source; Transitioning to invalid state." 
+ ); + return (boot.claimed_post_state == INVALID_TRANSITION_HASH).then_some(()).ok_or( + FaultProofProgramError::InvalidClaim( + INVALID_TRANSITION_HASH, + boot.claimed_post_state, + ), + ); + } let optimistic_block = OptimisticBlock::new(safe_head.block_info.hash, output_root); transition_and_check( boot.agreed_pre_state, diff --git a/kona/bin/client/src/interop/util.rs b/rust/kona/bin/client/src/interop/util.rs similarity index 100% rename from kona/bin/client/src/interop/util.rs rename to rust/kona/bin/client/src/interop/util.rs diff --git a/kona/bin/client/src/kona.rs b/rust/kona/bin/client/src/kona.rs similarity index 96% rename from kona/bin/client/src/kona.rs rename to rust/kona/bin/client/src/kona.rs index 944cf98f1c4..972c26e3b06 100644 --- a/kona/bin/client/src/kona.rs +++ b/rust/kona/bin/client/src/kona.rs @@ -1,7 +1,7 @@ #![doc = include_str!("../README.md")] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(docsrs, feature(doc_cfg))] #![no_std] #![cfg_attr(any(target_arch = "mips64", target_arch = "riscv64"), no_main)] diff --git a/kona/bin/client/src/kona_interop.rs b/rust/kona/bin/client/src/kona_interop.rs similarity index 96% rename from kona/bin/client/src/kona_interop.rs rename to rust/kona/bin/client/src/kona_interop.rs index 482e8cbcfa2..874f6abd5db 100644 --- a/kona/bin/client/src/kona_interop.rs +++ b/rust/kona/bin/client/src/kona_interop.rs @@ -1,7 +1,7 @@ #![doc = include_str!("../README.md")] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(docsrs, feature(doc_cfg))] #![no_std] #![cfg_attr(any(target_arch = "mips64", target_arch = "riscv64"), no_main)] diff --git a/rust/kona/bin/client/src/lib.rs b/rust/kona/bin/client/src/lib.rs new file mode 100644 
index 00000000000..15b4e25a32c --- /dev/null +++ b/rust/kona/bin/client/src/lib.rs @@ -0,0 +1,12 @@ +#![doc = include_str!("../README.md")] +#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] +#![deny(unused_must_use, rust_2018_idioms)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![allow(clippy::type_complexity)] +#![cfg_attr(not(test), no_std)] + +extern crate alloc; + +pub mod fpvm_evm; +pub mod interop; +pub mod single; diff --git a/kona/bin/client/src/single.rs b/rust/kona/bin/client/src/single.rs similarity index 100% rename from kona/bin/client/src/single.rs rename to rust/kona/bin/client/src/single.rs diff --git a/kona/bin/client/testdata/holocene-op-sepolia-26215604-witness.tar.zst b/rust/kona/bin/client/testdata/holocene-op-sepolia-26215604-witness.tar.zst similarity index 100% rename from kona/bin/client/testdata/holocene-op-sepolia-26215604-witness.tar.zst rename to rust/kona/bin/client/testdata/holocene-op-sepolia-26215604-witness.tar.zst diff --git a/rust/kona/bin/host/Cargo.toml b/rust/kona/bin/host/Cargo.toml new file mode 100644 index 00000000000..06fe88b9cb2 --- /dev/null +++ b/rust/kona/bin/host/Cargo.toml @@ -0,0 +1,81 @@ +[package] +name = "kona-host" +version = "1.0.2" +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true +publish = false + +[lints] +workspace = true + +[dependencies] +# Proof +kona-mpt.workspace = true +kona-client.workspace = true +kona-executor.workspace = true +kona-std-fpvm.workspace = true +kona-proof-interop.workspace = true +kona-proof = { workspace = true, features = ["std"] } +kona-preimage = { workspace = true, features = ["std"] } + +# Protocol +kona-driver.workspace = true +kona-derive.workspace = true +kona-registry.workspace = true +kona-protocol = { workspace = true, features = ["std", "serde"] } +kona-genesis = { workspace = true, features = ["std", "serde"] } + +# Services +kona-cli.workspace = true 
+kona-providers-alloy.workspace = true + +# Alloy +alloy-rlp.workspace = true +alloy-transport.workspace = true +alloy-eips = { workspace = true, features = ["kzg"] } +alloy-serde.workspace = true +alloy-provider = { workspace = true, features = ["reqwest"] } +alloy-consensus = { workspace = true, features = ["std"] } +alloy-rpc-client.workspace = true +alloy-transport-http.workspace = true +alloy-rpc-types = { workspace = true, features = ["eth", "debug"] } +alloy-primitives = { workspace = true, features = ["serde"] } +alloy-rpc-types-beacon.workspace = true + +# Op Alloy +op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } +op-alloy-network.workspace = true +alloy-op-evm = { workspace = true, features = ["std"] } + +# Revm +revm = { workspace = true, features = ["std", "c-kzg", "secp256k1", "portable", "blst"] } + +# General +anyhow.workspace = true +tracing.workspace = true +serde_json.workspace = true +async-trait.workspace = true +rocksdb = { workspace = true, features = ["snappy", "bindgen-runtime"] } +tokio = { workspace = true, features = ["full"] } +serde = { workspace = true, features = ["derive"] } +clap = { workspace = true, features = ["derive", "env"] } +tracing-subscriber = { workspace = true, features = ["fmt"] } +thiserror.workspace = true + +# KZG +ark-ff.workspace = true + +[dev-dependencies] +proptest.workspace = true + +[features] +default = [ "interop", "single" ] +single = [] +interop = [ "single" ] + +[[bin]] +name = "kona-host" +path = "src/bin/host.rs" diff --git a/kona/bin/host/README.md b/rust/kona/bin/host/README.md similarity index 100% rename from kona/bin/host/README.md rename to rust/kona/bin/host/README.md diff --git a/kona/bin/host/src/backend/mod.rs b/rust/kona/bin/host/src/backend/mod.rs similarity index 100% rename from kona/bin/host/src/backend/mod.rs rename to rust/kona/bin/host/src/backend/mod.rs diff --git a/kona/bin/host/src/backend/offline.rs b/rust/kona/bin/host/src/backend/offline.rs similarity index 
81% rename from kona/bin/host/src/backend/offline.rs rename to rust/kona/bin/host/src/backend/offline.rs index a2ca1e531fd..17e4a4198e7 100644 --- a/kona/bin/host/src/backend/offline.rs +++ b/rust/kona/bin/host/src/backend/offline.rs @@ -1,4 +1,4 @@ -//! Contains the implementations of the [HintRouter] and [PreimageFetcher] traits. +//! Contains the implementations of the [`HintRouter`] and [`PreimageFetcher`] traits. use crate::kv::KeyValueStore; use async_trait::async_trait; @@ -9,7 +9,7 @@ use kona_preimage::{ use std::sync::Arc; use tokio::sync::RwLock; -/// A [KeyValueStore]-backed implementation of the [PreimageFetcher] trait. +/// A [`KeyValueStore`]-backed implementation of the [`PreimageFetcher`] trait. #[derive(Debug)] pub struct OfflineHostBackend where @@ -22,7 +22,7 @@ impl OfflineHostBackend where KV: KeyValueStore + ?Sized, { - /// Create a new [OfflineHostBackend] from the given [KeyValueStore]. + /// Create a new [`OfflineHostBackend`] from the given [`KeyValueStore`]. pub const fn new(kv_store: Arc>) -> Self { Self { inner: kv_store } } diff --git a/kona/bin/host/src/backend/online.rs b/rust/kona/bin/host/src/backend/online.rs similarity index 86% rename from kona/bin/host/src/backend/online.rs rename to rust/kona/bin/host/src/backend/online.rs index 4b151ff0177..ca6127b0fe8 100644 --- a/kona/bin/host/src/backend/online.rs +++ b/rust/kona/bin/host/src/backend/online.rs @@ -1,4 +1,4 @@ -//! Contains the [OnlineHostBackend] definition. +//! Contains the [`OnlineHostBackend`] definition. use crate::SharedKeyValueStore; use anyhow::Result; @@ -12,8 +12,8 @@ use std::{collections::HashSet, hash::Hash, str::FromStr, sync::Arc}; use tokio::sync::RwLock; use tracing::{debug, error, trace}; -/// The [OnlineHostBackendCfg] trait is used to define the type configuration for the -/// [OnlineHostBackend]. +/// The [`OnlineHostBackendCfg`] trait is used to define the type configuration for the +/// [`OnlineHostBackend`]. 
pub trait OnlineHostBackendCfg { /// The hint type describing the range of hints that can be received. type HintType: FromStr + Hash + Eq + PartialEq + Clone + Send + Sync; @@ -22,11 +22,11 @@ pub trait OnlineHostBackendCfg { type Providers: Send + Sync; } -/// A [HintHandler] is an interface for receiving hints, fetching remote data, and storing it in the -/// key-value store. +/// A [`HintHandler`] is an interface for receiving hints, fetching remote data, and storing it in +/// the key-value store. #[async_trait] pub trait HintHandler { - /// The type configuration for the [HintHandler]. + /// The type configuration for the [`HintHandler`]. type Cfg: OnlineHostBackendCfg; /// Fetches data in response to a hint. @@ -38,8 +38,8 @@ pub trait HintHandler { ) -> Result<()>; } -/// The [OnlineHostBackend] is a [HintRouter] and [PreimageFetcher] that is used to fetch data from -/// remote sources in response to hints. +/// The [`OnlineHostBackend`] is a [`HintRouter`] and [`PreimageFetcher`] that is used to fetch data +/// from remote sources in response to hints. /// /// [PreimageKey]: kona_preimage::PreimageKey #[allow(missing_debug_implementations)] @@ -58,7 +58,7 @@ where proactive_hints: HashSet, /// The last hint that was received. last_hint: Arc>>>, - /// Phantom marker for the [HintHandler]. + /// Phantom marker for the [`HintHandler`]. _hint_handler: std::marker::PhantomData, } @@ -67,7 +67,7 @@ where C: OnlineHostBackendCfg, H: HintHandler, { - /// Creates a new [HintHandler] with the given configuration, key-value store, providers, and + /// Creates a new [`HintHandler`] with the given configuration, key-value store, providers, and /// external configuration. pub fn new(cfg: C, kv: SharedKeyValueStore, providers: C::Providers, _: H) -> Self { Self { @@ -80,7 +80,7 @@ where } } - /// Adds a new proactive hint to the [OnlineHostBackend]. + /// Adds a new proactive hint to the [`OnlineHostBackend`]. 
pub fn with_proactive_hint(mut self, hint_type: C::HintType) -> Self { self.proactive_hints.insert(hint_type); self diff --git a/rust/kona/bin/host/src/backend/util.rs b/rust/kona/bin/host/src/backend/util.rs new file mode 100644 index 00000000000..ed74d335dfa --- /dev/null +++ b/rust/kona/bin/host/src/backend/util.rs @@ -0,0 +1,40 @@ +//! Utilities for the preimage server backend. + +use crate::{KeyValueStore, Result}; +use alloy_consensus::EMPTY_ROOT_HASH; +use alloy_primitives::keccak256; +use alloy_rlp::EMPTY_STRING_CODE; +use kona_preimage::{PreimageKey, PreimageKeyType}; +use tokio::sync::RwLock; + +/// Constructs a merkle patricia trie from the ordered list passed and stores all encoded +/// intermediate nodes of the trie in the [`KeyValueStore`]. +pub(crate) async fn store_ordered_trie>( + kv: &RwLock, + values: &[T], +) -> Result<()> { + let mut kv_write_lock = kv.write().await; + + // If the list of nodes is empty, store the empty root hash and exit early. + // The `HashBuilder` will not push the preimage of the empty root hash to the + // `ProofRetainer` in the event that there are no leaves inserted. 
+ if values.is_empty() { + let empty_key = PreimageKey::new(*EMPTY_ROOT_HASH, PreimageKeyType::Keccak256); + return kv_write_lock.set(empty_key.into(), [EMPTY_STRING_CODE].into()); + } + + let mut hb = kona_mpt::ordered_trie_with_encoder(values, |node, buf| { + buf.put_slice(node.as_ref()); + }); + hb.root(); + let intermediates = hb.take_proof_nodes().into_inner(); + + for (_, value) in intermediates { + let value_hash = keccak256(value.as_ref()); + let key = PreimageKey::new(*value_hash, PreimageKeyType::Keccak256); + + kv_write_lock.set(key.into(), value.into())?; + } + + Ok(()) +} diff --git a/kona/bin/host/src/bin/host.rs b/rust/kona/bin/host/src/bin/host.rs similarity index 97% rename from kona/bin/host/src/bin/host.rs rename to rust/kona/bin/host/src/bin/host.rs index a7f27ca5cf6..67c4deff394 100644 --- a/kona/bin/host/src/bin/host.rs +++ b/rust/kona/bin/host/src/bin/host.rs @@ -2,7 +2,7 @@ #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] +#![cfg_attr(docsrs, feature(doc_cfg))] use anyhow::Result; use clap::{Parser, Subcommand}; diff --git a/rust/kona/bin/host/src/error.rs b/rust/kona/bin/host/src/error.rs new file mode 100644 index 00000000000..8a2cc7d4aff --- /dev/null +++ b/rust/kona/bin/host/src/error.rs @@ -0,0 +1,142 @@ +//! Error types for the host binary. + +use alloy_rlp::Error as RlpError; +use alloy_transport::TransportError; +use kona_preimage::errors::PreimageOracleError; +use std::array::TryFromSliceError; +use thiserror::Error; + +/// Result type for host operations. +pub type Result = std::result::Result; + +/// Error type for host operations. +#[derive(Debug, Error)] +pub enum HostError { + /// A custom error message. + #[error("{0}")] + Custom(String), + + /// Block not found error. + #[error("Block not found")] + BlockNotFound, + + /// Invalid hint data length. 
+ #[error("Invalid hint data length")] + InvalidHintDataLength, + + /// Precompile not accelerated. + #[error("Precompile not accelerated")] + PrecompileNotAccelerated, + + /// Failed precompile execution. + #[error("Failed precompile execution: {0}")] + PrecompileExecutionFailed(String), + + /// No rollup config found for chain ID. + #[error("No rollup config found for chain ID: {0}")] + NoRollupConfig(u64), + + /// Output root mismatch. + #[error("Output root does not match L2 head")] + OutputRootMismatch, + + /// Agreed pre-state hash mismatch. + #[error("Agreed pre-state hash does not match")] + AgreedPreStateHashMismatch, + + /// Expected blob count mismatch. + #[error("Expected {expected} blob(s), got {actual}")] + BlobCountMismatch { + /// Expected blob count. + expected: usize, + /// Actual blob count. + actual: usize, + }, + + /// Expected sidecar count mismatch. + #[error("Expected {expected} sidecar(s), got {actual}")] + SidecarCountMismatch { + /// Expected sidecar count. + expected: usize, + /// Actual sidecar count. + actual: usize, + }, + + /// No artifacts found for safe head. + #[error("No artifacts found for the safe head")] + NoArtifactsForSafeHead, + + /// Failed to fetch blob sidecars. + #[error("Failed to fetch blob sidecars: {0}")] + BlobSidecarFetchFailed(String), + + /// Failed to set key-value pair. + #[error("Failed to set key-value pair: {0}")] + KeyValueSetFailed(String), + + /// Failed to convert slice to B256. + #[error("Failed to convert slice to B256: {0}")] + B256ConversionFailed(String), + + /// Failed to fetch header RLP. + #[error("Failed to fetch header RLP: {0}")] + HeaderRlpFetchFailed(String), + + /// Error fetching code hash preimage. + #[error("Error fetching code hash preimage: {0}")] + CodeHashPreimageFetchFailed(String), + + /// Transport error. + #[error("Transport error: {0}")] + Transport(#[from] TransportError), + + /// RLP decoding error. 
+ #[error("RLP decoding error: {0}")] + Rlp(#[from] RlpError), + + /// `TryFromSlice` error. + #[error("TryFromSlice error: {0}")] + TryFromSlice(#[from] TryFromSliceError), + + /// Serde JSON error. + #[error("Serde JSON error: {0}")] + SerdeJson(#[from] serde_json::Error), + + /// `RocksDB` error. + #[error("RocksDB error: {0}")] + RocksDb(String), + + /// Preimage oracle error. + #[error("Preimage oracle error: {0}")] + PreimageOracle(#[from] PreimageOracleError), + + /// Kona derive error. + #[error("Kona derive error: {0}")] + KonaDerive(String), + + /// Kona executor error. + #[error("Kona executor error: {0}")] + KonaExecutor(String), + + /// IO error. + #[error("IO error: {0}")] + Io(#[from] std::io::Error), +} + +impl From for HostError { + fn from(err: rocksdb::Error) -> Self { + Self::RocksDb(err.to_string()) + } +} + +impl From for HostError { + fn from(err: kona_derive::PipelineError) -> Self { + Self::KonaDerive(err.to_string()) + } +} + +impl From for HostError { + fn from(err: kona_executor::ExecutorError) -> Self { + Self::KonaExecutor(err.to_string()) + } +} diff --git a/kona/bin/host/src/eth/mod.rs b/rust/kona/bin/host/src/eth/mod.rs similarity index 100% rename from kona/bin/host/src/eth/mod.rs rename to rust/kona/bin/host/src/eth/mod.rs diff --git a/kona/bin/host/src/eth/precompiles.rs b/rust/kona/bin/host/src/eth/precompiles.rs similarity index 100% rename from kona/bin/host/src/eth/precompiles.rs rename to rust/kona/bin/host/src/eth/precompiles.rs diff --git a/rust/kona/bin/host/src/interop/cfg.rs b/rust/kona/bin/host/src/interop/cfg.rs new file mode 100644 index 00000000000..50c62198ed6 --- /dev/null +++ b/rust/kona/bin/host/src/interop/cfg.rs @@ -0,0 +1,379 @@ +//! This module contains all CLI-specific code for the interop entrypoint. 
+ +use super::{InteropHintHandler, InteropLocalInputs}; +use crate::{ + DiskKeyValueStore, MemoryKeyValueStore, OfflineHostBackend, OnlineHostBackend, + OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, SplitKeyValueStore, + eth::rpc_provider, server::PreimageServerError, +}; +use alloy_primitives::{B256, Bytes}; +use alloy_provider::{Provider, RootProvider}; +use clap::Parser; +use kona_cli::cli_styles; +use kona_genesis::{L1ChainConfig, RollupConfig}; +use kona_preimage::{ + BidirectionalChannel, Channel, HintReader, HintWriter, OracleReader, OracleServer, +}; +use kona_proof_interop::HintType; +use kona_providers_alloy::{OnlineBeaconClient, OnlineBlobProvider}; +use kona_std_fpvm::{FileChannel, FileDescriptor}; +use op_alloy_network::Optimism; +use serde::Serialize; +use std::{collections::HashMap, path::PathBuf, str::FromStr, sync::Arc}; +use tokio::{ + sync::RwLock, + task::{self, JoinHandle}, +}; + +/// The interop host application. +#[derive(Default, Parser, Serialize, Clone, Debug)] +#[command(styles = cli_styles())] +pub struct InteropHost { + /// Hash of the L1 head block, marking a static, trusted cutoff point for reading data from the + /// L1 chain. + #[arg(long, env)] + pub l1_head: B256, + /// Agreed [`PreState`](kona_proof_interop::PreState) to start from. + #[arg(long, visible_alias = "l2-pre-state", value_parser = Bytes::from_str, env)] + pub agreed_l2_pre_state: Bytes, + /// Claimed L2 post-state to validate. + #[arg(long, visible_alias = "l2-claim", env)] + pub claimed_l2_post_state: B256, + /// Claimed L2 timestamp, corresponding to the L2 post-state. + #[arg(long, visible_alias = "l2-timestamp", env)] + pub claimed_l2_timestamp: u64, + /// Addresses of L2 JSON-RPC endpoints to use (eth and debug namespace required). 
+ #[arg( + long, + visible_alias = "l2s", + requires = "l1_node_address", + requires = "l1_beacon_address", + value_delimiter = ',', + env + )] + pub l2_node_addresses: Option>, + /// Address of L1 JSON-RPC endpoint to use (eth and debug namespace required) + #[arg( + long, + visible_alias = "l1", + requires = "l2_node_addresses", + requires = "l1_beacon_address", + env + )] + pub l1_node_address: Option, + /// Address of the L1 Beacon API endpoint to use. + #[arg( + long, + visible_alias = "beacon", + requires = "l1_node_address", + requires = "l2_node_addresses", + env + )] + pub l1_beacon_address: Option, + /// The Data Directory for preimage data storage. Optional if running in online mode, + /// required if running in offline mode. + #[arg( + long, + visible_alias = "db", + required_unless_present_all = ["l2_node_addresses", "l1_node_address", "l1_beacon_address"], + env + )] + pub data_dir: Option, + /// Run the client program natively. + #[arg(long, conflicts_with = "server", required_unless_present = "server")] + pub native: bool, + /// Run in pre-image server mode without executing any client program. If not provided, the + /// host will run the client program in the host process. + #[arg(long, conflicts_with = "native", required_unless_present = "native")] + pub server: bool, + /// Path to rollup configs. If provided, the host will use this config instead of attempting to + /// look up the configs in the superchain registry. + /// The rollup configs should be stored as serde-JSON serialized files. + #[arg(long, alias = "rollup-cfgs", value_delimiter = ',', env)] + pub rollup_config_paths: Option>, + /// Path to l1 config. If provided, the host will use this config instead of attempting to + /// look up the config in the superchain registry. + /// The l1 config should be stored as serde-JSON serialized files. 
+ #[arg(long, alias = "l1-cfg")] + pub l1_config_path: Option, +} + +/// An error that can occur when handling interop hosts +#[derive(Debug, thiserror::Error)] +pub enum InteropHostError { + /// An error when handling preimage requests. + #[error("Error handling preimage request: {0}")] + PreimageServerError(#[from] PreimageServerError), + /// An IO error. + #[error("IO error: {0}")] + IOError(#[from] std::io::Error), + /// A JSON parse error. + #[error("Failed deserializing RollupConfig: {0}")] + ParseError(#[from] serde_json::Error), + /// No l1 config found. + #[error("No l1 config found")] + NoL1Config, + /// Task failed to execute to completion. + #[error("Join error: {0}")] + ExecutionError(#[from] tokio::task::JoinError), + /// A RPC error. + #[error("Rpc Error: {0}")] + RpcError(#[from] alloy_transport::RpcError), + /// An error when no provider found for chain ID. + #[error("No provider found for chain ID: {0}")] + RootProviderError(u64), + /// Any other error. + #[error("Error: {0}")] + Other(&'static str), +} + +impl InteropHost { + /// Starts the [`InteropHost`] application. + pub async fn start(self) -> Result<(), InteropHostError> { + if self.server { + let hint = FileChannel::new(FileDescriptor::HintRead, FileDescriptor::HintWrite); + let preimage = + FileChannel::new(FileDescriptor::PreimageRead, FileDescriptor::PreimageWrite); + + self.start_server(hint, preimage).await?.await? + } else { + self.start_native().await + } + } + + /// Starts the preimage server, communicating with the client over the provided channels. 
+ async fn start_server( + &self, + hint: C, + preimage: C, + ) -> Result>, InteropHostError> + where + C: Channel + Send + Sync + 'static, + { + let kv_store = self.create_key_value_store()?; + + let task_handle = if self.is_offline() { + task::spawn(async { + PreimageServer::new( + OracleServer::new(preimage), + HintReader::new(hint), + Arc::new(OfflineHostBackend::new(kv_store)), + ) + .start() + .await + .map_err(InteropHostError::from) + }) + } else { + let providers = self.create_providers().await?; + let backend = OnlineHostBackend::new( + self.clone(), + kv_store.clone(), + providers, + InteropHintHandler, + ) + .with_proactive_hint(HintType::L2BlockData); + + task::spawn(async { + PreimageServer::new( + OracleServer::new(preimage), + HintReader::new(hint), + Arc::new(backend), + ) + .start() + .await + .map_err(InteropHostError::from) + }) + }; + + Ok(task_handle) + } + + /// Starts the host in native mode, running both the client and preimage server in the same + /// process. + async fn start_native(&self) -> Result<(), InteropHostError> { + let hint = BidirectionalChannel::new()?; + let preimage = BidirectionalChannel::new()?; + + let server_task = self.start_server(hint.host, preimage.host).await?; + let client_task = task::spawn(kona_client::interop::run( + OracleReader::new(preimage.client), + HintWriter::new(hint.client), + )); + + let (_, client_result) = tokio::try_join!(server_task, client_task)?; + + // Bubble up the exit status of the client program if execution completes. + std::process::exit(client_result.is_err() as i32) + } + + /// Returns `true` if the host is running in offline mode. + pub const fn is_offline(&self) -> bool { + self.l1_node_address.is_none() && + self.l2_node_addresses.is_none() && + self.l1_beacon_address.is_none() && + self.data_dir.is_some() + } + + /// Reads the [`RollupConfig`]s from the file system and returns a map of L2 chain ID -> + /// [`RollupConfig`]s. 
+ pub fn read_rollup_configs( + &self, + ) -> Option, InteropHostError>> { + let rollup_config_paths = self.rollup_config_paths.as_ref()?; + + Some(rollup_config_paths.iter().try_fold(HashMap::default(), |mut acc, path| { + // Read the serialized config from the file system. + let ser_config = std::fs::read_to_string(path)?; + + // Deserialize the config and return it. + let cfg: RollupConfig = serde_json::from_str(&ser_config)?; + + acc.insert(cfg.l2_chain_id.id(), cfg); + Ok(acc) + })) + } + + /// Reads the [`L1ChainConfig`]s from the file system and returns a map of L1 chain ID -> + /// [`L1ChainConfig`]s. + pub fn read_l1_config(&self) -> Result { + let path = self.l1_config_path.as_ref().ok_or_else(|| InteropHostError::NoL1Config)?; + + // Read the serialized config from the file system. + let ser_config = std::fs::read_to_string(path)?; + + // Deserialize the config and return it. + serde_json::from_str(&ser_config) + .map_err(|_| InteropHostError::Other("failed to parse L1 config")) + } + + /// Creates the key-value store for the host backend. + fn create_key_value_store(&self) -> Result { + let local_kv_store = InteropLocalInputs::new(self.clone()); + + let kv_store: SharedKeyValueStore = if let Some(ref data_dir) = self.data_dir { + let disk_kv_store = DiskKeyValueStore::new(data_dir.clone()); + let split_kv_store = SplitKeyValueStore::new(local_kv_store, disk_kv_store); + Arc::new(RwLock::new(split_kv_store)) + } else { + let mem_kv_store = MemoryKeyValueStore::new(); + let split_kv_store = SplitKeyValueStore::new(local_kv_store, mem_kv_store); + Arc::new(RwLock::new(split_kv_store)) + }; + + Ok(kv_store) + } + + /// Creates the providers required for the preimage server backend. 
+ async fn create_providers(&self) -> Result { + let l1_provider = rpc_provider( + self.l1_node_address.as_ref().ok_or(InteropHostError::Other("Provider must be set"))?, + ) + .await; + + let blob_provider = OnlineBlobProvider::init(OnlineBeaconClient::new_http( + self.l1_beacon_address + .clone() + .ok_or(InteropHostError::Other("Beacon API URL must be set"))?, + )) + .await; + + // Resolve all chain IDs to their corresponding providers. + let l2_node_addresses = self + .l2_node_addresses + .as_ref() + .ok_or(InteropHostError::Other("L2 node addresses must be set"))?; + let mut l2_providers = HashMap::default(); + for l2_node_address in l2_node_addresses { + let l2_provider = rpc_provider::(l2_node_address).await; + let chain_id = l2_provider.get_chain_id().await?; + l2_providers.insert(chain_id, l2_provider); + } + + Ok(InteropProviders { l1: l1_provider, blobs: blob_provider, l2s: l2_providers }) + } +} + +impl OnlineHostBackendCfg for InteropHost { + type HintType = HintType; + type Providers = InteropProviders; +} + +/// The providers required for the single chain host. +#[derive(Debug, Clone)] +pub struct InteropProviders { + /// The L1 EL provider. + pub l1: RootProvider, + /// The L1 beacon node provider. + pub blobs: OnlineBlobProvider, + /// The L2 EL providers, keyed by chain ID. + pub l2s: HashMap>, +} + +impl InteropProviders { + /// Returns the L2 [`RootProvider`] for the given chain ID. 
+ pub fn l2(&self, chain_id: &u64) -> Result<&RootProvider, InteropHostError> { + self.l2s.get(chain_id).ok_or_else(|| InteropHostError::RootProviderError(*chain_id)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::b256; + + #[test] + fn test_parse_interop_host_cli() { + let hash = b256!("ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68"); + let host = InteropHost::parse_from([ + "interop-host", + "--l1-head", + "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", + "--l2-pre-state", + "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", + "--claimed-l2-post-state", + &hash.to_string(), + "--claimed-l2-timestamp", + "0", + "--native", + "--l2-node-addresses", + "http://localhost:8545", + "--l1-node-address", + "http://localhost:8546", + "--l1-beacon-address", + "http://localhost:8547", + ]); + assert_eq!(host.l1_head, hash); + assert_eq!(host.agreed_l2_pre_state, Bytes::from(hash.0)); + assert_eq!(host.claimed_l2_post_state, hash); + assert_eq!(host.claimed_l2_timestamp, 0); + assert!(host.native); + } + + #[test] + fn test_parse_interop_hex_bytes() { + let hash = b256!("ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68"); + let host = InteropHost::parse_from([ + "interop-host", + "--l1-head", + "ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68", + "--l2-pre-state", + "ff", + "--claimed-l2-post-state", + &hash.to_string(), + "--claimed-l2-timestamp", + "0", + "--native", + "--l2-node-addresses", + "http://localhost:8545", + "--l1-node-address", + "http://localhost:8546", + "--l1-beacon-address", + "http://localhost:8547", + ]); + assert_eq!(host.l1_head, hash); + assert_eq!(host.agreed_l2_pre_state, Bytes::from([0xff])); + assert_eq!(host.claimed_l2_post_state, hash); + assert_eq!(host.claimed_l2_timestamp, 0); + assert!(host.native); + } +} diff --git a/rust/kona/bin/host/src/interop/handler.rs b/rust/kona/bin/host/src/interop/handler.rs new file mode 100644 
index 00000000000..c338842b363 --- /dev/null +++ b/rust/kona/bin/host/src/interop/handler.rs @@ -0,0 +1,616 @@ +//! [`HintHandler`] for the [`InteropHost`]. + +use super::InteropHost; +use crate::{ + HintHandler, OnlineHostBackend, OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, + backend::util::store_ordered_trie, +}; +use alloy_consensus::{Header, Sealed}; +use alloy_eips::{ + eip2718::Encodable2718, + eip4844::{BlobTransactionSidecarItem, FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}, +}; +use alloy_op_evm::OpEvmFactory; +use alloy_primitives::{Address, B256, Bytes, keccak256}; +use alloy_provider::Provider; +use alloy_rlp::{Decodable, Encodable}; +use alloy_rpc_types::Block; +use anyhow::{Result, anyhow, ensure}; +use ark_ff::{BigInteger, PrimeField}; +use async_trait::async_trait; +use kona_derive::EthereumDataSource; +use kona_driver::Driver; +use kona_executor::TrieDBProvider; +use kona_preimage::{ + BidirectionalChannel, HintReader, HintWriter, OracleReader, OracleServer, PreimageKey, + PreimageKeyType, +}; +use kona_proof::{ + CachingOracle, Hint, + executor::KonaExecutor, + l1::{OracleBlobProvider, OracleL1ChainProvider, OraclePipeline, ROOTS_OF_UNITY}, + l2::OracleL2ChainProvider, + sync::new_oracle_pipeline_cursor, +}; +use kona_proof_interop::{HintType, PreState}; +use kona_protocol::{BlockInfo, OutputRoot, Predeploys}; +use kona_registry::{L1_CONFIGS, ROLLUP_CONFIGS}; +use std::sync::Arc; +use tokio::task; +use tracing::{Instrument, debug, info, info_span, warn}; + +/// The [`HintHandler`] for the [`InteropHost`]. 
+#[derive(Debug, Clone, Copy)] +pub struct InteropHintHandler; + +#[async_trait] +impl HintHandler for InteropHintHandler { + type Cfg = InteropHost; + + async fn fetch_hint( + hint: Hint<::HintType>, + cfg: &Self::Cfg, + providers: &::Providers, + kv: SharedKeyValueStore, + ) -> Result<()> { + match hint.ty { + HintType::L1BlockHeader => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let raw_header: Bytes = + providers.l1.client().request("debug_getRawHeader", [hash]).await?; + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; + } + HintType::L1Transactions => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let Block { transactions, .. } = providers + .l1 + .get_block_by_hash(hash) + .full() + .await? + .ok_or_else(|| anyhow!("Block not found"))?; + let encoded_transactions = transactions + .into_transactions() + .map(|tx| tx.inner.encoded_2718()) + .collect::>(); + + store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; + } + HintType::L1Receipts => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let raw_receipts: Vec = + providers.l1.client().request("debug_getRawReceipts", [hash]).await?; + + store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; + } + HintType::L1Blob => { + ensure!(hint.data.len() == 48, "Invalid hint data length"); + + let hash_data_bytes: [u8; 32] = hint.data[0..32].try_into()?; + let index_data_bytes: [u8; 8] = hint.data[32..40].try_into()?; + let timestamp_data_bytes: [u8; 8] = hint.data[40..48].try_into()?; + + let hash: B256 = hash_data_bytes.into(); + let index = u64::from_be_bytes(index_data_bytes); + let timestamp = u64::from_be_bytes(timestamp_data_bytes); + + let partial_block_ref = BlockInfo { timestamp, ..Default::default() }; 
+ let indexed_hash = IndexedBlobHash { index, hash }; + + // Fetch the blob sidecar from the blob provider. + let mut sidecars = providers + .blobs + .fetch_filtered_blob_sidecars(&partial_block_ref, &[indexed_hash]) + .await + .map_err(|e| anyhow!("Failed to fetch blob sidecars: {e}"))?; + + if sidecars.len() != 1 { + anyhow::bail!("Expected 1 sidecar, got {}", sidecars.len()); + } + + let BlobTransactionSidecarItem { + blob, + kzg_proof: proof, + kzg_commitment: commitment, + .. + } = sidecars.pop().expect("Expected 1 sidecar"); + + // Acquire a lock on the key-value store and set the preimages. + let mut kv_lock = kv.write().await; + + // Set the preimage for the blob commitment. + kv_lock.set( + PreimageKey::new(*hash, PreimageKeyType::Sha256).into(), + commitment.to_vec(), + )?; + + // Write all the field elements to the key-value store. There should be 4096. + // The preimage oracle key for each field element is the keccak256 hash of + // `abi.encodePacked(sidecar.KZGCommitment, bytes32(ROOTS_OF_UNITY[i]))`. + let mut blob_key = [0u8; 80]; + blob_key[..48].copy_from_slice(commitment.as_ref()); + for i in 0..FIELD_ELEMENTS_PER_BLOB { + blob_key[48..].copy_from_slice( + ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref(), + ); + let blob_key_hash = keccak256(blob_key.as_ref()); + + kv_lock + .set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; + kv_lock.set( + PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), + blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(), + )?; + } + + // Write the KZG Proof as the 4096th element. + // Note: This is not associated with a root of unity, as to be backwards compatible + // with ZK users of kona that use this proof for the overall blob. 
+ blob_key[72..].copy_from_slice((FIELD_ELEMENTS_PER_BLOB).to_be_bytes().as_ref()); + let blob_key_hash = keccak256(blob_key.as_ref()); + + kv_lock.set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; + kv_lock.set( + PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), + proof.to_vec(), + )?; + } + HintType::L1Precompile => { + ensure!(hint.data.len() >= 28, "Invalid hint data length"); + + let address = Address::from_slice(&hint.data.as_ref()[..20]); + let gas = u64::from_be_bytes(hint.data.as_ref()[20..28].try_into()?); + let input = hint.data[28..].to_vec(); + let input_hash = keccak256(hint.data.as_ref()); + + let result = crate::eth::execute(address, input, gas).map_or_else( + |_| vec![0u8; 1], + |raw_res| { + let mut res = Vec::with_capacity(1 + raw_res.len()); + res.push(0x01); + res.extend_from_slice(&raw_res); + res + }, + ); + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*input_hash).into(), hint.data.into())?; + kv_lock.set( + PreimageKey::new(*input_hash, PreimageKeyType::Precompile).into(), + result, + )?; + } + HintType::AgreedPreState => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + + if hash != keccak256(cfg.agreed_l2_pre_state.as_ref()) { + anyhow::bail!("Agreed pre-state hash does not match."); + } + + let mut kv_write_lock = kv.write().await; + kv_write_lock.set( + PreimageKey::new_keccak256(*hash).into(), + cfg.agreed_l2_pre_state.clone().into(), + )?; + } + HintType::L2OutputRoot => { + ensure!(hint.data.len() >= 32 && hint.data.len() <= 40, "Invalid hint data length"); + + let hash = B256::from_slice(&hint.data.as_ref()[0..32]); + let chain_id = u64::from_be_bytes(hint.data.as_ref()[32..40].try_into()?); + let l2_provider = providers.l2(&chain_id)?; + + // Decode the pre-state to determine the timestamp of the block. 
+ let pre = PreState::decode(&mut cfg.agreed_l2_pre_state.as_ref())?; + let timestamp = match pre { + PreState::SuperRoot(super_root) => super_root.timestamp, + PreState::TransitionState(transition_state) => { + transition_state.pre_state.timestamp + } + }; + + // Convert the timestamp to an L2 block number, using the rollup config for the + // chain ID embedded within the hint. + let rollup_config = cfg + .read_rollup_configs() + // If an error occurred while reading the rollup configs, return the error. + .transpose()? + // Try to find the appropriate rollup config for the chain ID. + .and_then(|configs| configs.get(&chain_id).cloned()) + // If we can't find the rollup config, try to find it in the global rollup + // configs. + .or_else(|| ROLLUP_CONFIGS.get(&chain_id).cloned()) + .map(Arc::new) + .ok_or_else(|| anyhow!("No rollup config found for chain ID: {chain_id}"))?; + let block_number = rollup_config.block_number_from_timestamp(timestamp); + + // Fetch the header for the L2 head block. + let raw_header: Bytes = l2_provider + .client() + .request("debug_getRawHeader", &[format!("0x{block_number:x}")]) + .await + .map_err(|e| anyhow!("Failed to fetch header RLP: {e}"))?; + let header = Header::decode(&mut raw_header.as_ref())?; + + // Fetch the storage root for the L2 head block. + let l2_to_l1_message_passer = l2_provider + .get_proof(Predeploys::L2_TO_L1_MESSAGE_PASSER, Default::default()) + .block_id(block_number.into()) + .await?; + + let output_root = OutputRoot::from_parts( + header.state_root, + l2_to_l1_message_passer.storage_hash, + header.hash_slow(), + ); + let output_root_hash = output_root.hash(); + + ensure!( + output_root_hash == hash, + "Output root does not match L2 head. 
Expected: {hash}, got: {output_root_hash}" + ); + + let mut kv_lock = kv.write().await; + kv_lock.set( + PreimageKey::new_keccak256(*output_root_hash).into(), + output_root.encode().into(), + )?; + } + HintType::L2BlockHeader => { + ensure!(hint.data.len() == 40, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref()[..32].try_into()?; + let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); + + let raw_header: Bytes = + providers.l2(&chain_id)?.client().request("debug_getRawHeader", [hash]).await?; + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; + } + HintType::L2Transactions => { + ensure!(hint.data.len() == 40, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref()[..32].try_into()?; + let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); + + let Block { transactions, .. } = providers + .l2(&chain_id)? + .get_block_by_hash(hash) + .full() + .await? + .ok_or_else(|| anyhow!("Block not found"))?; + let encoded_transactions = transactions + .into_transactions() + .map(|tx| tx.inner.inner.encoded_2718()) + .collect::>(); + + store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; + } + HintType::L2Receipts => { + ensure!(hint.data.len() == 40, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref()[..32].try_into()?; + let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); + + let raw_receipts: Vec = providers + .l2(&chain_id)? 
+ .client() + .request("debug_getRawReceipts", [hash]) + .await?; + + store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; + } + HintType::L2Code => { + // geth hashdb scheme code hash key prefix + const CODE_PREFIX: u8 = b'c'; + + ensure!(hint.data.len() == 40, "Invalid hint data length"); + + let hash: B256 = B256::from_slice(&hint.data[0..32]); + let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); + let l2_provider = providers.l2(&chain_id)?; + + // Attempt to fetch the code from the L2 chain provider. + let code_key = [&[CODE_PREFIX], hash.as_slice()].concat(); + let code = l2_provider + .client() + .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_key.into()]) + .await; + + // Check if the first attempt to fetch the code failed. If it did, try fetching the + // code hash preimage without the geth hashdb scheme prefix. + let code = match code { + Ok(code) => code, + Err(_) => l2_provider + .client() + .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) + .await + .map_err(|e| anyhow!("Error fetching code hash preimage: {e}"))?, + }; + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), code.into())?; + } + HintType::L2StateNode => { + ensure!(hint.data.len() == 40, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let chain_id = u64::from_be_bytes(hint.data[32..40].try_into()?); + + // Fetch the preimage from the L2 chain provider. 
+ let preimage: Bytes = + providers.l2(&chain_id)?.client().request("debug_dbGet", &[hash]).await?; + + let mut kv_write_lock = kv.write().await; + kv_write_lock.set(PreimageKey::new_keccak256(*hash).into(), preimage.into())?; + } + HintType::L2AccountProof => { + ensure!(hint.data.len() == 8 + 20 + 8, "Invalid hint data length"); + + let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); + let address = Address::from_slice(&hint.data.as_ref()[8..28]); + let chain_id = u64::from_be_bytes(hint.data[28..].try_into()?); + + let proof_response = providers + .l2(&chain_id)? + .get_proof(address, Default::default()) + .block_id(block_number.into()) + .await?; + + // Write the account proof nodes to the key-value store. + let mut kv_lock = kv.write().await; + proof_response.account_proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + } + HintType::L2AccountStorageProof => { + ensure!(hint.data.len() == 8 + 20 + 32 + 8, "Invalid hint data length"); + + let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); + let address = Address::from_slice(&hint.data.as_ref()[8..28]); + let slot = B256::from_slice(&hint.data.as_ref()[28..60]); + let chain_id = u64::from_be_bytes(hint.data[60..].try_into()?); + + let mut proof_response = providers + .l2(&chain_id)? + .get_proof(address, vec![slot]) + .block_id(block_number.into()) + .await?; + + let mut kv_lock = kv.write().await; + + // Write the account proof nodes to the key-value store. + proof_response.account_proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + + // Write the storage proof nodes to the key-value store. 
+ let storage_proof = proof_response.storage_proof.remove(0); + storage_proof.proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + } + HintType::L2BlockData => { + ensure!(hint.data.len() == 72, "Invalid hint data length"); + + let agreed_block_hash = B256::from_slice(&hint.data.as_ref()[..32]); + let disputed_block_hash = B256::from_slice(&hint.data.as_ref()[32..64]); + let chain_id = u64::from_be_bytes(hint.data.as_ref()[64..72].try_into()?); + + // Return early if the agreed and disputed block are the same. This can occur when + // the chain has not progressed past its prestate, but the super root timestamp has + // progressed. + if agreed_block_hash == disputed_block_hash { + debug!( + target: "interop_hint_handler", + chain_id, + "Chain has not progressed. Skipping block data hint." + ); + return Ok(()); + } + + let l2_provider = providers.l2(&chain_id)?; + let rollup_config = cfg + .read_rollup_configs() + // If an error occurred while reading the rollup configs, return the error. + .transpose()? + // Try to find the appropriate rollup config for the chain ID. + .and_then(|configs| configs.get(&chain_id).cloned()) + // If we can't find the rollup config, try to find it in the global rollup + // configs. + .or_else(|| ROLLUP_CONFIGS.get(&chain_id).cloned()) + .map(Arc::new) + .ok_or_else(|| anyhow!("No rollup config found for chain ID: {chain_id}"))?; + + let l1_config = cfg + .read_l1_config() + .or_else(|_| { + L1_CONFIGS.get(&rollup_config.l1_chain_id).cloned().ok_or_else(|| { + anyhow!( + "No L1 config found for chain ID: {}", + rollup_config.l1_chain_id + ) + }) + }) + .map(Arc::new)?; + + // Check if the block is canonical before continuing. + let parent_block = l2_provider + .get_block_by_hash(agreed_block_hash) + .await? 
+ .ok_or_else(|| anyhow!("Block not found."))?; + let disputed_block = l2_provider + .get_block_by_number((parent_block.header.number + 1).into()) + .await? + .ok_or_else(|| anyhow!("Block not found."))?; + + // Return early if the disputed block is canonical - preimages can be fetched + // through the normal flow. + if disputed_block.header.hash == disputed_block_hash { + debug!( + target: "interop_hint_handler", + number = disputed_block.header.number, + hash = ?disputed_block.header.hash, + "Block is already canonical. Skipping re-derivation + execution." + ); + return Ok(()); + } + + info!( + target: "interop_hint_handler", + optimistic_hash = ?disputed_block_hash, + "Re-executing optimistic block for witness collection" + ); + + // Reproduce the preimages for the optimistic block's derivation + execution and + // store them in the key-value store. + let hint = BidirectionalChannel::new()?; + let preimage = BidirectionalChannel::new()?; + let backend = + OnlineHostBackend::new(cfg.clone(), kv.clone(), providers.clone(), Self); + let server_task = task::spawn( + PreimageServer::new( + OracleServer::new(preimage.host), + HintReader::new(hint.host), + Arc::new(backend), + ) + .start(), + ); + let client_task = task::spawn({ + let l1_head = cfg.l1_head; + + async move { + let oracle = Arc::new(CachingOracle::new( + 1024, + OracleReader::new(preimage.client), + HintWriter::new(hint.client), + )); + + let mut l1_provider = OracleL1ChainProvider::new(l1_head, oracle.clone()); + let mut l2_provider = OracleL2ChainProvider::new( + agreed_block_hash, + rollup_config.clone(), + oracle.clone(), + ); + let beacon = OracleBlobProvider::new(oracle.clone()); + + l2_provider.set_chain_id(Some(chain_id)); + + let safe_head = l2_provider + .header_by_hash(agreed_block_hash) + .map(|header| Sealed::new_unchecked(header, agreed_block_hash))?; + let target_block = safe_head.number + 1; + + let cursor = new_oracle_pipeline_cursor( + rollup_config.as_ref(), + safe_head, + &mut 
l1_provider, + &mut l2_provider, + ) + .await?; + l2_provider.set_cursor(cursor.clone()); + + let da_provider = EthereumDataSource::new_from_parts( + l1_provider.clone(), + beacon, + &rollup_config, + ); + let pipeline = OraclePipeline::new( + rollup_config.clone(), + l1_config.clone(), + cursor.clone(), + oracle, + da_provider, + l1_provider, + l2_provider.clone(), + ) + .await?; + let executor = KonaExecutor::new( + rollup_config.as_ref(), + l2_provider.clone(), + l2_provider, + OpEvmFactory::default(), + None, + ); + let mut driver = Driver::new(cursor, executor, pipeline); + + driver + .advance_to_target(rollup_config.as_ref(), Some(target_block)) + .await?; + + driver + .safe_head_artifacts + .ok_or_else(|| anyhow!("No artifacts found for the safe head")) + } + .instrument(info_span!( + "OptimisticBlockReexecution", + block_number = disputed_block.header.number + )) + }); + + // Wait on both the server and client tasks to complete. + let (_, client_result) = tokio::try_join!(server_task, client_task)?; + let (build_outcome, raw_transactions) = client_result?; + + // Store optimistic block hash preimage. + let mut kv_lock = kv.write().await; + let mut rlp_buf = Vec::with_capacity(build_outcome.header.length()); + build_outcome.header.encode(&mut rlp_buf); + kv_lock.set( + PreimageKey::new(*build_outcome.header.hash(), PreimageKeyType::Keccak256) + .into(), + rlp_buf, + )?; + + // Drop the lock on the key-value store to avoid deadlocks. + drop(kv_lock); + + // Store receipts root preimages. + let raw_receipts = build_outcome + .execution_result + .receipts + .into_iter() + .map(|receipt| Ok::<_, anyhow::Error>(receipt.encoded_2718())) + .collect::>>()?; + store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; + + // Store tx root preimages. 
+ store_ordered_trie(kv.as_ref(), raw_transactions.as_slice()).await?; + + info!( + target: "interop_hint_handler", + number = build_outcome.header.number, + hash = ?build_outcome.header.hash(), + "Re-executed optimistic block and collected witness" + ); + } + HintType::L2PayloadWitness => { + warn!( + target: "interop_hint_handler", + "L2PayloadWitness hint not implemented for interop hint handler, ignoring hint" + ); + } + } + + Ok(()) + } +} diff --git a/rust/kona/bin/host/src/interop/local_kv.rs b/rust/kona/bin/host/src/interop/local_kv.rs new file mode 100644 index 00000000000..fd33d2854cf --- /dev/null +++ b/rust/kona/bin/host/src/interop/local_kv.rs @@ -0,0 +1,51 @@ +//! Contains a concrete implementation of the [`KeyValueStore`] trait that stores data on disk, +//! using the [`InteropHost`] config. + +use super::InteropHost; +use crate::{KeyValueStore, Result}; +use alloy_primitives::{B256, keccak256}; +use kona_preimage::PreimageKey; +use kona_proof_interop::boot::{ + L1_CONFIG_KEY, L1_HEAD_KEY, L2_AGREED_PRE_STATE_KEY, L2_CLAIMED_POST_STATE_KEY, + L2_CLAIMED_TIMESTAMP_KEY, L2_ROLLUP_CONFIG_KEY, +}; + +/// A simple, synchronous key-value store that returns data from a [`InteropHost`] config. +#[derive(Debug)] +pub struct InteropLocalInputs { + cfg: InteropHost, +} + +impl InteropLocalInputs { + /// Create a new [`InteropLocalInputs`] with the given [`InteropHost`] config. 
+ pub const fn new(cfg: InteropHost) -> Self { + Self { cfg } + } +} + +impl KeyValueStore for InteropLocalInputs { + fn get(&self, key: B256) -> Option> { + let preimage_key = PreimageKey::try_from(*key).ok()?; + match preimage_key.key_value() { + L1_HEAD_KEY => Some(self.cfg.l1_head.to_vec()), + L2_AGREED_PRE_STATE_KEY => { + Some(keccak256(self.cfg.agreed_l2_pre_state.as_ref()).to_vec()) + } + L2_CLAIMED_POST_STATE_KEY => Some(self.cfg.claimed_l2_post_state.to_vec()), + L2_CLAIMED_TIMESTAMP_KEY => Some(self.cfg.claimed_l2_timestamp.to_be_bytes().to_vec()), + L2_ROLLUP_CONFIG_KEY => { + let rollup_configs = self.cfg.read_rollup_configs()?.ok()?; + serde_json::to_vec(&rollup_configs).ok() + } + L1_CONFIG_KEY => { + let l1_config = self.cfg.read_l1_config().ok()?; + serde_json::to_vec(&l1_config).ok() + } + _ => None, + } + } + + fn set(&mut self, _: B256, _: Vec) -> Result<()> { + unreachable!("LocalKeyValueStore is read-only") + } +} diff --git a/kona/bin/host/src/interop/mod.rs b/rust/kona/bin/host/src/interop/mod.rs similarity index 100% rename from kona/bin/host/src/interop/mod.rs rename to rust/kona/bin/host/src/interop/mod.rs diff --git a/kona/bin/host/src/kv/disk.rs b/rust/kona/bin/host/src/kv/disk.rs similarity index 86% rename from kona/bin/host/src/kv/disk.rs rename to rust/kona/bin/host/src/kv/disk.rs index 81093959fbe..00d4bb96fd5 100644 --- a/kona/bin/host/src/kv/disk.rs +++ b/rust/kona/bin/host/src/kv/disk.rs @@ -1,4 +1,4 @@ -//! Contains a concrete implementation of the [KeyValueStore] trait that stores data on disk +//! Contains a concrete implementation of the [`KeyValueStore`] trait that stores data on disk //! using [rocksdb]. use super::{KeyValueStore, MemoryKeyValueStore}; @@ -15,7 +15,7 @@ pub struct DiskKeyValueStore { } impl DiskKeyValueStore { - /// Create a new [DiskKeyValueStore] with the given data directory. + /// Create a new [`DiskKeyValueStore`] with the given data directory. 
pub fn new(data_directory: PathBuf) -> Self { let db = DB::open(&Self::get_db_options(), data_directory.as_path()) .unwrap_or_else(|e| panic!("Failed to open database at {data_directory:?}: {e}")); @@ -23,7 +23,7 @@ impl DiskKeyValueStore { Self { data_directory, db } } - /// Gets the [Options] for the underlying RocksDB instance. + /// Gets the [Options] for the underlying `RocksDB` instance. fn get_db_options() -> Options { let mut options = Options::default(); options.set_compression_type(rocksdb::DBCompressionType::Snappy); @@ -87,13 +87,13 @@ mod test { fn convert_disk_kv_to_mem_kv(k_v in hash_map(any::<[u8; 32]>(), vec(any::(), 0..128), 1..128)) { let tempdir = temp_dir(); let mut disk_kv = DiskKeyValueStore::new(tempdir); - k_v.iter().for_each(|(k, v)| { - disk_kv.set(k.into(), v.to_vec()).unwrap(); - }); + for (k, v) in &k_v { + disk_kv.set(k.into(), v.clone()).unwrap(); + } let mem_kv = MemoryKeyValueStore::try_from(disk_kv).unwrap(); for (k, v) in k_v { - assert_eq!(mem_kv.get(k.into()).unwrap(), v.to_vec()); + assert_eq!(mem_kv.get(k.into()).unwrap(), v.clone()); } } } diff --git a/kona/bin/host/src/kv/mem.rs b/rust/kona/bin/host/src/kv/mem.rs similarity index 82% rename from kona/bin/host/src/kv/mem.rs rename to rust/kona/bin/host/src/kv/mem.rs index d7f607b83f3..7015410834b 100644 --- a/kona/bin/host/src/kv/mem.rs +++ b/rust/kona/bin/host/src/kv/mem.rs @@ -1,4 +1,4 @@ -//! Contains a concrete implementation of the [KeyValueStore] trait that stores data in memory. +//! Contains a concrete implementation of the [`KeyValueStore`] trait that stores data in memory. use super::KeyValueStore; use crate::Result; @@ -14,7 +14,7 @@ pub struct MemoryKeyValueStore { } impl MemoryKeyValueStore { - /// Create a new [MemoryKeyValueStore] with an empty store. + /// Create a new [`MemoryKeyValueStore`] with an empty store. 
pub fn new() -> Self { Self { store: HashMap::default() } } diff --git a/rust/kona/bin/host/src/kv/mod.rs b/rust/kona/bin/host/src/kv/mod.rs new file mode 100644 index 00000000000..45468e79c8e --- /dev/null +++ b/rust/kona/bin/host/src/kv/mod.rs @@ -0,0 +1,27 @@ +//! This module contains the [`KeyValueStore`] trait and concrete implementations of it. + +use crate::Result; +use alloy_primitives::B256; +use std::sync::Arc; +use tokio::sync::RwLock; + +mod mem; +pub use mem::MemoryKeyValueStore; + +mod disk; +pub use disk::DiskKeyValueStore; + +mod split; +pub use split::SplitKeyValueStore; + +/// A type alias for a shared key-value store. +pub type SharedKeyValueStore = Arc>; + +/// Describes the interface of a simple, synchronous key-value store. +pub trait KeyValueStore { + /// Get the value associated with the given key. + fn get(&self, key: B256) -> Option>; + + /// Set the value associated with the given key. + fn set(&mut self, key: B256, value: Vec) -> Result<()>; +} diff --git a/rust/kona/bin/host/src/kv/split.rs b/rust/kona/bin/host/src/kv/split.rs new file mode 100644 index 00000000000..9327fab0ede --- /dev/null +++ b/rust/kona/bin/host/src/kv/split.rs @@ -0,0 +1,47 @@ +//! Contains a concrete implementation of the [`KeyValueStore`] trait that splits between two +//! separate [`KeyValueStore`]s depending on [`PreimageKeyType`]. + +use super::KeyValueStore; +use crate::Result; +use alloy_primitives::B256; +use kona_preimage::PreimageKeyType; + +/// A split implementation of the [`KeyValueStore`] trait that splits between two separate +/// [`KeyValueStore`]s. +#[derive(Clone, Debug)] +pub struct SplitKeyValueStore +where + L: KeyValueStore, + R: KeyValueStore, +{ + local_store: L, + remote_store: R, +} + +impl SplitKeyValueStore +where + L: KeyValueStore, + R: KeyValueStore, +{ + /// Create a new [`SplitKeyValueStore`] with the given left and right [`KeyValueStore`]s. 
+ pub const fn new(local_store: L, remote_store: R) -> Self { + Self { local_store, remote_store } + } +} + +impl KeyValueStore for SplitKeyValueStore +where + L: KeyValueStore, + R: KeyValueStore, +{ + fn get(&self, key: B256) -> Option> { + match PreimageKeyType::try_from(key[0]).ok()? { + PreimageKeyType::Local => self.local_store.get(key), + _ => self.remote_store.get(key), + } + } + + fn set(&mut self, key: B256, value: Vec) -> Result<()> { + self.remote_store.set(key, value) + } +} diff --git a/rust/kona/bin/host/src/lib.rs b/rust/kona/bin/host/src/lib.rs new file mode 100644 index 00000000000..623ba04ac6a --- /dev/null +++ b/rust/kona/bin/host/src/lib.rs @@ -0,0 +1,24 @@ +#![doc = include_str!("../README.md")] +#![cfg_attr(docsrs, feature(doc_cfg))] + +mod error; +pub use error::{HostError, Result}; + +mod server; +pub use server::{PreimageServer, PreimageServerError}; + +mod kv; +pub use kv::{ + DiskKeyValueStore, KeyValueStore, MemoryKeyValueStore, SharedKeyValueStore, SplitKeyValueStore, +}; + +mod backend; +pub use backend::{HintHandler, OfflineHostBackend, OnlineHostBackend, OnlineHostBackendCfg}; + +pub mod eth; + +#[cfg(feature = "single")] +pub mod single; + +#[cfg(feature = "interop")] +pub mod interop; diff --git a/rust/kona/bin/host/src/server.rs b/rust/kona/bin/host/src/server.rs new file mode 100644 index 00000000000..51e6f5140a5 --- /dev/null +++ b/rust/kona/bin/host/src/server.rs @@ -0,0 +1,99 @@ +//! This module contains the [`PreimageServer`] struct and its implementation. + +use kona_preimage::{ + HintReaderServer, PreimageOracleServer, PreimageServerBackend, errors::PreimageOracleError, +}; +use std::sync::Arc; +use tokio::spawn; +use tracing::{error, info}; + +/// The [`PreimageServer`] is responsible for waiting for incoming preimage requests and +/// serving them to the client. +#[derive(Debug)] +pub struct PreimageServer { + /// The oracle server. + oracle_server: P, + /// The hint router. 
+ hint_reader: H, + /// [`PreimageServerBackend`] that routes hints and retrieves preimages. + backend: Arc, +} + +/// An error that can occur when handling preimage requests +#[derive(Debug, thiserror::Error)] +pub enum PreimageServerError { + /// A preimage request error. + #[error("Failed to serve preimage request: {0}")] + PreimageRequestFailed(PreimageOracleError), + /// An error when failed to serve route hint. + #[error("Failed to route hint: {0}")] + RouteHintFailed(PreimageOracleError), + /// Task failed to execute to completion. + #[error("Join error: {0}")] + ExecutionError(#[from] tokio::task::JoinError), +} + +impl PreimageServer +where + P: PreimageOracleServer + Send + Sync + 'static, + H: HintReaderServer + Send + Sync + 'static, + B: PreimageServerBackend + Send + Sync + 'static, +{ + /// Create a new [`PreimageServer`] with the given [`PreimageOracleServer`], + /// [`HintReaderServer`], and [`PreimageServerBackend`]. + pub const fn new(oracle_server: P, hint_reader: H, backend: Arc) -> Self { + Self { oracle_server, hint_reader, backend } + } + + /// Starts the [`PreimageServer`] and waits for incoming requests. + pub async fn start(self) -> Result<(), PreimageServerError> { + // Create the futures for the oracle server and hint router. + let server = spawn(Self::start_oracle_server(self.oracle_server, self.backend.clone())); + let hint_router = spawn(Self::start_hint_router(self.hint_reader, self.backend.clone())); + + // Race the two futures to completion, returning the result of the first one to finish. + tokio::select! { + s = server => s?, + h = hint_router => h?, + } + } + + /// Starts the oracle server, which waits for incoming preimage requests and serves them to the + /// client. + async fn start_oracle_server( + oracle_server: P, + backend: Arc, + ) -> Result<(), PreimageServerError> { + info!(target: "host_server", "Starting oracle server"); + loop { + // Serve the next preimage request. 
This `await` will yield to the runtime + // if no progress can be made. + match oracle_server.next_preimage_request(backend.as_ref()).await { + Ok(_) => {} + Err(PreimageOracleError::IOError(_)) => return Ok(()), + Err(e) => { + error!(target: "host_server", "Failed to serve preimage request: {e}"); + return Err(PreimageServerError::PreimageRequestFailed(e)); + } + } + } + } + + /// Starts the hint router, which waits for incoming hints and routes them to the appropriate + /// handler. + async fn start_hint_router(hint_reader: H, backend: Arc) -> Result<(), PreimageServerError> { + info!(target: "host_server", "Starting hint router"); + loop { + // Route the next hint. This `await` will yield to the runtime if no progress can be + // made. + match hint_reader.next_hint(backend.as_ref()).await { + Ok(_) => {} + Err(PreimageOracleError::IOError(_)) => return Ok(()), + Err(e) => { + error!(target: "host_server", "Failed to serve route hint: {e}"); + return Err(PreimageServerError::RouteHintFailed(e)); + } + } + } + } +} diff --git a/rust/kona/bin/host/src/single/cfg.rs b/rust/kona/bin/host/src/single/cfg.rs new file mode 100644 index 00000000000..d14057194ee --- /dev/null +++ b/rust/kona/bin/host/src/single/cfg.rs @@ -0,0 +1,389 @@ +//! This module contains all CLI-specific code for the single chain entrypoint. 
+ +use super::{SingleChainHintHandler, SingleChainLocalInputs}; +use crate::{ + DiskKeyValueStore, MemoryKeyValueStore, OfflineHostBackend, OnlineHostBackend, + OnlineHostBackendCfg, PreimageServer, SharedKeyValueStore, SplitKeyValueStore, + eth::rpc_provider, server::PreimageServerError, +}; +use alloy_primitives::B256; +use alloy_provider::RootProvider; +use clap::Parser; +use kona_cli::cli_styles; +use kona_genesis::{L1ChainConfig, RollupConfig}; +use kona_preimage::{ + BidirectionalChannel, Channel, HintReader, HintWriter, OracleReader, OracleServer, +}; +use kona_proof::HintType; +use kona_providers_alloy::{OnlineBeaconClient, OnlineBlobProvider}; +use kona_std_fpvm::{FileChannel, FileDescriptor}; +use op_alloy_network::Optimism; +use serde::Serialize; +use std::{path::PathBuf, sync::Arc}; +use tokio::{ + sync::RwLock, + task::{self, JoinHandle}, +}; + +/// The host binary CLI application arguments. +#[derive(Default, Parser, Serialize, Clone, Debug)] +#[command(styles = cli_styles())] +pub struct SingleChainHost { + /// Hash of the L1 head block. Derivation stops after this block is processed. + #[arg(long, env)] + pub l1_head: B256, + /// Hash of the agreed upon safe L2 block committed to by `--agreed-l2-output-root`. + #[arg(long, visible_alias = "l2-head", env)] + pub agreed_l2_head_hash: B256, + /// Agreed safe L2 Output Root to start derivation from. + #[arg(long, visible_alias = "l2-output-root", env)] + pub agreed_l2_output_root: B256, + /// Claimed L2 output root at block # `--claimed-l2-block-number` to validate. + #[arg(long, visible_alias = "l2-claim", env)] + pub claimed_l2_output_root: B256, + /// Number of the L2 block that the claimed output root commits to. + #[arg(long, visible_alias = "l2-block-number", env)] + pub claimed_l2_block_number: u64, + /// Address of L2 JSON-RPC endpoint to use (eth and debug namespace required). 
+ #[arg( + long, + visible_alias = "l2", + requires = "l1_node_address", + requires = "l1_beacon_address", + env + )] + pub l2_node_address: Option, + /// Address of L1 JSON-RPC endpoint to use (eth and debug namespace required) + #[arg( + long, + visible_alias = "l1", + requires = "l2_node_address", + requires = "l1_beacon_address", + env + )] + pub l1_node_address: Option, + /// Address of the L1 Beacon API endpoint to use. + #[arg( + long, + visible_alias = "beacon", + requires = "l1_node_address", + requires = "l2_node_address", + env + )] + pub l1_beacon_address: Option, + /// The Data Directory for preimage data storage. Optional if running in online mode, + /// required if running in offline mode. + #[arg( + long, + visible_alias = "db", + required_unless_present_all = ["l2_node_address", "l1_node_address", "l1_beacon_address"], + env + )] + pub data_dir: Option, + /// Run the client program natively. + #[arg(long, conflicts_with = "server", required_unless_present = "server")] + pub native: bool, + /// Run in pre-image server mode without executing any client program. If not provided, the + /// host will run the client program in the host process. + #[arg(long, conflicts_with = "native", required_unless_present = "native")] + pub server: bool, + /// The L2 chain ID of a supported chain. If provided, the host will look for the corresponding + /// rollup config in the superchain registry. + #[arg( + long, + conflicts_with = "rollup_config_path", + required_unless_present = "rollup_config_path", + env + )] + pub l2_chain_id: Option, + /// Path to rollup config. If provided, the host will use this config instead of attempting to + /// look up the config in the superchain registry. + #[arg( + long, + alias = "rollup-cfg", + conflicts_with = "l2_chain_id", + required_unless_present = "l2_chain_id", + env + )] + pub rollup_config_path: Option, + /// Path to l1 config. 
If provided, the host will use this config instead of attempting to + /// look up the config in the known l1 configs. + #[arg(long, alias = "l1-cfg", env)] + pub l1_config_path: Option, + /// Optionally enables the use of `debug_executePayload` to collect the execution witness from + /// the execution layer. + #[arg(long, env)] + pub enable_experimental_witness_endpoint: bool, +} + +/// An error that can occur when handling single chain hosts +#[derive(Debug, thiserror::Error)] +pub enum SingleChainHostError { + /// An error when handling preimage requests. + #[error("Error handling preimage request: {0}")] + PreimageServerError(#[from] PreimageServerError), + /// An IO error. + #[error("IO error: {0}")] + IOError(#[from] std::io::Error), + /// A JSON parse error. + #[error("Failed deserializing RollupConfig: {0}")] + ParseError(#[from] serde_json::Error), + /// Task failed to execute to completion. + #[error("Join error: {0}")] + ExecutionError(#[from] tokio::task::JoinError), + /// No rollup config found. + #[error("No rollup config found")] + NoRollupConfig, + /// No l1 config found. + #[error("No l1 config found")] + NoL1Config, + /// Any other error. + #[error("Error: {0}")] + Other(&'static str), +} + +impl SingleChainHost { + /// Starts the [`SingleChainHost`] application. + pub async fn start(self) -> Result<(), SingleChainHostError> { + if self.server { + let hint = FileChannel::new(FileDescriptor::HintRead, FileDescriptor::HintWrite); + let preimage = + FileChannel::new(FileDescriptor::PreimageRead, FileDescriptor::PreimageWrite); + + self.start_server(hint, preimage).await?.await? + } else { + self.start_native().await + } + } + + /// Starts the preimage server, communicating with the client over the provided channels. 
+ pub async fn start_server( + &self, + hint: C, + preimage: C, + ) -> Result>, SingleChainHostError> + where + C: Channel + Send + Sync + 'static, + { + let kv_store = self.create_key_value_store()?; + + let task_handle = if self.is_offline() { + task::spawn(async { + PreimageServer::new( + OracleServer::new(preimage), + HintReader::new(hint), + Arc::new(OfflineHostBackend::new(kv_store)), + ) + .start() + .await + .map_err(SingleChainHostError::from) + }) + } else { + let providers = self.create_providers().await?; + let backend = OnlineHostBackend::new( + self.clone(), + kv_store.clone(), + providers, + SingleChainHintHandler, + ) + .with_proactive_hint(HintType::L2PayloadWitness); + + task::spawn(async { + PreimageServer::new( + OracleServer::new(preimage), + HintReader::new(hint), + Arc::new(backend), + ) + .start() + .await + .map_err(SingleChainHostError::from) + }) + }; + + Ok(task_handle) + } + + /// Starts the host in native mode, running both the client and preimage server in the same + /// process. + async fn start_native(&self) -> Result<(), SingleChainHostError> { + let hint = BidirectionalChannel::new()?; + let preimage = BidirectionalChannel::new()?; + + let server_task = self.start_server(hint.host, preimage.host).await?; + let client_task = task::spawn(kona_client::single::run( + OracleReader::new(preimage.client), + HintWriter::new(hint.client), + )); + + let (_, client_result) = tokio::try_join!(server_task, client_task)?; + + // Bubble up the exit status of the client program if execution completes. + std::process::exit(client_result.is_err() as i32) + } + + /// Returns `true` if the host is running in offline mode. + pub const fn is_offline(&self) -> bool { + self.l1_node_address.is_none() && + self.l2_node_address.is_none() && + self.l1_beacon_address.is_none() && + self.data_dir.is_some() + } + + /// Reads the [`RollupConfig`] from the file system and returns the deserialized configuration. 
+ pub fn read_rollup_config(&self) -> Result { + let path = + self.rollup_config_path.as_ref().ok_or_else(|| SingleChainHostError::NoRollupConfig)?; + + // Read the serialized config from the file system. + let ser_config = std::fs::read_to_string(path)?; + + // Deserialize the config and return it. + serde_json::from_str(&ser_config).map_err(SingleChainHostError::ParseError) + } + + /// Reads the [`L1ChainConfig`] from the file system and returns the deserialized configuration. + pub fn read_l1_config(&self) -> Result { + let path = self.l1_config_path.as_ref().ok_or_else(|| SingleChainHostError::NoL1Config)?; + + // Read the serialized config from the file system. + let ser_config = std::fs::read_to_string(path)?; + + // Deserialize the config and return it. + serde_json::from_str(&ser_config).map_err(SingleChainHostError::ParseError) + } + + /// Creates the key-value store for the host backend. + pub fn create_key_value_store(&self) -> Result { + let local_kv_store = SingleChainLocalInputs::new(self.clone()); + + let kv_store: SharedKeyValueStore = if let Some(ref data_dir) = self.data_dir { + let disk_kv_store = DiskKeyValueStore::new(data_dir.clone()); + let split_kv_store = SplitKeyValueStore::new(local_kv_store, disk_kv_store); + Arc::new(RwLock::new(split_kv_store)) + } else { + let mem_kv_store = MemoryKeyValueStore::new(); + let split_kv_store = SplitKeyValueStore::new(local_kv_store, mem_kv_store); + Arc::new(RwLock::new(split_kv_store)) + }; + + Ok(kv_store) + } + + /// Creates the providers required for the host backend. 
+ pub async fn create_providers(&self) -> Result { + let l1_provider = rpc_provider( + self.l1_node_address + .as_ref() + .ok_or(SingleChainHostError::Other("Provider must be set"))?, + ) + .await; + let blob_provider = OnlineBlobProvider::init(OnlineBeaconClient::new_http( + self.l1_beacon_address + .clone() + .ok_or(SingleChainHostError::Other("Beacon API URL must be set"))?, + )) + .await; + let l2_provider = rpc_provider::( + self.l2_node_address + .as_ref() + .ok_or(SingleChainHostError::Other("L2 node address must be set"))?, + ) + .await; + + Ok(SingleChainProviders { l1: l1_provider, blobs: blob_provider, l2: l2_provider }) + } +} + +impl OnlineHostBackendCfg for SingleChainHost { + type HintType = HintType; + type Providers = SingleChainProviders; +} + +/// The providers required for the single chain host. +#[derive(Debug, Clone)] +pub struct SingleChainProviders { + /// The L1 EL provider. + pub l1: RootProvider, + /// The L1 beacon node provider. + pub blobs: OnlineBlobProvider, + /// The L2 EL provider. 
+ pub l2: RootProvider, +} + +#[cfg(test)] +mod test { + use crate::single::SingleChainHost; + use alloy_primitives::B256; + use clap::Parser; + + #[test] + fn test_flags() { + let zero_hash_str = &B256::ZERO.to_string(); + let default_flags = [ + "single", + "--l1-head", + zero_hash_str, + "--l2-head", + zero_hash_str, + "--l2-output-root", + zero_hash_str, + "--l2-claim", + zero_hash_str, + "--l2-block-number", + "0", + ]; + + let cases = [ + // valid + (["--server", "--l2-chain-id", "0", "--data-dir", "dummy"].as_slice(), true), + (["--server", "--rollup-config-path", "dummy", "--data-dir", "dummy"].as_slice(), true), + (["--native", "--l2-chain-id", "0", "--data-dir", "dummy"].as_slice(), true), + (["--native", "--rollup-config-path", "dummy", "--data-dir", "dummy"].as_slice(), true), + ( + [ + "--l1-node-address", + "dummy", + "--l2-node-address", + "dummy", + "--l1-beacon-address", + "dummy", + "--server", + "--l2-chain-id", + "0", + ] + .as_slice(), + true, + ), + ( + [ + "--server", + "--l2-chain-id", + "0", + "--data-dir", + "dummy", + "--enable-experimental-witness-endpoint", + ] + .as_slice(), + true, + ), + // invalid + (["--server", "--native", "--l2-chain-id", "0"].as_slice(), false), + (["--l2-chain-id", "0", "--rollup-config-path", "dummy", "--server"].as_slice(), false), + (["--server"].as_slice(), false), + (["--native"].as_slice(), false), + (["--rollup-config-path", "dummy"].as_slice(), false), + (["--l2-chain-id", "0"].as_slice(), false), + (["--l1-node-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), + (["--l2-node-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), + (["--l1-beacon-address", "dummy", "--server", "--l2-chain-id", "0"].as_slice(), false), + ([].as_slice(), false), + ]; + + for (args_ext, valid) in cases { + let args = default_flags.iter().chain(args_ext.iter()).copied().collect::>(); + + let parsed = SingleChainHost::try_parse_from(args); + assert_eq!(parsed.is_ok(), valid); + } + } 
+} diff --git a/rust/kona/bin/host/src/single/handler.rs b/rust/kona/bin/host/src/single/handler.rs new file mode 100644 index 00000000000..e3babf7d98f --- /dev/null +++ b/rust/kona/bin/host/src/single/handler.rs @@ -0,0 +1,384 @@ +//! [`HintHandler`] for the [`SingleChainHost`]. + +use crate::{ + HintHandler, OnlineHostBackendCfg, backend::util::store_ordered_trie, kv::SharedKeyValueStore, + single::cfg::SingleChainHost, +}; +use alloy_consensus::Header; +use alloy_eips::{ + eip2718::Encodable2718, + eip4844::{BlobTransactionSidecarItem, FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}, +}; +use alloy_primitives::{Address, B256, Bytes, keccak256}; +use alloy_provider::Provider; +use alloy_rlp::Decodable; +use alloy_rpc_types::{Block, debug::ExecutionWitness}; +use anyhow::{Result, anyhow, ensure}; +use ark_ff::{BigInteger, PrimeField}; +use async_trait::async_trait; +use kona_preimage::{PreimageKey, PreimageKeyType}; +use kona_proof::{Hint, HintType, l1::ROOTS_OF_UNITY}; +use kona_protocol::{BlockInfo, OutputRoot, Predeploys}; +use op_alloy_rpc_types_engine::OpPayloadAttributes; +use tracing::warn; + +/// The [`HintHandler`] for the [`SingleChainHost`]. 
+#[derive(Debug, Clone, Copy)] +pub struct SingleChainHintHandler; + +#[async_trait] +impl HintHandler for SingleChainHintHandler { + type Cfg = SingleChainHost; + + async fn fetch_hint( + hint: Hint<::HintType>, + cfg: &Self::Cfg, + providers: &::Providers, + kv: SharedKeyValueStore, + ) -> Result<()> { + match hint.ty { + HintType::L1BlockHeader => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let raw_header: Bytes = + providers.l1.client().request("debug_getRawHeader", [hash]).await?; + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; + } + HintType::L1Transactions => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let Block { transactions, .. } = providers + .l1 + .get_block_by_hash(hash) + .full() + .await? + .ok_or_else(|| anyhow!("Block not found"))?; + let encoded_transactions = transactions + .into_transactions() + .map(|tx| tx.inner.encoded_2718()) + .collect::>(); + + store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; + } + HintType::L1Receipts => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let raw_receipts: Vec = + providers.l1.client().request("debug_getRawReceipts", [hash]).await?; + + store_ordered_trie(kv.as_ref(), raw_receipts.as_slice()).await?; + } + HintType::L1Blob => { + ensure!(hint.data.len() == 48, "Invalid hint data length"); + + let hash_data_bytes: [u8; 32] = hint.data[0..32].try_into()?; + let index_data_bytes: [u8; 8] = hint.data[32..40].try_into()?; + let timestamp_data_bytes: [u8; 8] = hint.data[40..48].try_into()?; + + let hash: B256 = hash_data_bytes.into(); + let index = u64::from_be_bytes(index_data_bytes); + let timestamp = u64::from_be_bytes(timestamp_data_bytes); + + let partial_block_ref = BlockInfo { timestamp, 
..Default::default() }; + let indexed_hash = IndexedBlobHash { index, hash }; + + // Fetch the blobs from the blob provider. + let mut blobs = providers + .blobs + .fetch_filtered_blob_sidecars(&partial_block_ref, &[indexed_hash]) + .await + .map_err(|e| anyhow!("Failed to fetch blob sidecars: {e}"))?; + if blobs.len() != 1 { + anyhow::bail!("Expected 1 blob, got {}", blobs.len()); + } + let BlobTransactionSidecarItem { + blob, + kzg_proof: proof, + kzg_commitment: commitment, + .. + } = blobs.pop().expect("Expected 1 blob"); + + // Acquire a lock on the key-value store and set the preimages. + let mut kv_lock = kv.write().await; + + // Set the preimage for the blob commitment. + kv_lock.set( + PreimageKey::new(*hash, PreimageKeyType::Sha256).into(), + commitment.to_vec(), + )?; + + // Write all the field elements to the key-value store. There should be 4096. + // The preimage oracle key for each field element is the keccak256 hash of + // `abi.encodePacked(sidecar.KZGCommitment, bytes32(ROOTS_OF_UNITY[i]))`. + let mut blob_key = [0u8; 80]; + blob_key[..48].copy_from_slice(commitment.as_ref()); + for i in 0..FIELD_ELEMENTS_PER_BLOB { + blob_key[48..].copy_from_slice( + ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref(), + ); + let blob_key_hash = keccak256(blob_key.as_ref()); + + kv_lock + .set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; + kv_lock.set( + PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), + blob[(i as usize) << 5..(i as usize + 1) << 5].to_vec(), + )?; + } + + // Write the KZG Proof as the 4096th element. + // Note: This is not associated with a root of unity, as to be backwards compatible + // with ZK users of kona that use this proof for the overall blob. 
+ blob_key[72..].copy_from_slice(FIELD_ELEMENTS_PER_BLOB.to_be_bytes().as_ref()); + let blob_key_hash = keccak256(blob_key.as_ref()); + + kv_lock.set(PreimageKey::new_keccak256(*blob_key_hash).into(), blob_key.into())?; + kv_lock.set( + PreimageKey::new(*blob_key_hash, PreimageKeyType::Blob).into(), + proof.to_vec(), + )?; + } + HintType::L1Precompile => { + ensure!(hint.data.len() >= 28, "Invalid hint data length"); + + let address = Address::from_slice(&hint.data.as_ref()[..20]); + let gas = u64::from_be_bytes(hint.data.as_ref()[20..28].try_into()?); + let input = hint.data[28..].to_vec(); + let input_hash = keccak256(hint.data.as_ref()); + + let result = crate::eth::execute(address, input, gas).map_or_else( + |_| vec![0u8; 1], + |raw_res| { + let mut res = Vec::with_capacity(1 + raw_res.len()); + res.push(0x01); + res.extend_from_slice(&raw_res); + res + }, + ); + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*input_hash).into(), hint.data.into())?; + kv_lock.set( + PreimageKey::new(*input_hash, PreimageKeyType::Precompile).into(), + result, + )?; + } + HintType::L2BlockHeader => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + // Fetch the raw header from the L2 chain provider. + let hash: B256 = hint.data.as_ref().try_into()?; + let raw_header: Bytes = + providers.l2.client().request("debug_getRawHeader", [hash]).await?; + + // Acquire a lock on the key-value store and set the preimage. + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), raw_header.into())?; + } + HintType::L2Transactions => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + let Block { transactions, .. } = providers + .l2 + .get_block_by_hash(hash) + .full() + .await? 
+ .ok_or_else(|| anyhow!("Block not found."))?; + + let encoded_transactions = transactions + .into_transactions() + .map(|tx| tx.inner.inner.encoded_2718()) + .collect::>(); + store_ordered_trie(kv.as_ref(), encoded_transactions.as_slice()).await?; + } + HintType::StartingL2Output => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + // Fetch the header for the L2 head block. + let raw_header: Bytes = providers + .l2 + .client() + .request("debug_getRawHeader", &[cfg.agreed_l2_head_hash]) + .await?; + let header = Header::decode(&mut raw_header.as_ref())?; + + // Fetch the storage root for the L2 head block. + let l2_to_l1_message_passer = providers + .l2 + .get_proof(Predeploys::L2_TO_L1_MESSAGE_PASSER, Default::default()) + .block_id(cfg.agreed_l2_head_hash.into()) + .await?; + + let output_root = OutputRoot::from_parts( + header.state_root, + l2_to_l1_message_passer.storage_hash, + cfg.agreed_l2_head_hash, + ); + let output_root_hash = output_root.hash(); + + ensure!( + output_root_hash == cfg.agreed_l2_output_root, + "Output root does not match L2 head." + ); + + let mut kv_write_lock = kv.write().await; + kv_write_lock.set( + PreimageKey::new_keccak256(*output_root_hash).into(), + output_root.encode().into(), + )?; + } + HintType::L2Code => { + // geth hashdb scheme code hash key prefix + const CODE_PREFIX: u8 = b'c'; + + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + + // Attempt to fetch the code from the L2 chain provider. + let code_key = [&[CODE_PREFIX], hash.as_slice()].concat(); + let code = providers + .l2 + .client() + .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_key.into()]) + .await; + + // Check if the first attempt to fetch the code failed. If it did, try fetching the + // code hash preimage without the geth hashdb scheme prefix. 
+ let code = match code { + Ok(code) => code, + Err(_) => providers + .l2 + .client() + .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) + .await + .map_err(|e| anyhow!("Error fetching code hash preimage: {e}"))?, + }; + + let mut kv_lock = kv.write().await; + kv_lock.set(PreimageKey::new_keccak256(*hash).into(), code.into())?; + } + HintType::L2StateNode => { + ensure!(hint.data.len() == 32, "Invalid hint data length"); + + let hash: B256 = hint.data.as_ref().try_into()?; + + warn!(target: "single_hint_handler", "L2StateNode hint was sent for node hash: {}", hash); + warn!( + target: "single_hint_handler", + "`debug_executePayload` failed to return a complete witness." + ); + + // Fetch the preimage from the L2 chain provider. + let preimage: Bytes = providers.l2.client().request("debug_dbGet", &[hash]).await?; + + let mut kv_write_lock = kv.write().await; + kv_write_lock.set(PreimageKey::new_keccak256(*hash).into(), preimage.into())?; + } + HintType::L2AccountProof => { + ensure!(hint.data.len() == 8 + 20, "Invalid hint data length"); + + let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); + let address = Address::from_slice(&hint.data.as_ref()[8..28]); + + let proof_response = providers + .l2 + .get_proof(address, Default::default()) + .block_id(block_number.into()) + .await?; + + // Write the account proof nodes to the key-value store. 
+ let mut kv_lock = kv.write().await; + proof_response.account_proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + } + HintType::L2AccountStorageProof => { + ensure!(hint.data.len() == 8 + 20 + 32, "Invalid hint data length"); + + let block_number = u64::from_be_bytes(hint.data.as_ref()[..8].try_into()?); + let address = Address::from_slice(&hint.data.as_ref()[8..28]); + let slot = B256::from_slice(&hint.data.as_ref()[28..]); + + let mut proof_response = providers + .l2 + .get_proof(address, vec![slot]) + .block_id(block_number.into()) + .await?; + + let mut kv_lock = kv.write().await; + + // Write the account proof nodes to the key-value store. + proof_response.account_proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + + // Write the storage proof nodes to the key-value store. + let storage_proof = proof_response.storage_proof.remove(0); + storage_proof.proof.into_iter().try_for_each(|node| { + let node_hash = keccak256(node.as_ref()); + let key = PreimageKey::new_keccak256(*node_hash); + kv_lock.set(key.into(), node.into())?; + Ok::<(), anyhow::Error>(()) + })?; + } + HintType::L2PayloadWitness => { + if !cfg.enable_experimental_witness_endpoint { + warn!( + target: "single_hint_handler", + "L2PayloadWitness hint was sent, but payload witness is disabled. Skipping hint." 
+ ); + return Ok(()); + } + + ensure!(hint.data.len() >= 32, "Invalid hint data length"); + + let parent_block_hash = B256::from_slice(&hint.data.as_ref()[..32]); + let payload_attributes: OpPayloadAttributes = + serde_json::from_slice(&hint.data[32..])?; + + let Ok(execute_payload_response) = providers + .l2 + .client() + .request::<(B256, OpPayloadAttributes), ExecutionWitness>( + "debug_executePayload", + (parent_block_hash, payload_attributes), + ) + .await + else { + // Allow this hint to fail silently, as not all execution clients support + // the `debug_executePayload` method. + return Ok(()); + }; + + let preimages = execute_payload_response + .state + .into_iter() + .chain(execute_payload_response.codes) + .chain(execute_payload_response.keys); + + let mut kv_lock = kv.write().await; + for preimage in preimages { + let computed_hash = keccak256(preimage.as_ref()); + + let key = PreimageKey::new_keccak256(*computed_hash); + kv_lock.set(key.into(), preimage.into())?; + } + } + } + + Ok(()) + } +} diff --git a/rust/kona/bin/host/src/single/local_kv.rs b/rust/kona/bin/host/src/single/local_kv.rs new file mode 100644 index 00000000000..612a54c8f0b --- /dev/null +++ b/rust/kona/bin/host/src/single/local_kv.rs @@ -0,0 +1,56 @@ +//! Contains a concrete implementation of the [`KeyValueStore`] trait that stores data on disk, +//! using the [`SingleChainHost`] config. + +use super::SingleChainHost; +use crate::{KeyValueStore, Result}; +use alloy_primitives::B256; +use kona_preimage::PreimageKey; +use kona_proof::boot::{ + L1_CONFIG_KEY, L1_HEAD_KEY, L2_CHAIN_ID_KEY, L2_CLAIM_BLOCK_NUMBER_KEY, L2_CLAIM_KEY, + L2_OUTPUT_ROOT_KEY, L2_ROLLUP_CONFIG_KEY, +}; + +/// A simple, synchronous key-value store that returns data from a [`SingleChainHost`] config. +#[derive(Debug)] +pub struct SingleChainLocalInputs { + cfg: SingleChainHost, +} + +impl SingleChainLocalInputs { + /// Create a new [`SingleChainLocalInputs`] with the given [`SingleChainHost`] config. 
+ pub const fn new(cfg: SingleChainHost) -> Self { + Self { cfg } + } +} + +impl KeyValueStore for SingleChainLocalInputs { + fn get(&self, key: B256) -> Option> { + let preimage_key = PreimageKey::try_from(*key).ok()?; + match preimage_key.key_value() { + L1_HEAD_KEY => Some(self.cfg.l1_head.to_vec()), + L2_OUTPUT_ROOT_KEY => Some(self.cfg.agreed_l2_output_root.to_vec()), + L2_CLAIM_KEY => Some(self.cfg.claimed_l2_output_root.to_vec()), + L2_CLAIM_BLOCK_NUMBER_KEY => { + Some(self.cfg.claimed_l2_block_number.to_be_bytes().to_vec()) + } + L2_CHAIN_ID_KEY => { + Some(self.cfg.l2_chain_id.unwrap_or_default().to_be_bytes().to_vec()) + } + L2_ROLLUP_CONFIG_KEY => { + let rollup_config = self.cfg.read_rollup_config().ok()?; + let serialized = serde_json::to_vec(&rollup_config).ok()?; + Some(serialized) + } + L1_CONFIG_KEY => { + let l1_config = self.cfg.read_l1_config().ok()?; + let serialized = serde_json::to_vec(&l1_config).ok()?; + Some(serialized) + } + _ => None, + } + } + + fn set(&mut self, _: B256, _: Vec) -> Result<()> { + unreachable!("LocalKeyValueStore is read-only") + } +} diff --git a/kona/bin/host/src/single/mod.rs b/rust/kona/bin/host/src/single/mod.rs similarity index 100% rename from kona/bin/host/src/single/mod.rs rename to rust/kona/bin/host/src/single/mod.rs diff --git a/kona/bin/node/Cargo.toml b/rust/kona/bin/node/Cargo.toml similarity index 100% rename from kona/bin/node/Cargo.toml rename to rust/kona/bin/node/Cargo.toml diff --git a/kona/bin/node/README.md b/rust/kona/bin/node/README.md similarity index 100% rename from kona/bin/node/README.md rename to rust/kona/bin/node/README.md diff --git a/kona/bin/node/build.rs b/rust/kona/bin/node/build.rs similarity index 100% rename from kona/bin/node/build.rs rename to rust/kona/bin/node/build.rs diff --git a/kona/bin/node/src/cli.rs b/rust/kona/bin/node/src/cli.rs similarity index 100% rename from kona/bin/node/src/cli.rs rename to rust/kona/bin/node/src/cli.rs diff --git 
a/kona/bin/node/src/commands/bootstore.rs b/rust/kona/bin/node/src/commands/bootstore.rs similarity index 93% rename from kona/bin/node/src/commands/bootstore.rs rename to rust/kona/bin/node/src/commands/bootstore.rs index 44f7d1fce00..052993ae105 100644 --- a/kona/bin/node/src/commands/bootstore.rs +++ b/rust/kona/bin/node/src/commands/bootstore.rs @@ -15,7 +15,7 @@ use std::path::PathBuf; /// ```sh /// kona-node bootstore [FLAGS] [OPTIONS] /// ``` -#[derive(Parser, Default, PartialEq, Debug, Clone)] +#[derive(Parser, Default, PartialEq, Eq, Debug, Clone)] #[command(about = "Utility tool to interact with local bootstores")] pub struct BootstoreCommand { /// Optionally prints all bootstores. @@ -57,7 +57,7 @@ impl BootstoreCommand { pub fn info(&self, chain_id: u64) -> anyhow::Result<()> { let chain = kona_registry::OPCHAINS .get(&chain_id) - .ok_or(anyhow::anyhow!("Chain ID {chain_id} not found in the registry"))?; + .ok_or_else(|| anyhow::anyhow!("Chain ID {chain_id} not found in the registry"))?; println!("{} Bootstore (Chain ID: {chain_id})", chain.name); let bootstore: BootStoreFile = self .bootstore diff --git a/kona/bin/node/src/commands/info.rs b/rust/kona/bin/node/src/commands/info.rs similarity index 96% rename from kona/bin/node/src/commands/info.rs rename to rust/kona/bin/node/src/commands/info.rs index f621e39ecf2..6bc25405bef 100644 --- a/kona/bin/node/src/commands/info.rs +++ b/rust/kona/bin/node/src/commands/info.rs @@ -16,7 +16,7 @@ use tracing::info; /// kona-node info /// ``` -#[derive(Parser, Default, PartialEq, Debug, Clone)] +#[derive(Parser, Default, PartialEq, Eq, Debug, Clone)] #[command(about = "Runs the information stack for the kona-node.")] pub struct InfoCommand; diff --git a/kona/bin/node/src/commands/mod.rs b/rust/kona/bin/node/src/commands/mod.rs similarity index 100% rename from kona/bin/node/src/commands/mod.rs rename to rust/kona/bin/node/src/commands/mod.rs diff --git a/rust/kona/bin/node/src/commands/net.rs 
b/rust/kona/bin/node/src/commands/net.rs new file mode 100644 index 00000000000..4a8478ee1f1 --- /dev/null +++ b/rust/kona/bin/node/src/commands/net.rs @@ -0,0 +1,158 @@ +//! Net Subcommand + +use crate::flags::{GlobalArgs, P2PArgs, RpcArgs}; +use clap::Parser; +use futures::future::OptionFuture; +use jsonrpsee::{RpcModule, core::async_trait, server::Server}; +use kona_cli::LogConfig; +use kona_gossip::P2pRpcRequest; +use kona_node_service::{ + EngineClientResult, NetworkActor, NetworkBuilder, NetworkEngineClient, NetworkInboundData, + NodeActor, +}; +use kona_registry::scr_rollup_config_by_alloy_ident; +use kona_rpc::{OpP2PApiServer, P2pRpc, RpcBuilder}; +use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use tokio::sync::mpsc; +use tokio_util::sync::CancellationToken; +use tracing::{error, info, warn}; +use url::Url; + +/// The `net` Subcommand +/// +/// The `net` subcommand is used to run the networking stack for the `kona-node`. +/// +/// # Usage +/// +/// ```sh +/// kona-node net [FLAGS] [OPTIONS] +/// ``` +#[derive(Parser, Default, PartialEq, Eq, Debug, Clone)] +#[command(about = "Runs the networking stack for the kona-node.")] +pub struct NetCommand { + /// URL of the L1 execution client RPC API. + /// This is used to load the unsafe block signer at startup. + /// Without this, the rollup config unsafe block signer will be used which may be outdated. + #[arg(long, visible_alias = "l1", env = "L1_ETH_RPC")] + pub l1_eth_rpc: Option, + /// P2P CLI Flags + #[command(flatten)] + pub p2p: P2PArgs, + /// RPC CLI Flags + #[command(flatten)] + pub rpc: RpcArgs, +} + +impl NetCommand { + /// Initializes the logging system based on global arguments. + pub fn init_logs(&self, args: &GlobalArgs) -> anyhow::Result<()> { + // Filter out discovery warnings since they're very very noisy. + let filter = tracing_subscriber::EnvFilter::from_default_env() + .add_directive("discv5=error".parse()?) 
+ .add_directive("bootstore=debug".parse()?); + + // Initialize the telemetry stack. + LogConfig::new(args.log_args.clone()).init_tracing_subscriber(Some(filter))?; + Ok(()) + } + + /// Run the Net subcommand. + pub async fn run(self, args: &GlobalArgs) -> anyhow::Result<()> { + let signer = args.genesis_signer()?; + info!(target: "net", "Genesis block signer: {:?}", signer); + + let rpc_config = Option::::from(self.rpc); + + // Get the rollup config from the args + let rollup_config = + scr_rollup_config_by_alloy_ident(&args.l2_chain_id).ok_or_else(|| { + anyhow::anyhow!("Rollup config not found for chain id: {}", args.l2_chain_id) + })?; + + // Start the Network Stack + self.p2p.check_ports()?; + let p2p_config = self.p2p.config(rollup_config, args, self.l1_eth_rpc).await?; + + let (block_tx, mut block_rx) = mpsc::channel(1024); + let (NetworkInboundData { p2p_rpc: rpc, .. }, network) = NetworkActor::new( + ForwardingNetworkEngineClient { block_tx }, + CancellationToken::new(), + NetworkBuilder::from(p2p_config), + ); + + network.start(()).await?; + + info!(target: "net", "Network started, receiving blocks."); + + // On an interval, use the rpc tx to request stats about the p2p network. + let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(2)); + + let handle = if let Some(config) = rpc_config { + info!(target: "net", socket = ?config.socket, "Starting RPC server"); + + // Setup the RPC server with the P2P RPC Module + let mut launcher = RpcModule::new(()); + launcher.merge(P2pRpc::new(rpc.clone()).into_rpc())?; + + let server = Server::builder().build(config.socket).await?; + Some(server.start(launcher)) + } else { + info!(target: "net", "RPC server disabled"); + None + }; + + loop { + tokio::select! 
{ + Some(payload) = block_rx.recv() => { + info!(target: "net", "Received unsafe payload: {:?}", payload.execution_payload.block_hash()); + } + _ = interval.tick(), if !rpc.is_closed() => { + let (otx, mut orx) = tokio::sync::oneshot::channel(); + if let Err(e) = rpc.send(P2pRpcRequest::PeerCount(otx)).await { + warn!(target: "net", "Failed to send network rpc request: {:?}", e); + continue; + } + tokio::time::timeout(tokio::time::Duration::from_secs(5), async move { + loop { + match orx.try_recv() { + Ok((d, g)) => { + let d = d.unwrap_or_default(); + info!(target: "net", "Peer counts: Discovery={} | Swarm={}", d, g); + break; + } + Err(tokio::sync::oneshot::error::TryRecvError::Empty) => { + /* Keep trying to receive */ + } + Err(tokio::sync::oneshot::error::TryRecvError::Closed) => { + break; + } + } + } + }).await.unwrap(); + } + _ = OptionFuture::from(handle.clone().map(|h| h.stopped())) => { + warn!(target: "net", "RPC server stopped"); + return Ok(()); + } + } + } + } +} + +#[derive(Debug)] +struct ForwardingNetworkEngineClient { + block_tx: mpsc::Sender, +} + +#[async_trait] +impl NetworkEngineClient for ForwardingNetworkEngineClient { + async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { + let _ = self + .block_tx + .send(block) + .await + .inspect_err(|e| error!(target: "net", "Failed to send block: {:?}", e)); + + Ok(()) + } +} diff --git a/rust/kona/bin/node/src/commands/node.rs b/rust/kona/bin/node/src/commands/node.rs new file mode 100644 index 00000000000..093e7ee6fd4 --- /dev/null +++ b/rust/kona/bin/node/src/commands/node.rs @@ -0,0 +1,521 @@ +//! Node Subcommand. 
+ +use crate::{ + flags::{ + BuilderClientArgs, DerivationDelegateArgs, GlobalArgs, L1ClientArgs, L2ClientArgs, P2PArgs, + RollupBoostFlags, RpcArgs, SequencerArgs, + }, + metrics::{CliMetrics, init_rollup_config_metrics}, +}; +use alloy_provider::RootProvider; +use alloy_rpc_types_engine::JwtSecret; +use alloy_transport_http::Http; +use anyhow::{Result, bail}; +use backon::{ExponentialBuilder, Retryable}; +use clap::Parser; +use kona_cli::{LogConfig, MetricsArgs}; +use kona_engine::{HyperAuthClient, OpEngineClient}; +use kona_genesis::{L1ChainConfig, RollupConfig}; +use kona_node_service::{EngineConfig, L1ConfigBuilder, NodeMode, RollupNodeBuilder}; +use kona_registry::{L1Config, scr_rollup_config_by_alloy_ident}; +use op_alloy_network::Optimism; +use op_alloy_provider::ext::engine::OpEngineApi; +use serde_json::from_reader; +use std::{fs::File, io::Write, path::PathBuf, sync::Arc, time::Duration}; +use strum::IntoEnumIterator; +use tracing::{debug, error, info}; + +/// A JWT token validation error. +#[derive(Debug, thiserror::Error)] +pub(super) enum JwtValidationError { + #[error("JWT signature is invalid")] + InvalidSignature, + #[error("Failed to exchange capabilities with engine: {0}")] + CapabilityExchange(String), +} + +/// Command-line interface for running a Kona rollup node. +/// +/// The `NodeCommand` struct defines all the configuration options needed to start and run +/// a rollup node in the Kona ecosystem. It supports multiple node modes including validator +/// and sequencer modes, and provides comprehensive networking and RPC configuration options. 
+/// +/// # Node Modes +/// +/// The node can operate in different modes: +/// - **Validator**: Validates L2 blocks and participates in consensus +/// - **Sequencer**: Sequences transactions and produces L2 blocks +/// +/// # Configuration Sources +/// +/// Configuration can be provided through: +/// - Command-line arguments +/// - Environment variables (prefixed with `KONA_NODE_`) +/// - Configuration files (for rollup config) +/// +/// # Examples +/// +/// ```bash +/// # Run as validator with default settings +/// kona node --l1-eth-rpc http://localhost:8545 \ +/// --l1-beacon http://localhost:5052 \ +/// --l2-engine-rpc http://localhost:8551 +/// +/// # Run as sequencer with custom JWT secret +/// kona node --mode sequencer \ +/// --l1-eth-rpc http://localhost:8545 \ +/// --l1-beacon http://localhost:5052 \ +/// --l2-engine-rpc http://localhost:8551 \ +/// --l2-engine-jwt-secret /path/to/jwt.hex +/// ``` +#[derive(Parser, Debug, Clone)] +#[command(about = "Runs the consensus node")] +pub struct NodeCommand { + /// The mode to run the node in. + #[arg( + long = "mode", + default_value_t = NodeMode::Validator, + env = "KONA_NODE_MODE", + help = format!( + "The mode to run the node in. Supported modes are: {}", + NodeMode::iter() + .map(|mode| format!("\"{}\"", mode.to_string())) + .collect::>() + .join(", ") + ) + )] + pub node_mode: NodeMode, + + /// L1 RPC CLI arguments. + #[clap(flatten)] + pub l1_rpc_args: L1ClientArgs, + + /// L2 engine CLI arguments. + #[clap(flatten)] + pub l2_client_args: L2ClientArgs, + + /// Optional block builder client. + #[clap(flatten)] + pub builder_client_args: BuilderClientArgs, + + /// Optional derivation delegation client. 
+ #[clap(flatten)] + pub derivation_delegate_args: DerivationDelegateArgs, + + /// Path to a custom L2 rollup configuration file + /// (overrides the default rollup configuration from the registry) + #[arg(long, visible_alias = "rollup-cfg", env = "KONA_NODE_ROLLUP_CONFIG")] + pub l2_config_file: Option, + /// Path to a custom L1 rollup configuration file + /// (overrides the default rollup configuration from the registry) + #[arg(long, visible_alias = "rollup-l1-cfg", env = "KONA_NODE_L1_CHAIN_CONFIG")] + pub l1_config_file: Option, + /// P2P CLI arguments. + #[command(flatten)] + pub p2p_flags: P2PArgs, + /// RPC CLI arguments. + #[command(flatten)] + pub rpc_flags: RpcArgs, + /// SEQUENCER CLI arguments. + #[command(flatten)] + pub sequencer_flags: SequencerArgs, + + /// Rollup boost CLI arguments - contains the builder and l2 engine arguments. + #[command(flatten)] + pub rollup_boost_flags: RollupBoostFlags, +} + +impl Default for NodeCommand { + fn default() -> Self { + Self { + l1_rpc_args: L1ClientArgs::default(), + l2_client_args: L2ClientArgs::default(), + builder_client_args: BuilderClientArgs::default(), + derivation_delegate_args: DerivationDelegateArgs::default(), + l2_config_file: None, + l1_config_file: None, + node_mode: NodeMode::Validator, + p2p_flags: P2PArgs::default(), + rpc_flags: RpcArgs::default(), + sequencer_flags: SequencerArgs::default(), + rollup_boost_flags: RollupBoostFlags::default(), + } + } +} + +impl NodeCommand { + /// Initializes the logging system based on global arguments. + pub fn init_logs(&self, args: &GlobalArgs) -> anyhow::Result<()> { + // Filter out discovery warnings since they're very very noisy. + let filter = tracing_subscriber::EnvFilter::from_default_env() + .add_directive("discv5=error".parse()?); + + LogConfig::new(args.log_args.clone()).init_tracing_subscriber(Some(filter))?; + Ok(()) + } + + /// Initializes CLI metrics for the Node subcommand. 
+ pub fn init_cli_metrics(&self, args: &MetricsArgs) -> anyhow::Result<()> { + if !args.enabled { + debug!("CLI metrics are disabled"); + return Ok(()); + } + metrics::gauge!( + CliMetrics::IDENTIFIER, + &[ + (CliMetrics::P2P_PEER_SCORING_LEVEL, self.p2p_flags.scoring.to_string()), + (CliMetrics::P2P_TOPIC_SCORING_ENABLED, self.p2p_flags.topic_scoring.to_string()), + (CliMetrics::P2P_BANNING_ENABLED, self.p2p_flags.ban_enabled.to_string()), + ( + CliMetrics::P2P_PEER_REDIALING, + self.p2p_flags.peer_redial.unwrap_or(0).to_string() + ), + (CliMetrics::P2P_FLOOD_PUBLISH, self.p2p_flags.gossip_flood_publish.to_string()), + (CliMetrics::P2P_DISCOVERY_INTERVAL, self.p2p_flags.discovery_interval.to_string()), + ( + CliMetrics::P2P_ADVERTISE_IP, + self.p2p_flags + .advertise_ip + .map(|ip| ip.to_string()) + .unwrap_or_else(|| String::from("0.0.0.0")) + ), + ( + CliMetrics::P2P_ADVERTISE_TCP_PORT, + self.p2p_flags + .advertise_tcp_port + .map_or_else(|| "auto".to_string(), |p| p.to_string()) + ), + ( + CliMetrics::P2P_ADVERTISE_UDP_PORT, + self.p2p_flags + .advertise_udp_port + .map_or_else(|| "auto".to_string(), |p| p.to_string()) + ), + (CliMetrics::P2P_PEERS_LO, self.p2p_flags.peers_lo.to_string()), + (CliMetrics::P2P_PEERS_HI, self.p2p_flags.peers_hi.to_string()), + (CliMetrics::P2P_GOSSIP_MESH_D, self.p2p_flags.gossip_mesh_d.to_string()), + (CliMetrics::P2P_GOSSIP_MESH_D_LO, self.p2p_flags.gossip_mesh_dlo.to_string()), + (CliMetrics::P2P_GOSSIP_MESH_D_HI, self.p2p_flags.gossip_mesh_dhi.to_string()), + (CliMetrics::P2P_GOSSIP_MESH_D_LAZY, self.p2p_flags.gossip_mesh_dlazy.to_string()), + (CliMetrics::P2P_BAN_DURATION, self.p2p_flags.ban_duration.to_string()), + ] + ) + .set(1); + Ok(()) + } + + /// Check if the error is related to JWT signature validation + fn is_jwt_signature_error(error: &dyn std::error::Error) -> bool { + let mut source = Some(error); + while let Some(err) = source { + let err_str = err.to_string().to_lowercase(); + if err_str.contains("signature 
invalid") || + (err_str.contains("jwt") && err_str.contains("invalid")) || + err_str.contains("unauthorized") || + err_str.contains("authentication failed") + { + return true; + } + source = err.source(); + } + false + } + + /// Helper to check JWT signature error from `anyhow::Error` (for retry condition) + fn is_jwt_signature_error_from_anyhow(error: &anyhow::Error) -> bool { + Self::is_jwt_signature_error(error.as_ref() as &dyn std::error::Error) + } + + /// Validate the jwt secret if specified by exchanging capabilities with the engine. + /// Since the engine client will fail if the jwt token is invalid, this allows to ensure + /// that the jwt token passed as a cli arg is correct. + pub async fn validate_jwt(&self) -> anyhow::Result { + let jwt_secret = self.l2_jwt_secret()?; + + let engine = OpEngineClient::>::rpc_client::( + self.l2_client_args.l2_engine_rpc.clone(), + jwt_secret, + ); + + let exchange = || async { + match as OpEngineApi< + Optimism, + Http, + >>::exchange_capabilities(&engine, vec![]) + .await + { + Ok(_) => { + debug!("Successfully exchanged capabilities with engine"); + Ok(jwt_secret) + } + Err(e) => { + if Self::is_jwt_signature_error(&e) { + error!( + "Engine API JWT secret differs from the one specified by --l2.jwt-secret/--l2.jwt-secret-encoded" + ); + error!( + "Ensure that the JWT secret file specified is correct (by default it is `jwt.hex` in the current directory)" + ); + return Err(JwtValidationError::InvalidSignature.into()); + } + Err(JwtValidationError::CapabilityExchange(e.to_string()).into()) + } + } + }; + + exchange + .retry(ExponentialBuilder::default()) + .when(|e| !Self::is_jwt_signature_error_from_anyhow(e)) + .notify(|_, duration| { + debug!("Retrying engine capability handshake after {duration:?}"); + }) + .await + } + + /// Run the Node subcommand. 
+ pub async fn run(self, args: &GlobalArgs) -> anyhow::Result<()> { + let cfg = self.get_l2_config(args)?; + + info!( + target: "rollup_node", + chain_id = cfg.l2_chain_id.id(), + "Starting rollup node services" + ); + for hf in cfg.hardforks.to_string().lines() { + info!(target: "rollup_node", "{hf}"); + } + + let l1_config = L1ConfigBuilder { + chain_config: self.get_l1_config(cfg.l1_chain_id)?, + trust_rpc: self.l1_rpc_args.l1_trust_rpc, + beacon: self.l1_rpc_args.l1_beacon.clone(), + rpc_url: self.l1_rpc_args.l1_eth_rpc.clone(), + slot_duration_override: self.l1_rpc_args.l1_slot_duration_override, + }; + + // If metrics are enabled, initialize the global cli metrics. + args.metrics.enabled.then(|| init_rollup_config_metrics(&cfg)); + + let jwt_secret = self.validate_jwt().await?; + + self.p2p_flags.check_ports()?; + let p2p_config = self + .p2p_flags + .clone() + .config(&cfg, args, Some(self.l1_rpc_args.l1_eth_rpc.clone())) + .await?; + let rpc_config = self.rpc_flags.clone().into(); + + let engine_config = EngineConfig { + config: Arc::new(cfg.clone()), + builder_url: self.builder_client_args.l2_builder_rpc.clone(), + builder_jwt_secret: self.builder_jwt_secret()?, + builder_timeout: Duration::from_millis(self.builder_client_args.builder_timeout), + l2_url: self.l2_client_args.l2_engine_rpc.clone(), + l2_jwt_secret: jwt_secret, + l2_timeout: Duration::from_millis(self.l2_client_args.l2_engine_timeout), + l1_url: self.l1_rpc_args.l1_eth_rpc.clone(), + mode: self.node_mode, + rollup_boost: self.rollup_boost_flags.as_rollup_boost_args(), + }; + + RollupNodeBuilder::new( + cfg, + l1_config, + self.l2_client_args.l2_trust_rpc, + engine_config, + p2p_config, + rpc_config, + ) + .with_sequencer_config(self.sequencer_flags.config()) + .with_derivation_delegate_config(self.derivation_delegate_args.config()) + .build() + .start() + .await + .map_err(|e| { + error!(target: "rollup_node", "Failed to start rollup node service: {e}"); + anyhow::anyhow!("{e}") + })?; + + 
Ok(()) + } + + /// Get the L1 config, either from a file or the known chains. + pub fn get_l1_config(&self, l1_chain_id: u64) -> Result { + match &self.l1_config_file { + Some(path) => { + debug!("Loading l1 config from file: {:?}", path); + let file = File::open(path) + .map_err(|e| anyhow::anyhow!("Failed to open l1 config file: {e}"))?; + from_reader(file).map_err(|e| anyhow::anyhow!("Failed to parse l1 config: {e}")) + } + None => { + debug!("Loading l1 config from known chains"); + let cfg = L1Config::get_l1_genesis(l1_chain_id).map_err(|e| { + anyhow::anyhow!("Failed to find l1 config for chain ID {l1_chain_id}: {e}") + })?; + Ok(cfg.into()) + } + } + } + + /// Get the L2 rollup config, either from a file or the superchain registry. + pub fn get_l2_config(&self, args: &GlobalArgs) -> Result { + match &self.l2_config_file { + Some(path) => { + debug!("Loading l2 config from file: {:?}", path); + let file = File::open(path) + .map_err(|e| anyhow::anyhow!("Failed to open l2 config file: {e}"))?; + from_reader(file).map_err(|e| anyhow::anyhow!("Failed to parse l2 config: {e}")) + } + None => { + debug!("Loading l2 config from superchain registry"); + let Some(cfg) = scr_rollup_config_by_alloy_ident(&args.l2_chain_id) else { + bail!("Failed to find l2 config for chain ID {}", args.l2_chain_id); + }; + Ok(cfg.clone()) + } + } + } + + /// Returns the L2 JWT secret for the engine API + /// using the provided [`PathBuf`]. If the file is not found, + /// it will return the default JWT secret. 
+ pub fn l2_jwt_secret(&self) -> anyhow::Result { + if let Some(path) = &self.l2_client_args.l2_engine_jwt_secret && + let Ok(secret) = std::fs::read_to_string(path) + { + return JwtSecret::from_hex(secret) + .map_err(|e| anyhow::anyhow!("Failed to parse JWT secret: {e}")); + } + + if let Some(secret) = &self.l2_client_args.l2_engine_jwt_encoded { + return Ok(*secret); + } + + Self::default_jwt_secret("l2_jwt.hex") + } + + /// Returns the builder JWT secret for the engine API + /// using the provided [`PathBuf`]. If the file is not found, + /// it will return the default JWT secret. + pub fn builder_jwt_secret(&self) -> anyhow::Result { + if let Some(path) = &self.builder_client_args.builder_jwt_path && + let Ok(secret) = std::fs::read_to_string(path) + { + return JwtSecret::from_hex(secret) + .map_err(|e| anyhow::anyhow!("Failed to parse JWT secret: {e}")); + } + + if let Some(secret) = &self.builder_client_args.builder_jwt_secret { + return Ok(*secret); + } + + Self::default_jwt_secret("builder_jwt.hex") + } + + /// Uses the current directory to attempt to read + /// the JWT secret from a file named `file_name`. + /// If the file is not found, it will create a new random JWT secret and write it to the file. 
+ pub fn default_jwt_secret(file_name: &str) -> anyhow::Result { + let cur_dir = std::env::current_dir() + .map_err(|e| anyhow::anyhow!("Failed to get current directory: {e}"))?; + std::fs::read_to_string(cur_dir.join(file_name)).map_or_else( + |_| { + let secret = JwtSecret::random(); + + if let Ok(mut file) = File::create(file_name) && + let Err(e) = file + .write_all(alloy_primitives::hex::encode(secret.as_bytes()).as_bytes()) + { + return Err(anyhow::anyhow!("Failed to write JWT secret to file: {e}")); + } + + Ok(secret) + }, + |content| Ok(JwtSecret::from_hex(content)?), + ) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use anyhow::anyhow; + + #[derive(Debug)] + struct MockError { + message: String, + } + + impl std::fmt::Display for MockError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.message) + } + } + + impl std::error::Error for MockError {} + + const fn default_flags() -> &'static [&'static str] { + &[ + "--l1-eth-rpc", + "http://localhost:8545", + "--l1-beacon", + "http://localhost:5052", + "--l2-engine-rpc", + "http://localhost:8551", + ] + } + + #[test] + fn test_node_cli_defaults() { + let args = NodeCommand::parse_from( + std::iter::once(&"node").chain(default_flags().iter()).copied(), + ); + assert_eq!(args.node_mode, NodeMode::Validator); + } + + #[test] + fn test_node_cli_missing_l1_eth_rpc() { + let err = NodeCommand::try_parse_from(["node"]).unwrap_err(); + assert!(err.to_string().contains("--l1-eth-rpc")); + } + + #[test] + fn test_node_cli_missing_l1_beacon() { + let err = NodeCommand::try_parse_from(["node", "--l1-eth-rpc", "http://localhost:8545"]) + .unwrap_err(); + assert!(err.to_string().contains("--l1-beacon")); + } + + #[test] + fn test_node_cli_missing_l2_engine_rpc() { + let err = NodeCommand::try_parse_from([ + "node", + "--l1-eth-rpc", + "http://localhost:8545", + "--l1-beacon", + "http://localhost:5052", + ]) + .unwrap_err(); + 
assert!(err.to_string().contains("--l2-engine-rpc")); + } + + #[test] + fn test_is_jwt_signature_error() { + let jwt_error = MockError { message: "signature invalid".to_string() }; + assert!(NodeCommand::is_jwt_signature_error(&jwt_error)); + + let other_error = MockError { message: "network timeout".to_string() }; + assert!(!NodeCommand::is_jwt_signature_error(&other_error)); + } + + #[test] + fn test_is_jwt_signature_error_from_anyhow() { + let jwt_anyhow_error = anyhow!("signature invalid"); + assert!(NodeCommand::is_jwt_signature_error_from_anyhow(&jwt_anyhow_error)); + + let other_anyhow_error = anyhow!("network timeout"); + assert!(!NodeCommand::is_jwt_signature_error_from_anyhow(&other_anyhow_error)); + } +} diff --git a/kona/bin/node/src/commands/registry.rs b/rust/kona/bin/node/src/commands/registry.rs similarity index 95% rename from kona/bin/node/src/commands/registry.rs rename to rust/kona/bin/node/src/commands/registry.rs index b203ccf4450..3115cc1668b 100644 --- a/kona/bin/node/src/commands/registry.rs +++ b/rust/kona/bin/node/src/commands/registry.rs @@ -13,7 +13,7 @@ use kona_cli::LogConfig; /// ```sh /// kona-node registry [FLAGS] [OPTIONS] /// ``` -#[derive(Parser, Default, PartialEq, Debug, Clone)] +#[derive(Parser, Default, PartialEq, Eq, Debug, Clone)] #[command(about = "Lists the OP Stack chains available in the superchain-registry")] pub struct RegistryCommand; diff --git a/kona/bin/node/src/flags/engine/flashblocks.rs b/rust/kona/bin/node/src/flags/engine/flashblocks.rs similarity index 96% rename from kona/bin/node/src/flags/engine/flashblocks.rs rename to rust/kona/bin/node/src/flags/engine/flashblocks.rs index bd09dd2c707..dbbb8d3b4d9 100644 --- a/kona/bin/node/src/flags/engine/flashblocks.rs +++ b/rust/kona/bin/node/src/flags/engine/flashblocks.rs @@ -23,7 +23,7 @@ pub struct FlashblocksFlags { )] pub flashblocks: bool, - /// Flashblocks Builder WebSocket URL + /// Flashblocks Builder `WebSocket` URL #[arg( long, visible_alias = 
"rollup-boost.flashblocks-builder-url", @@ -32,7 +32,7 @@ pub struct FlashblocksFlags { )] pub flashblocks_builder_url: Url, - /// Flashblocks WebSocket host for outbound connections + /// Flashblocks `WebSocket` host for outbound connections #[arg( long, visible_alias = "rollup-boost.flashblocks-host", @@ -41,7 +41,7 @@ pub struct FlashblocksFlags { )] pub flashblocks_host: String, - /// Flashblocks WebSocket port for outbound connections + /// Flashblocks `WebSocket` port for outbound connections #[arg( long, visible_alias = "rollup-boost.flashblocks-port", @@ -67,7 +67,7 @@ impl Default for FlashblocksFlags { } } -/// Configuration for the Flashblocks WebSocket connection. +/// Configuration for the Flashblocks `WebSocket` connection. #[derive(Parser, Debug, Clone, Copy)] pub struct FlashblocksWebsocketFlags { /// Minimum time for exponential backoff for timeout if builder disconnected diff --git a/kona/bin/node/src/flags/engine/mod.rs b/rust/kona/bin/node/src/flags/engine/mod.rs similarity index 100% rename from kona/bin/node/src/flags/engine/mod.rs rename to rust/kona/bin/node/src/flags/engine/mod.rs diff --git a/kona/bin/node/src/flags/engine/providers.rs b/rust/kona/bin/node/src/flags/engine/providers.rs similarity index 98% rename from kona/bin/node/src/flags/engine/providers.rs rename to rust/kona/bin/node/src/flags/engine/providers.rs index c1b38c2d3ce..340125176be 100644 --- a/kona/bin/node/src/flags/engine/providers.rs +++ b/rust/kona/bin/node/src/flags/engine/providers.rs @@ -136,7 +136,7 @@ impl Default for L2ClientArgs { /// L2 derivation delegate connection arguments. #[derive(Clone, Debug, Default, clap::Args)] pub struct DerivationDelegateArgs { - /// The source must be an OP Stack L2 CL RPC exposing optimism_syncStatus. + /// The source must be an OP Stack L2 CL RPC exposing `optimism_syncStatus`. 
#[arg(long, visible_alias = "l2.follow.source", env = "KONA_NODE_L2_FOLLOW_SOURCE")] pub l2_follow_source: Option, } diff --git a/kona/bin/node/src/flags/engine/rollup_boost.rs b/rust/kona/bin/node/src/flags/engine/rollup_boost.rs similarity index 100% rename from kona/bin/node/src/flags/engine/rollup_boost.rs rename to rust/kona/bin/node/src/flags/engine/rollup_boost.rs diff --git a/kona/bin/node/src/flags/globals.rs b/rust/kona/bin/node/src/flags/globals.rs similarity index 93% rename from kona/bin/node/src/flags/globals.rs rename to rust/kona/bin/node/src/flags/globals.rs index 6d492478871..7af4a5f4278 100644 --- a/kona/bin/node/src/flags/globals.rs +++ b/rust/kona/bin/node/src/flags/globals.rs @@ -44,12 +44,12 @@ impl GlobalArgs { let id = self.l2_chain_id; OPCHAINS .get(&id.id()) - .ok_or(anyhow::anyhow!("No chain config found for chain ID: {id}"))? + .ok_or_else(|| anyhow::anyhow!("No chain config found for chain ID: {id}"))? .roles .as_ref() - .ok_or(anyhow::anyhow!("No roles found for chain ID: {id}"))? + .ok_or_else(|| anyhow::anyhow!("No roles found for chain ID: {id}"))? .unsafe_block_signer - .ok_or(anyhow::anyhow!("No unsafe block signer found for chain ID: {id}")) + .ok_or_else(|| anyhow::anyhow!("No unsafe block signer found for chain ID: {id}")) } } diff --git a/rust/kona/bin/node/src/flags/metrics.rs b/rust/kona/bin/node/src/flags/metrics.rs new file mode 100644 index 00000000000..86effd9859b --- /dev/null +++ b/rust/kona/bin/node/src/flags/metrics.rs @@ -0,0 +1,62 @@ +//! Prometheus metrics CLI args +//! +//! Specifies the available flags for prometheus metric configuration inside CLI + +use crate::metrics::VersionInfo; +use kona_cli::MetricsArgs; + +/// Initializes metrics for a Kona application, including Prometheus and node-specific metrics. +/// Initialize the tracing stack and Prometheus metrics recorder. +/// +/// This function should be called at the beginning of the program. 
+pub fn init_unified_metrics(args: &MetricsArgs) -> anyhow::Result<()> { + args.init_metrics()?; + if args.enabled { + kona_gossip::Metrics::init(); + kona_disc::Metrics::init(); + kona_engine::Metrics::init(); + kona_node_service::Metrics::init(); + kona_derive::Metrics::init(); + kona_providers_alloy::Metrics::init(); + VersionInfo::from_build().register_version_metrics(); + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use clap::Parser; + use std::net::IpAddr; + + /// A mock command that uses the `MetricsArgs`. + #[derive(Parser, Debug, Clone)] + #[command(about = "Mock command")] + struct MockCommand { + /// Metrics CLI Flags + #[clap(flatten)] + pub metrics: MetricsArgs, + } + + #[test] + fn test_metrics_args_listen_enabled() { + let args = MockCommand::parse_from(["test", "--metrics.enabled"]); + assert!(args.metrics.enabled); + + let args = MockCommand::parse_from(["test"]); + assert!(!args.metrics.enabled); + } + + #[test] + fn test_metrics_args_listen_ip() { + let args = MockCommand::parse_from(["test", "--metrics.addr", "127.0.0.1"]); + let expected: IpAddr = "127.0.0.1".parse().unwrap(); + assert_eq!(args.metrics.addr, expected); + } + + #[test] + fn test_metrics_args_listen_port() { + let args = MockCommand::parse_from(["test", "--metrics.port", "1234"]); + assert_eq!(args.metrics.port, 1234); + } +} diff --git a/kona/bin/node/src/flags/mod.rs b/rust/kona/bin/node/src/flags/mod.rs similarity index 100% rename from kona/bin/node/src/flags/mod.rs rename to rust/kona/bin/node/src/flags/mod.rs diff --git a/kona/bin/node/src/flags/overrides.rs b/rust/kona/bin/node/src/flags/overrides.rs similarity index 100% rename from kona/bin/node/src/flags/overrides.rs rename to rust/kona/bin/node/src/flags/overrides.rs diff --git a/rust/kona/bin/node/src/flags/p2p.rs b/rust/kona/bin/node/src/flags/p2p.rs new file mode 100644 index 00000000000..3071f88a272 --- /dev/null +++ b/rust/kona/bin/node/src/flags/p2p.rs @@ -0,0 +1,726 @@ +//! P2P CLI Flags +//! 
+//! These are based on p2p flags from the [`op-node`][op-node] CLI. +//! +//! [op-node]: https://github.com/ethereum-optimism/optimism/blob/develop/op-node/flags/p2p_flags.go + +use crate::flags::{GlobalArgs, SignerArgs}; +use alloy_primitives::{B256, b256}; +use alloy_provider::Provider; +use alloy_signer_local::PrivateKeySigner; +use anyhow::Result; +use clap::Parser; +use discv5::enr::k256; +use kona_derive::ChainProvider; +use kona_disc::LocalNode; +use kona_genesis::RollupConfig; +use kona_gossip::GaterConfig; +use kona_node_service::NetworkConfig; +use kona_peers::{BootNode, BootStoreFile, PeerMonitoring, PeerScoreLevel}; +use kona_providers_alloy::AlloyChainProvider; +use libp2p::identity::Keypair; +use std::{ + net::{IpAddr, SocketAddr, ToSocketAddrs}, + num::ParseIntError, + path::PathBuf, + str::FromStr, +}; +use tokio::time::Duration; +use url::Url; + +/// Resolves a hostname or IP address string to an [`IpAddr`]. +/// +/// Accepts either: +/// - A valid IP address string (e.g., "127.0.0.1", "`::1`") +/// - A DNS hostname (e.g., "node1.example.com") +/// +/// For DNS hostnames, this performs synchronous DNS resolution and returns the first +/// resolved IP address. +fn resolve_host(host: &str) -> Result { + // First, try to parse as a direct IP address + if let Ok(ip) = host.parse::() { + return Ok(ip); + } + + // If that fails, try DNS resolution + // We append a port to make it a valid socket address for resolution + let socket_addr = format!("{host}:0"); + match socket_addr.to_socket_addrs() { + Ok(mut addrs) => addrs + .next() + .map(|addr| addr.ip()) + .ok_or_else(|| format!("DNS resolution for '{host}' returned no addresses")), + Err(e) => Err(format!("Failed to resolve '{host}': {e}")), + } +} + +/// P2P CLI Flags +#[derive(Parser, Clone, Debug, PartialEq, Eq)] +pub struct P2PArgs { + /// Disable Discv5 (node discovery). 
+ #[arg(long = "p2p.no-discovery", default_value = "false", env = "KONA_NODE_P2P_NO_DISCOVERY")] + pub no_discovery: bool, + /// Read the hex-encoded 32-byte private key for the peer ID from this txt file. + /// Created if not already exists. Important to persist to keep the same network identity after + /// restarting, maintaining the previous advertised identity. + #[arg(long = "p2p.priv.path", env = "KONA_NODE_P2P_PRIV_PATH")] + pub priv_path: Option, + /// The hex-encoded 32-byte private key for the peer ID. + #[arg(long = "p2p.priv.raw", env = "KONA_NODE_P2P_PRIV_RAW")] + pub private_key: Option, + + /// IP address or DNS hostname to advertise to external peers from Discv5. + /// Optional argument. Use the `p2p.listen.ip` if not set. + /// Accepts either an IP address (e.g., "1.2.3.4") or a DNS hostname (e.g., + /// "node1.example.com"). DNS hostnames are resolved to IP addresses at startup. + /// + /// Technical note: if this argument is set, the dynamic ENR updates from the discovery layer + /// will be disabled. This is to allow the advertised IP to be static (to use in a network + /// behind a NAT for instance). + #[arg(long = "p2p.advertise.ip", env = "KONA_NODE_P2P_ADVERTISE_IP", value_parser = resolve_host)] + pub advertise_ip: Option, + /// TCP port to advertise to external peers from the discovery layer. Same as `p2p.listen.tcp` + /// if set to zero. + #[arg(long = "p2p.advertise.tcp", env = "KONA_NODE_P2P_ADVERTISE_TCP_PORT")] + pub advertise_tcp_port: Option, + /// UDP port to advertise to external peers from the discovery layer. + /// Same as `p2p.listen.udp` if set to zero. + #[arg(long = "p2p.advertise.udp", env = "KONA_NODE_P2P_ADVERTISE_UDP_PORT")] + pub advertise_udp_port: Option, + + /// IP address or DNS hostname to bind LibP2P/Discv5 to. + /// Accepts either an IP address (e.g., "0.0.0.0") or a DNS hostname (e.g., + /// "node1.example.com"). DNS hostnames are resolved to IP addresses at startup. 
+ #[arg(long = "p2p.listen.ip", default_value = "0.0.0.0", env = "KONA_NODE_P2P_LISTEN_IP", value_parser = resolve_host)] + pub listen_ip: IpAddr, + /// TCP port to bind `LibP2P` to. Any available system port if set to 0. + #[arg(long = "p2p.listen.tcp", default_value = "9222", env = "KONA_NODE_P2P_LISTEN_TCP_PORT")] + pub listen_tcp_port: u16, + /// UDP port to bind Discv5 to. Same as TCP port if left 0. + #[arg(long = "p2p.listen.udp", default_value = "9223", env = "KONA_NODE_P2P_LISTEN_UDP_PORT")] + pub listen_udp_port: u16, + /// Low-tide peer count. The node actively searches for new peer connections if below this + /// amount. + #[arg(long = "p2p.peers.lo", default_value = "20", env = "KONA_NODE_P2P_PEERS_LO")] + pub peers_lo: u32, + /// High-tide peer count. The node starts pruning peer connections slowly after reaching this + /// number. + #[arg(long = "p2p.peers.hi", default_value = "30", env = "KONA_NODE_P2P_PEERS_HI")] + pub peers_hi: u32, + /// Grace period to keep a newly connected peer around, if it is not misbehaving. + #[arg( + long = "p2p.peers.grace", + default_value = "30", + env = "KONA_NODE_P2P_PEERS_GRACE", + value_parser = |arg: &str| -> Result {Ok(Duration::from_secs(arg.parse()?))} + )] + pub peers_grace: Duration, + /// Configure `GossipSub` topic stable mesh target count. + /// Aka: The desired outbound degree (numbers of peers to gossip to). + #[arg(long = "p2p.gossip.mesh.d", default_value = "8", env = "KONA_NODE_P2P_GOSSIP_MESH_D")] + pub gossip_mesh_d: usize, + /// Configure `GossipSub` topic stable mesh low watermark. + /// Aka: The lower bound of outbound degree. + #[arg(long = "p2p.gossip.mesh.lo", default_value = "6", env = "KONA_NODE_P2P_GOSSIP_MESH_DLO")] + pub gossip_mesh_dlo: usize, + /// Configure `GossipSub` topic stable mesh high watermark. + /// Aka: The upper bound of outbound degree (additional peers will not receive gossip). 
+ #[arg( + long = "p2p.gossip.mesh.dhi", + default_value = "12", + env = "KONA_NODE_P2P_GOSSIP_MESH_DHI" + )] + pub gossip_mesh_dhi: usize, + /// Configure `GossipSub` gossip target. + /// Aka: The target degree for gossip only (not messaging like p2p.gossip.mesh.d, just + /// announcements of IHAVE). + #[arg( + long = "p2p.gossip.mesh.dlazy", + default_value = "6", + env = "KONA_NODE_P2P_GOSSIP_MESH_DLAZY" + )] + pub gossip_mesh_dlazy: usize, + /// Configure `GossipSub` to publish messages to all known peers on the topic, outside of the + /// mesh. Also see Dlazy as less aggressive alternative. + #[arg( + long = "p2p.gossip.mesh.floodpublish", + default_value = "false", + env = "KONA_NODE_P2P_GOSSIP_FLOOD_PUBLISH" + )] + pub gossip_flood_publish: bool, + /// Sets the peer scoring strategy for the P2P stack. + /// Can be one of: none or light. + #[arg(long = "p2p.scoring", default_value = "light", env = "KONA_NODE_P2P_SCORING")] + pub scoring: PeerScoreLevel, + + /// Allows to ban peers based on their score. + /// + /// Peers are banned based on a ban threshold (see `p2p.ban.threshold`). + /// If a peer's score is below the threshold, it gets automatically banned. + #[arg(long = "p2p.ban.peers", default_value = "false", env = "KONA_NODE_P2P_BAN_PEERS")] + pub ban_enabled: bool, + + /// The threshold used to ban peers. + /// + /// For peers to be banned, the `p2p.ban.peers` flag must be set to `true`. + /// By default, peers are banned if their score is below -100. This follows the `op-node` default ``. + #[arg(long = "p2p.ban.threshold", default_value = "-100", env = "KONA_NODE_P2P_BAN_THRESHOLD")] + pub ban_threshold: i64, + + /// The duration in minutes to ban a peer for. + /// + /// For peers to be banned, the `p2p.ban.peers` flag must be set to `true`. + /// By default peers are banned for 1 hour. This follows the `op-node` default ``. 
+ #[arg(long = "p2p.ban.duration", default_value = "60", env = "KONA_NODE_P2P_BAN_DURATION")] + pub ban_duration: u64, + + /// The interval in seconds to find peers using the discovery service. + /// Defaults to 5 seconds. + #[arg( + long = "p2p.discovery.interval", + default_value = "5", + env = "KONA_NODE_P2P_DISCOVERY_INTERVAL" + )] + pub discovery_interval: u64, + /// The directory to store the bootstore. + #[arg(long = "p2p.bootstore", env = "KONA_NODE_P2P_BOOTSTORE")] + pub bootstore: Option, + /// Disables the bootstore. + #[arg(long = "p2p.no-bootstore", env = "KONA_NODE_P2P_NO_BOOTSTORE")] + pub disable_bootstore: bool, + /// Peer Redialing threshold is the maximum amount of times to attempt to redial a peer that + /// disconnects. By default, peers are *not* redialed. If set to 0, the peer will be + /// redialed indefinitely. + #[arg(long = "p2p.redial", env = "KONA_NODE_P2P_REDIAL", default_value = "500")] + pub peer_redial: Option, + + /// The duration in minutes of the peer dial period. + /// When the last time a peer was dialed is longer than the dial period, the number of peer + /// dials is reset to 0, allowing the peer to be dialed again. + #[arg(long = "p2p.redial.period", env = "KONA_NODE_P2P_REDIAL_PERIOD", default_value = "60")] + pub redial_period: u64, + + /// An optional list of bootnode ENRs or node records to start the node with. + #[arg(long = "p2p.bootnodes", value_delimiter = ',', env = "KONA_NODE_P2P_BOOTNODES")] + pub bootnodes: Vec, + + /// Optionally enable topic scoring. + /// + /// Topic scoring is a mechanism to score peers based on their behavior in the gossip network. + /// Historically, topic scoring was only enabled for the v1 topic on the OP Stack p2p network + /// in the `op-node`. This was a silent bug, and topic scoring is actively being + /// [phased out of the `op-node`][out]. + /// + /// This flag is only presented for backwards compatibility and debugging purposes. 
+ /// + /// [out]: https://github.com/ethereum-optimism/optimism/pull/15719 + #[arg( + long = "p2p.topic-scoring", + default_value = "false", + env = "KONA_NODE_P2P_TOPIC_SCORING" + )] + pub topic_scoring: bool, + + /// An optional unsafe block signer address. + /// + /// By default, this is fetched from the chain config in the superchain-registry using the + /// specified L2 chain ID. + #[arg(long = "p2p.unsafe.block.signer", env = "KONA_NODE_P2P_UNSAFE_BLOCK_SIGNER")] + pub unsafe_block_signer: Option, + + /// An optional flag to remove random peers from discovery to rotate the peer set. + /// + /// This is the number of seconds to wait before removing a peer from the discovery + /// service. By default, peers are not removed from the discovery service. + /// + /// This is useful for discovering a wider set of peers. + #[arg(long = "p2p.discovery.randomize", env = "KONA_NODE_P2P_DISCOVERY_RANDOMIZE")] + pub discovery_randomize: Option, + + /// Specify optional remote signer configuration. Note that this argument is mutually exclusive + /// with `p2p.sequencer.key` that specifies a local sequencer signer. + #[command(flatten)] + pub signer: SignerArgs, +} + +impl Default for P2PArgs { + fn default() -> Self { + // Construct default values using the clap parser. + // This works since none of the cli flags are required. 
+ Self::parse_from::<[_; 0], &str>([]) + } +} + +impl P2PArgs { + fn check_ports_inner(ip_addr: IpAddr, tcp_port: u16, udp_port: u16) -> Result<()> { + if tcp_port == 0 { + return Ok(()); + } + if udp_port == 0 { + return Ok(()); + } + let tcp_socket = std::net::TcpListener::bind((ip_addr, tcp_port)); + let udp_socket = std::net::UdpSocket::bind((ip_addr, udp_port)); + if let Err(e) = tcp_socket { + tracing::error!(target: "p2p::flags", tcp_port, "Error binding TCP socket: {e}"); + anyhow::bail!("Error binding TCP socket on port {tcp_port}: {e}"); + } + if let Err(e) = udp_socket { + tracing::error!(target: "p2p::flags", udp_port, "Error binding UDP socket: {e}"); + anyhow::bail!("Error binding UDP socket on port {udp_port}: {e}"); + } + + Ok(()) + } + + /// Checks if the listen ports are available on the system. + /// + /// If either of the ports are `0`, this check is skipped. + /// + /// ## Errors + /// + /// - If the TCP port is already in use. + /// - If the UDP port is already in use. + pub fn check_ports(&self) -> Result<()> { + Self::check_ports_inner(self.listen_ip, self.listen_tcp_port, self.listen_udp_port) + } + + /// Returns the private key as specified in the raw cli flag or via file path. + pub fn private_key(&self) -> Option { + if let Some(key) = self.private_key { + match PrivateKeySigner::from_bytes(&key) { + Ok(signer) => return Some(signer), + Err(e) => { + tracing::error!(target: "p2p::flags", "Failed to parse private key: {}", e); + return None; + } + } + } + + if let Some(path) = self.priv_path.as_ref() && + path.exists() + { + let contents = std::fs::read_to_string(path).ok()?; + let decoded = B256::from_str(&contents).ok()?; + match PrivateKeySigner::from_bytes(&decoded) { + Ok(signer) => return Some(signer), + Err(e) => { + tracing::error!(target: "p2p::flags", "Failed to parse private key from file: {}", e); + return None; + } + } + } + + None + } + + /// Returns the unsafe block signer from the CLI arguments. 
+ pub async fn unsafe_block_signer( + &self, + args: &GlobalArgs, + rollup_config: &RollupConfig, + l1_eth_rpc: Option, + ) -> anyhow::Result { + if let Some(l1_eth_rpc) = l1_eth_rpc { + /// The storage slot that the unsafe block signer address is stored at. + /// Computed as: `bytes32(uint256(keccak256("systemconfig.unsafeblocksigner")) - 1)` + const UNSAFE_BLOCK_SIGNER_ADDRESS_STORAGE_SLOT: B256 = + b256!("0x65a7ed542fb37fe237fdfbdd70b31598523fe5b32879e307bae27a0bd9581c08"); + + let mut provider = AlloyChainProvider::new_http(l1_eth_rpc, 1024); + let latest_block_num = provider.latest_block_number().await?; + let block_info = provider.block_info_by_number(latest_block_num).await?; + + // Fetch the unsafe block signer address from the system config. + let unsafe_block_signer_address = provider + .inner + .get_storage_at( + rollup_config.l1_system_config_address, + UNSAFE_BLOCK_SIGNER_ADDRESS_STORAGE_SLOT.into(), + ) + .hash(block_info.hash) + .await?; + + // Convert the unsafe block signer address to the correct type. + return Ok(alloy_primitives::Address::from_slice( + &unsafe_block_signer_address.to_be_bytes_vec()[12..], + )); + } + + // Otherwise use the genesis signer or the configured unsafe block signer. + args.genesis_signer().or_else(|_| { + self.unsafe_block_signer + .ok_or_else(|| anyhow::anyhow!("Unsafe block signer not provided")) + }) + } + + /// Constructs kona's P2P network [`NetworkConfig`] from CLI arguments. + /// + /// ## Parameters + /// + /// - [`GlobalArgs`]: required to fetch the genesis unsafe block signer. + /// + /// Errors if the genesis unsafe block signer isn't available for the specified L2 Chain ID. + pub async fn config( + self, + config: &RollupConfig, + args: &GlobalArgs, + l1_rpc: Option, + ) -> anyhow::Result { + // Note: the advertised address is contained in the ENR for external peers from the + // discovery layer to use. 
+ + // Fallback to the listen ip if the advertise ip is not specified + let advertise_ip = self.advertise_ip.unwrap_or(self.listen_ip); + + // If the advertise ip is set, we will disable the dynamic ENR updates. + let static_ip = self.advertise_ip.is_some(); + + // If the advertise tcp port is null, use the listen tcp port + let advertise_tcp_port = match self.advertise_tcp_port { + None => self.listen_tcp_port, + Some(port) => port, + }; + + let advertise_udp_port = match self.advertise_udp_port { + None => self.listen_udp_port, + Some(port) => port, + }; + + let keypair = self.keypair().unwrap_or_else(|e| { + let generated = Keypair::generate_secp256k1(); + tracing::warn!( + target: "p2p::config", + error = %e, + peer_id = %generated.public().to_peer_id(), + "Failed to load P2P keypair from configuration, generated ephemeral keypair. \ + Set --p2p.priv.path or --p2p.priv.raw for a persistent peer ID." + ); + generated + }); + let secp256k1_key = keypair.clone().try_into_secp256k1() + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. This is a bug since we only support secp256k1 keys: {e}"))? + .secret().to_bytes(); + let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. 
This is a bug since we only support secp256k1 keys: {e}"))?; + + let discovery_address = + LocalNode::new(local_node_key, advertise_ip, advertise_tcp_port, advertise_udp_port); + let gossip_config = kona_gossip::default_config_builder() + .mesh_n(self.gossip_mesh_d) + .mesh_n_low(self.gossip_mesh_dlo) + .mesh_n_high(self.gossip_mesh_dhi) + .gossip_lazy(self.gossip_mesh_dlazy) + .flood_publish(self.gossip_flood_publish) + .build()?; + + let monitor_peers = self.ban_enabled.then_some(PeerMonitoring { + ban_duration: Duration::from_secs(60 * self.ban_duration), + ban_threshold: self.ban_threshold as f64, + }); + + let discovery_listening_address = SocketAddr::new(self.listen_ip, self.listen_udp_port); + let discovery_config = + NetworkConfig::discv5_config(discovery_listening_address.into(), static_ip); + + let mut gossip_address = libp2p::Multiaddr::from(self.listen_ip); + gossip_address.push(libp2p::multiaddr::Protocol::Tcp(self.listen_tcp_port)); + + let unsafe_block_signer = self.unsafe_block_signer(args, config, l1_rpc).await?; + + let bootstore = if self.disable_bootstore { + None + } else { + Some(self.bootstore.map_or_else( + || BootStoreFile::Default { chain_id: args.l2_chain_id.into() }, + BootStoreFile::Custom, + )) + }; + + let bootnodes = self + .bootnodes + .iter() + .map(|bootnode| BootNode::parse_bootnode(bootnode)) + .collect::>() + .into(); + + Ok(NetworkConfig { + discovery_config, + discovery_interval: Duration::from_secs(self.discovery_interval), + discovery_address, + discovery_randomize: self.discovery_randomize.map(Duration::from_secs), + enr_update: !static_ip, + gossip_address, + keypair, + unsafe_block_signer, + gossip_config, + scoring: self.scoring, + monitor_peers, + bootstore, + topic_scoring: self.topic_scoring, + gater_config: GaterConfig { + peer_redialing: self.peer_redial, + dial_period: Duration::from_secs(60 * self.redial_period), + }, + bootnodes, + rollup_config: config.clone(), + gossip_signer: self.signer.config(args)?, + }) + 
} + + /// Returns the [`Keypair`] from the cli inputs. + /// + /// If the raw private key is empty and the specified file is empty, + /// this method will generate a new private key and write it out to the file. + /// + /// If neither a file is specified, nor a raw private key input, this method + /// will error. + pub fn keypair(&self) -> Result { + // Attempt the parse the private key if specified. + if let Some(mut private_key) = self.private_key { + let keypair = kona_cli::SecretKeyLoader::parse(&mut private_key.0) + .map_err(|e| anyhow::anyhow!(e))?; + tracing::info!( + target: "p2p::config", + peer_id = %keypair.public().to_peer_id(), + "Successfully loaded P2P keypair from raw private key" + ); + return Ok(keypair); + } + + let Some(ref key_path) = self.priv_path else { + anyhow::bail!("Neither a raw private key nor a private key file path was provided."); + }; + + kona_cli::SecretKeyLoader::load(key_path).map_err(|e| anyhow::anyhow!(e)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::b256; + use clap::Parser; + use kona_peers::NodeRecord; + + /// A mock command that uses the `P2PArgs`. + #[derive(Parser, Debug, Clone)] + #[command(about = "Mock command")] + struct MockCommand { + /// P2P CLI Flags + #[clap(flatten)] + pub p2p: P2PArgs, + } + + #[test] + fn test_p2p_args_keypair_missing_both() { + let args = MockCommand::parse_from(["test"]); + assert!(args.p2p.keypair().is_err()); + } + + #[test] + fn test_p2p_args_keypair_raw_private_key() { + let args = MockCommand::parse_from([ + "test", + "--p2p.priv.raw", + "1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be", + ]); + assert!(args.p2p.keypair().is_ok()); + } + + #[test] + fn test_p2p_args_keypair_from_path() { + // Create a temporary directory. + let dir = std::env::temp_dir(); + let mut source_path = dir.clone(); + assert!(std::env::set_current_dir(dir).is_ok()); + + // Write a private key to a file. 
+ let key = b256!("1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be"); + let hex = alloy_primitives::hex::encode(key.0); + source_path.push("test.txt"); + std::fs::write(&source_path, &hex).unwrap(); + + // Parse the keypair from the file. + let args = + MockCommand::parse_from(["test", "--p2p.priv.path", source_path.to_str().unwrap()]); + assert!(args.p2p.keypair().is_ok()); + } + + #[test] + fn test_p2p_args() { + let args = MockCommand::parse_from(["test"]); + assert_eq!(args.p2p, P2PArgs::default()); + } + + #[test] + fn test_p2p_args_randomized() { + let args = MockCommand::parse_from(["test", "--p2p.discovery.randomize", "10"]); + assert_eq!(args.p2p.discovery_randomize, Some(10)); + let args = MockCommand::parse_from(["test"]); + assert_eq!(args.p2p.discovery_randomize, None); + } + + #[test] + fn test_p2p_args_no_discovery() { + let args = MockCommand::parse_from(["test", "--p2p.no-discovery"]); + assert!(args.p2p.no_discovery); + } + + #[test] + fn test_p2p_args_priv_path() { + let args = MockCommand::parse_from(["test", "--p2p.priv.path", "test.txt"]); + assert_eq!(args.p2p.priv_path, Some(PathBuf::from("test.txt"))); + } + + #[test] + fn test_p2p_args_private_key() { + let args = MockCommand::parse_from([ + "test", + "--p2p.priv.raw", + "1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be", + ]); + let key = b256!("1d2b0bda21d56b8bd12d4f94ebacffdfb35f5e226f84b461103bb8beab6353be"); + assert_eq!(args.p2p.private_key, Some(key)); + } + + #[test] + fn test_p2p_args_sequencer_key() { + let args = MockCommand::parse_from([ + "test", + "--p2p.sequencer.key", + "bcc617ea05150ff60490d3c6058630ba94ae9f12a02a87efd291349ca0e54e0a", + ]); + let key = b256!("bcc617ea05150ff60490d3c6058630ba94ae9f12a02a87efd291349ca0e54e0a"); + assert_eq!(args.p2p.signer.sequencer_key, Some(key)); + } + + #[test] + fn test_p2p_args_listen_ip() { + let args = MockCommand::parse_from(["test", "--p2p.listen.ip", "127.0.0.1"]); + let expected: IpAddr = 
"127.0.0.1".parse().unwrap(); + assert_eq!(args.p2p.listen_ip, expected); + } + + #[test] + fn test_p2p_args_listen_tcp_port() { + let args = MockCommand::parse_from(["test", "--p2p.listen.tcp", "1234"]); + assert_eq!(args.p2p.listen_tcp_port, 1234); + } + + #[test] + fn test_p2p_args_listen_udp_port() { + let args = MockCommand::parse_from(["test", "--p2p.listen.udp", "1234"]); + assert_eq!(args.p2p.listen_udp_port, 1234); + } + + #[test] + fn test_p2p_args_bootnodes() { + let args = MockCommand::parse_from([ + "test", + "--p2p.bootnodes", + "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", + ]); + assert_eq!( + args.p2p.bootnodes, + vec![ + "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", + ] + ); + + // Parse the bootnodes. + let bootnodes = args + .p2p + .bootnodes + .iter() + .map(|bootnode| BootNode::parse_bootnode(bootnode)) + .collect::>(); + + // Otherwise, attempt to use the Node Record format. 
+ let record = NodeRecord::from_str( + "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305").unwrap(); + let expected_bootnode = vec![BootNode::from_unsigned(record).unwrap()]; + + assert_eq!(bootnodes, expected_bootnode); + } + + #[test] + fn test_p2p_args_bootnodes_multiple() { + let args = MockCommand::parse_from([ + "test", + "--p2p.bootnodes", + "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305,enode://dd751a9ef8912be1bfa7a5e34e2c3785cc5253110bd929f385e07ba7ac19929fb0e0c5d93f77827291f4da02b2232240fbc47ea7ce04c46e333e452f8656b667@34.65.107.0:30305", + ]); + assert_eq!( + args.p2p.bootnodes, + vec![ + "enode://ca2774c3c401325850b2477fd7d0f27911efbf79b1e8b335066516e2bd8c4c9e0ba9696a94b1cb030a88eac582305ff55e905e64fb77fe0edcd70a4e5296d3ec@34.65.175.185:30305", + "enode://dd751a9ef8912be1bfa7a5e34e2c3785cc5253110bd929f385e07ba7ac19929fb0e0c5d93f77827291f4da02b2232240fbc47ea7ce04c46e333e452f8656b667@34.65.107.0:30305", + ] + ); + } + + #[test] + fn test_p2p_args_bootnode_enr() { + let args = MockCommand::parse_from([ + "test", + "--p2p.bootnodes", + "enr:-J64QBbwPjPLZ6IOOToOLsSjtFUjjzN66qmBZdUexpO32Klrc458Q24kbty2PdRaLacHM5z-cZQr8mjeQu3pik6jPSOGAYYFIqBfgmlkgnY0gmlwhDaRWFWHb3BzdGFja4SzlAUAiXNlY3AyNTZrMaECmeSnJh7zjKrDSPoNMGXoopeDF4hhpj5I0OsQUUt4u8uDdGNwgiQGg3VkcIIkBg", + ]); + assert_eq!( + args.p2p.bootnodes, + vec![ + "enr:-J64QBbwPjPLZ6IOOToOLsSjtFUjjzN66qmBZdUexpO32Klrc458Q24kbty2PdRaLacHM5z-cZQr8mjeQu3pik6jPSOGAYYFIqBfgmlkgnY0gmlwhDaRWFWHb3BzdGFja4SzlAUAiXNlY3AyNTZrMaECmeSnJh7zjKrDSPoNMGXoopeDF4hhpj5I0OsQUUt4u8uDdGNwgiQGg3VkcIIkBg", + ] + ); + } + + #[test] + fn test_p2p_args_listen_ip_dns_resolution() { + // Test that DNS hostnames are resolved to IP addresses + // Using localhost which should resolve reliably + let args = 
MockCommand::parse_from(["test", "--p2p.listen.ip", "localhost"]); + // localhost typically resolves to 127.0.0.1 or ::1 + assert!( + args.p2p.listen_ip == "127.0.0.1".parse::().unwrap() || + args.p2p.listen_ip == "::1".parse::().unwrap() + ); + } + + #[test] + fn test_p2p_args_advertise_ip_dns_resolution() { + // Test that DNS hostnames are resolved to IP addresses for advertise_ip + let args = MockCommand::parse_from(["test", "--p2p.advertise.ip", "localhost"]); + // localhost typically resolves to 127.0.0.1 or ::1 + let ip = args.p2p.advertise_ip.unwrap(); + assert!( + ip == "127.0.0.1".parse::().unwrap() || ip == "::1".parse::().unwrap() + ); + } + + #[test] + fn test_resolve_host_with_ip() { + // Test that IP addresses are passed through directly + let ip = resolve_host("192.168.1.1").unwrap(); + assert_eq!(ip, "192.168.1.1".parse::().unwrap()); + + let ipv6 = resolve_host("::1").unwrap(); + assert_eq!(ipv6, "::1".parse::().unwrap()); + } + + #[test] + fn test_resolve_host_with_dns() { + // Test DNS resolution with localhost + let ip = resolve_host("localhost").unwrap(); + assert!( + ip == "127.0.0.1".parse::().unwrap() || ip == "::1".parse::().unwrap() + ); + } + + #[test] + fn test_resolve_host_invalid() { + // Test that invalid hostnames return an error + let result = resolve_host("this-hostname-definitely-does-not-exist.invalid"); + assert!(result.is_err()); + } +} diff --git a/kona/bin/node/src/flags/rpc.rs b/rust/kona/bin/node/src/flags/rpc.rs similarity index 100% rename from kona/bin/node/src/flags/rpc.rs rename to rust/kona/bin/node/src/flags/rpc.rs diff --git a/rust/kona/bin/node/src/flags/sequencer.rs b/rust/kona/bin/node/src/flags/sequencer.rs new file mode 100644 index 00000000000..11546ad3cfa --- /dev/null +++ b/rust/kona/bin/node/src/flags/sequencer.rs @@ -0,0 +1,78 @@ +//! Sequencer CLI Flags +//! +//! These are based on sequencer flags from the [`op-node`][op-node] CLI. +//! +//! 
[op-node]: https://github.com/ethereum-optimism/optimism/blob/develop/op-node/flags/flags.go#L233-L265 + +use clap::Parser; +use kona_node_service::SequencerConfig; +use std::{num::ParseIntError, time::Duration}; +use url::Url; + +/// Sequencer CLI Flags +#[derive(Parser, Clone, Debug, PartialEq, Eq)] +pub struct SequencerArgs { + /// Initialize the sequencer in a stopped state. The sequencer can be started using the + /// `admin_startSequencer` RPC. + #[arg( + long = "sequencer.stopped", + default_value = "false", + env = "KONA_NODE_SEQUENCER_STOPPED" + )] + pub stopped: bool, + + /// Maximum number of L2 blocks for restricting the distance between L2 safe and unsafe. + /// Disabled if 0. + #[arg( + long = "sequencer.max-safe-lag", + default_value = "0", + env = "KONA_NODE_SEQUENCER_MAX_SAFE_LAG" + )] + pub max_safe_lag: u64, + + /// Number of L1 blocks to keep distance from the L1 head as a sequencer for picking an L1 + /// origin. + #[arg(long = "sequencer.l1-confs", default_value = "4", env = "KONA_NODE_SEQUENCER_L1_CONFS")] + pub l1_confs: u64, + + /// Forces the sequencer to strictly prepare the next L1 origin and create empty L2 blocks + #[arg( + long = "sequencer.recover", + default_value = "false", + env = "KONA_NODE_SEQUENCER_RECOVER" + )] + pub recover: bool, + + /// Conductor service rpc endpoint. Providing this value will enable the conductor service. + #[arg(long = "conductor.rpc", env = "KONA_NODE_CONDUCTOR_RPC")] + pub conductor_rpc: Option, + + /// Conductor service rpc timeout. + #[arg( + long = "conductor.rpc.timeout", + default_value = "1", + env = "KONA_NODE_CONDUCTOR_RPC_TIMEOUT", + value_parser = |arg: &str| -> Result {Ok(Duration::from_secs(arg.parse()?))} + )] + pub conductor_rpc_timeout: Duration, +} + +impl Default for SequencerArgs { + fn default() -> Self { + // Construct default values using the clap parser. + // This works since none of the cli flags are required. 
+ Self::parse_from::<[_; 0], &str>([]) + } +} + +impl SequencerArgs { + /// Creates a [`SequencerConfig`] from the [`SequencerArgs`]. + pub fn config(&self) -> SequencerConfig { + SequencerConfig { + sequencer_stopped: self.stopped, + sequencer_recovery_mode: self.recover, + conductor_rpc_url: self.conductor_rpc.clone(), + l1_conf_delay: self.l1_confs, + } + } +} diff --git a/kona/bin/node/src/flags/signer.rs b/rust/kona/bin/node/src/flags/signer.rs similarity index 99% rename from kona/bin/node/src/flags/signer.rs rename to rust/kona/bin/node/src/flags/signer.rs index c747116bf10..8e8be2b13e6 100644 --- a/kona/bin/node/src/flags/signer.rs +++ b/rust/kona/bin/node/src/flags/signer.rs @@ -3,7 +3,7 @@ use std::path::PathBuf; use alloy_primitives::{Address, B256}; use alloy_signer::{Signer, k256::ecdsa}; use alloy_signer_local::PrivateKeySigner; -use clap::{Parser, arg}; +use clap::Parser; use kona_cli::SecretKeyLoader; use kona_sources::{BlockSigner, ClientCert, RemoteSigner}; use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; diff --git a/rust/kona/bin/node/src/main.rs b/rust/kona/bin/node/src/main.rs new file mode 100644 index 00000000000..bd6b4cfcd03 --- /dev/null +++ b/rust/kona/bin/node/src/main.rs @@ -0,0 +1,26 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +pub mod cli; +pub mod commands; +pub mod flags; +pub mod metrics; + +pub(crate) mod version; + +fn main() { + use clap::Parser; + + kona_cli::sigsegv_handler::install(); + kona_cli::backtrace::enable(); + + if let Err(err) = cli::Cli::parse().run() { + eprintln!("Error: {err:?}"); + std::process::exit(1); + } +} diff --git 
a/kona/bin/node/src/metrics/cli_opts.rs b/rust/kona/bin/node/src/metrics/cli_opts.rs similarity index 99% rename from kona/bin/node/src/metrics/cli_opts.rs rename to rust/kona/bin/node/src/metrics/cli_opts.rs index 420e280875f..8b593d93271 100644 --- a/kona/bin/node/src/metrics/cli_opts.rs +++ b/rust/kona/bin/node/src/metrics/cli_opts.rs @@ -3,7 +3,7 @@ use kona_genesis::RollupConfig; /// Metrics to record various CLI options. -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct CliMetrics; impl CliMetrics { diff --git a/kona/bin/node/src/metrics/mod.rs b/rust/kona/bin/node/src/metrics/mod.rs similarity index 100% rename from kona/bin/node/src/metrics/mod.rs rename to rust/kona/bin/node/src/metrics/mod.rs diff --git a/kona/bin/node/src/metrics/version.rs b/rust/kona/bin/node/src/metrics/version.rs similarity index 100% rename from kona/bin/node/src/metrics/version.rs rename to rust/kona/bin/node/src/metrics/version.rs diff --git a/kona/bin/node/src/version.rs b/rust/kona/bin/node/src/version.rs similarity index 100% rename from kona/bin/node/src/version.rs rename to rust/kona/bin/node/src/version.rs diff --git a/kona/bin/supervisor/Architecture.md b/rust/kona/bin/supervisor/Architecture.md similarity index 100% rename from kona/bin/supervisor/Architecture.md rename to rust/kona/bin/supervisor/Architecture.md diff --git a/rust/kona/bin/supervisor/Cargo.toml b/rust/kona/bin/supervisor/Cargo.toml new file mode 100644 index 00000000000..a1fa0943302 --- /dev/null +++ b/rust/kona/bin/supervisor/Cargo.toml @@ -0,0 +1,47 @@ +[package] +name = "kona-supervisor" +version = "0.1.0" +description = "Kona Supervisor" + +edition.workspace = true +license.workspace = true +rust-version.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true + +[dependencies] +# Workspace +kona-supervisor-service.workspace = true +kona-supervisor-core.workspace = true 
+kona-cli.workspace = true +kona-interop.workspace = true +kona-genesis.workspace = true +kona-protocol.workspace = true + +alloy-network.workspace = true +alloy-provider.workspace = true +alloy-rpc-types-engine.workspace = true + +clap = { workspace = true, features = ["derive", "env"] } +tokio = { workspace = true, features = [ "full", "macros"] } +anyhow = { workspace = true } +tracing-subscriber = { workspace = true, features = ["fmt", "env-filter"] } +tracing = { workspace = true } +serde.workspace = true +serde_json.workspace = true +glob.workspace = true +metrics.workspace = true + +[dev-dependencies] +tempfile.workspace = true +kona-registry.workspace = true + +[build-dependencies] +vergen = { workspace = true, features = ["build", "cargo", "emit_and_set"] } +vergen-git2.workspace = true + +[lints] +workspace = true diff --git a/kona/bin/supervisor/README.md b/rust/kona/bin/supervisor/README.md similarity index 100% rename from kona/bin/supervisor/README.md rename to rust/kona/bin/supervisor/README.md diff --git a/kona/bin/supervisor/build.rs b/rust/kona/bin/supervisor/build.rs similarity index 100% rename from kona/bin/supervisor/build.rs rename to rust/kona/bin/supervisor/build.rs diff --git a/kona/bin/supervisor/src/cli.rs b/rust/kona/bin/supervisor/src/cli.rs similarity index 100% rename from kona/bin/supervisor/src/cli.rs rename to rust/kona/bin/supervisor/src/cli.rs diff --git a/kona/bin/supervisor/src/flags/mod.rs b/rust/kona/bin/supervisor/src/flags/mod.rs similarity index 100% rename from kona/bin/supervisor/src/flags/mod.rs rename to rust/kona/bin/supervisor/src/flags/mod.rs diff --git a/kona/bin/supervisor/src/flags/supervisor.rs b/rust/kona/bin/supervisor/src/flags/supervisor.rs similarity index 99% rename from kona/bin/supervisor/src/flags/supervisor.rs rename to rust/kona/bin/supervisor/src/flags/supervisor.rs index 2f0d20eb8d7..2d45bd10059 100644 --- a/kona/bin/supervisor/src/flags/supervisor.rs +++ 
b/rust/kona/bin/supervisor/src/flags/supervisor.rs @@ -299,6 +299,7 @@ mod tests { } #[tokio::test] + #[allow(clippy::zero_sized_map_values)] async fn test_init_dependency_set_success() -> anyhow::Result<()> { let mut temp_file = NamedTempFile::new()?; let json_content = r#" @@ -662,7 +663,7 @@ mod tests { let args = SupervisorArgs { l1_rpc: "dummy".to_string(), // clap/env may produce [""] — ensure it's filtered to empty - l2_consensus_nodes: vec!["".to_string()], + l2_consensus_nodes: vec![String::new()], l2_consensus_jwt_secret: vec![], datadir: PathBuf::from("dummy"), datadir_sync_endpoint: None, diff --git a/rust/kona/bin/supervisor/src/main.rs b/rust/kona/bin/supervisor/src/main.rs new file mode 100644 index 00000000000..36ef944f274 --- /dev/null +++ b/rust/kona/bin/supervisor/src/main.rs @@ -0,0 +1,24 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +pub mod cli; +pub mod flags; +pub mod metrics; +pub(crate) mod version; + +use clap::Parser; + +fn main() { + kona_cli::sigsegv_handler::install(); + kona_cli::backtrace::enable(); + + if let Err(err) = cli::Cli::parse().run() { + eprintln!("Error: {err:?}"); + std::process::exit(1); + } +} diff --git a/kona/bin/supervisor/src/metrics/mod.rs b/rust/kona/bin/supervisor/src/metrics/mod.rs similarity index 100% rename from kona/bin/supervisor/src/metrics/mod.rs rename to rust/kona/bin/supervisor/src/metrics/mod.rs diff --git a/kona/bin/supervisor/src/metrics/version.rs b/rust/kona/bin/supervisor/src/metrics/version.rs similarity index 100% rename from kona/bin/supervisor/src/metrics/version.rs rename to rust/kona/bin/supervisor/src/metrics/version.rs 
diff --git a/kona/bin/supervisor/src/version.rs b/rust/kona/bin/supervisor/src/version.rs similarity index 100% rename from kona/bin/supervisor/src/version.rs rename to rust/kona/bin/supervisor/src/version.rs diff --git a/kona/crates/node/disc/Cargo.toml b/rust/kona/crates/node/disc/Cargo.toml similarity index 100% rename from kona/crates/node/disc/Cargo.toml rename to rust/kona/crates/node/disc/Cargo.toml diff --git a/kona/crates/node/disc/src/builder.rs b/rust/kona/crates/node/disc/src/builder.rs similarity index 100% rename from kona/crates/node/disc/src/builder.rs rename to rust/kona/crates/node/disc/src/builder.rs diff --git a/rust/kona/crates/node/disc/src/driver.rs b/rust/kona/crates/node/disc/src/driver.rs new file mode 100644 index 00000000000..094ce8fa4e2 --- /dev/null +++ b/rust/kona/crates/node/disc/src/driver.rs @@ -0,0 +1,545 @@ +//! Discovery Module. + +use backon::{ExponentialBuilder, RetryableWithContext}; +use derive_more::Debug; +use discv5::{Config, Discv5, Enr, enr::NodeId}; +use kona_peers::{BootNode, BootNodes, BootStore, BootStoreFile, EnrValidation, enr_to_multiaddr}; +use tokio::{ + sync::mpsc::channel, + time::{Duration, sleep}, +}; + +use crate::{Discv5Builder, Discv5Handler, HandlerRequest, LocalNode}; + +/// The [`Discv5Driver`] drives the discovery service. +/// +/// Calling [`Discv5Driver::start`] spawns a new [`Discv5`] +/// discovery service in a new tokio task and returns a +/// [`Discv5Handler`]. +/// +/// Channels are used to communicate between the [`Discv5Handler`] +/// and the spawned task containing the [`Discv5`] service. +/// +/// Since some requested operations are asynchronous, this pattern of message +/// passing is used as opposed to wrapping the [`Discv5`] in an `Arc>`. +/// If an `Arc>` were used, a lock held across the operation's future +/// would be needed since some asynchronous operations require a mutable +/// reference to the [`Discv5`] service. 
+#[derive(Debug)] +pub struct Discv5Driver { + /// The [`Discv5`] discovery service. + #[debug(skip)] + pub disc: Discv5, + /// The optional [`BootStoreFile`] to use for the bootstore. + pub bootstore: Option, + /// Bootnodes used to bootstrap the discovery service. + pub bootnodes: BootNodes, + /// The chain ID of the network. + pub chain_id: u64, + /// The interval to discovery random nodes. + pub interval: Duration, + /// Whether to forward ENRs to the enr receiver on startup. + pub forward: bool, + /// The interval at which to store the ENRs in the bootstore. + /// This is set to 60 seconds by default. + pub store_interval: Duration, + /// The frequency at which to remove random nodes from the discovery table. + /// This is not enabled (`None`) by default. + pub remove_interval: Option, +} + +impl Discv5Driver { + /// Returns a new [`Discv5Builder`] instance. + pub fn builder( + local_node: LocalNode, + chain_id: u64, + discovery_config: Config, + ) -> Discv5Builder { + Discv5Builder::new(local_node, chain_id, discovery_config) + } + + /// Instantiates a new [`Discv5Driver`]. + pub const fn new( + disc: Discv5, + interval: Duration, + chain_id: u64, + bootstore: Option, + bootnodes: BootNodes, + ) -> Result { + Ok(Self { + disc, + chain_id, + bootnodes, + interval, + forward: true, + remove_interval: None, + store_interval: Duration::from_secs(60), + bootstore, + }) + } + + /// Starts the inner [`Discv5`] service. + async fn init(self) -> Result { + let (s, res) = { + |mut v: Self| async { + let res = v.disc.start().await; + (v, res) + } + } + .retry(ExponentialBuilder::default()) + .context(self) + .notify(|err: &discv5::Error, dur: Duration| { + warn!(target: "discovery", ?err, "Failed to start discovery service [Duration: {:?}]", dur); + }) + .await; + res.map(|_| s) + } + + /// Bootstraps the [`Discv5`] table with bootnodes. 
+ async fn bootstrap_peers( + bootstore: Option, + bootnodes: BootNodes, + chain_id: u64, + disc: &Discv5, + ) -> BootStore { + // Note: if the bootstore file cannot be created, we use a default bootstore. + let mut store = bootstore + .map_or_else(BootStore::default, |bootstore| bootstore.try_into().unwrap_or_default()); + + let initial_store_length = store.len(); + + for bn in bootnodes.0.into_iter().chain(BootNodes::from_chain_id(chain_id).0.into_iter()) { + let res = match bn { + BootNode::Enr(enr) => Ok(enr.clone()), + BootNode::Enode(enode) => disc.request_enr(enode.clone()).await, + }; + + let Ok(enr) = res else { + debug!(target: "discovery::bootstrap", ?res, "Failed to add boot node ENR to discovery table"); + continue; + }; + + let validation = EnrValidation::validate(&enr, chain_id); + if validation.is_invalid() { + trace!(target: "discovery::bootstrap", "Ignoring Invalid Bootnode ENR: {:?}. {:?}", enr, validation); + continue; + } + + if let Err(e) = disc.add_enr(enr.clone()) { + debug!(target: "discovery::bootstrap", "Failed to add enr: {:?}", e); + continue; + } + + store.add_enr(enr); + } + + let new_store_len = store.len(); + + debug!(target: "discovery::bootstrap", + added=%(new_store_len - initial_store_length), + total=%new_store_len, + "Added new ENRs to discv5 bootstore" + ); + + store + } + + /// Spawns a new [`Discv5`] discovery service in a new tokio task. + /// + /// Returns a [`Discv5Handler`] to communicate with the spawned task. 
+ pub fn start(mut self) -> (Discv5Handler, tokio::sync::mpsc::Receiver) { + let chain_id = self.chain_id; + let (req_sender, mut req_recv) = channel::(1024); + let (enr_sender, enr_recv) = channel::(1024); + + tokio::spawn(async move { + let remove = self.remove_interval.is_some(); + let remove_dur = self.remove_interval.unwrap_or(std::time::Duration::from_secs(600)); + let mut removal_interval = tokio::time::interval(remove_dur); + let mut interval = tokio::time::interval(self.interval); + let mut store_interval = tokio::time::interval(self.store_interval); + + // Step 1: Start the discovery service. + let Ok(s) = self.init().await else { + error!(target: "discovery", "Failed to start discovery service"); + return; + }; + self = s; + trace!(target: "discovery", "Discv5 Initialized"); + + // Step 2: Bootstrap the discovery table with bootnodes. + let mut store = + Self::bootstrap_peers(self.bootstore, self.bootnodes, chain_id, &self.disc).await; + + let enrs = self.disc.table_entries_enr(); + info!(target: "discovery", "Discv5 Started with {} ENRs", enrs.len()); + + // Step 3: Forward ENRs in the bootstore to the enr receiver. + if self.forward { + for enr in store.valid_peers_with_chain_id(self.chain_id) { + if let Err(e) = enr_sender.send(enr.clone()).await { + debug!(target: "discovery", "Failed to forward enr: {:?}", e); + } + } + } + + // Continuously attempt to start the event stream with a retry limit and shutdown + // signal. + let mut retries = 0; + let max_retries = 10; // Maximum number of retries before giving up. + let mut event_stream = loop { + if retries >= max_retries { + error!(target: "discovery", "Exceeded maximum retries for event stream startup. Aborting..."); + return; // Exit the task if the retry limit is reached. 
+ } + match self.disc.event_stream().await { + Ok(event_stream) => { + break event_stream; + } + Err(e) => { + warn!(target: "discovery", "Failed to start event stream: {:?}", e); + retries += 1; + sleep(Duration::from_secs(2)).await; + info!(target: "discovery", "Retrying event stream startup... (Attempt {}/{})", retries, max_retries); + } + } + }; + + // Step 4: Run the core driver loop. + loop { + tokio::select! { + msg = req_recv.recv() => { + match msg { + Some(msg) => match msg { + HandlerRequest::Metrics(tx) => { + let metrics = self.disc.metrics(); + if let Err(e) = tx.send(metrics) { + warn!(target: "discovery", "Failed to send metrics: {:?}", e); + } + } + HandlerRequest::PeerCount(tx) => { + let peers = self.disc.connected_peers(); + if let Err(e) = tx.send(peers) { + warn!(target: "discovery", "Failed to send peer count: {:?}", e); + } + } + HandlerRequest::LocalEnr(tx) => { + let enr = self.disc.local_enr().clone(); + if let Err(e) = tx.send(enr.clone()) { + warn!(target: "discovery", "Failed to send local enr: {:?}", e); + } + } + HandlerRequest::AddEnr(enr) => { + let _ = self.disc.add_enr(enr); + } + HandlerRequest::RequestEnr{out, addr} => { + let enr = self.disc.request_enr(addr).await; + if let Err(e) = out.send(enr) { + warn!(target: "discovery", "Failed to send request enr: {:?}", e); + } + } + HandlerRequest::TableEnrs(tx) => { + let enrs = self.disc.table_entries_enr(); + if let Err(e) = tx.send(enrs) { + warn!(target: "discovery", "Failed to send table enrs: {:?}", e); + } + }, + HandlerRequest::TableInfos(tx) => { + let infos = self.disc.table_entries(); + if let Err(e) = tx.send(infos) { + warn!(target: "discovery", "Failed to send table infos: {:?}", e); + } + }, + HandlerRequest::BanAddrs{addrs_to_ban, ban_duration} => { + let enrs = self.disc.table_entries_enr(); + + for enr in enrs { + let Some(multi_addr) = enr_to_multiaddr(&enr) else { + continue; + }; + + if addrs_to_ban.contains(&multi_addr) { + self.disc.ban_node(&enr.node_id(), 
Some(ban_duration)); + } + } + }, + } + None => { + trace!(target: "discovery", "Receiver `None` peer enr"); + } + } + } + event = event_stream.recv() => { + let Some(event) = event else { + trace!(target: "discovery", "Received `None` event"); + continue; + }; + match event { + discv5::Event::Discovered(enr) => { + if EnrValidation::validate(&enr, chain_id).is_valid() { + debug!(target: "discovery", "Valid ENR discovered, forwarding to swarm: {:?}", enr); + kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "discovered"); + store.add_enr(enr.clone()); + let sender = enr_sender.clone(); + tokio::spawn(async move { + if let Err(e) = sender.send(enr).await { + debug!(target: "discovery", "Failed to send enr: {:?}", e); + } + }); + } + } + discv5::Event::SessionEstablished(enr, addr) => { + if EnrValidation::validate(&enr, chain_id).is_valid() { + debug!(target: "discovery", "Session established with valid ENR, forwarding to swarm. Address: {:?}, ENR: {:?}", addr, enr); + kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "session_established"); + store.add_enr(enr.clone()); + let sender = enr_sender.clone(); + tokio::spawn(async move { + if let Err(e) = sender.send(enr).await { + debug!(target: "discovery", "Failed to send enr: {:?}", e); + } + }); + } + } + discv5::Event::UnverifiableEnr { enr, .. 
} => { + if EnrValidation::validate(&enr, chain_id).is_valid() { + debug!(target: "discovery", "Valid ENR discovered, forwarding to swarm: {:?}", enr); + kona_macros::inc!(gauge, crate::Metrics::DISCOVERY_EVENT, "type" => "unverifiable_enr"); + store.add_enr(enr.clone()); + let sender = enr_sender.clone(); + tokio::spawn(async move { + if let Err(e) = sender.send(enr).await { + debug!(target: "discovery", "Failed to send enr: {:?}", e); + } + }); + } + + } + _ => {} + } + } + _ = interval.tick() => { + let id = NodeId::random(); + trace!(target: "discovery", "Finding random node: {}", id); + kona_macros::inc!(gauge, crate::Metrics::FIND_NODE_REQUEST, "find_node" => "find_node"); + let fut = self.disc.find_node(id); + let enr_sender = enr_sender.clone(); + tokio::spawn(async move { + match fut.await { + Ok(nodes) => { + let enrs = nodes.into_iter().filter(|node| EnrValidation::validate(node, chain_id).is_valid()); + for enr in enrs { + _ = enr_sender.send(enr).await; + } + } + Err(err) => { + info!(target: "discovery", "Failed to find node: {:?}", err); + } + } + }); + } + _ = store_interval.tick() => { + let start = std::time::Instant::now(); + let enrs = self.disc.table_entries_enr(); + store.merge(enrs); + + if let Err(e) = store.sync() { + warn!(target: "discovery", "Failed to sync bootstore: {:?}", e); + } + + let elapsed = start.elapsed(); + debug!(target: "discovery", "Bootstore ENRs stored in {:?}", elapsed); + kona_macros::record!(histogram, crate::Metrics::ENR_STORE_TIME, "store_time", "store_time", elapsed.as_secs_f64()); + kona_macros::set!(gauge, crate::Metrics::DISCOVERY_PEER_COUNT, self.disc.connected_peers() as f64); + } + _ = removal_interval.tick() => { + if remove { + let enrs = self.disc.table_entries_enr(); + if enrs.len() > 20 { + let mut rng = rand::rng(); + let index = rand::Rng::random_range(&mut rng, 0..enrs.len()); + let enr = enrs[index].clone(); + debug!(target: "removal", "Removing random ENR: {:?}", enr); + 
self.disc.remove_node(&enr.node_id()); + } + } + } + } + } + }); + + (Discv5Handler::new(chain_id, req_sender), enr_recv) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::LocalNode; + use discv5::{ + ConfigBuilder, + enr::{CombinedKey, CombinedPublicKey}, + handler::NodeContact, + }; + use kona_genesis::{OP_MAINNET_CHAIN_ID, OP_SEPOLIA_CHAIN_ID}; + use tempfile::tempdir; + + use std::net::{IpAddr, Ipv4Addr, SocketAddr}; + + #[tokio::test] + async fn test_online_discv5_driver() { + let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { + unreachable!() + }; + + let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); + let discovery = Discv5Driver::builder( + LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), + OP_SEPOLIA_CHAIN_ID, + ConfigBuilder::new(socket.into()).build(), + ) + .build() + .expect("Failed to build discovery service"); + let (handle, _) = discovery.start(); + assert_eq!(handle.chain_id, OP_SEPOLIA_CHAIN_ID); + } + + #[tokio::test] + async fn test_online_discv5_driver_bootstrap_testnet() { + // Use a test file to make sure bootstore + // doesn't conflict with a local bootstore. + let file = tempdir().unwrap(); + let file = file.path().join("bootstore.json"); + + let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); + let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { + unreachable!() + }; + let mut discovery = Discv5Driver::builder( + LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), + OP_SEPOLIA_CHAIN_ID, + ConfigBuilder::new(socket.into()).build(), + ) + .build() + .expect("Failed to build discovery service"); + discovery.bootstore = Some(BootStoreFile::Custom(file)); + + discovery = discovery.init().await.expect("Failed to initialize discovery service"); + + // There are no ENRs for `OP_SEPOLIA_CHAIN_ID` in the bootstore. + // If an ENR is added, this check will fail. 
+ Discv5Driver::bootstrap_peers( + discovery.bootstore, + discovery.bootnodes, + OP_SEPOLIA_CHAIN_ID, + &discovery.disc, + ) + .await; + assert!( + discovery.disc.table_entries_enr().len() >= 5, + "Discovery table should have at least 5 ENRs" + ); + + // It should have the same number of entries as the testnet table. + let testnet = BootNodes::testnet(); + + // Filter out testnet ENRs that are not valid. + let testnet: Vec = testnet + .iter() + .filter_map(|node| { + if let BootNode::Enr(enr) = node { + // Check that the ENR is valid for the testnet. + if EnrValidation::validate(enr, OP_SEPOLIA_CHAIN_ID).is_invalid() { + return None; + } + } + let node_contact = + NodeContact::try_from_multiaddr(node.to_multiaddr().unwrap()).unwrap(); + + Some(node_contact.public_key()) + }) + .collect(); + + // There should be 8 valid ENRs for the testnet. + assert_eq!(testnet.len(), 8); + + // Those 8 ENRs should be in the discovery table. + let disc_enrs = discovery.disc.table_entries_enr(); + for public_key in testnet { + assert!( + disc_enrs.iter().any(|enr| enr.public_key() == public_key), + "Discovery table does not contain testnet ENR: {public_key:?}" + ); + } + } + + #[tokio::test] + async fn test_online_discv5_driver_bootstrap_mainnet() { + kona_cli::init_test_tracing(); + + // Use a test file to make sure bootstore + // doesn't conflict with a local bootstore. + let file = tempdir().unwrap(); + let file = file.path().join("bootstore.json"); + + // Filter out ENRs that are not valid. + let mainnet = BootNodes::mainnet(); + let mainnet: Vec = mainnet + .iter() + .filter_map(|node| { + if let BootNode::Enr(enr) = node && + EnrValidation::validate(enr, OP_MAINNET_CHAIN_ID).is_invalid() + { + return None; + } + let node_contact = + NodeContact::try_from_multiaddr(node.to_multiaddr().unwrap()).unwrap(); + + Some(node_contact.public_key()) + }) + .collect(); + + // There should be 16 valid ENRs for the mainnet. 
+ assert_eq!(mainnet.len(), 16); + + let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0); + + let CombinedKey::Secp256k1(secret_key) = CombinedKey::generate_secp256k1() else { + unreachable!() + }; + + let mut discovery = Discv5Driver::builder( + LocalNode::new(secret_key, IpAddr::V4(Ipv4Addr::UNSPECIFIED), 0, 0), + OP_MAINNET_CHAIN_ID, + ConfigBuilder::new(socket.into()).build(), + ) + .build() + .expect("Failed to build discovery service"); + discovery.bootstore = Some(BootStoreFile::Custom(file)); + + discovery = discovery.init().await.expect("Failed to initialize discovery service"); + + // There are no ENRs for op mainnet in the bootstore. + // If an ENR is added, this check will fail. + Discv5Driver::bootstrap_peers( + discovery.bootstore, + discovery.bootnodes, + OP_MAINNET_CHAIN_ID, + &discovery.disc, + ) + .await; + assert!( + discovery.disc.table_entries_enr().len() >= 10, + "Discovery table should have at least 10 ENRs" + ); + + // Those ENRs should be in the mainnet bootnodes. + let disc_enrs = discovery.disc.table_entries_enr(); + for enr in disc_enrs { + assert!( + mainnet.iter().any(|pub_key| pub_key == &enr.public_key()), + "Discovery table does not contain mainnet ENR: {enr:?}" + ); + } + } +} diff --git a/kona/crates/node/disc/src/error.rs b/rust/kona/crates/node/disc/src/error.rs similarity index 100% rename from kona/crates/node/disc/src/error.rs rename to rust/kona/crates/node/disc/src/error.rs diff --git a/kona/crates/node/disc/src/handler.rs b/rust/kona/crates/node/disc/src/handler.rs similarity index 100% rename from kona/crates/node/disc/src/handler.rs rename to rust/kona/crates/node/disc/src/handler.rs diff --git a/rust/kona/crates/node/disc/src/lib.rs b/rust/kona/crates/node/disc/src/lib.rs new file mode 100644 index 00000000000..4fdee7bc7c0 --- /dev/null +++ b/rust/kona/crates/node/disc/src/lib.rs @@ -0,0 +1,74 @@ +//! Discovery service for the OP Stack. +//! +//! 
This crate provides decentralized peer discovery capabilities using the Discv5 distributed +//! hash table (DHT) protocol, as defined in the Ethereum networking specifications. +//! +//! ## Overview +//! +//! The discovery service enables OP Stack nodes to find and connect to other network +//! participants without relying on centralized infrastructure. It maintains a local +//! view of the network through ENRs (Ethereum Node Records) and facilitates peer +//! connections for the gossip layer. +//! +//! ## Key Components +//! +//! - [`Discv5Driver`]: Main service driver that manages the discovery process +//! - [`Discv5Builder`]: Builder pattern for configuring discovery service parameters +//! - [`Discv5Handler`]: Handle for interacting with the discovery service +//! - [`LocalNode`]: Represents the local node's discovery information +//! +//! ## Discovery Process +//! +//! 1. **Bootstrap**: Connect to known bootstrap nodes to join the network +//! 2. **Table Population**: Discover peers through DHT queries and populate the routing table +//! 3. **Peer Maintenance**: Periodically refresh peer information and prune stale entries +//! 4. **ENR Updates**: Keep local ENR information current and propagate changes +//! +//! ## ENR Management +//! +//! ENRs (Ethereum Node Records) contain essential information about network peers: +//! - Node identity and cryptographic proof +//! - Network address and port information +//! - Protocol capabilities and version +//! - Chain-specific information (chain ID, etc.) +//! +//! ## Persistent Storage +//! +//! The service maintains a persistent bootstore that caches discovered peers across +//! restarts, reducing bootstrap time and improving network resilience. +//! +//! ## Configuration +//! +//! Key configuration parameters include: +//! - Discovery interval for random peer queries +//! - Bootstrap node list +//! - Storage location for persistent peer cache +//! 
- Network interface and port bindings + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/kona-logo.png" +)] +#![doc(issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/")] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +// Logging +#[macro_use] +extern crate tracing; +// Used in tests +use kona_genesis as _; + +mod builder; +pub use builder::{Discv5Builder, LocalNode}; + +mod error; +pub use error::Discv5BuilderError; + +mod driver; +pub use driver::Discv5Driver; + +mod handler; +pub use handler::{Discv5Handler, HandlerRequest}; + +mod metrics; +pub use metrics::Metrics; diff --git a/rust/kona/crates/node/disc/src/metrics.rs b/rust/kona/crates/node/disc/src/metrics.rs new file mode 100644 index 00000000000..63981a3e78f --- /dev/null +++ b/rust/kona/crates/node/disc/src/metrics.rs @@ -0,0 +1,62 @@ +//! Metrics for the discovery service. + +/// Container for discovery metrics. +#[derive(Debug, Clone)] +pub struct Metrics; + +impl Metrics { + /// Identifier for discv5 events. + pub const DISCOVERY_EVENT: &str = "kona_node_discovery_events"; + + /// Counter for the number of `FIND_NODE` requests. + pub const FIND_NODE_REQUEST: &str = "kona_node_find_node_requests"; + + /// Timer for the time taken to store ENRs in the bootstore. + pub const ENR_STORE_TIME: &str = "kona_node_enr_store_time"; + + /// Identifier for the gauge that tracks the number of peers in the discovery service. + pub const DISCOVERY_PEER_COUNT: &str = "kona_node_discovery_peer_count"; + + /// Initializes metrics for the discovery service. + /// + /// This does two things: + /// * Describes various metrics. + /// * Initializes metrics to 0 so they can be queried immediately. + #[cfg(feature = "metrics")] + pub fn init() { + Self::describe(); + Self::zero(); + } + + /// Describes metrics used in the discovery service. 
+ #[cfg(feature = "metrics")] + pub fn describe() { + metrics::describe_gauge!(Self::DISCOVERY_EVENT, "Events received by the discv5 service"); + metrics::describe_histogram!( + Self::ENR_STORE_TIME, + "Observations of elapsed time to store ENRs in the on-disk bootstore" + ); + metrics::describe_gauge!( + Self::DISCOVERY_PEER_COUNT, + "Number of peers connected to the discv5 service" + ); + metrics::describe_gauge!( + Self::FIND_NODE_REQUEST, + "Requests made to find a node through the discv5 peer discovery service" + ); + } + + /// Initializes metrics to `0` so they can be queried immediately by consumers of prometheus + /// metrics. + #[cfg(feature = "metrics")] + pub fn zero() { + // Discovery Event + kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "discovered", 0); + kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "session_established", 0); + kona_macros::set!(gauge, Self::DISCOVERY_EVENT, "type", "unverifiable_enr", 0); + + // Peer Counts + kona_macros::set!(gauge, Self::DISCOVERY_PEER_COUNT, 0); + kona_macros::set!(gauge, Self::FIND_NODE_REQUEST, 0); + } +} diff --git a/rust/kona/crates/node/engine/Cargo.toml b/rust/kona/crates/node/engine/Cargo.toml new file mode 100644 index 00000000000..3545fcaf9f9 --- /dev/null +++ b/rust/kona/crates/node/engine/Cargo.toml @@ -0,0 +1,74 @@ +[package] +name = "kona-engine" +description = "An implementation of the OP Stack engine client" +version = "0.1.2" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# workspace +kona-genesis.workspace = true +kona-macros.workspace = true +kona-protocol = {workspace = true, features = ["serde", "std"]} + +# alloy +alloy-eips.workspace = true +alloy-consensus.workspace = true +alloy-json-rpc.workspace = true +alloy-network.workspace = true +alloy-transport.workspace = true +alloy-primitives.workspace = true +alloy-provider = { workspace = 
true, features = ["ipc", "reqwest", "reqwest-rustls-tls", "engine-api"] } +alloy-rpc-client.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } +alloy-transport-http = { workspace = true, features = ["reqwest", "hyper", "jwt-auth"] } + +# op-alloy +op-alloy-network.workspace = true +op-alloy-consensus.workspace = true +op-alloy-provider.workspace = true +op-alloy-rpc-types.workspace = true +op-alloy-rpc-types-engine.workspace = true + +# general +serde.workspace = true +tokio.workspace = true +tracing.workspace = true +async-trait.workspace = true +thiserror.workspace = true +url.workspace = true +tower.workspace = true +http-body-util.workspace = true +derive_more = { workspace = true, features = ["display", "deref", "from_str", "constructor"] } +serde_json.workspace = true +jsonrpsee-types.workspace = true + +# metrics +metrics = { workspace = true, optional = true } + +# rollup boost +rollup-boost.workspace = true +rollup-boost-types.workspace = true +http.workspace = true +parking_lot.workspace = true + +[dev-dependencies] +kona-registry.workspace = true +rand = {workspace = true, features = ["thread_rng"]} +arbitrary.workspace = true +op-alloy-rpc-types = {workspace = true, features = ["arbitrary", "k256"]} +metrics-exporter-prometheus.workspace = true +rstest.workspace = true + +[features] +metrics = [ "dep:metrics" ] +test-utils = [ + "kona-protocol/test-utils" +] diff --git a/rust/kona/crates/node/engine/README.md b/rust/kona/crates/node/engine/README.md new file mode 100644 index 00000000000..02a4674daca --- /dev/null +++ b/rust/kona/crates/node/engine/README.md @@ -0,0 +1,45 @@ +# `kona-engine` + +An extensible implementation of the [OP Stack][op-stack] rollup node engine client. + +## Overview + +The `kona-engine` crate provides a task-based engine client for interacting with Ethereum execution layers. 
It implements the Engine API specification and manages the execution layer state through a priority-driven task queue system. + +## Key Components + +- **[`Engine`](crate::Engine)** - Main task queue processor that executes engine operations atomically +- **[`EngineClient`](crate::EngineClient)** - HTTP client for Engine API communication with JWT authentication +- **[`EngineState`](crate::EngineState)** - Tracks the current state of the execution layer +- **Task Types** - Specialized tasks for different engine operations: + - [`InsertTask`](crate::InsertTask) - Insert new payloads into the execution engine + - [`BuildTask`](crate::BuildTask) - Build new payloads with automatic forkchoice synchronization + - [`ConsolidateTask`](crate::ConsolidateTask) - Consolidate unsafe payloads to advance the safe chain + - [`FinalizeTask`](crate::FinalizeTask) - Finalize safe payloads on L1 confirmation + - [`SynchronizeTask`](crate::SynchronizeTask) - Internal task for execution layer forkchoice synchronization + +## Architecture + +The engine implements a task-driven architecture where forkchoice synchronization is handled automatically: + +- **Automatic Forkchoice Handling**: The [`BuildTask`](crate::BuildTask) automatically performs forkchoice updates during block building, eliminating the need for explicit forkchoice management in user code. +- **Internal Synchronization**: [`SynchronizeTask`](crate::SynchronizeTask) handles internal execution layer synchronization and is primarily used by other tasks rather than directly by users. +- **Priority-Based Execution**: Tasks are executed in priority order to ensure optimal sequencer performance and block processing efficiency. 
+ +## Engine API Compatibility + +The crate supports multiple Engine API versions with automatic version selection based on the rollup configuration: + +- **Engine Forkchoice Updated**: V2, V3 +- **Engine New Payload**: V2, V3, V4 +- **Engine Get Payload**: V2, V3, V4 + +Version selection follows Optimism hardfork activation times (Bedrock, Canyon, Delta, Ecotone, Isthmus). + +## Features + +- `metrics` - Enable Prometheus metrics collection (optional) + + + +[op-stack]: https://specs.optimism.io diff --git a/rust/kona/crates/node/engine/src/attributes.rs b/rust/kona/crates/node/engine/src/attributes.rs new file mode 100644 index 00000000000..7815362ddee --- /dev/null +++ b/rust/kona/crates/node/engine/src/attributes.rs @@ -0,0 +1,1003 @@ +//! Contains a utility method to check if attributes match a block. + +use alloy_eips::{Decodable2718, eip1559::BaseFeeParams}; +use alloy_network::TransactionResponse; +use alloy_primitives::{Address, B256, Bytes}; +use alloy_rpc_types_eth::{Block, BlockTransactions, Withdrawals}; +use kona_genesis::RollupConfig; +use kona_protocol::OpAttributesWithParent; +use op_alloy_consensus::{ + EIP1559ParamError, OpTxEnvelope, decode_holocene_extra_data, decode_jovian_extra_data, +}; +use op_alloy_rpc_types::Transaction; + +/// Result of validating payload attributes against an execution layer block. +/// +/// Used to verify that proposed payload attributes match the actual executed block, +/// ensuring consistency between the rollup derivation process and execution layer. +/// Validation includes withdrawals, transactions, fees, and other block properties. 
+/// +/// # Examples +/// +/// ```rust,ignore +/// use kona_engine::AttributesMatch; +/// use kona_genesis::RollupConfig; +/// use kona_protocol::OpAttributesWithParent; +/// +/// let config = RollupConfig::default(); +/// let match_result = AttributesMatch::check_withdrawals(&config, &attributes, &block); +/// +/// if match_result.is_match() { +/// println!("Attributes are valid for this block"); +/// } +/// ``` +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AttributesMatch { + /// The payload attributes are consistent with the block. + Match, + /// The attributes do not match the block (contains mismatch details). + Mismatch(AttributesMismatch), +} + +impl AttributesMatch { + /// Returns true if the attributes match the block. + pub const fn is_match(&self) -> bool { + matches!(self, Self::Match) + } + + /// Returns true if the attributes do not match the block. + pub const fn is_mismatch(&self) -> bool { + matches!(self, Self::Mismatch(_)) + } + + /// Checks that withdrawals for a block and attributes match. 
+ pub fn check_withdrawals( + config: &RollupConfig, + attributes: &OpAttributesWithParent, + block: &Block, + ) -> Self { + let attr_withdrawals = attributes.attributes().payload_attributes.withdrawals.as_ref(); + let attr_withdrawals = attr_withdrawals.map(|w| Withdrawals::new(w.clone())); + let block_withdrawals = block.withdrawals.as_ref(); + + if config.is_canyon_active(block.header.timestamp) { + // In canyon, the withdrawals list should be some and empty + if attr_withdrawals.is_none_or(|w| !w.is_empty()) { + return Self::Mismatch(AttributesMismatch::CanyonWithdrawalsNotEmpty); + } + if block_withdrawals.is_none_or(|w| !w.is_empty()) { + return Self::Mismatch(AttributesMismatch::CanyonWithdrawalsNotEmpty); + } + if !config.is_isthmus_active(block.header.timestamp) { + // In canyon, the withdrawals root should be set to the empty value + let empty_hash = alloy_consensus::EMPTY_ROOT_HASH; + if block.header.inner.withdrawals_root != Some(empty_hash) { + return Self::Mismatch(AttributesMismatch::CanyonNotEmptyHash); + } + } + } else { + // In bedrock, the withdrawals list should be None + if attr_withdrawals.is_some() { + return Self::Mismatch(AttributesMismatch::BedrockWithdrawals); + } + } + + if config.is_isthmus_active(block.header.timestamp) { + // In isthmus, the withdrawals root must be set + if block.header.inner.withdrawals_root.is_none() { + return Self::Mismatch(AttributesMismatch::IsthmusMissingWithdrawalsRoot); + } + } + + Self::Match + } + + /// Checks the attributes and block transaction list for consolidation. + /// We start by checking that there are the same number of transactions in both the attribute + /// payload and the block. Then we compare their contents + fn check_transactions(attributes_txs: &[Bytes], block: &Block) -> Self { + // Before checking the number of transactions, we have to make sure that the block + // has the right transactions format. 
We need to have access to the + // full transactions to be able to compare their contents. + let block_txs = match block.transactions { + BlockTransactions::Hashes(_) | BlockTransactions::Full(_) + if attributes_txs.is_empty() && block.transactions.is_empty() => + { + // We early return when both attributes and blocks are empty. This is for ergonomics + // because the default [`BlockTransactions`] format is + // [`BlockTransactions::Hash`], which may cause + // the [`BlockTransactions`] format check to fail right below. We may want to be a + // bit more flexible and not reject the hash format if both the + // attributes and the block are empty. + return Self::Match; + } + BlockTransactions::Uncle => { + // This can never be uncle transactions + error!( + "Invalid format for the block transactions. The `Uncle` transaction format is not relevant in that context and should not get used here. This is a bug" + ); + + return AttributesMismatch::MalformedBlockTransactions.into(); + } + BlockTransactions::Hashes(_) => { + // We can't have hash transactions with non empty blocks + error!( + "Invalid format for the block transactions. The `Hash` transaction format is not relevant in that context and should not get used here. This is a bug." + ); + + return AttributesMismatch::MalformedBlockTransactions.into(); + } + BlockTransactions::Full(ref block_txs) => block_txs, + }; + + let attributes_txs_len = attributes_txs.len(); + let block_txs_len = block_txs.len(); + + if attributes_txs_len != block_txs_len { + return AttributesMismatch::TransactionLen(attributes_txs_len, block_txs_len).into(); + } + + // Then we need to check that the content of the encoded transactions match + // Note that it is safe to zip both iterators because we checked their length + // beforehand. 
+ for (attr_tx_bytes, block_tx) in attributes_txs.iter().zip(block_txs) { + trace!( + target: "engine", + ?attr_tx_bytes, + block_tx_hash = %block_tx.tx_hash(), + "Checking attributes transaction against block transaction", + ); + // Let's try to deserialize the attributes transaction + let Ok(attr_tx) = OpTxEnvelope::decode_2718(&mut &attr_tx_bytes[..]) else { + error!( + "Impossible to deserialize transaction from attributes. If we have stored these attributes it means the transactions where well formatted. This is a bug" + ); + + return AttributesMismatch::MalformedAttributesTransaction.into(); + }; + + if &attr_tx != block_tx.inner.inner.inner() { + warn!(target: "engine", ?attr_tx, ?block_tx, "Transaction mismatch in derived attributes"); + return AttributesMismatch::TransactionContent( + attr_tx.tx_hash(), + block_tx.tx_hash(), + ) + .into(); + } + } + + Self::Match + } + + /// Validates and compares EIP1559 parameters for consolidation. + fn check_eip1559( + config: &RollupConfig, + attributes: &OpAttributesWithParent, + block: &Block, + ) -> Self { + // We can assume that the EIP-1559 params are set iff holocene is active. + // Note here that we don't need to check for the attributes length because of type-safety. + let (ae, ad): (u128, u128) = match attributes.attributes().decode_eip_1559_params() { + None => { + // Holocene is active but the eip1559 are not set. This is a bug! + // Note: we checked the timestamp match above, so we can assume that both the + // attributes and the block have the same stamps + if config.is_holocene_active(block.header.timestamp) { + error!( + "EIP1559 parameters for attributes not set while holocene is active. This is a bug" + ); + return AttributesMismatch::MissingAttributesEIP1559.into(); + } + + // If the attributes are not specified, that means we can just early return. + return Self::Match; + } + Some((0, e)) if e != 0 => { + error!( + "Holocene EIP1559 params cannot have a 0 denominator unless elasticity is also 0. 
This is a bug" + ); + return AttributesMismatch::InvalidEIP1559ParamsCombination.into(); + } + // We need to translate (0, 0) parameters to pre-holocene protocol constants. + // Since holocene is supposed to be active, canyon should be as well. We take the canyon + // base fee params. + Some((0, 0)) => { + let BaseFeeParams { max_change_denominator, elasticity_multiplier } = + config.chain_op_config.post_canyon_params(); + + (elasticity_multiplier, max_change_denominator) + } + Some((ae, ad)) => (ae.into(), ad.into()), + }; + + let extra_data_decoded = if config.is_jovian_active(block.header.timestamp) { + decode_jovian_extra_data(&block.header.extra_data).map(|(be, bd, _)| (be, bd)) + } else if config.is_holocene_active(block.header.timestamp) { + decode_holocene_extra_data(&block.header.extra_data) + } else { + return AttributesMismatch::MissingBlockEIP1559.into(); + }; + + // We decode the extra data stemming from the block header. + let (be, bd): (u128, u128) = match extra_data_decoded { + Ok((be, bd)) => (be.into(), bd.into()), + Err(EIP1559ParamError::NoEIP1559Params) => { + error!( + "EIP1559 parameters for the block not set while holocene is active. This is a bug" + ); + return AttributesMismatch::MissingBlockEIP1559.into(); + } + Err(EIP1559ParamError::InvalidVersion(v)) => { + error!( + version = v, + "The version in the extra data EIP1559 payload is incorrect. Should be 0. This is a bug", + ); + return AttributesMismatch::InvalidExtraDataVersion.into(); + } + Err(e) => { + error!(err = ?e, "An unknown extra data decoding error occurred. 
This is a bug",); + + return AttributesMismatch::UnknownExtraDataDecodingError(e).into(); + } + }; + + // We now have to check that both parameters match + if ae != be || ad != bd { + return AttributesMismatch::EIP1559Parameters( + BaseFeeParams { max_change_denominator: ad, elasticity_multiplier: ae }, + BaseFeeParams { max_change_denominator: bd, elasticity_multiplier: be }, + ) + .into(); + } + + Self::Match + } + + /// Checks if the specified [`OpAttributesWithParent`] matches the specified [`Block`]. + /// Returns [`AttributesMatch::Match`] if they match, otherwise returns + /// [`AttributesMatch::Mismatch`]. + pub fn check( + config: &RollupConfig, + attributes: &OpAttributesWithParent, + block: &Block, + ) -> Self { + if attributes.parent.block_info.hash != block.header.inner.parent_hash { + return AttributesMismatch::ParentHash( + attributes.parent.block_info.hash, + block.header.inner.parent_hash, + ) + .into(); + } + + if attributes.attributes().payload_attributes.timestamp != block.header.inner.timestamp { + return AttributesMismatch::Timestamp( + attributes.attributes().payload_attributes.timestamp, + block.header.inner.timestamp, + ) + .into(); + } + + let mix_hash = block.header.inner.mix_hash; + if attributes.attributes().payload_attributes.prev_randao != mix_hash { + return AttributesMismatch::PrevRandao( + attributes.attributes().payload_attributes.prev_randao, + mix_hash, + ) + .into(); + } + + // Let's extract the list of attribute transactions + let default_vec = vec![]; + let attributes_txs = attributes.attributes().transactions.as_ref().unwrap_or(&default_vec); + + // Check transactions + if let mismatch @ Self::Mismatch(_) = Self::check_transactions(attributes_txs, block) { + return mismatch; + } + + let Some(gas_limit) = attributes.attributes().gas_limit else { + return AttributesMismatch::MissingAttributesGasLimit.into(); + }; + + if gas_limit != block.header.inner.gas_limit { + return AttributesMismatch::GasLimit(gas_limit, 
block.header.inner.gas_limit).into(); + } + + if let m @ Self::Mismatch(_) = Self::check_withdrawals(config, attributes, block) { + return m; + } + + if attributes.attributes().payload_attributes.parent_beacon_block_root != + block.header.inner.parent_beacon_block_root + { + return AttributesMismatch::ParentBeaconBlockRoot( + attributes.attributes().payload_attributes.parent_beacon_block_root, + block.header.inner.parent_beacon_block_root, + ) + .into(); + } + + if attributes.attributes().payload_attributes.suggested_fee_recipient != + block.header.inner.beneficiary + { + return AttributesMismatch::FeeRecipient( + attributes.attributes().payload_attributes.suggested_fee_recipient, + block.header.inner.beneficiary, + ) + .into(); + } + + // Check the EIP-1559 parameters in a separate helper method + if let m @ Self::Mismatch(_) = Self::check_eip1559(config, attributes, block) { + return m; + } + + Self::Match + } +} + +/// An enum over the type of mismatch between [`OpAttributesWithParent`] +/// and a [`Block`]. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AttributesMismatch { + /// The parent hash of the block does not match the parent hash of the attributes. + ParentHash(B256, B256), + /// The timestamp of the block does not match the timestamp of the attributes. + Timestamp(u64, u64), + /// The prev randao of the block does not match the prev randao of the attributes. + PrevRandao(B256, B256), + /// The block contains malformed transactions. This is a bug - the transaction format + /// should be checked before the consolidation step. + MalformedBlockTransactions, + /// There is a malformed transaction inside the attributes. This is a bug - the transaction + /// format should be checked before the consolidation step. + MalformedAttributesTransaction, + /// A mismatch in the number of transactions contained in the attributes and the block. 
+ TransactionLen(usize, usize), + /// A mismatch in the content of some transactions contained in the attributes and the block. + TransactionContent(B256, B256), + /// The EIP1559 payload for the [`OpAttributesWithParent`] is missing when holocene is active. + MissingAttributesEIP1559, + /// The EIP1559 payload for the block is missing when holocene is active. + MissingBlockEIP1559, + /// The version in the extra data EIP1559 payload is incorrect. Should be 0. + InvalidExtraDataVersion, + /// An unknown extra data decoding error occurred. + UnknownExtraDataDecodingError(EIP1559ParamError), + /// Holocene EIP1559 params cannot have a 0 denominator unless elasticity is also 0 + InvalidEIP1559ParamsCombination, + /// The EIP1559 base fee parameters of the attributes and the block don't match + EIP1559Parameters(BaseFeeParams, BaseFeeParams), + /// Transactions mismatch. + Transactions(u64, u64), + /// The gas limit of the block does not match the gas limit of the attributes. + GasLimit(u64, u64), + /// The gas limit for the [`OpAttributesWithParent`] is missing. + MissingAttributesGasLimit, + /// The fee recipient of the block does not match the fee recipient of the attributes. + FeeRecipient(Address, Address), + /// A mismatch in the parent beacon block root. + ParentBeaconBlockRoot(Option, Option), + /// After the canyon hardfork, withdrawals cannot be empty. + CanyonWithdrawalsNotEmpty, + /// After the canyon hardfork, the withdrawals root must be the empty hash. + CanyonNotEmptyHash, + /// In the bedrock hardfork, the attributes must has empty withdrawals. + BedrockWithdrawals, + /// In the isthmus hardfork, the withdrawals root must be set. 
+ IsthmusMissingWithdrawalsRoot, +} + +impl From for AttributesMatch { + fn from(mismatch: AttributesMismatch) -> Self { + Self::Mismatch(mismatch) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::AttributesMismatch::EIP1559Parameters; + use alloy_consensus::EMPTY_ROOT_HASH; + use alloy_primitives::{Bytes, FixedBytes, address, b256}; + use alloy_rpc_types_eth::BlockTransactions; + use arbitrary::{Arbitrary, Unstructured}; + use kona_protocol::{BlockInfo, L2BlockInfo}; + use kona_registry::ROLLUP_CONFIGS; + use op_alloy_consensus::encode_holocene_extra_data; + use op_alloy_rpc_types_engine::OpPayloadAttributes; + + fn default_attributes() -> OpAttributesWithParent { + OpAttributesWithParent { + attributes: OpPayloadAttributes::default(), + parent: L2BlockInfo::default(), + derived_from: Some(BlockInfo::default()), + is_last_in_span: true, + } + } + + fn default_rollup_config() -> &'static RollupConfig { + let opm = 10; + ROLLUP_CONFIGS.get(&opm).expect("default rollup config should exist") + } + + #[test] + fn test_attributes_match_parent_hash_mismatch() { + let cfg = default_rollup_config(); + let attributes = default_attributes(); + let mut block = Block::::default(); + block.header.inner.parent_hash = + b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"); + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::ParentHash( + attributes.parent.block_info.hash, + block.header.inner.parent_hash, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_check_timestamp() { + let cfg = default_rollup_config(); + let attributes = default_attributes(); + let mut block = Block::::default(); + block.header.inner.timestamp = 1234567890; + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::Timestamp( + 
attributes.attributes().payload_attributes.timestamp, + block.header.inner.timestamp, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_check_prev_randao() { + let cfg = default_rollup_config(); + let attributes = default_attributes(); + let mut block = Block::::default(); + block.header.inner.mix_hash = + b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef"); + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::PrevRandao( + attributes.attributes().payload_attributes.prev_randao, + block.header.inner.mix_hash, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_missing_gas_limit() { + let cfg = default_rollup_config(); + let attributes = default_attributes(); + let mut block = Block::::default(); + block.header.inner.gas_limit = 123456; + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::MissingAttributesGasLimit.into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_check_gas_limit() { + let cfg = default_rollup_config(); + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(123457); + let mut block = Block::::default(); + block.header.inner.gas_limit = 123456; + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::GasLimit( + attributes.attributes().gas_limit.unwrap_or_default(), + block.header.inner.gas_limit, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_check_parent_beacon_block_root() { + let cfg = default_rollup_config(); + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(0); + 
attributes.attributes.payload_attributes.parent_beacon_block_root = + Some(b256!("1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef")); + let block = Block::::default(); + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::ParentBeaconBlockRoot( + attributes.attributes().payload_attributes.parent_beacon_block_root, + block.header.inner.parent_beacon_block_root, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match_check_fee_recipient() { + let cfg = default_rollup_config(); + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(0); + let mut block = Block::::default(); + block.header.inner.beneficiary = address!("1234567890abcdef1234567890abcdef12345678"); + let check = AttributesMatch::check(cfg, &attributes, &block); + let expected: AttributesMatch = AttributesMismatch::FeeRecipient( + attributes.attributes().payload_attributes.suggested_fee_recipient, + block.header.inner.beneficiary, + ) + .into(); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + fn generate_txs(num_txs: usize) -> Vec { + // Simulate some random data + let mut data = vec![0; 1024]; + let mut rng = rand::rng(); + + (0..num_txs) + .map(|_| { + rand::Rng::fill(&mut rng, &mut data[..]); + + // Create unstructured data with the random bytes + let u = Unstructured::new(&data); + + // Generate a random instance of MyStruct + Transaction::arbitrary_take_rest(u).expect("Impossible to generate arbitrary tx") + }) + .collect() + } + + fn test_transactions_match_helper() -> (OpAttributesWithParent, Block) { + const NUM_TXS: usize = 10; + + let transactions = generate_txs(NUM_TXS); + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(0); + attributes.attributes.transactions = Some( + transactions + .iter() + .map(|tx| { + use alloy_eips::Encodable2718; + let mut buf = vec![]; + 
tx.inner.inner.inner().encode_2718(&mut buf); + Bytes::from(buf) + }) + .collect::>(), + ); + + let block = Block:: { + transactions: BlockTransactions::Full(transactions), + ..Default::default() + }; + + (attributes, block) + } + + #[test] + fn test_attributes_match_check_transactions() { + let cfg = default_rollup_config(); + let (attributes, block) = test_transactions_match_helper(); + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + } + + #[test] + fn test_attributes_mismatch_check_transactions_len() { + let cfg = default_rollup_config(); + let (mut attributes, block) = test_transactions_match_helper(); + attributes.attributes = OpPayloadAttributes { + transactions: attributes.attributes.transactions.map(|mut txs| { + txs.pop(); + txs + }), + ..attributes.attributes + }; + + let block_txs_len = block.transactions.len(); + + let expected: AttributesMatch = + AttributesMismatch::TransactionLen(block_txs_len - 1, block_txs_len).into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_mismatch_check_transaction_content() { + let cfg = default_rollup_config(); + let (attributes, mut block) = test_transactions_match_helper(); + let BlockTransactions::Full(block_txs) = &mut block.transactions else { + unreachable!("The helper should build a full list of transactions") + }; + + let first_tx = block_txs.last().unwrap().clone(); + let first_tx_hash = first_tx.tx_hash(); + + // We set the last tx to be the same as the first transaction. + // Since the transactions are generated randomly and there are more than one transaction, + // there is a very high likelihood that any pair of transactions is distinct. 
+ let last_tx = block_txs.first_mut().unwrap(); + let last_tx_hash = last_tx.tx_hash(); + *last_tx = first_tx; + + let expected: AttributesMatch = + AttributesMismatch::TransactionContent(last_tx_hash, first_tx_hash).into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + /// Checks the edge case where the attributes array is empty. + #[test] + fn test_attributes_mismatch_empty_tx_attributes() { + let cfg = default_rollup_config(); + let (mut attributes, block) = test_transactions_match_helper(); + attributes.attributes = OpPayloadAttributes { transactions: None, ..attributes.attributes }; + + let block_txs_len = block.transactions.len(); + + let expected: AttributesMatch = AttributesMismatch::TransactionLen(0, block_txs_len).into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + /// Checks the edge case where the transactions contained in the block have the wrong + /// format. + #[test] + fn test_block_transactions_wrong_format() { + let cfg = default_rollup_config(); + let (attributes, mut block) = test_transactions_match_helper(); + block.transactions = BlockTransactions::Uncle; + + let expected: AttributesMatch = AttributesMismatch::MalformedBlockTransactions.into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + /// Checks the edge case where the transactions contained in the attributes have the wrong + /// format. 
+ #[test] + fn test_attributes_transactions_wrong_format() { + let cfg = default_rollup_config(); + let (mut attributes, block) = test_transactions_match_helper(); + let txs = attributes.attributes.transactions.as_mut().unwrap(); + let first_tx_bytes = txs.first_mut().unwrap(); + *first_tx_bytes = Bytes::copy_from_slice(&[0, 1, 2]); + + let expected: AttributesMatch = AttributesMismatch::MalformedAttributesTransaction.into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + assert!(check.is_mismatch()); + } + + // Test that the check pass if the transactions obtained from the attributes have the format + // `Some(vec![])`, ie an empty vector inside a `Some` option. + #[test] + fn test_attributes_and_block_transactions_empty() { + let cfg = default_rollup_config(); + let (mut attributes, mut block) = test_transactions_match_helper(); + + attributes.attributes = + OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; + + block.transactions = BlockTransactions::Full(vec![]); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + + // Edge case: if the block transactions and the payload attributes are empty, we can also + // use the hash format (this is the default value of `BlockTransactions`). + attributes.attributes = OpPayloadAttributes { transactions: None, ..attributes.attributes }; + block.transactions = BlockTransactions::Hashes(vec![]); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + } + + // Edge case: if the payload attributes has the format `Some(vec![])`, we can still + // use the hash format. 
+ #[test] + fn test_attributes_and_block_transactions_empty_hash_format() { + let cfg = default_rollup_config(); + let (mut attributes, mut block) = test_transactions_match_helper(); + + attributes.attributes = + OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; + + block.transactions = BlockTransactions::Hashes(vec![]); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + } + + // Test that the check fails if the block format is incorrect and the attributes are empty + #[test] + fn test_attributes_empty_and_block_uncle() { + let cfg = default_rollup_config(); + let (mut attributes, mut block) = test_transactions_match_helper(); + + attributes.attributes = + OpPayloadAttributes { transactions: Some(vec![]), ..attributes.attributes }; + + block.transactions = BlockTransactions::Uncle; + + let expected: AttributesMatch = AttributesMismatch::MalformedBlockTransactions.into(); + + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, expected); + } + + fn eip1559_test_setup() -> (RollupConfig, OpAttributesWithParent, Block) { + let mut cfg = default_rollup_config().clone(); + + // We need to activate holocene to make sure it works! We set the activation time to zero to + // make sure that it is activated by default. 
+ cfg.hardforks.holocene_time = Some(0); + + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(0); + // For canyon and above we need to specify the withdrawals + attributes.attributes.payload_attributes.withdrawals = Some(vec![]); + + // For canyon and above we also need to specify the withdrawal headers + let block = Block { + withdrawals: Some(Withdrawals(vec![])), + header: alloy_rpc_types_eth::Header { + inner: alloy_consensus::Header { + withdrawals_root: Some(EMPTY_ROOT_HASH), + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + (cfg, attributes, block) + } + + /// Ensures that we have to set the EIP1559 parameters for holocene and above. + #[test] + fn test_eip1559_parameters_not_specified_holocene() { + let (cfg, attributes, block) = eip1559_test_setup(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Mismatch(AttributesMismatch::MissingAttributesEIP1559)); + assert!(check.is_mismatch()); + } + + /// Ensures that we have to set the EIP1559 parameters for holocene and above. + #[test] + fn test_eip1559_parameters_specified_attributes_but_not_block() { + let (cfg, mut attributes, block) = eip1559_test_setup(); + + attributes.attributes.eip_1559_params = Some(Default::default()); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!( + check, + AttributesMatch::Mismatch(AttributesMismatch::UnknownExtraDataDecodingError( + EIP1559ParamError::InvalidExtraDataLength + )) + ); + assert!(check.is_mismatch()); + } + + /// Check that, when the eip1559 params are specified and empty, the check fails because we + /// fallback on canyon params for the attributes but not for the block (edge case). 
+ #[test] + fn test_eip1559_parameters_specified_both_and_empty() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + attributes.attributes.eip_1559_params = Some(Default::default()); + block.header.extra_data = vec![0; 9].into(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!( + check, + AttributesMatch::Mismatch(EIP1559Parameters( + BaseFeeParams { max_change_denominator: 250, elasticity_multiplier: 6 }, + BaseFeeParams { max_change_denominator: 0, elasticity_multiplier: 0 } + )) + ); + assert!(check.is_mismatch()); + } + + #[test] + fn test_eip1559_parameters_empty_for_attr_only() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + attributes.attributes.eip_1559_params = Some(Default::default()); + block.header.extra_data = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 250, elasticity_multiplier: 6 }, + ) + .unwrap(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + assert!(check.is_match()); + } + + #[test] + fn test_eip1559_parameters_custom_values_match() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + let eip1559_extra_params = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, + ) + .unwrap(); + let eip1559_params: FixedBytes<8> = + eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); + + attributes.attributes.eip_1559_params = Some(eip1559_params); + block.header.extra_data = eip1559_extra_params; + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + assert!(check.is_match()); + } + + #[test] + fn test_eip1559_parameters_custom_values_mismatch() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + let eip1559_extra_params = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { 
max_change_denominator: 100, elasticity_multiplier: 2 }, + ) + .unwrap(); + + let eip1559_params: FixedBytes<8> = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 99, elasticity_multiplier: 2 }, + ) + .unwrap() + .split_off(1) + .as_ref() + .try_into() + .unwrap(); + + attributes.attributes.eip_1559_params = Some(eip1559_params); + block.header.extra_data = eip1559_extra_params; + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!( + check, + AttributesMatch::Mismatch(AttributesMismatch::EIP1559Parameters( + BaseFeeParams { max_change_denominator: 99, elasticity_multiplier: 2 }, + BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 } + )) + ); + assert!(check.is_mismatch()); + } + + /// Edge case: if the elasticity multiplier is 0, the max change denominator cannot be 0 as well + #[test] + fn test_eip1559_parameters_combination_mismatch() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + let eip1559_extra_params = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 5, elasticity_multiplier: 0 }, + ) + .unwrap(); + let eip1559_params: FixedBytes<8> = + eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); + + attributes.attributes.eip_1559_params = Some(eip1559_params); + block.header.extra_data = eip1559_extra_params; + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!( + check, + AttributesMatch::Mismatch(AttributesMismatch::InvalidEIP1559ParamsCombination) + ); + assert!(check.is_mismatch()); + } + + /// Check that the version of the extra block data must be zero. 
+ #[test] + fn test_eip1559_parameters_invalid_version() { + let (cfg, mut attributes, mut block) = eip1559_test_setup(); + + let eip1559_extra_params = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, + ) + .unwrap(); + let eip1559_params: FixedBytes<8> = + eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); + + let mut raw_extra_params_bytes = eip1559_extra_params.to_vec(); + raw_extra_params_bytes[0] = 10; + + attributes.attributes.eip_1559_params = Some(eip1559_params); + block.header.extra_data = raw_extra_params_bytes.into(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Mismatch(AttributesMismatch::InvalidExtraDataVersion)); + assert!(check.is_mismatch()); + } + + /// Try to encode jovian extra data with the holocene encoding function. + #[test] + fn test_eip1559_parameters_invalid_jovian_encoding() { + let (mut cfg, mut attributes, mut block) = eip1559_test_setup(); + + cfg.hardforks.jovian_time = Some(0); + + let eip1559_extra_params = encode_holocene_extra_data( + Default::default(), + BaseFeeParams { max_change_denominator: 100, elasticity_multiplier: 2 }, + ) + .unwrap(); + let eip1559_params: FixedBytes<8> = + eip1559_extra_params.clone().split_off(1).as_ref().try_into().unwrap(); + + let raw_extra_params_bytes = eip1559_extra_params.to_vec(); + + attributes.attributes.eip_1559_params = Some(eip1559_params); + block.header.extra_data = raw_extra_params_bytes.into(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + assert_eq!( + check, + AttributesMatch::Mismatch(AttributesMismatch::UnknownExtraDataDecodingError( + EIP1559ParamError::InvalidExtraDataLength + )) + ); + assert!(check.is_mismatch()); + } + + /// The default parameters can't overflow the u32 byte representation of the base fee params! 
+ #[test] + fn test_eip1559_default_param_cant_overflow() { + let (mut cfg, mut attributes, mut block) = eip1559_test_setup(); + cfg.chain_op_config.eip1559_denominator_canyon = u64::MAX; + cfg.chain_op_config.eip1559_elasticity = u64::MAX; + + attributes.attributes.eip_1559_params = Some(Default::default()); + block.header.extra_data = vec![0; 9].into(); + + let check = AttributesMatch::check(&cfg, &attributes, &block); + + // Note that in this case we *always* have a mismatch because there isn't enough bytes in + // the default representation of the extra params to represent a u128 + assert_eq!( + check, + AttributesMatch::Mismatch(EIP1559Parameters( + BaseFeeParams { + max_change_denominator: u64::MAX as u128, + elasticity_multiplier: u64::MAX as u128 + }, + BaseFeeParams { max_change_denominator: 0, elasticity_multiplier: 0 } + )) + ); + assert!(check.is_mismatch()); + } + + #[test] + fn test_attributes_match() { + let cfg = default_rollup_config(); + let mut attributes = default_attributes(); + attributes.attributes.gas_limit = Some(0); + let block = Block::::default(); + let check = AttributesMatch::check(cfg, &attributes, &block); + assert_eq!(check, AttributesMatch::Match); + assert!(check.is_match()); + } +} diff --git a/rust/kona/crates/node/engine/src/client.rs b/rust/kona/crates/node/engine/src/client.rs new file mode 100644 index 00000000000..4e73ece6919 --- /dev/null +++ b/rust/kona/crates/node/engine/src/client.rs @@ -0,0 +1,511 @@ +//! An Engine API Client. 
+ +use crate::{Metrics, RollupBoostServerArgs, RollupBoostServerError}; +use alloy_eips::{BlockId, eip1898::BlockNumberOrTag}; +use alloy_network::{Ethereum, Network}; +use alloy_primitives::{Address, B256, BlockHash, Bytes, StorageKey}; +use alloy_provider::{EthGetBlock, Provider, RootProvider, RpcWithBlock, ext::EngineApi}; +use alloy_rpc_client::RpcClient; +use alloy_rpc_types_engine::{ + ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, + ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, JwtSecret, + PayloadId, PayloadStatus, +}; +use alloy_rpc_types_eth::{Block, EIP1186AccountProofResponse}; +use alloy_transport::{RpcError, TransportErrorKind, TransportResult}; +use alloy_transport_http::{ + AuthLayer, AuthService, Http, HyperClient, + hyper_util::{ + client::legacy::{Client, connect::HttpConnector}, + rt::TokioExecutor, + }, +}; +use async_trait::async_trait; +use http::uri::InvalidUri; +use http_body_util::Full; +use kona_genesis::RollupConfig; +use kona_protocol::{FromBlockError, L2BlockInfo}; +use op_alloy_network::Optimism; +use op_alloy_provider::ext::engine::OpEngineApi; +use op_alloy_rpc_types::Transaction; +use op_alloy_rpc_types_engine::{ + OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, + OpPayloadAttributes, ProtocolVersion, +}; +use parking_lot::Mutex; +use rollup_boost::{ + EngineApiServer, Flashblocks, FlashblocksWebsocketConfig, Probes, RollupBoostServer, + RpcClientError, +}; +use rollup_boost_types::payload::PayloadSource; +use std::{ + future::Future, + net::{AddrParseError, IpAddr, SocketAddr}, + str::FromStr, + sync::Arc, + time::{Duration, Instant}, +}; +use thiserror::Error; +use tower::ServiceBuilder; +use url::Url; + +/// An error that occurred in the [`EngineClient`]. 
+#[derive(Error, Debug)] +pub enum EngineClientError { + /// An RPC error occurred + #[error("An RPC error occurred: {0}")] + RpcError(#[from] RpcError), + + /// An error occurred while decoding the payload + #[error("An error occurred while decoding the payload: {0}")] + BlockInfoDecodeError(#[from] FromBlockError), +} +/// A Hyper HTTP client with a JWT authentication layer. +pub type HyperAuthClient> = HyperClient>>; + +/// Engine API client used to communicate with L1/L2 ELs and optional rollup-boost. +/// `EngineClient` trait that is very coupled to its only implementation. +/// The main reason this exists is for mocking/unit testing. +#[async_trait] +pub trait EngineClient: OpEngineApi> + Send + Sync { + /// Returns a reference to the inner [`RollupConfig`]. + fn cfg(&self) -> &RollupConfig; + + /// Fetches the L1 block with the provided `BlockId`. + fn get_l1_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse>; + + /// Fetches the L2 block with the provided `BlockId`. + fn get_l2_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse>; + + /// Get the account and storage values of the specified account including the merkle proofs. + /// This call can be used to verify that the data has not been tampered with. + fn get_proof( + &self, + address: Address, + keys: Vec, + ) -> RpcWithBlock<(Address, Vec), EIP1186AccountProofResponse>; + + /// Sends the given payload to the execution layer client, as specified for the Paris fork. + async fn new_payload_v1(&self, payload: ExecutionPayloadV1) -> TransportResult; + + /// Fetches the [`Block`] for the given [`BlockNumberOrTag`]. + async fn l2_block_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result>, EngineClientError>; + + /// Fetches the [`L2BlockInfo`] by [`BlockNumberOrTag`]. + async fn l2_block_info_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result, EngineClientError>; +} + +/// An Engine API client that provides authenticated HTTP communication with an execution layer. 
+/// +/// The [`OpEngineClient`] handles JWT authentication and manages connections to both L1 and L2 +/// execution layers. It automatically selects the appropriate Engine API version based on the +/// rollup configuration and block timestamps. +/// +/// Engine API client used to communicate with L1/L2 ELs and optional rollup-boost. +#[derive(Clone, Debug)] +pub struct OpEngineClient +where + L1Provider: Provider, + L2Provider: Provider, +{ + /// The L2 engine provider for Engine API calls. + engine: L2Provider, + /// The L1 chain provider for reading L1 data. + l1_provider: L1Provider, + /// The [`RollupConfig`] for determining Engine API versions based on hardfork activations. + cfg: Arc, + /// The rollup boost server + pub rollup_boost: Arc, +} + +impl OpEngineClient +where + L1Provider: Provider, + L2Provider: Provider, +{ + /// Creates a new RPC client for the given address and JWT secret. + pub fn rpc_client(addr: Url, jwt: JwtSecret) -> RootProvider { + let hyper_client = Client::builder(TokioExecutor::new()).build_http::>(); + let auth_layer = AuthLayer::new(jwt); + let service = ServiceBuilder::new().layer(auth_layer).service(hyper_client); + let layer_transport = HyperClient::with_service(service); + let http_hyper = Http::with_client(layer_transport, addr); + let rpc_client = RpcClient::new(http_hyper, false); + RootProvider::::new(rpc_client) + } +} + +/// The builder for the [`OpEngineClient`]. +#[derive(Debug, Clone)] +pub struct EngineClientBuilder { + /// The builder URL. + pub builder: Url, + /// The builder JWT secret. + pub builder_jwt: JwtSecret, + /// The builder timeout. + pub builder_timeout: Duration, + /// The L2 Engine API endpoint URL. + pub l2: Url, + /// The L2 JWT secret. + pub l2_jwt: JwtSecret, + /// The L2 timeout. + pub l2_timeout: Duration, + /// The L1 RPC URL. + pub l1_rpc: Url, + /// The [`RollupConfig`] for determining Engine API versions based on hardfork activations. + pub cfg: Arc, + /// The rollup boost arguments. 
+ pub rollup_boost: RollupBoostServerArgs, +} + +/// An error that occurred in the [`EngineClientBuilder`]. +#[derive(Error, Debug)] +pub enum EngineClientBuilderError { + /// An error occurred while parsing the URL + #[error("An error occurred while parsing the URL: {0}")] + UrlParseError(#[from] InvalidUri), + /// An error occurred while parsing the IP address + #[error("An error occurred while parsing the IP address: {0}")] + IpAddrParseError(#[from] AddrParseError), + /// An error occurred while creating the RPC client + #[error("An error occurred while creating the RPC client: {0}")] + RpcClientError(#[from] RpcClientError), + /// An error occurred while creating the Flashblocks service + #[error("An error occurred while creating the Flashblocks service: {0}")] + FlashblocksError(String), +} + +impl EngineClientBuilder { + /// Creates a new [`OpEngineClient`] with authenticated HTTP connections. + /// + /// Sets up JWT-authenticated connections to the Engine API endpoint through the rollup-boost + /// server along with an unauthenticated connection to the L1 chain. + /// + /// # FIXME(@theochap, ``, ``): + /// This method can be simplified/improved in a few ways: + /// - Unify kona's and rollup-boost's RPC client creation + /// - Removed the `dyn RollupBoostServerLike` type erasure. 
+ pub fn build( + self, + ) -> Result>, EngineClientBuilderError> + { + let probes = Arc::new(Probes::default()); + let l2_client = rollup_boost::RpcClient::new( + http::Uri::from_str(self.l2.to_string().as_str())?, + self.l2_jwt, + self.l2_timeout.as_millis() as u64, + PayloadSource::L2, + )?; + let builder_client = rollup_boost::RpcClient::new( + http::Uri::from_str(self.builder.to_string().as_str())?, + self.builder_jwt, + self.builder_timeout.as_millis() as u64, + PayloadSource::Builder, + )?; + + let rollup_boost_server = match self.rollup_boost.flashblocks { + Some(flashblocks) => { + let inbound_url = flashblocks.flashblocks_builder_url; + let outbound_addr = SocketAddr::new( + IpAddr::from_str(&flashblocks.flashblocks_host)?, + flashblocks.flashblocks_port, + ); + + let ws_config = flashblocks.flashblocks_ws_config; + + let builder_client = Arc::new( + Flashblocks::run( + builder_client, + inbound_url, + outbound_addr, + FlashblocksWebsocketConfig { + flashblock_builder_ws_initial_reconnect_ms: ws_config + .flashblock_builder_ws_initial_reconnect_ms, + flashblock_builder_ws_max_reconnect_ms: ws_config + .flashblock_builder_ws_max_reconnect_ms, + flashblock_builder_ws_connect_timeout_ms: ws_config + .flashblock_builder_ws_connect_timeout_ms, + flashblock_builder_ws_ping_interval_ms: ws_config + .flashblock_builder_ws_ping_interval_ms, + flashblock_builder_ws_pong_timeout_ms: ws_config + .flashblock_builder_ws_pong_timeout_ms, + }, + ) + .map_err(|e| EngineClientBuilderError::FlashblocksError(e.to_string()))?, + ); + Arc::new(rollup_boost::RollupBoostServer::new( + l2_client, + builder_client, + Arc::new(Mutex::new(self.rollup_boost.initial_execution_mode)), + self.rollup_boost.block_selection_policy, + probes, + self.rollup_boost.external_state_root, + self.rollup_boost.ignore_unhealthy_builders, + )) + } + None => Arc::new(rollup_boost::RollupBoostServer::new( + l2_client, + Arc::new(builder_client), + 
Arc::new(Mutex::new(self.rollup_boost.initial_execution_mode)), + self.rollup_boost.block_selection_policy, + probes, + self.rollup_boost.external_state_root, + self.rollup_boost.ignore_unhealthy_builders, + )), + }; + + // TODO(ethereum-optimism/optimism#18656): remove this client, upstream the remaining + // EngineApiExt methods to the RollupBoostServer + let engine = OpEngineClient::>::rpc_client::( + self.l2, + self.l2_jwt, + ); + + let l1_provider = RootProvider::new_http(self.l1_rpc); + + Ok(OpEngineClient { engine, l1_provider, cfg: self.cfg, rollup_boost: rollup_boost_server }) + } +} + +#[async_trait] +impl EngineClient for OpEngineClient +where + L1Provider: Provider, + L2Provider: Provider, +{ + fn cfg(&self) -> &RollupConfig { + self.cfg.as_ref() + } + + fn get_l1_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse> { + self.l1_provider.get_block(block) + } + + fn get_l2_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse> { + self.engine.get_block(block) + } + + fn get_proof( + &self, + address: Address, + keys: Vec, + ) -> RpcWithBlock<(Address, Vec), EIP1186AccountProofResponse> { + self.engine.get_proof(address, keys) + } + + async fn new_payload_v1(&self, payload: ExecutionPayloadV1) -> TransportResult { + self.engine.new_payload_v1(payload).await + } + + async fn l2_block_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result>, EngineClientError> { + Ok(self.engine.get_block_by_number(numtag).full().await?) 
+ } + + async fn l2_block_info_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result, EngineClientError> { + let block = self.engine.get_block_by_number(numtag).full().await?; + let Some(block) = block else { + return Ok(None); + }; + Ok(Some(L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &self.cfg.genesis)?)) + } +} + +#[async_trait::async_trait] +impl OpEngineApi> + for OpEngineClient +where + L1Provider: Provider, + L2Provider: Provider, +{ + async fn new_payload_v2( + &self, + payload: ExecutionPayloadInputV2, + ) -> TransportResult { + let call = >>::new_payload_v2( + &self.engine, + payload, + ); + + record_call_time(call, Metrics::NEW_PAYLOAD_METHOD).await + } + + async fn new_payload_v3( + &self, + payload: ExecutionPayloadV3, + parent_beacon_block_root: B256, + ) -> TransportResult { + let call = self.rollup_boost.new_payload_v3(payload, vec![], parent_beacon_block_root); + + record_call_time(call, Metrics::NEW_PAYLOAD_METHOD) + .await + .map_err(|err| RollupBoostServerError::from(err).into()) + } + + async fn new_payload_v4( + &self, + payload: OpExecutionPayloadV4, + parent_beacon_block_root: B256, + ) -> TransportResult { + let call = self.rollup_boost.new_payload_v4( + payload.clone(), + vec![], + parent_beacon_block_root, + vec![], + ); + + record_call_time(call, Metrics::NEW_PAYLOAD_METHOD) + .await + .map_err(|err| RollupBoostServerError::from(err).into()) + } + + async fn fork_choice_updated_v2( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult { + let call = + >>::fork_choice_updated_v2( + &self.engine, + fork_choice_state, + payload_attributes, + ); + + record_call_time(call, Metrics::FORKCHOICE_UPDATE_METHOD).await + } + + async fn fork_choice_updated_v3( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult { + let call = self.rollup_boost.fork_choice_updated_v3(fork_choice_state, payload_attributes); + + record_call_time(call, 
Metrics::FORKCHOICE_UPDATE_METHOD) + .await + .map_err(|err| RollupBoostServerError::from(err).into()) + } + + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> TransportResult { + let call = >>::get_payload_v2( + &self.engine, + payload_id, + ); + + record_call_time(call, Metrics::GET_PAYLOAD_METHOD).await + } + + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> TransportResult { + let call = self.rollup_boost.get_payload_v3(payload_id); + + record_call_time(call, Metrics::GET_PAYLOAD_METHOD) + .await + .map_err(|err| RollupBoostServerError::from(err).into()) + } + + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> TransportResult { + let call = self.rollup_boost.get_payload_v4(payload_id); + + record_call_time(call, Metrics::GET_PAYLOAD_METHOD) + .await + .map_err(|err| RollupBoostServerError::from(err).into()) + } + + async fn get_payload_bodies_by_hash_v1( + &self, + block_hashes: Vec, + ) -> TransportResult { + >>::get_payload_bodies_by_hash_v1( + &self.engine, + block_hashes, + ) + .await + } + + async fn get_payload_bodies_by_range_v1( + &self, + start: u64, + count: u64, + ) -> TransportResult { + , + >>::get_payload_bodies_by_range_v1(&self.engine, start, count).await + } + + async fn get_client_version_v1( + &self, + client_version: ClientVersionV1, + ) -> TransportResult> { + >>::get_client_version_v1( + &self.engine, + client_version, + ) + .await + } + + async fn signal_superchain_v1( + &self, + recommended: ProtocolVersion, + required: ProtocolVersion, + ) -> TransportResult { + >>::signal_superchain_v1( + &self.engine, + recommended, + required, + ) + .await + } + + async fn exchange_capabilities( + &self, + capabilities: Vec, + ) -> TransportResult> { + >>::exchange_capabilities( + &self.engine, + capabilities, + ) + .await + } +} + +/// Wrapper to record the time taken for a call to the engine API and log the result as a metric. 
+async fn record_call_time( + f: impl Future>, + metric_label: &'static str, +) -> Result { + // Await on the future and track its duration. + let start = Instant::now(); + let result = f.await?; + let duration = start.elapsed(); + + // Record the call duration. + kona_macros::record!( + histogram, + Metrics::ENGINE_METHOD_REQUEST_DURATION, + "method", + metric_label, + duration.as_secs_f64() + ); + Ok(result) +} diff --git a/kona/crates/node/engine/src/kinds.rs b/rust/kona/crates/node/engine/src/kinds.rs similarity index 100% rename from kona/crates/node/engine/src/kinds.rs rename to rust/kona/crates/node/engine/src/kinds.rs diff --git a/rust/kona/crates/node/engine/src/lib.rs b/rust/kona/crates/node/engine/src/lib.rs new file mode 100644 index 00000000000..22b647bb468 --- /dev/null +++ b/rust/kona/crates/node/engine/src/lib.rs @@ -0,0 +1,84 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +//! ## Architecture +//! +//! The engine operates as a task-driven system where operations are queued and executed atomically: +//! +//! ```text +//! ┌─────────────┐ ┌──────────────┐ ┌─────────────┐ +//! │ Engine │◄───┤ Task Queue │◄───┤ Engine │ +//! │ Client │ │ (Priority) │ │ Tasks │ +//! └─────────────┘ └──────────────┘ └─────────────┘ +//! │ │ │ +//! ▼ ▼ ▼ +//! ┌─────────────┐ ┌──────────────┐ ┌─────────────┐ +//! │ Engine API │ │ Engine State │ │ Rollup │ +//! │ (HTTP/JWT) │ │ Updates │ │ Config │ +//! └─────────────┘ └──────────────┘ └─────────────┘ +//! ``` +//! +//! ## Module Organization +//! +//! - **Task Queue** - Core engine task queue and execution logic via [`Engine`] +//! 
- **Client** - HTTP client for Engine API communication via [`EngineClient`] +//! - **State** - Engine state management and synchronization via [`EngineState`] +//! - **Versions** - Engine API version selection via [`EngineForkchoiceVersion`], +//! [`EngineNewPayloadVersion`], [`EngineGetPayloadVersion`] +//! - **Attributes** - Payload attribute validation via [`AttributesMatch`] +//! - **Kinds** - Engine client type identification via [`EngineKind`] +//! - **Query** - Engine query interface via [`EngineQueries`] +//! - **Metrics** - Optional Prometheus metrics collection via [`Metrics`] + +#[macro_use] +extern crate tracing; + +mod task_queue; +pub use task_queue::{ + BuildTask, BuildTaskError, ConsolidateInput, ConsolidateTask, ConsolidateTaskError, Engine, + EngineBuildError, EngineResetError, EngineTask, EngineTaskError, EngineTaskErrorSeverity, + EngineTaskErrors, EngineTaskExt, FinalizeTask, FinalizeTaskError, InsertTask, InsertTaskError, + SealTask, SealTaskError, SynchronizeTask, SynchronizeTaskError, +}; + +mod attributes; +pub use attributes::{AttributesMatch, AttributesMismatch}; + +mod client; +pub use client::{ + EngineClient, EngineClientBuilder, EngineClientBuilderError, EngineClientError, + HyperAuthClient, OpEngineClient, +}; + +mod rollup_boost; +pub use rollup_boost::{ + FlashblocksClientArgs, FlashblocksWebsocketConfig, RollupBoostServer, RollupBoostServerArgs, + RollupBoostServerError, +}; + +mod versions; +pub use versions::{EngineForkchoiceVersion, EngineGetPayloadVersion, EngineNewPayloadVersion}; + +mod state; +pub use state::{EngineState, EngineSyncState, EngineSyncStateUpdate}; + +mod kinds; +pub use kinds::EngineKind; + +mod query; +pub use query::{EngineQueries, EngineQueriesError, EngineQuerySender}; + +mod metrics; +pub use metrics::Metrics; + +mod sync; +pub use sync::{L2ForkchoiceState, SyncStartError, find_starting_forkchoice}; + +#[cfg(any(test, feature = "test-utils"))] +/// Utilities that are useful when creating unit tests 
using structs within this library. +pub mod test_utils; diff --git a/kona/crates/node/engine/src/metrics/mod.rs b/rust/kona/crates/node/engine/src/metrics/mod.rs similarity index 100% rename from kona/crates/node/engine/src/metrics/mod.rs rename to rust/kona/crates/node/engine/src/metrics/mod.rs diff --git a/kona/crates/node/engine/src/query.rs b/rust/kona/crates/node/engine/src/query.rs similarity index 98% rename from kona/crates/node/engine/src/query.rs rename to rust/kona/crates/node/engine/src/query.rs index e30c36ae314..aa879d4373c 100644 --- a/kona/crates/node/engine/src/query.rs +++ b/rust/kona/crates/node/engine/src/query.rs @@ -34,7 +34,7 @@ pub enum EngineQueries { OutputAtBlock { /// The block number or tag to retrieve the output for. block: BlockNumberOrTag, - /// Response channel for (block_info, output_root, engine_state). + /// Response channel for (`block_info`, `output_root`, `engine_state`). sender: Sender<(L2BlockInfo, OutputRoot, EngineState)>, }, /// Subscribe to engine state updates via a watch channel receiver. diff --git a/kona/crates/node/engine/src/rollup_boost.rs b/rust/kona/crates/node/engine/src/rollup_boost.rs similarity index 92% rename from kona/crates/node/engine/src/rollup_boost.rs rename to rust/kona/crates/node/engine/src/rollup_boost.rs index 57add9b53a0..9dc21079839 100644 --- a/kona/crates/node/engine/src/rollup_boost.rs +++ b/rust/kona/crates/node/engine/src/rollup_boost.rs @@ -28,20 +28,20 @@ pub struct RollupBoostServerArgs { /// Configuration for the Flashblocks client. 
#[derive(Clone, Debug)] pub struct FlashblocksClientArgs { - /// Flashblocks Builder WebSocket URL + /// Flashblocks Builder `WebSocket` URL pub flashblocks_builder_url: Url, - /// Flashblocks WebSocket host for outbound connections + /// Flashblocks `WebSocket` host for outbound connections pub flashblocks_host: String, - /// Flashblocks WebSocket port for outbound connections + /// Flashblocks `WebSocket` port for outbound connections pub flashblocks_port: u16, /// Websocket connection configuration pub flashblocks_ws_config: FlashblocksWebsocketConfig, } -/// Configuration for the Flashblocks WebSocket connection. +/// Configuration for the Flashblocks `WebSocket` connection. #[derive(Debug, Clone, Copy)] pub struct FlashblocksWebsocketConfig { /// Minimum time for exponential backoff for timeout if builder disconnected diff --git a/rust/kona/crates/node/engine/src/state/core.rs b/rust/kona/crates/node/engine/src/state/core.rs new file mode 100644 index 00000000000..70a95aa59b2 --- /dev/null +++ b/rust/kona/crates/node/engine/src/state/core.rs @@ -0,0 +1,258 @@ +//! The internal state of the engine controller. + +use crate::Metrics; +use alloy_rpc_types_engine::ForkchoiceState; +use kona_protocol::L2BlockInfo; +use serde::{Deserialize, Serialize}; + +/// The synchronization state of the execution layer across different safety levels. +/// +/// Tracks block progression through various stages of verification and finalization, +/// from initial unsafe blocks received via P2P to fully finalized blocks derived from +/// finalized L1 data. Each level represents increasing confidence in the block's validity. +/// +/// # Safety Levels +/// +/// The state tracks blocks at different safety levels, listed from least to most safe: +/// +/// 1. **Unsafe** - Most recent blocks from P2P network (unverified) +/// 2. **Cross-unsafe** - Unsafe blocks with cross-layer verification +/// 3. **Local-safe** - Derived from L1 data, completed span-batch +/// 4. 
**Safe** - Cross-verified with safe L1 dependencies +/// 5. **Finalized** - Derived from finalized L1 data only +/// +/// See the [OP Stack specifications](https://specs.optimism.io) for detailed safety definitions. +#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] +pub struct EngineSyncState { + /// Most recent block found on the P2P network (lowest safety level). + unsafe_head: L2BlockInfo, + /// Cross-verified unsafe head (equal to `unsafe_head` pre-interop). + cross_unsafe_head: L2BlockInfo, + /// Derived from L1 data as a completed span-batch, but not yet cross-verified. + local_safe_head: L2BlockInfo, + /// Derived from L1 data and cross-verified to have safe L1 dependencies. + safe_head: L2BlockInfo, + /// Derived from finalized L1 data with only finalized dependencies (highest safety level). + finalized_head: L2BlockInfo, +} + +impl EngineSyncState { + /// Returns the current unsafe head. + pub const fn unsafe_head(&self) -> L2BlockInfo { + self.unsafe_head + } + + /// Returns the current cross-verified unsafe head. + pub const fn cross_unsafe_head(&self) -> L2BlockInfo { + self.cross_unsafe_head + } + + /// Returns the current local safe head. + pub const fn local_safe_head(&self) -> L2BlockInfo { + self.local_safe_head + } + + /// Returns the current safe head. + pub const fn safe_head(&self) -> L2BlockInfo { + self.safe_head + } + + /// Returns the current finalized head. + pub const fn finalized_head(&self) -> L2BlockInfo { + self.finalized_head + } + + /// Creates a `ForkchoiceState` + /// + /// - `head_block` = `unsafe_head` + /// - `safe_block` = `safe_head` + /// - `finalized_block` = `finalized_head` + /// + /// If the block info is not yet available, the default values are used. 
+ pub const fn create_forkchoice_state(&self) -> ForkchoiceState { + ForkchoiceState { + head_block_hash: self.unsafe_head.hash(), + safe_block_hash: self.safe_head.hash(), + finalized_block_hash: self.finalized_head.hash(), + } + } + + /// Applies the update to the provided sync state, using the current state values if the update + /// is not specified. Returns the new sync state. + pub fn apply_update(self, sync_state_update: EngineSyncStateUpdate) -> Self { + if let Some(unsafe_head) = sync_state_update.unsafe_head { + Self::update_block_label_metric( + Metrics::UNSAFE_BLOCK_LABEL, + unsafe_head.block_info.number, + ); + } + if let Some(cross_unsafe_head) = sync_state_update.cross_unsafe_head { + Self::update_block_label_metric( + Metrics::CROSS_UNSAFE_BLOCK_LABEL, + cross_unsafe_head.block_info.number, + ); + } + if let Some(local_safe_head) = sync_state_update.local_safe_head { + Self::update_block_label_metric( + Metrics::LOCAL_SAFE_BLOCK_LABEL, + local_safe_head.block_info.number, + ); + } + if let Some(safe_head) = sync_state_update.safe_head { + Self::update_block_label_metric(Metrics::SAFE_BLOCK_LABEL, safe_head.block_info.number); + } + if let Some(finalized_head) = sync_state_update.finalized_head { + Self::update_block_label_metric( + Metrics::FINALIZED_BLOCK_LABEL, + finalized_head.block_info.number, + ); + } + + Self { + unsafe_head: sync_state_update.unsafe_head.unwrap_or(self.unsafe_head), + cross_unsafe_head: sync_state_update + .cross_unsafe_head + .unwrap_or(self.cross_unsafe_head), + local_safe_head: sync_state_update.local_safe_head.unwrap_or(self.local_safe_head), + safe_head: sync_state_update.safe_head.unwrap_or(self.safe_head), + finalized_head: sync_state_update.finalized_head.unwrap_or(self.finalized_head), + } + } + + /// Updates a block label metric, keyed by the label. 
+ #[inline] + fn update_block_label_metric(label: &'static str, number: u64) { + kona_macros::set!(gauge, Metrics::BLOCK_LABELS, "label", label, number as f64); + } +} + +/// Specifies how to update the sync state of the engine. +#[derive(Default, Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct EngineSyncStateUpdate { + /// Most recent block found on the p2p network + pub unsafe_head: Option, + /// Cross-verified unsafe head, always equal to the unsafe head pre-interop + pub cross_unsafe_head: Option, + /// Derived from L1, and known to be a completed span-batch, + /// but not cross-verified yet. + pub local_safe_head: Option, + /// Derived from L1 and cross-verified to have cross-safe dependencies. + pub safe_head: Option, + /// Derived from finalized L1 data, + /// and cross-verified to only have finalized dependencies. + pub finalized_head: Option, +} + +/// The chain state viewed by the engine controller. +#[derive(Default, Debug, Copy, Clone, PartialEq, Eq)] +pub struct EngineState { + /// The sync state of the engine. + pub sync_state: EngineSyncState, + + /// Whether or not the EL has finished syncing. + pub el_sync_finished: bool, + + /// Track when the rollup node changes the forkchoice to restore previous + /// known unsafe chain. e.g. Unsafe Reorg caused by Invalid span batch. + /// This update does not retry except engine returns non-input error + /// because engine may forgot backupUnsafeHead or backupUnsafeHead is not part + /// of the chain. + pub need_fcu_call_backup_unsafe_reorg: bool, +} + +impl EngineState { + /// Returns if consolidation is needed. + /// + /// [Consolidation] is only performed by a rollup node when the unsafe head + /// is ahead of the safe head. When the two are equal, consolidation isn't + /// required and the [`crate::BuildTask`] can be used to build the block. 
+ /// + /// [Consolidation]: https://specs.optimism.io/protocol/derivation.html#l1-consolidation-payload-attributes-matching + pub fn needs_consolidation(&self) -> bool { + self.sync_state.safe_head() != self.sync_state.unsafe_head() + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::Metrics; + use kona_protocol::BlockInfo; + use metrics_exporter_prometheus::PrometheusBuilder; + use rstest::rstest; + + impl EngineState { + /// Set the unsafe head. + pub fn set_unsafe_head(&mut self, unsafe_head: L2BlockInfo) { + self.sync_state.apply_update(EngineSyncStateUpdate { + unsafe_head: Some(unsafe_head), + ..Default::default() + }); + } + + /// Set the cross-verified unsafe head. + pub fn set_cross_unsafe_head(&mut self, cross_unsafe_head: L2BlockInfo) { + self.sync_state.apply_update(EngineSyncStateUpdate { + cross_unsafe_head: Some(cross_unsafe_head), + ..Default::default() + }); + } + + /// Set the local safe head. + pub fn set_local_safe_head(&mut self, local_safe_head: L2BlockInfo) { + self.sync_state.apply_update(EngineSyncStateUpdate { + local_safe_head: Some(local_safe_head), + ..Default::default() + }); + } + + /// Set the safe head. + pub fn set_safe_head(&mut self, safe_head: L2BlockInfo) { + self.sync_state.apply_update(EngineSyncStateUpdate { + safe_head: Some(safe_head), + ..Default::default() + }); + } + + /// Set the finalized head. 
+ pub fn set_finalized_head(&mut self, finalized_head: L2BlockInfo) { + self.sync_state.apply_update(EngineSyncStateUpdate { + finalized_head: Some(finalized_head), + ..Default::default() + }); + } + } + + #[rstest] + #[case::set_unsafe(EngineState::set_unsafe_head, Metrics::UNSAFE_BLOCK_LABEL, 1)] + #[case::set_cross_unsafe( + EngineState::set_cross_unsafe_head, + Metrics::CROSS_UNSAFE_BLOCK_LABEL, + 2 + )] + #[case::set_local_safe(EngineState::set_local_safe_head, Metrics::LOCAL_SAFE_BLOCK_LABEL, 3)] + #[case::set_safe_head(EngineState::set_safe_head, Metrics::SAFE_BLOCK_LABEL, 4)] + #[case::set_finalized_head(EngineState::set_finalized_head, Metrics::FINALIZED_BLOCK_LABEL, 5)] + #[cfg(feature = "metrics")] + fn test_chain_label_metrics( + #[case] set_fn: impl Fn(&mut EngineState, L2BlockInfo), + #[case] label_name: &str, + #[case] number: u64, + ) { + let handle = PrometheusBuilder::new().install_recorder().unwrap(); + crate::Metrics::init(); + + let mut state = EngineState::default(); + set_fn( + &mut state, + L2BlockInfo { + block_info: BlockInfo { number, ..Default::default() }, + ..Default::default() + }, + ); + + assert!(handle.render().contains( + format!("kona_node_block_labels{{label=\"{label_name}\"}} {number}").as_str() + )); + } +} diff --git a/kona/crates/node/engine/src/state/mod.rs b/rust/kona/crates/node/engine/src/state/mod.rs similarity index 100% rename from kona/crates/node/engine/src/state/mod.rs rename to rust/kona/crates/node/engine/src/state/mod.rs diff --git a/kona/crates/node/engine/src/sync/error.rs b/rust/kona/crates/node/engine/src/sync/error.rs similarity index 100% rename from kona/crates/node/engine/src/sync/error.rs rename to rust/kona/crates/node/engine/src/sync/error.rs diff --git a/kona/crates/node/engine/src/sync/forkchoice.rs b/rust/kona/crates/node/engine/src/sync/forkchoice.rs similarity index 97% rename from kona/crates/node/engine/src/sync/forkchoice.rs rename to rust/kona/crates/node/engine/src/sync/forkchoice.rs index 
c1c604d46cb..002478e4320 100644 --- a/kona/crates/node/engine/src/sync/forkchoice.rs +++ b/rust/kona/crates/node/engine/src/sync/forkchoice.rs @@ -9,8 +9,8 @@ use kona_protocol::L2BlockInfo; use op_alloy_network::Optimism; use std::fmt::Display; -/// An unsafe, safe, and finalized [L2BlockInfo] returned by the [crate::find_starting_forkchoice] -/// function. +/// An unsafe, safe, and finalized [`L2BlockInfo`] returned by the +/// [`crate::find_starting_forkchoice`] function. #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub struct L2ForkchoiceState { /// The unsafe L2 block. diff --git a/rust/kona/crates/node/engine/src/sync/mod.rs b/rust/kona/crates/node/engine/src/sync/mod.rs new file mode 100644 index 00000000000..90f7d1a83eb --- /dev/null +++ b/rust/kona/crates/node/engine/src/sync/mod.rs @@ -0,0 +1,148 @@ +//! Sync start algorithm for the OP Stack rollup node. + +use kona_genesis::RollupConfig; +use kona_protocol::L2BlockInfo; + +mod forkchoice; +pub use forkchoice::L2ForkchoiceState; + +mod error; +pub use error::SyncStartError; + +use tracing::info; + +use crate::EngineClient; + +/// Searches for the latest [`L2ForkchoiceState`] that we can use to start the sync process with. +/// +/// - The *unsafe L2 block*: This is the highest L2 block whose L1 origin is a *plausible* +/// extension of the canonical L1 chain (as known to the rollup node). +/// - The *safe L2 block*: This is the highest L2 block whose epoch's sequencing window is +/// complete within the canonical L1 chain (as known to the rollup node). +/// - The *finalized L2 block*: This is the L2 block which is known to be fully derived from +/// finalized L1 block data. +/// +/// Plausible: meaning that the blockhash of the L2 block's L1 origin +/// (as reported in the L1 Attributes deposit within the L2 block) is not canonical at another +/// height in the L1 chain, and the same holds for all its ancestors. 
+pub async fn find_starting_forkchoice( + cfg: &RollupConfig, + engine_client: &EngineClient_, +) -> Result { + let mut current_fc = L2ForkchoiceState::current(cfg, engine_client).await?; + info!( + target: "sync_start", + unsafe = %current_fc.un_safe.block_info.number, + safe = %current_fc.safe.block_info.number, + finalized = %current_fc.finalized.block_info.number, + "Loaded current L2 EL forkchoice state" + ); + + // Search for the highest `unsafe` block, relative to the initial `unsafe` block's L1 origin, + loop { + let l1_origin = + engine_client.get_l1_block(current_fc.un_safe.l1_origin.hash.into()).await?; + info!( + target: "sync_start", + l1_origin = %current_fc.un_safe.l1_origin.number, + l2_unsafe = %current_fc.un_safe.block_info.number, + "Searching for L2 unsafe block with canonical L1 origin" + ); + + match l1_origin { + Some(_) => { + // Unsafe block has existing L1 origin. Continue with this head. + info!( + target: "sync_start", + l2_unsafe = %current_fc.un_safe.block_info.number, + "Found L2 unsafe block with canonical L1 origin" + ); + break; + } + None => { + let l2_parent_hash = current_fc.un_safe.block_info.parent_hash.into(); + let l2_parent = engine_client + .get_l2_block(l2_parent_hash) + .full() + .await? + .ok_or(SyncStartError::BlockNotFound(l2_parent_hash))?; + + current_fc.un_safe = + L2BlockInfo::from_block_and_genesis(&l2_parent.into_consensus(), &cfg.genesis)?; + } + } + } + + // Search for the highest `safe` block that's L1 origin is at least older than the sequencing + // window, relative to the L1 origin of the `unsafe` block. 
+ let mut safe_cursor = current_fc.un_safe; + loop { + info!( + target: "sync_start", + l1_origin = %safe_cursor.l1_origin.number, + l2_safe = %safe_cursor.block_info.number, + "Searching for L2 safe block beyond sequencing window" + ); + + let is_behind_sequence_window = + current_fc.un_safe.l1_origin.number.saturating_sub(cfg.seq_window_size) > + safe_cursor.l1_origin.number; + let is_finalized = safe_cursor.block_info.hash == current_fc.finalized.block_info.hash; + let is_genesis = safe_cursor.block_info.hash == cfg.genesis.l2.hash; + if is_behind_sequence_window || is_finalized || is_genesis { + info!( + target: "sync_start", + l2_safe = %safe_cursor.block_info.number, + is_behind_sequence_window, + is_finalized, + is_genesis, + "Found suitable L2 safe block" + ); + current_fc.safe = safe_cursor; + break; + } + let block = engine_client + .get_l2_block(safe_cursor.block_info.parent_hash.into()) + .full() + .await? + .ok_or(SyncStartError::BlockNotFound(safe_cursor.block_info.parent_hash.into()))?; + safe_cursor = L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &cfg.genesis)?; + } + + // Leave the finalized block as-is, and return the current forkchoice. 
+ Ok(current_fc) +} + +#[cfg(test)] +mod test { + use alloy_provider::Network; + use alloy_rpc_types_eth::Block; + use kona_protocol::L2BlockInfo; + use kona_registry::ROLLUP_CONFIGS; + use op_alloy_network::Optimism; + + const OP_SEPOLIA_CHAIN_ID: u64 = 11155420; + const OP_SEPOLIA_GENESIS_RPC_RESPONSE: &str = "{\"hash\":\"0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d\",\"parentHash\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"sha3Uncles\":\"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347\",\"miner\":\"0x4200000000000000000000000000000000000011\",\"stateRoot\":\"0x06787a17a3ed87c339a39dbbeeb311578a0c83ed29daa2db95da62b28efce8a9\",\"transactionsRoot\":\"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421\",\"receiptsRoot\":\"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421\",\"logsBloom\":\"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\",\"difficulty\":\"0x0\",\"number\":\"0x0\",\"gasLimit\":\"0x1c9c380\",\"gasUsed\":\"0x0\",\"timestamp\":\"0x64d6dbac\",\"extraData\":\"0x424544524f434b\",\"mixHash\":\"0x0000000000000000000000000000000000000000000000000000000000000000\",\"nonce\":\"0x0000000000000000\",\"baseFeePerGas\":\"0x3b9aca00\",\"size\":\"0x209\",\"uncles\":[],\"transactions\":[]}"; + + /// Sanity regression test - `alloy_rpc_types`' `Block::into_consensus` failed to saturate the + /// header of the `alloy_consensus::Header` type on an old version. 
This test covers the + /// conversion to ensure an OP genesis block's conversion to the consensus type works for + /// the sake of `L2BlockInfo::from_block_and_genesis`. + #[tokio::test] + async fn test_genesis_block_hash() { + let rollup_config = ROLLUP_CONFIGS.get(&OP_SEPOLIA_CHAIN_ID).unwrap(); + let genesis_block: Block<::TransactionResponse> = + serde_json::from_str(OP_SEPOLIA_GENESIS_RPC_RESPONSE).unwrap(); + + let rpc_reported_hash = genesis_block.header.hash; + let consensus_block = genesis_block.into_consensus(); + + // Check that the genesis block's RPC-reported hash is equal to the manually computed hash. + assert_eq!(rpc_reported_hash, consensus_block.hash_slow()); + + // Convert to `L2BlockInfo` and check the same. + let l2_block_info = + L2BlockInfo::from_block_and_genesis(&consensus_block, &rollup_config.genesis).unwrap(); + assert_eq!(rpc_reported_hash, l2_block_info.block_info.hash); + } +} diff --git a/rust/kona/crates/node/engine/src/task_queue/core.rs b/rust/kona/crates/node/engine/src/task_queue/core.rs new file mode 100644 index 00000000000..9ebad4f9cb9 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/core.rs @@ -0,0 +1,182 @@ +//! The [`Engine`] is a task queue that receives and executes [`EngineTask`]s. + +use super::EngineTaskExt; +use crate::{ + EngineClient, EngineState, EngineSyncStateUpdate, EngineTask, EngineTaskError, + EngineTaskErrorSeverity, Metrics, SyncStartError, SynchronizeTask, SynchronizeTaskError, + find_starting_forkchoice, task_queue::EngineTaskErrors, +}; +use alloy_rpc_types_eth::Transaction; +use kona_genesis::{RollupConfig, SystemConfig}; +use kona_protocol::{BlockInfo, L2BlockInfo, OpBlockConversionError, to_system_config}; +use op_alloy_consensus::OpTxEnvelope; +use std::{collections::BinaryHeap, sync::Arc}; +use thiserror::Error; +use tokio::sync::watch::Sender; + +/// The [`Engine`] task queue. 
+/// +/// Tasks of a shared [`EngineTask`] variant are processed in FIFO order, providing synchronization +/// guarantees for the L2 execution layer and other actors. A priority queue, ordered by +/// [`EngineTask`]'s [`Ord`] implementation, is used to prioritize tasks executed by the +/// [`Engine::drain`] method. +/// +/// Because tasks are executed one at a time, they are considered to be atomic operations over the +/// [`EngineState`], and are given exclusive access to the engine state during execution. +/// +/// Tasks within the queue are also considered fallible. If they fail with a temporary error, +/// they are not popped from the queue, the error is returned, and they are retried on the +/// next call to [`Engine::drain`]. +#[derive(Debug)] +pub struct Engine { + /// The state of the engine. + state: EngineState, + /// A sender that can be used to notify the engine actor of state changes. + state_sender: Sender, + /// A sender that can be used to notify the engine actor of task queue length changes. + task_queue_length: Sender, + /// The task queue. + tasks: BinaryHeap>, +} + +impl Engine { + /// Creates a new [`Engine`] with an empty task queue and the passed initial [`EngineState`]. + pub fn new( + initial_state: EngineState, + state_sender: Sender, + task_queue_length: Sender, + ) -> Self { + Self { state: initial_state, state_sender, task_queue_length, tasks: BinaryHeap::default() } + } + + /// Returns a reference to the inner [`EngineState`]. + pub const fn state(&self) -> &EngineState { + &self.state + } + + /// Returns a receiver that can be used to listen to engine state updates. + pub fn state_subscribe(&self) -> tokio::sync::watch::Receiver { + self.state_sender.subscribe() + } + + /// Returns a receiver that can be used to listen to engine queue length updates. + pub fn queue_length_subscribe(&self) -> tokio::sync::watch::Receiver { + self.task_queue_length.subscribe() + } + + /// Enqueues a new [`EngineTask`] for execution. 
+ /// Updates the queue length and notifies listeners of the change. + pub fn enqueue(&mut self, task: EngineTask) { + self.tasks.push(task); + self.task_queue_length.send_replace(self.tasks.len()); + } + + /// Resets the engine by finding a plausible sync starting point via + /// [`find_starting_forkchoice`]. The state will be updated to the starting point, and a + /// forkchoice update will be enqueued in order to reorg the execution layer. + pub async fn reset( + &mut self, + client: Arc, + config: Arc, + ) -> Result<(L2BlockInfo, BlockInfo, SystemConfig), EngineResetError> { + // Clear any outstanding tasks to prepare for the reset. + self.clear(); + + let mut start = find_starting_forkchoice(&config, client.as_ref()).await?; + + // Retry to synchronize the engine until we succeeds or a critical error occurs. + while let Err(err) = SynchronizeTask::new( + client.clone(), + config.clone(), + EngineSyncStateUpdate { + unsafe_head: Some(start.un_safe), + cross_unsafe_head: Some(start.un_safe), + local_safe_head: Some(start.safe), + safe_head: Some(start.safe), + finalized_head: Some(start.finalized), + }, + ) + .execute(&mut self.state) + .await + { + match err.severity() { + EngineTaskErrorSeverity::Temporary | + EngineTaskErrorSeverity::Flush | + EngineTaskErrorSeverity::Reset => { + warn!(target: "engine", ?err, "Forkchoice update failed during reset. Trying again..."); + start = find_starting_forkchoice(&config, client.as_ref()).await?; + } + EngineTaskErrorSeverity::Critical => { + return Err(EngineResetError::Forkchoice(err)); + } + } + } + + // Find the new safe head's L1 origin and SystemConfig. + let origin_block = start + .safe + .l1_origin + .number + .saturating_sub(config.channel_timeout(start.safe.block_info.timestamp)); + let l1_origin_info: BlockInfo = client + .get_l1_block(origin_block.into()) + .await + .map_err(SyncStartError::RpcError)? + .ok_or(SyncStartError::BlockNotFound(origin_block.into()))? 
+ .into_consensus() + .into(); + let l2_safe_block = client + .get_l2_block(start.safe.block_info.hash.into()) + .full() + .await + .map_err(SyncStartError::RpcError)? + .ok_or(SyncStartError::BlockNotFound(origin_block.into()))? + .into_consensus() + .map_transactions(|t| as Clone>::clone(&t).into_inner()); + let system_config = to_system_config(&l2_safe_block, &config)?; + + kona_macros::inc!(counter, Metrics::ENGINE_RESET_COUNT); + + Ok((start.safe, l1_origin_info, system_config)) + } + + /// Clears the task queue. + pub fn clear(&mut self) { + self.tasks.clear(); + } + + /// Attempts to drain the queue by executing all [`EngineTask`]s in-order. If any task returns + /// an error along the way, it is not popped from the queue (in case it must be retried) and + /// the error is returned. + pub async fn drain(&mut self) -> Result<(), EngineTaskErrors> { + // Drain tasks in order of priority, halting on errors for a retry to be attempted. + while let Some(task) = self.tasks.peek() { + // Execute the task + task.execute(&mut self.state).await?; + + // Update the state and notify the engine actor. + self.state_sender.send_replace(self.state); + + // Pop the task from the queue now that it's been executed. + self.tasks.pop(); + + self.task_queue_length.send_replace(self.tasks.len()); + } + + Ok(()) + } +} + +/// An error occurred while attempting to reset the [`Engine`]. +#[derive(Debug, Error)] +pub enum EngineResetError { + /// An error that occurred while updating the forkchoice state. + #[error(transparent)] + Forkchoice(#[from] SynchronizeTaskError), + /// An error occurred while traversing the L1 for the sync starting point. + #[error(transparent)] + SyncStart(#[from] SyncStartError), + /// An error occurred while constructing the `SystemConfig` for the new safe head. 
+ #[error(transparent)] + SystemConfigConversion(#[from] OpBlockConversionError), +} diff --git a/kona/crates/node/engine/src/task_queue/mod.rs b/rust/kona/crates/node/engine/src/task_queue/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/mod.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/build/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/build/error.rs new file mode 100644 index 00000000000..0448da7a26a --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/build/error.rs @@ -0,0 +1,72 @@ +//! Contains error types for the [`crate::SynchronizeTask`]. + +use crate::{EngineTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; +use alloy_rpc_types_engine::{PayloadId, PayloadStatusEnum}; +use alloy_transport::{RpcError, TransportErrorKind}; +use thiserror::Error; +use tokio::sync::mpsc; + +/// An error that occurs during payload building within the engine. +/// +/// This error type is specific to the block building process and represents failures +/// that can occur during the automatic forkchoice update phase of [`BuildTask`]. +/// Unlike [`BuildTaskError`], which handles higher-level build orchestration errors, +/// `EngineBuildError` focuses on low-level engine API communication failures. +/// +/// ## Error Categories +/// +/// - **State Validation**: Errors related to inconsistent chain state +/// - **Engine Communication**: RPC failures during forkchoice updates +/// - **Payload Validation**: Invalid payload status responses from the execution layer +/// +/// [`BuildTask`]: crate::BuildTask +#[derive(Debug, Error)] +pub enum EngineBuildError { + /// The finalized head is ahead of the unsafe head. + #[error("Finalized head is ahead of unsafe head")] + FinalizedAheadOfUnsafe(u64, u64), + /// The forkchoice update call to the engine api failed. + #[error("Failed to build payload attributes in the engine. 
Forkchoice RPC error: {0}")] + AttributesInsertionFailed(#[from] RpcError), + /// The inserted payload is invalid. + #[error("The inserted payload is invalid: {0}")] + InvalidPayload(String), + /// The inserted payload status is unexpected. + #[error("The inserted payload status is unexpected: {0}")] + UnexpectedPayloadStatus(PayloadStatusEnum), + /// The payload ID is missing. + #[error("The inserted payload ID is missing")] + MissingPayloadId, + /// The engine is syncing. + #[error("The engine is syncing")] + EngineSyncing, +} + +/// An error that occurs when running the [`crate::BuildTask`]. +#[derive(Debug, Error)] +pub enum BuildTaskError { + /// An error occurred when building the payload attributes in the engine. + #[error("An error occurred when building the payload attributes to the engine.")] + EngineBuildError(EngineBuildError), + /// Error sending the built payload envelope. + #[error(transparent)] + MpscSend(#[from] Box>), +} + +impl EngineTaskError for BuildTaskError { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::EngineBuildError(EngineBuildError::FinalizedAheadOfUnsafe(_, _)) => { + EngineTaskErrorSeverity::Critical + } + Self::EngineBuildError( + EngineBuildError::AttributesInsertionFailed(_) | + EngineBuildError::InvalidPayload(_) | + EngineBuildError::UnexpectedPayloadStatus(_) | + EngineBuildError::MissingPayloadId | + EngineBuildError::EngineSyncing, + ) => EngineTaskErrorSeverity::Temporary, + Self::MpscSend(_) => EngineTaskErrorSeverity::Critical, + } + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/build/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/build/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/build/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/build/mod.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/build/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/build/task.rs new file mode 100644 
index 00000000000..ab634cbfada --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/build/task.rs @@ -0,0 +1,186 @@ +//! A task for building a new block and importing it. +use super::BuildTaskError; +use crate::{ + EngineClient, EngineForkchoiceVersion, EngineState, EngineTaskExt, + state::EngineSyncStateUpdate, task_queue::tasks::build::error::EngineBuildError, +}; +use alloy_rpc_types_engine::{PayloadId, PayloadStatusEnum}; +use async_trait::async_trait; +use derive_more::Constructor; +use kona_genesis::RollupConfig; +use kona_protocol::OpAttributesWithParent; +use std::{sync::Arc, time::Instant}; +use tokio::sync::mpsc; + +/// Task for building new blocks with automatic forkchoice synchronization. +/// +/// The [`BuildTask`] only performs the `engine_forkchoiceUpdated` call within the block building +/// workflow. It makes this call with the provided attributes to initiate block building on the +/// execution layer and, if successful, sends the new [`PayloadId`] via the configured sender. +/// +/// ## Error Handling +/// +/// The task uses [`EngineBuildError`] for build-specific failures during the forkchoice update +/// phase. +/// +/// [`EngineBuildError`]: crate::EngineBuildError +#[derive(Debug, Clone, Constructor)] +pub struct BuildTask { + /// The engine API client. + pub engine: Arc, + /// The [`RollupConfig`]. + pub cfg: Arc, + /// The [`OpAttributesWithParent`] to instruct the execution layer to build. + pub attributes: OpAttributesWithParent, + /// The optional sender through which [`PayloadId`] will be sent after the + /// block build has been started. + pub payload_id_tx: Option>, +} + +impl BuildTask { + /// Validates the provided [`PayloadStatusEnum`] according to the rules listed below. 
+ /// + /// ## Observed [`PayloadStatusEnum`] Variants + /// - `VALID`: Returns Ok(()) - forkchoice update was successful + /// - `INVALID`: Returns error with validation details + /// - `SYNCING`: Returns temporary error - EL is syncing + /// - Other: Returns error for unexpected status codes + fn validate_forkchoice_status(status: PayloadStatusEnum) -> Result<(), BuildTaskError> { + match status { + PayloadStatusEnum::Valid => Ok(()), + PayloadStatusEnum::Invalid { validation_error } => { + error!(target: "engine_builder", "Forkchoice update failed: {}", validation_error); + Err(BuildTaskError::EngineBuildError(EngineBuildError::InvalidPayload( + validation_error, + ))) + } + PayloadStatusEnum::Syncing => { + warn!(target: "engine_builder", "Forkchoice update failed temporarily: EL is syncing"); + Err(BuildTaskError::EngineBuildError(EngineBuildError::EngineSyncing)) + } + PayloadStatusEnum::Accepted => { + // Other codes are never returned by `engine_forkchoiceUpdate` + Err(BuildTaskError::EngineBuildError(EngineBuildError::UnexpectedPayloadStatus( + status, + ))) + } + } + } + + /// Starts the block building process by sending an initial `engine_forkchoiceUpdate` call with + /// the payload attributes to build. + /// + /// ### Success (`VALID`) + /// If the build is successful, the [`PayloadId`] is returned for sealing and the successful + /// forkchoice update identifier is relayed via the stored `payload_id_tx` sender. + /// + /// ### Failure (`INVALID`) + /// If the forkchoice update fails, the [`BuildTaskError`]. + /// + /// ### Syncing (`SYNCING`) + /// If the EL is syncing, the payload attributes are buffered and the function returns early. + /// This is a temporary state, and the function should be called again later. + /// + /// Note: This is `pub(super)` to allow testing via the `tests` submodule. 
+ pub(super) async fn start_build( + &self, + state: &EngineState, + engine_client: &EngineClient_, + attributes_envelope: OpAttributesWithParent, + ) -> Result { + // Sanity check if the head is behind the finalized head. If it is, this is a critical + // error. + if state.sync_state.unsafe_head().block_info.number < + state.sync_state.finalized_head().block_info.number + { + return Err(BuildTaskError::EngineBuildError( + EngineBuildError::FinalizedAheadOfUnsafe( + state.sync_state.unsafe_head().block_info.number, + state.sync_state.finalized_head().block_info.number, + ), + )); + } + + // When inserting a payload, we advertise the parent's unsafe head as the current unsafe + // head to build on top of. + let new_forkchoice = state + .sync_state + .apply_update(EngineSyncStateUpdate { + unsafe_head: Some(attributes_envelope.parent), + ..Default::default() + }) + .create_forkchoice_state(); + + let forkchoice_version = EngineForkchoiceVersion::from_cfg( + &self.cfg, + attributes_envelope.attributes.payload_attributes.timestamp, + ); + let update = match forkchoice_version { + EngineForkchoiceVersion::V3 => { + engine_client + .fork_choice_updated_v3(new_forkchoice, Some(attributes_envelope.attributes)) + .await + } + EngineForkchoiceVersion::V2 => { + engine_client + .fork_choice_updated_v2(new_forkchoice, Some(attributes_envelope.attributes)) + .await + } + } + .map_err(|e| { + error!(target: "engine_builder", "Forkchoice update failed: {}", e); + BuildTaskError::EngineBuildError(EngineBuildError::AttributesInsertionFailed(e)) + })?; + + Self::validate_forkchoice_status(update.payload_status.status)?; + + debug!( + target: "engine_builder", + unsafe_hash = new_forkchoice.head_block_hash.to_string(), + safe_hash = new_forkchoice.safe_block_hash.to_string(), + finalized_hash = new_forkchoice.finalized_block_hash.to_string(), + "Forkchoice update with attributes successful" + ); + + // Fetch the payload ID from the FCU. 
If no payload ID was returned, something went wrong - + // the block building job on the EL should have been initiated. + update + .payload_id + .ok_or(BuildTaskError::EngineBuildError(EngineBuildError::MissingPayloadId)) + } +} + +#[async_trait] +impl EngineTaskExt for BuildTask { + type Output = PayloadId; + + type Error = BuildTaskError; + + async fn execute(&self, state: &mut EngineState) -> Result { + debug!( + target: "engine_builder", + txs = self.attributes.attributes().transactions.as_ref().map_or(0, |txs| txs.len()), + is_deposits = self.attributes.is_deposits_only(), + "Starting new build job" + ); + + // Start the build by sending an FCU call with the current forkchoice and the input + // payload attributes. + let fcu_start_time = Instant::now(); + let payload_id = self.start_build(state, &self.engine, self.attributes.clone()).await?; + let fcu_duration = fcu_start_time.elapsed(); + + info!( + target: "engine_builder", + fcu_duration = ?fcu_duration, + "block build started" + ); + + // If a channel was provided, send the payload ID to it. + if let Some(tx) = &self.payload_id_tx { + tx.send(payload_id).await.map_err(Box::new)?; + } + + Ok(payload_id) + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs similarity index 92% rename from kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs index f7af1b11af9..ef212a14059 100644 --- a/kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/build/task_test.rs @@ -1,4 +1,4 @@ -//! Tests for BuildTask::execute +//! Tests for `BuildTask::execute` use crate::{ BuildTask, BuildTaskError, EngineBuildError, EngineClient, EngineForkchoiceVersion, @@ -64,7 +64,7 @@ enum TestErr { // Wraps real errors, ignoring details so we can easily match on results. 
async fn wrapped_execute( - task: &mut BuildTask, + task: &BuildTask, state: &mut EngineState, ) -> Result { match task.execute(state).await { @@ -87,7 +87,7 @@ async fn wrapped_execute( #[case::success(Some(PayloadStatusEnum::Valid), true, None)] #[case::missing_id(Some(PayloadStatusEnum::Valid), false, Some(TestErr::MissingPayloadId))] #[case::fcu_fail(None, false, Some(TestErr::AttributesInsertionFailed))] -#[case::fcu_status_fail(Some(PayloadStatusEnum::Invalid{validation_error: "".to_string()}), false, Some(TestErr::InvalidPayload))] +#[case::fcu_status_fail(Some(PayloadStatusEnum::Invalid{validation_error: String::new()}), false, Some(TestErr::InvalidPayload))] #[case::fcu_status_fail(Some(PayloadStatusEnum::Syncing), false, Some(TestErr::EngineSyncing))] #[case::fcu_status_fail(Some(PayloadStatusEnum::Accepted), false, Some(TestErr::Unexpected))] #[tokio::test] @@ -102,7 +102,7 @@ async fn test_execute_variants( #[values(EngineForkchoiceVersion::V2, EngineForkchoiceVersion::V3)] fcu_version: EngineForkchoiceVersion, ) { - let payload_id = if payload_id_present { Some(PayloadId::new([1u8; 8])) } else { None }; + let payload_id = payload_id_present.then(|| PayloadId::new([1u8; 8])); let parent_block = test_block_info(0); let unsafe_block = test_block_info(1); @@ -112,7 +112,7 @@ async fn test_execute_variants( // Configure client with FCU response. If none, it will err on call, which is also a test case. 
let engine_client = fcu_status - .map_or(test_engine_client_builder(), |status| { + .map_or_else(test_engine_client_builder, |status| { configure_fcu( test_engine_client_builder(), fcu_version, @@ -130,11 +130,11 @@ async fn test_execute_variants( let (tx, mut rx) = mpsc::channel(1); - let mut task = BuildTask::new( + let task = BuildTask::new( Arc::new(engine_client.clone()), Arc::new(cfg), attributes.clone(), - if with_channel { Some(tx) } else { None }, + with_channel.then_some(tx), ); let mut state = TestEngineStateBuilder::new() @@ -144,7 +144,7 @@ async fn test_execute_variants( .build(); // Execute: Call execute - let result = wrapped_execute(&mut task, &mut state).await; + let result = wrapped_execute(&task, &mut state).await; if expected_err.is_some() { assert_eq!(expected_err, result.err()); diff --git a/kona/crates/node/engine/src/task_queue/tasks/consolidate/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/error.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/consolidate/error.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/error.rs diff --git a/kona/crates/node/engine/src/task_queue/tasks/consolidate/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/consolidate/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/mod.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs new file mode 100644 index 00000000000..79e37636b39 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/consolidate/task.rs @@ -0,0 +1,295 @@ +//! A task to consolidate the engine state. 
+ +use crate::{ + ConsolidateTaskError, EngineClient, EngineState, EngineTaskExt, SynchronizeTask, + state::EngineSyncStateUpdate, task_queue::build_and_seal, +}; +use alloy_rpc_types_eth::Block; +use async_trait::async_trait; +use kona_genesis::RollupConfig; +use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; +use op_alloy_rpc_types::Transaction; +use std::{sync::Arc, time::Instant}; + +/// Input for consolidation - either derived attributes or safe L2 block +#[derive(Debug, Clone)] +pub enum ConsolidateInput { + /// Consolidate based on derived attributes. + Attributes(Box), + /// Derivation Delegation: consolidate based on safe L2 block info. + BlockInfo(L2BlockInfo), +} + +impl From for ConsolidateInput { + fn from(v: L2BlockInfo) -> Self { + Self::BlockInfo(v) + } +} + +impl From for ConsolidateInput { + fn from(v: OpAttributesWithParent) -> Self { + Self::Attributes(Box::new(v)) + } +} + +impl ConsolidateInput { + /// Returns the block number for this consolidation input. + const fn l2_block_number(&self) -> u64 { + match self { + Self::Attributes(attributes) => attributes.block_number(), + Self::BlockInfo(info) => info.block_info.number, + } + } + + /// Checks if the block is consistent with this consolidation input. + fn is_consistent_with_block(&self, cfg: &RollupConfig, block: &Block) -> bool { + match self { + Self::Attributes(attributes) => { + crate::AttributesMatch::check(cfg, attributes, block).is_match() + } + Self::BlockInfo(info) => block.header.hash == info.block_info.hash, + } + } + + /// Returns true if this is `Attributes` and `attributes.is_last_in_span` is true. + const fn is_attributes_last_in_span(&self) -> bool { + matches!( + self, + Self::Attributes(attributes) + if attributes.is_last_in_span + ) + } +} + +/// The [`ConsolidateTask`] attempts to consolidate the engine state +/// using the specified payload attributes or block info. +#[derive(Debug, Clone)] +pub struct ConsolidateTask { + /// The engine client. 
+ pub client: Arc, + /// The [`RollupConfig`]. + pub cfg: Arc, + /// The input for consolidation (either attributes or block info). + pub input: ConsolidateInput, +} + +impl ConsolidateTask { + /// Creates a new [`ConsolidateTask`] with the specified input + pub const fn new( + client: Arc, + cfg: Arc, + input: ConsolidateInput, + ) -> Self { + Self { client, cfg, input } + } + + /// This is used when the [`ConsolidateTask`] fails to consolidate the engine state + async fn execute_build_and_seal_tasks( + &self, + state: &mut EngineState, + attributes: &OpAttributesWithParent, + ) -> Result<(), ConsolidateTaskError> { + build_and_seal(state, self.client.clone(), self.cfg.clone(), attributes.clone(), true) + .await?; + + Ok(()) + } + + /// This provides symmetric fallback behavior to with `build_and_seal`. + async fn reconcile_to_safe_head( + &self, + state: &mut EngineState, + safe_l2: &L2BlockInfo, + ) -> Result<(), ConsolidateTaskError> { + warn!( + target: "engine", + safe_l2 = %safe_l2, + "Apply safe head" + ); + + let fcu_start = Instant::now(); + + // We intentionally set unsafe_head and cross_unsafe_head to safe_l2 to ensure the + // engine observes a self-consistent head state. This is required to correctly handle + // reorgs (where unsafe may be ahead on a non-canonical fork) and to trigger EL sync when + // the local unsafe head lags behind the safe head. 
+ SynchronizeTask::new( + Arc::clone(&self.client), + self.cfg.clone(), + EngineSyncStateUpdate { + unsafe_head: Some(*safe_l2), + cross_unsafe_head: Some(*safe_l2), + safe_head: Some(*safe_l2), + local_safe_head: Some(*safe_l2), + ..Default::default() + }, + ) + .execute(state) + .await + .map_err(|e| { + warn!(target: "engine", ?e, "Apply safe head failed"); + e + })?; + + let fcu_duration = fcu_start.elapsed(); + + info!( + target: "engine", + hash = %safe_l2.block_info.hash, + number = safe_l2.block_info.number, + fcu_duration = ?fcu_duration, + "Updated safe head via follow safe" + ); + + Ok(()) + } + + /// Handles the fallback case when the block doesn't match the input or does not exist. + async fn reconcile_unsafe_to_safe( + &self, + state: &mut EngineState, + ) -> Result<(), ConsolidateTaskError> { + match &self.input { + ConsolidateInput::Attributes(attributes) => { + self.execute_build_and_seal_tasks(state, attributes).await + } + ConsolidateInput::BlockInfo(safe_l2) => { + self.reconcile_to_safe_head(state, safe_l2).await + } + } + } + + /// Attempts consolidation on the engine state. 
+ pub async fn consolidate(&self, state: &mut EngineState) -> Result<(), ConsolidateTaskError> { + let global_start = Instant::now(); + + // Fetch the unsafe L2 block + let block_num = self.input.l2_block_number(); + let fetch_start = Instant::now(); + let block = match self.client.l2_block_by_label(block_num.into()).await { + Ok(Some(block)) => block, + Ok(None) => { + warn!(target: "engine", "Received `None` block for {}", block_num); + return Err(ConsolidateTaskError::MissingUnsafeL2Block(block_num)); + } + Err(_) => { + warn!(target: "engine", "Failed to fetch unsafe l2 block for consolidation"); + return Err(ConsolidateTaskError::FailedToFetchUnsafeL2Block); + } + }; + let block_fetch_duration = fetch_start.elapsed(); + let block_hash = block.header.hash; + + if self.input.is_consistent_with_block(&self.cfg, &block) { + trace!( + target: "engine", + input = ?self.input, + block_hash = %block_hash, + "Consolidating engine state", + ); + match L2BlockInfo::from_block_and_genesis(&block.into_consensus(), &self.cfg.genesis) { + // Only issue a forkchoice update if the attributes are the last in the span + // batch. This is an optimization to avoid sending a FCU + // call for every block in the span batch. + Ok(block_info) if !self.input.is_attributes_last_in_span() => { + let total_duration = global_start.elapsed(); + + // Apply a transient update to the safe head. 
+ state.sync_state = state.sync_state.apply_update(EngineSyncStateUpdate { + safe_head: Some(block_info), + local_safe_head: Some(block_info), + ..Default::default() + }); + + info!( + target: "engine", + hash = %block_info.block_info.hash, + number = block_info.block_info.number, + ?total_duration, + ?block_fetch_duration, + "Updated safe head via L1 consolidation" + ); + + return Ok(()); + } + Ok(block_info) => { + let fcu_start = Instant::now(); + + SynchronizeTask::new( + Arc::clone(&self.client), + self.cfg.clone(), + EngineSyncStateUpdate { + safe_head: Some(block_info), + local_safe_head: Some(block_info), + ..Default::default() + }, + ) + .execute(state) + .await + .map_err(|e| { + warn!(target: "engine", ?e, "Consolidation failed"); + e + })?; + + let fcu_duration = fcu_start.elapsed(); + let total_duration = global_start.elapsed(); + + info!( + target: "engine", + hash = %block_info.block_info.hash, + number = block_info.block_info.number, + ?total_duration, + ?block_fetch_duration, + fcu_duration = ?fcu_duration, + "Updated safe head via L1 consolidation" + ); + + return Ok(()); + } + Err(e) => { + // Continue on to build the block since we failed to construct the block info. + warn!(target: "engine", ?e, "Failed to construct L2BlockInfo, proceeding to build task"); + } + } + } + + debug!( + target: "engine", + input = ?self.input, + block_hash = %block_hash, + "ConsolidateInput mismatch! Initiating reorg", + ); + // Handle mismatch case - called when consistency check fails + // or when L2BlockInfo construction fails in Attributes branch + self.reconcile_unsafe_to_safe(state).await + } +} + +#[async_trait] +impl EngineTaskExt for ConsolidateTask { + type Output = (); + + type Error = ConsolidateTaskError; + + // Behavior depends on how the safe head is provided: + // + // - `Attributes`: The safe head is advanced through the normal derivation flow, where the + // DerivationActor and EngineActor coordinate both safe and unsafe heads. 
In this case, we + // consolidate as long as the unsafe head has not fallen behind. + // + // - `BlockInfo`: The safe head is injected externally by the DerivationActor while delegating + // derivation, and is not coordinated with the EngineActor's safe/unsafe heads. If the + // injected safe head is ahead of the EngineActor's unsafe head, we reconcile the unsafe chain + // up to the safe head instead of consolidating. + async fn execute(&self, state: &mut EngineState) -> Result<(), ConsolidateTaskError> { + let safe_head_number = match &self.input { + ConsolidateInput::Attributes { .. } => state.sync_state.safe_head().block_info.number, + ConsolidateInput::BlockInfo(safe_block_info) => safe_block_info.block_info.number, + }; + if safe_head_number < state.sync_state.unsafe_head().block_info.number { + self.consolidate(state).await + } else { + self.reconcile_unsafe_to_safe(state).await + } + } +} diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs new file mode 100644 index 00000000000..c8444c2595e --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/finalize/error.rs @@ -0,0 +1,42 @@ +//! Contains error types for the [`crate::FinalizeTask`]. + +use crate::{ + EngineTaskError, SynchronizeTaskError, task_queue::tasks::task::EngineTaskErrorSeverity, +}; +use alloy_transport::{RpcError, TransportErrorKind}; +use kona_protocol::FromBlockError; +use thiserror::Error; + +/// An error that occurs when running the [`crate::FinalizeTask`]. +#[derive(Debug, Error)] +pub enum FinalizeTaskError { + /// The block is not safe, and therefore cannot be finalized. + #[error("Attempted to finalize a block that is not yet safe")] + BlockNotSafe, + /// The block to finalize was not found. + #[error("The block to finalize was not found: Number {0}")] + BlockNotFound(u64), + /// An error occurred while transforming the RPC block into [`L2BlockInfo`]. 
+ /// + /// [`L2BlockInfo`]: kona_protocol::L2BlockInfo + #[error(transparent)] + FromBlock(#[from] FromBlockError), + /// A temporary RPC failure. + #[error(transparent)] + TransportError(#[from] RpcError), + /// The forkchoice update call to finalize the block failed. + #[error(transparent)] + ForkchoiceUpdateFailed(#[from] SynchronizeTaskError), +} + +impl EngineTaskError for FinalizeTaskError { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::BlockNotSafe | Self::BlockNotFound(_) | Self::FromBlock(_) => { + EngineTaskErrorSeverity::Critical + } + Self::TransportError(_) => EngineTaskErrorSeverity::Temporary, + Self::ForkchoiceUpdateFailed(inner) => inner.severity(), + } + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/finalize/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/finalize/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/finalize/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/finalize/mod.rs diff --git a/kona/crates/node/engine/src/task_queue/tasks/finalize/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/finalize/task.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/finalize/task.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/finalize/task.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs new file mode 100644 index 00000000000..2d4d4512ea0 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/insert/error.rs @@ -0,0 +1,43 @@ +//! Contains the error types for the [`InsertTask`](crate::InsertTask). 
+ +use crate::{ + EngineTaskError, SynchronizeTaskError, task_queue::tasks::task::EngineTaskErrorSeverity, +}; +use alloy_rpc_types_engine::PayloadStatusEnum; +use alloy_transport::{RpcError, TransportErrorKind}; +use kona_protocol::FromBlockError; +use op_alloy_rpc_types_engine::OpPayloadError; + +/// An error that occurs when running the [`InsertTask`](crate::InsertTask). +#[derive(Debug, thiserror::Error)] +pub enum InsertTaskError { + /// Error converting a payload into a block. + #[error(transparent)] + FromBlockError(#[from] OpPayloadError), + /// Failed to insert new payload. + #[error("Failed to insert new payload: {0}")] + InsertFailed(RpcError), + /// Unexpected payload status + #[error("Unexpected payload status: {0}")] + UnexpectedPayloadStatus(PayloadStatusEnum), + /// Error converting the payload + chain genesis into an L2 block info. + #[error(transparent)] + L2BlockInfoConstruction(#[from] FromBlockError), + /// The forkchoice update call to consolidate the block into the engine state failed. 
+ #[error(transparent)] + ForkchoiceUpdateFailed(#[from] SynchronizeTaskError), +} + +impl EngineTaskError for InsertTaskError { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::FromBlockError(_) | Self::L2BlockInfoConstruction(_) => { + EngineTaskErrorSeverity::Critical + } + Self::InsertFailed(_) | Self::UnexpectedPayloadStatus(_) => { + EngineTaskErrorSeverity::Temporary + } + Self::ForkchoiceUpdateFailed(inner) => inner.severity(), + } + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/insert/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/insert/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/insert/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/insert/mod.rs diff --git a/kona/crates/node/engine/src/task_queue/tasks/insert/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/insert/task.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/insert/task.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/insert/task.rs diff --git a/kona/crates/node/engine/src/task_queue/tasks/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/mod.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs new file mode 100644 index 00000000000..a283b910ce0 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/seal/error.rs @@ -0,0 +1,65 @@ +//! Contains error types for the [`crate::SynchronizeTask`]. 
+ +use crate::{EngineTaskError, InsertTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; +use alloy_transport::{RpcError, TransportErrorKind}; +use kona_protocol::FromBlockError; +use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use thiserror::Error; +use tokio::sync::mpsc; + +/// An error that occurs when running the [`crate::SealTask`]. +#[derive(Debug, Error)] +pub enum SealTaskError { + /// Impossible to insert the payload into the engine. + #[error(transparent)] + PayloadInsertionFailed(#[from] Box), + /// The get payload call to the engine api failed. + #[error(transparent)] + GetPayloadFailed(RpcError), + /// A deposit-only payload failed to import. + #[error("Deposit-only payload failed to import")] + DepositOnlyPayloadFailed, + /// Failed to re-attempt payload import with deposit-only payload. + #[error("Failed to re-attempt payload import with deposit-only payload")] + DepositOnlyPayloadReattemptFailed, + /// The payload is invalid, and the derivation pipeline must + /// be flushed post-holocene. + #[error("Invalid payload, must flush post-holocene")] + HoloceneInvalidFlush, + /// Failed to convert a [`OpExecutionPayload`] to a [`L2BlockInfo`]. + /// + /// [`OpExecutionPayload`]: op_alloy_rpc_types_engine::OpExecutionPayload + /// [`L2BlockInfo`]: kona_protocol::L2BlockInfo + #[error(transparent)] + FromBlock(#[from] FromBlockError), + /// Error sending the built payload envelope. + #[error(transparent)] + MpscSend(#[from] Box>>), + /// The clock went backwards. + #[error("The clock went backwards")] + ClockWentBackwards, + /// Unsafe head changed between build and seal. This likely means that there was some race + /// condition between the previous seal updating the unsafe head and the build attributes + /// being created. This build has been invalidated. + /// + /// If not propagated to the original caller for handling (i.e. there was no original caller), + /// this should not happen and is a critical error. 
+ #[error("Unsafe head changed between build and seal")] + UnsafeHeadChangedSinceBuild, +} + +impl EngineTaskError for SealTaskError { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::PayloadInsertionFailed(inner) => inner.severity(), + Self::GetPayloadFailed(_) => EngineTaskErrorSeverity::Temporary, + Self::HoloceneInvalidFlush => EngineTaskErrorSeverity::Flush, + Self::DepositOnlyPayloadReattemptFailed | + Self::DepositOnlyPayloadFailed | + Self::FromBlock(_) | + Self::MpscSend(_) | + Self::ClockWentBackwards | + Self::UnsafeHeadChangedSinceBuild => EngineTaskErrorSeverity::Critical, + } + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/seal/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/seal/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/seal/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/seal/mod.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs new file mode 100644 index 00000000000..4fe640779b2 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/seal/task.rs @@ -0,0 +1,285 @@ +//! A task for importing a block that has already been started. +use super::SealTaskError; +use crate::{ + EngineClient, EngineGetPayloadVersion, EngineState, EngineTaskExt, InsertTask, + InsertTaskError::{self}, + task_queue::build_and_seal, +}; +use alloy_rpc_types_engine::{ExecutionPayload, PayloadId}; +use async_trait::async_trait; +use derive_more::Constructor; +use kona_genesis::RollupConfig; +use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; +use op_alloy_rpc_types_engine::{OpExecutionPayload, OpExecutionPayloadEnvelope}; +use std::{sync::Arc, time::Instant}; +use tokio::sync::mpsc; + +/// Task for block sealing and canonicalization. +/// +/// The [`SealTask`] handles the following parts of the block building workflow: +/// +/// 1. 
**Payload Construction**: Retrieves the built payload using `engine_getPayload` +/// 2. **Block Import**: Imports the payload using [`InsertTask`] for canonicalization +/// +/// ## Error Handling +/// +/// The task delegates to [`InsertTaskError`] for payload import failures. +/// +/// [`InsertTask`]: crate::InsertTask +/// [`InsertTaskError`]: crate::InsertTaskError +#[derive(Debug, Clone, Constructor)] +pub struct SealTask { + /// The engine API client. + pub engine: Arc, + /// The [`RollupConfig`]. + pub cfg: Arc, + /// The [`PayloadId`] being sealed. + pub payload_id: PayloadId, + /// The [`OpAttributesWithParent`] to instruct the execution layer to build. + pub attributes: OpAttributesWithParent, + /// Whether or not the payload was derived, or created by the sequencer. + pub is_attributes_derived: bool, + /// An optional sender to convey success/failure result of the built + /// [`OpExecutionPayloadEnvelope`] after the block has been built, imported, and canonicalized + /// or the [`SealTaskError`] that occurred during processing. + pub result_tx: Option>>, +} + +impl SealTask { + /// Seals the execution payload in the EL, returning the execution envelope. + /// + /// ## Engine Method Selection + /// The method used to fetch the payload from the EL is determined by the payload timestamp. The + /// method used to import the payload into the engine is determined by the payload version. + /// + /// - `engine_getPayloadV2` is used for payloads with a timestamp before the Ecotone fork. + /// - `engine_getPayloadV3` is used for payloads with a timestamp after the Ecotone fork. + /// - `engine_getPayloadV4` is used for payloads with a timestamp after the Isthmus fork. 
+ async fn seal_payload( + &self, + cfg: &RollupConfig, + engine: &EngineClient_, + payload_id: PayloadId, + payload_attrs: OpAttributesWithParent, + ) -> Result { + let payload_timestamp = payload_attrs.attributes().payload_attributes.timestamp; + + debug!( + target: "engine", + payload_id = payload_id.to_string(), + l2_time = payload_timestamp, + "Sealing payload" + ); + + let get_payload_version = EngineGetPayloadVersion::from_cfg(cfg, payload_timestamp); + let payload_envelope = match get_payload_version { + EngineGetPayloadVersion::V4 => { + let payload = engine.get_payload_v4(payload_id).await.map_err(|e| { + error!(target: "engine", "Payload fetch failed: {e}"); + SealTaskError::GetPayloadFailed(e) + })?; + + OpExecutionPayloadEnvelope { + parent_beacon_block_root: Some(payload.parent_beacon_block_root), + execution_payload: OpExecutionPayload::V4(payload.execution_payload), + } + } + EngineGetPayloadVersion::V3 => { + let payload = engine.get_payload_v3(payload_id).await.map_err(|e| { + error!(target: "engine", "Payload fetch failed: {e}"); + SealTaskError::GetPayloadFailed(e) + })?; + + OpExecutionPayloadEnvelope { + parent_beacon_block_root: Some(payload.parent_beacon_block_root), + execution_payload: OpExecutionPayload::V3(payload.execution_payload), + } + } + EngineGetPayloadVersion::V2 => { + let payload = engine.get_payload_v2(payload_id).await.map_err(|e| { + error!(target: "engine", "Payload fetch failed: {e}"); + SealTaskError::GetPayloadFailed(e) + })?; + + OpExecutionPayloadEnvelope { + parent_beacon_block_root: None, + execution_payload: match payload.execution_payload.into_payload() { + ExecutionPayload::V1(payload) => OpExecutionPayload::V1(payload), + ExecutionPayload::V2(payload) => OpExecutionPayload::V2(payload), + _ => unreachable!("the response should be a V1 or V2 payload"), + }, + } + } + }; + + Ok(payload_envelope) + } + + /// Inserts a payload into the engine with Holocene fallback support. + /// + /// This function handles: + /// 1. 
Executing the `InsertTask` to import the payload + /// 2. Handling deposits-only payload failures + /// 3. Holocene fallback via `build_and_seal` if needed + /// + /// Returns Ok(()) if the payload is successfully inserted, or an error if insertion fails. + async fn insert_payload( + &self, + state: &mut EngineState, + new_payload: OpExecutionPayloadEnvelope, + ) -> Result<(), SealTaskError> { + // Insert the new block into the engine. + match InsertTask::new( + Arc::clone(&self.engine), + self.cfg.clone(), + new_payload.clone(), + self.is_attributes_derived, + ) + .execute(state) + .await + { + Err(InsertTaskError::UnexpectedPayloadStatus(e)) + if self.attributes.is_deposits_only() => + { + error!(target: "engine", error = ?e, "Critical: Deposit-only payload import failed"); + return Err(SealTaskError::DepositOnlyPayloadFailed); + } + Err(InsertTaskError::UnexpectedPayloadStatus(e)) + if self.cfg.is_holocene_active( + self.attributes.attributes().payload_attributes.timestamp, + ) => + { + warn!(target: "engine", error = ?e, "Re-attempting payload import with deposits only."); + + // HOLOCENE: Re-attempt payload import with deposits only + // First build the deposits-only payload, then seal it + let deposits_only_attrs = self.attributes.as_deposits_only(); + + return match build_and_seal( + state, + self.engine.clone(), + self.cfg.clone(), + deposits_only_attrs.clone(), + self.is_attributes_derived, + ) + .await + { + Ok(_) => { + info!(target: "engine", "Successfully imported deposits-only payload"); + Err(SealTaskError::HoloceneInvalidFlush) + } + Err(_) => Err(SealTaskError::DepositOnlyPayloadReattemptFailed), + }; + } + Err(e) => { + error!(target: "engine", "Payload import failed: {e}"); + return Err(Box::new(e).into()); + } + Ok(_) => { + info!(target: "engine", "Successfully imported payload") + } + } + + Ok(()) + } + + /// Seals and canonicalizes the block by fetching the payload and importing it. + /// + /// This function handles: + /// 1. 
Fetching the execution payload from the EL + /// 2. Importing the payload into the engine with Holocene fallback support + /// 3. Sending the payload to the optional channel + async fn seal_and_canonicalize_block( + &self, + state: &mut EngineState, + ) -> Result { + // Fetch the payload just inserted from the EL and import it into the engine. + let block_import_start_time = Instant::now(); + let new_payload = self + .seal_payload(&self.cfg, &self.engine, self.payload_id, self.attributes.clone()) + .await?; + + let new_block_ref = L2BlockInfo::from_payload_and_genesis( + new_payload.execution_payload.clone(), + self.attributes.attributes().payload_attributes.parent_beacon_block_root, + &self.cfg.genesis, + ) + .map_err(SealTaskError::FromBlock)?; + + // Insert the payload into the engine. + self.insert_payload(state, new_payload.clone()).await?; + + let block_import_duration = block_import_start_time.elapsed(); + + info!( + target: "engine", + l2_number = new_block_ref.block_info.number, + l2_time = new_block_ref.block_info.timestamp, + block_import_duration = ?block_import_duration, + "Built and imported new {} block", + if self.is_attributes_derived { "safe" } else { "unsafe" }, + ); + + Ok(new_payload) + } + + /// Sends the provided result via the `result_tx` sender if one exists, returning the + /// appropriate error if it does not. + /// + /// This allows the original caller to handle errors, removing that burden from the engine, + /// which may not know the caller's intent or retry preferences. If the original caller did not + /// provide a mechanism to get notified of updates, handle the error in the default manner in + /// the task queue logic. + async fn send_channel_result_or_get_error( + &self, + res: Result, + ) -> Result<(), SealTaskError> { + // NB: If a response channel was provided, that channel will receive success/failure info, + // and this task will always succeed. If not, task failure will be relayed to the caller. 
+ if let Some(tx) = &self.result_tx { + tx.send(res).await.map_err(|e| SealTaskError::MpscSend(Box::new(e)))?; + } else if let Err(x) = res { + return Err(x); + } + + Ok(()) + } +} + +#[async_trait] +impl EngineTaskExt for SealTask { + type Output = (); + + type Error = SealTaskError; + + async fn execute(&self, state: &mut EngineState) -> Result<(), SealTaskError> { + debug!( + target: "engine", + txs = self.attributes.attributes().transactions.as_ref().map_or(0, |txs| txs.len()), + is_deposits = self.attributes.is_deposits_only(), + "Starting new seal job" + ); + + let unsafe_block_info = state.sync_state.unsafe_head().block_info; + let parent_block_info = self.attributes.parent.block_info; + + let res = if unsafe_block_info.hash != parent_block_info.hash || + unsafe_block_info.number != parent_block_info.number + { + info!( + target: "engine", + unsafe_block_info = ?unsafe_block_info, + parent_block_info = ?parent_block_info, + "Seal attributes parent does not match unsafe head, returning rebuild error" + ); + Err(SealTaskError::UnsafeHeadChangedSinceBuild) + } else { + // Seal the block and import it into the engine. + self.seal_and_canonicalize_block(state).await + }; + + self.send_channel_result_or_get_error(res).await?; + + Ok(()) + } +} diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs new file mode 100644 index 00000000000..90052efed4a --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/error.rs @@ -0,0 +1,35 @@ +//! Contains error types for the [`crate::SynchronizeTask`]. + +use crate::{EngineTaskError, task_queue::tasks::task::EngineTaskErrorSeverity}; +use alloy_rpc_types_engine::PayloadStatusEnum; +use alloy_transport::{RpcError, TransportErrorKind}; +use thiserror::Error; + +/// An error that occurs when running the [`crate::SynchronizeTask`]. 
+#[derive(Debug, Error)] +pub enum SynchronizeTaskError { + /// The forkchoice update call to the engine api failed. + #[error("Forkchoice update engine api call failed due to an RPC error: {0}")] + ForkchoiceUpdateFailed(RpcError), + /// The finalized head is behind the unsafe head. + #[error("Invalid forkchoice state: unsafe head {0} is ahead of finalized head {1}")] + FinalizedAheadOfUnsafe(u64, u64), + /// The forkchoice state is invalid. + #[error("Invalid forkchoice state")] + InvalidForkchoiceState, + /// The payload status is unexpected. + #[error("Unexpected payload status: {0}")] + UnexpectedPayloadStatus(PayloadStatusEnum), +} + +impl EngineTaskError for SynchronizeTaskError { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::FinalizedAheadOfUnsafe(_, _) => EngineTaskErrorSeverity::Critical, + Self::ForkchoiceUpdateFailed(_) | Self::UnexpectedPayloadStatus(_) => { + EngineTaskErrorSeverity::Temporary + } + Self::InvalidForkchoiceState => EngineTaskErrorSeverity::Reset, + } + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/synchronize/mod.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/mod.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/synchronize/mod.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/mod.rs diff --git a/kona/crates/node/engine/src/task_queue/tasks/synchronize/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/task.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/synchronize/task.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/synchronize/task.rs diff --git a/rust/kona/crates/node/engine/src/task_queue/tasks/task.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/task.rs new file mode 100644 index 00000000000..206f0dafb74 --- /dev/null +++ b/rust/kona/crates/node/engine/src/task_queue/tasks/task.rs @@ -0,0 +1,240 @@ +//! 
Tasks sent to the [`Engine`] for execution. +//! +//! [`Engine`]: crate::Engine + +use super::{BuildTask, ConsolidateTask, FinalizeTask, InsertTask}; +use crate::{ + BuildTaskError, ConsolidateTaskError, EngineClient, EngineState, FinalizeTaskError, + InsertTaskError, + task_queue::{SealTask, SealTaskError}, +}; +use async_trait::async_trait; +use derive_more::Display; +use std::cmp::Ordering; +use thiserror::Error; +use tokio::task::yield_now; + +/// The severity of an engine task error. +/// +/// This is used to determine how to handle the error when draining the engine task queue. +#[derive(Debug, PartialEq, Eq, Display, Clone, Copy)] +pub enum EngineTaskErrorSeverity { + /// The error is temporary and the task is retried. + #[display("temporary")] + Temporary, + /// The error is critical and is propagated to the engine actor. + #[display("critical")] + Critical, + /// The error indicates that the engine should be reset. + #[display("reset")] + Reset, + /// The error indicates that the engine should be flushed. + #[display("flush")] + Flush, +} + +/// The interface for an engine task error. +/// +/// An engine task error should have an associated severity level to specify how to handle the error +/// when draining the engine task queue. +pub trait EngineTaskError { + /// The severity of the error. + fn severity(&self) -> EngineTaskErrorSeverity; +} + +/// The interface for an engine task. +#[async_trait] +pub trait EngineTaskExt { + /// The output type of the task. + type Output; + + /// The error type of the task. + type Error: EngineTaskError; + + /// Executes the task, taking a shared lock on the engine state and `self`. + async fn execute(&self, state: &mut EngineState) -> Result; +} + +/// An error that may occur during an [`EngineTask`]'s execution. +#[derive(Error, Debug)] +pub enum EngineTaskErrors { + /// An error that occurred while inserting a block into the engine. 
+ #[error(transparent)] + Insert(#[from] InsertTaskError), + /// An error that occurred while building a block. + #[error(transparent)] + Build(#[from] BuildTaskError), + /// An error that occurred while sealing a block. + #[error(transparent)] + Seal(#[from] SealTaskError), + /// An error that occurred while consolidating the engine state. + #[error(transparent)] + Consolidate(#[from] ConsolidateTaskError), + /// An error that occurred while finalizing an L2 block. + #[error(transparent)] + Finalize(#[from] FinalizeTaskError), +} + +impl EngineTaskError for EngineTaskErrors { + fn severity(&self) -> EngineTaskErrorSeverity { + match self { + Self::Insert(inner) => inner.severity(), + Self::Build(inner) => inner.severity(), + Self::Seal(inner) => inner.severity(), + Self::Consolidate(inner) => inner.severity(), + Self::Finalize(inner) => inner.severity(), + } + } +} + +/// Tasks that may be inserted into and executed by the [`Engine`]. +/// +/// [`Engine`]: crate::Engine +#[derive(Debug, Clone)] +pub enum EngineTask { + /// Inserts a payload into the execution engine. + Insert(Box>), + /// Begins building a new block with the given attributes, producing a new payload ID. + Build(Box>), + /// Seals the block with the given payload ID and attributes, inserting it into the execution + /// engine. + Seal(Box>), + /// Performs consolidation on the engine state, reverting to payload attribute processing + /// via the [`BuildTask`] if consolidation fails. + Consolidate(Box>), + /// Finalizes an L2 block + Finalize(Box>), +} + +impl EngineTask { + /// Executes the task without consuming it. 
+ async fn execute_inner(&self, state: &mut EngineState) -> Result<(), EngineTaskErrors> { + match self { + Self::Insert(task) => task.execute(state).await?, + Self::Seal(task) => task.execute(state).await?, + Self::Consolidate(task) => task.execute(state).await?, + Self::Finalize(task) => task.execute(state).await?, + Self::Build(task) => { + task.execute(state).await?; + } + }; + + Ok(()) + } + + const fn task_metrics_label(&self) -> &'static str { + match self { + Self::Insert(_) => crate::Metrics::INSERT_TASK_LABEL, + Self::Consolidate(_) => crate::Metrics::CONSOLIDATE_TASK_LABEL, + Self::Build(_) => crate::Metrics::BUILD_TASK_LABEL, + Self::Seal(_) => crate::Metrics::SEAL_TASK_LABEL, + Self::Finalize(_) => crate::Metrics::FINALIZE_TASK_LABEL, + } + } +} + +impl PartialEq for EngineTask { + fn eq(&self, other: &Self) -> bool { + matches!( + (self, other), + (Self::Insert(_), Self::Insert(_)) | + (Self::Build(_), Self::Build(_)) | + (Self::Seal(_), Self::Seal(_)) | + (Self::Consolidate(_), Self::Consolidate(_)) | + (Self::Finalize(_), Self::Finalize(_)) + ) + } +} + +impl Eq for EngineTask {} + +impl PartialOrd for EngineTask { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for EngineTask { + fn cmp(&self, other: &Self) -> Ordering { + // Order (descending): BuildBlock -> InsertUnsafe -> Consolidate -> Finalize + // + // https://specs.optimism.io/protocol/derivation.html#forkchoice-synchronization + // + // - Block building jobs are prioritized above all other tasks, to give priority to the + // sequencer. BuildTask handles forkchoice updates automatically. + // - InsertUnsafe tasks are prioritized over Consolidate tasks, to ensure that unsafe block + // gossip is imported promptly. + // - Consolidate tasks are prioritized over Finalize tasks, as they advance the safe chain + // via derivation. + // - Finalize tasks have the lowest priority, as they only update finalized status. 
+ match (self, other) { + // Same variant cases + (Self::Insert(_), Self::Insert(_)) | + (Self::Consolidate(_), Self::Consolidate(_)) | + (Self::Build(_), Self::Build(_)) | + (Self::Seal(_), Self::Seal(_)) | + (Self::Finalize(_), Self::Finalize(_)) => Ordering::Equal, + + // SealBlock tasks are prioritized over all others + (Self::Seal(_), _) => Ordering::Greater, + (_, Self::Seal(_)) => Ordering::Less, + + // BuildBlock tasks are prioritized over InsertUnsafe and Consolidate tasks + (Self::Build(_), _) => Ordering::Greater, + (_, Self::Build(_)) => Ordering::Less, + + // InsertUnsafe tasks are prioritized over Consolidate and Finalize tasks + (Self::Insert(_), _) => Ordering::Greater, + (_, Self::Insert(_)) => Ordering::Less, + + // Consolidate tasks are prioritized over Finalize tasks + (Self::Consolidate(_), _) => Ordering::Greater, + (_, Self::Consolidate(_)) => Ordering::Less, + } + } +} + +#[async_trait] +impl EngineTaskExt for EngineTask { + type Output = (); + + type Error = EngineTaskErrors; + + async fn execute(&self, state: &mut EngineState) -> Result<(), Self::Error> { + // Retry the task until it succeeds or a critical error occurs. + while let Err(e) = self.execute_inner(state).await { + let severity = e.severity(); + + kona_macros::inc!( + counter, + crate::Metrics::ENGINE_TASK_FAILURE, + self.task_metrics_label() => severity.to_string() + ); + + match severity { + EngineTaskErrorSeverity::Temporary => { + trace!(target: "engine", "{e}"); + + // Yield the task to allow other tasks to execute to avoid starvation. 
+ yield_now().await; + } + EngineTaskErrorSeverity::Critical => { + error!(target: "engine", "{e}"); + return Err(e); + } + EngineTaskErrorSeverity::Reset => { + warn!(target: "engine", "Engine requested derivation reset"); + return Err(e); + } + EngineTaskErrorSeverity::Flush => { + warn!(target: "engine", "Engine requested derivation flush"); + return Err(e); + } + } + } + + kona_macros::inc!(counter, crate::Metrics::ENGINE_TASK_SUCCESS, self.task_metrics_label()); + + Ok(()) + } +} diff --git a/kona/crates/node/engine/src/task_queue/tasks/util.rs b/rust/kona/crates/node/engine/src/task_queue/tasks/util.rs similarity index 100% rename from kona/crates/node/engine/src/task_queue/tasks/util.rs rename to rust/kona/crates/node/engine/src/task_queue/tasks/util.rs diff --git a/rust/kona/crates/node/engine/src/test_utils/attributes.rs b/rust/kona/crates/node/engine/src/test_utils/attributes.rs new file mode 100644 index 00000000000..fae84e4fd59 --- /dev/null +++ b/rust/kona/crates/node/engine/src/test_utils/attributes.rs @@ -0,0 +1,111 @@ +use alloy_eips::BlockNumHash; +use alloy_primitives::{B256, b256}; +use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; +use op_alloy_rpc_types_engine::OpPayloadAttributes; + +/// Builder for creating test `OpAttributesWithParent` instances with sensible defaults +#[derive(Debug)] +pub struct TestAttributesBuilder { + timestamp: u64, + prev_randao: B256, + suggested_fee_recipient: alloy_primitives::Address, + withdrawals: Option>, + parent_beacon_block_root: Option, + transactions: Option>, + no_tx_pool: Option, + gas_limit: Option, + eip_1559_params: Option, + min_base_fee: Option, + parent: L2BlockInfo, + derived_from: Option, + is_last_in_span: bool, +} + +impl TestAttributesBuilder { + /// Creates a new builder with default values + pub fn new() -> Self { + let parent = L2BlockInfo { + block_info: BlockInfo { + number: 0, + hash: b256!("1111111111111111111111111111111111111111111111111111111111111111"), + 
parent_hash: B256::ZERO, + timestamp: 1000, + }, + l1_origin: BlockNumHash::default(), + seq_num: 0, + }; + + Self { + timestamp: 2000, + prev_randao: b256!("2222222222222222222222222222222222222222222222222222222222222222"), + suggested_fee_recipient: alloy_primitives::Address::ZERO, + withdrawals: None, + parent_beacon_block_root: Some(B256::ZERO), + transactions: None, + no_tx_pool: Some(false), + gas_limit: Some(30_000_000), + eip_1559_params: None, + min_base_fee: None, + parent, + derived_from: None, + is_last_in_span: false, + } + } + + /// Sets the timestamp + pub const fn with_timestamp(mut self, timestamp: u64) -> Self { + self.timestamp = timestamp; + self + } + + /// Sets the parent block + pub const fn with_parent(mut self, parent: L2BlockInfo) -> Self { + self.parent = parent; + self + } + + /// Sets the transactions + #[allow(dead_code)] + pub fn with_transactions(mut self, txs: Vec) -> Self { + self.transactions = Some(txs); + self + } + + /// Sets the gas limit + #[allow(dead_code)] + pub const fn with_gas_limit(mut self, gas_limit: u64) -> Self { + self.gas_limit = Some(gas_limit); + self + } + + /// Builds the `OpAttributesWithParent` + pub fn build(self) -> OpAttributesWithParent { + let attributes = OpPayloadAttributes { + payload_attributes: alloy_rpc_types_engine::PayloadAttributes { + timestamp: self.timestamp, + prev_randao: self.prev_randao, + suggested_fee_recipient: self.suggested_fee_recipient, + withdrawals: self.withdrawals, + parent_beacon_block_root: self.parent_beacon_block_root, + }, + transactions: self.transactions, + no_tx_pool: self.no_tx_pool, + gas_limit: self.gas_limit, + eip_1559_params: self.eip_1559_params, + min_base_fee: self.min_base_fee, + }; + + OpAttributesWithParent::new( + attributes, + self.parent, + self.derived_from, + self.is_last_in_span, + ) + } +} + +impl Default for TestAttributesBuilder { + fn default() -> Self { + Self::new() + } +} diff --git 
a/rust/kona/crates/node/engine/src/test_utils/engine_client.rs b/rust/kona/crates/node/engine/src/test_utils/engine_client.rs new file mode 100644 index 00000000000..c2f1a823d40 --- /dev/null +++ b/rust/kona/crates/node/engine/src/test_utils/engine_client.rs @@ -0,0 +1,803 @@ +//! Mock implementations for testing engine client functionality. + +use crate::{EngineClient, HyperAuthClient}; +use alloy_eips::{BlockId, eip1898::BlockNumberOrTag}; +use alloy_network::{Ethereum, Network}; +use alloy_primitives::{Address, B256, BlockHash, StorageKey}; +use alloy_provider::{EthGetBlock, ProviderCall, RpcWithBlock}; +use alloy_rpc_types_engine::{ + ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, + ExecutionPayloadV1, ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, + PayloadStatus, +}; +use alloy_rpc_types_eth::{Block, EIP1186AccountProofResponse, Transaction as EthTransaction}; +use alloy_transport::{TransportError, TransportErrorKind, TransportResult}; +use alloy_transport_http::Http; +use async_trait::async_trait; +use kona_genesis::RollupConfig; +use kona_protocol::L2BlockInfo; +use op_alloy_network::Optimism; +use op_alloy_provider::ext::engine::OpEngineApi; +use op_alloy_rpc_types::Transaction as OpTransaction; +use op_alloy_rpc_types_engine::{ + OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, + OpPayloadAttributes, ProtocolVersion, +}; +use std::{collections::HashMap, sync::Arc}; +use tokio::sync::RwLock; + +use crate::EngineClientError; + +/// Builder for creating test `MockEngineClient` instances with sensible defaults +pub fn test_engine_client_builder() -> MockEngineClientBuilder { + MockEngineClientBuilder::new().with_config(Arc::new(RollupConfig::default())) +} + +/// Mock storage for engine client responses. 
+/// +/// Each API method has version-specific storage to allow tests to verify +/// which specific version was called and return different responses per version. +#[derive(Debug, Clone, Default)] +pub struct MockEngineStorage { + /// Storage for block responses by tag. + pub l2_blocks_by_label: HashMap>, + /// Storage for block info responses by tag. + pub block_info_by_tag: HashMap, + + // Version-specific new_payload responses + /// Storage for `new_payload_v1` responses. + pub new_payload_v1_response: Option, + /// Storage for `new_payload_v2` responses. + pub new_payload_v2_response: Option, + /// Storage for `new_payload_v3` responses. + pub new_payload_v3_response: Option, + /// Storage for `new_payload_v4` responses. + pub new_payload_v4_response: Option, + + // Version-specific fork_choice_updated responses + /// Storage for `fork_choice_updated_v2` responses. + pub fork_choice_updated_v2_response: Option, + /// Storage for `fork_choice_updated_v3` responses. + pub fork_choice_updated_v3_response: Option, + + // Version-specific get_payload responses + /// Storage for execution payload envelope v2 responses. + pub execution_payload_v2: Option, + /// Storage for OP execution payload envelope v3 responses. + pub execution_payload_v3: Option, + /// Storage for OP execution payload envelope v4 responses. + pub execution_payload_v4: Option, + + // Version-specific get_payload_bodies responses + /// Storage for `get_payload_bodies_by_hash_v1` responses. + pub get_payload_bodies_by_hash_v1_response: Option, + /// Storage for `get_payload_bodies_by_range_v1` responses. + pub get_payload_bodies_by_range_v1_response: Option, + + // Non-versioned responses + /// Storage for client version responses. + pub client_versions: Option>, + /// Storage for protocol version responses. + pub protocol_version: Option, + /// Storage for capabilities responses. 
+ pub capabilities: Option>, + + // Storage for get_l1_block, get_l2_block, and get_proof + /// Storage for L1 blocks by stringified `BlockId`. + /// L1 blocks use standard Ethereum transactions. + pub l1_blocks_by_id: HashMap>, + /// Storage for L2 blocks by stringified `BlockId`. + /// L2 blocks use OP Stack transactions. + pub l2_blocks_by_id: HashMap>, + /// Storage for proofs by (address, stringified `BlockId`) key. + pub proofs_by_address: HashMap<(Address, String), EIP1186AccountProofResponse>, +} + +/// Builder for constructing a [`MockEngineClient`] with pre-configured responses. +/// +/// This builder allows you to set up mock responses before creating the client, +/// making it easier to write concise tests. +/// +/// # Example +/// +/// ```rust,ignore +/// use kona_engine::test_utils::{MockEngineClient}; +/// use alloy_rpc_types_engine::{PayloadStatus, PayloadStatusEnum}; +/// use std::sync::Arc; +/// +/// let mock = MockEngineClient::builder() +/// .with_config(Arc::new(RollupConfig::default())) +/// .with_payload_status(PayloadStatus { +/// status: PayloadStatusEnum::Valid, +/// latest_valid_hash: Some(B256::ZERO), +/// }) +/// .build(); +/// ``` +#[derive(Debug)] +pub struct MockEngineClientBuilder { + cfg: Option>, + storage: MockEngineStorage, +} + +impl MockEngineClientBuilder { + /// Creates a new builder with default values. + pub fn new() -> Self { + Self { cfg: None, storage: MockEngineStorage::default() } + } + + /// Sets the rollup configuration. + pub fn with_config(mut self, cfg: Arc) -> Self { + self.cfg = Some(cfg); + self + } + + /// Sets a block response for a specific tag. + pub fn with_l2_block_by_label( + mut self, + tag: BlockNumberOrTag, + block: Block, + ) -> Self { + self.storage.l2_blocks_by_label.insert(tag, block); + self + } + + /// Sets a block info response for a specific tag. 
+ pub fn with_block_info_by_tag(mut self, tag: BlockNumberOrTag, info: L2BlockInfo) -> Self { + self.storage.block_info_by_tag.insert(tag, info); + self + } + + /// Sets the `new_payload_v1` response. + pub fn with_new_payload_v1_response(mut self, status: PayloadStatus) -> Self { + self.storage.new_payload_v1_response = Some(status); + self + } + + /// Sets the `new_payload_v2` response. + pub fn with_new_payload_v2_response(mut self, status: PayloadStatus) -> Self { + self.storage.new_payload_v2_response = Some(status); + self + } + + /// Sets the `new_payload_v3` response. + pub fn with_new_payload_v3_response(mut self, status: PayloadStatus) -> Self { + self.storage.new_payload_v3_response = Some(status); + self + } + + /// Sets the `new_payload_v4` response. + pub fn with_new_payload_v4_response(mut self, status: PayloadStatus) -> Self { + self.storage.new_payload_v4_response = Some(status); + self + } + + /// Sets the `fork_choice_updated_v2` response. + pub fn with_fork_choice_updated_v2_response(mut self, response: ForkchoiceUpdated) -> Self { + self.storage.fork_choice_updated_v2_response = Some(response); + self + } + + /// Sets the `fork_choice_updated_v3` response. + pub fn with_fork_choice_updated_v3_response(mut self, response: ForkchoiceUpdated) -> Self { + self.storage.fork_choice_updated_v3_response = Some(response); + self + } + + /// Sets the execution payload v2 response. + pub fn with_execution_payload_v2(mut self, payload: ExecutionPayloadEnvelopeV2) -> Self { + self.storage.execution_payload_v2 = Some(payload); + self + } + + /// Sets the execution payload v3 response. + pub fn with_execution_payload_v3(mut self, payload: OpExecutionPayloadEnvelopeV3) -> Self { + self.storage.execution_payload_v3 = Some(payload); + self + } + + /// Sets the execution payload v4 response. 
+ pub fn with_execution_payload_v4(mut self, payload: OpExecutionPayloadEnvelopeV4) -> Self { + self.storage.execution_payload_v4 = Some(payload); + self + } + + /// Sets the `get_payload_bodies_by_hash_v1` response. + pub fn with_payload_bodies_by_hash_response( + mut self, + bodies: ExecutionPayloadBodiesV1, + ) -> Self { + self.storage.get_payload_bodies_by_hash_v1_response = Some(bodies); + self + } + + /// Sets the `get_payload_bodies_by_range_v1` response. + pub fn with_payload_bodies_by_range_response( + mut self, + bodies: ExecutionPayloadBodiesV1, + ) -> Self { + self.storage.get_payload_bodies_by_range_v1_response = Some(bodies); + self + } + + /// Sets the client versions response. + pub fn with_client_versions(mut self, versions: Vec) -> Self { + self.storage.client_versions = Some(versions); + self + } + + /// Sets the protocol version response. + pub const fn with_protocol_version(mut self, version: ProtocolVersion) -> Self { + self.storage.protocol_version = Some(version); + self + } + + /// Sets the capabilities response. + pub fn with_capabilities(mut self, capabilities: Vec) -> Self { + self.storage.capabilities = Some(capabilities); + self + } + + /// Sets an L1 block response for a specific `BlockId`. + pub fn with_l1_block(mut self, block_id: BlockId, block: Block) -> Self { + let key = block_id_to_key(&block_id); + self.storage.l1_blocks_by_id.insert(key, block); + self + } + + /// Sets an L2 block response for a specific `BlockId`. + pub fn with_l2_block(mut self, block_id: BlockId, block: Block) -> Self { + let key = block_id_to_key(&block_id); + self.storage.l2_blocks_by_id.insert(key, block); + self + } + + /// Sets a proof response for a specific address and `BlockId`. 
+ pub fn with_proof( + mut self, + address: Address, + block_id: BlockId, + proof: EIP1186AccountProofResponse, + ) -> Self { + let key = block_id_to_key(&block_id); + self.storage.proofs_by_address.insert((address, key), proof); + self + } + + /// Builds the [`MockEngineClient`] with the configured values. + /// + /// # Panics + /// + /// Panics if any required fields (cfg) are not set. + pub fn build(self) -> MockEngineClient { + let cfg = self.cfg.expect("cfg must be set"); + + MockEngineClient { cfg, storage: Arc::new(RwLock::new(self.storage)) } + } +} + +impl Default for MockEngineClientBuilder { + fn default() -> Self { + Self::new() + } +} + +/// Mock implementation of the `EngineClient` trait for testing. +/// +/// This mock allows tests to configure expected responses for all `EngineClient` +/// and `OpEngineApi` methods. All responses are stored in a shared [`MockEngineStorage`] +/// protected by an `RwLock` for thread-safe access. +#[derive(Debug, Clone)] +pub struct MockEngineClient { + /// The rollup configuration. + cfg: Arc, + /// Shared storage for mock responses. + storage: Arc>, +} + +impl MockEngineClient { + /// Creates a new mock engine client with the given config. + pub fn new(cfg: Arc) -> Self { + Self { cfg, storage: Arc::new(RwLock::new(MockEngineStorage::default())) } + } + + /// Creates a builder for constructing a mock engine client. + pub fn builder() -> MockEngineClientBuilder { + MockEngineClientBuilder::new() + } + + /// Returns a reference to the mock storage for configuring responses. + pub fn storage(&self) -> Arc> { + Arc::clone(&self.storage) + } + + /// Sets a block response for a specific tag. + pub async fn set_l2_block_by_label(&self, tag: BlockNumberOrTag, block: Block) { + self.storage.write().await.l2_blocks_by_label.insert(tag, block); + } + + /// Sets a block info response for a specific tag. 
+ pub async fn set_block_info_by_tag(&self, tag: BlockNumberOrTag, info: L2BlockInfo) { + self.storage.write().await.block_info_by_tag.insert(tag, info); + } + + /// Sets the `new_payload_v1` response. + pub async fn set_new_payload_v1_response(&self, status: PayloadStatus) { + self.storage.write().await.new_payload_v1_response = Some(status); + } + + /// Sets the `new_payload_v2` response. + pub async fn set_new_payload_v2_response(&self, status: PayloadStatus) { + self.storage.write().await.new_payload_v2_response = Some(status); + } + + /// Sets the `new_payload_v3` response. + pub async fn set_new_payload_v3_response(&self, status: PayloadStatus) { + self.storage.write().await.new_payload_v3_response = Some(status); + } + + /// Sets the `new_payload_v4` response. + pub async fn set_new_payload_v4_response(&self, status: PayloadStatus) { + self.storage.write().await.new_payload_v4_response = Some(status); + } + + /// Sets the `fork_choice_updated_v2` response. + pub async fn set_fork_choice_updated_v2_response(&self, response: ForkchoiceUpdated) { + self.storage.write().await.fork_choice_updated_v2_response = Some(response); + } + + /// Sets the `fork_choice_updated_v3` response. + pub async fn set_fork_choice_updated_v3_response(&self, response: ForkchoiceUpdated) { + self.storage.write().await.fork_choice_updated_v3_response = Some(response); + } + + /// Sets the execution payload v2 response. + pub async fn set_execution_payload_v2(&self, payload: ExecutionPayloadEnvelopeV2) { + self.storage.write().await.execution_payload_v2 = Some(payload); + } + + /// Sets the execution payload v3 response. + pub async fn set_execution_payload_v3(&self, payload: OpExecutionPayloadEnvelopeV3) { + self.storage.write().await.execution_payload_v3 = Some(payload); + } + + /// Sets the execution payload v4 response. 
+ pub async fn set_execution_payload_v4(&self, payload: OpExecutionPayloadEnvelopeV4) { + self.storage.write().await.execution_payload_v4 = Some(payload); + } + + /// Sets the `get_payload_bodies_by_hash_v1` response. + pub async fn set_payload_bodies_by_hash_response(&self, bodies: ExecutionPayloadBodiesV1) { + self.storage.write().await.get_payload_bodies_by_hash_v1_response = Some(bodies); + } + + /// Sets the `get_payload_bodies_by_range_v1` response. + pub async fn set_payload_bodies_by_range_response(&self, bodies: ExecutionPayloadBodiesV1) { + self.storage.write().await.get_payload_bodies_by_range_v1_response = Some(bodies); + } + + /// Sets the client versions response. + pub async fn set_client_versions(&self, versions: Vec) { + self.storage.write().await.client_versions = Some(versions); + } + + /// Sets the protocol version response. + pub async fn set_protocol_version(&self, version: ProtocolVersion) { + self.storage.write().await.protocol_version = Some(version); + } + + /// Sets the capabilities response. + pub async fn set_capabilities(&self, capabilities: Vec) { + self.storage.write().await.capabilities = Some(capabilities); + } + + /// Sets an L1 block response for a specific `BlockId`. + pub async fn set_l1_block(&self, block_id: BlockId, block: Block) { + let key = block_id_to_key(&block_id); + self.storage.write().await.l1_blocks_by_id.insert(key, block); + } + + /// Sets an L2 block response for a specific `BlockId`. + pub async fn set_l2_block(&self, block_id: BlockId, block: Block) { + let key = block_id_to_key(&block_id); + self.storage.write().await.l2_blocks_by_id.insert(key, block); + } + + /// Sets a proof response for a specific address and `BlockId`. 
+ pub async fn set_proof( + &self, + address: Address, + block_id: BlockId, + proof: EIP1186AccountProofResponse, + ) { + let key = block_id_to_key(&block_id); + self.storage.write().await.proofs_by_address.insert((address, key), proof); + } +} + +#[async_trait] +impl EngineClient for MockEngineClient { + fn cfg(&self) -> &RollupConfig { + self.cfg.as_ref() + } + + fn get_l1_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse> { + let storage = Arc::clone(&self.storage); + let block_key = block_id_to_key(&block); + + EthGetBlock::new_provider( + block, + Box::new(move |_kind| { + let storage = Arc::clone(&storage); + let block_key = block_key.clone(); + + ProviderCall::BoxedFuture(Box::pin(async move { + let storage_guard = storage.read().await; + Ok(storage_guard.l1_blocks_by_id.get(&block_key).cloned()) + })) + }), + ) + } + + fn get_l2_block(&self, block: BlockId) -> EthGetBlock<::BlockResponse> { + let storage = Arc::clone(&self.storage); + let block_key = block_id_to_key(&block); + + EthGetBlock::new_provider( + block, + Box::new(move |_kind| { + let storage = Arc::clone(&storage); + let block_key = block_key.clone(); + + ProviderCall::BoxedFuture(Box::pin(async move { + let storage_guard = storage.read().await; + Ok(storage_guard.l2_blocks_by_id.get(&block_key).cloned()) + })) + }), + ) + } + + fn get_proof( + &self, + address: Address, + _keys: Vec, + ) -> RpcWithBlock<(Address, Vec), EIP1186AccountProofResponse> { + let storage = Arc::clone(&self.storage); + + RpcWithBlock::new_provider(move |block_id| { + let storage = Arc::clone(&storage); + let block_key = block_id_to_key(&block_id); + let address = address; + + ProviderCall::BoxedFuture(Box::pin(async move { + let storage_guard = storage.read().await; + storage_guard.proofs_by_address.get(&(address, block_key)).cloned().ok_or_else( + || { + TransportError::from(TransportErrorKind::custom_str( + "No proof configured for this address and block. 
\ + Use with_proof() or set_proof() to set a response.", + )) + }, + ) + })) + }) + } + + async fn new_payload_v1(&self, _payload: ExecutionPayloadV1) -> TransportResult { + let storage = self.storage.read().await; + storage.new_payload_v1_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "new_payload_v1 was called but no v1 response configured. \ + Use with_new_payload_v1_response() or set_new_payload_v1_response() to set a response." + )) + }) + } + + async fn l2_block_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result>, EngineClientError> { + let storage = self.storage.read().await; + Ok(storage.l2_blocks_by_label.get(&numtag).cloned()) + } + + async fn l2_block_info_by_label( + &self, + numtag: BlockNumberOrTag, + ) -> Result, EngineClientError> { + let storage = self.storage.read().await; + Ok(storage.block_info_by_tag.get(&numtag).copied()) + } +} + +#[async_trait] +impl OpEngineApi> for MockEngineClient { + async fn new_payload_v2( + &self, + _payload: ExecutionPayloadInputV2, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.new_payload_v2_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "new_payload_v2 was called but no v2 response configured. \ + Use with_new_payload_v2_response() or set_new_payload_v2_response() to set a response." + )) + }) + } + + async fn new_payload_v3( + &self, + _payload: ExecutionPayloadV3, + _parent_beacon_block_root: B256, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.new_payload_v3_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "new_payload_v3 was called but no v3 response configured. \ + Use with_new_payload_v3_response() or set_new_payload_v3_response() to set a response." 
+ )) + }) + } + + async fn new_payload_v4( + &self, + _payload: OpExecutionPayloadV4, + _parent_beacon_block_root: B256, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.new_payload_v4_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "new_payload_v4 was called but no v4 response configured. \ + Use with_new_payload_v4_response() or set_new_payload_v4_response() to set a response." + )) + }) + } + + async fn fork_choice_updated_v2( + &self, + _fork_choice_state: ForkchoiceState, + _payload_attributes: Option, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.fork_choice_updated_v2_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "fork_choice_updated_v2 was called but no v2 response configured. \ + Use with_fork_choice_updated_v2_response() or set_fork_choice_updated_v2_response() to set a response." + )) + }) + } + + async fn fork_choice_updated_v3( + &self, + _fork_choice_state: ForkchoiceState, + _payload_attributes: Option, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.fork_choice_updated_v3_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "fork_choice_updated_v3 was called but no v3 response configured. \ + Use with_fork_choice_updated_v3_response() or set_fork_choice_updated_v3_response() to set a response." 
+ )) + }) + } + + async fn get_payload_v2( + &self, + _payload_id: PayloadId, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.execution_payload_v2.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "No execution payload v2 set in mock", + )) + }) + } + + async fn get_payload_v3( + &self, + _payload_id: PayloadId, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.execution_payload_v3.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "No execution payload v3 set in mock", + )) + }) + } + + async fn get_payload_v4( + &self, + _payload_id: PayloadId, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.execution_payload_v4.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "No execution payload v4 set in mock", + )) + }) + } + + async fn get_payload_bodies_by_hash_v1( + &self, + _block_hashes: Vec, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.get_payload_bodies_by_hash_v1_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "get_payload_bodies_by_hash_v1 was called but no response configured. \ + Use with_payload_bodies_by_hash_response() or set_payload_bodies_by_hash_response() to set a response." + )) + }) + } + + async fn get_payload_bodies_by_range_v1( + &self, + _start: u64, + _count: u64, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.get_payload_bodies_by_range_v1_response.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str( + "get_payload_bodies_by_range_v1 was called but no response configured. \ + Use with_payload_bodies_by_range_response() or set_payload_bodies_by_range_response() to set a response." 
+ )) + }) + } + + async fn get_client_version_v1( + &self, + _client_version: ClientVersionV1, + ) -> TransportResult> { + let storage = self.storage.read().await; + storage.client_versions.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str("No client versions set in mock")) + }) + } + + async fn signal_superchain_v1( + &self, + _recommended: ProtocolVersion, + _required: ProtocolVersion, + ) -> TransportResult { + let storage = self.storage.read().await; + storage.protocol_version.ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str("No protocol version set in mock")) + }) + } + + async fn exchange_capabilities( + &self, + _capabilities: Vec, + ) -> TransportResult> { + let storage = self.storage.read().await; + storage.capabilities.clone().ok_or_else(|| { + TransportError::from(TransportErrorKind::custom_str("No capabilities set in mock")) + }) + } +} + +/// Helper function to convert `BlockId` to a string key for `HashMap` storage. +/// This is necessary because `BlockId` doesn't implement Hash. 
+fn block_id_to_key(block_id: &BlockId) -> String { + match block_id { + BlockId::Hash(hash) => format!("hash:{}", hash.block_hash), + BlockId::Number(num) => format!("number:{num}"), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_rpc_types_engine::PayloadStatusEnum; + + #[tokio::test] + async fn test_mock_engine_client_creation() { + let cfg = Arc::new(RollupConfig::default()); + + let mock = MockEngineClient::new(cfg.clone()); + + // Verify the config was set correctly + assert_eq!(mock.cfg().block_time, cfg.block_time); + } + + #[tokio::test] + async fn test_mock_payload_status() { + let cfg = Arc::new(RollupConfig::default()); + + let mock = MockEngineClient::new(cfg); + + let status = + PayloadStatus { status: PayloadStatusEnum::Valid, latest_valid_hash: Some(B256::ZERO) }; + + mock.set_new_payload_v2_response(status.clone()).await; + + // Create a minimal ExecutionPayloadInputV2 for testing + use alloy_primitives::{Bytes, U256}; + use alloy_rpc_types_engine::ExecutionPayloadV1; + let payload = ExecutionPayloadInputV2 { + execution_payload: ExecutionPayloadV1 { + parent_hash: B256::ZERO, + fee_recipient: Default::default(), + state_root: B256::ZERO, + receipts_root: B256::ZERO, + logs_bloom: Default::default(), + prev_randao: B256::ZERO, + block_number: 0, + gas_limit: 0, + gas_used: 0, + timestamp: 0, + extra_data: Bytes::new(), + base_fee_per_gas: U256::ZERO, + block_hash: B256::ZERO, + transactions: vec![], + }, + withdrawals: None, + }; + + let result = mock.new_payload_v2(payload).await.unwrap(); + + assert_eq!(result.status, status.status); + } + + #[tokio::test] + async fn test_mock_forkchoice_updated() { + let cfg = Arc::new(RollupConfig::default()); + + let mock = MockEngineClient::new(cfg); + + let fcu = ForkchoiceUpdated { + payload_status: PayloadStatus { + status: PayloadStatusEnum::Valid, + latest_valid_hash: Some(B256::ZERO), + }, + payload_id: None, + }; + + mock.set_fork_choice_updated_v2_response(fcu.clone()).await; + + let 
result = mock.fork_choice_updated_v2(ForkchoiceState::default(), None).await.unwrap(); + + assert_eq!(result.payload_status.status, fcu.payload_status.status); + } + + #[tokio::test] + async fn test_builder_pattern() { + let cfg = Arc::new(RollupConfig::default()); + let status = + PayloadStatus { status: PayloadStatusEnum::Valid, latest_valid_hash: Some(B256::ZERO) }; + + let mock = MockEngineClient::builder() + .with_config(cfg.clone()) + .with_new_payload_v2_response(status.clone()) + .build(); + + // Verify the config was set + assert_eq!(mock.cfg().block_time, cfg.block_time); + + // Create a minimal ExecutionPayloadInputV2 for testing + use alloy_primitives::{Bytes, U256}; + use alloy_rpc_types_engine::ExecutionPayloadV1; + let payload = ExecutionPayloadInputV2 { + execution_payload: ExecutionPayloadV1 { + parent_hash: B256::ZERO, + fee_recipient: Default::default(), + state_root: B256::ZERO, + receipts_root: B256::ZERO, + logs_bloom: Default::default(), + prev_randao: B256::ZERO, + block_number: 0, + gas_limit: 0, + gas_used: 0, + timestamp: 0, + extra_data: Bytes::new(), + base_fee_per_gas: U256::ZERO, + block_hash: B256::ZERO, + transactions: vec![], + }, + withdrawals: None, + }; + + // Verify the pre-configured response is returned + let result = mock.new_payload_v2(payload).await.unwrap(); + assert_eq!(result.status, status.status); + } +} diff --git a/kona/crates/node/engine/src/test_utils/engine_state.rs b/rust/kona/crates/node/engine/src/test_utils/engine_state.rs similarity index 96% rename from kona/crates/node/engine/src/test_utils/engine_state.rs rename to rust/kona/crates/node/engine/src/test_utils/engine_state.rs index 214c9986721..20abf4fc8ec 100644 --- a/kona/crates/node/engine/src/test_utils/engine_state.rs +++ b/rust/kona/crates/node/engine/src/test_utils/engine_state.rs @@ -3,7 +3,7 @@ use alloy_eips::BlockNumHash; use alloy_primitives::{B256, b256}; use kona_protocol::{BlockInfo, L2BlockInfo}; -/// Builder for creating test EngineState 
instances with sensible defaults +/// Builder for creating test `EngineState` instances with sensible defaults #[derive(Debug)] pub struct TestEngineStateBuilder { unsafe_head: L2BlockInfo, @@ -71,7 +71,7 @@ impl TestEngineStateBuilder { self } - /// Builds the EngineState + /// Builds the `EngineState` pub fn build(self) -> EngineState { let mut state = EngineState::default(); diff --git a/kona/crates/node/engine/src/test_utils/misc.rs b/rust/kona/crates/node/engine/src/test_utils/misc.rs similarity index 85% rename from kona/crates/node/engine/src/test_utils/misc.rs rename to rust/kona/crates/node/engine/src/test_utils/misc.rs index f3e826f63a0..fd77bf7ccf4 100644 --- a/kona/crates/node/engine/src/test_utils/misc.rs +++ b/rust/kona/crates/node/engine/src/test_utils/misc.rs @@ -2,7 +2,7 @@ use alloy_eips::BlockNumHash; use alloy_primitives::B256; use kona_protocol::{BlockInfo, L2BlockInfo}; -/// Helper to create a test L2BlockInfo at a specific block number +/// Helper to create a test `L2BlockInfo` at a specific block number pub fn test_block_info(number: u64) -> L2BlockInfo { L2BlockInfo { block_info: BlockInfo { diff --git a/kona/crates/node/engine/src/test_utils/mod.rs b/rust/kona/crates/node/engine/src/test_utils/mod.rs similarity index 100% rename from kona/crates/node/engine/src/test_utils/mod.rs rename to rust/kona/crates/node/engine/src/test_utils/mod.rs diff --git a/rust/kona/crates/node/engine/src/test_utils/provider.rs b/rust/kona/crates/node/engine/src/test_utils/provider.rs new file mode 100644 index 00000000000..9cb92e31cd9 --- /dev/null +++ b/rust/kona/crates/node/engine/src/test_utils/provider.rs @@ -0,0 +1,34 @@ +use alloy_network::Ethereum; +use alloy_provider::Provider; +use async_trait::async_trait; +use op_alloy_network::Optimism; + +/// Mock L1 Provider that implements the Provider trait for testing. +/// +/// This is a minimal no-op provider that satisfies the trait bounds required +/// by [`MockEngineClient`](super::MockEngineClient). 
All provider methods return empty/default +/// values. +#[derive(Debug, Clone)] +pub struct MockL1Provider; + +#[async_trait] +impl Provider for MockL1Provider { + fn root(&self) -> &alloy_provider::RootProvider { + unimplemented!("MockL1Provider does not support root()") + } +} + +/// Mock L2 Provider that implements the Provider trait for Optimism network. +/// +/// This is a minimal no-op provider that satisfies the trait bounds required +/// by [`MockEngineClient`](super::MockEngineClient). All provider methods return empty/default +/// values. +#[derive(Debug, Clone)] +pub struct MockL2Provider; + +#[async_trait] +impl Provider for MockL2Provider { + fn root(&self) -> &alloy_provider::RootProvider { + unimplemented!("MockL2Provider does not support root()") + } +} diff --git a/kona/crates/node/engine/src/versions.rs b/rust/kona/crates/node/engine/src/versions.rs similarity index 100% rename from kona/crates/node/engine/src/versions.rs rename to rust/kona/crates/node/engine/src/versions.rs diff --git a/kona/crates/node/gossip/Cargo.toml b/rust/kona/crates/node/gossip/Cargo.toml similarity index 100% rename from kona/crates/node/gossip/Cargo.toml rename to rust/kona/crates/node/gossip/Cargo.toml diff --git a/kona/crates/node/gossip/src/behaviour.rs b/rust/kona/crates/node/gossip/src/behaviour.rs similarity index 98% rename from kona/crates/node/gossip/src/behaviour.rs rename to rust/kona/crates/node/gossip/src/behaviour.rs index 49869f031eb..bdae9b7ca02 100644 --- a/kona/crates/node/gossip/src/behaviour.rs +++ b/rust/kona/crates/node/gossip/src/behaviour.rs @@ -54,7 +54,7 @@ impl Behaviour { .map_err(|_| BehaviourError::GossipsubCreationFailed)?; let identify = libp2p::identify::Behaviour::new( - libp2p::identify::Config::new("".to_string(), public_key) + libp2p::identify::Config::new(String::new(), public_key) .with_agent_version("kona".to_string()), ); diff --git a/kona/crates/node/gossip/src/block_validity.rs 
b/rust/kona/crates/node/gossip/src/block_validity.rs similarity index 99% rename from kona/crates/node/gossip/src/block_validity.rs rename to rust/kona/crates/node/gossip/src/block_validity.rs index 49aa85f04c9..c6c78731dd9 100644 --- a/kona/crates/node/gossip/src/block_validity.rs +++ b/rust/kona/crates/node/gossip/src/block_validity.rs @@ -171,12 +171,12 @@ impl BlockHandler { BlockInvalidError::Signer { .. } => "invalid_signer", BlockInvalidError::TooManyBlocks { .. } => "too_many_blocks", BlockInvalidError::BlockSeen { .. } => "block_seen", - BlockInvalidError::InvalidBlock(_) => "invalid_block", + BlockInvalidError::InvalidBlock(_) | + BlockInvalidError::BaseFeePerGasOverflow(_) => "invalid_block", BlockInvalidError::ParentBeaconRoot => "parent_beacon_root", BlockInvalidError::BlobGasUsed => "blob_gas_used", BlockInvalidError::ExcessBlobGas => "excess_blob_gas", BlockInvalidError::WithdrawalsRoot => "withdrawals_root", - BlockInvalidError::BaseFeePerGasOverflow(_) => "invalid_block", }; kona_macros::inc!(counter, Metrics::BLOCK_VALIDATION_FAILED, "reason" => reason); } @@ -324,8 +324,7 @@ impl BlockHandler { } match &envelope.payload { - OpExecutionPayload::V1(_) => Ok(()), - OpExecutionPayload::V2(_) => Ok(()), + OpExecutionPayload::V1(_) | OpExecutionPayload::V2(_) => Ok(()), OpExecutionPayload::V3(payload) => { validate_v3(&self.rollup_config, payload, envelope.parent_beacon_block_root) } @@ -622,7 +621,7 @@ pub(crate) mod tests { }) .collect::>(); - for envelope in next_payloads[..next_payloads.len() - 1].iter() { + for envelope in &next_payloads[..next_payloads.len() - 1] { assert!(handler.block_valid(envelope).is_ok()); } diff --git a/rust/kona/crates/node/gossip/src/builder.rs b/rust/kona/crates/node/gossip/src/builder.rs new file mode 100644 index 00000000000..5a3e0aa1b51 --- /dev/null +++ b/rust/kona/crates/node/gossip/src/builder.rs @@ -0,0 +1,221 @@ +//! A builder for the [`GossipDriver`]. 
+ +use alloy_primitives::Address; +use kona_genesis::RollupConfig; +use kona_peers::{PeerMonitoring, PeerScoreLevel}; +use libp2p::{ + Multiaddr, StreamProtocol, SwarmBuilder, gossipsub::Config, identity::Keypair, + noise::Config as NoiseConfig, tcp::Config as TcpConfig, yamux::Config as YamuxConfig, +}; +use std::time::Duration; +use tokio::sync::watch; + +use crate::{Behaviour, BlockHandler, GaterConfig, GossipDriver, GossipDriverBuilderError}; + +/// A builder for the [`GossipDriver`]. +#[derive(Debug)] +pub struct GossipDriverBuilder { + /// The [`RollupConfig`] for the network. + rollup_config: RollupConfig, + /// The [`Keypair`] for the node. + keypair: Keypair, + /// The [`Multiaddr`] for the gossip driver to listen on. + gossip_addr: Multiaddr, + /// Unsafe block signer [`Address`]. + signer: Address, + /// The idle connection timeout as a [`Duration`]. + timeout: Option, + /// Sets the [`PeerScoreLevel`] for the [`Behaviour`]. + scoring: Option, + /// The [`Config`] for the [`Behaviour`]. + config: Option, + /// If set, the gossip layer will monitor peer scores and ban peers that are below a given + /// threshold. + peer_monitoring: Option, + /// The configuration for the connection gater. + gater_config: Option, + /// Topic scoring. Disabled by default. + topic_scoring: bool, +} + +impl GossipDriverBuilder { + /// Creates a new [`GossipDriverBuilder`]. + pub const fn new( + rollup_config: RollupConfig, + signer: Address, + gossip_addr: Multiaddr, + keypair: Keypair, + ) -> Self { + Self { + timeout: None, + keypair, + gossip_addr, + signer, + scoring: None, + config: None, + peer_monitoring: None, + gater_config: None, + rollup_config, + topic_scoring: false, + } + } + + /// Sets the configuration for the connection gater. + pub const fn with_gater_config(mut self, config: GaterConfig) -> Self { + self.gater_config = Some(config); + self + } + + /// Sets the [`RollupConfig`] for the network. + /// This is used to determine the topic to publish to. 
+ pub fn with_rollup_config(mut self, rollup_config: RollupConfig) -> Self { + self.rollup_config = rollup_config; + self + } + + /// Sets topic scoring. + /// This is disabled by default. + pub const fn with_topic_scoring(mut self, topic_scoring: bool) -> Self { + self.topic_scoring = topic_scoring; + self + } + + /// Sets the [`PeerScoreLevel`] for the [`Behaviour`]. + pub const fn with_peer_scoring(mut self, level: PeerScoreLevel) -> Self { + self.scoring = Some(level); + self + } + + /// Sets the [`PeerMonitoring`] configuration for the gossip driver. + pub const fn with_peer_monitoring(mut self, peer_monitoring: Option) -> Self { + self.peer_monitoring = peer_monitoring; + self + } + + /// Sets the unsafe block signer [`Address`]. + pub const fn with_unsafe_block_signer_receiver(mut self, signer: Address) -> Self { + self.signer = signer; + self + } + + /// Sets the [`Keypair`] for the node. + pub fn with_keypair(mut self, keypair: Keypair) -> Self { + self.keypair = keypair; + self + } + + /// Sets the swarm's idle connection timeout. + pub const fn with_timeout(mut self, timeout: Duration) -> Self { + self.timeout = Some(timeout); + self + } + + /// Sets the [`Multiaddr`] for the gossip driver to listen on. + pub fn with_address(mut self, addr: Multiaddr) -> Self { + self.gossip_addr = addr; + self + } + + /// Sets the [`Config`] for the [`Behaviour`]. + pub fn with_config(mut self, config: Config) -> Self { + self.config = Some(config); + self + } + + /// Builds the [`GossipDriver`]. + pub fn build( + mut self, + ) -> Result< + (GossipDriver, watch::Sender
), + GossipDriverBuilderError, + > { + // Extract builder arguments + let timeout = self.timeout.take().unwrap_or(Duration::from_secs(60)); + let keypair = self.keypair; + let addr = self.gossip_addr; + let signer_recv = self.signer; + let rollup_config = self.rollup_config; + let l2_chain_id = rollup_config.l2_chain_id; + let block_time = rollup_config.block_time; + + let (signer_tx, signer_rx) = watch::channel(signer_recv); + + // Block Handler setup + let handler = BlockHandler::new(rollup_config, signer_rx); + + // Construct the gossip behaviour + let config = self.config.unwrap_or_else(crate::default_config); + info!( + target: "gossip", + "CONFIG: [Mesh D: {}] [Mesh L: {}] [Mesh H: {}] [Gossip Lazy: {}] [Flood Publish: {}]", + config.mesh_n(), + config.mesh_n_low(), + config.mesh_n_high(), + config.gossip_lazy(), + config.flood_publish() + ); + info!( + target: "gossip", + "CONFIG: [Heartbeat: {}] [Floodsub: {}] [Validation: {:?}] [Max Transmit: {} bytes]", + config.heartbeat_interval().as_secs(), + config.support_floodsub(), + config.validation_mode(), + config.max_transmit_size() + ); + let mut behaviour = Behaviour::new(keypair.public(), config, &[Box::new(handler.clone())])?; + + // If peer scoring is configured, set it on the behaviour. + match self.scoring { + None => info!(target: "scoring", "Peer scoring not enabled"), + Some(PeerScoreLevel::Off) => { + info!(target: "scoring", level = ?PeerScoreLevel::Off, "Peer scoring explicitly disabled") + } + Some(level) => { + use crate::handler::Handler; + let params = level + .to_params(handler.topics(), self.topic_scoring, block_time) + .unwrap_or_default(); + match behaviour.gossipsub.with_peer_score(params, PeerScoreLevel::thresholds()) { + Ok(_) => debug!(target: "scoring", "Peer scoring enabled successfully"), + Err(e) => warn!(target: "scoring", "Peer scoring failed: {}", e), + } + } + } + + // Let's setup the sync request/response protocol stream. 
+ let mut sync_handler = behaviour.sync_req_resp.new_control(); + + let protocol = format!("/opstack/req/payload_by_number/{l2_chain_id}/0/"); + let sync_protocol_name = StreamProtocol::try_from_owned(protocol) + .map_err(|_| GossipDriverBuilderError::SetupSyncReqRespError)?; + let sync_protocol = sync_handler + .accept(sync_protocol_name) + .map_err(|_| GossipDriverBuilderError::SyncReqRespAlreadyAccepted)?; + + // Build the swarm with DNS+TCP transport. + // Note: with_dns() must be called after with_tcp() to wrap TCP with DNS resolution. + debug!(target: "gossip", "Building Swarm with Peer ID: {}", keypair.public().to_peer_id()); + let swarm = SwarmBuilder::with_existing_identity(keypair) + .with_tokio() + .with_tcp( + TcpConfig::default().nodelay(true), + |i: &Keypair| { + debug!(target: "gossip", "Noise Config Peer ID: {}", i.public().to_peer_id()); + NoiseConfig::new(i) + }, + YamuxConfig::default, + ) + .map_err(|_| GossipDriverBuilderError::TcpError)? + .with_dns() + .map_err(|_| GossipDriverBuilderError::TcpError)? + .with_behaviour(|_| behaviour) + .map_err(|_| GossipDriverBuilderError::WithBehaviourError)? + .with_swarm_config(|c| c.with_idle_connection_timeout(timeout)) + .build(); + + let gater_config = self.gater_config.take().unwrap_or_default(); + let gate = crate::ConnectionGater::new(gater_config); + + Ok((GossipDriver::new(swarm, addr, handler, sync_handler, sync_protocol, gate), signer_tx)) + } +} diff --git a/rust/kona/crates/node/gossip/src/config.rs b/rust/kona/crates/node/gossip/src/config.rs new file mode 100644 index 00000000000..3b33f20486c --- /dev/null +++ b/rust/kona/crates/node/gossip/src/config.rs @@ -0,0 +1,164 @@ +//! 
Gossipsub Config + +use lazy_static::lazy_static; +use libp2p::gossipsub::{Config, ConfigBuilder, Message, MessageId}; +use openssl::sha::sha256; +use snap::raw::Decoder; +use std::time::Duration; + +//////////////////////////////////////////////////////////////////////////////////////////////// +// GossipSub Constants +//////////////////////////////////////////////////////////////////////////////////////////////// + +/// The maximum gossip size. +/// Limits the total size of gossip RPC containers as well as decompressed individual messages. +pub const MAX_GOSSIP_SIZE: usize = 10 * (1 << 20); + +/// The minimum gossip size. +/// Used to make sure that there is at least some data to validate the signature against. +pub const MIN_GOSSIP_SIZE: usize = 66; + +/// The maximum outbound queue. +pub const MAX_OUTBOUND_QUEUE: usize = 256; + +/// The maximum validate queue. +pub const MAX_VALIDATE_QUEUE: usize = 256; + +/// The global validate throttle. +pub const GLOBAL_VALIDATE_THROTTLE: usize = 512; + +/// The default mesh D. +pub const DEFAULT_MESH_D: usize = 8; + +/// The default mesh D low. +pub const DEFAULT_MESH_DLO: usize = 6; + +/// The default mesh D high. +pub const DEFAULT_MESH_DHI: usize = 12; + +/// The default mesh D lazy. +pub const DEFAULT_MESH_DLAZY: usize = 6; + +//////////////////////////////////////////////////////////////////////////////////////////////// +// Duration Constants +//////////////////////////////////////////////////////////////////////////////////////////////// + +lazy_static! { + /// The gossip heartbeat. + pub static ref GOSSIP_HEARTBEAT: Duration = Duration::from_millis(500); + + /// The seen messages TTL. + /// Limits the duration that message IDs are remembered for gossip deduplication purposes. + pub static ref SEEN_MESSAGES_TTL: Duration = 130 * *GOSSIP_HEARTBEAT; + + /// The peer score inspect frequency. + /// The frequency at which peer scores are inspected. 
+ pub static ref PEER_SCORE_INSPECT_FREQUENCY: Duration = 15 * Duration::from_secs(1); +} + +//////////////////////////////////////////////////////////////////////////////////////////////// +// Config Building +//////////////////////////////////////////////////////////////////////////////////////////////// + +/// Builds the default gossipsub configuration. +/// +/// Notable defaults: +/// - `flood_publish`: false (call `.flood_publish(true)` on the [`ConfigBuilder`] to enable) +/// - `backoff_slack`: 1 +/// - heart beat interval: 1 second +/// - peer exchange is disabled +/// - maximum byte size for gossip messages: 2048 bytes +/// +/// # Returns +/// +/// A [`ConfigBuilder`] with the default gossipsub configuration already set. +/// Call `.build()` on the returned builder to get the final [`libp2p::gossipsub::Config`]. +pub fn default_config_builder() -> ConfigBuilder { + let mut builder = ConfigBuilder::default(); + builder + .mesh_n(DEFAULT_MESH_D) + .mesh_n_low(DEFAULT_MESH_DLO) + .mesh_n_high(DEFAULT_MESH_DHI) + .gossip_lazy(DEFAULT_MESH_DLAZY) + .heartbeat_interval(*GOSSIP_HEARTBEAT) + .fanout_ttl(Duration::from_secs(60)) + .history_length(12) + .history_gossip(3) + .flood_publish(false) + .support_floodsub() + .max_transmit_size(MAX_GOSSIP_SIZE) + .duplicate_cache_time(Duration::from_secs(120)) + .validation_mode(libp2p::gossipsub::ValidationMode::None) + .validate_messages() + .message_id_fn(compute_message_id); + + builder +} + +/// Returns the default [Config] for gossipsub. +pub fn default_config() -> Config { + default_config_builder().build().expect("default gossipsub config must be valid") +} + +/// Computes the [`MessageId`] of a `gossipsub` message. 
+fn compute_message_id(msg: &Message) -> MessageId { + let mut decoder = Decoder::new(); + let id = decoder.decompress_vec(&msg.data).map_or_else( + |_| { + warn!(target: "cfg", "Failed to decompress message, using invalid snappy"); + let domain_invalid_snappy: Vec = vec![0x0, 0x0, 0x0, 0x0]; + sha256([domain_invalid_snappy.as_slice(), msg.data.as_slice()].concat().as_slice()) + [..20] + .to_vec() + }, + |data| { + let domain_valid_snappy: Vec = vec![0x1, 0x0, 0x0, 0x0]; + sha256([domain_valid_snappy.as_slice(), data.as_slice()].concat().as_slice())[..20] + .to_vec() + }, + ); + + MessageId(id) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_constructs_default_config() { + let cfg = default_config(); + assert_eq!(cfg.mesh_n(), DEFAULT_MESH_D); + assert_eq!(cfg.mesh_n_low(), DEFAULT_MESH_DLO); + assert_eq!(cfg.mesh_n_high(), DEFAULT_MESH_DHI); + } + + #[test] + fn test_compute_message_id_invalid_snappy() { + let msg = Message { + source: None, + data: vec![1, 2, 3, 4, 5], + sequence_number: None, + topic: libp2p::gossipsub::TopicHash::from_raw("test"), + }; + + let id = compute_message_id(&msg); + let hashed = sha256(&[&[0x0, 0x0, 0x0, 0x0], [1, 2, 3, 4, 5].as_slice()].concat()); + assert_eq!(id.0, hashed[..20].to_vec()); + } + + #[test] + fn test_compute_message_id_valid_snappy() { + let compressed = snap::raw::Encoder::new().compress_vec(&[1, 2, 3, 4, 5]).unwrap(); + let msg = Message { + source: None, + data: compressed, + sequence_number: None, + topic: libp2p::gossipsub::TopicHash::from_raw("test"), + }; + + let id = compute_message_id(&msg); + let hashed = sha256(&[&[0x1, 0x0, 0x0, 0x0], [1, 2, 3, 4, 5].as_slice()].concat()); + assert_eq!(id.0, hashed[..20].to_vec()); + } +} diff --git a/rust/kona/crates/node/gossip/src/driver.rs b/rust/kona/crates/node/gossip/src/driver.rs new file mode 100644 index 00000000000..6079a032bfb --- /dev/null +++ b/rust/kona/crates/node/gossip/src/driver.rs @@ -0,0 +1,492 @@ +//! 
Consensus-layer gossipsub driver for Optimism. + +use alloy_primitives::{Address, hex}; +use derive_more::Debug; +use discv5::Enr; +use futures::{AsyncReadExt, AsyncWriteExt, stream::StreamExt}; +use kona_genesis::RollupConfig; +use kona_peers::{EnrValidation, PeerMonitoring, enr_to_multiaddr}; +use libp2p::{ + Multiaddr, PeerId, Swarm, TransportError, + gossipsub::{IdentTopic, MessageId}, + swarm::SwarmEvent, +}; +use libp2p_identity::Keypair; +use libp2p_stream::IncomingStreams; +use op_alloy_rpc_types_engine::OpNetworkPayloadEnvelope; +use std::{ + collections::HashMap, + sync::Arc, + time::{Duration, Instant}, +}; +use tokio::sync::Mutex; + +use crate::{ + Behaviour, BlockHandler, ConnectionGate, ConnectionGater, Event, GossipDriverBuilder, Handler, + PublishError, +}; + +/// A driver for a [`Swarm`] instance. +/// +/// Connects the swarm to the given [`Multiaddr`] +/// and handles events using the [`BlockHandler`]. +#[derive(Debug)] +pub struct GossipDriver { + /// The [`Swarm`] instance. + #[debug(skip)] + pub swarm: Swarm, + /// A [`Multiaddr`] to listen on. + pub addr: Multiaddr, + /// The [`BlockHandler`]. + pub handler: BlockHandler, + /// A [`libp2p_stream::Control`] instance. Can be used to control the sync request/response + #[debug(skip)] + pub sync_handler: libp2p_stream::Control, + /// The inbound streams for the sync request/response protocol. + /// + /// This is an option to allow to take the underlying value when the gossip driver gets + /// activated. + /// + /// TODO(op-rs/kona#2141): remove the sync-req-resp protocol once the `op-node` + /// phases it out. + #[debug(skip)] + pub sync_protocol: Option, + /// A mapping from [`PeerId`] to [`Multiaddr`]. + pub peerstore: HashMap, + /// If set, the gossip layer will monitor peer scores and ban peers that are below a given + /// threshold. + pub peer_monitoring: Option, + /// Tracks connection start time for peers + pub peer_connection_start: HashMap, + /// The connection gate. 
+ pub connection_gate: G, + /// Tracks ping times for peers. + pub ping: Arc>>, +} + +impl GossipDriver +where + G: ConnectionGate, +{ + /// Returns the [`GossipDriverBuilder`] that can be used to construct the [`GossipDriver`]. + pub const fn builder( + rollup_config: RollupConfig, + signer: Address, + gossip_addr: Multiaddr, + keypair: Keypair, + ) -> GossipDriverBuilder { + GossipDriverBuilder::new(rollup_config, signer, gossip_addr, keypair) + } + + /// Creates a new [`GossipDriver`] instance. + pub fn new( + swarm: Swarm, + addr: Multiaddr, + handler: BlockHandler, + sync_handler: libp2p_stream::Control, + sync_protocol: IncomingStreams, + gate: G, + ) -> Self { + Self { + swarm, + addr, + handler, + peerstore: Default::default(), + peer_monitoring: None, + peer_connection_start: Default::default(), + sync_handler, + sync_protocol: Some(sync_protocol), + connection_gate: gate, + ping: Arc::new(Mutex::new(Default::default())), + } + } + + /// Publishes an unsafe block to gossip. + /// + /// ## Arguments + /// + /// * `topic_selector` - A function that selects the topic for the block. This is expected to be + /// a closure that takes the [`BlockHandler`] and returns the [`IdentTopic`] for the block. + /// * `payload` - The payload to be published. + /// + /// ## Returns + /// + /// Returns the [`MessageId`] of the published message or a [`PublishError`] + /// if the message could not be published. + pub fn publish( + &mut self, + selector: impl FnOnce(&BlockHandler) -> IdentTopic, + payload: Option, + ) -> Result, PublishError> { + let Some(payload) = payload else { + return Ok(None); + }; + let topic = selector(&self.handler); + let topic_hash = topic.hash(); + let data = self.handler.encode(topic, payload)?; + let id = self.swarm.behaviour_mut().gossipsub.publish(topic_hash, data)?; + kona_macros::inc!(gauge, crate::Metrics::UNSAFE_BLOCK_PUBLISHED); + Ok(Some(id)) + } + + /// Handles the sync request/response protocol. 
+ /// + /// This is a mock handler that supports the `payload_by_number` protocol. + /// It always returns: not found (1), version (0). `` + /// + /// ## Note + /// + /// This is used to ensure op-nodes are not penalizing kona-nodes for not supporting it. + /// This feature is being deprecated by the op-node team. Once it is fully removed from the + /// op-node's implementation we will remove this handler. + pub(super) fn sync_protocol_handler(&mut self) { + let Some(mut sync_protocol) = self.sync_protocol.take() else { + return; + }; + + // Spawn a new task to handle the sync request/response protocol. + tokio::spawn(async move { + loop { + let Some((peer_id, mut inbound_stream)) = sync_protocol.next().await else { + warn!(target: "gossip", "The sync protocol stream has ended"); + return; + }; + + info!(target: "gossip", "Received a sync request from {peer_id}, spawning a new task to handle it"); + + tokio::spawn(async move { + let mut buffer = Vec::new(); + let Ok(bytes_received) = inbound_stream.read_to_end(&mut buffer).await else { + error!(target: "gossip", "Failed to read the sync request from {peer_id}"); + return; + }; + + debug!(target: "gossip", bytes_received = bytes_received, peer_id = ?peer_id, payload = ?buffer, "Received inbound sync request"); + + // We return: not found (1), version (0). `` + // Response format: = + // No payload is returned. + const OUTPUT: [u8; 2] = hex!("0100"); + + // We only write that we're not supporting the sync request. + if let Err(e) = inbound_stream.write_all(&OUTPUT).await { + error!(target: "gossip", err = ?e, "Failed to write the sync response to {peer_id}"); + return; + }; + + debug!(target: "gossip", bytes_sent = OUTPUT.len(), peer_id = ?peer_id, "Sent outbound sync response"); + }); + } + }); + } + + /// Starts the libp2p Swarm. + /// + /// - Starts the sync request/response protocol handler. + /// - Tells the swarm to listen on the given [`Multiaddr`]. 
+ /// + /// Waits for the swarm to start listen before returning and connecting to peers. + pub async fn start(&mut self) -> Result> { + // Start the sync request/response protocol handler. + self.sync_protocol_handler(); + + match self.swarm.listen_on(self.addr.clone()) { + Ok(id) => loop { + if let SwarmEvent::NewListenAddr { address, listener_id } = + self.swarm.select_next_some().await && + id == listener_id + { + info!(target: "gossip", "Swarm now listening on: {address}"); + + self.addr = address.clone(); + + return Ok(address); + } + }, + Err(err) => { + error!(target: "gossip", "Fail to listen on {}: {err}", self.addr); + Err(err) + } + } + } + + /// Returns the local peer id. + pub fn local_peer_id(&self) -> &libp2p::PeerId { + self.swarm.local_peer_id() + } + + /// Returns a mutable reference to the Swarm's behaviour. + pub fn behaviour_mut(&mut self) -> &mut Behaviour { + self.swarm.behaviour_mut() + } + + /// Attempts to select the next event from the Swarm. + pub async fn next(&mut self) -> Option> { + self.swarm.next().await + } + + /// Returns the number of connected peers. + pub fn connected_peers(&self) -> usize { + self.swarm.connected_peers().count() + } + + /// Dials the given [`Enr`]. + pub fn dial(&mut self, enr: Enr) { + let validation = EnrValidation::validate(&enr, self.handler.rollup_config.l2_chain_id.id()); + if validation.is_invalid() { + trace!(target: "gossip", "Invalid OP Stack ENR for chain id {}: {}", self.handler.rollup_config.l2_chain_id.id(), validation); + return; + } + let Some(multiaddr) = enr_to_multiaddr(&enr) else { + debug!(target: "gossip", "Failed to extract tcp socket from enr: {:?}", enr); + kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "invalid_enr"); + return; + }; + self.dial_multiaddr(multiaddr); + } + + /// Dials the given [`Multiaddr`]. + pub fn dial_multiaddr(&mut self, addr: Multiaddr) { + // Check if we're allowed to dial the address. 
+ if let Err(dial_error) = self.connection_gate.can_dial(&addr) { + debug!(target: "gossip", ?dial_error, "unable to dial peer"); + return; + } + + // Extract the peer ID from the address. + let Some(peer_id) = ConnectionGater::peer_id_from_addr(&addr) else { + warn!(target: "gossip", peer=?addr, "Failed to extract PeerId from Multiaddr"); + return; + }; + + if self.swarm.connected_peers().any(|p| p == &peer_id) { + debug!(target: "gossip", peer=?addr, "Already connected to peer, not dialing"); + kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "already_connected", "peer" => peer_id.to_string()); + return; + } + + // Let the gate know we are dialing the address. + // Note: libp2p-dns will automatically resolve DNS multiaddrs at the transport layer. + self.connection_gate.dialing(&addr); + + // Dial + match self.swarm.dial(addr.clone()) { + Ok(_) => { + trace!(target: "gossip", peer=?addr, "Dialed peer"); + self.connection_gate.dialed(&addr); + kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER, "peer" => peer_id.to_string()); + } + Err(e) => { + error!(target: "gossip", "Failed to connect to peer: {:?}", e); + self.connection_gate.remove_dial(&peer_id); + kona_macros::inc!(gauge, crate::Metrics::DIAL_PEER_ERROR, "type" => "connection_error", "error" => e.to_string(), "peer" => peer_id.to_string()); + } + } + } + + fn handle_gossip_event(&mut self, event: Event) -> Option { + match event { + Event::Gossipsub(e) => return self.handle_gossipsub_event(*e), + Event::Ping(libp2p::ping::Event { peer, result, .. }) => { + trace!(target: "gossip", ?peer, ?result, "Ping received"); + + // If the peer is connected to gossip, record the connection duration. + if let Some(start_time) = self.peer_connection_start.get(&peer) { + let _ping_duration = start_time.elapsed(); + kona_macros::record!( + histogram, + crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, + _ping_duration.as_secs_f64() + ); + } + + // Record the peer score in the metrics if available. 
+ if let Some(_peer_score) = self.behaviour_mut().gossipsub.peer_score(&peer) { + kona_macros::record!( + histogram, + crate::Metrics::PEER_SCORES, + "peer", + peer.to_string(), + _peer_score + ); + } + + let pings = Arc::clone(&self.ping); + tokio::spawn(async move { + if let Ok(time) = result { + pings.lock().await.insert(peer, time); + } + }); + } + Event::Identify(e) => self.handle_identify_event(*e), + // Don't do anything with stream events as this should be unreachable code. + Event::Stream => { + error!(target: "gossip", "Stream events should not be emitted!"); + } + }; + + None + } + + fn handle_identify_event(&mut self, event: libp2p::identify::Event) { + match event { + libp2p::identify::Event::Received { connection_id, peer_id, info } => { + debug!(target: "gossip", ?connection_id, ?peer_id, ?info, "Received identify info from peer"); + self.peerstore.insert(peer_id, info); + } + libp2p::identify::Event::Sent { connection_id, peer_id } => { + debug!(target: "gossip", ?connection_id, ?peer_id, "Sent identify info to peer"); + } + libp2p::identify::Event::Pushed { connection_id, peer_id, info } => { + debug!(target: "gossip", ?connection_id, ?peer_id, ?info, "Pushed identify info to peer"); + } + libp2p::identify::Event::Error { connection_id, peer_id, error } => { + error!(target: "gossip", ?connection_id, ?peer_id, ?error, "Error raised while attempting to identify remote"); + } + } + } + + /// Handles a [`libp2p::gossipsub::Event`]. 
+ fn handle_gossipsub_event( + &mut self, + event: libp2p::gossipsub::Event, + ) -> Option { + match event { + libp2p::gossipsub::Event::Message { + propagation_source: src, + message_id: id, + message, + } => { + trace!(target: "gossip", "Received message with topic: {}", message.topic); + kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "message", "topic" => message.topic.to_string()); + if self.handler.topics().contains(&message.topic) { + let (status, payload) = self.handler.handle(message); + _ = self + .swarm + .behaviour_mut() + .gossipsub + .report_message_validation_result(&id, &src, status); + return payload; + } + } + libp2p::gossipsub::Event::Subscribed { peer_id, topic } => { + trace!(target: "gossip", "Peer: {:?} subscribed to topic: {:?}", peer_id, topic); + kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "subscribed", "topic" => topic.to_string()); + } + libp2p::gossipsub::Event::Unsubscribed { peer_id, topic } => { + trace!(target: "gossip", "Peer: {:?} unsubscribed from topic: {:?}", peer_id, topic); + kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "unsubscribed", "topic" => topic.to_string()); + } + libp2p::gossipsub::Event::SlowPeer { peer_id, .. } => { + trace!(target: "gossip", "Slow peer: {:?}", peer_id); + kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "slow_peer", "peer" => peer_id.to_string()); + } + libp2p::gossipsub::Event::GossipsubNotSupported { peer_id } => { + trace!(target: "gossip", "Peer: {:?} does not support gossipsub", peer_id); + kona_macros::inc!(gauge, crate::Metrics::GOSSIP_EVENT, "type" => "not_supported", "peer" => peer_id.to_string()); + } + } + None + } + + /// Handles the [`SwarmEvent`]. + pub fn handle_event(&mut self, event: SwarmEvent) -> Option { + match event { + SwarmEvent::Behaviour(behavior_event) => { + return self.handle_gossip_event(behavior_event); + } + SwarmEvent::ConnectionEstablished { peer_id, .. 
} => { + let peer_count = self.swarm.connected_peers().count(); + info!(target: "gossip", "Connection established: {:?} | Peer Count: {}", peer_id, peer_count); + kona_macros::inc!( + gauge, + crate::Metrics::GOSSIPSUB_CONNECTION, + "type" => "connected", + "peer" => peer_id.to_string(), + ); + kona_macros::set!(gauge, crate::Metrics::GOSSIP_PEER_COUNT, peer_count as f64); + + self.peer_connection_start.insert(peer_id, Instant::now()); + } + SwarmEvent::OutgoingConnectionError { peer_id: _peer_id, error, .. } => { + debug!(target: "gossip", "Outgoing connection error: {:?}", error); + // Remove the peer from current_dials so it can be dialed again + if let Some(peer_id) = _peer_id { + self.connection_gate.remove_dial(&peer_id); + } + kona_macros::inc!( + gauge, + crate::Metrics::GOSSIPSUB_CONNECTION, + "type" => "outgoing_error", + "peer" => _peer_id.map(|p| p.to_string()).unwrap_or_default() + ); + } + SwarmEvent::IncomingConnectionError { + error, connection_id: _connection_id, .. + } => { + debug!(target: "gossip", "Incoming connection error: {:?}", error); + kona_macros::inc!( + gauge, + crate::Metrics::GOSSIPSUB_CONNECTION, + "type" => "incoming_error", + "connection_id" => _connection_id.to_string() + ); + } + SwarmEvent::ConnectionClosed { peer_id, cause, .. } => { + let peer_count = self.swarm.connected_peers().count(); + warn!(target: "gossip", ?peer_id, ?cause, peer_count, "Connection closed"); + kona_macros::inc!( + gauge, + crate::Metrics::GOSSIPSUB_CONNECTION, + "type" => "closed", + "peer" => peer_id.to_string() + ); + kona_macros::set!(gauge, crate::Metrics::GOSSIP_PEER_COUNT, peer_count as f64); + + // Record the total connection duration. + if let Some(start_time) = self.peer_connection_start.remove(&peer_id) { + let _peer_duration = start_time.elapsed(); + kona_macros::record!( + histogram, + crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, + _peer_duration.as_secs_f64() + ); + } + + // Record the peer score in the metrics if available. 
+ if let Some(_peer_score) = self.behaviour_mut().gossipsub.peer_score(&peer_id) { + kona_macros::record!( + histogram, + crate::Metrics::PEER_SCORES, + "peer", + peer_id.to_string(), + _peer_score + ); + } + + let pings = Arc::clone(&self.ping); + tokio::spawn(async move { + pings.lock().await.remove(&peer_id); + }); + + // If the connection was initiated by us, remove the peer from the current dials + // set so that we can dial it again. + self.connection_gate.remove_dial(&peer_id); + } + SwarmEvent::NewListenAddr { listener_id, address } => { + debug!(target: "gossip", reporter_id = ?listener_id, new_address = ?address, "New listen address"); + } + SwarmEvent::Dialing { peer_id, connection_id } => { + debug!(target: "gossip", ?peer_id, ?connection_id, "Dialing peer"); + } + SwarmEvent::NewExternalAddrOfPeer { peer_id, address } => { + debug!(target: "gossip", ?peer_id, ?address, "New external address of peer"); + } + _ => { + debug!(target: "gossip", ?event, "Ignoring non-behaviour in event handler"); + } + }; + + None + } +} diff --git a/rust/kona/crates/node/gossip/src/error.rs b/rust/kona/crates/node/gossip/src/error.rs new file mode 100644 index 00000000000..99cd64e6df8 --- /dev/null +++ b/rust/kona/crates/node/gossip/src/error.rs @@ -0,0 +1,117 @@ +//! Error types for the gossip networking module. + +use crate::BehaviourError; +use derive_more::From; +use libp2p::{Multiaddr, PeerId}; +use std::net::IpAddr; +use thiserror::Error; + +/// Error encountered when publishing a payload to the gossip network. +/// +/// Represents failures in the payload publishing pipeline, including +/// network-level publishing errors and payload encoding issues. +#[derive(Debug, Error)] +pub enum PublishError { + /// Failed to publish the payload via `GossipSub` protocol. + /// + /// This can occur due to network connectivity issues, mesh topology + /// problems, or protocol-level errors in the libp2p stack. 
+ #[error("Failed to publish payload: {0}")] + PublishError(#[from] libp2p::gossipsub::PublishError), + + /// Failed to encode the payload before publishing. + /// + /// Indicates an issue with serializing the payload data structure + /// into the binary format expected by the network protocol. + #[error("Failed to encode payload: {0}")] + EncodeError(#[from] HandlerEncodeError), +} + +/// Error encountered when encoding payloads in the block handler. +/// +/// Represents failures in the payload serialization process, typically +/// occurring when converting OP Stack data structures to network format. +#[derive(Debug, Error)] +pub enum HandlerEncodeError { + /// Failed to encode the OP Stack payload envelope. + /// + /// This error indicates issues with serializing the OP Stack network payload + /// structure, which contains the consensus data being gossiped. + #[error("Failed to encode payload: {0}")] + PayloadEncodeError(#[from] op_alloy_rpc_types_engine::PayloadEnvelopeEncodeError), + + /// Attempted to publish to an unknown or unsubscribed topic. + /// + /// This error occurs when trying to publish to a `GossipSub` topic that + /// is not recognized or that the node is not subscribed to. + #[error("Unknown topic: {0}")] + UnknownTopic(libp2p::gossipsub::TopicHash), +} + +/// An error type for the [`crate::GossipDriverBuilder`]. +#[derive(Debug, Clone, PartialEq, Eq, From, Error)] +pub enum GossipDriverBuilderError { + /// A TCP error. + #[error("TCP error")] + TcpError, + /// An error when setting the behaviour on the swarm builder. + #[error("error setting behaviour on swarm builder")] + WithBehaviourError, + /// An error when building the gossip behaviour. + #[error("error building gossip behaviour")] + BehaviourError(BehaviourError), + /// An error when setting up the sync request/response protocol. + #[error("error setting up sync request/response protocol")] + SetupSyncReqRespError, + /// The sync request/response protocol has already been accepted. 
+ #[error("sync request/response protocol already accepted")] + SyncReqRespAlreadyAccepted, +} + +/// An error type representing reasons why a peer cannot be dialed. +#[derive(Debug, Clone, Error)] +pub enum DialError { + /// Failed to extract `PeerId` from Multiaddr. + #[error("Failed to extract PeerId from Multiaddr: {addr}")] + InvalidMultiaddr { + /// The multiaddress that failed to be parsed or does not contain a valid `PeerId` + /// component + addr: Multiaddr, + }, + /// Already dialing this peer. + #[error("Already dialing peer: {peer_id}")] + AlreadyDialing { + /// The `PeerId` of the peer that is already being dialed + peer_id: PeerId, + }, + /// Dial threshold reached for this peer. + #[error("Dial threshold reached for peer: {addr}")] + ThresholdReached { + /// The multiaddress of the peer that has reached the maximum dial attempts + addr: Multiaddr, + }, + /// Peer is blocked. + #[error("Peer is blocked: {peer_id}")] + PeerBlocked { + /// The `PeerId` of the peer that is on the blocklist + peer_id: PeerId, + }, + /// Failed to extract IP address from Multiaddr. + #[error("Failed to extract IP address from Multiaddr: {addr}")] + InvalidIpAddress { + /// The multiaddress that does not contain a valid IP address component + addr: Multiaddr, + }, + /// IP address is blocked. + #[error("IP address is blocked: {ip}")] + AddressBlocked { + /// The IP address that is on the blocklist + ip: IpAddr, + }, + /// IP address is in a blocked subnet. + #[error("IP address {ip} is in a blocked subnet")] + SubnetBlocked { + /// The IP address that belongs to a blocked subnet range + ip: IpAddr, + }, +} diff --git a/rust/kona/crates/node/gossip/src/event.rs b/rust/kona/crates/node/gossip/src/event.rs new file mode 100644 index 00000000000..ee3dff9d66f --- /dev/null +++ b/rust/kona/crates/node/gossip/src/event.rs @@ -0,0 +1,109 @@ +//! Event Handling Module. + +use libp2p::{gossipsub, identify, ping}; + +/// High-level events emitted by the gossip networking system. 
+/// +/// This enum wraps the various low-level libp2p events into a unified +/// event type that can be handled by the application layer. Events are +/// generated by the underlying libp2p protocols and bubble up through +/// the networking stack. +#[derive(Debug)] +pub enum Event { + /// Network connectivity check event from the ping protocol. + /// + /// Used to verify peer connectivity and measure round-trip times. + #[allow(dead_code)] + Ping(ping::Event), + + /// `GossipSub` mesh networking event. + /// + /// Includes message reception, peer subscription changes, and mesh + /// topology updates. This is the primary event type for consensus + /// layer networking. + Gossipsub(Box), + + /// Peer identification protocol event. + /// + /// Contains information about peer capabilities, supported protocols, + /// and network identity. Used for protocol negotiation and compatibility + /// checking. + Identify(Box), + + /// Stream protocol event for request-response communication. + /// + /// Handles direct peer-to-peer communication outside of the gossip mesh, + /// typically used for block synchronization requests. 
+ Stream, +} + +impl From for Event { + /// Converts [`ping::Event`] to [Event] + fn from(value: ping::Event) -> Self { + Self::Ping(value) + } +} + +impl From for Event { + /// Converts [`gossipsub::Event`] to [Event] + fn from(value: gossipsub::Event) -> Self { + Self::Gossipsub(Box::new(value)) + } +} + +impl From for Event { + /// Converts [`identify::Event`] to [Event] + fn from(value: identify::Event) -> Self { + Self::Identify(Box::new(value)) + } +} + +impl From<()> for Event { + /// Converts () to [Event] + fn from(_value: ()) -> Self { + Self::Stream + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_event_conversion() { + let gossipsub_event = libp2p::gossipsub::Event::Message { + propagation_source: libp2p::PeerId::random(), + message_id: libp2p::gossipsub::MessageId(vec![]), + message: libp2p::gossipsub::Message { + source: None, + data: vec![], + sequence_number: None, + topic: libp2p::gossipsub::TopicHash::from_raw("test"), + }, + }; + let event = Event::from(gossipsub_event); + match event { + Event::Gossipsub(e) => { + assert!( + matches!(*e, libp2p::gossipsub::Event::Message { .. 
}), + "Event conversion failed" + ); + } + _ => panic!("Event conversion failed"), + } + } + + #[test] + fn test_event_conversion_ping() { + let ping_event = ping::Event { + peer: libp2p::PeerId::random(), + connection: libp2p::swarm::ConnectionId::new_unchecked(0), + result: Ok(core::time::Duration::from_secs(1)), + }; + let event = Event::from(ping_event); + match event { + Event::Ping(_) => {} + _ => panic!("Event conversion failed"), + } + } +} diff --git a/kona/crates/node/gossip/src/gate.rs b/rust/kona/crates/node/gossip/src/gate.rs similarity index 100% rename from kona/crates/node/gossip/src/gate.rs rename to rust/kona/crates/node/gossip/src/gate.rs diff --git a/kona/crates/node/gossip/src/gater.rs b/rust/kona/crates/node/gossip/src/gater.rs similarity index 99% rename from kona/crates/node/gossip/src/gater.rs rename to rust/kona/crates/node/gossip/src/gater.rs index 3c871e4be44..9b53d1379b8 100644 --- a/kona/crates/node/gossip/src/gater.rs +++ b/rust/kona/crates/node/gossip/src/gater.rs @@ -277,7 +277,7 @@ impl ConnectionGate for ConnectionGater { } fn connectedness(&self, peer_id: &PeerId) -> Connectedness { - self.connectedness.get(peer_id).cloned().unwrap_or(Connectedness::NotConnected) + self.connectedness.get(peer_id).copied().unwrap_or(Connectedness::NotConnected) } fn list_protected_peers(&self) -> Vec { @@ -297,7 +297,7 @@ impl ConnectionGate for ConnectionGater { let dial_info = self .dialed_peers .entry(addr.clone()) - .or_insert(DialInfo { num_dials: 0, last_dial: Instant::now() }); + .or_insert_with(|| DialInfo { num_dials: 0, last_dial: Instant::now() }); // If the last dial was longer than the dial period, reset the number of dials. 
if dial_info.last_dial.elapsed() > self.config.dial_period { @@ -354,7 +354,7 @@ impl ConnectionGate for ConnectionGater { } fn list_blocked_addrs(&self) -> Vec { - self.blocked_addrs.iter().cloned().collect() + self.blocked_addrs.iter().copied().collect() } fn block_subnet(&mut self, subnet: IpNet) { diff --git a/kona/crates/node/gossip/src/handler.rs b/rust/kona/crates/node/gossip/src/handler.rs similarity index 100% rename from kona/crates/node/gossip/src/handler.rs rename to rust/kona/crates/node/gossip/src/handler.rs diff --git a/rust/kona/crates/node/gossip/src/lib.rs b/rust/kona/crates/node/gossip/src/lib.rs new file mode 100644 index 00000000000..aebee087678 --- /dev/null +++ b/rust/kona/crates/node/gossip/src/lib.rs @@ -0,0 +1,78 @@ +//! Gossip protocol implementation for the OP Stack. +//! +//! This crate provides a comprehensive gossip networking implementation for the OP Stack, +//! including GossipSub-based consensus layer networking, RPC interfaces for network +//! administration, and metrics collection. +//! +//! ## Key Components +//! +//! - [`GossipDriver`]: Main driver managing the libp2p swarm and event handling +//! - [`Behaviour`]: Custom libp2p behavior combining `GossipSub`, Ping, and Identify +//! - [`BlockHandler`]: Validates and processes incoming block payloads +//! - [`ConnectionGater`]: Sophisticated connection management and rate limiting +//! - [`P2pRpcRequest`]: RPC interface for network administration +//! 
- [`Metrics`]: Metrics collection for monitoring and observability + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/kona-logo.png" +)] +#![doc(issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/")] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +#[macro_use] +extern crate tracing; +// Used in tests +#[allow(unused_extern_crates)] +extern crate alloy_rlp; + +mod metrics; +pub use metrics::Metrics; + +mod rpc; +pub use rpc::{ + Connectedness, Direction, GossipScores, P2pRpcRequest, PeerCount, PeerDump, PeerInfo, + PeerScores, PeerStats, ReqRespScores, TopicScores, +}; + +mod behaviour; +pub use behaviour::{Behaviour, BehaviourError}; + +mod config; +pub use config::{ + DEFAULT_MESH_D, DEFAULT_MESH_DHI, DEFAULT_MESH_DLAZY, DEFAULT_MESH_DLO, + GLOBAL_VALIDATE_THROTTLE, GOSSIP_HEARTBEAT, MAX_GOSSIP_SIZE, MAX_OUTBOUND_QUEUE, + MAX_VALIDATE_QUEUE, MIN_GOSSIP_SIZE, PEER_SCORE_INSPECT_FREQUENCY, SEEN_MESSAGES_TTL, + default_config, default_config_builder, +}; + +mod gate; +pub use gate::ConnectionGate; // trait + +mod gater; +pub use gater::{ + ConnectionGater, // implementation + DialInfo, + GaterConfig, +}; + +mod builder; +pub use builder::GossipDriverBuilder; + +mod error; +pub use error::{DialError, GossipDriverBuilderError, HandlerEncodeError, PublishError}; + +mod event; +pub use event::Event; + +mod handler; +pub use handler::{BlockHandler, Handler}; + +mod driver; +pub use driver::GossipDriver; + +mod block_validity; +pub use block_validity::BlockInvalidError; + +#[cfg(test)] +pub(crate) use block_validity::tests::*; diff --git a/kona/crates/node/gossip/src/metrics/mod.rs b/rust/kona/crates/node/gossip/src/metrics/mod.rs similarity index 100% rename from kona/crates/node/gossip/src/metrics/mod.rs rename to rust/kona/crates/node/gossip/src/metrics/mod.rs diff --git a/kona/crates/node/gossip/src/mod.rs 
b/rust/kona/crates/node/gossip/src/mod.rs similarity index 100% rename from kona/crates/node/gossip/src/mod.rs rename to rust/kona/crates/node/gossip/src/mod.rs diff --git a/kona/crates/node/gossip/src/rpc/mod.rs b/rust/kona/crates/node/gossip/src/rpc/mod.rs similarity index 100% rename from kona/crates/node/gossip/src/rpc/mod.rs rename to rust/kona/crates/node/gossip/src/rpc/mod.rs diff --git a/rust/kona/crates/node/gossip/src/rpc/request.rs b/rust/kona/crates/node/gossip/src/rpc/request.rs new file mode 100644 index 00000000000..b53f0e3a143 --- /dev/null +++ b/rust/kona/crates/node/gossip/src/rpc/request.rs @@ -0,0 +1,658 @@ +//! Contains the p2p RPC request type. + +use std::{net::IpAddr, num::TryFromIntError, sync::Arc}; + +use crate::{GossipDriver, GossipScores}; +use alloy_primitives::map::{HashMap, HashSet}; +use discv5::{ + enr::{NodeId, k256::ecdsa}, + multiaddr::Protocol, +}; +use ipnet::IpNet; +use kona_disc::Discv5Handler; +use kona_peers::OpStackEnr; +use libp2p::{Multiaddr, PeerId, gossipsub::TopicHash}; +use tokio::sync::oneshot::Sender; + +use super::{ + PeerDump, PeerStats, + types::{Connectedness, Direction, PeerInfo, PeerScores}, +}; +use crate::ConnectionGate; + +/// A p2p RPC Request. +#[derive(Debug)] +pub enum P2pRpcRequest { + /// Returns [`PeerInfo`] for the p2p network. + PeerInfo(Sender), + /// Dumps the node's discovery table from the [`kona_disc::Discv5Driver`]. + DiscoveryTable(Sender>), + /// Returns the current peer count for both the + /// - Discovery Service ([`kona_disc::Discv5Driver`]) + /// - Gossip Service ([`crate::GossipDriver`]) + PeerCount(Sender<(Option, usize)>), + /// Returns a [`PeerDump`] containing detailed information about connected peers. + /// If `connected` is true, only returns connected peers. + Peers { + /// The output channel to send the [`PeerDump`] to. + out: Sender, + /// Whether to only return connected peers. + connected: bool, + }, + /// Request to block a peer by its [`PeerId`]. 
+ BlockPeer { + /// The [`PeerId`] of the peer to block. + id: PeerId, + }, + /// Request to unblock a peer by its [`PeerId`]. + UnblockPeer { + /// The [`PeerId`] of the peer to unblock. + id: PeerId, + }, + /// Request to list all blocked peers. + ListBlockedPeers(Sender>), + /// Request to block a given IP Address. + BlockAddr { + /// The IP address to block. + address: IpAddr, + }, + /// Request to unblock a given IP Address. + UnblockAddr { + /// The IP address to unblock. + address: IpAddr, + }, + /// Request to list all blocked IP Addresses. + ListBlockedAddrs(Sender>), + /// Request to block a given Subnet. + BlockSubnet { + /// The Subnet to block. + address: IpNet, + }, + /// Request to unblock a given Subnet. + UnblockSubnet { + /// The Subnet to unblock. + address: IpNet, + }, + + /// Request to connect to a given peer. + ConnectPeer { + /// The [`Multiaddr`] of the peer to connect to. + address: Multiaddr, + }, + /// Request to disconnect the specified peer. + DisconnectPeer { + /// The peer id to disconnect. + peer_id: PeerId, + }, + /// Protects a given peer from disconnection. + ProtectPeer { + /// The id of the peer. + peer_id: PeerId, + }, + /// Unprotects a given peer. + UnprotectPeer { + /// The id of the peer. + peer_id: PeerId, + }, + /// Request to list all blocked Subnets. + ListBlockedSubnets(Sender>), + /// Returns the current peer stats for both the + /// - Discovery Service ([`kona_disc::Discv5Driver`]) + /// - Gossip Service ([`crate::GossipDriver`]) + /// + /// This information can be used to briefly monitor the current state of the p2p network for a + /// given peer. + PeerStats(Sender), +} + +impl P2pRpcRequest { + /// Handles the peer count request. 
+ pub fn handle(self, gossip: &mut GossipDriver, disc: &Discv5Handler) { + match self { + Self::PeerCount(s) => Self::handle_peer_count(s, gossip, disc), + Self::DiscoveryTable(s) => Self::handle_discovery_table(s, disc), + Self::PeerInfo(s) => Self::handle_peer_info(s, gossip, disc), + Self::Peers { out, connected } => Self::handle_peers(out, connected, gossip, disc), + Self::DisconnectPeer { peer_id } => Self::disconnect_peer(peer_id, gossip), + Self::PeerStats(s) => Self::handle_peer_stats(s, gossip, disc), + Self::ConnectPeer { address } => Self::connect_peer(address, gossip), + Self::BlockPeer { id } => Self::block_peer(id, gossip), + Self::UnblockPeer { id } => Self::unblock_peer(id, gossip), + Self::ListBlockedPeers(s) => Self::list_blocked_peers(s, gossip), + Self::BlockAddr { address } => Self::block_addr(address, gossip), + Self::UnblockAddr { address } => Self::unblock_addr(address, gossip), + Self::ListBlockedAddrs(s) => Self::list_blocked_addrs(s, gossip), + Self::ProtectPeer { peer_id } => Self::protect_peer(peer_id, gossip), + Self::UnprotectPeer { peer_id } => Self::unprotect_peer(peer_id, gossip), + Self::BlockSubnet { address } => Self::block_subnet(address, gossip), + Self::UnblockSubnet { address } => Self::unblock_subnet(address, gossip), + Self::ListBlockedSubnets(s) => Self::list_blocked_subnets(s, gossip), + } + } + + fn protect_peer(id: PeerId, gossip: &mut GossipDriver) { + gossip.connection_gate.protect_peer(id); + } + + fn unprotect_peer(id: PeerId, gossip: &mut GossipDriver) { + gossip.connection_gate.unprotect_peer(id); + } + + fn block_addr(address: IpAddr, gossip: &mut GossipDriver) { + gossip.connection_gate.block_addr(address); + } + + fn unblock_addr(address: IpAddr, gossip: &mut GossipDriver) { + gossip.connection_gate.unblock_addr(address); + } + + fn list_blocked_addrs(s: Sender>, gossip: &GossipDriver) { + let blocked_addrs = gossip.connection_gate.list_blocked_addrs(); + if let Err(e) = s.send(blocked_addrs) { + warn!(target: 
"p2p::rpc", "Failed to send blocked addresses through response channel: {:?}", e); + } + } + + fn block_peer(id: PeerId, gossip: &mut GossipDriver) { + gossip.connection_gate.block_peer(&id); + gossip.swarm.behaviour_mut().gossipsub.blacklist_peer(&id); + } + + fn unblock_peer(id: PeerId, gossip: &mut GossipDriver) { + gossip.connection_gate.unblock_peer(&id); + gossip.swarm.behaviour_mut().gossipsub.remove_blacklisted_peer(&id); + } + + fn list_blocked_peers(s: Sender>, gossip: &GossipDriver) { + let blocked_peers = gossip.connection_gate.list_blocked_peers(); + if let Err(e) = s.send(blocked_peers) { + warn!(target: "p2p::rpc", "Failed to send blocked peers through response channel: {:?}", e); + } + } + + fn block_subnet(address: IpNet, gossip: &mut GossipDriver) { + gossip.connection_gate.block_subnet(address); + } + + fn unblock_subnet(address: IpNet, gossip: &mut GossipDriver) { + gossip.connection_gate.unblock_subnet(address); + } + + fn connect_peer(address: Multiaddr, gossip: &mut GossipDriver) { + gossip.dial_multiaddr(address) + } + + fn disconnect_peer(peer_id: PeerId, gossip: &mut GossipDriver) { + if let Err(e) = gossip.swarm.disconnect_peer_id(peer_id) { + warn!(target: "p2p::rpc", "Failed to disconnect peer {}: {:?}", peer_id, e); + } else { + info!(target: "p2p::rpc", "Disconnected peer {}", peer_id); + // Record the duration of the peer connection. 
+ if let Some(start_time) = gossip.peer_connection_start.remove(&peer_id) { + let _peer_duration = start_time.elapsed(); + kona_macros::record!( + histogram, + crate::Metrics::GOSSIP_PEER_CONNECTION_DURATION_SECONDS, + _peer_duration.as_secs_f64() + ); + } + } + } + + fn list_blocked_subnets(s: Sender>, gossip: &GossipDriver) { + let blocked_subnets = gossip.connection_gate.list_blocked_subnets(); + if let Err(e) = s.send(blocked_subnets) { + warn!(target: "p2p::rpc", "Failed to send blocked subnets through response channel: {:?}", e); + } + } + + fn handle_discovery_table(sender: Sender>, disc: &Discv5Handler) { + let enrs = disc.table_enrs(); + tokio::spawn(async move { + let dt = match enrs.await { + Ok(dt) => dt.into_iter().map(|e| e.to_string()).collect(), + + Err(e) => { + warn!(target: "p2p_rpc", "Failed to receive peer count: {:?}", e); + return; + } + }; + + if let Err(e) = sender.send(dt) { + warn!(target: "p2p_rpc", "Failed to send peer count through response channel: {:?}", e); + } + }); + } + + fn handle_peers( + sender: Sender, + connected: bool, + gossip: &GossipDriver, + disc: &Discv5Handler, + ) { + let Ok(total_connected) = gossip.swarm.network_info().num_peers().try_into() else { + error!(target: "p2p::rpc", "Failed to get total connected peers. The number of connected peers is too large and overflows u32."); + return; + }; + + let peer_ids: Vec = if connected { + gossip.swarm.connected_peers().copied().collect() + } else { + gossip.peerstore.keys().copied().collect() + }; + + // Get the set of actually connected peers from the swarm for accurate connectedness + // reporting. + let actually_connected: HashSet = gossip.swarm.connected_peers().copied().collect(); + + // Get connection gate information. 
+ let banned_subnets = gossip.connection_gate.list_blocked_subnets(); + let banned_ips = gossip.connection_gate.list_blocked_addrs(); + let banned_peers = gossip.connection_gate.list_blocked_peers(); + let protected_peers = gossip.connection_gate.list_protected_peers(); + + // For each peer id, determine connectedness based on actual swarm connection state. + // This fixes the issue where the connection gate's internal state could be stale, + // especially for inbound connections or after connections close. + let connectedness = peer_ids + .iter() + .copied() + .map(|id| { + if actually_connected.contains(&id) { + (id, Connectedness::Connected) + } else if banned_peers.contains(&id) { + (id, Connectedness::CannotConnect) + } else { + (id, Connectedness::NotConnected) + } + }) + .collect::>(); + + // Clone the ping map + let pings = Arc::clone(&gossip.ping); + + #[derive(Default)] + struct PeerMetadata { + protocols: Option>, + addresses: Vec, + user_agent: String, + protocol_version: String, + score: f64, + } + + // Build a map of peer ids to their supported protocols and addresses. + let mut peer_metadata: HashMap = gossip + .peerstore + .iter() + .map(|(id, info)| { + let protocols = if info.protocols.is_empty() { + None + } else { + Some( + info.protocols + .iter() + .map(|protocol| protocol.to_string()) + .collect::>(), + ) + }; + let addresses = info + .listen_addrs + .iter() + .map(|addr| { + let mut addr = addr.clone(); + addr.push(Protocol::P2p(*id)); + addr.to_string() + }) + .collect::>(); + + let score = gossip.swarm.behaviour().gossipsub.peer_score(id).unwrap_or_default(); + + ( + *id, + PeerMetadata { + protocols, + addresses, + user_agent: info.agent_version.clone(), + protocol_version: info.protocol_version.clone(), + score, + }, + ) + }) + .collect(); + + // We consider that kona-nodes are gossiping blocks if their peers are subscribed to any of + // the blocks topics. + // This is the same heuristic as the one used in the op-node (``). 
+ let peer_gossip_info = gossip + .swarm + .behaviour() + .gossipsub + .all_peers() + .filter_map(|(peer_id, topics)| { + let supported_topics = HashSet::from([ + gossip.handler.blocks_v1_topic.hash(), + gossip.handler.blocks_v2_topic.hash(), + gossip.handler.blocks_v3_topic.hash(), + gossip.handler.blocks_v4_topic.hash(), + ]); + + topics.iter().any(|topic| supported_topics.contains(topic)).then_some(*peer_id) + }) + .collect::>(); + + let disc_table_infos = disc.table_infos(); + + tokio::spawn(async move { + let Ok(table_infos) = disc_table_infos.await else { + error!(target: "p2p::rpc", "Failed to get table infos. The connection to the gossip driver is closed."); + return; + }; + + let pings = { pings.lock().await.clone() }; + + let node_to_peer_id: HashMap = peer_ids.into_iter().filter_map(|id| + { + let Ok(pubkey) = libp2p_identity::PublicKey::try_decode_protobuf(&id.to_bytes()[2..]) else { + error!(target: "p2p::rpc", peer_id = ?id, "Failed to decode public key from peer id. This is a bug as all the peer ids should be decodable (because they come from secp256k1 public keys)."); + return None; + }; + + let key = + match pubkey.try_into_secp256k1().map_err(|err| err.to_string()).and_then( + |key| ecdsa::VerifyingKey::from_sec1_bytes(key.to_bytes().as_slice()).map_err(|err| err.to_string()) + ) { Ok(key) => key, + Err(err) => { + error!(target: "p2p::rpc", peer_id = ?id, err = ?err, "Failed to convert public key to secp256k1 public key. This is a bug."); + return None; + }}; + let node_id = NodeId::from(key); + Some((node_id, id)) + } + ).collect(); + + // Filter out peers that are not in the gossip network. + let node_to_table_infos = table_infos + .into_iter() + .filter(|(id, _, _)| node_to_peer_id.contains_key(id)) + .map(|(id, enr, status)| (id, (enr, status))) + .collect::>(); + + // Build the peer info map. 
+ let infos: HashMap = node_to_peer_id + .iter() + .map(|(id, peer_id)| { + let (maybe_enr, maybe_status) = node_to_table_infos.get(id).cloned().unzip(); + + let opstack_enr = + maybe_enr.clone().and_then(|enr| OpStackEnr::try_from(&enr).ok()); + + let direction = maybe_status + .map(|status| { + if status.is_incoming() { + Direction::Inbound + } else { + Direction::Outbound + } + }) + .unwrap_or_default(); + + let PeerMetadata { protocols, addresses, user_agent, protocol_version, score } = + peer_metadata.remove(peer_id).unwrap_or_default(); + + let peer_connectedness = + connectedness.get(peer_id).copied().unwrap_or(Connectedness::NotConnected); + + let latency = pings.get(peer_id).map(|d| d.as_secs()).unwrap_or(0); + + let node_id = format!("{:?}", &id); + ( + peer_id.to_string(), + PeerInfo { + peer_id: peer_id.to_string(), + node_id, + user_agent, + protocol_version, + enr: maybe_enr.map(|enr| enr.to_string()), + addresses, + protocols, + connectedness: peer_connectedness, + direction, + // Note: we use the chain id from the ENR if it exists, otherwise we + // use 0 to be consistent with op-node's behavior (``). + chain_id: opstack_enr.map(|enr| enr.chain_id).unwrap_or(0), + gossip_blocks: peer_gossip_info.contains(peer_id), + protected: protected_peers.contains(peer_id), + latency, + peer_scores: PeerScores { + gossip: GossipScores { + total: score, + // Note(@theochap): we don't compute the topic scores + // because we don't + // `rust-libp2p` doesn't expose that information to the + // user-facing API. 
+ // See `` + blocks: Default::default(), + // Note(@theochap): We can't compute the ip colocation + // factor because + // `rust-libp2p` doesn't expose that information to the + // user-facing API + // See `` + ip_colocation_factor: Default::default(), + // Note(@theochap): We can't compute the behavioral penalty + // because + // `rust-libp2p` doesn't expose that information to the + // user-facing API + // See `` + behavioral_penalty: Default::default(), + }, + // We only support a shim implementation for the req/resp + // protocol so we're not + // computing scores for it. + req_resp: Default::default(), + }, + }, + ) + }) + .collect(); + + if let Err(e) = sender.send(PeerDump { + total_connected, + peers: infos, + banned_peers: banned_peers.into_iter().map(|p| p.to_string()).collect(), + banned_ips, + banned_subnets, + }) { + warn!(target: "p2p::rpc", "Failed to send peer info through response channel: {:?}", e); + } + }); + } + + /// Handles a peer info request by spawning a task. + fn handle_peer_info( + sender: Sender, + gossip: &GossipDriver, + disc: &Discv5Handler, + ) { + let peer_id = *gossip.local_peer_id(); + let chain_id = disc.chain_id; + let local_enr = disc.local_enr(); + let mut addresses = gossip + .swarm + .listeners() + .map(|a| { + let mut addr = a.clone(); + addr.push(Protocol::P2p(peer_id)); + addr.to_string() + }) + .collect::>(); + + addresses.append( + &mut gossip.swarm.external_addresses().map(|a| a.to_string()).collect::>(), + ); + + tokio::spawn(async move { + let enr = match local_enr.await { + Ok(enr) => enr, + Err(e) => { + warn!(target: "p2p::rpc", "Failed to receive local ENR: {:?}", e); + return; + } + }; + + // Note: we need to use `Debug` impl here because the `Display` impl of + // `NodeId` strips some part of the hex string and replaces it with "...". + let node_id = format!("{:?}", &enr.node_id()); + + // We need to add the local multiaddr to the list of known addresses. 
+ let peer_info = PeerInfo { + peer_id: peer_id.to_string(), + node_id, + user_agent: "kona".to_string(), + protocol_version: String::new(), + enr: Some(enr.to_string()), + addresses, + protocols: Some(vec![ + "/ipfs/id/push/1.0.0".to_string(), + "/meshsub/1.1.0".to_string(), + "/ipfs/ping/1.0.0".to_string(), + "/meshsub/1.2.0".to_string(), + "/ipfs/id/1.0.0".to_string(), + format!("/opstack/req/payload_by_number/{chain_id}/0/"), + "/meshsub/1.0.0".to_string(), + "/floodsub/1.0.0".to_string(), + ]), + connectedness: Connectedness::Connected, + direction: Direction::Inbound, + protected: false, + chain_id, + latency: 0, + gossip_blocks: true, + peer_scores: PeerScores::default(), + }; + if let Err(e) = sender.send(peer_info) { + warn!(target: "p2p_rpc", "Failed to send peer info through response channel: {:?}", e); + } + }); + } + + fn handle_peer_stats( + sender: Sender, + gossip: &GossipDriver, + disc: &Discv5Handler, + ) { + let peers_known = gossip.peerstore.len(); + let gossip_network_info = gossip.swarm.network_info(); + let table_info = disc.peer_count(); + + let banned_peers = gossip.connection_gate.list_blocked_peers().len(); + + let topics = gossip.swarm.behaviour().gossipsub.topics().collect::>(); + + let topics = topics + .into_iter() + .map(|hash| { + ( + hash.clone(), + gossip + .swarm + .behaviour() + .gossipsub + .all_peers() + .filter(|(_, topics)| topics.contains(&hash)) + .count(), + ) + }) + .collect::>(); + + let v1_topic_hash = gossip.handler.blocks_v1_topic.hash(); + let v2_topic_hash = gossip.handler.blocks_v2_topic.hash(); + let v3_topic_hash = gossip.handler.blocks_v3_topic.hash(); + let v4_topic_hash = gossip.handler.blocks_v4_topic.hash(); + + tokio::spawn(async move { + let Ok(table) = table_info.await else { + error!(target: "p2p::rpc", "failed to get discovery table size. The sender has been dropped. 
The discv5 service may not be running anymore."); + return; + }; + + let Ok(table) = table.try_into() else { + error!(target: "p2p::rpc", "failed to get discovery table size. Integer overflow. Please ensure that the number of peers in the discovery table fits in a u32."); + return; + }; + + let Ok(connected) = gossip_network_info.num_peers().try_into() else { + error!(target: "p2p::rpc", "failed to get number of connected peers. Integer overflow. Please ensure that the number of connected peers fits in a u32."); + return; + }; + + let Ok(known) = peers_known.try_into() else { + error!(target: "p2p::rpc", "failed to get number of known peers. Integer overflow. Please ensure that the number of known peers fits in a u32."); + return; + }; + + // Given a topic hash, this method: + // - gets the number of peers in the mesh for that topic + // - returns an error if the number of peers in the mesh overflows a u32 + // - returns 0 if there are no peers in the mesh for that topic + let get_topic = |topic: &TopicHash| { + Ok::( + topics + .get(topic) + .copied() + .map(|v| v.try_into()) + .transpose()? + .unwrap_or_default(), + ) + }; + + let Ok(block_topics) = vec![ + get_topic(&v1_topic_hash), + get_topic(&v2_topic_hash), + get_topic(&v3_topic_hash), + get_topic(&v4_topic_hash), + ] + .into_iter() + .collect::, _>>() else { + error!(target: "p2p::rpc", "failed to get blocks topic. Some topic count overflowed. Make sure that the number of peers for a given topic fits in a u32."); + return; + }; + + let stats = PeerStats { + connected, + table, + blocks_topic: block_topics[0], + blocks_topic_v2: block_topics[1], + blocks_topic_v3: block_topics[2], + blocks_topic_v4: block_topics[3], + banned: banned_peers as u32, + known, + }; + + if let Err(e) = sender.send(stats) { + warn!(target: "p2p_rpc", "Failed to send peer stats through response channel: {:?}", e); + }; + }); + } + + /// Handles a peer count request by spawning a task. 
+ fn handle_peer_count( + sender: Sender<(Option, usize)>, + gossip: &GossipDriver, + disc: &Discv5Handler, + ) { + let pc_req = disc.peer_count(); + let gossip_pc = gossip.connected_peers(); + tokio::spawn(async move { + let pc = match pc_req.await { + Ok(pc) => Some(pc), + Err(e) => { + warn!(target: "p2p_rpc", "Failed to receive peer count: {:?}", e); + None + } + }; + if let Err(e) = sender.send((pc, gossip_pc)) { + warn!(target: "p2p_rpc", "Failed to send peer count through response channel: {:?}", e); + } + }); + } +} diff --git a/rust/kona/crates/node/gossip/src/rpc/types.rs b/rust/kona/crates/node/gossip/src/rpc/types.rs new file mode 100644 index 00000000000..ad63072b766 --- /dev/null +++ b/rust/kona/crates/node/gossip/src/rpc/types.rs @@ -0,0 +1,428 @@ +//! The types used in the p2p RPC API. + +use core::net::IpAddr; +use derive_more::Display; + +use alloy_primitives::{ChainId, map::HashMap}; + +/// The peer info. +/// +/// +#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PeerInfo { + /// The peer id. + #[serde(rename = "peerID")] + pub peer_id: String, + /// The node id. + #[serde(rename = "nodeID")] + pub node_id: String, + /// The user agent. + pub user_agent: String, + /// The protocol version. + pub protocol_version: String, + /// The enr for the peer. + /// If the peer is not in the discovery table, this will not be set. + #[serde(rename = "ENR")] + #[serde(skip_serializing_if = "Option::is_none")] + pub enr: Option, + /// The peer addresses. + pub addresses: Vec, + /// Peer supported protocols + pub protocols: Option>, + /// 0: "`NotConnected`", + /// 1: "Connected", + /// 2: "`CanConnect`" (gracefully disconnected) + /// 3: "`CannotConnect`" (tried but failed) + pub connectedness: Connectedness, + /// 0: "Unknown", + /// 1: "Inbound" (if the peer contacted us) + /// 2: "Outbound" (if we connected to them) + pub direction: Direction, + /// Whether the peer is protected. 
+ pub protected: bool, + /// The chain id. + #[serde(rename = "chainID")] + pub chain_id: ChainId, + /// The peer latency in nanoseconds + pub latency: u64, + /// Whether the peer gossips + pub gossip_blocks: bool, + /// The peer scores. + #[serde(rename = "scores")] + pub peer_scores: PeerScores, +} + +/// `GossipSub` topic-specific scoring metrics. +/// +/// Tracks peer performance within specific gossip topics, used by the +/// `GossipSub` protocol to maintain mesh quality and route messages efficiently. +/// These scores influence peer selection for the gossip mesh topology. +/// +/// Reference: +#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct TopicScores { + /// Duration the peer has participated in the topic mesh. + /// + /// Longer participation indicates stability and commitment to the topic, + /// contributing positively to the peer's mesh score. + pub time_in_mesh: f64, + + /// Count of first-time message deliveries from this peer. + /// + /// Measures how often this peer is the first to deliver new messages, + /// indicating their connectivity and responsiveness to the network. + pub first_message_deliveries: f64, + + /// Count of messages delivered while in the mesh topology. + /// + /// Tracks consistent message forwarding behavior while the peer is + /// an active participant in the mesh structure. + pub mesh_message_deliveries: f64, + + /// Count of invalid or malicious messages from this peer. + /// + /// Penalizes peers that send invalid, duplicate, or malformed messages, + /// helping maintain network health and preventing spam. + pub invalid_message_deliveries: f64, +} + +/// Comprehensive `GossipSub` scoring metrics for peer quality assessment. +/// +/// Aggregates various scoring factors used by the `GossipSub` protocol to +/// evaluate peer quality and determine mesh topology. Higher scores indicate +/// more reliable and well-behaved peers. 
+/// +/// Reference: +#[derive(Debug, Default, Clone, Copy, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct GossipScores { + /// Aggregate score across all scoring dimensions. + /// + /// The final computed score that determines this peer's overall + /// reputation in the gossip network. + pub total: f64, + + /// Block-specific topic scores for consensus messages. + /// + /// Tracks peer behavior specifically for block gossip, which is + /// the primary message type in OP Stack networks. + pub blocks: TopicScores, + + /// Penalty for IP address colocation with other peers. + /// + /// Reduces scores for peers sharing IP addresses to prevent + /// eclipse attacks and improve network decentralization. + #[serde(rename = "IPColocationFactor")] + pub ip_colocation_factor: f64, + + /// Penalty for problematic behavior patterns. + /// + /// Applied to peers exhibiting suspicious or harmful behavior + /// that doesn't fit other specific scoring categories. + pub behavioral_penalty: f64, +} + +/// Request-response protocol scoring metrics. +/// +/// Tracks peer performance in direct request-response interactions outside +/// of the gossip mesh, such as block synchronization requests. +/// +/// Reference: +#[derive(Debug, Default, Clone, Copy, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct ReqRespScores { + /// Number of valid responses provided by this peer. + /// + /// Counts successful request-response exchanges where the peer + /// provided correct and timely responses to queries. + pub valid_responses: f64, + + /// Number of error responses or failed requests. + /// + /// Tracks cases where the peer returned errors, timeouts, or + /// otherwise failed to properly respond to requests. + pub error_responses: f64, + /// Number of rejected payloads. 
+ pub rejected_payloads: f64, +} + +/// Peer Scores +/// +/// +#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PeerScores { + /// The gossip scores + pub gossip: GossipScores, + /// The request-response scores. + pub req_resp: ReqRespScores, +} + +/// Peer count data. +#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PeerCount { + /// The total number of connected peers to the discovery service. + pub connected_discovery: Option, + /// The total number of connected peers to the gossip service. + pub connected_gossip: usize, +} + +/// A raw peer dump. +/// +/// +#[derive(Clone, Default, Debug, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PeerDump { + /// The total number of connected peers + pub total_connected: u32, + /// A map from peer id to peer info + pub peers: HashMap, + /// A list of banned peers. + pub banned_peers: Vec, + /// A list of banned ip addresses. + #[serde(rename = "bannedIPS")] + pub banned_ips: Vec, + /// The banned subnets + pub banned_subnets: Vec, +} + +/// Peer stats. +/// +/// +#[derive(Clone, Default, Debug, Copy, serde::Serialize, serde::Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct PeerStats { + /// The number of connections + pub connected: u32, + /// The table. + pub table: u32, + /// The blocks topic. + #[serde(rename = "blocksTopic")] + pub blocks_topic: u32, + /// The blocks v2 topic. + #[serde(rename = "blocksTopicV2")] + pub blocks_topic_v2: u32, + /// The blocks v3 topic. + #[serde(rename = "blocksTopicV3")] + pub blocks_topic_v3: u32, + /// The blocks v4 topic. + #[serde(rename = "blocksTopicV4")] + pub blocks_topic_v4: u32, + /// The banned count. + pub banned: u32, + /// The known count. 
+ pub known: u32, +} + +/// Represents the connectivity state of a peer in a network, indicating the reachability and +/// interaction status of a node with its peers. +#[derive( + Clone, + Debug, + Display, + PartialEq, + Eq, + Copy, + Default, + // We need to use `serde_repr` to serialize the enum as an integer to match the `op-node` API. + serde_repr::Serialize_repr, + serde_repr::Deserialize_repr, +)] +#[repr(u8)] +pub enum Connectedness { + /// No current connection to the peer, and no recent history of a successful connection. + #[default] + #[display("Not Connected")] + NotConnected = 0, + + /// An active, open connection to the peer exists. + #[display("Connected")] + Connected = 1, + + /// Connection to the peer is possible but not currently established; usually implies a past + /// successful connection. + #[display("Can Connect")] + CanConnect = 2, + + /// Recent attempts to connect to the peer failed, indicating potential issues in reachability + /// or peer status. + #[display("Cannot Connect")] + CannotConnect = 3, + + /// Connection to the peer is limited; may not have full capabilities. + #[display("Limited")] + Limited = 4, +} + +impl From for Connectedness { + fn from(value: u8) -> Self { + match value { + 1 => Self::Connected, + 2 => Self::CanConnect, + 3 => Self::CannotConnect, + 4 => Self::Limited, + _ => Self::NotConnected, + } + } +} +/// Direction represents the direction of a connection. +#[derive(Debug, Clone, Display, Copy, PartialEq, Eq, Default)] +pub enum Direction { + /// Unknown is the default direction when the direction is not specified. + #[default] + #[display("Unknown")] + Unknown = 0, + /// Inbound is for when the remote peer initiated the connection. + #[display("Inbound")] + Inbound = 1, + /// Outbound is for when the local peer initiated the connection. 
+ #[display("Outbound")] + Outbound = 2, +} + +impl serde::Serialize for Direction { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_u8(*self as u8) + } +} + +impl<'de> serde::Deserialize<'de> for Direction { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let value = u8::deserialize(deserializer)?; + match value { + 0 => Ok(Self::Unknown), + 1 => Ok(Self::Inbound), + 2 => Ok(Self::Outbound), + _ => Err(serde::de::Error::invalid_value( + serde::de::Unexpected::Unsigned(value as u64), + &"a value between 0 and 2", + )), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_connectedness_from_u8() { + assert_eq!(Connectedness::from(0), Connectedness::NotConnected); + assert_eq!(Connectedness::from(1), Connectedness::Connected); + assert_eq!(Connectedness::from(2), Connectedness::CanConnect); + assert_eq!(Connectedness::from(3), Connectedness::CannotConnect); + assert_eq!(Connectedness::from(4), Connectedness::Limited); + assert_eq!(Connectedness::from(5), Connectedness::NotConnected); + } + + #[test] + fn test_direction_display() { + assert_eq!(Direction::Unknown.to_string(), "Unknown"); + assert_eq!(Direction::Inbound.to_string(), "Inbound"); + assert_eq!(Direction::Outbound.to_string(), "Outbound"); + } + + #[test] + fn test_direction_serialization() { + assert_eq!( + serde_json::to_string(&Direction::Unknown).unwrap(), + "0", + "Serialization failed for Direction::Unknown" + ); + assert_eq!( + serde_json::to_string(&Direction::Inbound).unwrap(), + "1", + "Serialization failed for Direction::Inbound" + ); + assert_eq!( + serde_json::to_string(&Direction::Outbound).unwrap(), + "2", + "Serialization failed for Direction::Outbound" + ); + } + + #[test] + fn test_direction_deserialization() { + let unknown: Direction = serde_json::from_str("0").unwrap(); + let inbound: Direction = serde_json::from_str("1").unwrap(); + let outbound: Direction = 
serde_json::from_str("2").unwrap(); + + assert_eq!(unknown, Direction::Unknown, "Deserialization mismatch for Direction::Unknown"); + assert_eq!(inbound, Direction::Inbound, "Deserialization mismatch for Direction::Inbound"); + assert_eq!( + outbound, + Direction::Outbound, + "Deserialization mismatch for Direction::Outbound" + ); + } + + #[test] + fn test_peer_info_connectedness_serialization() { + let peer_info = PeerInfo { + peer_id: String::from("peer123"), + node_id: String::from("node123"), + user_agent: String::from("MyUserAgent"), + protocol_version: String::from("v1"), + enr: Some(String::from("enr123")), + addresses: [String::from("127.0.0.1")].to_vec(), + protocols: Some([String::from("eth"), String::from("p2p")].to_vec()), + connectedness: Connectedness::Connected, + direction: Direction::Outbound, + protected: true, + chain_id: 1, + latency: 100, + gossip_blocks: true, + peer_scores: PeerScores { + gossip: GossipScores { + total: 1.0, + blocks: TopicScores { + time_in_mesh: 10.0, + first_message_deliveries: 5.0, + mesh_message_deliveries: 2.0, + invalid_message_deliveries: 0.0, + }, + ip_colocation_factor: 0.5, + behavioral_penalty: 0.1, + }, + req_resp: ReqRespScores { + valid_responses: 10.0, + error_responses: 1.0, + rejected_payloads: 0.0, + }, + }, + }; + + let serialized = serde_json::to_string(&peer_info).expect("Serialization failed"); + + let deserialized: PeerInfo = + serde_json::from_str(&serialized).expect("Deserialization failed"); + + assert_eq!(peer_info.peer_id, deserialized.peer_id); + assert_eq!(peer_info.node_id, deserialized.node_id); + assert_eq!(peer_info.user_agent, deserialized.user_agent); + assert_eq!(peer_info.protocol_version, deserialized.protocol_version); + assert_eq!(peer_info.enr, deserialized.enr); + assert_eq!(peer_info.addresses, deserialized.addresses); + assert_eq!(peer_info.protocols, deserialized.protocols); + assert_eq!(peer_info.connectedness, deserialized.connectedness); + assert_eq!(peer_info.direction, 
deserialized.direction); + assert_eq!(peer_info.protected, deserialized.protected); + assert_eq!(peer_info.chain_id, deserialized.chain_id); + assert_eq!(peer_info.latency, deserialized.latency); + assert_eq!(peer_info.gossip_blocks, deserialized.gossip_blocks); + assert_eq!(peer_info.peer_scores.gossip.total, deserialized.peer_scores.gossip.total); + assert_eq!( + peer_info.peer_scores.req_resp.valid_responses, + deserialized.peer_scores.req_resp.valid_responses + ); + } +} diff --git a/kona/crates/node/peers/Cargo.toml b/rust/kona/crates/node/peers/Cargo.toml similarity index 100% rename from kona/crates/node/peers/Cargo.toml rename to rust/kona/crates/node/peers/Cargo.toml diff --git a/rust/kona/crates/node/peers/README.md b/rust/kona/crates/node/peers/README.md new file mode 100644 index 00000000000..22fac7f562b --- /dev/null +++ b/rust/kona/crates/node/peers/README.md @@ -0,0 +1,37 @@ +# `kona-peers` + +Networking Utilities ported from reth. + +Much of this module is ported from +. + +This module manages and converts Ethereum network entities such as node records, peer IDs, and +Ethereum Node Records (ENRs) + +## Node Record Overview + +Ethereum uses different types of "node records" to represent peers on the network. + +The simplest way to identify a peer is by public key. This is the `PeerId` type, which usually +represents a peer's secp256k1 public key. + +A more complete representation of a peer is the `NodeRecord` type, which includes the peer's +IP address, the ports where it is reachable (TCP and UDP), and the peer's public key. This is +what is returned from discovery v4 queries. + +The most comprehensive node record type is the Ethereum Node Record (`discv5::Enr`), which is +a signed, versioned record that includes the information from a `NodeRecord` along with +additional metadata. This is the data structure returned from discovery v5 queries. 
+ +When we need to deserialize an identifier that could be any of these three types (`PeerId`, +`NodeRecord`, and `discv5::Enr`), we use the `AnyNode` type, which is an enum over the +three types. `AnyNode` is used in reth's `admin_addTrustedPeer` RPC method. + +In short, the types are as follows: +- `PeerId`: A simple public key identifier. +- `NodeRecord`: A more complete representation of a peer, including IP address and ports. +- `discv5::Enr`: An Ethereum Node Record, which is a signed, versioned record that includes + additional metadata. Useful when interacting with discovery v5, or when custom metadata is + required. +- `AnyNode`: An enum over `PeerId`, `NodeRecord`, and `discv5::Enr`, useful in + deserialization when the type of the node record is not known. \ No newline at end of file diff --git a/kona/crates/node/peers/src/any.rs b/rust/kona/crates/node/peers/src/any.rs similarity index 100% rename from kona/crates/node/peers/src/any.rs rename to rust/kona/crates/node/peers/src/any.rs diff --git a/kona/crates/node/peers/src/boot.rs b/rust/kona/crates/node/peers/src/boot.rs similarity index 100% rename from kona/crates/node/peers/src/boot.rs rename to rust/kona/crates/node/peers/src/boot.rs diff --git a/kona/crates/node/peers/src/enr.rs b/rust/kona/crates/node/peers/src/enr.rs similarity index 97% rename from kona/crates/node/peers/src/enr.rs rename to rust/kona/crates/node/peers/src/enr.rs index b091ae65a85..b2f6886ae99 100644 --- a/kona/crates/node/peers/src/enr.rs +++ b/rust/kona/crates/node/peers/src/enr.rs @@ -131,7 +131,7 @@ mod tests { fn roundtrip_op_stack_enr() { arbtest::arbtest(|u| { let op_stack_enr = OpStackEnr::from_chain_id(u.arbitrary()?); - let bytes = alloy_rlp::encode(op_stack_enr).to_vec(); + let bytes = alloy_rlp::encode(op_stack_enr); let decoded = OpStackEnr::decode(&mut &bytes[..]).unwrap(); assert_eq!(decoded, op_stack_enr); Ok(()) @@ -165,7 +165,7 @@ mod tests { #[test] fn test_op_mainnet_enr() { let op_enr = 
OpStackEnr::from_chain_id(10); - let bytes = alloy_rlp::encode(op_enr).to_vec(); + let bytes = alloy_rlp::encode(op_enr); assert_eq!(Bytes::from(bytes.clone()), bytes!("820A00")); let decoded = OpStackEnr::decode(&mut &bytes[..]).unwrap(); assert_eq!(decoded, op_enr); @@ -174,7 +174,7 @@ mod tests { #[test] fn test_base_mainnet_enr() { let base_enr = OpStackEnr::from_chain_id(8453); - let bytes = alloy_rlp::encode(base_enr).to_vec(); + let bytes = alloy_rlp::encode(base_enr); assert_eq!(Bytes::from(bytes.clone()), bytes!("83854200")); let decoded = OpStackEnr::decode(&mut &bytes[..]).unwrap(); assert_eq!(decoded, base_enr); diff --git a/rust/kona/crates/node/peers/src/lib.rs b/rust/kona/crates/node/peers/src/lib.rs new file mode 100644 index 00000000000..acff533f79e --- /dev/null +++ b/rust/kona/crates/node/peers/src/lib.rs @@ -0,0 +1,45 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] + +#[macro_use] +extern crate tracing; + +/// Alias for a peer identifier. +/// +/// This is the most primitive secp256k1 public key identifier for a given peer. 
+pub type PeerId = alloy_primitives::B512; + +mod nodes; +pub use nodes::{BootNodes, OP_RAW_BOOTNODES, OP_RAW_TESTNET_BOOTNODES}; + +mod store; +pub use store::{BootStore, BootStoreFile}; + +mod score; +pub use score::PeerScoreLevel; + +mod enr; +pub use enr::{EnrValidation, OpStackEnr, OpStackEnrError}; + +mod any; +pub use any::{AnyNode, DialOptsError}; + +mod boot; +pub use boot::BootNode; + +mod record; +pub use record::{NodeRecord, NodeRecordParseError}; + +mod utils; +pub use utils::{ + PeerIdConversionError, enr_to_multiaddr, local_id_to_p2p_id, peer_id_to_secp256k1_pubkey, +}; + +mod monitoring; +pub use monitoring::PeerMonitoring; diff --git a/kona/crates/node/peers/src/monitoring.rs b/rust/kona/crates/node/peers/src/monitoring.rs similarity index 100% rename from kona/crates/node/peers/src/monitoring.rs rename to rust/kona/crates/node/peers/src/monitoring.rs diff --git a/kona/crates/node/peers/src/nodes.rs b/rust/kona/crates/node/peers/src/nodes.rs similarity index 99% rename from kona/crates/node/peers/src/nodes.rs rename to rust/kona/crates/node/peers/src/nodes.rs index 78bed713a4c..d2c62b6dabc 100644 --- a/kona/crates/node/peers/src/nodes.rs +++ b/rust/kona/crates/node/peers/src/nodes.rs @@ -125,11 +125,11 @@ mod tests { #[test] fn test_parse_raw_bootnodes() { - for raw in OP_RAW_BOOTNODES.iter() { + for raw in OP_RAW_BOOTNODES { BootNode::parse_bootnode(raw); } - for raw in OP_RAW_TESTNET_BOOTNODES.iter() { + for raw in OP_RAW_TESTNET_BOOTNODES { BootNode::parse_bootnode(raw); } } diff --git a/kona/crates/node/peers/src/record.rs b/rust/kona/crates/node/peers/src/record.rs similarity index 97% rename from kona/crates/node/peers/src/record.rs rename to rust/kona/crates/node/peers/src/record.rs index 3ea22342ff5..1affc59c18a 100644 --- a/kona/crates/node/peers/src/record.rs +++ b/rust/kona/crates/node/peers/src/record.rs @@ -38,11 +38,11 @@ impl NodeRecord { /// See also [`std::net::Ipv6Addr::to_ipv4_mapped`] pub fn convert_ipv4_mapped(&mut self) -> 
bool { // convert IPv4 mapped IPv6 address - if let IpAddr::V6(v6) = self.address { - if let Some(v4) = v6.to_ipv4_mapped() { - self.address = v4.into(); - return true; - } + if let IpAddr::V6(v6) = self.address && + let Some(v4) = v6.to_ipv4_mapped() + { + self.address = v4.into(); + return true; } false } diff --git a/kona/crates/node/peers/src/score.rs b/rust/kona/crates/node/peers/src/score.rs similarity index 100% rename from kona/crates/node/peers/src/score.rs rename to rust/kona/crates/node/peers/src/score.rs diff --git a/kona/crates/node/peers/src/store.rs b/rust/kona/crates/node/peers/src/store.rs similarity index 98% rename from kona/crates/node/peers/src/store.rs rename to rust/kona/crates/node/peers/src/store.rs index 57da34fd69a..f36fd744d36 100644 --- a/kona/crates/node/peers/src/store.rs +++ b/rust/kona/crates/node/peers/src/store.rs @@ -73,7 +73,7 @@ impl TryInto for BootStoreFile { match self { Self::Default { chain_id } => { let mut path = dirs::home_dir() - .ok_or(std::io::Error::other("Failed to get home directory"))?; + .ok_or_else(|| std::io::Error::other("Failed to get home directory"))?; path.push(".kona"); path.push(chain_id.to_string()); path.push("bootstore.json"); diff --git a/rust/kona/crates/node/peers/src/utils.rs b/rust/kona/crates/node/peers/src/utils.rs new file mode 100644 index 00000000000..150ba7d6c9c --- /dev/null +++ b/rust/kona/crates/node/peers/src/utils.rs @@ -0,0 +1,205 @@ +//! Utilities to translate types. + +use discv5::{ + Enr, + enr::{CombinedPublicKey, EnrPublicKey}, + multiaddr::Protocol, +}; +use libp2p::Multiaddr; + +use super::PeerId; + +/// Converts an [`Enr`] into a [`Multiaddr`]. 
+pub fn enr_to_multiaddr(enr: &Enr) -> Option { + let mut addr = if let Some(socket) = enr.tcp4_socket() { + let mut addr = Multiaddr::from(*socket.ip()); + addr.push(Protocol::Tcp(socket.port())); + addr + } else if let Some(socket) = enr.tcp6_socket() { + let mut addr = Multiaddr::from(*socket.ip()); + addr.push(Protocol::Tcp(socket.port())); + addr + } else { + return None; + }; + + let CombinedPublicKey::Secp256k1(pub_key) = enr.public_key() else { + return None; + }; + + let pub_key = libp2p_identity::secp256k1::PublicKey::try_from_bytes(&pub_key.encode()).ok()?; + let pub_key = libp2p_identity::PublicKey::from(pub_key); + + addr.push(Protocol::P2p(libp2p::PeerId::from_public_key(&pub_key))); + + Some(addr) +} + +/// Converts an uncompressed [`PeerId`] to a [`secp256k1::PublicKey`] by prepending the [`PeerId`] +/// bytes with the `SECP256K1_TAG_PUBKEY_UNCOMPRESSED` tag. +pub fn peer_id_to_secp256k1_pubkey(id: PeerId) -> Result { + /// Tags the public key as uncompressed. + /// + /// See: + const SECP256K1_TAG_PUBKEY_UNCOMPRESSED: u8 = 4; + + let mut full_pubkey = [0u8; secp256k1::constants::UNCOMPRESSED_PUBLIC_KEY_SIZE]; + full_pubkey[0] = SECP256K1_TAG_PUBKEY_UNCOMPRESSED; + full_pubkey[1..].copy_from_slice(id.as_slice()); + secp256k1::PublicKey::from_slice(&full_pubkey) +} + +/// An error that can occur when converting a [`PeerId`] to a [`libp2p::PeerId`]. +#[derive(Debug, thiserror::Error)] +pub enum PeerIdConversionError { + /// The peer id is not valid and cannot be converted to a secp256k1 public key. + #[error("Invalid peer id: {0}")] + InvalidPeerId(secp256k1::Error), + /// The secp256k1 public key cannot be converted to a libp2p peer id. This is a bug. + #[error("Invalid conversion from secp256k1 public key to libp2p peer id: {0}. This is a bug.")] + InvalidPublicKey(#[from] discv5::libp2p_identity::DecodingError), +} + +/// Converts an uncoded [`PeerId`] to a [`libp2p::PeerId`]. 
These two types represent the same +/// underlying concept (secp256k1 public key) but using different encodings (the local [`PeerId`] is +/// the uncompressed representation of the public key, while the "p2plib" [`libp2p::PeerId`] is a +/// more complex representation, involving protobuf encoding and bitcoin encoding, defined here: ). +pub fn local_id_to_p2p_id(peer_id: PeerId) -> Result { + // The libp2p library works with compressed public keys. + let encoded_pk_bytes = peer_id_to_secp256k1_pubkey(peer_id) + .map_err(PeerIdConversionError::InvalidPeerId)? + .serialize(); + let pk: discv5::libp2p_identity::PublicKey = + discv5::libp2p_identity::secp256k1::PublicKey::try_from_bytes(&encoded_pk_bytes)?.into(); + + Ok(pk.to_peer_id()) +} + +#[cfg(test)] +mod tests { + use std::net::{Ipv4Addr, Ipv6Addr}; + + use super::*; + use crate::PeerId; + use alloy_primitives::hex::FromHex; + use discv5::enr::{CombinedKey, Enr, EnrKey}; + + #[test] + fn test_resolve_multiaddr() { + let ip = Ipv4Addr::new(132, 145, 16, 10); + let tcp_port = 9000; + let udp_port = 9001; + let private_key = CombinedKey::generate_secp256k1(); + + let public_key = private_key.public().encode(); + let public_key = + libp2p_identity::secp256k1::PublicKey::try_from_bytes(&public_key).unwrap(); + let peer_id = libp2p::PeerId::from_public_key(&public_key.into()); + + let enr = Enr::builder().ip4(ip).tcp4(tcp_port).udp4(udp_port).build(&private_key).unwrap(); + + let multiaddr = enr_to_multiaddr(&enr).unwrap(); + + let mut received_ip = None; + let mut received_tcp_port = None; + let mut received_p2p_id = None; + + for protocol in &multiaddr { + match protocol { + Protocol::Ip4(ip) => { + received_ip = Some(ip); + } + Protocol::Tcp(port) => { + received_tcp_port = Some(port); + } + Protocol::P2p(id) => { + received_p2p_id = Some(id); + } + _ => { + panic!("Unexpected protocol: {protocol:?}"); + } + } + } + assert_eq!(received_ip, Some(ip)); + assert_eq!(received_tcp_port, Some(tcp_port)); + 
assert_eq!(received_p2p_id, Some(peer_id)); + } + + #[test] + fn test_resolve_multiaddr_ipv6() { + let ip = Ipv6Addr::new(0x2001, 0xdb8, 0x0a, 0x11, 0x1e, 0x8a, 0x2e, 0x3a); + let tcp_port = 9000; + let udp_port = 9001; + let private_key = CombinedKey::generate_secp256k1(); + + let public_key = private_key.public().encode(); + let public_key = + libp2p_identity::secp256k1::PublicKey::try_from_bytes(&public_key).unwrap(); + let peer_id = libp2p::PeerId::from_public_key(&public_key.into()); + + let enr = Enr::builder().ip6(ip).tcp6(tcp_port).udp6(udp_port).build(&private_key).unwrap(); + + let multiaddr = enr_to_multiaddr(&enr).unwrap(); + + let mut received_ip = None; + let mut received_tcp_port = None; + let mut received_p2p_id = None; + + for protocol in &multiaddr { + match protocol { + Protocol::Ip6(ip) => { + received_ip = Some(ip); + } + Protocol::Tcp(port) => { + received_tcp_port = Some(port); + } + Protocol::P2p(id) => { + received_p2p_id = Some(id); + } + _ => { + panic!("Unexpected protocol: {protocol:?}"); + } + } + } + assert_eq!(received_ip, Some(ip)); + assert_eq!(received_tcp_port, Some(tcp_port)); + assert_eq!(received_p2p_id, Some(peer_id)); + } + + #[test] + fn test_convert_local_peer_id_to_multi_peer_id() { + let p2p_keypair = discv5::libp2p_identity::secp256k1::Keypair::generate(); + let uncompressed = p2p_keypair.public().to_bytes_uncompressed(); + let local_peer_id = PeerId::from_slice(&uncompressed[1..]); + + // We need to convert the local peer id (uncompressed secp256k1 public key) to a libp2p + // peer id (protocol buffer encoded public key). 
+ let peer_id = local_id_to_p2p_id(local_peer_id).unwrap(); + + let p2p_public_key: discv5::libp2p_identity::PublicKey = + p2p_keypair.public().clone().into(); + + assert_eq!(peer_id, p2p_public_key.to_peer_id()); + } + + #[test] + fn test_hardcoded_peer_id() { + const PUB_KEY_STR: &str = "548f715f3fc388a7c917ba644a2f16270f1ede48a5d88a4d14ea287cc916068363f3092e39936f1a3e7885198bef0e5af951f1d7b1041ce8ba4010917777e71f"; + let pub_key = PeerId::from_hex(PUB_KEY_STR).unwrap(); + + // We need to convert the local peer id (uncompressed secp256k1 public key) to a libp2p + // peer id (protocol buffer encoded public key). + let peer_id = local_id_to_p2p_id(pub_key).unwrap(); + + let uncompressed_pub_key = peer_id_to_secp256k1_pubkey(pub_key).unwrap(); + + let p2p_public_key: discv5::libp2p_identity::PublicKey = + discv5::libp2p_identity::secp256k1::PublicKey::try_from_bytes( + &uncompressed_pub_key.serialize(), + ) + .unwrap() + .into(); + + assert_eq!(peer_id, p2p_public_key.to_peer_id()); + } +} diff --git a/kona/crates/node/rpc/Cargo.toml b/rust/kona/crates/node/rpc/Cargo.toml similarity index 100% rename from kona/crates/node/rpc/Cargo.toml rename to rust/kona/crates/node/rpc/Cargo.toml diff --git a/rust/kona/crates/node/rpc/README.md b/rust/kona/crates/node/rpc/README.md new file mode 100644 index 00000000000..08d86662988 --- /dev/null +++ b/rust/kona/crates/node/rpc/README.md @@ -0,0 +1,3 @@ +## `kona-rpc` + +Low-level Optimism JSON-RPC server and client implementations. 
diff --git a/kona/crates/node/rpc/src/admin.rs b/rust/kona/crates/node/rpc/src/admin.rs similarity index 100% rename from kona/crates/node/rpc/src/admin.rs rename to rust/kona/crates/node/rpc/src/admin.rs diff --git a/rust/kona/crates/node/rpc/src/client.rs b/rust/kona/crates/node/rpc/src/client.rs new file mode 100644 index 00000000000..1505e48f70f --- /dev/null +++ b/rust/kona/crates/node/rpc/src/client.rs @@ -0,0 +1,100 @@ +use alloy_eips::BlockNumberOrTag; +use alloy_primitives::B256; +use async_trait::async_trait; +use jsonrpsee::core::RpcResult; +use kona_engine::EngineState; +use kona_genesis::RollupConfig; +use kona_protocol::{L2BlockInfo, OutputRoot}; +use rollup_boost::{GetExecutionModeResponse, SetExecutionModeRequest, SetExecutionModeResponse}; +use std::fmt::Debug; +use thiserror::Error; +use tokio::sync::watch; + +/// Client trait wrapping RPC implementation for the `EngineActor`. +#[async_trait] +pub trait EngineRpcClient: Debug + Send + Sync + Clone { + /// Request the current [`RollupConfig`]. + async fn get_config(&self) -> RpcResult; + /// Request the current [`EngineState`] snapshot. + async fn get_state(&self) -> RpcResult; + /// Request the L2 output root for a specific [`BlockNumberOrTag`]. + /// + /// Returns a tuple of [`L2BlockInfo`], [`OutputRoot`], and [`EngineState`] at the requested + /// block. + async fn output_at_block( + &self, + block: BlockNumberOrTag, + ) -> RpcResult<(L2BlockInfo, OutputRoot, EngineState)>; + /// Development API: Get the current number of pending tasks in the queue. + async fn dev_get_task_queue_length(&self) -> RpcResult; + /// Development API: Subscribes to engine queue length updates managed by the returned + /// [`watch::Receiver`]. + async fn dev_subscribe_to_engine_queue_length(&self) -> RpcResult>; + /// Development API: Subscribes to engine state updates managed by the returned + /// [`watch::Receiver`]. 
+ async fn dev_subscribe_to_engine_state(&self) -> RpcResult>; +} + +/// Client trait wrapping RPC implementation for the rollup boost admin endpoints. +#[async_trait] +pub trait RollupBoostAdminClient: Send + Sync + Debug { + /// Sets the execution mode for the rollup boost server. + async fn set_execution_mode( + &self, + request: SetExecutionModeRequest, + ) -> RpcResult; + + /// Gets the current execution mode from the rollup boost server. + async fn get_execution_mode(&self) -> RpcResult; +} + +/// Client trait wrapping RPC implementation for the Sequencer admin endpoints. +#[async_trait] +pub trait SequencerAdminAPIClient: Send + Sync + Debug { + /// Check if the sequencer is active. + async fn is_sequencer_active(&self) -> Result; + + /// Check if the conductor is enabled. + async fn is_conductor_enabled(&self) -> Result; + + /// Check if in recovery mode. + async fn is_recovery_mode(&self) -> Result; + + /// Start the sequencer. + async fn start_sequencer(&self) -> Result<(), SequencerAdminAPIError>; + + /// Stop the sequencer. + async fn stop_sequencer(&self) -> Result; + + /// Set recovery mode. + async fn set_recovery_mode(&self, mode: bool) -> Result<(), SequencerAdminAPIError>; + + /// Override the leader. + async fn override_leader(&self) -> Result<(), SequencerAdminAPIError>; + + /// Reset the derivation pipeline. + async fn reset_derivation_pipeline(&self) -> Result<(), SequencerAdminAPIError>; +} + +/// Errors that can occur when using the sequencer admin API. +#[derive(Debug, Error)] +pub enum SequencerAdminAPIError { + /// Error sending request. + #[error("Error sending request: {0}.")] + RequestError(String), + + /// Error receiving response. + /// Note: this error message is not future-proof, in that it may not be a safe assumption that + /// communication is channel-based. If/when that changes the enum will likely need to be + /// updated to take a parameter, so we can change it then. 
+ #[error("Error receiving response: response channel closed.")] + ResponseError, + + /// Sequencer stopped successfully, followed by some error. + #[error("Sequencer stopped successfully, followed by error: {0}.")] + ErrorAfterSequencerWasStopped(String), + + /// Error overriding leader. + #[error("Error overriding leader: {0}.")] + LeaderOverrideError(String), +} diff --git a/rust/kona/crates/node/rpc/src/config.rs b/rust/kona/crates/node/rpc/src/config.rs new file mode 100644 index 00000000000..a72f08c6ef0 --- /dev/null +++ b/rust/kona/crates/node/rpc/src/config.rs @@ -0,0 +1,48 @@ +//! Contains the RPC Configuration. + +use std::{net::SocketAddr, path::PathBuf}; + +/// The RPC configuration. +#[derive(Debug, Clone)] +pub struct RpcBuilder { + /// Prevent the rpc server from being restarted. + pub no_restart: bool, + /// The RPC socket address. + pub socket: SocketAddr, + /// Enable the admin API. + pub enable_admin: bool, + /// File path used to persist state changes made via the admin API so they persist across + /// restarts. + pub admin_persistence: Option, + /// Enable the websocket rpc server + pub ws_enabled: bool, + /// Enable development RPC endpoints + pub dev_enabled: bool, +} + +impl RpcBuilder { + /// Returns whether `WebSocket` RPC endpoint is enabled + pub const fn ws_enabled(&self) -> bool { + self.ws_enabled + } + + /// Returns whether development RPC endpoints are enabled + pub const fn dev_enabled(&self) -> bool { + self.dev_enabled + } + + /// Returns the socket address of the [`RpcBuilder`]. + pub const fn socket(&self) -> SocketAddr { + self.socket + } + + /// Returns the number of times the RPC server will attempt to restart if it stops. + pub const fn restart_count(&self) -> u32 { + if self.no_restart { 0 } else { 3 } + } + + /// Sets the given [`SocketAddr`] on the [`RpcBuilder`]. 
+ pub fn set_addr(self, addr: SocketAddr) -> Self { + Self { socket: addr, ..self } + } +} diff --git a/kona/crates/node/rpc/src/dev.rs b/rust/kona/crates/node/rpc/src/dev.rs similarity index 100% rename from kona/crates/node/rpc/src/dev.rs rename to rust/kona/crates/node/rpc/src/dev.rs diff --git a/kona/crates/node/rpc/src/health.rs b/rust/kona/crates/node/rpc/src/health.rs similarity index 93% rename from kona/crates/node/rpc/src/health.rs rename to rust/kona/crates/node/rpc/src/health.rs index f0a32e47ccf..777c5c781e6 100644 --- a/kona/crates/node/rpc/src/health.rs +++ b/rust/kona/crates/node/rpc/src/health.rs @@ -7,7 +7,7 @@ use crate::jsonrpsee::HealthzApiServer; /// Key for the rollup boost health status. /// +----------------+-------------------------------+--------------------------------------+-------------------------------+ -/// | Execution Mode | Healthy | PartialContent | Service Unavailable | +/// | Execution Mode | Healthy | `PartialContent` | Service Unavailable | /// +----------------+-------------------------------+--------------------------------------+-------------------------------+ /// | Enabled | - Request-path: L2 succeeds | - Request-path: builder fails/stale | - Request-path: L2 fails | /// | | (get/new payload) → 200 | while L2 succeeds → 206 | (error from L2) → 503 | @@ -15,7 +15,7 @@ use crate::jsonrpsee::HealthzApiServer; /// | | latest-unsafe is fresh → | latest-unsafe is stale → 206 | | /// | | 200 | | | /// +----------------+-------------------------------+--------------------------------------+-------------------------------+ -/// | DryRun | - Request-path: L2 succeeds | - Never set in DryRun | - Request-path: L2 fails | +/// | `DryRun` | - Request-path: L2 succeeds | - Never set in `DryRun` | - Request-path: L2 fails | /// | | (always returns L2) → 200 | (degrade only in Enabled) | (error from L2) → 503 | /// | | - Background: builder stale | | - Background: never sets 503 | /// | | ignored (remains 200) | | | diff --git 
a/kona/crates/node/rpc/src/jsonrpsee.rs b/rust/kona/crates/node/rpc/src/jsonrpsee.rs similarity index 100% rename from kona/crates/node/rpc/src/jsonrpsee.rs rename to rust/kona/crates/node/rpc/src/jsonrpsee.rs diff --git a/kona/crates/node/rpc/src/l1_watcher.rs b/rust/kona/crates/node/rpc/src/l1_watcher.rs similarity index 100% rename from kona/crates/node/rpc/src/l1_watcher.rs rename to rust/kona/crates/node/rpc/src/l1_watcher.rs diff --git a/rust/kona/crates/node/rpc/src/lib.rs b/rust/kona/crates/node/rpc/src/lib.rs new file mode 100644 index 00000000000..28d8559986b --- /dev/null +++ b/rust/kona/crates/node/rpc/src/lib.rs @@ -0,0 +1,59 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +#[macro_use] +extern crate tracing; + +mod admin; +pub use admin::{AdminRpc, NetworkAdminQuery, RollupBoostAdminQuery}; + +mod client; +pub use client::{ + EngineRpcClient, RollupBoostAdminClient, SequencerAdminAPIClient, SequencerAdminAPIError, +}; + +mod config; +pub use config::RpcBuilder; + +mod net; +pub use net::P2pRpc; + +mod p2p; + +mod response; +pub use response::SafeHeadResponse; + +mod output; +pub use output::OutputResponse; + +mod dev; +pub use dev::DevEngineRpc; + +mod jsonrpsee; +pub use jsonrpsee::{ + AdminApiServer, DevEngineApiServer, HealthzApiServer, MinerApiExtServer, OpAdminApiServer, + OpP2PApiServer, RollupBoostHealthzApiServer, RollupNodeApiServer, WsServer, +}; + +#[cfg(feature = "client")] +pub use jsonrpsee::RollupNodeApiClient; + +mod rollup; +pub use rollup::RollupRpc; + +mod l1_watcher; +pub use l1_watcher::{L1State, L1WatcherQueries, L1WatcherQuerySender}; + +mod ws; +pub use 
ws::WsRPC; + +mod health; +pub use health::{ + HealthzResponse, HealthzRpc, RollupBoostHealth, RollupBoostHealthQuery, + RollupBoostHealthzResponse, +}; diff --git a/rust/kona/crates/node/rpc/src/net.rs b/rust/kona/crates/node/rpc/src/net.rs new file mode 100644 index 00000000000..c65600f9b72 --- /dev/null +++ b/rust/kona/crates/node/rpc/src/net.rs @@ -0,0 +1,22 @@ +//! Network types + +use kona_gossip::P2pRpcRequest; + +/// A type alias for the sender of a [`P2pRpcRequest`]. +type P2pReqSender = tokio::sync::mpsc::Sender; + +/// `P2pRpc` +/// +/// This is a server implementation of [`crate::OpP2PApiServer`]. +#[derive(Debug)] +pub struct P2pRpc { + /// The channel to send [`P2pRpcRequest`]s. + pub sender: P2pReqSender, +} + +impl P2pRpc { + /// Constructs a new [`P2pRpc`] given a sender channel. + pub const fn new(sender: P2pReqSender) -> Self { + Self { sender } + } +} diff --git a/kona/crates/node/rpc/src/output.rs b/rust/kona/crates/node/rpc/src/output.rs similarity index 100% rename from kona/crates/node/rpc/src/output.rs rename to rust/kona/crates/node/rpc/src/output.rs diff --git a/kona/crates/node/rpc/src/p2p.rs b/rust/kona/crates/node/rpc/src/p2p.rs similarity index 100% rename from kona/crates/node/rpc/src/p2p.rs rename to rust/kona/crates/node/rpc/src/p2p.rs diff --git a/kona/crates/node/rpc/src/response.rs b/rust/kona/crates/node/rpc/src/response.rs similarity index 100% rename from kona/crates/node/rpc/src/response.rs rename to rust/kona/crates/node/rpc/src/response.rs diff --git a/rust/kona/crates/node/rpc/src/rollup.rs b/rust/kona/crates/node/rpc/src/rollup.rs new file mode 100644 index 00000000000..cf9a79f5836 --- /dev/null +++ b/rust/kona/crates/node/rpc/src/rollup.rs @@ -0,0 +1,132 @@ +//! Implements the rollup client rpc endpoints. These endpoints serve data about the rollup state. +//! +//! 
Implemented in the op-node in + +use alloy_eips::BlockNumberOrTag; +use async_trait::async_trait; +use jsonrpsee::{ + core::RpcResult, + types::{ErrorCode, ErrorObject}, +}; +use kona_engine::EngineState; +use kona_genesis::RollupConfig; +use kona_protocol::SyncStatus; +use std::fmt::Debug; + +use crate::{ + EngineRpcClient, L1State, L1WatcherQueries, OutputResponse, RollupNodeApiServer, + SafeHeadResponse, l1_watcher::L1WatcherQuerySender, +}; + +/// `RollupRpc` +/// +/// This is a server implementation of [`crate::RollupNodeApiServer`]. +#[derive(Debug)] +pub struct RollupRpc { + /// The channel to send [`kona_engine::EngineQueries`]s. + pub engine_client: EngineRpcClient_, + /// The channel to send [`crate::L1WatcherQueries`]s. + pub l1_watcher_sender: L1WatcherQuerySender, +} + +impl RollupRpc { + /// The identifier for the Metric that tracks rollup RPC calls. + pub const RPC_IDENT: &'static str = "rollup_rpc"; + + /// Constructs a new [`RollupRpc`] given a sender channel. + pub const fn new( + engine_client: EngineRpcClient_, + l1_watcher_sender: L1WatcherQuerySender, + ) -> Self { + Self { engine_client, l1_watcher_sender } + } + + // Important note: we zero-out the fields that can't be derived yet to follow op-node's + // behaviour. 
+ fn sync_status_from_actor_queries( + l1_sync_status: L1State, + l2_sync_status: EngineState, + ) -> SyncStatus { + SyncStatus { + current_l1: l1_sync_status.current_l1.unwrap_or_default(), + current_l1_finalized: l1_sync_status.current_l1_finalized.unwrap_or_default(), + head_l1: l1_sync_status.head_l1.unwrap_or_default(), + safe_l1: l1_sync_status.safe_l1.unwrap_or_default(), + finalized_l1: l1_sync_status.finalized_l1.unwrap_or_default(), + unsafe_l2: l2_sync_status.sync_state.unsafe_head(), + cross_unsafe_l2: l2_sync_status.sync_state.cross_unsafe_head(), + local_safe_l2: l2_sync_status.sync_state.local_safe_head(), + safe_l2: l2_sync_status.sync_state.safe_head(), + finalized_l2: l2_sync_status.sync_state.finalized_head(), + } + } +} + +#[async_trait] +impl RollupNodeApiServer + for RollupRpc +{ + async fn op_output_at_block(&self, block_num: BlockNumberOrTag) -> RpcResult { + kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_outputAtBlock"); + + let (l1_sync_status_send, l1_sync_status_recv) = tokio::sync::oneshot::channel(); + + let ((l2_block_info, output_root, l2_sync_status), l1_sync_status) = + tokio::try_join!(self.engine_client.output_at_block(block_num), async { + self.l1_watcher_sender + .send(L1WatcherQueries::L1State(l1_sync_status_send)) + .await + .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; + + l1_sync_status_recv.await.map_err(|_| ErrorObject::from(ErrorCode::InternalError)) + })?; + + let sync_status = Self::sync_status_from_actor_queries(l1_sync_status, l2_sync_status); + + Ok(OutputResponse::from_v0(output_root, sync_status, l2_block_info)) + } + + /// This RPC endpoint is not supported. It is not necessary to track the safe head for every L1 + /// block post-interop anymore so we can remove this method from the rpc interface. 
+ async fn op_safe_head_at_l1_block( + &self, + _block_num: BlockNumberOrTag, + ) -> RpcResult { + kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_safeHeadAtL1Block"); + return Err(ErrorObject::from(ErrorCode::MethodNotFound)); + } + + async fn op_sync_status(&self) -> RpcResult { + kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_syncStatus"); + + let (l1_sync_status_send, l1_sync_status_recv) = tokio::sync::oneshot::channel(); + + let (l1_sync_status, l2_sync_status) = tokio::try_join!( + async { + self.l1_watcher_sender + .send(L1WatcherQueries::L1State(l1_sync_status_send)) + .await + .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; + l1_sync_status_recv.await.map_err(|_| ErrorObject::from(ErrorCode::InternalError)) + }, + self.engine_client.get_state() + ) + .map_err(|_| ErrorObject::from(ErrorCode::InternalError))?; + + return Ok(Self::sync_status_from_actor_queries(l1_sync_status, l2_sync_status)); + } + + async fn op_rollup_config(&self) -> RpcResult { + kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_rollupConfig"); + + self.engine_client.get_config().await + } + + async fn op_version(&self) -> RpcResult { + kona_macros::inc!(gauge, Self::RPC_IDENT, "method" => "op_version"); + + const RPC_VERSION: &str = env!("CARGO_PKG_VERSION"); + + return Ok(RPC_VERSION.to_string()); + } +} diff --git a/kona/crates/node/rpc/src/ws.rs b/rust/kona/crates/node/rpc/src/ws.rs similarity index 100% rename from kona/crates/node/rpc/src/ws.rs rename to rust/kona/crates/node/rpc/src/ws.rs diff --git a/rust/kona/crates/node/service/Cargo.toml b/rust/kona/crates/node/service/Cargo.toml new file mode 100644 index 00000000000..9014f105fca --- /dev/null +++ b/rust/kona/crates/node/service/Cargo.toml @@ -0,0 +1,97 @@ +[package] +name = "kona-node-service" +description = "An implementation of the OP Stack consensus node service" +version = "0.1.3" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace 
= true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# workspace +kona-gossip.workspace = true +kona-disc.workspace = true +kona-engine.workspace = true +kona-sources.workspace = true +kona-genesis.workspace = true +kona-derive.workspace = true +kona-protocol.workspace = true +kona-providers-alloy.workspace = true +kona-rpc = { workspace = true, features = ["client"] } +kona-peers.workspace = true +kona-macros.workspace = true + +# rollup-boost +rollup-boost.workspace = true + +# alloy +alloy-chains.workspace = true +alloy-signer.workspace = true +alloy-signer-local.workspace = true +alloy-primitives.workspace = true +alloy-rpc-client.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } +alloy-provider = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls"] } +alloy-eips.workspace = true +alloy-transport.workspace = true +alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls", "jwt-auth"] } + +# op-alloy +op-alloy-network.workspace = true +op-alloy-rpc-types-engine = { workspace = true, features = ["std"] } +op-alloy-provider.workspace = true + +# general +url.workspace = true +libp2p.workspace = true +libp2p-stream.workspace = true +discv5.workspace = true +futures.workspace = true +tracing.workspace = true +thiserror.workspace = true +tokio-util.workspace = true +async-trait.workspace = true +async-stream.workspace = true +tokio-stream.workspace = true +strum = { workspace = true, features = ["derive"] } +backon.workspace = true +derive_more = { workspace = true, features = ["debug", "eq"] } +jsonrpsee = { workspace = true, features = ["server", "http-client"] } +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } +tower.workspace = true +http-body-util.workspace = true + +# metrics +metrics = { workspace = true, optional = true } + +[dev-dependencies] 
+rstest.workspace = true +arbitrary.workspace = true +rand.workspace = true +anyhow.workspace = true +backon.workspace = true +http.workspace = true +mockall.workspace = true +alloy-primitives = { workspace = true, features = ["k256"] } +alloy-rpc-types-engine = { workspace = true, features = ["arbitrary"] } +alloy-consensus = { workspace = true, features = ["arbitrary"] } +op-alloy-consensus = { workspace = true, features = ["arbitrary", "k256"] } +kona-derive = {workspace = true, features = ["test-utils"]} + +[features] +default = [] +metrics = [ + "dep:metrics", + "kona-derive/metrics", + "kona-disc/metrics", + "kona-engine/metrics", + "kona-gossip/metrics", + "kona-providers-alloy/metrics", + "kona-rpc/metrics", + "libp2p/metrics", +] diff --git a/rust/kona/crates/node/service/README.md b/rust/kona/crates/node/service/README.md new file mode 100644 index 00000000000..c79093f70a9 --- /dev/null +++ b/rust/kona/crates/node/service/README.md @@ -0,0 +1,5 @@ +# `kona-node-service` + +An implementation of the OP Stack [RollupNode][rn-spec] service. + +[rn-spec]: https://specs.optimism.io/protocol/rollup-node.html diff --git a/rust/kona/crates/node/service/src/actors/derivation/actor.rs b/rust/kona/crates/node/service/src/actors/derivation/actor.rs new file mode 100644 index 00000000000..1df73b78145 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/derivation/actor.rs @@ -0,0 +1,341 @@ +//! [`NodeActor`] implementation for the derivation sub-routine. 
+ +use crate::{ + CancellableContext, DerivationActorRequest, DerivationEngineClient, DerivationState, + DerivationStateMachine, DerivationStateTransitionError, DerivationStateUpdate, Metrics, + NodeActor, actors::derivation::L2Finalizer, +}; +use async_trait::async_trait; +use kona_derive::{ + ActivationSignal, Pipeline, PipelineError, PipelineErrorKind, ResetError, ResetSignal, Signal, + SignalReceiver, StepResult, +}; +use kona_protocol::OpAttributesWithParent; +use thiserror::Error; +use tokio::{select, sync::mpsc}; +use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; + +/// The [`NodeActor`] for the derivation sub-routine. +/// +/// This actor is responsible for receiving messages from [`NodeActor`]s and stepping the +/// derivation pipeline forward to produce new payload attributes. The actor then sends the payload +/// to the [`NodeActor`] responsible for the execution sub-routine. +#[derive(Debug)] +pub struct DerivationActor +where + DerivationEngineClient_: DerivationEngineClient, + PipelineSignalReceiver: Pipeline + SignalReceiver, +{ + /// The cancellation token, shared between all tasks. + cancellation_token: CancellationToken, + /// The channel on which all inbound requests are received by the [`DerivationActor`]. + inbound_request_rx: mpsc::Receiver, + /// The Engine client used to interact with the engine. + engine_client: DerivationEngineClient_, + + /// The derivation pipeline. + pipeline: PipelineSignalReceiver, + /// The state machine controlling when derivation can occur. + derivation_state_machine: DerivationStateMachine, + /// The [`L2Finalizer`] tracks derived L2 blocks awaiting finalization. 
+ pub(crate) finalizer: L2Finalizer, +} + +impl CancellableContext + for DerivationActor +where + DerivationEngineClient_: DerivationEngineClient, + PipelineSignalReceiver: Pipeline + SignalReceiver + Send + Sync, +{ + fn cancelled(&self) -> WaitForCancellationFuture<'_> { + self.cancellation_token.cancelled() + } +} + +impl + DerivationActor +where + DerivationEngineClient_: DerivationEngineClient, + PipelineSignalReceiver: Pipeline + SignalReceiver, +{ + /// Creates a new instance of the [`DerivationActor`]. + pub fn new( + engine_client: DerivationEngineClient_, + cancellation_token: CancellationToken, + inbound_request_rx: mpsc::Receiver, + pipeline: PipelineSignalReceiver, + ) -> Self { + Self { + cancellation_token, + pipeline, + inbound_request_rx, + engine_client, + derivation_state_machine: DerivationStateMachine::default(), + finalizer: L2Finalizer::default(), + } + } + + /// Handles a [`Signal`] received over the derivation signal receiver channel. + async fn signal(&mut self, signal: Signal) { + if let Signal::Reset(ResetSignal { l1_origin, .. }) = signal { + kona_macros::set!(counter, Metrics::DERIVATION_L1_ORIGIN, l1_origin.number); + // Clear the finalization queue on reset. + self.finalizer.clear(); + } + + match self.pipeline.signal(signal).await { + Ok(_) => info!(target: "derivation", ?signal, "[SIGNAL] Executed Successfully"), + Err(e) => { + error!(target: "derivation", ?e, ?signal, "Failed to signal derivation pipeline") + } + } + } + + /// Attempts to step the derivation pipeline forward as much as possible in order to produce the + /// next safe payload. + async fn produce_next_attributes(&mut self) -> Result { + // As we start the safe head at the disputed block's parent, we step the pipeline until the + // first attributes are produced. All batches at and before the safe head will be + // dropped, so the first payload will always be the disputed one. 
+ loop { + match self.pipeline.step(self.derivation_state_machine.last_confirmed_safe_head()).await + { + StepResult::PreparedAttributes => { /* continue; attributes will be sent off. */ } + StepResult::AdvancedOrigin => { + let origin = + self.pipeline.origin().ok_or(PipelineError::MissingOrigin.crit())?.number; + + kona_macros::set!(counter, Metrics::DERIVATION_L1_ORIGIN, origin); + debug!(target: "derivation", l1_block = origin, "Advanced L1 origin"); + } + StepResult::OriginAdvanceErr(e) | StepResult::StepFailed(e) => { + match e { + PipelineErrorKind::Temporary(e) => { + // NotEnoughData is transient, and doesn't imply we need to wait for + // more data. We can continue stepping until we receive an Eof. + if matches!(e, PipelineError::NotEnoughData) { + continue; + } + + debug!( + target: "derivation", + "Exhausted data source for now; Yielding until the chain has extended." + ); + return Err(DerivationError::Yield); + } + PipelineErrorKind::Reset(e) => { + warn!(target: "derivation", "Derivation pipeline is being reset: {e}"); + + let system_config = self + .pipeline + .system_config_by_number( + self.derivation_state_machine + .last_confirmed_safe_head() + .block_info + .number, + ) + .await?; + + if matches!(e, ResetError::HoloceneActivation) { + let l1_origin = self + .pipeline + .origin() + .ok_or(PipelineError::MissingOrigin.crit())?; + + self.pipeline + .signal( + ActivationSignal { + l2_safe_head: self + .derivation_state_machine + .last_confirmed_safe_head(), + l1_origin, + system_config: Some(system_config), + } + .signal(), + ) + .await?; + } else { + if let ResetError::ReorgDetected(expected, new) = e { + warn!( + target: "derivation", + "L1 reorg detected! Expected: {expected} | New: {new}" + ); + + kona_macros::inc!(counter, Metrics::L1_REORG_COUNT); + } + // send the `reset` signal to the engine actor only when interop is + // not active. 
+ if !self.pipeline.rollup_config().is_interop_active( + self.derivation_state_machine + .last_confirmed_safe_head() + .block_info + .timestamp, + ) { + self.engine_client.reset_engine_forkchoice().await.map_err(|e| { + error!(target: "derivation", ?e, "Failed to send reset request"); + DerivationError::Sender(Box::new(e)) + })?; + } + self.derivation_state_machine + .update(&DerivationStateUpdate::SignalNeeded)?; + return Err(DerivationError::Yield); + } + } + PipelineErrorKind::Critical(_) => { + error!(target: "derivation", "Critical derivation error: {e}"); + kona_macros::inc!(counter, Metrics::DERIVATION_CRITICAL_ERROR); + return Err(e.into()); + } + } + } + } + + // If there are any new attributes, send them to the execution actor. + if let Some(attrs) = self.pipeline.next() { + return Ok(attrs); + } + } + } + + async fn handle_derivation_actor_request( + &mut self, + request_type: DerivationActorRequest, + ) -> Result<(), DerivationError> { + match request_type { + DerivationActorRequest::ProcessEngineSignalRequest(signal) => { + self.signal(*signal).await; + self.derivation_state_machine.update(&DerivationStateUpdate::SignalProcessed)?; + } + DerivationActorRequest::ProcessFinalizedL1Block(finalized_l1_block) => { + // Attempt to finalize the block. If successful, notify engine. 
+ if let Some(l2_block_number) = self.finalizer.try_finalize_next(*finalized_l1_block) + { + self.engine_client + .send_finalized_l2_block(l2_block_number) + .await + .map_err(|e| DerivationError::Sender(Box::new(e)))?; + } + } + DerivationActorRequest::ProcessL1HeadUpdateRequest(l1_head) => { + info!(target: "derivation", l1_head = ?*l1_head, "Processing l1 head update"); + + self.derivation_state_machine.update(&DerivationStateUpdate::L1DataReceived)?; + + self.attempt_derivation().await?; + } + DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(safe_head) => { + info!(target: "derivation", safe_head = ?*safe_head, "Received safe head from engine."); + self.derivation_state_machine + .update(&DerivationStateUpdate::NewAttributesConfirmed(safe_head))?; + + self.attempt_derivation().await?; + } + DerivationActorRequest::ProcessEngineSyncCompletionRequest(safe_head) => { + info!(target: "derivation", "Engine finished syncing, starting derivation."); + self.derivation_state_machine + .update(&DerivationStateUpdate::ELSyncCompleted(safe_head))?; + + self.attempt_derivation().await?; + } + } + + Ok(()) + } + + /// Attempts to process the next payload attributes. + async fn attempt_derivation(&mut self) -> Result<(), DerivationError> { + if self.derivation_state_machine.current_state() != DerivationState::Deriving { + info!(target: "derivation", derivation_state=?self.derivation_state_machine, "Skipping derivation."); + return Ok(()); + } + + info!(target: "derivation", derivation_state=?self.derivation_state_machine, "Attempting derivation."); + + // Advance the pipeline as much as possible, new data may be available or there still may be + // payloads in the attributes queue. 
+ let payload_attributes = match self.produce_next_attributes().await { + Ok(attrs) => attrs, + Err(DerivationError::Yield) => { + info!(target: "derivation", "Yielding derivation until more data is available."); + self.derivation_state_machine.update(&DerivationStateUpdate::MoreDataNeeded)?; + return Ok(()); + } + Err(e) => { + return Err(e); + } + }; + trace!(target: "derivation", ?payload_attributes, "Produced payload attributes."); + + self.derivation_state_machine.update(&DerivationStateUpdate::NewAttributesDerived( + Box::new(payload_attributes.clone()), + ))?; + + // Enqueue the payload attributes for finalization tracking. + self.finalizer.enqueue_for_finalization(&payload_attributes); + + // Send payload attributes out for processing. + self.engine_client + .send_safe_l2_signal(payload_attributes.into()) + .await + .map_err(|e| DerivationError::Sender(Box::new(e)))?; + + Ok(()) + } +} + +#[async_trait] +impl NodeActor + for DerivationActor +where + DerivationEngineClient_: DerivationEngineClient + 'static, + PipelineSignalReceiver: Pipeline + SignalReceiver + Send + Sync + 'static, +{ + type Error = DerivationError; + type StartData = (); + + async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { + info!(target: "derivation", "Starting derivation"); + loop { + select! { + biased; + + _ = self.cancellation_token.cancelled() => { + info!( + target: "derivation", + "Received shutdown signal. Exiting derivation task." + ); + return Ok(()); + } + req = self.inbound_request_rx.recv() => { + let Some(request_type) = req else { + error!(target: "derivation", "DerivationActor inbound request receiver closed unexpectedly"); + self.cancellation_token.cancel(); + return Err(DerivationError::RequestReceiveFailed); + }; + + self.handle_derivation_actor_request(request_type).await?; + } + } + } + } +} + +/// An error from the [`DerivationActor`]. 
+#[derive(Error, Debug)] +pub enum DerivationError { + /// An error originating from the derivation pipeline. + #[error(transparent)] + Pipeline(#[from] PipelineErrorKind), + /// Waiting for more data to be available. + #[error("Waiting for more data to be available")] + Yield, + /// An error originating from the broadcast sender. + #[error("Failed to send event to broadcast sender: {0}")] + Sender(Box), + /// Failed to receive inbound request + #[error("Failed to receive inbound request")] + RequestReceiveFailed, + /// An invalid state transition occurred. + #[error(transparent)] + StateTransitionError(#[from] DerivationStateTransitionError), +} diff --git a/rust/kona/crates/node/service/src/actors/derivation/delegated/actor.rs b/rust/kona/crates/node/service/src/actors/derivation/delegated/actor.rs new file mode 100644 index 00000000000..bbe03d37052 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/derivation/delegated/actor.rs @@ -0,0 +1,256 @@ +use crate::{ + CancellableContext, DerivationActorRequest, DerivationEngineClient, NodeActor, + actors::derivation::{DerivationDelegateClient, DerivationError}, +}; +use alloy_primitives::BlockHash; +use async_trait::async_trait; +use kona_protocol::{L2BlockInfo, SyncStatus}; +use kona_providers_alloy::AlloyChainProvider; +use thiserror::Error; +use tokio::{select, sync::mpsc, time}; +use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; + +/// The [`NodeActor`] for the delegate derivation sub-routine. +/// +/// This actor is responsible for receiving messages from [`NodeActor`]s and polls +/// an external derivation delegation provider for derivation state. It validates +/// the canonicality of the L1 information associated with delegated derivation +/// results against the canonical L1 chain before forwarding updates. +/// +/// Once validated, the actor sends the derived safe and finalized L2 info +/// to the [`NodeActor`] responsible for the execution sub-routine. 
+#[derive(Debug)] +pub struct DelegateDerivationActor +where + DerivationEngineClient_: DerivationEngineClient, +{ + /// The cancellation token, shared between all tasks. + cancellation_token: CancellationToken, + /// The channel on which all inbound requests are received by the [`DelegateDerivationActor`]. + inbound_request_rx: mpsc::Receiver, + /// The Engine client used to interact with the engine. + engine_client: DerivationEngineClient_, + + /// Derivation delegate provider. + derivation_delegate_provider: DerivationDelegateClient, + /// L1 provider for validating L1 info for derivation delegation. + l1_provider: AlloyChainProvider, + + /// The engine's L2 safe head, according to updates from the Engine. + engine_l2_safe_head: L2BlockInfo, + /// Whether the engine sync has completed. This will only ever go from false -> true. + has_engine_sync_completed: bool, +} + +impl CancellableContext + for DelegateDerivationActor +where + DerivationEngineClient_: DerivationEngineClient, +{ + fn cancelled(&self) -> WaitForCancellationFuture<'_> { + self.cancellation_token.cancelled() + } +} + +impl DelegateDerivationActor +where + DerivationEngineClient_: DerivationEngineClient, +{ + /// Creates a new instance of the [`DelegateDerivationActor`]. 
+ pub fn new( + engine_client: DerivationEngineClient_, + cancellation_token: CancellationToken, + inbound_request_rx: mpsc::Receiver, + derivation_delegate_provider: DerivationDelegateClient, + l1_provider: AlloyChainProvider, + ) -> Self { + Self { + cancellation_token, + inbound_request_rx, + engine_client, + derivation_delegate_provider, + l1_provider, + engine_l2_safe_head: L2BlockInfo::default(), + has_engine_sync_completed: false, + } + } +} + +#[async_trait] +impl NodeActor for DelegateDerivationActor +where + DerivationEngineClient_: DerivationEngineClient + 'static, +{ + type Error = DerivationError; + type StartData = (); + + async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { + self.start_delegate_derivation().await + } +} + +impl DelegateDerivationActor +where + DerivationEngineClient_: DerivationEngineClient + 'static, +{ + /// Hardcoded poll interval for Derivation Delegation + const DERIVATION_DELEGATE_POLL_INTERVAL: std::time::Duration = + std::time::Duration::from_secs(4); + + /// Validates a single L1 block height and hash against the canonical L1 chain. + async fn validate_l1_block( + &mut self, + context: &str, + l1_block_number: u64, + expected_hash: BlockHash, + ) -> Result<(), DerivationDelegationError> { + use kona_derive::ChainProvider; + + let block = self + .l1_provider + .block_info_by_number(l1_block_number) + .await + .map_err(|e| DerivationDelegationError::L1Provider(e.to_string()))?; + + if block.hash != expected_hash { + return Err(DerivationDelegationError::L1ValidationFailed { + context: context.to_string(), + number: l1_block_number, + expected: expected_hash, + actual: block.hash, + }); + } + + Ok(()) + } + + /// Verifies that the L1 info reported by the derivation delegate + /// are consistent with canonical L1 chain. 
+ async fn validate_sync_status(&mut self, v: &SyncStatus) -> bool { + let checks = [ + ("L1 Origin of Safe L2", v.safe_l2.l1_origin.number, v.safe_l2.l1_origin.hash), + ( + "L1 Origin of Finalized L2", + v.finalized_l2.l1_origin.number, + v.finalized_l2.l1_origin.hash, + ), + ("Current L1", v.current_l1.number, v.current_l1.hash), + ]; + for (context, number, hash) in checks { + if let Err(err) = self.validate_l1_block(context, number, hash).await { + warn!( + target: "derivation", + context = context, + error = %err, + "L1 inconsistency detected at sync status from delegate" + ); + return false; + } + } + true + } + + /// Fetches, validates, and applies sync status from the derivation delegate. + async fn fetch_and_apply_delegate_safe_head(&mut self) -> Result<(), DerivationError> { + let sync_status = match self.derivation_delegate_provider.fetch_sync_status().await { + Ok(status) => status, + Err(_) => { + warn!(target: "derivation", "Failed to fetch sync status from delegate"); + return Ok(()); + } + }; + + if !self.validate_sync_status(&sync_status).await { + // Validation failures here are expected to be transient, so we skip processing + // this sync status and continue delegating derivation instead of treating it as + // fatal. 
+ return Ok(()); + } + + self.engine_client + .send_safe_l2_signal(sync_status.safe_l2.into()) + .await + .map_err(|e| DerivationError::Sender(Box::new(e)))?; + + self.engine_client + .send_finalized_l2_block(sync_status.finalized_l2.block_info.number) + .await + .map_err(|e| DerivationError::Sender(Box::new(e)))?; + + debug!( + target: "derivation", + safe_l2 = ?sync_status.safe_l2, + finalized_l2 = ?sync_status.finalized_l2, + "Processed sync status from delegate" + ); + + Ok(()) + } + + async fn start_delegate_derivation(mut self) -> Result<(), DerivationError> { + info!(target: "derivation", "Starting derivation with delegation"); + let mut delegated_derivation_ticker = + time::interval(Self::DERIVATION_DELEGATE_POLL_INTERVAL); + delegated_derivation_ticker.set_missed_tick_behavior(time::MissedTickBehavior::Skip); + loop { + select! { + biased; + + _ = self.cancellation_token.cancelled() => { + info!( + target: "derivation", + "Received shutdown signal. Exiting derivation task." + ); + return Ok(()); + } + req = self.inbound_request_rx.recv() => { + let Some(request_type) = req else { + error!(target: "derivation", "DerivationActor inbound request receiver closed unexpectedly"); + self.cancellation_token.cancel(); + return Err(DerivationError::RequestReceiveFailed); + }; + + self.handle_derivation_delegation_actor_request(request_type).await?; + } + _ = delegated_derivation_ticker.tick(), + if self.has_engine_sync_completed => { + self.fetch_and_apply_delegate_safe_head().await?; + } + } + } + } + + async fn handle_derivation_delegation_actor_request( + &mut self, + request_type: DerivationActorRequest, + ) -> Result<(), DerivationError> { + match request_type { + DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(safe_head) => { + debug!(target: "derivation", safe_head = ?*safe_head, "Received safe head from engine."); + self.engine_l2_safe_head = *safe_head; + } + DerivationActorRequest::ProcessEngineSyncCompletionRequest(safe_head) => { + 
info!(target: "derivation", "Engine finished syncing, starting derivation."); + self.engine_l2_safe_head = *safe_head; + self.has_engine_sync_completed = true; + } + DerivationActorRequest::ProcessEngineSignalRequest(_) | + DerivationActorRequest::ProcessFinalizedL1Block(_) | + DerivationActorRequest::ProcessL1HeadUpdateRequest(_) => { + debug!(target: "derivation", "Ignoring request while derivation delegation: {:?}", request_type); + } + } + Ok(()) + } +} + +#[derive(Error, Debug)] +enum DerivationDelegationError { + /// The L1 provider returned an error (network, RPC, etc.) + #[error("L1 provider error: {0}")] + L1Provider(String), + + /// The hash provided by the derivation delegation does not match the canonical chain. + #[error("L1 inconsistency in {context} at block {number}: expected {expected}, got {actual}")] + L1ValidationFailed { context: String, number: u64, expected: BlockHash, actual: BlockHash }, +} diff --git a/kona/crates/node/service/src/actors/derivation/delegated/client.rs b/rust/kona/crates/node/service/src/actors/derivation/delegated/client.rs similarity index 100% rename from kona/crates/node/service/src/actors/derivation/delegated/client.rs rename to rust/kona/crates/node/service/src/actors/derivation/delegated/client.rs diff --git a/kona/crates/node/service/src/actors/derivation/delegated/mod.rs b/rust/kona/crates/node/service/src/actors/derivation/delegated/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/derivation/delegated/mod.rs rename to rust/kona/crates/node/service/src/actors/derivation/delegated/mod.rs diff --git a/rust/kona/crates/node/service/src/actors/derivation/engine_client.rs b/rust/kona/crates/node/service/src/actors/derivation/engine_client.rs new file mode 100644 index 00000000000..b1b36500b42 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/derivation/engine_client.rs @@ -0,0 +1,76 @@ +use crate::{EngineActorRequest, EngineClientError, EngineClientResult, ResetRequest}; +use 
async_trait::async_trait; +use derive_more::Constructor; +use kona_engine::ConsolidateInput; +use std::fmt::Debug; +use tokio::sync::mpsc; + +/// Client to use to interact with the engine. +#[cfg_attr(test, mockall::automock(type SafeL2Signal = OpAttributesWithParent;))] +#[async_trait] +pub trait DerivationEngineClient: Debug + Send + Sync { + /// Resets the engine's forkchoice. + async fn reset_engine_forkchoice(&self) -> EngineClientResult<()>; + + /// Sends a request to finalize the L2 block at the provided block number. + /// Note: This does not wait for the engine to process it. + async fn send_finalized_l2_block(&self, block_number: u64) -> EngineClientResult<()>; + + /// Sends a consolidation signal to the engine. + /// + /// This is the unified entry point for all consolidation-related inputs, + /// including derived attributes and safe L2 block information, as represented + /// by [`ConsolidateInput`]. + /// + /// Note: This does not wait for the engine to process it. + async fn send_safe_l2_signal(&self, signal: ConsolidateInput) -> EngineClientResult<()>; +} + +/// Client to use to send messages to the Engine Actor's inbound channel. +#[derive(Constructor, Debug)] +pub struct QueuedDerivationEngineClient { + /// A channel to use to send the [`EngineActorRequest`]s to the `EngineActor`. 
+ pub engine_actor_request_tx: mpsc::Sender, +} + +#[async_trait] +impl DerivationEngineClient for QueuedDerivationEngineClient { + async fn reset_engine_forkchoice(&self) -> EngineClientResult<()> { + let (result_tx, mut result_rx) = mpsc::channel(1); + + info!(target: "derivation", "Sending reset request to engine."); + self.engine_actor_request_tx + .send(EngineActorRequest::ResetRequest(Box::new(ResetRequest { result_tx }))) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; + + result_rx + .recv() + .await + .inspect(|_| info!(target: "derivation", "Engine reset successfully.")) + .ok_or_else(|| { + error!(target: "derivation_engine_client", "Failed to receive built payload"); + EngineClientError::ResponseError("response channel closed.".to_string()) + })? + } + + async fn send_finalized_l2_block(&self, block_number: u64) -> EngineClientResult<()> { + trace!(target: "derivation", block_number, "Sending finalized L2 block number to engine."); + self.engine_actor_request_tx + .send(EngineActorRequest::ProcessFinalizedL2BlockNumberRequest(Box::new(block_number))) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; + + Ok(()) + } + + async fn send_safe_l2_signal(&self, signal: ConsolidateInput) -> EngineClientResult<()> { + trace!(target: "derivation", ?signal, "Sending safe L2 signal info to engine."); + self.engine_actor_request_tx + .send(EngineActorRequest::ProcessSafeL2SignalRequest(signal)) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; + + Ok(()) + } +} diff --git a/kona/crates/node/service/src/actors/derivation/finalizer.rs b/rust/kona/crates/node/service/src/actors/derivation/finalizer.rs similarity index 98% rename from kona/crates/node/service/src/actors/derivation/finalizer.rs rename to rust/kona/crates/node/service/src/actors/derivation/finalizer.rs index 354e98489c3..5fbbb46d8d7 100644 --- 
a/kona/crates/node/service/src/actors/derivation/finalizer.rs +++ b/rust/kona/crates/node/service/src/actors/derivation/finalizer.rs @@ -35,7 +35,7 @@ impl L2Finalizer { ), ) .and_modify(|n| *n = (*n).max(attributes.block_number())) - .or_insert(attributes.block_number()); + .or_insert_with(|| attributes.block_number()); } /// Clears the finalization queue. diff --git a/kona/crates/node/service/src/actors/derivation/mod.rs b/rust/kona/crates/node/service/src/actors/derivation/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/derivation/mod.rs rename to rust/kona/crates/node/service/src/actors/derivation/mod.rs diff --git a/rust/kona/crates/node/service/src/actors/derivation/request.rs b/rust/kona/crates/node/service/src/actors/derivation/request.rs new file mode 100644 index 00000000000..9482eef5582 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/derivation/request.rs @@ -0,0 +1,36 @@ +use kona_derive::Signal; +use kona_protocol::{BlockInfo, L2BlockInfo}; +use thiserror::Error; + +/// The result of an Engine client call. +pub type DerivationClientResult = Result; + +/// Error making requests to the [`crate::DerivationActor`]. +#[derive(Debug, Error)] +pub enum DerivationClientError { + /// Error making a request to the [`crate::DerivationActor`]. The request never made it there. + #[error("Error making a request to the derivation actor: {0}.")] + RequestError(String), + + /// Error receiving response from the [`crate::DerivationActor`]. + /// This means the request may or may not have succeeded. + #[error("Error receiving response from the derivation actor: {0}..")] + ResponseError(String), +} + +/// Inbound requests that the [`crate::DerivationActor`] can process. +#[derive(Debug)] +pub enum DerivationActorRequest { + /// Request to process the fact that Engine sync has completed, along with the current safe + /// head. 
+ ProcessEngineSyncCompletionRequest(Box), + /// Request to process the provided L2 engine safe head update. + ProcessEngineSafeHeadUpdateRequest(Box), + /// A request containing a [`Signal`] to the derivation pipeline. + /// This allows the Engine to send the `DerivationActor` signals (e.g. to Flush or Reset). + ProcessEngineSignalRequest(Box), + /// A request to process the provided finalized L1 [`BlockInfo`]. + ProcessFinalizedL1Block(Box), + /// Request to process the provided L1 head block update. + ProcessL1HeadUpdateRequest(Box), +} diff --git a/kona/crates/node/service/src/actors/derivation/state_machine.rs b/rust/kona/crates/node/service/src/actors/derivation/state_machine.rs similarity index 97% rename from kona/crates/node/service/src/actors/derivation/state_machine.rs rename to rust/kona/crates/node/service/src/actors/derivation/state_machine.rs index a33c8534c21..3b5cef5d88b 100644 --- a/kona/crates/node/service/src/actors/derivation/state_machine.rs +++ b/rust/kona/crates/node/service/src/actors/derivation/state_machine.rs @@ -16,8 +16,8 @@ pub enum DerivationState { /// [`crate::DerivationActor`] is waiting for confirmation that they were processed into a safe /// head. AwaitingSafeHeadConfirmation, - /// A reorg or some other inconsistency was detected, necessitating a [`kona_derive::Signal`] to - /// be processed before continuing derivation. + /// A reorg or some other inconsistency was detected, necessitating a [`kona_derive::Signal`] + /// to be processed before continuing derivation. AwaitingSignal, /// After receiving a [`kona_derive::Signal`], we need an update of L1 data or a new engine /// safe head to start deriving again. This represents the state waiting for one of the two. @@ -28,7 +28,7 @@ pub enum DerivationState { /// The possible updates of the [`DerivationStateMachine`] implemented by the /// [`crate::DerivationActor`]. 
-#[derive(Clone, Debug, PartialEq)] +#[derive(Clone, Debug, PartialEq, Eq)] pub enum DerivationStateUpdate { /// The initial EL sync has completed along with the current safe head, allowing derivation to /// start. @@ -49,7 +49,7 @@ pub enum DerivationStateUpdate { SignalProcessed, } -/// An error processing a [DerivationStateMachine] state transition. +/// An error processing a [`DerivationStateMachine`] state transition. #[derive(Debug, Error)] pub enum DerivationStateTransitionError { /// An invalid state transition was attempted. @@ -194,10 +194,10 @@ impl DerivationStateMachine { &mut self, state_update: &DerivationStateUpdate, ) -> Result<(), DerivationStateTransitionError> { - if let DerivationStateUpdate::NewAttributesConfirmed(safe_head) = state_update { - if safe_head.block_info.hash == self.confirmed_safe_head.block_info.hash { - info!(target: "derivation", ?safe_head, "Re-received safe head. Skipping state transition."); - } + if let DerivationStateUpdate::NewAttributesConfirmed(safe_head) = state_update && + safe_head.block_info.hash == self.confirmed_safe_head.block_info.hash + { + info!(target: "derivation", ?safe_head, "Re-received safe head. 
Skipping state transition."); } info!(target: "derivation", state=?self.state, ?state_update, "Executing derivation state update."); @@ -225,7 +225,7 @@ mod tests { use op_alloy_rpc_types_engine::OpPayloadAttributes; use rstest::rstest; - /// Creates a dummy L2BlockInfo for testing + /// Creates a dummy `L2BlockInfo` for testing fn dummy_l2_block_info() -> L2BlockInfo { L2BlockInfo { block_info: BlockInfo { @@ -239,7 +239,7 @@ mod tests { } } - /// Creates a dummy OpAttributesWithParent for testing + /// Creates a dummy `OpAttributesWithParent` for testing fn dummy_op_attributes() -> OpAttributesWithParent { OpAttributesWithParent { attributes: OpPayloadAttributes::default(), diff --git a/kona/crates/node/service/src/actors/engine/actor.rs b/rust/kona/crates/node/service/src/actors/engine/actor.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/actor.rs rename to rust/kona/crates/node/service/src/actors/engine/actor.rs diff --git a/rust/kona/crates/node/service/src/actors/engine/client.rs b/rust/kona/crates/node/service/src/actors/engine/client.rs new file mode 100644 index 00000000000..25322a5a7f2 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/engine/client.rs @@ -0,0 +1,77 @@ +use crate::{DerivationActorRequest, DerivationClientError, DerivationClientResult}; +use async_trait::async_trait; +use derive_more::Constructor; +use kona_derive::Signal; +use kona_protocol::L2BlockInfo; +use std::fmt::Debug; +use tokio::sync::mpsc; + +/// Client to use to interact with the [`crate::DerivationActor`]. +#[cfg_attr(test, mockall::automock)] +#[async_trait] +pub trait EngineDerivationClient: Debug + Send + Sync { + /// Notifies the [`crate::DerivationActor`] that engine syncing has completed. + /// Note: Does not wait for the derivation client to process this message. 
+ async fn notify_sync_completed(&self, safe_head: L2BlockInfo) -> DerivationClientResult<()>; + + /// Sends the new engine `safe_head` to the [`crate::DerivationActor`]. + /// Note: Does not wait for the derivation client to process this message. + async fn send_new_engine_safe_head(&self, safe_head: L2BlockInfo) + -> DerivationClientResult<()>; + + /// Sends the [`crate::DerivationActor`] the provided [`Signal`]. + /// Note: Does not wait for the derivation client to process this message. + async fn send_signal(&self, signal: Signal) -> DerivationClientResult<()>; +} + +/// Client to use to send messages to the [`crate::DerivationActor`]'s inbound channel. +#[derive(Constructor, Debug)] +pub struct QueuedEngineDerivationClient { + /// A channel to use to send the [`DerivationActorRequest`]s to the [`crate::DerivationActor`]. + pub derivation_actor_request_tx: mpsc::Sender, +} + +#[async_trait] +impl EngineDerivationClient for QueuedEngineDerivationClient { + async fn notify_sync_completed(&self, safe_head: L2BlockInfo) -> DerivationClientResult<()> { + info!(target: "engine", "Sending sync completed to derivation actor"); + + self.derivation_actor_request_tx + .send(DerivationActorRequest::ProcessEngineSyncCompletionRequest(Box::new(safe_head))) + .await + .map_err(|_| { + DerivationClientError::RequestError("request channel closed.".to_string()) + })?; + + Ok(()) + } + + async fn send_new_engine_safe_head( + &self, + safe_head: L2BlockInfo, + ) -> DerivationClientResult<()> { + info!(target: "engine", safe_head = ?safe_head, "Sending new safe head to derivation actor"); + + self.derivation_actor_request_tx + .send(DerivationActorRequest::ProcessEngineSafeHeadUpdateRequest(Box::new(safe_head))) + .await + .map_err(|_| { + DerivationClientError::RequestError("request channel closed.".to_string()) + })?; + + Ok(()) + } + + async fn send_signal(&self, signal: Signal) -> DerivationClientResult<()> { + info!(target: "engine", signal = ?signal, "Sending signal to 
derivation actor"); + + self.derivation_actor_request_tx + .send(DerivationActorRequest::ProcessEngineSignalRequest(Box::new(signal))) + .await + .map_err(|_| { + DerivationClientError::RequestError("request channel closed.".to_string()) + })?; + + Ok(()) + } +} diff --git a/kona/crates/node/service/src/actors/engine/config.rs b/rust/kona/crates/node/service/src/actors/engine/config.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/config.rs rename to rust/kona/crates/node/service/src/actors/engine/config.rs diff --git a/kona/crates/node/service/src/actors/engine/engine_request_processor.rs b/rust/kona/crates/node/service/src/actors/engine/engine_request_processor.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/engine_request_processor.rs rename to rust/kona/crates/node/service/src/actors/engine/engine_request_processor.rs diff --git a/kona/crates/node/service/src/actors/engine/error.rs b/rust/kona/crates/node/service/src/actors/engine/error.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/error.rs rename to rust/kona/crates/node/service/src/actors/engine/error.rs diff --git a/kona/crates/node/service/src/actors/engine/mod.rs b/rust/kona/crates/node/service/src/actors/engine/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/mod.rs rename to rust/kona/crates/node/service/src/actors/engine/mod.rs diff --git a/rust/kona/crates/node/service/src/actors/engine/request.rs b/rust/kona/crates/node/service/src/actors/engine/request.rs new file mode 100644 index 00000000000..5b058d8bfed --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/engine/request.rs @@ -0,0 +1,97 @@ +use alloy_rpc_types_engine::PayloadId; +use kona_engine::{BuildTaskError, ConsolidateInput, EngineQueries, SealTaskError}; +use kona_protocol::OpAttributesWithParent; +use kona_rpc::{RollupBoostAdminQuery, RollupBoostHealthQuery}; +use 
op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use thiserror::Error; +use tokio::sync::mpsc; + +/// The result of an Engine client call. +pub type EngineClientResult = Result; + +/// Error making requests to the `BlockEngine`. +#[derive(Debug, Error)] +pub enum EngineClientError { + /// Error making a request to the engine. The request never made it there. + #[error("Error making a request to the engine: {0}.")] + RequestError(String), + + /// Error receiving response from the engine. + /// This means the request may or may not have succeeded. + #[error("Error receiving response from the engine: {0}.")] + ResponseError(String), + + /// An error occurred starting to build a block. + #[error(transparent)] + StartBuildError(#[from] BuildTaskError), + + /// An error occurred sealing a block. + #[error(transparent)] + SealError(#[from] SealTaskError), + + /// An error occurred performing the reset. + #[error("An error occurred performing the reset: {0}.")] + ResetForkchoiceError(String), +} + +/// Inbound requests that the [`crate::EngineActor`] can process. +#[derive(Debug)] +pub enum EngineActorRequest { + /// Request to build. + BuildRequest(Box), + /// Request to consolidate using a safe L2 signal from attributes or delegated safe-block + /// derivation + ProcessSafeL2SignalRequest(ConsolidateInput), + /// Request to finalize the L2 block at the provided block number. + ProcessFinalizedL2BlockNumberRequest(Box), + /// Request to insert the provided unsafe block. + ProcessUnsafeL2BlockRequest(Box), + /// Request to reset engine forkchoice. + ResetRequest(Box), + /// Request for the engine to process the provided RPC request. + RpcRequest(Box), + /// Request to seal the block with the provided details. + SealRequest(Box), +} + +/// RPC Request for the engine to handle. +#[derive(Debug)] +pub enum EngineRpcRequest { + /// Engine RPC query. + EngineQuery(Box), + /// Rollup boost admin request. + RollupBoostAdminRequest(Box), + /// Rollup boost health request. 
+ RollupBoostHealthRequest(Box), +} + +/// A request to build a payload. +/// Contains the attributes to build and a channel to send back the resulting `PayloadId`. +#[derive(Debug)] +pub struct BuildRequest { + /// The [`OpAttributesWithParent`] from which the block build should be started. + pub attributes: OpAttributesWithParent, + /// The channel on which the result, successful or not, will be sent. + pub result_tx: mpsc::Sender, +} + +/// A request to reset the engine forkchoice. +/// Optionally contains a channel to send back the response if the caller would like to know that +/// the request was successfully processed. +#[derive(Debug)] +pub struct ResetRequest { + /// response will be sent to this channel, if `Some`. + pub result_tx: mpsc::Sender>, +} + +/// A request to seal and canonicalize a payload. +/// Contains the `PayloadId`, attributes, and a channel to send back the result. +#[derive(Debug)] +pub struct SealRequest { + /// The `PayloadId` to seal and canonicalize. + pub payload_id: PayloadId, + /// The attributes necessary for the seal operation. + pub attributes: OpAttributesWithParent, + /// The channel on which the result, successful or not, will be sent. 
+ pub result_tx: mpsc::Sender>, +} diff --git a/kona/crates/node/service/src/actors/engine/rollup_boost.rs b/rust/kona/crates/node/service/src/actors/engine/rollup_boost.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/rollup_boost.rs rename to rust/kona/crates/node/service/src/actors/engine/rollup_boost.rs diff --git a/kona/crates/node/service/src/actors/engine/rpc_request_processor.rs b/rust/kona/crates/node/service/src/actors/engine/rpc_request_processor.rs similarity index 100% rename from kona/crates/node/service/src/actors/engine/rpc_request_processor.rs rename to rust/kona/crates/node/service/src/actors/engine/rpc_request_processor.rs diff --git a/kona/crates/node/service/src/actors/l1_watcher/actor.rs b/rust/kona/crates/node/service/src/actors/l1_watcher/actor.rs similarity index 100% rename from kona/crates/node/service/src/actors/l1_watcher/actor.rs rename to rust/kona/crates/node/service/src/actors/l1_watcher/actor.rs diff --git a/kona/crates/node/service/src/actors/l1_watcher/blockstream.rs b/rust/kona/crates/node/service/src/actors/l1_watcher/blockstream.rs similarity index 100% rename from kona/crates/node/service/src/actors/l1_watcher/blockstream.rs rename to rust/kona/crates/node/service/src/actors/l1_watcher/blockstream.rs diff --git a/kona/crates/node/service/src/actors/l1_watcher/client.rs b/rust/kona/crates/node/service/src/actors/l1_watcher/client.rs similarity index 100% rename from kona/crates/node/service/src/actors/l1_watcher/client.rs rename to rust/kona/crates/node/service/src/actors/l1_watcher/client.rs diff --git a/kona/crates/node/service/src/actors/l1_watcher/error.rs b/rust/kona/crates/node/service/src/actors/l1_watcher/error.rs similarity index 100% rename from kona/crates/node/service/src/actors/l1_watcher/error.rs rename to rust/kona/crates/node/service/src/actors/l1_watcher/error.rs diff --git a/kona/crates/node/service/src/actors/l1_watcher/mod.rs 
b/rust/kona/crates/node/service/src/actors/l1_watcher/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/l1_watcher/mod.rs rename to rust/kona/crates/node/service/src/actors/l1_watcher/mod.rs diff --git a/rust/kona/crates/node/service/src/actors/mod.rs b/rust/kona/crates/node/service/src/actors/mod.rs new file mode 100644 index 00000000000..5002b554cf4 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/mod.rs @@ -0,0 +1,56 @@ +//! [`NodeActor`] services for the node. +//! +//! [NodeActor]: super::NodeActor + +mod traits; +pub use traits::{CancellableContext, NodeActor}; + +mod engine; +pub use engine::{ + BuildRequest, EngineActor, EngineActorRequest, EngineClientError, EngineClientResult, + EngineConfig, EngineDerivationClient, EngineError, EngineProcessingRequest, EngineProcessor, + EngineRequestReceiver, EngineRpcProcessor, EngineRpcRequest, EngineRpcRequestReceiver, + QueuedEngineDerivationClient, ResetRequest, SealRequest, +}; + +mod rpc; +pub use rpc::{ + QueuedEngineRpcClient, QueuedSequencerAdminAPIClient, RollupBoostAdminApiClient, + RollupBoostHealthRpcClient, RpcActor, RpcActorError, RpcContext, +}; + +mod derivation; +pub use derivation::{ + DelegateDerivationActor, DerivationActor, DerivationActorRequest, DerivationClientError, + DerivationClientResult, DerivationDelegateClient, DerivationDelegateClientError, + DerivationEngineClient, DerivationError, DerivationState, DerivationStateMachine, + DerivationStateTransitionError, DerivationStateUpdate, QueuedDerivationEngineClient, +}; + +mod l1_watcher; +pub use l1_watcher::{ + BlockStream, L1WatcherActor, L1WatcherActorError, L1WatcherDerivationClient, + QueuedL1WatcherDerivationClient, +}; + +mod network; +pub use network::{ + NetworkActor, NetworkActorError, NetworkBuilder, NetworkBuilderError, NetworkConfig, + NetworkDriver, NetworkDriverError, NetworkEngineClient, NetworkHandler, NetworkInboundData, + QueuedNetworkEngineClient, QueuedUnsafePayloadGossipClient, 
UnsafePayloadGossipClient, + UnsafePayloadGossipClientError, +}; + +mod sequencer; + +pub use sequencer::{ + Conductor, ConductorClient, ConductorError, DelayedL1OriginSelectorProvider, L1OriginSelector, + L1OriginSelectorError, L1OriginSelectorProvider, OriginSelector, QueuedSequencerEngineClient, + SequencerActor, SequencerActorError, SequencerAdminQuery, SequencerConfig, + SequencerEngineClient, +}; + +#[cfg(test)] +pub use network::MockUnsafePayloadGossipClient; +#[cfg(test)] +pub use sequencer::{MockConductor, MockOriginSelector, MockSequencerEngineClient}; diff --git a/kona/crates/node/service/src/actors/network/README.md b/rust/kona/crates/node/service/src/actors/network/README.md similarity index 100% rename from kona/crates/node/service/src/actors/network/README.md rename to rust/kona/crates/node/service/src/actors/network/README.md diff --git a/rust/kona/crates/node/service/src/actors/network/actor.rs b/rust/kona/crates/node/service/src/actors/network/actor.rs new file mode 100644 index 00000000000..c4b51caa73e --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/network/actor.rs @@ -0,0 +1,332 @@ +use alloy_primitives::Address; +use async_trait::async_trait; +use kona_gossip::P2pRpcRequest; +use kona_rpc::NetworkAdminQuery; +use kona_sources::BlockSignerError; +use libp2p::TransportError; +use op_alloy_rpc_types_engine::{OpExecutionPayloadEnvelope, OpNetworkPayloadEnvelope}; +use thiserror::Error; +use tokio::{self, select, sync::mpsc}; +use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; + +use crate::{ + CancellableContext, NetworkEngineClient, NodeActor, + actors::network::{ + builder::NetworkBuilder, driver::NetworkDriverError, error::NetworkBuilderError, + }, +}; + +/// The network actor handles two core networking components of the rollup node: +/// - *discovery*: Peer discovery over UDP using discv5. +/// - *gossip*: Block gossip over TCP using libp2p. 
+/// +/// The network actor itself is a light wrapper around the [`NetworkBuilder`]. +/// +/// ## Example +/// +/// ```rust,ignore +/// use kona_gossip::NetworkDriver; +/// use std::net::{IpAddr, Ipv4Addr, SocketAddr}; +/// +/// let chain_id = 10; +/// let signer = Address::random(); +/// let socket = SocketAddr::new(IpAddr::V4(Ipv4Addr::UNSPECIFIED), 9099); +/// +/// // Construct the `Network` using the builder. +/// // let mut driver = Network::builder() +/// // .with_unsafe_block_signer(signer) +/// // .with_chain_id(chain_id) +/// // .with_gossip_addr(socket) +/// // .build() +/// // .unwrap(); +/// +/// // Construct the `NetworkActor` with the [`Network`]. +/// // let actor = NetworkActor::new(driver); +/// ``` +#[derive(Debug)] +pub struct NetworkActor { + /// Network driver + pub(super) builder: NetworkBuilder, + /// The cancellation token, shared between all tasks. + pub(super) cancellation_token: CancellationToken, + /// A channel to receive the unsafe block signer address. + pub(super) signer: mpsc::Receiver
, + /// Handler for p2p RPC Requests. + pub(super) p2p_rpc: mpsc::Receiver, + /// A channel to receive admin rpc requests. + pub(super) admin_rpc: mpsc::Receiver, + /// A channel to receive unsafe blocks and send them through the gossip layer. + pub(super) publish_rx: mpsc::Receiver, + /// A channel to use to interact with the engine actor. + pub(super) engine_client: NetworkEngineClient_, +} + +/// The inbound data for the network actor. +#[derive(Debug)] +pub struct NetworkInboundData { + /// A channel to send the unsafe block signer address to the network actor. + pub signer: mpsc::Sender
, + /// Handler for p2p RPC Requests sent to the network actor. + pub p2p_rpc: mpsc::Sender, + /// Handler for admin RPC Requests. + pub admin_rpc: mpsc::Sender, + /// A channel to send unsafe blocks to the network actor. + /// This channel should only be used by the sequencer actor/admin RPC api to forward their + /// newly produced unsafe blocks to the network actor. + pub gossip_payload_tx: mpsc::Sender, +} + +impl NetworkActor { + /// Constructs a new [`NetworkActor`] given the [`NetworkBuilder`] + pub fn new( + engine_client: NetworkEngineClient_, + cancellation_token: CancellationToken, + driver: NetworkBuilder, + ) -> (NetworkInboundData, Self) { + let (signer_tx, signer_rx) = mpsc::channel(16); + let (rpc_tx, rpc_rx) = mpsc::channel(1024); + let (admin_rpc_tx, admin_rpc_rx) = mpsc::channel(1024); + let (publish_tx, publish_rx) = tokio::sync::mpsc::channel(256); + let actor = Self { + builder: driver, + cancellation_token, + signer: signer_rx, + p2p_rpc: rpc_rx, + admin_rpc: admin_rpc_rx, + publish_rx, + engine_client, + }; + let outbound_data = NetworkInboundData { + signer: signer_tx, + p2p_rpc: rpc_tx, + admin_rpc: admin_rpc_tx, + gossip_payload_tx: publish_tx, + }; + (outbound_data, actor) + } +} + +impl CancellableContext for NetworkActor { + fn cancelled(&self) -> WaitForCancellationFuture<'_> { + self.cancellation_token.cancelled() + } +} + +/// An error from the network actor. +#[derive(Debug, Error)] +pub enum NetworkActorError { + /// Network builder error. + #[error(transparent)] + NetworkBuilder(#[from] NetworkBuilderError), + /// Network driver error. + #[error(transparent)] + NetworkDriver(#[from] NetworkDriverError), + /// Driver startup failed. + #[error(transparent)] + DriverStartup(#[from] TransportError), + /// The network driver was missing its unsafe block receiver. + #[error("Missing unsafe block receiver in network driver")] + MissingUnsafeBlockReceiver, + /// The network driver was missing its unsafe block signer sender. 
+ #[error("Missing unsafe block signer in network driver")] + MissingUnsafeBlockSigner, + /// Channel closed unexpectedly. + #[error("Channel closed unexpectedly")] + ChannelClosed, + /// Failed to sign the payload. + #[error("Failed to sign the payload: {0}")] + FailedToSignPayload(#[from] BlockSignerError), +} + +#[async_trait] +impl NodeActor + for NetworkActor +{ + type Error = NetworkActorError; + type StartData = (); + + async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { + let mut handler = self.builder.build()?.start().await?; + + // New unsafe block channel. + let (unsafe_block_tx, mut unsafe_block_rx) = tokio::sync::mpsc::unbounded_channel(); + + loop { + select! { + _ = self.cancellation_token.cancelled() => { + info!( + target: "network", + "Received shutdown signal. Exiting network task." + ); + return Ok(()); + } + block = unsafe_block_rx.recv() => { + let Some(block) = block else { + error!(target: "node::p2p", "The unsafe block receiver channel has closed"); + return Err(NetworkActorError::ChannelClosed); + }; + + if self.engine_client.send_unsafe_block(block).await.is_err() { + warn!(target: "network", "Failed to forward unsafe block to engine"); + return Err(NetworkActorError::ChannelClosed); + } + } + signer = self.signer.recv() => { + let Some(signer) = signer else { + warn!( + target: "network", + "Found no unsafe block signer on receive" + ); + return Err(NetworkActorError::ChannelClosed); + }; + if handler.unsafe_block_signer_sender.send(signer).is_err() { + warn!( + target: "network", + "Failed to send unsafe block signer to network handler", + ); + } + } + Some(block) = self.publish_rx.recv(), if !self.publish_rx.is_closed() => { + let timestamp = block.execution_payload.timestamp(); + let selector = |handler: &kona_gossip::BlockHandler| { + handler.topic(timestamp) + }; + let Some(signer) = handler.signer.as_ref() else { + warn!(target: "net", "No local signer available to sign the payload"); + continue; + }; + + let 
chain_id = handler.discovery.chain_id; + + let sender_address = *handler.unsafe_block_signer_sender.borrow(); + + let payload_hash = block.payload_hash(); + let signature = signer.sign_block(payload_hash, chain_id, sender_address).await?; + + let payload = OpNetworkPayloadEnvelope { + payload: block.execution_payload, + parent_beacon_block_root: block.parent_beacon_block_root, + signature, + payload_hash, + }; + + match handler.gossip.publish(selector, Some(payload)) { + Ok(id) => info!("Published unsafe payload | {:?}", id), + Err(e) => warn!("Failed to publish unsafe payload: {:?}", e), + } + } + event = handler.gossip.next() => { + let Some(event) = event else { + error!(target: "node::p2p", "The gossip swarm stream has ended"); + return Err(NetworkActorError::ChannelClosed); + }; + + if let Some(payload) = handler.gossip.handle_event(event) + && unsafe_block_tx.send(payload.into()).is_err() + { + warn!(target: "node::p2p", "Failed to send unsafe block to network handler"); + } + }, + enr = handler.enr_receiver.recv() => { + let Some(enr) = enr else { + error!(target: "node::p2p", "The enr receiver channel has closed"); + return Err(NetworkActorError::ChannelClosed); + }; + handler.gossip.dial(enr); + }, + _ = handler.peer_score_inspector.tick(), if handler.gossip.peer_monitoring.as_ref().is_some() => { + handler.handle_peer_monitoring().await; + }, + Some(NetworkAdminQuery::PostUnsafePayload { payload }) = self.admin_rpc.recv(), if !self.admin_rpc.is_closed() => { + debug!(target: "node::p2p", "Broadcasting unsafe payload from admin api"); + if unsafe_block_tx.send(payload).is_err() { + warn!(target: "node::p2p", "Failed to send unsafe block to network handler"); + } + }, + Some(req) = self.p2p_rpc.recv(), if !self.p2p_rpc.is_closed() => { + req.handle(&mut handler.gossip, &handler.discovery); + }, + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::B256; + use alloy_rpc_types_engine::{ExecutionPayloadV1, ExecutionPayloadV3}; + 
use alloy_signer::SignerSync; + use alloy_signer_local::PrivateKeySigner; + use arbitrary::Arbitrary; + use op_alloy_rpc_types_engine::OpExecutionPayload; + use rand::Rng; + + #[test] + fn test_payload_signature_roundtrip_v1() { + let mut bytes = [0u8; 4096]; + rand::rng().fill(bytes.as_mut_slice()); + + let pubkey = PrivateKeySigner::random(); + let expected_address = pubkey.address(); + const CHAIN_ID: u64 = 1337; + + let block = OpExecutionPayloadEnvelope { + execution_payload: OpExecutionPayload::V1( + ExecutionPayloadV1::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), + ), + parent_beacon_block_root: None, + }; + + let payload_hash = block.payload_hash(); + let signature = pubkey.sign_hash_sync(&payload_hash.signature_message(CHAIN_ID)).unwrap(); + let payload = OpNetworkPayloadEnvelope { + payload: block.execution_payload, + parent_beacon_block_root: block.parent_beacon_block_root, + signature, + payload_hash, + }; + let encoded_payload = payload.encode_v1().unwrap(); + + let decoded_payload = OpNetworkPayloadEnvelope::decode_v1(&encoded_payload).unwrap(); + + let msg = decoded_payload.payload_hash.signature_message(CHAIN_ID); + let msg_signer = decoded_payload.signature.recover_address_from_prehash(&msg).unwrap(); + + assert_eq!(expected_address, msg_signer); + } + + #[test] + fn test_payload_signature_roundtrip_v3() { + let mut bytes = [0u8; 4096]; + rand::rng().fill(bytes.as_mut_slice()); + + let pubkey = PrivateKeySigner::random(); + let expected_address = pubkey.address(); + const CHAIN_ID: u64 = 1337; + + let block = OpExecutionPayloadEnvelope { + execution_payload: OpExecutionPayload::V3( + ExecutionPayloadV3::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), + ), + parent_beacon_block_root: Some(B256::random()), + }; + + let payload_hash = block.payload_hash(); + let signature = pubkey.sign_hash_sync(&payload_hash.signature_message(CHAIN_ID)).unwrap(); + let payload = OpNetworkPayloadEnvelope { + payload: 
block.execution_payload, + parent_beacon_block_root: block.parent_beacon_block_root, + signature, + payload_hash, + }; + let encoded_payload = payload.encode_v3().unwrap(); + + let decoded_payload = OpNetworkPayloadEnvelope::decode_v3(&encoded_payload).unwrap(); + + let msg = decoded_payload.payload_hash.signature_message(CHAIN_ID); + let msg_signer = decoded_payload.signature.recover_address_from_prehash(&msg).unwrap(); + + assert_eq!(expected_address, msg_signer); + } +} diff --git a/kona/crates/node/service/src/actors/network/builder.rs b/rust/kona/crates/node/service/src/actors/network/builder.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/builder.rs rename to rust/kona/crates/node/service/src/actors/network/builder.rs diff --git a/kona/crates/node/service/src/actors/network/config.rs b/rust/kona/crates/node/service/src/actors/network/config.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/config.rs rename to rust/kona/crates/node/service/src/actors/network/config.rs diff --git a/kona/crates/node/service/src/actors/network/driver.rs b/rust/kona/crates/node/service/src/actors/network/driver.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/driver.rs rename to rust/kona/crates/node/service/src/actors/network/driver.rs diff --git a/rust/kona/crates/node/service/src/actors/network/engine_client.rs b/rust/kona/crates/node/service/src/actors/network/engine_client.rs new file mode 100644 index 00000000000..b677e8a511f --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/network/engine_client.rs @@ -0,0 +1,33 @@ +use crate::{EngineActorRequest, EngineClientError, EngineClientResult}; +use async_trait::async_trait; +use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use std::fmt::Debug; +use tokio::sync::mpsc; + +/// Client used to interact with the Engine. 
+#[cfg_attr(test, mockall::automock)] +#[async_trait] +pub trait NetworkEngineClient: Debug + Send + Sync { + /// Note: a successful response does not mean the block was successfully inserted. + /// This function just sends the message to the engine. It does not wait for a response. + async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()>; +} + +/// Client to use to send unsafe blocks to the Engine's inbound channel. +#[derive(Debug)] +pub struct QueuedNetworkEngineClient { + /// A channel to use to send the `EngineActor` requests. + pub engine_actor_request_tx: mpsc::Sender, +} + +#[async_trait] +impl NetworkEngineClient for QueuedNetworkEngineClient { + async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { + trace!(target: "network", ?block, "Sending unsafe block to engine."); + Ok(self + .engine_actor_request_tx + .send(EngineActorRequest::ProcessUnsafeL2BlockRequest(Box::new(block))) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?) 
+ } +} diff --git a/kona/crates/node/service/src/actors/network/error.rs b/rust/kona/crates/node/service/src/actors/network/error.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/error.rs rename to rust/kona/crates/node/service/src/actors/network/error.rs diff --git a/kona/crates/node/service/src/actors/network/gossip.rs b/rust/kona/crates/node/service/src/actors/network/gossip.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/gossip.rs rename to rust/kona/crates/node/service/src/actors/network/gossip.rs diff --git a/kona/crates/node/service/src/actors/network/handler.rs b/rust/kona/crates/node/service/src/actors/network/handler.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/handler.rs rename to rust/kona/crates/node/service/src/actors/network/handler.rs diff --git a/kona/crates/node/service/src/actors/network/mod.rs b/rust/kona/crates/node/service/src/actors/network/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/network/mod.rs rename to rust/kona/crates/node/service/src/actors/network/mod.rs diff --git a/kona/crates/node/service/src/actors/rpc/actor.rs b/rust/kona/crates/node/service/src/actors/rpc/actor.rs similarity index 100% rename from kona/crates/node/service/src/actors/rpc/actor.rs rename to rust/kona/crates/node/service/src/actors/rpc/actor.rs diff --git a/kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs b/rust/kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs similarity index 98% rename from kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs rename to rust/kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs index 4222082250f..bbf02f130ef 100644 --- a/kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs +++ b/rust/kona/crates/node/service/src/actors/rpc/engine_rpc_client.rs @@ -18,7 +18,7 @@ use tokio::sync::{mpsc, oneshot, watch}; /// supported [`EngineActorRequest`] operations to 
limit the power of callers to RPC-type requests. #[derive(Clone, Constructor, Debug)] pub struct QueuedEngineRpcClient { - /// A channel to use to send the EngineActor requests. + /// A channel to use to send the `EngineActor` requests. pub engine_actor_request_tx: mpsc::Sender, } diff --git a/kona/crates/node/service/src/actors/rpc/error.rs b/rust/kona/crates/node/service/src/actors/rpc/error.rs similarity index 100% rename from kona/crates/node/service/src/actors/rpc/error.rs rename to rust/kona/crates/node/service/src/actors/rpc/error.rs diff --git a/kona/crates/node/service/src/actors/rpc/mod.rs b/rust/kona/crates/node/service/src/actors/rpc/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/rpc/mod.rs rename to rust/kona/crates/node/service/src/actors/rpc/mod.rs diff --git a/kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs b/rust/kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs similarity index 94% rename from kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs rename to rust/kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs index 5eb8bd94e80..dee64408b92 100644 --- a/kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs +++ b/rust/kona/crates/node/service/src/actors/rpc/rollup_boost_rpc_client.rs @@ -9,11 +9,11 @@ use rollup_boost::{GetExecutionModeResponse, SetExecutionModeRequest, SetExecuti use std::fmt::Debug; use tokio::sync::{mpsc, oneshot}; -/// [`RollupBoostHealthzApiServer`] implementation to send the request to EngineActor's request +/// [`RollupBoostHealthzApiServer`] implementation to send the request to `EngineActor`'s request /// channel. #[derive(Debug)] pub struct RollupBoostHealthRpcClient { - /// A channel to use to send the EngineActor requests. + /// A channel to use to send the `EngineActor` requests. 
pub engine_actor_request_tx: mpsc::Sender, } @@ -38,10 +38,11 @@ impl RollupBoostHealthzApiServer for RollupBoostHealthRpcClient { } } -/// [`RollupBoostAdminClient`] implementation to send the request to EngineActor's request channel. +/// [`RollupBoostAdminClient`] implementation to send the request to `EngineActor`'s request +/// channel. #[derive(Debug)] pub struct RollupBoostAdminApiClient { - /// A channel to use to send the EngineActor requests. + /// A channel to use to send the `EngineActor` requests. pub engine_actor_request_tx: mpsc::Sender, } diff --git a/kona/crates/node/service/src/actors/rpc/sequencer_rpc_client.rs b/rust/kona/crates/node/service/src/actors/rpc/sequencer_rpc_client.rs similarity index 100% rename from kona/crates/node/service/src/actors/rpc/sequencer_rpc_client.rs rename to rust/kona/crates/node/service/src/actors/rpc/sequencer_rpc_client.rs diff --git a/rust/kona/crates/node/service/src/actors/sequencer/actor.rs b/rust/kona/crates/node/service/src/actors/sequencer/actor.rs new file mode 100644 index 00000000000..194766cf01d --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/sequencer/actor.rs @@ -0,0 +1,524 @@ +//! The [`SequencerActor`]. 
+ +use crate::{ + CancellableContext, NodeActor, SequencerAdminQuery, UnsafePayloadGossipClient, + actors::{ + SequencerEngineClient, + engine::EngineClientError, + sequencer::{ + conductor::Conductor, + error::SequencerActorError, + metrics::{ + update_attributes_build_duration_metrics, update_block_build_duration_metrics, + update_conductor_commitment_duration_metrics, update_seal_duration_metrics, + update_total_transactions_sequenced, + }, + origin_selector::OriginSelector, + }, + }, +}; +use alloy_rpc_types_engine::PayloadId; +use async_trait::async_trait; +use kona_derive::{AttributesBuilder, PipelineErrorKind}; +use kona_engine::{InsertTaskError, SealTaskError, SynchronizeTaskError}; +use kona_genesis::RollupConfig; +use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; +use op_alloy_rpc_types_engine::OpPayloadAttributes; +use std::{ + sync::Arc, + time::{Duration, Instant, SystemTime, UNIX_EPOCH}, +}; +use tokio::{select, sync::mpsc}; +use tokio_util::sync::{CancellationToken, WaitForCancellationFuture}; + +/// The handle to a block that has been started but not sealed. +#[derive(Debug)] +pub(super) struct UnsealedPayloadHandle { + /// The [`PayloadId`] of the unsealed payload. + pub payload_id: PayloadId, + /// The [`OpAttributesWithParent`] used to start block building. + pub attributes_with_parent: OpAttributesWithParent, +} + +/// The return payload of the `seal_last_and_start_next` function. This allows the sequencer +/// to make an informed decision about when to seal and build the next block. +#[derive(Debug)] +struct SealLastStartNextResult { + /// The [`UnsealedPayloadHandle`] that was built. + pub unsealed_payload_handle: Option, + /// How long it took to execute the seal operation. + pub seal_duration: Duration, +} + +/// The [`SequencerActor`] is responsible for building L2 blocks on top of the current unsafe head +/// and scheduling them to be signed and gossipped by the P2P layer, extending the L2 chain with new +/// blocks. 
+#[derive(Debug)] +pub struct SequencerActor< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, +> where + AttributesBuilder_: AttributesBuilder, + Conductor_: Conductor, + OriginSelector_: OriginSelector, + SequencerEngineClient_: SequencerEngineClient, + UnsafePayloadGossipClient_: UnsafePayloadGossipClient, +{ + /// Receiver for admin API requests. + pub admin_api_rx: mpsc::Receiver, + /// The attributes builder used for block building. + pub attributes_builder: AttributesBuilder_, + /// The cancellation token, shared between all tasks. + pub cancellation_token: CancellationToken, + /// The optional conductor RPC client. + pub conductor: Option, + /// The struct used to interact with the engine. + pub engine_client: SequencerEngineClient_, + /// Whether the sequencer is active. + pub is_active: bool, + /// Whether the sequencer is in recovery mode. + pub in_recovery_mode: bool, + /// The struct used to determine the next L1 origin. + pub origin_selector: OriginSelector_, + /// The rollup configuration. + pub rollup_config: Arc, + /// A client to asynchronously sign and gossip built payloads to the network actor. + pub unsafe_payload_gossip_client: UnsafePayloadGossipClient_, +} + +impl< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, +> + SequencerActor< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, + > +where + AttributesBuilder_: AttributesBuilder, + Conductor_: Conductor, + OriginSelector_: OriginSelector, + SequencerEngineClient_: SequencerEngineClient, + UnsafePayloadGossipClient_: UnsafePayloadGossipClient, +{ + /// Seals and commits the last pending block, if one exists and starts the build job for the + /// next L2 block, on top of the current unsafe head. 
+ /// + /// If a new block was started, it will return the associated [`UnsealedPayloadHandle`] so + /// that it may be sealed and committed in a future call to this function. + async fn seal_last_and_start_next( + &mut self, + payload_to_seal: Option<&UnsealedPayloadHandle>, + ) -> Result { + let seal_duration = match payload_to_seal { + Some(to_seal) => { + let seal_start = Instant::now(); + self.seal_and_commit_payload_if_applicable(to_seal).await?; + seal_start.elapsed() + } + None => Duration::default(), + }; + + let unsealed_payload_handle = self.build_unsealed_payload().await?; + + Ok(SealLastStartNextResult { unsealed_payload_handle, seal_duration }) + } + + /// Sends a seal request to seal the provided [`UnsealedPayloadHandle`], committing and + /// gossiping the resulting block, if one is built. + async fn seal_and_commit_payload_if_applicable( + &self, + unsealed_payload_handle: &UnsealedPayloadHandle, + ) -> Result<(), SequencerActorError> { + let seal_request_start = Instant::now(); + + // Send the seal request to the engine to seal the unsealed block. + let payload = self + .engine_client + .seal_and_canonicalize_block( + unsealed_payload_handle.payload_id, + unsealed_payload_handle.attributes_with_parent.clone(), + ) + .await?; + + update_seal_duration_metrics(seal_request_start.elapsed()); + + let payload_transaction_count = + unsealed_payload_handle.attributes_with_parent.count_transactions(); + update_total_transactions_sequenced(payload_transaction_count); + + // If the conductor is available, commit the payload to it. 
+ if let Some(conductor) = &self.conductor { + let _conductor_commitment_start = Instant::now(); + if let Err(err) = conductor.commit_unsafe_payload(&payload).await { + error!(target: "sequencer", ?err, "Failed to commit unsafe payload to conductor"); + } + + update_conductor_commitment_duration_metrics(_conductor_commitment_start.elapsed()); + } + + self.unsafe_payload_gossip_client + .schedule_execution_payload_gossip(payload) + .await + .map_err(Into::into) + } + + /// Starts building an L2 block by creating and populating payload attributes referencing the + /// correct L1 origin block and sending them to the block engine. + pub(super) async fn build_unsealed_payload( + &mut self, + ) -> Result, SequencerActorError> { + let unsafe_head = self.engine_client.get_unsafe_head().await?; + + let Some(l1_origin) = self.get_next_payload_l1_origin(unsafe_head).await? else { + // Temporary error - retry on next tick. + return Ok(None); + }; + + info!( + target: "sequencer", + parent_num = unsafe_head.block_info.number, + l1_origin_num = l1_origin.number, + "Started sequencing new block" + ); + + // Build the payload attributes for the next block. + let attributes_build_start = Instant::now(); + + let Some(attributes_with_parent) = self.build_attributes(unsafe_head, l1_origin).await? + else { + // Temporary error or reset - retry on next tick. + return Ok(None); + }; + + update_attributes_build_duration_metrics(attributes_build_start.elapsed()); + + // Send the built attributes to the engine to be built. + let build_request_start = Instant::now(); + + let payload_id = + self.engine_client.start_build_block(attributes_with_parent.clone()).await?; + + update_block_build_duration_metrics(build_request_start.elapsed()); + + Ok(Some(UnsealedPayloadHandle { payload_id, attributes_with_parent })) + } + + /// Determines and validates the L1 origin block for the provided L2 unsafe head. + /// Returns `Ok(None)` for temporary errors that should be retried. 
+ async fn get_next_payload_l1_origin( + &mut self, + unsafe_head: L2BlockInfo, + ) -> Result, SequencerActorError> { + let l1_origin = match self + .origin_selector + .next_l1_origin(unsafe_head, self.in_recovery_mode) + .await + { + Ok(l1_origin) => l1_origin, + Err(err) => { + warn!( + target: "sequencer", + ?err, + "Temporary error occurred while selecting next L1 origin. Re-attempting on next tick." + ); + return Ok(None); + } + }; + + if unsafe_head.l1_origin.hash != l1_origin.parent_hash && + unsafe_head.l1_origin.hash != l1_origin.hash + { + warn!( + target: "sequencer", + l1_origin = ?l1_origin, + unsafe_head_hash = %unsafe_head.l1_origin.hash, + unsafe_head_l1_origin = ?unsafe_head.l1_origin, + "Cannot build new L2 block on inconsistent L1 origin, resetting engine" + ); + self.engine_client.reset_engine_forkchoice().await?; + return Ok(None); + } + Ok(Some(l1_origin)) + } + + /// Builds the `OpAttributesWithParent` for the next block to build. If None is returned, it + /// indicates that no attributes could be built at this time but future attempts may be made. + async fn build_attributes( + &mut self, + unsafe_head: L2BlockInfo, + l1_origin: BlockInfo, + ) -> Result, SequencerActorError> { + let mut attributes = match self + .attributes_builder + .prepare_payload_attributes(unsafe_head, l1_origin.id()) + .await + { + Ok(attrs) => attrs, + Err(PipelineErrorKind::Temporary(_)) => { + // Temporary error - retry on next tick. 
+ return Ok(None); + } + Err(PipelineErrorKind::Reset(_)) => { + if let Err(err) = self.engine_client.reset_engine_forkchoice().await { + error!(target: "sequencer", ?err, "Failed to reset engine"); + return Err(SequencerActorError::ChannelClosed); + } + + warn!( + target: "sequencer", + "Resetting engine due to pipeline error while preparing payload attributes" + ); + return Ok(None); + } + Err(err @ PipelineErrorKind::Critical(_)) => { + error!(target: "sequencer", ?err, "Failed to prepare payload attributes"); + return Err(err.into()); + } + }; + + attributes.no_tx_pool = Some(!self.should_use_tx_pool(l1_origin, &attributes)); + + let attrs_with_parent = OpAttributesWithParent::new(attributes, unsafe_head, None, false); + Ok(Some(attrs_with_parent)) + } + + /// Determines, for the provided L1 origin block and payload attributes being constructed, if + /// transaction pool transactions should be enabled. + fn should_use_tx_pool(&self, l1_origin: BlockInfo, attributes: &OpPayloadAttributes) -> bool { + if self.in_recovery_mode { + warn!(target: "sequencer", "Sequencer is in recovery mode, producing empty block"); + return false; + } + + // If the next L2 block is beyond the sequencer drift threshold, we must produce an empty + // block. + if attributes.payload_attributes.timestamp > + l1_origin.timestamp + self.rollup_config.max_sequencer_drift(l1_origin.timestamp) + { + return false; + } + + // Do not include transactions in the first Ecotone block. + if self.rollup_config.is_first_ecotone_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing ecotone upgrade block"); + return false; + } + + // Do not include transactions in the first Fjord block. + if self.rollup_config.is_first_fjord_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing fjord upgrade block"); + return false; + } + + // Do not include transactions in the first Granite block. 
+ if self.rollup_config.is_first_granite_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing granite upgrade block"); + return false; + } + + // Do not include transactions in the first Holocene block. + if self.rollup_config.is_first_holocene_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing holocene upgrade block"); + return false; + } + + // Do not include transactions in the first Isthmus block. + if self.rollup_config.is_first_isthmus_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing isthmus upgrade block"); + return false; + } + + // Do not include transactions in the first Jovian block. + // See: `` + if self.rollup_config.is_first_jovian_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing jovian upgrade block"); + return false; + } + + // Do not include transactions in the first Interop block. + if self.rollup_config.is_first_interop_block(attributes.payload_attributes.timestamp) { + info!(target: "sequencer", "Sequencing interop upgrade block"); + return false; + } + + // Transaction pool transactions are enabled if none of the reasons to disable are satisfied + // above. + true + } + + /// Schedules the initial engine reset request and waits for the unsafe head to be updated. + async fn schedule_initial_reset(&self) -> Result<(), SequencerActorError> { + // Reset the engine, in order to initialize the engine state. + // NB: this call waits for confirmation that the reset succeeded and we can proceed with + // post-reset logic. 
+ self.engine_client.reset_engine_forkchoice().await.map_err(|err| { + error!(target: "sequencer", ?err, "Failed to send reset request to engine"); + err.into() + }) + } +} + +#[async_trait] +impl< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, +> NodeActor + for SequencerActor< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, + > +where + AttributesBuilder_: AttributesBuilder + Sync + 'static, + Conductor_: Conductor + Sync + 'static, + OriginSelector_: OriginSelector + Sync + 'static, + SequencerEngineClient_: SequencerEngineClient + Sync + 'static, + UnsafePayloadGossipClient_: UnsafePayloadGossipClient + Sync + 'static, +{ + type Error = SequencerActorError; + type StartData = (); + + async fn start(mut self, _: Self::StartData) -> Result<(), Self::Error> { + let mut build_ticker = + tokio::time::interval(Duration::from_secs(self.rollup_config.block_time)); + + self.update_metrics(); + + // Reset the engine state prior to beginning block building. + self.schedule_initial_reset().await?; + + let mut next_payload_to_seal: Option = None; + let mut last_seal_duration = Duration::from_secs(0); + loop { + select! { + // We are using a biased select here to ensure that the admin queries are given priority over the block building task. + // This is important to limit the occurrence of race conditions where a stopped query is received when a sequencer is building a new block. + biased; + _ = self.cancellation_token.cancelled() => { + info!( + target: "sequencer", + "Received shutdown signal. Exiting sequencer task." 
+ ); + return Ok(()); + } + Some(query) = self.admin_api_rx.recv() => { + let active_before = self.is_active; + + self.handle_admin_query(query).await; + + // immediately attempt to build a block if the sequencer was just started + if !active_before && self.is_active { + build_ticker.reset_immediately(); + } + } + // The sequencer must be active to build new blocks. + _ = build_ticker.tick(), if self.is_active => { + + match self.seal_last_and_start_next(next_payload_to_seal.as_ref()).await { + Ok(res) => { + next_payload_to_seal = res.unsealed_payload_handle; + last_seal_duration = res.seal_duration; + }, + Err(SequencerActorError::EngineError(EngineClientError::SealError(err))) => { + if is_seal_task_err_fatal(&err) { + error!(target: "sequencer", err=?err, "Critical seal task error occurred"); + self.cancellation_token.cancel(); + return Err(SequencerActorError::EngineError(EngineClientError::SealError(err))); + } + next_payload_to_seal = None; + }, + Err(other_err) => { + error!(target: "sequencer", err = ?other_err, "Unexpected error building or sealing payload"); + self.cancellation_token.cancel(); + return Err(other_err); + } + } + + if let Some(ref payload) = next_payload_to_seal { + let next_block_seconds = payload.attributes_with_parent.parent().block_info.timestamp.saturating_add(self.rollup_config.block_time); + // next block time is last + block_time - time it takes to seal. 
+ let next_block_time = UNIX_EPOCH + Duration::from_secs(next_block_seconds) - last_seal_duration; + match next_block_time.duration_since(SystemTime::now()) { + Ok(duration) => build_ticker.reset_after(duration), + Err(_) => build_ticker.reset_immediately(), + }; + } else { + build_ticker.reset_immediately(); + } + } + } + } + } +} + +impl< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, +> CancellableContext + for SequencerActor< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, + > +where + AttributesBuilder_: AttributesBuilder, + Conductor_: Conductor, + OriginSelector_: OriginSelector, + SequencerEngineClient_: SequencerEngineClient, + UnsafePayloadGossipClient_: UnsafePayloadGossipClient, +{ + fn cancelled(&self) -> WaitForCancellationFuture<'_> { + self.cancellation_token.cancelled() + } +} + +// Determines whether the provided [`SealTaskError`] is fatal for the sequencer. +// +// NB: We could use `err.severity()`, but that gives EngineActor control over this classification. +// `SequencerActor` may have different interpretations of severity, and it is not clear when making +// a change in that area of the codebase that it will affect this area. When a new task error is +// added, this approach guarantees compilation will fail until it is handled here. 
+fn is_seal_task_err_fatal(err: &SealTaskError) -> bool { + match err { + SealTaskError::PayloadInsertionFailed(insert_err) => match &**insert_err { + InsertTaskError::ForkchoiceUpdateFailed(synchronize_error) => match synchronize_error { + SynchronizeTaskError::FinalizedAheadOfUnsafe(_, _) => true, + SynchronizeTaskError::ForkchoiceUpdateFailed(_) | + SynchronizeTaskError::InvalidForkchoiceState | + SynchronizeTaskError::UnexpectedPayloadStatus(_) => false, + }, + InsertTaskError::FromBlockError(_) | InsertTaskError::L2BlockInfoConstruction(_) => { + true + } + InsertTaskError::InsertFailed(_) | InsertTaskError::UnexpectedPayloadStatus(_) => false, + }, + SealTaskError::GetPayloadFailed(_) | + SealTaskError::HoloceneInvalidFlush | + SealTaskError::UnsafeHeadChangedSinceBuild => false, + SealTaskError::DepositOnlyPayloadFailed | + SealTaskError::DepositOnlyPayloadReattemptFailed | + SealTaskError::FromBlock(_) | + SealTaskError::MpscSend(_) | + SealTaskError::ClockWentBackwards => true, + } +} diff --git a/kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs b/rust/kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs similarity index 98% rename from kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs rename to rust/kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs index b9bc76c1fe1..ecf7e6a231c 100644 --- a/kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs +++ b/rust/kona/crates/node/service/src/actors/sequencer/admin_api_impl.rs @@ -173,7 +173,7 @@ where Ok(()) } - pub(super) async fn reset_derivation_pipeline(&mut self) -> Result<(), SequencerAdminAPIError> { + pub(super) async fn reset_derivation_pipeline(&self) -> Result<(), SequencerAdminAPIError> { info!(target: "sequencer", "Resetting derivation pipeline"); self.engine_client.reset_engine_forkchoice().await.map_err(|e| { error!(target: "sequencer", err=?e, "Failed to reset engine forkchoice"); diff --git 
a/kona/crates/node/service/src/actors/sequencer/conductor.rs b/rust/kona/crates/node/service/src/actors/sequencer/conductor.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/conductor.rs rename to rust/kona/crates/node/service/src/actors/sequencer/conductor.rs diff --git a/kona/crates/node/service/src/actors/sequencer/config.rs b/rust/kona/crates/node/service/src/actors/sequencer/config.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/config.rs rename to rust/kona/crates/node/service/src/actors/sequencer/config.rs diff --git a/rust/kona/crates/node/service/src/actors/sequencer/engine_client.rs b/rust/kona/crates/node/service/src/actors/sequencer/engine_client.rs new file mode 100644 index 00000000000..638f0603a31 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/sequencer/engine_client.rs @@ -0,0 +1,139 @@ +use crate::{ + EngineClientError, EngineClientResult, + actors::engine::{BuildRequest, EngineActorRequest, ResetRequest, SealRequest}, +}; +use alloy_rpc_types_engine::PayloadId; +use async_trait::async_trait; +use derive_more::Constructor; +use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; +use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use std::fmt::Debug; +use tokio::sync::{mpsc, watch}; + +/// Trait to be used by the Sequencer to interact with the engine, abstracting communication +/// mechanism. +#[cfg_attr(test, mockall::automock)] +#[async_trait] +pub trait SequencerEngineClient: Debug + Send + Sync { + /// Resets the engine's forkchoice, awaiting confirmation that it succeeded or returning the + /// error in performing the reset. + async fn reset_engine_forkchoice(&self) -> EngineClientResult<()>; + + /// Starts building a block with the provided attributes. + /// + /// Returns a `PayloadId` that can be used to seal the block later. 
+ async fn start_build_block( + &self, + attributes: OpAttributesWithParent, + ) -> EngineClientResult; + + /// Seals and canonicalizes a previously started block. + /// + /// Takes a `PayloadId` from a previous `start_build_block` call and returns + /// the finalized execution payload envelope. + async fn seal_and_canonicalize_block( + &self, + payload_id: PayloadId, + attributes: OpAttributesWithParent, + ) -> EngineClientResult; + + /// Returns the current unsafe head [`L2BlockInfo`]. + async fn get_unsafe_head(&self) -> EngineClientResult; +} + +/// Queue-based implementation of the [`SequencerEngineClient`] trait. This handles all +/// channel-based communication. +#[derive(Constructor, Debug)] +pub struct QueuedSequencerEngineClient { + /// A channel to use to send the `EngineActor` requests. + pub engine_actor_request_tx: mpsc::Sender, + /// A channel to receive the latest unsafe head [`L2BlockInfo`]. + pub unsafe_head_rx: watch::Receiver, +} + +#[async_trait] +impl SequencerEngineClient for QueuedSequencerEngineClient { + async fn get_unsafe_head(&self) -> EngineClientResult { + Ok(*self.unsafe_head_rx.borrow()) + } + + async fn reset_engine_forkchoice(&self) -> EngineClientResult<()> { + let (result_tx, mut result_rx) = mpsc::channel(1); + + info!(target: "sequencer", "Sending reset request to engine."); + self.engine_actor_request_tx + .send(EngineActorRequest::ResetRequest(Box::new(ResetRequest { result_tx }))) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; + + result_rx + .recv() + .await + .inspect(|_| info!(target: "sequencer", "Engine reset successfully.")) + .ok_or_else(|| { + error!(target: "block_engine", "Failed to receive built payload"); + EngineClientError::ResponseError("response channel closed.".to_string()) + })? 
+ } + + async fn start_build_block( + &self, + attributes: OpAttributesWithParent, + ) -> EngineClientResult { + let (payload_id_tx, mut payload_id_rx) = mpsc::channel(1); + + trace!(target: "sequencer", "Sending start build request to engine."); + if self + .engine_actor_request_tx + .send(EngineActorRequest::BuildRequest(Box::new(BuildRequest { + attributes, + result_tx: payload_id_tx, + }))) + .await + .is_err() + { + return Err(EngineClientError::RequestError("request channel closed.".to_string())); + } + + payload_id_rx.recv() + .await + .inspect(|payload_id| trace!(target: "sequencer", ?payload_id, "Start build request successfully.")) + .ok_or_else(|| { + error!(target: "block_engine", "Failed to receive payload for initiated block build"); + EngineClientError::ResponseError("response channel closed.".to_string()) + }) + } + + async fn seal_and_canonicalize_block( + &self, + payload_id: PayloadId, + attributes: OpAttributesWithParent, + ) -> EngineClientResult { + let (result_tx, mut result_rx) = mpsc::channel(1); + + trace!(target: "sequencer", ?attributes, "Sending seal request to engine."); + self.engine_actor_request_tx + .send(EngineActorRequest::SealRequest(Box::new(SealRequest { + payload_id, + attributes, + result_tx, + }))) + .await + .map_err(|_| EngineClientError::RequestError("request channel closed.".to_string()))?; + + match result_rx.recv().await { + Some(Ok(payload)) => { + trace!(target: "sequencer", ?payload, "Seal succeeded."); + Ok(payload) + } + Some(Err(err)) => { + info!(target: "sequencer", ?err, "Seal failed."); + Err(EngineClientError::SealError(err)) + } + None => { + error!(target: "block_engine", "Failed to receive built payload"); + Err(EngineClientError::ResponseError("response channel closed.".to_string())) + } + } + } +} diff --git a/kona/crates/node/service/src/actors/sequencer/error.rs b/rust/kona/crates/node/service/src/actors/sequencer/error.rs similarity index 100% rename from 
kona/crates/node/service/src/actors/sequencer/error.rs rename to rust/kona/crates/node/service/src/actors/sequencer/error.rs diff --git a/rust/kona/crates/node/service/src/actors/sequencer/metrics.rs b/rust/kona/crates/node/service/src/actors/sequencer/metrics.rs new file mode 100644 index 00000000000..07c42b3db2b --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/sequencer/metrics.rs @@ -0,0 +1,77 @@ +use std::time::Duration; + +use crate::{ + Conductor, OriginSelector, SequencerActor, SequencerEngineClient, UnsafePayloadGossipClient, +}; +use kona_derive::AttributesBuilder; + +/// `SequencerActor` metrics-related method implementations. +impl< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, +> + SequencerActor< + AttributesBuilder_, + Conductor_, + OriginSelector_, + SequencerEngineClient_, + UnsafePayloadGossipClient_, + > +where + AttributesBuilder_: AttributesBuilder, + Conductor_: Conductor, + OriginSelector_: OriginSelector, + SequencerEngineClient_: SequencerEngineClient, + UnsafePayloadGossipClient_: UnsafePayloadGossipClient, +{ + /// Updates the metrics for the sequencer actor. + pub(super) fn update_metrics(&self) { + // no-op if disabled. + #[cfg(feature = "metrics")] + { + let state_flags: [(&str, String); 2] = [ + ("active", self.is_active.to_string()), + ("recovery", self.in_recovery_mode.to_string()), + ]; + + let gauge = metrics::gauge!(crate::Metrics::SEQUENCER_STATE, &state_flags); + gauge.set(1); + } + } +} + +#[inline] +pub(super) fn update_attributes_build_duration_metrics(duration: Duration) { + // Log the attributes build duration, if metrics are enabled. 
+ kona_macros::set!(gauge, crate::Metrics::SEQUENCER_ATTRIBUTES_BUILDER_DURATION, duration); +} + +#[inline] +pub(super) fn update_conductor_commitment_duration_metrics(duration: Duration) { + kona_macros::set!(gauge, crate::Metrics::SEQUENCER_CONDUCTOR_COMMITMENT_DURATION, duration); +} + +#[inline] +pub(super) fn update_block_build_duration_metrics(duration: Duration) { + kona_macros::set!( + gauge, + crate::Metrics::SEQUENCER_BLOCK_BUILDING_START_TASK_DURATION, + duration + ); +} + +#[inline] +pub(super) fn update_seal_duration_metrics(duration: Duration) { + // Log the block building seal task duration, if metrics are enabled. + kona_macros::set!(gauge, crate::Metrics::SEQUENCER_BLOCK_BUILDING_SEAL_TASK_DURATION, duration); +} + +#[inline] +pub(super) fn update_total_transactions_sequenced(transaction_count: u64) { + #[cfg(feature = "metrics")] + metrics::counter!(crate::Metrics::SEQUENCER_TOTAL_TRANSACTIONS_SEQUENCED) + .increment(transaction_count); +} diff --git a/kona/crates/node/service/src/actors/sequencer/mod.rs b/rust/kona/crates/node/service/src/actors/sequencer/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/mod.rs rename to rust/kona/crates/node/service/src/actors/sequencer/mod.rs diff --git a/kona/crates/node/service/src/actors/sequencer/origin_selector.rs b/rust/kona/crates/node/service/src/actors/sequencer/origin_selector.rs similarity index 98% rename from kona/crates/node/service/src/actors/sequencer/origin_selector.rs rename to rust/kona/crates/node/service/src/actors/sequencer/origin_selector.rs index e2e70a1f3c7..5ff8cdd8631 100644 --- a/kona/crates/node/service/src/actors/sequencer/origin_selector.rs +++ b/rust/kona/crates/node/service/src/actors/sequencer/origin_selector.rs @@ -63,10 +63,10 @@ impl OriginSelector for L1OriginSelec // Start building on the next L1 origin block if the next L2 block's timestamp is // greater than or equal to the next L1 origin's timestamp. 
- if let Some(next) = self.next { - if unsafe_head.block_info.timestamp + self.cfg.block_time >= next.timestamp { - return Ok(next); - } + if let Some(next) = self.next && + unsafe_head.block_info.timestamp + self.cfg.block_time >= next.timestamp + { + return Ok(next); } let Some(current) = self.current else { @@ -473,16 +473,15 @@ mod test { }; if next_available { + let next = selector.next_l1_origin(unsafe_head, false).await.unwrap(); if next_ahead_of_unsafe { // If the next L1 origin is available and ahead of the unsafe head, the L1 origin // should not change. - let next = selector.next_l1_origin(unsafe_head, false).await.unwrap(); assert_eq!(next.hash, B256::ZERO); assert_eq!(next.number, 0); } else { // If the next L1 origin is available and behind the unsafe head, the L1 origin // should advance. - let next = selector.next_l1_origin(unsafe_head, false).await.unwrap(); assert_eq!(next.hash, B256::with_last_byte(1)); assert_eq!(next.number, 1); } diff --git a/kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs b/rust/kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs similarity index 88% rename from kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs rename to rust/kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs index a8f0ea0423c..8c1687cd503 100644 --- a/kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs +++ b/rust/kona/crates/node/service/src/actors/sequencer/tests/actor_test.rs @@ -10,9 +10,9 @@ use kona_protocol::{BlockInfo, L2BlockInfo}; use rstest::rstest; #[rstest] -#[case::temp(PipelineErrorKind::Temporary(BuilderError::Custom("".into()).into()), false)] -#[case::reset(PipelineErrorKind::Reset(BuilderError::Custom("".into()).into()), false)] -#[case::critical(PipelineErrorKind::Critical(BuilderError::Custom("".into()).into()), true)] +#[case::temp(PipelineErrorKind::Temporary(BuilderError::Custom(String::new()).into()), false)] 
+#[case::reset(PipelineErrorKind::Reset(BuilderError::Custom(String::new()).into()), false)] +#[case::critical(PipelineErrorKind::Critical(BuilderError::Custom(String::new()).into()), true)] #[tokio::test] async fn test_build_unsealed_payload_prepare_payload_attributes_error( #[case] forced_error: PipelineErrorKind, diff --git a/kona/crates/node/service/src/actors/sequencer/tests/admin_api_impl_test.rs b/rust/kona/crates/node/service/src/actors/sequencer/tests/admin_api_impl_test.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/tests/admin_api_impl_test.rs rename to rust/kona/crates/node/service/src/actors/sequencer/tests/admin_api_impl_test.rs diff --git a/kona/crates/node/service/src/actors/sequencer/tests/mod.rs b/rust/kona/crates/node/service/src/actors/sequencer/tests/mod.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/tests/mod.rs rename to rust/kona/crates/node/service/src/actors/sequencer/tests/mod.rs diff --git a/kona/crates/node/service/src/actors/sequencer/tests/test_util.rs b/rust/kona/crates/node/service/src/actors/sequencer/tests/test_util.rs similarity index 100% rename from kona/crates/node/service/src/actors/sequencer/tests/test_util.rs rename to rust/kona/crates/node/service/src/actors/sequencer/tests/test_util.rs diff --git a/rust/kona/crates/node/service/src/actors/traits.rs b/rust/kona/crates/node/service/src/actors/traits.rs new file mode 100644 index 00000000000..d68a5b12066 --- /dev/null +++ b/rust/kona/crates/node/service/src/actors/traits.rs @@ -0,0 +1,28 @@ +//! [`NodeActor`] trait. + +use async_trait::async_trait; +use tokio_util::sync::WaitForCancellationFuture; + +/// The communication context used by the actor. +pub trait CancellableContext: Send { + /// Returns a future that resolves when the actor is cancelled. + fn cancelled(&self) -> WaitForCancellationFuture<'_>; +} + +/// The [`NodeActor`] is an actor-like service for the node. 
+/// +/// Actors may: +/// - Handle incoming messages. +/// - Perform background tasks. +/// - Emit new events for other actors to process. +#[async_trait] +pub trait NodeActor: Send + 'static { + /// The error type for the actor. + type Error: std::fmt::Debug; + /// The type necessary to pass to the start function. + /// This is the result of + type StartData: Sized; + + /// Starts the actor. + async fn start(self, start_context: Self::StartData) -> Result<(), Self::Error>; +} diff --git a/rust/kona/crates/node/service/src/lib.rs b/rust/kona/crates/node/service/src/lib.rs new file mode 100644 index 00000000000..deeab0fb47e --- /dev/null +++ b/rust/kona/crates/node/service/src/lib.rs @@ -0,0 +1,46 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +#[macro_use] +extern crate tracing; + +mod service; +pub use service::{ + DerivationDelegateConfig, InteropMode, L1Config, L1ConfigBuilder, NodeMode, RollupNode, + RollupNodeBuilder, +}; + +mod actors; +pub use actors::{ + BlockStream, BuildRequest, CancellableContext, Conductor, ConductorClient, ConductorError, + DelayedL1OriginSelectorProvider, DelegateDerivationActor, DerivationActor, + DerivationActorRequest, DerivationClientError, DerivationClientResult, + DerivationDelegateClient, DerivationDelegateClientError, DerivationEngineClient, + DerivationError, DerivationState, DerivationStateMachine, DerivationStateTransitionError, + DerivationStateUpdate, EngineActor, EngineActorRequest, EngineClientError, EngineClientResult, + EngineConfig, EngineDerivationClient, EngineError, EngineProcessingRequest, EngineProcessor, + EngineRequestReceiver, 
EngineRpcProcessor, EngineRpcRequest, EngineRpcRequestReceiver, + L1OriginSelector, L1OriginSelectorError, L1OriginSelectorProvider, L1WatcherActor, + L1WatcherActorError, L1WatcherDerivationClient, NetworkActor, NetworkActorError, + NetworkBuilder, NetworkBuilderError, NetworkConfig, NetworkDriver, NetworkDriverError, + NetworkEngineClient, NetworkHandler, NetworkInboundData, NodeActor, OriginSelector, + QueuedDerivationEngineClient, QueuedEngineDerivationClient, QueuedEngineRpcClient, + QueuedL1WatcherDerivationClient, QueuedNetworkEngineClient, QueuedSequencerAdminAPIClient, + QueuedSequencerEngineClient, QueuedUnsafePayloadGossipClient, ResetRequest, + RollupBoostAdminApiClient, RollupBoostHealthRpcClient, RpcActor, RpcActorError, RpcContext, + SealRequest, SequencerActor, SequencerActorError, SequencerAdminQuery, SequencerConfig, + SequencerEngineClient, UnsafePayloadGossipClient, UnsafePayloadGossipClientError, +}; + +mod metrics; +pub use metrics::Metrics; + +#[cfg(test)] +pub use actors::{ + MockConductor, MockOriginSelector, MockSequencerEngineClient, MockUnsafePayloadGossipClient, +}; diff --git a/kona/crates/node/service/src/metrics/mod.rs b/rust/kona/crates/node/service/src/metrics/mod.rs similarity index 100% rename from kona/crates/node/service/src/metrics/mod.rs rename to rust/kona/crates/node/service/src/metrics/mod.rs diff --git a/kona/crates/node/service/src/service/builder.rs b/rust/kona/crates/node/service/src/service/builder.rs similarity index 100% rename from kona/crates/node/service/src/service/builder.rs rename to rust/kona/crates/node/service/src/service/builder.rs diff --git a/kona/crates/node/service/src/service/mod.rs b/rust/kona/crates/node/service/src/service/mod.rs similarity index 100% rename from kona/crates/node/service/src/service/mod.rs rename to rust/kona/crates/node/service/src/service/mod.rs diff --git a/kona/crates/node/service/src/service/mode.rs b/rust/kona/crates/node/service/src/service/mode.rs similarity index 100% 
rename from kona/crates/node/service/src/service/mode.rs rename to rust/kona/crates/node/service/src/service/mode.rs diff --git a/rust/kona/crates/node/service/src/service/node.rs b/rust/kona/crates/node/service/src/service/node.rs new file mode 100644 index 00000000000..7c130b21220 --- /dev/null +++ b/rust/kona/crates/node/service/src/service/node.rs @@ -0,0 +1,425 @@ +//! Contains the [`RollupNode`] implementation. +use crate::{ + ConductorClient, DelayedL1OriginSelectorProvider, DelegateDerivationActor, DerivationActor, + DerivationDelegateClient, DerivationError, EngineActor, EngineActorRequest, EngineConfig, + EngineProcessor, EngineRpcProcessor, InteropMode, L1OriginSelector, L1WatcherActor, + NetworkActor, NetworkBuilder, NetworkConfig, NodeActor, NodeMode, QueuedDerivationEngineClient, + QueuedEngineDerivationClient, QueuedEngineRpcClient, QueuedL1WatcherDerivationClient, + QueuedNetworkEngineClient, QueuedSequencerAdminAPIClient, QueuedSequencerEngineClient, + RollupBoostAdminApiClient, RollupBoostHealthRpcClient, RpcActor, RpcContext, SequencerActor, + SequencerConfig, + actors::{BlockStream, NetworkInboundData, QueuedUnsafePayloadGossipClient}, +}; +use alloy_eips::BlockNumberOrTag; +use alloy_provider::RootProvider; +use kona_derive::StatefulAttributesBuilder; +use kona_engine::{Engine, EngineState, OpEngineClient}; +use kona_genesis::{L1ChainConfig, RollupConfig}; +use kona_protocol::L2BlockInfo; +use kona_providers_alloy::{ + AlloyChainProvider, AlloyL2ChainProvider, OnlineBeaconClient, OnlineBlobProvider, + OnlinePipeline, +}; +use kona_rpc::RpcBuilder; +use op_alloy_network::Optimism; +use std::{ops::Not as _, sync::Arc, time::Duration}; +use tokio::sync::{mpsc, watch}; +use tokio_util::sync::CancellationToken; + +const DERIVATION_PROVIDER_CACHE_SIZE: usize = 1024; +const HEAD_STREAM_POLL_INTERVAL: u64 = 4; +const FINALIZED_STREAM_POLL_INTERVAL: u64 = 60; + +/// The configuration for the L1 chain. 
+#[derive(Debug, Clone)] +pub struct L1Config { + /// The L1 chain configuration. + pub chain_config: Arc, + /// Whether to trust the L1 RPC. + pub trust_rpc: bool, + /// The L1 beacon client. + pub beacon_client: OnlineBeaconClient, + /// The L1 engine provider. + pub engine_provider: RootProvider, +} + +/// The standard implementation of the [`RollupNode`] service, using the governance approved OP +/// Stack configuration of components. +#[derive(Debug)] +pub struct RollupNode { + /// The rollup configuration. + pub(crate) config: Arc, + /// The L1 configuration. + pub(crate) l1_config: L1Config, + /// The interop mode for the node. + pub(crate) interop_mode: InteropMode, + /// The L2 EL provider. + pub(crate) l2_provider: RootProvider, + /// Whether to trust the L2 RPC. + pub(crate) l2_trust_rpc: bool, + /// The [`EngineConfig`] for the node. + pub(crate) engine_config: EngineConfig, + /// The [`RpcBuilder`] for the node. + pub(crate) rpc_builder: Option, + /// The P2P [`NetworkConfig`] for the node. + pub(crate) p2p_config: NetworkConfig, + /// The [`SequencerConfig`] for the node. + pub(crate) sequencer_config: SequencerConfig, + /// Optional derivation delegate provider. + pub(crate) derivation_delegate_provider: Option, +} + +/// A RollupNode-level derivation actor wrapper. +/// +/// This type selects the concrete derivation actor implementation +/// based on `RollupNode` configuration. +/// +/// It is not intended to be generic or reusable outside the +/// `RollupNode` wiring logic. 
+enum ConfiguredDerivationActor { + Delegate(Box>), + Normal(Box>), +} + +#[async_trait::async_trait] +impl NodeActor for ConfiguredDerivationActor +where + DelegateDerivationActor: + NodeActor, + DerivationActor: + NodeActor, +{ + type StartData = (); + type Error = DerivationError; + + async fn start(self, ctx: ()) -> Result<(), Self::Error> { + match self { + Self::Delegate(a) => a.start(ctx).await, + Self::Normal(a) => a.start(ctx).await, + } + } +} + +impl RollupNode { + /// The mode of operation for the node. + const fn mode(&self) -> NodeMode { + self.engine_config.mode + } + + /// Creates a network builder for the node. + fn network_builder(&self) -> NetworkBuilder { + NetworkBuilder::from(self.p2p_config.clone()) + } + + /// Returns an engine builder for the node. + fn engine_config(&self) -> EngineConfig { + self.engine_config.clone() + } + + /// Returns an rpc builder for the node. + fn rpc_builder(&self) -> Option { + self.rpc_builder.clone() + } + + /// Returns the sequencer builder for the node. + fn create_attributes_builder( + &self, + ) -> StatefulAttributesBuilder { + let l1_derivation_provider = AlloyChainProvider::new_with_trust( + self.l1_config.engine_provider.clone(), + DERIVATION_PROVIDER_CACHE_SIZE, + self.l1_config.trust_rpc, + ); + let l2_derivation_provider = AlloyL2ChainProvider::new_with_trust( + self.l2_provider.clone(), + self.config.clone(), + DERIVATION_PROVIDER_CACHE_SIZE, + self.l2_trust_rpc, + ); + + StatefulAttributesBuilder::new( + self.config.clone(), + self.l1_config.chain_config.clone(), + l2_derivation_provider, + l1_derivation_provider, + ) + } + + async fn create_pipeline(&self) -> OnlinePipeline { + // Create the caching L1/L2 EL providers for derivation. 
+ let l1_derivation_provider = AlloyChainProvider::new_with_trust( + self.l1_config.engine_provider.clone(), + DERIVATION_PROVIDER_CACHE_SIZE, + self.l1_config.trust_rpc, + ); + let l2_derivation_provider = AlloyL2ChainProvider::new_with_trust( + self.l2_provider.clone(), + self.config.clone(), + DERIVATION_PROVIDER_CACHE_SIZE, + self.l2_trust_rpc, + ); + + match self.interop_mode { + InteropMode::Polled => OnlinePipeline::new_polled( + self.config.clone(), + self.l1_config.chain_config.clone(), + OnlineBlobProvider::init(self.l1_config.beacon_client.clone()).await, + l1_derivation_provider, + l2_derivation_provider, + ), + InteropMode::Indexed => OnlinePipeline::new_indexed( + self.config.clone(), + self.l1_config.chain_config.clone(), + OnlineBlobProvider::init(self.l1_config.beacon_client.clone()).await, + l1_derivation_provider, + l2_derivation_provider, + ), + } + } + + /// Helper function to assemble the [`EngineActor`] since there are many structs created that + /// are not relevant to other actors or logic. + /// Note: ignoring complex type warning. This type only pertains to this function, so it is + /// better to have the full type here than have to piece it together from multiple type defs. 
+ #[allow(clippy::type_complexity)] + fn create_engine_actor( + &self, + cancellation_token: CancellationToken, + engine_request_rx: mpsc::Receiver, + derivation_client: QueuedEngineDerivationClient, + unsafe_head_tx: watch::Sender, + ) -> Result< + EngineActor< + EngineProcessor< + OpEngineClient>, + QueuedEngineDerivationClient, + >, + EngineRpcProcessor>>, + >, + String, + > { + let engine_state = EngineState::default(); + let (engine_state_tx, engine_state_rx) = watch::channel(engine_state); + let (engine_queue_length_tx, engine_queue_length_rx) = watch::channel(0); + let engine = Engine::new(engine_state, engine_state_tx, engine_queue_length_tx); + + let engine_client = Arc::new(self.engine_config().build_engine_client().map_err(|e| { + error!(target: "service", error = ?e, "engine client build failed"); + format!("Engine client build failed: {e:?}") + })?); + + let engine_processor = EngineProcessor::new( + engine_client.clone(), + self.config.clone(), + derivation_client, + engine, + self.mode().is_sequencer().then_some(unsafe_head_tx), + ); + + let engine_rpc_processor = EngineRpcProcessor::new( + engine_client.clone(), + engine_client.rollup_boost.clone(), + self.config.clone(), + engine_state_rx, + engine_queue_length_rx, + ); + + Ok(EngineActor::new( + cancellation_token, + engine_request_rx, + engine_processor, + engine_rpc_processor, + )) + } + + /// Starts the rollup node service. + /// + /// The rollup node, in validator mode, listens to two sources of information to sync the L2 + /// chain: + /// + /// 1. The data availability layer, with a watcher that listens for new updates. L2 inputs (L2 + /// transaction batches + deposits) are then derived from the DA layer. + /// 2. The L2 sequencer, which produces unsafe L2 blocks and sends them to the network over p2p + /// gossip. 
+ /// + /// From these two sources, the node imports `unsafe` blocks from the L2 sequencer, `safe` + /// blocks from the L2 derivation pipeline into the L2 execution layer via the Engine API, + /// and finalizes `safe` blocks that it has derived when L1 finalized block updates are + /// received. + /// + /// In sequencer mode, the node is responsible for producing unsafe L2 blocks and sending them + /// to the network over p2p gossip. The node also listens for L1 finalized block updates and + /// finalizes `safe` blocks that it has derived when L1 finalized block updates are + /// received. + pub async fn start(&self) -> Result<(), String> { + // Create a global cancellation token for graceful shutdown of tasks. + let cancellation = CancellationToken::new(); + + let (derivation_actor_request_tx, derivation_actor_request_rx) = mpsc::channel(1024); + + let (engine_actor_request_tx, engine_actor_request_rx) = mpsc::channel(1024); + let (unsafe_head_tx, unsafe_head_rx) = watch::channel(L2BlockInfo::default()); + + let engine_actor = self.create_engine_actor( + cancellation.clone(), + engine_actor_request_rx, + QueuedEngineDerivationClient::new(derivation_actor_request_tx.clone()), + unsafe_head_tx, + )?; + + // Select the concrete derivation actor implementation based on + // RollupNode configuration. 
+ let derivation: ConfiguredDerivationActor = if let Some(provider) = + self.derivation_delegate_provider.clone() + { + // L1 Provider for sanity checking Derivation Delegation + let l1_provider = AlloyChainProvider::new( + self.l1_config.engine_provider.clone(), + DERIVATION_PROVIDER_CACHE_SIZE, + ); + ConfiguredDerivationActor::Delegate(Box::new(DelegateDerivationActor::<_>::new( + QueuedDerivationEngineClient { + engine_actor_request_tx: engine_actor_request_tx.clone(), + }, + cancellation.clone(), + derivation_actor_request_rx, + provider, + l1_provider, + ))) + } else { + ConfiguredDerivationActor::Normal(Box::new(DerivationActor::<_, OnlinePipeline>::new( + QueuedDerivationEngineClient { + engine_actor_request_tx: engine_actor_request_tx.clone(), + }, + cancellation.clone(), + derivation_actor_request_rx, + self.create_pipeline().await, + ))) + }; + + // Create the p2p actor. + let ( + NetworkInboundData { + signer, + p2p_rpc: network_rpc, + gossip_payload_tx, + admin_rpc: net_admin_rpc, + }, + network, + ) = NetworkActor::new( + QueuedNetworkEngineClient { engine_actor_request_tx: engine_actor_request_tx.clone() }, + cancellation.clone(), + self.network_builder(), + ); + + let (l1_head_updates_tx, l1_head_updates_rx) = watch::channel(None); + let delayed_l1_provider = DelayedL1OriginSelectorProvider::new( + self.l1_config.engine_provider.clone(), + l1_head_updates_rx, + self.sequencer_config.l1_conf_delay, + ); + + let delayed_origin_selector = + L1OriginSelector::new(self.config.clone(), delayed_l1_provider); + + // Conditionally add conductor if configured + let conductor = + self.sequencer_config.conductor_rpc_url.clone().map(ConductorClient::new_http); + + // Create the L1 Watcher actor + + // A channel to send queries about the state of L1. 
+ let (l1_query_tx, l1_query_rx) = mpsc::channel(1024); + + let head_stream = BlockStream::new_as_stream( + self.l1_config.engine_provider.clone(), + BlockNumberOrTag::Latest, + Duration::from_secs(HEAD_STREAM_POLL_INTERVAL), + )?; + let finalized_stream = BlockStream::new_as_stream( + self.l1_config.engine_provider.clone(), + BlockNumberOrTag::Finalized, + Duration::from_secs(FINALIZED_STREAM_POLL_INTERVAL), + )?; + + // Create the [`L1WatcherActor`]. Previously known as the DA watcher actor. + let l1_watcher = L1WatcherActor::new( + self.config.clone(), + self.l1_config.engine_provider.clone(), + l1_query_rx, + l1_head_updates_tx.clone(), + QueuedL1WatcherDerivationClient { derivation_actor_request_tx }, + signer, + cancellation.clone(), + head_stream, + finalized_stream, + ); + + // Create the sequencer if needed + let (sequencer_actor, sequencer_admin_client) = if self.mode().is_sequencer() { + let sequencer_engine_client = QueuedSequencerEngineClient { + engine_actor_request_tx: engine_actor_request_tx.clone(), + unsafe_head_rx, + }; + + // Create the admin API channel + let (sequencer_admin_api_tx, sequencer_admin_api_rx) = mpsc::channel(1024); + let queued_gossip_client = + QueuedUnsafePayloadGossipClient::new(gossip_payload_tx.clone()); + + ( + Some(SequencerActor { + admin_api_rx: sequencer_admin_api_rx, + attributes_builder: self.create_attributes_builder(), + cancellation_token: cancellation.clone(), + conductor, + engine_client: sequencer_engine_client, + is_active: self.sequencer_config.sequencer_stopped.not(), + in_recovery_mode: self.sequencer_config.sequencer_recovery_mode, + origin_selector: delayed_origin_selector, + rollup_config: self.config.clone(), + unsafe_payload_gossip_client: queued_gossip_client, + }), + Some(QueuedSequencerAdminAPIClient::new(sequencer_admin_api_tx)), + ) + } else { + (None, None) + }; + + // Create the RPC server actor. 
+ let rpc = self.rpc_builder().map(|b| { + RpcActor::new( + b, + QueuedEngineRpcClient::new(engine_actor_request_tx.clone()), + RollupBoostAdminApiClient { + engine_actor_request_tx: engine_actor_request_tx.clone(), + }, + RollupBoostHealthRpcClient { + engine_actor_request_tx: engine_actor_request_tx.clone(), + }, + sequencer_admin_client, + ) + }); + + crate::service::spawn_and_wait!( + cancellation, + actors = [ + rpc.map(|r| ( + r, + RpcContext { + cancellation: cancellation.clone(), + p2p_network: network_rpc, + network_admin: net_admin_rpc, + l1_watcher_queries: l1_query_tx, + } + )), + sequencer_actor.map(|s| (s, ())), + Some((network, ())), + Some((l1_watcher, ())), + Some((derivation, ())), + Some((engine_actor, ())), + ] + ); + Ok(()) + } +} diff --git a/rust/kona/crates/node/service/src/service/util.rs b/rust/kona/crates/node/service/src/service/util.rs new file mode 100644 index 00000000000..3a6ab08064d --- /dev/null +++ b/rust/kona/crates/node/service/src/service/util.rs @@ -0,0 +1,100 @@ +//! Utilities for the rollup node service, internal to the crate. + +/// Spawns a set of parallel actors in a [`JoinSet`](tokio::task::JoinSet), and cancels all actors +/// if any of them fail. The type of the error in the [`NodeActor`](crate::NodeActor)s is erased to +/// avoid having to specify a common error type between actors. +/// +/// Actors are passed in as optional arguments, in case a given actor is not needed. +/// +/// This macro also handles OS shutdown signals (SIGTERM, SIGINT) and triggers graceful shutdown +/// when received. +macro_rules! spawn_and_wait { + ($cancellation:expr, actors = [$($actor:expr$(,)?)*]) => { + let mut task_handles = tokio::task::JoinSet::new(); + + // Check if the actor is present, and spawn it if it is. + $( + if let Some((actor, context)) = $actor { + let cancellation = $cancellation.clone(); + task_handles.spawn(async move { + // This guard ensures that the cancellation token is cancelled when the actor is + // dropped. 
This ensures that the actor is properly shut down. + // Note the underscore prefix: this is to signal that we don't use the guard anywhere, but + // *the compiler shouldn't optimize it away*. + // Note that using a simple `_` would not work here because it gets optimized away in + // release mode. + let _guard = cancellation.drop_guard(); + + if let Err(e) = actor.start(context).await { + return Err(format!("{e:?}")); + } + Ok(()) + }); + } + )* + + // Create the shutdown signal future + let shutdown = $crate::service::shutdown_signal(); + tokio::pin!(shutdown); + + loop { + tokio::select! { + _ = &mut shutdown => { + tracing::info!(target: "rollup_node", "Received shutdown signal, initiating graceful shutdown..."); + $cancellation.cancel(); + break; + } + result = task_handles.join_next() => { + match result { + Some(Ok(Ok(()))) => { /* Actor completed successfully */ } + Some(Ok(Err(e))) => { + tracing::error!(target: "rollup_node", "Critical error in sub-routine: {e}"); + // Cancel all tasks and gracefully shutdown. + $cancellation.cancel(); + return Err(e); + } + Some(Err(e)) => { + let error_msg = format!("Task join error: {e}"); + // Log the error and cancel all tasks. + tracing::error!(target: "rollup_node", "Task join error: {e}"); + // Cancel all tasks and gracefully shutdown. + $cancellation.cancel(); + return Err(error_msg); + } + None => break, // All tasks completed + } + } + } + } + }; +} + +// Export the `spawn_and_wait` macro for use in other modules. 
+pub(crate) use spawn_and_wait; + +/// Listens for OS shutdown signals (SIGTERM, SIGINT) +pub(crate) async fn shutdown_signal() { + let ctrl_c = async { + tokio::signal::ctrl_c().await.expect("failed to install Ctrl+C handler"); + }; + + #[cfg(unix)] + let terminate = async { + tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate()) + .expect("failed to install SIGTERM handler") + .recv() + .await; + }; + + #[cfg(not(unix))] + let terminate = std::future::pending::<()>(); + + tokio::select! { + _ = ctrl_c => { + tracing::info!(target: "rollup_node", "Received SIGINT (Ctrl+C)"); + }, + _ = terminate => { + tracing::info!(target: "rollup_node", "Received SIGTERM"); + }, + } +} diff --git a/kona/crates/node/service/tests/actors/generator/block_builder.rs b/rust/kona/crates/node/service/tests/actors/generator/block_builder.rs similarity index 100% rename from kona/crates/node/service/tests/actors/generator/block_builder.rs rename to rust/kona/crates/node/service/tests/actors/generator/block_builder.rs diff --git a/kona/crates/node/service/tests/actors/generator/mod.rs b/rust/kona/crates/node/service/tests/actors/generator/mod.rs similarity index 100% rename from kona/crates/node/service/tests/actors/generator/mod.rs rename to rust/kona/crates/node/service/tests/actors/generator/mod.rs diff --git a/kona/crates/node/service/tests/actors/generator/seed.rs b/rust/kona/crates/node/service/tests/actors/generator/seed.rs similarity index 100% rename from kona/crates/node/service/tests/actors/generator/seed.rs rename to rust/kona/crates/node/service/tests/actors/generator/seed.rs diff --git a/kona/crates/node/service/tests/actors/mod.rs b/rust/kona/crates/node/service/tests/actors/mod.rs similarity index 100% rename from kona/crates/node/service/tests/actors/mod.rs rename to rust/kona/crates/node/service/tests/actors/mod.rs diff --git a/rust/kona/crates/node/service/tests/actors/network/mocks/builder.rs 
b/rust/kona/crates/node/service/tests/actors/network/mocks/builder.rs new file mode 100644 index 00000000000..55e5b84a30f --- /dev/null +++ b/rust/kona/crates/node/service/tests/actors/network/mocks/builder.rs @@ -0,0 +1,128 @@ +use std::net::{IpAddr, Ipv4Addr}; + +use alloy_chains::Chain; +use alloy_signer::k256; +use discv5::{ConfigBuilder, Enr, ListenConfig}; + +use crate::actors::network::TestNetwork; +use alloy_primitives::Address; +use async_trait::async_trait; +use kona_disc::LocalNode; +use kona_genesis::RollupConfig; +use kona_node_service::{ + EngineClientResult, NetworkActor, NetworkBuilder, NetworkEngineClient, NodeActor, +}; +use kona_peers::BootNode; +use kona_sources::BlockSigner; +use libp2p::{Multiaddr, identity::Keypair, multiaddr::Protocol}; +use op_alloy_rpc_types_engine::OpExecutionPayloadEnvelope; +use rand::RngCore; +use tokio::sync::mpsc; +use tokio_util::sync::CancellationToken; +use tracing::error; + +pub(crate) struct TestNetworkBuilder { + chain_id: u64, + unsafe_block_signer: Address, + custom_keypair: Option, +} + +impl TestNetworkBuilder { + fn rollup_config(&self) -> RollupConfig { + RollupConfig { l2_chain_id: Chain::from_id(self.chain_id), ..Default::default() } + } + + pub(crate) fn new() -> Self { + let chain_id = rand::rng().next_u64(); + + Self { chain_id, unsafe_block_signer: Address::ZERO, custom_keypair: None } + } + + /// Sets a sequencer keypair for the network. + /// The next network built will be the sequencer's network. This will set the unsafe block + /// signer to the sequencer's address and the custom keypair to the sequencer's keypair. + /// This amounts to calling [`Self::with_unsafe_block_signer`] and [`Self::with_custom_keypair`] + /// sequentially. + pub(crate) fn set_sequencer(mut self) -> Self { + let sequencer_keypair = Keypair::generate_secp256k1(); + let secp256k1_key = sequencer_keypair.clone().try_into_secp256k1() + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. 
This is a bug since we only support secp256k1 keys: {e}")).unwrap() + .secret().to_bytes(); + let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. This is a bug since we only support secp256k1 keys: {e}")).unwrap(); + + self.custom_keypair = Some(sequencer_keypair); + self.unsafe_block_signer = Address::from_private_key(&local_node_key); + + self + } + + /// Minimal network configuration. + /// Only allows loopback addresses in the discovery table. + pub(crate) fn build(&mut self, bootnodes: Vec) -> TestNetwork { + let keypair = self.custom_keypair.take().unwrap_or_else(Keypair::generate_secp256k1); + + let secp256k1_key = keypair.clone().try_into_secp256k1() + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to secp256k1. This is a bug since we only support secp256k1 keys: {e}")).unwrap() + .secret().to_bytes(); + let local_node_key = k256::ecdsa::SigningKey::from_bytes(&secp256k1_key.into()) + .map_err(|e| anyhow::anyhow!("Impossible to convert keypair to k256 signing key. This is a bug since we only support secp256k1 keys: {e}")).unwrap(); + + let node_addr = IpAddr::V4(Ipv4Addr::UNSPECIFIED); + + let discovery_config = ConfigBuilder::new(ListenConfig::from_ip(node_addr, 0)) + // Only allow loopback addresses. + .table_filter(|enr| { + let Some(ip) = enr.ip4() else { + return false; + }; + + ip.is_loopback() + }) + .build(); + + let mut gossip_multiaddr = Multiaddr::from(node_addr); + gossip_multiaddr.push(Protocol::Tcp(0)); + + // Create a new network actor. No external connections + let builder = NetworkBuilder::new( + // Create a new rollup config. We don't need to specify any of the fields. 
+ self.rollup_config(), + self.unsafe_block_signer, + gossip_multiaddr, + keypair, + LocalNode::new(local_node_key.clone(), node_addr, 0, 0), + discovery_config, + Some(BlockSigner::Local(local_node_key.into())), + ) + .with_bootnodes(bootnodes.into_iter().map(Into::into).collect::>().into()); + + let (blocks_tx, blocks_rx) = mpsc::channel(1024); + let (inbound_data, actor) = NetworkActor::new( + ForwardingNetworkEngineClient { blocks_tx }, + CancellationToken::new(), + builder, + ); + + let handle = tokio::spawn(async move { actor.start(()).await }); + + TestNetwork { inbound_data, blocks_rx, handle } + } +} + +#[derive(Debug)] +struct ForwardingNetworkEngineClient { + blocks_tx: mpsc::Sender, +} + +#[async_trait] +impl NetworkEngineClient for ForwardingNetworkEngineClient { + async fn send_unsafe_block(&self, block: OpExecutionPayloadEnvelope) -> EngineClientResult<()> { + let _ = self + .blocks_tx + .send(block) + .await + .inspect_err(|e| error!(target: "net", "Failed to send block: {:?}", e)); + Ok(()) + } +} diff --git a/kona/crates/node/service/tests/actors/network/mocks/mod.rs b/rust/kona/crates/node/service/tests/actors/network/mocks/mod.rs similarity index 100% rename from kona/crates/node/service/tests/actors/network/mocks/mod.rs rename to rust/kona/crates/node/service/tests/actors/network/mocks/mod.rs diff --git a/kona/crates/node/service/tests/actors/network/mod.rs b/rust/kona/crates/node/service/tests/actors/network/mod.rs similarity index 100% rename from kona/crates/node/service/tests/actors/network/mod.rs rename to rust/kona/crates/node/service/tests/actors/network/mod.rs diff --git a/rust/kona/crates/node/service/tests/actors/network/p2p.rs b/rust/kona/crates/node/service/tests/actors/network/p2p.rs new file mode 100644 index 00000000000..768bdb3d5ad --- /dev/null +++ b/rust/kona/crates/node/service/tests/actors/network/p2p.rs @@ -0,0 +1,44 @@ +use crate::actors::network::mocks::builder::TestNetworkBuilder; + +#[tokio::test(flavor = 
"multi_thread")] +async fn test_p2p_network_conn() -> anyhow::Result<()> { + let mut builder = TestNetworkBuilder::new(); + let network_1 = builder.build(vec![]); + let enr_1 = network_1.peer_enr().await?; + + let network_2 = builder.build(vec![enr_1]); + + network_2.is_connected_to_with_retries(&network_1).await?; + + network_1.is_connected_to_with_retries(&network_2).await?; + + Ok(()) +} + +#[tokio::test(flavor = "multi_thread")] +async fn test_large_network_conn() -> anyhow::Result<()> { + const NETWORKS: usize = 10; + + let mut builder = TestNetworkBuilder::new(); + + let (mut networks, mut bootnodes) = (vec![], vec![]); + + for _ in 0..NETWORKS { + let network = builder.build(bootnodes.clone()); + let enr = network.peer_enr().await?; + networks.push(network); + bootnodes.push(enr); + } + + for network in &networks { + for other_network in &networks { + if network.peer_id().await? == other_network.peer_id().await? { + continue; + } + + network.is_connected_to_with_retries(other_network).await?; + } + } + + Ok(()) +} diff --git a/rust/kona/crates/node/service/tests/actors/network/sequencer.rs b/rust/kona/crates/node/service/tests/actors/network/sequencer.rs new file mode 100644 index 00000000000..7e868eac45c --- /dev/null +++ b/rust/kona/crates/node/service/tests/actors/network/sequencer.rs @@ -0,0 +1,82 @@ +use crate::actors::{ + generator::{block_builder::PayloadVersion, seed::SEED_GENERATOR_BUILDER}, + network::mocks::builder::TestNetworkBuilder, +}; + +/// Test that we can properly gossip blocks to the sequencer. 
+#[tokio::test(flavor = "multi_thread")] +async fn test_sequencer_network_conn() -> anyhow::Result<()> { + let mut builder = TestNetworkBuilder::new().set_sequencer(); + + let sequencer_network = builder.build(vec![]); + let enr_1 = sequencer_network.peer_enr().await?; + + let mut validator_network = builder.build(vec![enr_1]); + + sequencer_network.is_connected_to_with_retries(&validator_network).await?; + + validator_network.is_connected_to_with_retries(&sequencer_network).await?; + + let mut seed_generator = SEED_GENERATOR_BUILDER.next_generator(); + + let envelope = seed_generator.random_valid_payload(PayloadVersion::V1)?; + + sequencer_network.inbound_data.gossip_payload_tx.send(envelope.clone()).await?; + + let block = validator_network + .blocks_rx + .recv() + .await + .ok_or_else(|| anyhow::anyhow!("No block received"))?; + + assert_eq!(block.parent_beacon_block_root, envelope.parent_beacon_block_root); + assert_eq!(block.execution_payload, envelope.execution_payload); + + Ok(()) +} + +/// Test that the network can properly propagate blocks to all connected peers. +/// +/// We are setting up a linear network topology, and we check that the block propagates to every +/// block of the network. +#[tokio::test(flavor = "multi_thread")] +async fn test_sequencer_network_propagation() -> anyhow::Result<()> { + const NETWORKS: usize = 10; + + let mut builder = TestNetworkBuilder::new().set_sequencer(); + + let sequencer_network = builder.build(vec![]); + let mut previous_enrs = vec![sequencer_network.peer_enr().await?]; + + let mut validator_networks = Vec::new(); + + for _ in 0..NETWORKS { + let network = builder.build(previous_enrs.clone()); + + previous_enrs.push(network.peer_enr().await?); + validator_networks.push(network); + } + + // Check that all networks are connected to the sequencer. + for network in &validator_networks { + network.is_connected_to_with_retries(&sequencer_network).await?; + } + + // Send a block to the sequencer. 
+ let mut seed_generator = SEED_GENERATOR_BUILDER.next_generator(); + + let envelope = seed_generator.random_valid_payload(PayloadVersion::V1)?; + + sequencer_network.inbound_data.gossip_payload_tx.send(envelope.clone()).await?; + + // Check that the block propagates to all networks. + for network in &mut validator_networks { + let block = + network.blocks_rx.recv().await.ok_or_else(|| anyhow::anyhow!("No block received"))?; + + assert_eq!(block.parent_beacon_block_root, envelope.parent_beacon_block_root); + assert_eq!(block.execution_payload, envelope.execution_payload); + } + + Ok(()) +} diff --git a/kona/crates/node/service/tests/integration.rs b/rust/kona/crates/node/service/tests/integration.rs similarity index 100% rename from kona/crates/node/service/tests/integration.rs rename to rust/kona/crates/node/service/tests/integration.rs diff --git a/kona/crates/node/service/tests/rollup_boost_missing_jwt.rs b/rust/kona/crates/node/service/tests/rollup_boost_missing_jwt.rs similarity index 100% rename from kona/crates/node/service/tests/rollup_boost_missing_jwt.rs rename to rust/kona/crates/node/service/tests/rollup_boost_missing_jwt.rs diff --git a/rust/kona/crates/node/sources/Cargo.toml b/rust/kona/crates/node/sources/Cargo.toml new file mode 100644 index 00000000000..ae53a6a2bff --- /dev/null +++ b/rust/kona/crates/node/sources/Cargo.toml @@ -0,0 +1,50 @@ +[package] +name = "kona-sources" +version = "0.1.2" +description = "Data source types and utilities for the kona-node" + +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage.workspace = true +authors.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace + +# Alloy +alloy-transport.workspace = true +alloy-primitives.workspace = true +alloy-rpc-client.workspace = true +alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls"] } + 
+alloy-signer.workspace = true +alloy-signer-local.workspace = true + +# OP Alloy +op-alloy-rpc-types-engine.workspace = true + +# Misc +tracing.workspace = true +thiserror.workspace = true +derive_more.workspace = true + +# HTTP client and TLS for remote signer +url.workspace = true +serde.workspace = true +serde_json.workspace = true +rustls.workspace = true +tokio = { workspace = true, features = ["full"] } +notify.workspace = true + +[features] +default = [] + +[dev-dependencies] +tokio.workspace = true +serde_json.workspace = true diff --git a/rust/kona/crates/node/sources/README.md b/rust/kona/crates/node/sources/README.md new file mode 100644 index 00000000000..c9c98acd0e3 --- /dev/null +++ b/rust/kona/crates/node/sources/README.md @@ -0,0 +1,3 @@ +## `kona-sources` + +Data source types and utilities for the kona-node. diff --git a/rust/kona/crates/node/sources/src/lib.rs b/rust/kona/crates/node/sources/src/lib.rs new file mode 100644 index 00000000000..d62d6fad2a3 --- /dev/null +++ b/rust/kona/crates/node/sources/src/lib.rs @@ -0,0 +1,14 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] + +mod signer; +pub use signer::{ + BlockSigner, BlockSignerError, BlockSignerHandler, BlockSignerStartError, CertificateError, + ClientCert, RemoteSigner, RemoteSignerError, RemoteSignerHandler, RemoteSignerStartError, +}; diff --git a/kona/crates/node/sources/src/signer/mod.rs b/rust/kona/crates/node/sources/src/signer/mod.rs similarity index 100% rename from kona/crates/node/sources/src/signer/mod.rs rename to 
rust/kona/crates/node/sources/src/signer/mod.rs diff --git a/kona/crates/node/sources/src/signer/remote/cert.rs b/rust/kona/crates/node/sources/src/signer/remote/cert.rs similarity index 100% rename from kona/crates/node/sources/src/signer/remote/cert.rs rename to rust/kona/crates/node/sources/src/signer/remote/cert.rs diff --git a/rust/kona/crates/node/sources/src/signer/remote/client.rs b/rust/kona/crates/node/sources/src/signer/remote/client.rs new file mode 100644 index 00000000000..4cab1294eff --- /dev/null +++ b/rust/kona/crates/node/sources/src/signer/remote/client.rs @@ -0,0 +1,117 @@ +use alloy_primitives::Address; +use alloy_rpc_client::ClientBuilder; +use alloy_transport_http::{Http, reqwest}; +use reqwest::header::HeaderMap; +use std::sync::Arc; +use thiserror::Error; +use tokio::sync::RwLock; +use url::Url; + +use crate::{ + RemoteSignerHandler, + signer::remote::cert::{CertificateError, ClientCert}, +}; + +/// Configuration for the remote signer client +/// +/// This configuration supports various TLS/certificate scenarios: +/// +/// 1. **Basic HTTPS**: Only `endpoint` and `address` are required. +/// 2. **Custom CA**: Provide `ca_cert` to verify servers with custom/self-signed certificates. +/// 3. **Mutual TLS (mTLS)**: Provide both `client_cert` and `client_key` for client authentication. +/// 4. **Full mTLS with custom CA**: Combine all certificate options for maximum security. +/// +/// Certificate formats supported: +/// - PEM format for all certificates and keys +/// - Certificates should be provided as file paths. +/// +/// By default, the process will watch for changes in the client certificate files and reload the +/// client automatically. +#[derive(Debug, Clone)] +pub struct RemoteSigner { + /// The URL of the remote signer endpoint + pub endpoint: Url, + /// The address of the signer. 
+ pub address: Address, + /// Optional client certificate for mTLS (PEM format) + pub client_cert: Option, + /// Optional CA certificate for server verification (PEM format) + pub ca_cert: Option, + /// Headers to pass to the remote signer. + pub headers: HeaderMap, +} + +/// Errors that can occur when starting a remote signer. +#[derive(Debug, Error)] +pub enum RemoteSignerStartError { + /// Failed to ping signer + #[error("Failed to ping signer: {0}")] + Ping(alloy_transport::TransportError), + /// HTTP client build error + #[error("HTTP client build error: {0}")] + HTTPClientBuild(#[from] reqwest::Error), + /// Invalid certificate error + #[error("Invalid certificate: {0}")] + Certificate(#[from] CertificateError), + /// Certificate watcher error + #[error("Certificate watcher error: {0}")] + CertificateWatcher(#[from] notify::Error), +} + +impl RemoteSigner { + /// Creates a new remote signer with the given configuration + /// + /// If client certificates are configured, this will automatically start a certificate watcher + /// that monitors the certificate files for changes. When certificates are updated (e.g., by + /// cert-manager in Kubernetes), the TLS client will be automatically reloaded with the new + /// certificates without requiring a restart. + /// + /// # Certificate Watching + /// + /// The certificate watcher monitors: + /// - Client certificate file (if mTLS is configured) + /// - Client private key file (if mTLS is configured) + /// - CA certificate file (if custom CA is configured) + /// + /// When any of these files are modified, the watcher will: + /// 1. Log the certificate change event + /// 2. Reload the certificate files from disk + /// 3. Rebuild the HTTP client with the new TLS configuration + /// 4. Replace the existing client atomically + /// + /// This enables zero-downtime certificate rotation in production environments. 
+ pub async fn start(self) -> Result { + let http_client = self.build_http_client()?; + let transport = Http::with_client(http_client, self.endpoint.clone()); + let client = ClientBuilder::default().transport(transport, true); + + // Try to ping the signer to check if it's reachable + let version: String = + client.request("health_status", ()).await.map_err(RemoteSignerStartError::Ping)?; + + tracing::info!(target: "signer", version, "Connected to op-signer server"); + + let client = Arc::new(RwLock::new(client)); + + // Start certificate watcher if client certificates are configured + let watcher_handle = self.start_certificate_watcher(client.clone()).await?; + + Ok(RemoteSignerHandler { client, watcher_handle, address: self.address }) + } + + /// Builds an HTTP client with certificate handling for the remote signer + pub(super) fn build_http_client(&self) -> Result { + let mut client_builder = reqwest::Client::builder(); + + // Configure TLS if certificates are provided + if self.client_cert.is_some() || self.ca_cert.is_some() { + let tls_config = self.build_tls_config()?; + client_builder = client_builder.use_preconfigured_tls(tls_config); + } + + // Set headers + client_builder = client_builder.default_headers(self.headers.clone()); + + client_builder.build().map_err(RemoteSignerStartError::HTTPClientBuild) + } +} diff --git a/kona/crates/node/sources/src/signer/remote/handler.rs b/rust/kona/crates/node/sources/src/signer/remote/handler.rs similarity index 100% rename from kona/crates/node/sources/src/signer/remote/handler.rs rename to rust/kona/crates/node/sources/src/signer/remote/handler.rs diff --git a/kona/crates/node/sources/src/signer/remote/mod.rs b/rust/kona/crates/node/sources/src/signer/remote/mod.rs similarity index 100% rename from kona/crates/node/sources/src/signer/remote/mod.rs rename to rust/kona/crates/node/sources/src/signer/remote/mod.rs diff --git a/kona/crates/proof/driver/CHANGELOG.md b/rust/kona/crates/proof/driver/CHANGELOG.md 
similarity index 100% rename from kona/crates/proof/driver/CHANGELOG.md rename to rust/kona/crates/proof/driver/CHANGELOG.md diff --git a/kona/crates/proof/driver/Cargo.toml b/rust/kona/crates/proof/driver/Cargo.toml similarity index 100% rename from kona/crates/proof/driver/Cargo.toml rename to rust/kona/crates/proof/driver/Cargo.toml diff --git a/kona/crates/proof/driver/README.md b/rust/kona/crates/proof/driver/README.md similarity index 100% rename from kona/crates/proof/driver/README.md rename to rust/kona/crates/proof/driver/README.md diff --git a/rust/kona/crates/proof/driver/src/core.rs b/rust/kona/crates/proof/driver/src/core.rs new file mode 100644 index 00000000000..4aef06f0af8 --- /dev/null +++ b/rust/kona/crates/proof/driver/src/core.rs @@ -0,0 +1,332 @@ +//! The driver of the kona derivation pipeline. + +use crate::{DriverError, DriverPipeline, DriverResult, Executor, PipelineCursor, TipCursor}; +use alloc::{sync::Arc, vec::Vec}; +use alloy_consensus::BlockBody; +use alloy_primitives::{B256, Bytes}; +use alloy_rlp::Decodable; +use core::fmt::Debug; +use kona_derive::{Pipeline, PipelineError, PipelineErrorKind, Signal, SignalReceiver}; +use kona_executor::BlockBuildingOutcome; +use kona_genesis::RollupConfig; +use kona_protocol::L2BlockInfo; +use op_alloy_consensus::{OpBlock, OpTxEnvelope, OpTxType}; +use spin::RwLock; + +/// The Rollup Driver entrypoint. +/// +/// The [`Driver`] is the main coordination component for the rollup derivation and execution +/// process. It manages the interaction between the derivation pipeline and block executor +/// to produce verified L2 blocks from L1 data. +/// +/// ## Architecture +/// The driver operates with three main components: +/// - **Pipeline**: Derives L2 block attributes from L1 data +/// - **Executor**: Builds and executes L2 blocks from attributes +/// - **Cursor**: Tracks the current state of derivation progress +/// +/// ## Usage Pattern +/// ```text +/// 1. 
Initialize driver with cursor, executor, and pipeline +/// 2. Call wait_for_executor() to ensure readiness +/// 3. Call advance_to_target() to derive blocks up to target +/// 4. Driver coordinates pipeline stepping and block execution +/// 5. Updates cursor with progress and maintains safe head artifacts +/// ``` +/// +/// ## Error Handling +/// The driver handles various error scenarios: +/// - Pipeline derivation failures (temporary, reset, critical) +/// - Block execution failures (with Holocene deposit-only retry) +/// - L1 data exhaustion (graceful halt) +/// - Interop mode considerations +#[derive(Debug)] +pub struct Driver +where + E: Executor + Send + Sync + Debug, + DP: DriverPipeline

+ Send + Sync + Debug, + P: Pipeline + SignalReceiver + Send + Sync + Debug, +{ + /// Marker for the pipeline type parameter. + /// + /// This phantom data ensures type safety while allowing the driver + /// to work with different pipeline implementations. + _marker: core::marker::PhantomData

, + /// Cursor tracking the current L2 derivation state and safe head. + /// + /// The cursor maintains the current position in the derivation process, + /// including the L2 safe head, output root, and L1 origin. It's wrapped + /// in an `Arc>` for thread-safe shared access. + pub cursor: Arc>, + /// The block executor responsible for building and executing L2 blocks. + /// + /// The executor takes payload attributes from the pipeline and produces + /// complete blocks with execution results and state changes. + pub executor: E, + /// The derivation pipeline that produces block attributes from L1 data. + /// + /// The pipeline abstracts the complex derivation logic and provides + /// a high-level interface for producing sequential block attributes. + pub pipeline: DP, + /// Cached execution artifacts and transactions from the most recent safe head. + /// + /// This cache contains the [`BlockBuildingOutcome`] and raw transaction data + /// from the last successfully executed block. It's used for efficiency and + /// debugging purposes. `None` when no block has been executed yet. + pub safe_head_artifacts: Option<(BlockBuildingOutcome, Vec)>, +} + +impl Driver +where + E: Executor + Send + Sync + Debug, + DP: DriverPipeline

+ Send + Sync + Debug, + P: Pipeline + SignalReceiver + Send + Sync + Debug, +{ + /// Creates a new [`Driver`] instance. + /// + /// Initializes the driver with the provided cursor, executor, and pipeline components. + /// The driver starts with no cached safe head artifacts. + /// + /// # Arguments + /// * `cursor` - Shared cursor for tracking derivation state + /// * `executor` - Block executor for building and executing L2 blocks + /// * `pipeline` - Derivation pipeline for producing block attributes + /// + /// # Returns + /// A new [`Driver`] instance ready for operation after calling [`Self::wait_for_executor`]. + /// + /// # Usage + /// ```rust,ignore + /// let driver = Driver::new(cursor, executor, pipeline); + /// driver.wait_for_executor().await; + /// let result = driver.advance_to_target(&config, Some(target_block)).await; + /// ``` + pub const fn new(cursor: Arc>, executor: E, pipeline: DP) -> Self { + Self { + _marker: core::marker::PhantomData, + cursor, + executor, + pipeline, + safe_head_artifacts: None, + } + } + + /// Waits until the executor is ready for block processing. + /// + /// This method blocks until the underlying executor has completed any necessary + /// initialization or synchronization required before it can begin processing + /// payload attributes and executing blocks. + /// + /// # Usage + /// Must be called after creating the driver and before calling [`Self::advance_to_target`]. + /// This ensures the executor is in a valid state for block execution. + /// + /// # Example + /// ```rust,ignore + /// let mut driver = Driver::new(cursor, executor, pipeline); + /// driver.wait_for_executor().await; // Required before derivation + /// ``` + pub async fn wait_for_executor(&mut self) { + self.executor.wait_until_ready().await; + } + + /// Advances the derivation pipeline to the target block number. 
+ /// + /// This is the main driver method that coordinates the derivation pipeline and block + /// executor to produce L2 blocks up to the specified target. It handles the complete + /// lifecycle of block derivation including pipeline stepping, block execution, error + /// recovery, and state updates. + /// + /// # Arguments + /// * `cfg` - The rollup configuration containing chain parameters and activation heights + /// * `target` - Optional target block number. If `None`, derives indefinitely until data source + /// is exhausted or an error occurs + /// + /// # Returns + /// * `Ok((l2_safe_head, output_root))` - Tuple containing the final [`L2BlockInfo`] and output + /// root hash when target is reached or derivation completes + /// * `Err(DriverError)` - Various error conditions that prevent further derivation + /// + /// # Errors + /// This method can fail with several error types: + /// + /// ## Pipeline Errors + /// - **`EndOfSource` (Critical)**: L1 data source exhausted + /// - In interop mode: Returns error immediately for caller handling + /// - In normal mode: Adjusts target to current safe head and halts gracefully + /// - **Temporary**: Insufficient data, automatically retried + /// - **Reset**: Reorg detected, pipeline reset and derivation continues + /// - **Other Critical**: Fatal pipeline errors that stop derivation + /// + /// ## Execution Errors + /// - **Pre-Holocene**: Block execution failures cause block to be discarded + /// - **Holocene+**: Failed blocks are retried as deposit-only blocks + /// - Strips non-deposit transactions and flushes invalidated channel + /// - If deposit-only block also fails, returns critical error + /// + /// ## Other Errors + /// - **`MissingOrigin`**: Pipeline origin not available when expected + /// - **`BlockConversion`**: Failed to convert block format + /// - **RLP**: Failed to decode transaction data + /// + /// # Behavior Details + /// + /// ## Main Loop + /// The method operates in a continuous loop: + 
/// 1. Check if target block number reached (if specified) + /// 2. Produce payload attributes from pipeline + /// 3. Execute payload with executor + /// 4. Handle execution failures with retry logic + /// 5. Construct complete block and update cursor + /// 6. Cache artifacts and continue + /// + /// ## Target Handling + /// - If `target` is `Some(n)`: Stops when safe head reaches block `n` + /// - If `target` is `None`: Continues until data exhausted or critical error + /// - Target can be dynamically adjusted if data source is exhausted + /// + /// ## State Updates + /// Each successful block updates: + /// - Pipeline cursor with new L1 origin and L2 safe head + /// - Executor safe head for next block building + /// - Cached artifacts for the most recent block + /// - Output root computation for verification + /// + /// # Usage Pattern + /// ```rust,ignore + /// // Derive to specific block + /// let (safe_head, output_root) = driver + /// .advance_to_target(&rollup_config, Some(100)) + /// .await?; + /// + /// // Derive until data exhausted + /// let (final_head, output_root) = driver + /// .advance_to_target(&rollup_config, None) + /// .await?; + /// ``` + /// + /// # Panics + /// This method does not explicitly panic, but may propagate panics from: + /// - `RwLock` poisoning (if another thread panicked while holding the cursor lock) + /// - Executor or pipeline implementation panics + /// - Arithmetic overflow in block number operations (highly unlikely) + pub async fn advance_to_target( + &mut self, + cfg: &RollupConfig, + mut target: Option, + ) -> DriverResult<(L2BlockInfo, B256), E::Error> { + loop { + // Check if we have reached the target block number. 
+ let pipeline_cursor = self.cursor.read(); + let tip_cursor = pipeline_cursor.tip(); + if let Some(tb) = target && + tip_cursor.l2_safe_head.block_info.number >= tb + { + info!(target: "client", "Derivation complete, reached L2 safe head."); + return Ok((tip_cursor.l2_safe_head, tip_cursor.l2_safe_head_output_root)); + } + + let mut attributes = match self.pipeline.produce_payload(tip_cursor.l2_safe_head).await + { + Ok(attrs) => attrs.take_inner(), + Err(PipelineErrorKind::Critical(PipelineError::EndOfSource)) => { + warn!(target: "client", "Exhausted data source; Halting derivation and using current safe head."); + + // Adjust the target block number to the current safe head, as no more blocks + // can be produced. + if target.is_some() { + target = Some(tip_cursor.l2_safe_head.block_info.number); + }; + + // If we are in interop mode, this error must be handled by the caller. + // Otherwise, we continue the loop to halt derivation on the next iteration. + if cfg.is_interop_active(self.cursor.read().l2_safe_head().block_info.number) { + return Err(PipelineError::EndOfSource.crit().into()); + } + continue; + } + Err(e) => { + error!(target: "client", "Failed to produce payload: {:?}", e); + return Err(DriverError::Pipeline(e)); + } + }; + + self.executor.update_safe_head(tip_cursor.l2_safe_head_header.clone()); + let outcome = match self.executor.execute_payload(attributes.clone()).await { + Ok(outcome) => outcome, + Err(e) => { + error!(target: "client", "Failed to execute L2 block: {}", e); + + if cfg.is_holocene_active(attributes.payload_attributes.timestamp) { + // Retry with a deposit-only block. + warn!(target: "client", "Flushing current channel and retrying deposit only block"); + + // Flush the current batch and channel - if a block was replaced with a + // deposit-only block due to execution failure, the + // batch and channel it is contained in is forwards + // invalidated. 
+ self.pipeline.signal(Signal::FlushChannel).await?; + + // Strip out all transactions that are not deposits. + attributes.transactions = attributes.transactions.map(|txs| { + txs.into_iter() + .filter(|tx| !tx.is_empty() && tx[0] == OpTxType::Deposit as u8) + .collect::>() + }); + + // Retry the execution. + self.executor.update_safe_head(tip_cursor.l2_safe_head_header.clone()); + match self.executor.execute_payload(attributes.clone()).await { + Ok(header) => header, + Err(e) => { + error!( + target: "client", + "Critical - Failed to execute deposit-only block: {e}", + ); + return Err(DriverError::Executor(e)); + } + } + } else { + // Pre-Holocene, discard the block if execution fails. + continue; + } + } + }; + + // Construct the block. + let block = OpBlock { + header: outcome.header.inner().clone(), + body: BlockBody { + transactions: attributes + .transactions + .as_ref() + .unwrap_or(&Vec::new()) + .iter() + .map(|tx| OpTxEnvelope::decode(&mut tx.as_ref()).map_err(DriverError::Rlp)) + .collect::, E::Error>>()?, + ommers: Vec::new(), + withdrawals: None, + }, + }; + + // Get the pipeline origin and update the tip cursor. + let origin = self.pipeline.origin().ok_or(PipelineError::MissingOrigin.crit())?; + let l2_info = L2BlockInfo::from_block_and_genesis( + &block, + &self.pipeline.rollup_config().genesis, + )?; + let tip_cursor = TipCursor::new( + l2_info, + outcome.header.clone(), + self.executor.compute_output_root().map_err(DriverError::Executor)?, + ); + + // Advance the derivation pipeline cursor + drop(pipeline_cursor); + self.cursor.write().advance(origin, tip_cursor); + + // Update the latest safe head artifacts. 
+ self.safe_head_artifacts = Some((outcome, attributes.transactions.unwrap_or_default())); + } + } +} diff --git a/kona/crates/proof/driver/src/cursor.rs b/rust/kona/crates/proof/driver/src/cursor.rs similarity index 100% rename from kona/crates/proof/driver/src/cursor.rs rename to rust/kona/crates/proof/driver/src/cursor.rs diff --git a/kona/crates/proof/driver/src/errors.rs b/rust/kona/crates/proof/driver/src/errors.rs similarity index 100% rename from kona/crates/proof/driver/src/errors.rs rename to rust/kona/crates/proof/driver/src/errors.rs diff --git a/kona/crates/proof/driver/src/executor.rs b/rust/kona/crates/proof/driver/src/executor.rs similarity index 100% rename from kona/crates/proof/driver/src/executor.rs rename to rust/kona/crates/proof/driver/src/executor.rs diff --git a/rust/kona/crates/proof/driver/src/lib.rs b/rust/kona/crates/proof/driver/src/lib.rs new file mode 100644 index 00000000000..9bf8d23ceed --- /dev/null +++ b/rust/kona/crates/proof/driver/src/lib.rs @@ -0,0 +1,31 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), no_std)] + +extern crate alloc; + +#[macro_use] +extern crate tracing; + +mod errors; +pub use errors::{DriverError, DriverResult}; + +mod pipeline; +pub use pipeline::DriverPipeline; + +mod executor; +pub use executor::Executor; + +mod core; +pub use core::Driver; + +mod cursor; +pub use cursor::PipelineCursor; + +mod tip; +pub use tip::TipCursor; diff --git a/rust/kona/crates/proof/driver/src/pipeline.rs b/rust/kona/crates/proof/driver/src/pipeline.rs new file mode 100644 index 00000000000..40c8fe1ec59 --- /dev/null +++ 
b/rust/kona/crates/proof/driver/src/pipeline.rs @@ -0,0 +1,167 @@ +//! Abstracts the derivation pipeline from the driver. +//! +//! This module provides the [`DriverPipeline`] trait which serves as a high-level +//! abstraction for the driver's derivation pipeline. The pipeline is responsible +//! for deriving L2 blocks from L1 data and producing payload attributes for execution. + +use alloc::boxed::Box; +use async_trait::async_trait; +use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; + +use kona_derive::{ + ActivationSignal, Pipeline, PipelineError, PipelineErrorKind, ResetError, ResetSignal, + SignalReceiver, StepResult, +}; + +/// High-level abstraction for the driver's derivation pipeline. +/// +/// The [`DriverPipeline`] trait extends the base [`Pipeline`] functionality with +/// driver-specific operations needed for block production. It handles the complex +/// logic of stepping through derivation stages, managing resets and reorgs, and +/// producing payload attributes for block building. +/// +/// ## Key Responsibilities +/// - Stepping through derivation pipeline stages +/// - Handling L1 origin advancement +/// - Managing pipeline resets due to reorgs or activation signals +/// - Producing payload attributes for disputed blocks +/// - Caching and cache invalidation +/// +/// ## Error Handling +/// The pipeline can encounter several types of errors: +/// - **Temporary**: Retryable errors (e.g., missing data) +/// - **Reset**: Errors requiring pipeline reset (e.g., reorgs, activations) +/// - **Critical**: Fatal errors that stop derivation +#[async_trait] +pub trait DriverPipeline

: Pipeline + SignalReceiver +where + P: Pipeline + SignalReceiver, +{ + /// Flushes any cached data due to a reorganization. + /// + /// This method clears internal caches that may contain stale data + /// when a reorganization is detected on the L1 chain. It ensures + /// that subsequent derivation operations work with fresh data. + /// + /// # Usage + /// Called automatically when a reorg is detected during pipeline + /// stepping, but can also be called manually if needed. + fn flush(&mut self); + + /// Produces payload attributes for the next block after the given L2 safe head. + /// + /// This method advances the derivation pipeline to produce the next set of + /// [`OpAttributesWithParent`] that can be used for block building. It handles + /// the complex stepping logic including error recovery, resets, and reorgs. + /// + /// # Arguments + /// * `l2_safe_head` - The current L2 safe head block info to build upon + /// + /// # Returns + /// * `Ok(OpAttributesWithParent)` - Successfully produced payload attributes + /// * `Err(PipelineErrorKind)` - Pipeline error preventing payload production + /// + /// # Errors + /// This method can fail with various error types: + /// - **Temporary errors**: Insufficient data, retries automatically + /// - **Reset errors**: Reorg detected or activation needed, triggers pipeline reset + /// - **Critical errors**: Fatal issues that require external intervention + /// + /// # Behavior + /// The method operates in a loop, continuously stepping the pipeline until: + /// 1. Payload attributes are successfully produced + /// 2. A critical error occurs + /// 3. 
The pipeline signals completion + /// + /// ## Reset Handling + /// When reset errors occur: + /// - **Reorg detected**: Flushes cache and resets to safe head + /// - **Holocene activation**: Sends activation signal + /// - **Other resets**: Standard reset to safe head with system config + /// + /// ## Step Results + /// The pipeline can return different step results: + /// - **`PreparedAttributes`**: Attributes ready for the next block + /// - **`AdvancedOrigin`**: L1 origin moved forward + /// - **OriginAdvanceErr/StepFailed**: Various error conditions + async fn produce_payload( + &mut self, + l2_safe_head: L2BlockInfo, + ) -> Result { + // As we start the safe head at the disputed block's parent, we step the pipeline until the + // first attributes are produced. All batches at and before the safe head will be + // dropped, so the first payload will always be the disputed one. + loop { + match self.step(l2_safe_head).await { + StepResult::PreparedAttributes => { + info!(target: "client_derivation_driver", "Stepped derivation pipeline") + } + StepResult::AdvancedOrigin => { + info!( + target: "client_derivation_driver", + l1_block_number = self.origin().map(|o| o.number).ok_or(PipelineError::MissingOrigin.crit())?, + "Advanced origin" + ) + } + StepResult::OriginAdvanceErr(e) | StepResult::StepFailed(e) => { + // Break the loop unless the error signifies that there is not enough data to + // complete the current step. In this case, we retry the step to see if other + // stages can make progress. 
+ match e { + PipelineErrorKind::Temporary(_) => { + trace!(target: "client_derivation_driver", "Failed to step derivation pipeline temporarily: {:?}", e); + continue; + } + PipelineErrorKind::Reset(e) => { + warn!(target: "client_derivation_driver", "Failed to step derivation pipeline due to reset: {:?}", e); + let system_config = self + .system_config_by_number(l2_safe_head.block_info.number) + .await?; + + if matches!(e, ResetError::HoloceneActivation) { + let l1_origin = + self.origin().ok_or(PipelineError::MissingOrigin.crit())?; + self.signal( + ActivationSignal { + l2_safe_head, + l1_origin, + system_config: Some(system_config), + } + .signal(), + ) + .await?; + } else { + // Flushes cache if a reorg is detected. + if matches!(e, ResetError::ReorgDetected(_, _)) { + self.flush(); + } + + // Reset the pipeline to the initial L2 safe head and L1 origin, + // and try again. + let l1_origin = + self.origin().ok_or(PipelineError::MissingOrigin.crit())?; + self.signal( + ResetSignal { + l2_safe_head, + l1_origin, + system_config: Some(system_config), + } + .signal(), + ) + .await?; + } + } + PipelineErrorKind::Critical(_) => { + warn!(target: "client_derivation_driver", "Failed to step derivation pipeline: {:?}", e); + return Err(e); + } + } + } + } + + if let Some(attrs) = self.next() { + return Ok(attrs); + } + } + } +} diff --git a/kona/crates/proof/driver/src/tip.rs b/rust/kona/crates/proof/driver/src/tip.rs similarity index 100% rename from kona/crates/proof/driver/src/tip.rs rename to rust/kona/crates/proof/driver/src/tip.rs diff --git a/kona/crates/proof/executor/CHANGELOG.md b/rust/kona/crates/proof/executor/CHANGELOG.md similarity index 100% rename from kona/crates/proof/executor/CHANGELOG.md rename to rust/kona/crates/proof/executor/CHANGELOG.md diff --git a/kona/crates/proof/executor/Cargo.toml b/rust/kona/crates/proof/executor/Cargo.toml similarity index 100% rename from kona/crates/proof/executor/Cargo.toml rename to 
rust/kona/crates/proof/executor/Cargo.toml diff --git a/rust/kona/crates/proof/executor/README.md b/rust/kona/crates/proof/executor/README.md new file mode 100644 index 00000000000..470d700e365 --- /dev/null +++ b/rust/kona/crates/proof/executor/README.md @@ -0,0 +1,3 @@ +# `kona-executor` + +A `no_std` implementation of a stateless block executor for the OP stack, backed by [`kona-mpt`](../mpt)'s `TrieDB`. diff --git a/kona/crates/proof/executor/src/builder/assemble.rs b/rust/kona/crates/proof/executor/src/builder/assemble.rs similarity index 97% rename from kona/crates/proof/executor/src/builder/assemble.rs rename to rust/kona/crates/proof/executor/src/builder/assemble.rs index 597fe5e96af..29e22684e11 100644 --- a/kona/crates/proof/executor/src/builder/assemble.rs +++ b/rust/kona/crates/proof/executor/src/builder/assemble.rs @@ -1,4 +1,4 @@ -//! [Header] assembly logic for the [StatelessL2Builder]. +//! [Header] assembly logic for the [`StatelessL2Builder`]. use super::StatelessL2Builder; use crate::{ @@ -24,8 +24,8 @@ where H: TrieHinter, Evm: EvmFactory, { - /// Seals the block executed from the given [OpPayloadAttributes] and [BlockEnv], returning the - /// computed [Header]. + /// Seals the block executed from the given [`OpPayloadAttributes`] and [`BlockEnv`], returning + /// the computed [Header]. pub(crate) fn seal_block( &mut self, attrs: &OpPayloadAttributes, diff --git a/rust/kona/crates/proof/executor/src/builder/core.rs b/rust/kona/crates/proof/executor/src/builder/core.rs new file mode 100644 index 00000000000..dc141dee1e1 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/builder/core.rs @@ -0,0 +1,333 @@ +//! Stateless OP Stack L2 block builder implementation. +//! +//! The [`StatelessL2Builder`] provides a complete block building and execution engine +//! for OP Stack L2 chains that operates in a stateless manner, pulling required state +//! data from a [`TrieDB`] during execution rather than maintaining full state. 
+ +use crate::{ExecutorError, ExecutorResult, TrieDB, TrieDBError, TrieDBProvider}; +use alloc::{string::ToString, vec::Vec}; +use alloy_consensus::{Header, Sealed, crypto::RecoveryError}; +use alloy_evm::{ + EvmFactory, FromRecoveredTx, FromTxWithEncoded, + block::{BlockExecutionResult, BlockExecutor, BlockExecutorFactory}, +}; +use alloy_op_evm::{ + OpBlockExecutionCtx, OpBlockExecutorFactory, + block::{OpAlloyReceiptBuilder, OpTxEnv}, +}; +use core::fmt::Debug; +use kona_genesis::RollupConfig; +use kona_mpt::TrieHinter; +use op_alloy_consensus::{OpReceiptEnvelope, OpTxEnvelope}; +use op_alloy_rpc_types_engine::OpPayloadAttributes; +use op_revm::OpSpecId; +use revm::{ + context::BlockEnv, + database::{State, states::bundle_state::BundleRetention}, +}; + +/// Stateless OP Stack L2 block builder that derives state from trie proofs during execution. +/// +/// The [`StatelessL2Builder`] is a specialized block execution engine designed for fault proof +/// systems and stateless verification. Instead of maintaining full L2 state, it dynamically +/// retrieves required state data from a [`TrieDB`] backed by Merkle proofs and witnesses. +/// +/// # Architecture +/// +/// The builder operates in a stateless manner by: +/// 1. **Trie Database**: Uses [`TrieDB`] to access state via Merkle proofs +/// 2. **EVM Factory**: Creates execution environments with proof-backed state +/// 3. **Block Executor**: Executes transactions using witness-provided state +/// 4. 
**Receipt Generation**: Produces execution receipts and state commitments +/// +/// # Stateless Execution Model +/// +/// Traditional execution engines maintain full state databases, but the stateless model: +/// - Receives state witnesses containing only required data +/// - Verifies state access against Merkle proofs +/// - Executes transactions without persistent state storage +/// - Produces verifiable execution results and state commitments +/// +/// # Use Cases +/// +/// ## Fault Proof Systems +/// - Enables dispute resolution without full state replication +/// - Provides verifiable execution results for challenge games +/// - Supports optimistic rollup fraud proof generation +/// +/// ## Stateless Verification +/// - Allows third parties to verify L2 blocks without full state +/// - Enables light clients to validate L2 execution +/// - Supports decentralized verification networks +/// +/// # Performance Characteristics +/// +/// - **Memory**: Lower memory usage than stateful execution (no full state) +/// - **I/O**: Higher I/O for proof verification and witness access +/// - **CPU**: Additional overhead for cryptographic proof verification +/// - **Determinism**: Guaranteed deterministic execution results +/// +/// # Type Parameters +/// +/// * `P` - Trie database provider implementing [`TrieDBProvider`] +/// * `H` - Trie hinter implementing [`TrieHinter`] for state access optimization +/// * `Evm` - EVM factory implementing [`EvmFactory`] for execution environment creation +#[derive(Debug)] +pub struct StatelessL2Builder<'a, P, H, Evm> +where + P: TrieDBProvider, + H: TrieHinter, + Evm: EvmFactory, +{ + /// The rollup configuration containing chain parameters and activation heights. + /// + /// Provides access to network-specific parameters including gas limits, + /// hard fork activation heights, and system addresses needed for proper + /// L2 block execution and validation. 
+ pub(crate) config: &'a RollupConfig, + /// The trie database providing stateless access to L2 state via Merkle proofs. + /// + /// The [`TrieDB`] serves as the primary interface for state access during + /// execution, resolving account and storage queries using witness data + /// and cryptographic proofs rather than a traditional state database. + pub(crate) trie_db: TrieDB, + /// The block executor factory for creating OP Stack execution environments. + /// + /// This factory creates specialized OP Stack execution environments that + /// understand OP-specific transaction types, system calls, and state + /// management required for proper L2 block execution. + pub(crate) factory: OpBlockExecutorFactory, +} + +impl<'a, P, H, Evm> StatelessL2Builder<'a, P, H, Evm> +where + P: TrieDBProvider + Debug, + H: TrieHinter + Debug, + Evm: EvmFactory + 'static, + ::Tx: + FromTxWithEncoded + FromRecoveredTx + OpTxEnv, +{ + /// Creates a new stateless L2 block builder instance. + /// + /// Initializes the builder with the necessary components for stateless block execution + /// including the trie database, execution factory, and rollup configuration. + /// + /// # Arguments + /// * `config` - Rollup configuration with chain parameters and activation heights + /// * `evm_factory` - EVM factory for creating execution environments + /// * `provider` - Trie database provider for state access + /// * `hinter` - Trie hinter for optimizing state access patterns + /// * `parent_header` - Sealed header of the parent block to build upon + /// + /// # Returns + /// A new [`StatelessL2Builder`] ready for block building operations + /// + /// # Usage + /// ```rust,ignore + /// let builder = StatelessL2Builder::new( + /// &rollup_config, + /// evm_factory, + /// trie_provider, + /// trie_hinter, + /// parent_header, + /// ); + /// ``` + pub fn new( + config: &'a RollupConfig, + evm_factory: Evm, + provider: P, + hinter: H, + parent_header: Sealed

, + ) -> Self { + let trie_db = TrieDB::new(parent_header, provider, hinter); + let factory = OpBlockExecutorFactory::new( + OpAlloyReceiptBuilder::default(), + config.clone(), + evm_factory, + ); + Self { config, trie_db, factory } + } + + /// Builds and executes a new L2 block using the provided payload attributes. + /// + /// This method performs the complete block building and execution process in a stateless + /// manner, dynamically retrieving required state data via the trie database and producing + /// a fully executed block with receipts and state commitments. + /// + /// # Arguments + /// * `attrs` - Payload attributes containing transactions and block metadata + /// + /// # Returns + /// * `Ok(BlockBuildingOutcome)` - Successfully built and executed block with receipts + /// * `Err(ExecutorError)` - Block building or execution failure + /// + /// # Errors + /// This method can fail due to various conditions: + /// + /// ## Input Validation Errors + /// - [`ExecutorError::MissingGasLimit`]: Gas limit not provided in attributes + /// - [`ExecutorError::MissingTransactions`]: Transaction list not provided + /// - [`ExecutorError::MissingEIP1559Params`]: Required fee parameters missing (post-Holocene) + /// - [`ExecutorError::MissingParentBeaconBlockRoot`]: Beacon root missing (post-Dencun) + /// + /// ## Execution Errors + /// - [`ExecutorError::BlockGasLimitExceeded`]: Cumulative gas exceeds block limit + /// - [`ExecutorError::UnsupportedTransactionType`]: Unknown transaction type encountered + /// - [`ExecutorError::ExecutionError`]: EVM-level execution failures + /// + /// ## State Access Errors + /// - [`ExecutorError::TrieDBError`]: State tree access or proof verification failures + /// - Missing account data in witness + /// - Invalid Merkle proofs + /// + /// ## Data Integrity Errors + /// - [`ExecutorError::Recovery`]: Transaction signature recovery failures + /// - [`ExecutorError::RLPError`]: Data encoding/decoding errors + /// + /// # Block 
Building Process + /// + /// The block building process follows these steps: + /// + /// 1. **Environment Setup**: Configure EVM environment with proper gas settings + /// 2. **Witness Hinting**: Send payload witness hints to optimize state access + /// 3. **Transaction Execution**: Execute each transaction in order with state updates + /// 4. **Receipt Generation**: Generate execution receipts for all transactions + /// 5. **State Commitment**: Compute final state roots and output commitments + /// 6. **Block Assembly**: Assemble complete block with header and execution results + /// + /// # Stateless Execution Details + /// + /// Unlike traditional execution engines, this builder: + /// - Resolves state access via Merkle proofs instead of database lookups + /// - Validates all state access against cryptographic witnesses + /// - Produces deterministic results independent of execution environment + /// - Enables verification without full state replication + /// + /// # Performance Considerations + /// + /// - State access latency depends on proof verification overhead + /// - Memory usage scales with witness size rather than full state + /// - CPU overhead from cryptographic proof verification + /// - I/O patterns optimized through trie hinter guidance + pub fn build_block( + &mut self, + attrs: OpPayloadAttributes, + ) -> ExecutorResult { + // Step 1. Set up the execution environment. + let (base_fee_params, min_base_fee) = Self::active_base_fee_params( + self.config, + self.trie_db.parent_block_header(), + attrs.payload_attributes.timestamp, + )?; + let evm_env = self.evm_env( + self.config.spec_id(attrs.payload_attributes.timestamp), + self.trie_db.parent_block_header(), + &attrs, + &base_fee_params, + min_base_fee, + )?; + let block_env = evm_env.block_env().clone(); + let parent_hash = self.trie_db.parent_block_header().seal(); + + // Attempt to send a payload witness hint to the host. 
This hint instructs the host to + // populate its preimage store with the preimages required to statelessly execute + // this payload. This feature is experimental, so if the hint fails, we continue + // without it and fall back on on-demand preimage fetching for execution. + self.trie_db + .hinter + .hint_execution_witness(parent_hash, &attrs) + .map_err(|e| TrieDBError::Provider(e.to_string()))?; + + info!( + target: "block_builder", + block_number = %block_env.number, + block_timestamp = %block_env.timestamp, + block_gas_limit = block_env.gas_limit, + transactions = attrs.transactions.as_ref().map_or(0, |txs| txs.len()), + "Beginning block building." + ); + + // Step 2. Create the executor, using the trie database. + let mut state = State::builder() + .with_database(&mut self.trie_db) + .with_bundle_update() + .without_state_clear() + .build(); + let evm = self.factory.evm_factory().create_evm(&mut state, evm_env); + let ctx = OpBlockExecutionCtx { + parent_hash, + parent_beacon_block_root: attrs.payload_attributes.parent_beacon_block_root, + // This field is unused for individual block building jobs. + extra_data: Default::default(), + }; + let executor = self.factory.create_executor(evm, ctx); + + // Step 3. Execute the block containing the transactions within the payload attributes. + let transactions = attrs + .recovered_transactions_with_encoded() + .collect::, RecoveryError>>() + .map_err(ExecutorError::Recovery)?; + let ex_result = executor.execute_block(transactions.iter())?; + + info!( + target: "block_builder", + gas_used = ex_result.gas_used, + gas_limit = block_env.gas_limit, + "Finished block building. Beginning sealing job." + ); + + // Step 4. Merge state transitions and seal the block. 
+ state.merge_transitions(BundleRetention::Reverts); + let bundle = state.take_bundle(); + let header = self.seal_block(&attrs, parent_hash, &block_env, &ex_result, bundle)?; + + info!( + target: "block_builder", + number = header.number, + hash = ?header.seal(), + state_root = ?header.state_root, + transactions_root = ?header.transactions_root, + receipts_root = ?header.receipts_root, + "Sealed new block", + ); + + // Update the parent block hash in the state database, preparing for the next block. + self.trie_db.set_parent_block_header(header.clone()); + Ok((header, ex_result).into()) + } +} + +/// The outcome of a block building operation, returning the sealed block [`Header`] and the +/// [`BlockExecutionResult`]. +#[derive(Debug, Clone)] +pub struct BlockBuildingOutcome { + /// The block header. + pub header: Sealed
, + /// The block execution result. + pub execution_result: BlockExecutionResult, +} + +impl From<(Sealed
, BlockExecutionResult)> for BlockBuildingOutcome { + fn from( + (header, execution_result): (Sealed
, BlockExecutionResult), + ) -> Self { + Self { header, execution_result } + } +} + +#[cfg(test)] +mod test { + use crate::test_utils::run_test_fixture; + use rstest::rstest; + use std::path::PathBuf; + + #[rstest] + #[tokio::test] + async fn test_statelessly_execute_block( + #[base_dir = "./testdata"] + #[files("*.tar.gz")] + path: PathBuf, + ) { + run_test_fixture(path).await; + } +} diff --git a/rust/kona/crates/proof/executor/src/builder/env.rs b/rust/kona/crates/proof/executor/src/builder/env.rs new file mode 100644 index 00000000000..0671a6ac401 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/builder/env.rs @@ -0,0 +1,167 @@ +//! Environment utility functions for [`StatelessL2Builder`]. + +use super::StatelessL2Builder; +use crate::{ + ExecutorError, ExecutorResult, TrieDBProvider, + util::{ + decode_holocene_eip_1559_params_block_header, decode_jovian_eip_1559_params_block_header, + }, +}; +use alloy_consensus::{BlockHeader, Header}; +use alloy_eips::{calc_next_block_base_fee, eip1559::BaseFeeParams, eip7840::BlobParams}; +use alloy_evm::{EvmEnv, EvmFactory}; +use alloy_primitives::U256; +use kona_genesis::RollupConfig; +use kona_mpt::TrieHinter; +use op_alloy_rpc_types_engine::OpPayloadAttributes; +use op_revm::OpSpecId; +use revm::{ + context::{BlockEnv, CfgEnv}, + context_interface::block::BlobExcessGasAndPrice, + primitives::eip4844::{ + BLOB_BASE_FEE_UPDATE_FRACTION_CANCUN, BLOB_BASE_FEE_UPDATE_FRACTION_PRAGUE, + }, +}; + +impl StatelessL2Builder<'_, P, H, Evm> +where + P: TrieDBProvider, + H: TrieHinter, + Evm: EvmFactory, +{ + /// Returns the active [`EvmEnv`] for the executor. 
+ pub(crate) fn evm_env( + &self, + spec_id: OpSpecId, + parent_header: &Header, + payload_attrs: &OpPayloadAttributes, + base_fee_params: &BaseFeeParams, + min_base_fee: u64, + ) -> ExecutorResult> { + let block_env = self.prepare_block_env( + spec_id, + parent_header, + payload_attrs, + base_fee_params, + min_base_fee, + )?; + let cfg_env = self.evm_cfg_env(payload_attrs.payload_attributes.timestamp); + Ok(EvmEnv::new(cfg_env, block_env)) + } + + /// Returns the active [`CfgEnv`] for the executor. + pub(crate) fn evm_cfg_env(&self, timestamp: u64) -> CfgEnv { + CfgEnv::new() + .with_chain_id(self.config.l2_chain_id.id()) + .with_spec_and_mainnet_gas_params(self.config.spec_id(timestamp)) + } + + fn next_block_base_fee( + &self, + params: BaseFeeParams, + parent: &Header, + min_base_fee: u64, + ) -> Option { + if !self.config.is_jovian_active(parent.timestamp()) { + return parent.next_block_base_fee(params); + } + + // Starting from Jovian, we use the maximum of the gas used and the blob gas used to + // calculate the next base fee. + let gas_used = if parent.blob_gas_used().unwrap_or_default() > parent.gas_used() { + parent.blob_gas_used().unwrap_or_default() + } else { + parent.gas_used() + }; + + let mut next_block_base_fee = calc_next_block_base_fee( + gas_used, + parent.gas_limit(), + parent.base_fee_per_gas().unwrap_or_default(), + params, + ); + + // If the next block base fee is less than the min base fee, set it to the min base fee. + // # Note + // Before Jovian activation, the min-base-fee is 0 so this is a no-op. + if next_block_base_fee < min_base_fee { + next_block_base_fee = min_base_fee; + } + + Some(next_block_base_fee) + } + + /// Prepares a [`BlockEnv`] with the given [`OpPayloadAttributes`]. 
+ pub(crate) fn prepare_block_env( + &self, + spec_id: OpSpecId, + parent_header: &Header, + payload_attrs: &OpPayloadAttributes, + base_fee_params: &BaseFeeParams, + min_base_fee: u64, + ) -> ExecutorResult { + let (params, fraction) = if spec_id.is_enabled_in(OpSpecId::ISTHMUS) { + (Some(BlobParams::prague()), BLOB_BASE_FEE_UPDATE_FRACTION_PRAGUE) + } else if spec_id.is_enabled_in(OpSpecId::ECOTONE) { + (Some(BlobParams::cancun()), BLOB_BASE_FEE_UPDATE_FRACTION_CANCUN) + } else { + (None, 0) + }; + + let blob_excess_gas_and_price = parent_header + .maybe_next_block_excess_blob_gas(params) + .or_else(|| spec_id.is_enabled_in(OpSpecId::ECOTONE).then_some(0)) + .map(|excess| BlobExcessGasAndPrice::new(excess, fraction)); + + let next_block_base_fee = self + .next_block_base_fee(*base_fee_params, parent_header, min_base_fee) + .unwrap_or_default(); + + Ok(BlockEnv { + number: U256::from(parent_header.number + 1), + beneficiary: payload_attrs.payload_attributes.suggested_fee_recipient, + timestamp: U256::from(payload_attrs.payload_attributes.timestamp), + gas_limit: payload_attrs.gas_limit.ok_or(ExecutorError::MissingGasLimit)?, + basefee: next_block_base_fee, + prevrandao: Some(payload_attrs.payload_attributes.prev_randao), + blob_excess_gas_and_price, + ..Default::default() + }) + } + + /// Returns the active base fee parameters for the parent header. + /// Returns the min-base-fee as the second element of the tuple. + /// + /// ## Note + /// Before Jovian activation, the min-base-fee is 0. + pub(crate) fn active_base_fee_params( + config: &RollupConfig, + parent_header: &Header, + payload_timestamp: u64, + ) -> ExecutorResult<(BaseFeeParams, u64)> { + match config { + // After Holocene activation, the base fee parameters are stored in the + // `extraData` field of the parent header. If Holocene wasn't active in the + // parent block, the default base fee parameters are used. 
+ _ if config.is_jovian_active(parent_header.timestamp) => { + decode_jovian_eip_1559_params_block_header(parent_header) + } + _ if config.is_holocene_active(parent_header.timestamp) => { + decode_holocene_eip_1559_params_block_header(parent_header) + .map(|base_fee_params| (base_fee_params, 0)) + } + // If the next payload attribute timestamp is past canyon activation, + // use the canyon base fee params from the rollup config. + _ if config.is_canyon_active(payload_timestamp) => { + // If the payload attribute timestamp is past canyon activation, + // use the canyon base fee params from the rollup config. + Ok((config.chain_op_config.post_canyon_params(), 0)) + } + _ => { + // If the next payload attribute timestamp is prior to canyon activation, + // use the default base fee params from the rollup config. + Ok((config.chain_op_config.pre_canyon_params(), 0)) + } + } + } +} diff --git a/kona/crates/proof/executor/src/builder/mod.rs b/rust/kona/crates/proof/executor/src/builder/mod.rs similarity index 100% rename from kona/crates/proof/executor/src/builder/mod.rs rename to rust/kona/crates/proof/executor/src/builder/mod.rs diff --git a/rust/kona/crates/proof/executor/src/db/mod.rs b/rust/kona/crates/proof/executor/src/db/mod.rs new file mode 100644 index 00000000000..45942c0b4c6 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/db/mod.rs @@ -0,0 +1,478 @@ +//! This module contains an implementation of an in-memory Trie DB for [`revm`], that allows for +//! incremental updates through fetching node preimages on the fly during execution. 
+ +use crate::errors::{TrieDBError, TrieDBResult}; +use alloc::{string::ToString, vec::Vec}; +use alloy_consensus::{EMPTY_ROOT_HASH, Header, Sealed}; +use alloy_primitives::{Address, B256, U256, keccak256}; +use alloy_rlp::{Decodable, Encodable}; +use alloy_trie::TrieAccount; +use kona_mpt::{Nibbles, TrieHinter, TrieNode, TrieNodeError}; +use revm::{ + Database, + database::{BundleState, states::StorageSlot}, + primitives::{BLOCK_HASH_HISTORY, HashMap}, + state::{AccountInfo, Bytecode}, +}; + +mod traits; +pub use traits::{NoopTrieDBProvider, TrieDBProvider}; + +/// A Trie DB that caches open state in-memory. +/// +/// When accounts that don't already exist within the cached [`TrieNode`] are queried, the database +/// fetches the preimages of the trie nodes on the path to the account using the `PreimageFetcher` +/// (`F` generic). This allows for data to be fetched in a verifiable manner given an initial +/// trusted state root as it is needed during execution. +/// +/// The [`TrieDB`] is intended to be wrapped by a [`State`], which is then used by [`revm`] to +/// capture state transitions during block execution. +/// +/// **Behavior**: +/// - When an account is queried and the trie path has not already been opened by [`Self::basic`], +/// we fall through to the `PreimageFetcher` to fetch the preimages of the trie nodes on the path +/// to the account. After it has been fetched, the path will be cached until the next call to +/// [`Self::state_root`]. +/// - When querying for the code hash of an account, the [`TrieDBProvider`] is consulted to fetch +/// the code hash of the account. +/// - When a [`BundleState`] changeset is committed to the parent [`State`] database, the changes +/// are first applied to the [`State`]'s cache, then the trie hash is recomputed with +/// [`Self::state_root`]. 
+/// - When the block hash of a block number is needed via [`Self::block_hash`], the +/// `HeaderByHashFetcher` is consulted to walk back to the desired block number by revealing the +/// parent hash of block headers until the desired block number is reached, up to a maximum of +/// [`BLOCK_HASH_HISTORY`] blocks back relative to the current parent block hash. +/// +/// **Example Construction**: +/// ```rust +/// use alloy_consensus::{Header, Sealable}; +/// use alloy_evm::{EvmEnv, EvmFactory, block::BlockExecutorFactory}; +/// use alloy_op_evm::{ +/// OpBlockExecutionCtx, OpBlockExecutorFactory, OpEvmFactory, block::OpAlloyReceiptBuilder, +/// }; +/// use alloy_op_hardforks::OpChainHardforks; +/// use alloy_primitives::{B256, Bytes}; +/// use kona_executor::{NoopTrieDBProvider, TrieDB}; +/// use kona_mpt::NoopTrieHinter; +/// use revm::database::{State, states::bundle_state::BundleRetention}; +/// +/// let mock_parent_block_header = Header::default(); +/// let trie_db = +/// TrieDB::new(mock_parent_block_header.seal_slow(), NoopTrieDBProvider, NoopTrieHinter); +/// let executor_factory = OpBlockExecutorFactory::new( +/// OpAlloyReceiptBuilder::default(), +/// OpChainHardforks::op_mainnet(), +/// OpEvmFactory::default(), +/// ); +/// let mut state = State::builder().with_database(trie_db).with_bundle_update().build(); +/// let evm = executor_factory.evm_factory().create_evm(&mut state, EvmEnv::default()); +/// let executor = executor_factory.create_executor(evm, OpBlockExecutionCtx::default()); +/// +/// // Execute your block's transactions... +/// drop(executor); +/// +/// state.merge_transitions(BundleRetention::Reverts); +/// let bundle = state.take_bundle(); +/// let state_root = state.database.state_root(&bundle).expect("Failed to compute state root"); +/// ``` +/// +/// [`State`]: revm::database::State +#[derive(Debug, Clone)] +pub struct TrieDB +where + F: TrieDBProvider, + H: TrieHinter, +{ + /// The [`TrieNode`] representation of the root node. 
+ root_node: TrieNode, + /// Storage roots of accounts within the trie. + storage_roots: HashMap, + /// The parent block hash of the current block. + parent_block_header: Sealed
, + /// The [`TrieDBProvider`] + pub fetcher: F, + /// The [`TrieHinter`] + pub hinter: H, +} + +impl TrieDB +where + F: TrieDBProvider, + H: TrieHinter, +{ + /// Creates a new [`TrieDB`] with the given root node. + pub fn new(parent_block_header: Sealed
, fetcher: F, hinter: H) -> Self { + Self { + root_node: TrieNode::new_blinded(parent_block_header.state_root), + storage_roots: Default::default(), + parent_block_header, + fetcher, + hinter, + } + } + + /// Consumes `Self` and takes the current state root of the trie DB. + pub fn take_root_node(self) -> TrieNode { + self.root_node + } + + /// Returns a shared reference to the root [`TrieNode`] of the trie DB. + pub const fn root(&self) -> &TrieNode { + &self.root_node + } + + /// Returns the mapping of [Address]es to storage roots. + pub const fn storage_roots(&self) -> &HashMap { + &self.storage_roots + } + + /// Returns a reference to the current parent block header of the trie DB. + pub const fn parent_block_header(&self) -> &Sealed
{ + &self.parent_block_header + } + + /// Sets the parent block header of the trie DB. Should be called after a block has been + /// executed and the Header has been created. + /// + /// ## Takes + /// - `parent_block_header`: The parent block header of the current block. + pub fn set_parent_block_header(&mut self, parent_block_header: Sealed
) { + self.parent_block_header = parent_block_header; + } + + /// Applies a [`BundleState`] changeset to the [`TrieNode`] and recomputes the state root hash. + /// + /// ## Takes + /// - `bundle`: The [`BundleState`] changeset to apply to the trie DB. + /// + /// ## Returns + /// - `Ok(B256)`: The new state root hash of the trie DB. + /// - `Err(_)`: If the state root hash could not be computed. + pub fn state_root(&mut self, bundle: &BundleState) -> TrieDBResult { + debug!(target: "client_executor", "Recomputing state root"); + + // Update the accounts in the trie with the changeset. + self.update_accounts(bundle)?; + + // Recompute the root hash of the trie. + let root = self.root_node.blind(); + + debug!( + target: "client_executor", + "Recomputed state root: {root}", + ); + + // Extract the new state root from the root node. + Ok(root) + } + + /// Fetches the [`TrieAccount`] of an account from the trie DB. + /// + /// ## Takes + /// - `address`: The address of the account. + /// + /// ## Returns + /// - `Ok(Some(TrieAccount))`: The [`TrieAccount`] of the account. + /// - `Ok(None)`: If the account does not exist in the trie. + /// - `Err(_)`: If the account could not be fetched. + pub fn get_trie_account( + &mut self, + address: &Address, + block_number: u64, + ) -> TrieDBResult> { + // Send a hint to the host to fetch the account proof. + self.hinter + .hint_account_proof(*address, block_number) + .map_err(|e| TrieDBError::Provider(e.to_string()))?; + + // Fetch the account from the trie. + let hashed_address_nibbles = Nibbles::unpack(keccak256(address.as_slice())); + let Some(trie_account_rlp) = self.root_node.open(&hashed_address_nibbles, &self.fetcher)? + else { + return Ok(None); + }; + + // Decode the trie account from the RLP bytes. + TrieAccount::decode(&mut trie_account_rlp.as_ref()) + .map_err(TrieNodeError::RLPError) + .map_err(Into::into) + .map(Some) + } + + /// Modifies the accounts in the storage trie with the given [`BundleState`] changeset. 
+ /// + /// ## Takes + /// - `bundle`: The [`BundleState`] changeset to apply to the trie DB. + /// + /// ## Returns + /// - `Ok(())` if the accounts were successfully updated. + /// - `Err(_)` if the accounts could not be updated. + fn update_accounts(&mut self, bundle: &BundleState) -> TrieDBResult<()> { + // Sort the storage keys prior to applying the changeset, to ensure that the order of + // application is deterministic between runs. + let mut sorted_state = + bundle.state().iter().map(|(k, v)| (k, keccak256(*k), v)).collect::>(); + sorted_state.sort_by_key(|(_, hashed_addr, _)| *hashed_addr); + + for (address, hashed_address, bundle_account) in sorted_state { + if bundle_account.status.is_not_modified() { + continue; + } + + // Compute the path to the account in the trie. + let account_path = Nibbles::unpack(hashed_address.as_slice()); + + // If the account was destroyed, delete it from the trie. + if bundle_account.was_destroyed() { + self.root_node.delete(&account_path, &self.fetcher, &self.hinter)?; + self.storage_roots.remove(address); + continue; + } + + let account_info = + bundle_account.account_info().ok_or(TrieDBError::MissingAccountInfo)?; + + let mut trie_account = TrieAccount { + balance: account_info.balance, + nonce: account_info.nonce, + code_hash: account_info.code_hash, + ..Default::default() + }; + + // Update the account's storage root + let acc_storage_root = self + .storage_roots + .entry(*address) + .or_insert_with(|| TrieNode::new_blinded(EMPTY_ROOT_HASH)); + + // Sort the hashed storage keys prior to applying the changeset, to ensure that the + // order of application is deterministic between runs. 
+ let mut sorted_storage = bundle_account + .storage + .iter() + .map(|(k, v)| (keccak256(k.to_be_bytes::<32>()), v)) + .collect::>(); + sorted_storage.sort_by_key(|(slot, _)| *slot); + + sorted_storage.into_iter().try_for_each(|(hashed_key, value)| { + Self::change_storage( + acc_storage_root, + hashed_key, + value, + &self.fetcher, + &self.hinter, + ) + })?; + + // Recompute the account storage root. + let root = acc_storage_root.blind(); + trie_account.storage_root = root; + + // RLP encode the trie account for insertion. + let mut account_buf = Vec::with_capacity(trie_account.length()); + trie_account.encode(&mut account_buf); + + // Insert or update the account in the trie. + self.root_node.insert(&account_path, account_buf.into(), &self.fetcher)?; + } + + Ok(()) + } + + /// Modifies a storage slot of an account in the Merkle Patricia Trie. + /// + /// ## Takes + /// - `storage_root`: The storage root of the account. + /// - `hashed_key`: The hashed storage slot key. + /// - `value`: The new value of the storage slot. + /// - `fetcher`: The trie node fetcher. + /// - `hinter`: The trie hinter. + /// + /// ## Returns + /// - `Ok(())` if the storage slot was successfully modified. + /// - `Err(_)` if the storage slot could not be modified. + fn change_storage( + storage_root: &mut TrieNode, + hashed_key: B256, + value: &StorageSlot, + fetcher: &F, + hinter: &H, + ) -> TrieDBResult<()> { + if !value.is_changed() { + return Ok(()); + } + + // RLP encode the storage slot value. + let mut rlp_buf = Vec::with_capacity(value.present_value.length()); + value.present_value.encode(&mut rlp_buf); + + // Insert or update the storage slot in the trie. + let hashed_slot_key = Nibbles::unpack(hashed_key.as_slice()); + if value.present_value.is_zero() { + // If the storage slot is being set to zero, prune it from the trie. + storage_root.delete(&hashed_slot_key, fetcher, hinter)?; + } else { + // Otherwise, update the storage slot. 
+ storage_root.insert(&hashed_slot_key, rlp_buf.into(), fetcher)?; + } + + Ok(()) + } +} + +impl Database for TrieDB +where + F: TrieDBProvider, + H: TrieHinter, +{ + type Error = TrieDBError; + + fn basic(&mut self, address: Address) -> Result, Self::Error> { + // Fetch the account from the trie. + let Some(trie_account) = + self.get_trie_account(&address, self.parent_block_header.number)? + else { + // If the account does not exist in the trie, return `Ok(None)`. + return Ok(None); + }; + + // Insert the account's storage root into the cache. + self.storage_roots.insert(address, TrieNode::new_blinded(trie_account.storage_root)); + + // Return a partial DB account. The storage and code are not loaded out-right, and are + // loaded optimistically in the `Database` + `DatabaseRef` trait implementations. + Ok(Some(AccountInfo { + balance: trie_account.balance, + nonce: trie_account.nonce, + code_hash: trie_account.code_hash, + code: None, + account_id: None, + })) + } + + fn code_by_hash(&mut self, code_hash: B256) -> Result { + self.fetcher + .bytecode_by_hash(code_hash) + .map(Bytecode::new_raw) + .map_err(|e| TrieDBError::Provider(e.to_string())) + } + + fn storage(&mut self, address: Address, index: U256) -> Result { + // Send a hint to the host to fetch the storage proof. + self.hinter + .hint_storage_proof(address, index, self.parent_block_header.number) + .map_err(|e| TrieDBError::Provider(e.to_string()))?; + + // Fetch the account's storage root from the cache. If storage is being accessed, the + // account should have been loaded into the cache by the `basic` method. If the account was + // non-existing, the storage root will not be present. + match self.storage_roots.get_mut(&address) { + None => { + // If the storage root for the account does not exist, return zero. + Ok(U256::ZERO) + } + Some(storage_root) => { + // Fetch the storage slot from the trie. 
+ let hashed_slot_key = keccak256(index.to_be_bytes::<32>().as_slice()); + match storage_root.open(&Nibbles::unpack(hashed_slot_key), &self.fetcher)? { + Some(slot_value) => { + // Decode the storage slot value. + let int_slot = U256::decode(&mut slot_value.as_ref()) + .map_err(TrieNodeError::RLPError)?; + Ok(int_slot) + } + None => { + // If the storage slot does not exist, return zero. + Ok(U256::ZERO) + } + } + } + } + } + + fn block_hash(&mut self, block_number: u64) -> Result { + // Copy the current header + let mut header = self.parent_block_header.inner().clone(); + + // Check if the block number is in range. If not, we can fail early. + if block_number > header.number || + header.number.saturating_sub(block_number) > BLOCK_HASH_HISTORY + { + return Ok(B256::default()); + } + + // Walk back the block headers to the desired block number. + while header.number > block_number { + header = self + .fetcher + .header_by_hash(header.parent_hash) + .map_err(|e| TrieDBError::Provider(e.to_string()))?; + } + + Ok(header.hash_slow()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::Sealable; + use alloy_primitives::b256; + use kona_mpt::NoopTrieHinter; + + fn new_test_db() -> TrieDB { + TrieDB::new(Header::default().seal_slow(), NoopTrieDBProvider, NoopTrieHinter) + } + + #[test] + fn test_trie_db_take_root_node() { + let db = new_test_db(); + let root_node = db.take_root_node(); + assert_eq!(root_node.blind(), EMPTY_ROOT_HASH); + } + + #[test] + fn test_trie_db_root_node_ref() { + let db = new_test_db(); + let root_node = db.root(); + assert_eq!(root_node.blind(), EMPTY_ROOT_HASH); + } + + #[test] + fn test_trie_db_storage_roots() { + let db = new_test_db(); + let storage_roots = db.storage_roots(); + assert!(storage_roots.is_empty()); + } + + #[test] + fn test_block_hash_above_range() { + let mut db = new_test_db(); + db.parent_block_header = Header { number: 10, ..Default::default() }.seal_slow(); + let block_number = 11; + let block_hash = 
db.block_hash(block_number).unwrap(); + assert_eq!(block_hash, B256::default()); + } + + #[test] + fn test_block_hash_below_range() { + let mut db = new_test_db(); + db.parent_block_header = + Header { number: BLOCK_HASH_HISTORY + 10, ..Default::default() }.seal_slow(); + let block_number = 0; + let block_hash = db.block_hash(block_number).unwrap(); + assert_eq!(block_hash, B256::default()); + } + + #[test] + fn test_block_hash_provider_missing_hash() { + let mut db = new_test_db(); + db.parent_block_header = Header { number: 10, ..Default::default() }.seal_slow(); + let block_number = 5; + let block_hash = db.block_hash(block_number).unwrap(); + assert_eq!( + block_hash, + b256!("78dec18c6d7da925bbe773c315653cdc70f6444ed6c1de9ac30bdb36cff74c3b") + ); + } +} diff --git a/rust/kona/crates/proof/executor/src/db/traits.rs b/rust/kona/crates/proof/executor/src/db/traits.rs new file mode 100644 index 00000000000..fd847f85fbe --- /dev/null +++ b/rust/kona/crates/proof/executor/src/db/traits.rs @@ -0,0 +1,57 @@ +//! Contains the [`TrieDBProvider`] trait for fetching EVM bytecode hash preimages as well as +//! [Header] preimages. + +use alloc::string::String; +use alloy_consensus::Header; +use alloy_primitives::{B256, Bytes}; +use kona_mpt::{TrieNode, TrieProvider}; + +/// The [`TrieDBProvider`] trait defines the synchronous interface for fetching EVM bytecode hash +/// preimages as well as [Header] preimages. +pub trait TrieDBProvider: TrieProvider { + /// Fetches the preimage of the bytecode hash provided. + /// + /// ## Takes + /// - `hash`: The hash of the bytecode. + /// + /// ## Returns + /// - Ok(Bytes): The bytecode of the contract. + /// - `Err(Self::Error)`: If the bytecode hash could not be fetched. + /// + /// [TrieDB]: crate::TrieDB + fn bytecode_by_hash(&self, code_hash: B256) -> Result; + + /// Fetches the preimage of [Header] hash provided. + /// + /// ## Takes + /// - `hash`: The hash of the RLP-encoded [Header]. 
+ /// + /// ## Returns + /// - Ok(Bytes): The [Header]. + /// - `Err(Self::Error)`: If the [Header] could not be fetched. + /// + /// [TrieDB]: crate::TrieDB + fn header_by_hash(&self, hash: B256) -> Result; +} + +/// The default, no-op implementation of the [`TrieDBProvider`] trait, used for testing. +#[derive(Debug, Clone, Copy)] +pub struct NoopTrieDBProvider; + +impl TrieProvider for NoopTrieDBProvider { + type Error = String; + + fn trie_node_by_hash(&self, _key: B256) -> Result { + Ok(TrieNode::Empty) + } +} + +impl TrieDBProvider for NoopTrieDBProvider { + fn bytecode_by_hash(&self, _code_hash: B256) -> Result { + Ok(Bytes::default()) + } + + fn header_by_hash(&self, _hash: B256) -> Result { + Ok(Header::default()) + } +} diff --git a/kona/crates/proof/executor/src/errors.rs b/rust/kona/crates/proof/executor/src/errors.rs similarity index 100% rename from kona/crates/proof/executor/src/errors.rs rename to rust/kona/crates/proof/executor/src/errors.rs diff --git a/rust/kona/crates/proof/executor/src/lib.rs b/rust/kona/crates/proof/executor/src/lib.rs new file mode 100644 index 00000000000..70c7703f548 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/lib.rs @@ -0,0 +1,29 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(any(test, feature = "test-utils")), no_std)] + +extern crate alloc; + +#[macro_use] +extern crate tracing; + +mod db; +pub use db::{NoopTrieDBProvider, TrieDB, TrieDBProvider}; + +mod builder; +pub use builder::{BlockBuildingOutcome, StatelessL2Builder, compute_receipts_root}; + +mod errors; +pub use errors::{ + Eip1559ValidationError, ExecutorError, 
ExecutorResult, TrieDBError, TrieDBResult, +}; + +pub(crate) mod util; + +#[cfg(any(test, feature = "test-utils"))] +pub mod test_utils; diff --git a/rust/kona/crates/proof/executor/src/test_utils.rs b/rust/kona/crates/proof/executor/src/test_utils.rs new file mode 100644 index 00000000000..e9d01490cc3 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/test_utils.rs @@ -0,0 +1,372 @@ +//! Test utilities for the executor. + +use crate::{StatelessL2Builder, TrieDBProvider}; +use alloy_consensus::Header; +use alloy_op_evm::OpEvmFactory; +use alloy_primitives::{B256, Bytes, Sealable}; +use alloy_provider::{Provider, RootProvider, network::primitives::BlockTransactions}; +use alloy_rlp::Decodable; +use alloy_rpc_client::RpcClient; +use alloy_rpc_types_engine::PayloadAttributes; +use alloy_transport_http::{Client, Http}; +use kona_genesis::RollupConfig; +use kona_mpt::{NoopTrieHinter, TrieNode, TrieProvider}; +use kona_registry::ROLLUP_CONFIGS; +use op_alloy_rpc_types_engine::OpPayloadAttributes; +use rocksdb::{DB, Options}; +use serde::{Deserialize, Serialize}; +use std::{path::PathBuf, sync::Arc}; +use tokio::{fs, runtime::Handle, sync::Mutex}; + +/// Executes a [`ExecutorTestFixture`] stored at the passed `fixture_path` and asserts that the +/// produced block hash matches the expected block hash. +pub async fn run_test_fixture(fixture_path: PathBuf) { + // First, untar the fixture. 
+ let fixture_dir = tempfile::tempdir().expect("Failed to create temporary directory"); + tokio::process::Command::new("tar") + .arg("-xvf") + .arg(fixture_path.as_path()) + .arg("-C") + .arg(fixture_dir.path()) + .arg("--strip-components=1") + .output() + .await + .expect("Failed to untar fixture"); + + let mut options = Options::default(); + options.set_compression_type(rocksdb::DBCompressionType::Snappy); + options.create_if_missing(true); + let kv_store = DB::open(&options, fixture_dir.path().join("kv")) + .unwrap_or_else(|e| panic!("Failed to open database at {fixture_dir:?}: {e}")); + let provider = DiskTrieNodeProvider::new(kv_store); + let fixture: ExecutorTestFixture = + serde_json::from_slice(&fs::read(fixture_dir.path().join("fixture.json")).await.unwrap()) + .expect("Failed to deserialize fixture"); + + let mut executor = StatelessL2Builder::new( + &fixture.rollup_config, + OpEvmFactory::default(), + provider, + NoopTrieHinter, + fixture.parent_header.seal_slow(), + ); + + let outcome = executor.build_block(fixture.executing_payload).unwrap(); + + assert_eq!( + outcome.header.hash(), + fixture.expected_block_hash, + "Produced header does not match the expected header" + ); +} + +/// The test fixture format for the [`StatelessL2Builder`]. +#[derive(Debug, Serialize, Deserialize)] +pub struct ExecutorTestFixture { + /// The rollup configuration for the executing chain. + pub rollup_config: RollupConfig, + /// The parent block header. + pub parent_header: Header, + /// The executing payload attributes. + pub executing_payload: OpPayloadAttributes, + /// The expected block hash + pub expected_block_hash: B256, +} + +/// A test fixture creator for the [`StatelessL2Builder`]. +#[derive(Debug)] +pub struct ExecutorTestFixtureCreator { + /// The RPC provider for the L2 execution layer. + pub provider: RootProvider, + /// The block number to create the test fixture for. + pub block_number: u64, + /// The key value store for the test fixture. 
+ pub kv_store: Arc>, + /// The data directory for the test fixture. + pub data_dir: PathBuf, +} + +impl ExecutorTestFixtureCreator { + /// Creates a new [`ExecutorTestFixtureCreator`] with the given parameters. + pub fn new(provider_url: &str, block_number: u64, base_fixture_directory: PathBuf) -> Self { + let base = base_fixture_directory.join(format!("block-{block_number}")); + + let url = provider_url.parse().expect("Invalid provider URL"); + let http = Http::::new(url); + let provider = RootProvider::new(RpcClient::new(http, false)); + + let mut options = Options::default(); + options.set_compression_type(rocksdb::DBCompressionType::Snappy); + options.create_if_missing(true); + let db = DB::open(&options, base.join("kv").as_path()) + .unwrap_or_else(|e| panic!("Failed to open database at {base:?}: {e}")); + + Self { provider, block_number, kv_store: Arc::new(Mutex::new(db)), data_dir: base } + } +} + +impl ExecutorTestFixtureCreator { + /// Create a static test fixture with the configuration provided. 
+ pub async fn create_static_fixture(self) { + let chain_id = self.provider.get_chain_id().await.expect("Failed to get chain ID"); + let rollup_config = ROLLUP_CONFIGS.get(&chain_id).expect("Rollup config not found"); + + let executing_block = self + .provider + .get_block_by_number(self.block_number.into()) + .await + .expect("Failed to get parent block") + .expect("Block not found"); + let parent_block = self + .provider + .get_block_by_number((self.block_number - 1).into()) + .await + .expect("Failed to get parent block") + .expect("Block not found"); + + let executing_header = executing_block.header; + let parent_header = parent_block.header.inner.seal_slow(); + + let encoded_executing_transactions = match executing_block.transactions { + BlockTransactions::Hashes(transactions) => { + let mut encoded_transactions = Vec::with_capacity(transactions.len()); + for tx_hash in transactions { + let tx = self + .provider + .client() + .request::<&[B256; 1], Bytes>("debug_getRawTransaction", &[tx_hash]) + .await + .expect("Block not found"); + encoded_transactions.push(tx); + } + encoded_transactions + } + _ => panic!("Only BlockTransactions::Hashes are supported."), + }; + + let payload_attrs = OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: executing_header.timestamp, + parent_beacon_block_root: executing_header.parent_beacon_block_root, + prev_randao: executing_header.mix_hash, + withdrawals: Default::default(), + suggested_fee_recipient: executing_header.beneficiary, + }, + gas_limit: Some(executing_header.gas_limit), + transactions: Some(encoded_executing_transactions), + no_tx_pool: None, + eip_1559_params: rollup_config.is_holocene_active(executing_header.timestamp).then( + || { + executing_header.extra_data[1..9] + .try_into() + .expect("Invalid header format for Holocene") + }, + ), + min_base_fee: rollup_config.is_jovian_active(executing_header.timestamp).then(|| { + // The min base fee is the bytes 9-17 of the extra data. 
+ executing_header.extra_data[9..17] + .try_into() + .map(u64::from_be_bytes) + .expect("Invalid header format for Jovian") + }), + }; + + let fixture_path = self.data_dir.join("fixture.json"); + let fixture = ExecutorTestFixture { + rollup_config: rollup_config.clone(), + parent_header: parent_header.inner().clone(), + executing_payload: payload_attrs.clone(), + expected_block_hash: executing_header.hash_slow(), + }; + + let mut executor = StatelessL2Builder::new( + rollup_config, + OpEvmFactory::default(), + self, + NoopTrieHinter, + parent_header, + ); + let outcome = executor.build_block(payload_attrs).expect("Failed to execute block"); + + assert_eq!( + outcome.header.inner(), + &executing_header.inner, + "Produced header does not match the expected header" + ); + fs::write(fixture_path.as_path(), serde_json::to_vec(&fixture).unwrap()).await.unwrap(); + + // Tar the fixture. + let data_dir = fixture_path.parent().unwrap(); + tokio::process::Command::new("tar") + .arg("-czf") + .arg(data_dir.with_extension("tar.gz").file_name().unwrap()) + .arg(data_dir.file_name().unwrap()) + .current_dir(data_dir.parent().unwrap()) + .output() + .await + .expect("Failed to tar fixture"); + + // Remove the leftover directory. + fs::remove_dir_all(data_dir).await.expect("Failed to remove temporary directory"); + } +} + +impl TrieProvider for ExecutorTestFixtureCreator { + type Error = TestTrieNodeProviderError; + + fn trie_node_by_hash(&self, key: B256) -> Result { + // Fetch the preimage from the L2 chain provider. + let preimage: Bytes = tokio::task::block_in_place(move || { + Handle::current().block_on(async { + let preimage: Bytes = self + .provider + .client() + .request("debug_dbGet", &[key]) + .await + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?; + + self.kv_store + .lock() + .await + .put(key, preimage.clone()) + .map_err(|_| TestTrieNodeProviderError::KVStore)?; + + Ok(preimage) + }) + })?; + + // Decode the preimage into a trie node. 
+ TrieNode::decode(&mut preimage.as_ref()).map_err(TestTrieNodeProviderError::Rlp) + } +} + +impl TrieDBProvider for ExecutorTestFixtureCreator { + fn bytecode_by_hash(&self, hash: B256) -> Result { + // geth hashdb scheme code hash key prefix + const CODE_PREFIX: u8 = b'c'; + + // Fetch the preimage from the L2 chain provider. + let preimage: Bytes = tokio::task::block_in_place(move || { + Handle::current().block_on(async { + // Attempt to fetch the code from the L2 chain provider. + let code_hash = [&[CODE_PREFIX], hash.as_slice()].concat(); + let code = self + .provider + .client() + .request::<&[Bytes; 1], Bytes>("debug_dbGet", &[code_hash.into()]) + .await; + + // Check if the first attempt to fetch the code failed. If it did, try fetching the + // code hash preimage without the geth hashdb scheme prefix. + let code = match code { + Ok(code) => code, + Err(_) => self + .provider + .client() + .request::<&[B256; 1], Bytes>("debug_dbGet", &[hash]) + .await + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?, + }; + + self.kv_store + .lock() + .await + .put(hash, code.clone()) + .map_err(|_| TestTrieNodeProviderError::KVStore)?; + + Ok(code) + }) + })?; + + Ok(preimage) + } + + fn header_by_hash(&self, hash: B256) -> Result { + let encoded_header: Bytes = tokio::task::block_in_place(move || { + Handle::current().block_on(async { + let preimage: Bytes = self + .provider + .client() + .request("debug_getRawHeader", &[hash]) + .await + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)?; + + self.kv_store + .lock() + .await + .put(hash, preimage.clone()) + .map_err(|_| TestTrieNodeProviderError::KVStore)?; + + Ok(preimage) + }) + })?; + + // Decode the Header. + Header::decode(&mut encoded_header.as_ref()).map_err(TestTrieNodeProviderError::Rlp) + } +} + +/// A simple [`TrieDBProvider`] that reads data from a disk-based key-value store. 
+#[derive(Debug)] +pub struct DiskTrieNodeProvider { + kv_store: DB, +} + +impl DiskTrieNodeProvider { + /// Creates a new [`DiskTrieNodeProvider`] with the given [`rocksdb`] K/V store. + pub const fn new(kv_store: DB) -> Self { + Self { kv_store } + } +} + +impl TrieProvider for DiskTrieNodeProvider { + type Error = TestTrieNodeProviderError; + + fn trie_node_by_hash(&self, key: B256) -> Result { + TrieNode::decode( + &mut self + .kv_store + .get(key) + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? + .ok_or(TestTrieNodeProviderError::PreimageNotFound)? + .as_slice(), + ) + .map_err(TestTrieNodeProviderError::Rlp) + } +} + +impl TrieDBProvider for DiskTrieNodeProvider { + fn bytecode_by_hash(&self, code_hash: B256) -> Result { + self.kv_store + .get(code_hash) + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? + .map(Bytes::from) + .ok_or(TestTrieNodeProviderError::PreimageNotFound) + } + + fn header_by_hash(&self, hash: B256) -> Result { + Header::decode( + &mut self + .kv_store + .get(hash) + .map_err(|_| TestTrieNodeProviderError::PreimageNotFound)? + .ok_or(TestTrieNodeProviderError::PreimageNotFound)? + .as_slice(), + ) + .map_err(TestTrieNodeProviderError::Rlp) + } +} + +/// An error type for the [`DiskTrieNodeProvider`] and [`ExecutorTestFixtureCreator`]. +#[derive(Debug, thiserror::Error)] +pub enum TestTrieNodeProviderError { + /// The preimage was not found in the key-value store. + #[error("Preimage not found")] + PreimageNotFound, + /// Failed to decode the RLP-encoded data. + #[error("Failed to decode RLP: {0}")] + Rlp(alloy_rlp::Error), + /// Failed to write back to the key-value store. + #[error("Failed to write back to key value store")] + KVStore, +} diff --git a/rust/kona/crates/proof/executor/src/util.rs b/rust/kona/crates/proof/executor/src/util.rs new file mode 100644 index 00000000000..6170d11ed43 --- /dev/null +++ b/rust/kona/crates/proof/executor/src/util.rs @@ -0,0 +1,223 @@ +//! 
Contains utilities for the L2 executor. + +use crate::{Eip1559ValidationError, ExecutorError, ExecutorResult}; +use alloy_consensus::{BlockHeader, Header}; +use alloy_eips::eip1559::BaseFeeParams; +use alloy_primitives::Bytes; +use kona_genesis::RollupConfig; +use op_alloy_consensus::{ + EIP1559ParamError, decode_holocene_extra_data, decode_jovian_extra_data, + encode_holocene_extra_data, encode_jovian_extra_data, +}; +use op_alloy_rpc_types_engine::OpPayloadAttributes; + +/// Parse Holocene [Header] extra data from the block header. +/// +/// ## Takes +/// - `extra_data`: The extra data field of the [Header]. +/// +/// ## Returns +/// - `Ok(BaseFeeParams)`: The EIP-1559 parameters. +/// - `Err(ExecutorError::InvalidExtraData)`: If the extra data is invalid. +pub(crate) fn decode_holocene_eip_1559_params_block_header( + header: &Header, +) -> ExecutorResult { + let (elasticity, denominator) = decode_holocene_extra_data(header.extra_data())?; + + // Check for potential division by zero. + // In the block header, the denominator is always non-zero. + // + if denominator == 0 { + return Err(ExecutorError::InvalidExtraData(Eip1559ValidationError::ZeroDenominator)); + } + + Ok(BaseFeeParams { + elasticity_multiplier: elasticity.into(), + max_change_denominator: denominator.into(), + }) +} + +pub(crate) fn decode_jovian_eip_1559_params_block_header( + header: &Header, +) -> ExecutorResult<(BaseFeeParams, u64)> { + let (elasticity, denominator, min_base_fee) = decode_jovian_extra_data(header.extra_data())?; + + // Check for potential division by zero. + // In the block header, the denominator is always non-zero. + // + if denominator == 0 { + return Err(ExecutorError::InvalidExtraData(Eip1559ValidationError::ZeroDenominator)); + } + + Ok(( + BaseFeeParams { + elasticity_multiplier: elasticity.into(), + max_change_denominator: denominator.into(), + }, + min_base_fee, + )) +} + +/// Encode Holocene [Header] extra data. 
+/// +/// ## Takes +/// - `config`: The [`RollupConfig`] for the chain. +/// - `attributes`: The [`OpPayloadAttributes`] for the block. +/// +/// ## Returns +/// - `Ok(data)`: The encoded extra data. +/// - `Err(ExecutorError::MissingEIP1559Params)`: If the EIP-1559 parameters are missing. +pub(crate) fn encode_holocene_eip_1559_params( + config: &RollupConfig, + attributes: &OpPayloadAttributes, +) -> ExecutorResult { + Ok(encode_holocene_extra_data( + attributes.eip_1559_params.ok_or(ExecutorError::MissingEIP1559Params)?, + config.chain_op_config.post_canyon_params(), + )?) +} + +/// Encode Jovian [Header] extra data. +/// +/// ## Takes +/// - `config`: The [`RollupConfig`] for the chain. +/// - `attributes`: The [`OpPayloadAttributes`] for the block. +/// +/// ## Returns +/// - `Ok(data)`: The encoded extra data. +/// - `Err(ExecutorError::MissingEIP1559Params)`: If the EIP-1559 parameters are missing. +pub(crate) fn encode_jovian_eip_1559_params( + config: &RollupConfig, + attributes: &OpPayloadAttributes, +) -> ExecutorResult { + Ok(encode_jovian_extra_data( + attributes.eip_1559_params.ok_or(ExecutorError::MissingEIP1559Params)?, + config.chain_op_config.post_canyon_params(), + attributes.min_base_fee.ok_or(ExecutorError::InvalidExtraData( + Eip1559ValidationError::Decode(EIP1559ParamError::MinBaseFeeNotSet), + ))?, + )?) 
+} + +#[cfg(test)] +mod test { + use super::decode_holocene_eip_1559_params_block_header; + use crate::util::{ + decode_jovian_eip_1559_params_block_header, encode_holocene_eip_1559_params, + }; + use alloy_consensus::Header; + use alloy_primitives::{B64, b64, bytes}; + use alloy_rpc_types_engine::PayloadAttributes; + use kona_genesis::{BaseFeeConfig, RollupConfig}; + use op_alloy_rpc_types_engine::OpPayloadAttributes; + + fn mock_payload(eip_1559_params: Option) -> OpPayloadAttributes { + OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: 0, + prev_randao: Default::default(), + suggested_fee_recipient: Default::default(), + withdrawals: Default::default(), + parent_beacon_block_root: Default::default(), + }, + transactions: None, + no_tx_pool: None, + gas_limit: None, + eip_1559_params, + min_base_fee: None, + } + } + + #[test] + fn test_decode_holocene_eip_1559_params() { + let params = bytes!("00BEEFBABE0BADC0DE"); + let mock_header = Header { extra_data: params, ..Default::default() }; + let params = decode_holocene_eip_1559_params_block_header(&mock_header).unwrap(); + + assert_eq!(params.elasticity_multiplier, 0x0BAD_C0DE); + assert_eq!(params.max_change_denominator, 0xBEEF_BABE); + } + + #[test] + fn test_decode_jovian_eip_1559_params() { + let params = bytes!("01BEEFBABE0BADC0DE00000000DEADBEEF"); + let mock_header = Header { extra_data: params, ..Default::default() }; + let (params, base_fee) = decode_jovian_eip_1559_params_block_header(&mock_header).unwrap(); + + assert_eq!(params.elasticity_multiplier, 0x0BAD_C0DE); + assert_eq!(params.max_change_denominator, 0xBEEF_BABE); + assert_eq!(base_fee, 0xDEAD_BEEF); + } + + #[test] + fn test_decode_holocene_eip_1559_params_invalid_version() { + let params = bytes!("01BEEFBABE0BADC0DE"); + let mock_header = Header { extra_data: params, ..Default::default() }; + assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); + } + + #[test] + fn 
test_decode_holocene_eip_1559_params_invalid_denominator() { + let params = bytes!("00000000000BADC0DE"); + let mock_header = Header { extra_data: params, ..Default::default() }; + assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); + } + + #[test] + fn test_decode_holocene_eip_1559_params_invalid_length() { + let params = bytes!("00"); + let mock_header = Header { extra_data: params, ..Default::default() }; + assert!(decode_holocene_eip_1559_params_block_header(&mock_header).is_err()); + } + + #[test] + fn test_encode_holocene_eip_1559_params_missing() { + let cfg = RollupConfig { + chain_op_config: BaseFeeConfig { + eip1559_denominator: 50, + eip1559_elasticity: 64, + eip1559_denominator_canyon: 250, + }, + ..Default::default() + }; + let attrs = mock_payload(None); + + assert!(encode_holocene_eip_1559_params(&cfg, &attrs).is_err()); + } + + #[test] + fn test_encode_holocene_eip_1559_params_default() { + let cfg = RollupConfig { + chain_op_config: BaseFeeConfig { + eip1559_denominator: 50, + eip1559_elasticity: 64, + eip1559_denominator_canyon: 250, + }, + ..Default::default() + }; + let attrs = mock_payload(Some(B64::ZERO)); + + assert_eq!( + encode_holocene_eip_1559_params(&cfg, &attrs).unwrap(), + bytes!("00000000fa00000040") + ); + } + + #[test] + fn test_encode_holocene_eip_1559_params() { + let cfg = RollupConfig { + chain_op_config: BaseFeeConfig { + eip1559_denominator: 50, + eip1559_elasticity: 64, + eip1559_denominator_canyon: 250, + }, + ..Default::default() + }; + let attrs = mock_payload(Some(b64!("0000004000000060"))); + + assert_eq!( + encode_holocene_eip_1559_params(&cfg, &attrs).unwrap(), + bytes!("000000004000000060") + ); + } +} diff --git a/kona/crates/proof/executor/testdata/block-26207960.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26207960.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26207960.tar.gz rename to 
rust/kona/crates/proof/executor/testdata/block-26207960.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26207961.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26207961.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26207961.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26207961.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26207962.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26207962.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26207962.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26207962.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26207963.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26207963.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26207963.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26207963.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26208384.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26208384.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26208384.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26208384.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26208858.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26208858.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26208858.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26208858.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26208927.tar.gz b/rust/kona/crates/proof/executor/testdata/block-26208927.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26208927.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26208927.tar.gz diff --git a/kona/crates/proof/executor/testdata/block-26211680.tar.gz 
b/rust/kona/crates/proof/executor/testdata/block-26211680.tar.gz similarity index 100% rename from kona/crates/proof/executor/testdata/block-26211680.tar.gz rename to rust/kona/crates/proof/executor/testdata/block-26211680.tar.gz diff --git a/kona/crates/proof/mpt/CHANGELOG.md b/rust/kona/crates/proof/mpt/CHANGELOG.md similarity index 100% rename from kona/crates/proof/mpt/CHANGELOG.md rename to rust/kona/crates/proof/mpt/CHANGELOG.md diff --git a/rust/kona/crates/proof/mpt/Cargo.toml b/rust/kona/crates/proof/mpt/Cargo.toml new file mode 100644 index 00000000000..42ed5a2c44a --- /dev/null +++ b/rust/kona/crates/proof/mpt/Cargo.toml @@ -0,0 +1,52 @@ +[package] +name = "kona-mpt" +description = "Utilities for interacting with and iterating through a merkle patricia trie" +version = "0.3.0" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# General +thiserror.workspace = true +serde = { workspace = true, optional = true, features = ["derive", "alloc"] } + +# Revm + Alloy +alloy-rlp.workspace = true +alloy-trie.workspace = true +alloy-primitives = { workspace = true, features = ["rlp"] } + +# Op-alloy +op-alloy-rpc-types-engine.workspace = true + +[dev-dependencies] +# Alloy +alloy-provider = { workspace = true, features = ["reqwest"] } +alloy-consensus.workspace = true +alloy-transport-http.workspace = true +alloy-rpc-types = { workspace = true, features = ["eth", "debug"] } + +# General +rand.workspace = true +reqwest.workspace = true +proptest.workspace = true +tokio = { workspace = true, features = ["full"] } +criterion = { workspace = true, features = ["html_reports"] } + +[features] +default = [ "serde" ] +serde = [ + "alloy-primitives/serde", + "alloy-trie/serde", + "dep:serde", + "op-alloy-rpc-types-engine/serde", +] + +[[bench]] +name = "trie_node" +harness = false diff --git a/rust/kona/crates/proof/mpt/README.md 
b/rust/kona/crates/proof/mpt/README.md new file mode 100644 index 00000000000..08519b7ac24 --- /dev/null +++ b/rust/kona/crates/proof/mpt/README.md @@ -0,0 +1,13 @@ +# `kona-mpt` + +A recursive, in-memory implementation of Ethereum's hexary Merkle Patricia Trie (MPT), supporting: +- Retrieval +- Insertion +- Deletion +- Root Computation + - Trie Node RLP Encoding + +This implementation is intended to serve as a backend for a stateless executor of Ethereum blocks, like +the one in the [`kona-executor`](../executor) crate. Starting with a trie root, the `TrieNode` can be +unravelled to access, insert, or delete values. These operations are all backed by the `TrieProvider`, +which enables fetching the preimages of hashed trie nodes. diff --git a/kona/crates/proof/mpt/benches/trie_node.rs b/rust/kona/crates/proof/mpt/benches/trie_node.rs similarity index 96% rename from kona/crates/proof/mpt/benches/trie_node.rs rename to rust/kona/crates/proof/mpt/benches/trie_node.rs index a177ca0f114..6f1dd172e0b 100644 --- a/kona/crates/proof/mpt/benches/trie_node.rs +++ b/rust/kona/crates/proof/mpt/benches/trie_node.rs @@ -1,10 +1,9 @@ #![allow(missing_docs)] -//! Contains benchmarks for the [TrieNode]. +//! Contains benchmarks for the [`TrieNode`]. use alloy_trie::Nibbles; use criterion::{Criterion, criterion_group, criterion_main}; use kona_mpt::{NoopTrieHinter, NoopTrieProvider, TrieNode}; -use pprof::criterion::{Output, PProfProfiler}; use rand::{Rng, SeedableRng, rngs::StdRng, seq::IteratorRandom}; fn trie(c: &mut Criterion) { @@ -152,7 +151,7 @@ fn trie(c: &mut Criterion) { criterion_group! 
{ name = trie_benches; - config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); + config = Criterion::default(); targets = trie } criterion_main!(trie_benches); diff --git a/rust/kona/crates/proof/mpt/src/errors.rs b/rust/kona/crates/proof/mpt/src/errors.rs new file mode 100644 index 00000000000..cc07e3c7d97 --- /dev/null +++ b/rust/kona/crates/proof/mpt/src/errors.rs @@ -0,0 +1,41 @@ +//! Errors for the `kona-derive` crate. + +use alloc::string::String; +use thiserror::Error; + +/// A [Result] type alias where the error is [`TrieNodeError`]. +pub type TrieNodeResult = Result; + +/// An error type for [`TrieNode`](crate::TrieNode) operations. +#[derive(Error, Debug, PartialEq, Eq)] +pub enum TrieNodeError { + /// Invalid trie node type encountered. + #[error("Invalid trie node type encountered")] + InvalidNodeType, + /// The path was too short to index. + #[error("Path too short")] + PathTooShort, + /// Failed to decode trie node. + #[error("Failed to decode trie node: {0}")] + RLPError(alloy_rlp::Error), + /// Key does not exist in trie. + #[error("Key does not exist in trie.")] + KeyNotFound, + /// Trie node is not a leaf node. + #[error("Trie provider error: {0}")] + Provider(String), +} + +/// A [Result] type alias where the error is [`OrderedListWalkerError`]. +pub type OrderedListWalkerResult = Result; + +/// An error type for [`OrderedListWalker`](crate::OrderedListWalker) operations. +#[derive(Error, Debug, PartialEq, Eq)] +pub enum OrderedListWalkerError { + /// Iterator has already been hydrated, and cannot be re-hydrated until it is exhausted. + #[error("Iterator has already been hydrated, and cannot be re-hydrated until it is exhausted")] + AlreadyHydrated, + /// Trie node error. 
+ #[error("{0}")] + TrieNode(#[from] TrieNodeError), +} diff --git a/rust/kona/crates/proof/mpt/src/lib.rs b/rust/kona/crates/proof/mpt/src/lib.rs new file mode 100644 index 00000000000..04af0c1a5ef --- /dev/null +++ b/rust/kona/crates/proof/mpt/src/lib.rs @@ -0,0 +1,34 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), no_std)] + +extern crate alloc; + +mod errors; +pub use errors::{OrderedListWalkerError, OrderedListWalkerResult, TrieNodeError, TrieNodeResult}; + +mod traits; +pub use traits::{TrieHinter, TrieProvider}; + +mod node; +pub use node::TrieNode; + +mod list_walker; +pub use list_walker::OrderedListWalker; + +mod noop; +pub use noop::{NoopTrieHinter, NoopTrieProvider}; + +mod util; +pub use util::ordered_trie_with_encoder; + +// Re-export [alloy_trie::Nibbles]. +pub use alloy_trie::Nibbles; + +#[cfg(test)] +mod test_util; diff --git a/kona/crates/proof/mpt/src/list_walker.rs b/rust/kona/crates/proof/mpt/src/list_walker.rs similarity index 89% rename from kona/crates/proof/mpt/src/list_walker.rs rename to rust/kona/crates/proof/mpt/src/list_walker.rs index 2a2104c1487..1d18c34c6e7 100644 --- a/kona/crates/proof/mpt/src/list_walker.rs +++ b/rust/kona/crates/proof/mpt/src/list_walker.rs @@ -1,5 +1,5 @@ -//! This module contains the [OrderedListWalker] struct, which allows for traversing an MPT root of -//! a derivable ordered list. +//! This module contains the [`OrderedListWalker`] struct, which allows for traversing an MPT root +//! of a derivable ordered list. 
use crate::{ TrieNode, TrieNodeError, TrieProvider, @@ -10,17 +10,17 @@ use alloy_primitives::{B256, Bytes}; use alloy_rlp::EMPTY_STRING_CODE; use core::marker::PhantomData; -/// A [OrderedListWalker] allows for traversing over a Merkle Patricia Trie containing a derivable +/// A [`OrderedListWalker`] allows for traversing over a Merkle Patricia Trie containing a derivable /// ordered list. /// -/// Once it has been hydrated with [Self::hydrate], the elements in the derivable list can be +/// Once it has been hydrated with [`Self::hydrate`], the elements in the derivable list can be /// iterated over using the [Iterator] implementation. #[derive(Debug, Clone, Eq, PartialEq)] pub struct OrderedListWalker { /// The Merkle Patricia Trie root. root: B256, /// The leaf nodes of the derived list, in order. [None] if the tree has yet to be fully - /// traversed with [Self::hydrate]. + /// traversed with [`Self::hydrate`]. inner: Option>, /// Phantom data _phantom: PhantomData, @@ -30,20 +30,20 @@ impl OrderedListWalker where F: TrieProvider, { - /// Creates a new [OrderedListWalker], yet to be hydrated. + /// Creates a new [`OrderedListWalker`], yet to be hydrated. pub const fn new(root: B256) -> Self { Self { root, inner: None, _phantom: PhantomData } } - /// Creates a new [OrderedListWalker] and hydrates it with [Self::hydrate] and the given fetcher - /// immediately. + /// Creates a new [`OrderedListWalker`] and hydrates it with [`Self::hydrate`] and the given + /// fetcher immediately. pub fn try_new_hydrated(root: B256, fetcher: &F) -> OrderedListWalkerResult { let mut walker = Self { root, inner: None, _phantom: PhantomData }; walker.hydrate(fetcher)?; Ok(walker) } - /// Hydrates the [OrderedListWalker]'s iterator with the leaves of the derivable list. If + /// Hydrates the [`OrderedListWalker`]'s iterator with the leaves of the derivable list. If /// `Self::inner` is [Some], this function will fail fast. 
pub fn hydrate(&mut self, fetcher: &F) -> OrderedListWalkerResult<()> { // Do not allow for re-hydration if `inner` is `Some` and still contains elements. @@ -75,13 +75,13 @@ where Ok(()) } - /// Takes the inner list of the [OrderedListWalker], returning it and setting the inner list to - /// [None]. + /// Takes the inner list of the [`OrderedListWalker`], returning it and setting the inner list + /// to [None]. pub const fn take_inner(&mut self) -> Option> { self.inner.take() } - /// Traverses a [TrieNode], returning all values of child [TrieNode::Leaf] variants. + /// Traverses a [`TrieNode`], returning all values of child [`TrieNode::Leaf`] variants. fn fetch_leaves( trie_node: &TrieNode, fetcher: &F, @@ -89,7 +89,7 @@ where match trie_node { TrieNode::Branch { stack } => { let mut leaf_values = VecDeque::with_capacity(stack.len()); - for item in stack.iter() { + for item in stack { match item { TrieNode::Blinded { commitment } => { // If the string is a hash, we need to grab the preimage for it and @@ -127,7 +127,7 @@ where } /// Grabs the preimage of `hash` using `fetcher`, and attempts to decode the preimage data into - /// a [TrieNode]. Will error if the conversion of `T` into [B256] fails. + /// a [`TrieNode`]. Will error if the conversion of `T` into [B256] fails. fn get_trie_node(hash: T, fetcher: &F) -> OrderedListWalkerResult where T: Into, diff --git a/rust/kona/crates/proof/mpt/src/node.rs b/rust/kona/crates/proof/mpt/src/node.rs new file mode 100644 index 00000000000..7914b4e8df7 --- /dev/null +++ b/rust/kona/crates/proof/mpt/src/node.rs @@ -0,0 +1,851 @@ +//! This module contains the [`TrieNode`] type, which represents a node within a standard Merkle +//! Patricia Trie. 
+ +use crate::{ + TrieHinter, TrieNodeError, TrieProvider, + errors::TrieNodeResult, + util::{rlp_list_element_length, unpack_path_to_nibbles}, +}; +use alloc::{boxed::Box, string::ToString, vec, vec::Vec}; +use alloy_primitives::{B256, Bytes, keccak256}; +use alloy_rlp::{Buf, Decodable, EMPTY_STRING_CODE, Encodable, Header, length_of_length}; +use alloy_trie::{EMPTY_ROOT_HASH, Nibbles}; + +/// The length of the branch list when RLP encoded +const BRANCH_LIST_LENGTH: usize = 17; + +/// The length of a leaf or extension node's RLP encoded list +const LEAF_OR_EXTENSION_LIST_LENGTH: usize = 2; + +/// The number of nibbles traversed in a branch node. +const BRANCH_NODE_NIBBLES: usize = 1; + +/// Prefix for even-nibbled extension node paths. +const PREFIX_EXTENSION_EVEN: u8 = 0; + +/// Prefix for odd-nibbled extension node paths. +const PREFIX_EXTENSION_ODD: u8 = 1; + +/// Prefix for even-nibbled leaf node paths. +const PREFIX_LEAF_EVEN: u8 = 2; + +/// Prefix for odd-nibbled leaf node paths. +const PREFIX_LEAF_ODD: u8 = 3; + +/// Nibble bit width. +const NIBBLE_WIDTH: usize = 4; + +/// A [`TrieNode`] is a node within a standard Ethereum Merkle Patricia Trie. In this +/// implementation, keys are expected to be fixed-size nibble sequences, and values are arbitrary +/// byte sequences. +/// +/// The [`TrieNode`] has several variants: +/// - [`TrieNode::Empty`] represents an empty node. +/// - [`TrieNode::Blinded`] represents a node that has been blinded by a commitment. +/// - [`TrieNode::Leaf`] represents a 2-item node with the encoding `rlp([encoded_path, value])`. +/// - [`TrieNode::Extension`] represents a 2-item pointer node with the encoding `rlp([encoded_path, +/// key])`. +/// - [`TrieNode::Branch`] represents a node that refers to up to 16 child nodes with the encoding +/// `rlp([ v0, ..., v15, value ])`. +/// +/// In the Ethereum Merkle Patricia Trie, nodes longer than an encoded 32 byte string (33 total +/// bytes) are blinded with [keccak256] hashes. 
When a node is "opened", it is replaced with the +/// [`TrieNode`] that is decoded from to the preimage of the hash. +/// +/// The [`alloy_rlp::Encodable`] and [`alloy_rlp::Decodable`] traits are implemented for +/// [`TrieNode`], allowing for RLP encoding and decoding of the types for storage and retrieval. The +/// implementation of these traits will implicitly blind nodes that are longer than 32 bytes in +/// length when encoding. When decoding, the implementation will leave blinded nodes in place. +/// +/// ## SAFETY +/// As this implementation only supports uniform key sizes, the [`TrieNode`] data structure will +/// fail to behave correctly if confronted with keys of varying lengths. Namely, this is because it +/// does not support the `value` field in branch nodes, just like the Ethereum Merkle Patricia Trie. +#[derive(Debug, Clone, Eq, PartialEq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum TrieNode { + /// An empty [`TrieNode`] is represented as an [`EMPTY_STRING_CODE`] (0x80). + Empty, + /// A blinded node is a node that has been blinded by a [keccak256] commitment. + Blinded { + /// The commitment that blinds the node. + commitment: B256, + }, + /// A leaf node is a 2-item node with the encoding `rlp([encoded_path, value])` + Leaf { + /// The key of the leaf node + prefix: Nibbles, + /// The value of the leaf node + value: Bytes, + }, + /// An extension node is a 2-item pointer node with the encoding `rlp([encoded_path, key])` + Extension { + /// The path prefix of the extension + prefix: Nibbles, + /// The pointer to the child node + node: Box, + }, + /// A branch node refers to up to 16 child nodes with the encoding + /// `rlp([ v0, ..., v15, value ])` + Branch { + /// The 16 child nodes and value of the branch. + stack: Vec, + }, +} + +impl TrieNode { + /// Creates a new [`TrieNode::Blinded`] node. 
+ /// + /// ## Takes + /// - `commitment` - The commitment that blinds the node + /// + /// ## Returns + /// - `Self` - The new blinded [`TrieNode`]. + pub const fn new_blinded(commitment: B256) -> Self { + Self::Blinded { commitment } + } + + /// Blinds the [`TrieNode`]. Alternatively, if the [`TrieNode`] is a [`TrieNode::Blinded`] node + /// already, its commitment is returned directly. + pub fn blind(&self) -> B256 { + match self { + Self::Blinded { commitment } => *commitment, + Self::Empty => EMPTY_ROOT_HASH, + _ => { + let mut rlp_buf = Vec::with_capacity(self.length()); + self.encode(&mut rlp_buf); + keccak256(rlp_buf) + } + } + } + + /// Unblinds the [`TrieNode`] if it is a [`TrieNode::Blinded`] node. + pub fn unblind(&mut self, fetcher: &F) -> TrieNodeResult<()> { + if let Self::Blinded { commitment } = self { + if *commitment == EMPTY_ROOT_HASH { + // If the commitment is the empty root hash, the node is empty, and we don't need to + // reach out to the fetcher. + *self = Self::Empty; + } else { + *self = fetcher + .trie_node_by_hash(*commitment) + .map_err(|e| TrieNodeError::Provider(e.to_string()))?; + } + } + Ok(()) + } + + /// Walks down the trie to a leaf value with the given key, if it exists. Preimages for blinded + /// nodes along the path are fetched using the `fetcher` function, and persisted in the inner + /// [`TrieNode`] elements. + /// + /// ## Takes + /// - `self` - The root trie node + /// - `path` - The nibbles representation of the path to the leaf node + /// - `fetcher` - The preimage fetcher for intermediate blinded nodes + /// + /// ## Returns + /// - `Err(_)` - Could not retrieve the node with the given key from the trie. + /// - `Ok(None)` - The node with the given key does not exist in the trie. 
+ /// - `Ok(Some(_))` - The value of the node + pub fn open<'a, F: TrieProvider>( + &'a mut self, + path: &Nibbles, + fetcher: &F, + ) -> TrieNodeResult> { + match self { + Self::Branch { stack } => { + let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; + stack + .get_mut(branch_nibble) + .map(|node| node.open(&path.slice(BRANCH_NODE_NIBBLES..), fetcher)) + .unwrap_or(Ok(None)) + } + Self::Leaf { prefix, value } => Ok((path == prefix).then_some(value)), + Self::Extension { prefix, node } => { + if path.slice(..prefix.len()) == *prefix { + // Follow extension branch + node.unblind(fetcher)?; + node.open(&path.slice(prefix.len()..), fetcher) + } else { + Ok(None) + } + } + Self::Blinded { .. } => { + self.unblind(fetcher)?; + self.open(path, fetcher) + } + Self::Empty => Ok(None), + } + } + + /// Inserts a [`TrieNode`] at the given path into the trie rooted at Self. + /// + /// ## Takes + /// - `self` - The root trie node + /// - `path` - The nibbles representation of the path to the leaf node + /// - `node` - The node to insert at the given path + /// - `fetcher` - The preimage fetcher for intermediate blinded nodes + /// + /// ## Returns + /// - `Err(_)` - Could not insert the node at the given path in the trie. + /// - `Ok(())` - The node was successfully inserted at the given path. + pub fn insert( + &mut self, + path: &Nibbles, + value: Bytes, + fetcher: &F, + ) -> TrieNodeResult<()> { + match self { + Self::Empty => { + // If the trie node is null, insert the leaf node at the current path. + *self = Self::Leaf { prefix: *path, value }; + Ok(()) + } + Self::Leaf { prefix, value: leaf_value } => { + let shared_extension_nibbles = path.common_prefix_length(prefix); + + // If all nibbles are shared, update the leaf node with the new value. + if path == prefix { + *self = Self::Leaf { prefix: *prefix, value }; + return Ok(()); + } + + // Create a branch node stack containing the leaf node and the new value. 
+ let mut stack = vec![Self::Empty; BRANCH_LIST_LENGTH]; + + // Insert the shortened extension into the branch stack. + let extension_nibble = + prefix.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? + as usize; + stack[extension_nibble] = Self::Leaf { + prefix: prefix.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), + value: leaf_value.clone(), + }; + + // Insert the new value into the branch stack. + let branch_nibble_new = + path.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? as usize; + stack[branch_nibble_new] = Self::Leaf { + prefix: path.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), + value, + }; + + // Replace the leaf node with the branch if no nibbles are shared, else create an + // extension. + if shared_extension_nibbles == 0 { + *self = Self::Branch { stack }; + } else { + let raw_ext_nibbles = path.slice(..shared_extension_nibbles); + *self = Self::Extension { + prefix: raw_ext_nibbles, + node: Box::new(Self::Branch { stack }), + }; + } + Ok(()) + } + Self::Extension { prefix, node } => { + let shared_extension_nibbles = path.common_prefix_length(prefix); + if shared_extension_nibbles == prefix.len() { + node.insert(&path.slice(shared_extension_nibbles..), value, fetcher)?; + return Ok(()); + } + + // Create a branch node stack containing the leaf node and the new value. + let mut stack = vec![Self::Empty; BRANCH_LIST_LENGTH]; + + // Insert the shortened extension into the branch stack. + let extension_nibble = + prefix.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? + as usize; + let new_prefix = prefix.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..); + stack[extension_nibble] = if new_prefix.is_empty() { + // In the case that the extension node no longer has a prefix, insert the node + // verbatim into the branch. + node.as_ref().clone() + } else { + Self::Extension { prefix: new_prefix, node: node.clone() } + }; + + // Insert the new value into the branch stack. 
+ let branch_nibble_new = + path.get(shared_extension_nibbles).ok_or(TrieNodeError::PathTooShort)? as usize; + stack[branch_nibble_new] = Self::Leaf { + prefix: path.slice(shared_extension_nibbles + BRANCH_NODE_NIBBLES..), + value, + }; + + // Replace the extension node with the branch if no nibbles are shared, else create + // an extension. + if shared_extension_nibbles == 0 { + *self = Self::Branch { stack }; + } else { + let extension = path.slice(..shared_extension_nibbles); + *self = Self::Extension { + prefix: extension, + node: Box::new(Self::Branch { stack }), + }; + } + Ok(()) + } + Self::Branch { stack } => { + // Follow the branch node to the next node in the path. + let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; + stack[branch_nibble].insert(&path.slice(BRANCH_NODE_NIBBLES..), value, fetcher) + } + Self::Blinded { .. } => { + // If a blinded node is approached, reveal the node and continue the insertion + // recursion. + self.unblind(fetcher)?; + self.insert(path, value, fetcher) + } + } + } + + /// Deletes a node in the trie at the given path. + /// + /// ## Takes + /// - `self` - The root trie node + /// - `path` - The nibbles representation of the path to the leaf node + /// + /// ## Returns + /// - `Err(_)` - Could not delete the node at the given path in the trie. + /// - `Ok(())` - The node was successfully deleted at the given path. + pub fn delete( + &mut self, + path: &Nibbles, + fetcher: &F, + hinter: &H, + ) -> TrieNodeResult<()> { + match self { + Self::Empty => Err(TrieNodeError::KeyNotFound), + Self::Leaf { prefix, .. 
} => { + if path == prefix { + *self = Self::Empty; + Ok(()) + } else { + Err(TrieNodeError::KeyNotFound) + } + } + Self::Extension { prefix, node } => { + let shared_nibbles = path.common_prefix_length(prefix); + if shared_nibbles < prefix.len() { + return Err(TrieNodeError::KeyNotFound); + } else if shared_nibbles == path.len() { + *self = Self::Empty; + return Ok(()); + } + + node.delete(&path.slice(prefix.len()..), fetcher, hinter)?; + + // Simplify extension if possible after the deletion + self.collapse_if_possible(fetcher, hinter) + } + Self::Branch { stack } => { + let branch_nibble = path.get(0).ok_or(TrieNodeError::PathTooShort)? as usize; + stack[branch_nibble].delete(&path.slice(BRANCH_NODE_NIBBLES..), fetcher, hinter)?; + + // Simplify the branch if possible after the deletion + self.collapse_if_possible(fetcher, hinter) + } + Self::Blinded { .. } => { + self.unblind(fetcher)?; + self.delete(path, fetcher, hinter) + } + } + } + + /// If applicable, collapses `self` into a more compact form. + /// + /// ## Takes + /// - `self` - The root trie node + /// + /// ## Returns + /// - `Ok(())` - The node was successfully collapsed + /// - `Err(_)` - Could not collapse the node + fn collapse_if_possible( + &mut self, + fetcher: &F, + hinter: &H, + ) -> TrieNodeResult<()> { + match self { + Self::Extension { prefix, node } => match node.as_mut() { + Self::Extension { prefix: child_prefix, node: child_node } => { + // Double extensions are collapsed into a single extension. + let new_prefix = Nibbles::from_nibbles_unchecked( + [prefix.to_vec(), child_prefix.to_vec()].concat(), + ); + *self = Self::Extension { prefix: new_prefix, node: child_node.clone() }; + } + Self::Leaf { prefix: child_prefix, value: child_value } => { + // If the child node is a leaf, convert the extension into a leaf with the full + // path. 
+ let new_prefix = Nibbles::from_nibbles_unchecked( + [prefix.to_vec(), child_prefix.to_vec()].concat(), + ); + *self = Self::Leaf { prefix: new_prefix, value: child_value.clone() }; + } + Self::Empty => { + // If the child node is empty, convert the extension into an empty node. + *self = Self::Empty; + } + _ => { + // If the child is a (blinded?) branch then no need for collapse + // because deletion did not collapse the (blinded?) branch + } + }, + Self::Branch { stack } => { + // Count non-empty children + let mut non_empty_children = stack + .iter_mut() + .enumerate() + .filter(|(_, node)| !matches!(node, Self::Empty)) + .collect::>(); + + if non_empty_children.len() == 1 { + let (index, non_empty_node) = &mut non_empty_children[0]; + + // If only one non-empty child and no value, convert to extension or leaf + match non_empty_node { + Self::Leaf { prefix, value } => { + let new_prefix = Nibbles::from_nibbles_unchecked( + [&[*index as u8], prefix.to_vec().as_slice()].concat(), + ); + *self = Self::Leaf { prefix: new_prefix, value: value.clone() }; + } + Self::Extension { prefix, node } => { + let new_prefix = Nibbles::from_nibbles_unchecked( + [&[*index as u8], prefix.to_vec().as_slice()].concat(), + ); + *self = Self::Extension { prefix: new_prefix, node: node.clone() }; + } + Self::Branch { .. } => { + *self = Self::Extension { + prefix: Nibbles::from_nibbles_unchecked([*index as u8]), + node: Box::new(non_empty_node.clone()), + }; + } + Self::Blinded { commitment } => { + // In this special case, we need to send a hint to fetch the preimage of + // the blinded node, since it is outside of the paths that have been + // traversed so far. 
+ hinter + .hint_trie_node(*commitment) + .map_err(|e| TrieNodeError::Provider(e.to_string()))?; + + non_empty_node.unblind(fetcher)?; + self.collapse_if_possible(fetcher, hinter)?; + } + _ => {} + }; + } + } + _ => {} + } + Ok(()) + } + + /// Attempts to convert a `path` and `value` into a [`TrieNode`], if they correspond to a + /// [`TrieNode::Leaf`] or [`TrieNode::Extension`]. + /// + /// **Note:** This function assumes that the passed reader has already consumed the RLP header + /// of the [`TrieNode::Leaf`] or [`TrieNode::Extension`] node. + fn try_decode_leaf_or_extension_payload(buf: &mut &[u8]) -> TrieNodeResult { + // Decode the path and value of the leaf or extension node. + let path = Bytes::decode(buf).map_err(TrieNodeError::RLPError)?; + let first_nibble = path[0] >> NIBBLE_WIDTH; + let first = match first_nibble { + PREFIX_EXTENSION_ODD | PREFIX_LEAF_ODD => Some(path[0] & 0x0F), + PREFIX_EXTENSION_EVEN | PREFIX_LEAF_EVEN => None, + _ => return Err(TrieNodeError::InvalidNodeType), + }; + + // Check the high-order nibble of the path to determine the type of node. + match first_nibble { + PREFIX_EXTENSION_EVEN | PREFIX_EXTENSION_ODD => { + // Extension node + let extension_node_value = Self::decode(buf).map_err(TrieNodeError::RLPError)?; + Ok(Self::Extension { + prefix: unpack_path_to_nibbles(first, path[1..].as_ref()), + node: Box::new(extension_node_value), + }) + } + PREFIX_LEAF_EVEN | PREFIX_LEAF_ODD => { + // Leaf node + let value = Bytes::decode(buf).map_err(TrieNodeError::RLPError)?; + Ok(Self::Leaf { prefix: unpack_path_to_nibbles(first, path[1..].as_ref()), value }) + } + _ => Err(TrieNodeError::InvalidNodeType), + } + } + + /// Returns the RLP payload length of the [`TrieNode`]. 
+ pub(crate) fn payload_length(&self) -> usize { + match self { + Self::Empty => 0, + Self::Blinded { commitment } => commitment.len(), + Self::Leaf { prefix, value } => { + let mut encoded_key_len = prefix.len() / 2 + 1; + if encoded_key_len != 1 { + encoded_key_len += length_of_length(encoded_key_len); + } + encoded_key_len + value.length() + } + Self::Extension { prefix, node } => { + let mut encoded_key_len = prefix.len() / 2 + 1; + if encoded_key_len != 1 { + encoded_key_len += length_of_length(encoded_key_len); + } + encoded_key_len + node.blinded_length() + } + Self::Branch { stack } => { + // In branch nodes, if an element is longer than an encoded 32 byte string, it is + // blinded. Assuming we have an open trie node, we must re-hash the + // elements that are longer than an encoded 32 byte string + // in length. + stack.iter().fold(0, |mut acc, node| { + acc += node.blinded_length(); + acc + }) + } + } + } + + /// Returns the encoded length of the trie node, blinding it if it is longer than an encoded + /// [B256] string in length. + /// + /// ## Returns + /// - `usize` - The encoded length of the value, blinded if the raw encoded length is longer + /// than a [B256]. + fn blinded_length(&self) -> usize { + let encoded_len = self.length(); + if encoded_len >= B256::ZERO.len() { B256::ZERO.length() } else { encoded_len } + } +} + +impl Encodable for TrieNode { + fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { + let payload_length = self.payload_length(); + match self { + Self::Empty => out.put_u8(EMPTY_STRING_CODE), + Self::Blinded { commitment } => commitment.encode(out), + Self::Leaf { prefix, value } => { + // Encode the leaf node's header and key-value pair. + Header { list: true, payload_length }.encode(out); + alloy_trie::nodes::encode_path_leaf(prefix, true).as_slice().encode(out); + value.encode(out); + } + Self::Extension { prefix, node } => { + // Encode the extension node's header, prefix, and pointer node. 
+ Header { list: true, payload_length }.encode(out); + alloy_trie::nodes::encode_path_leaf(prefix, false).as_slice().encode(out); + if node.length() >= B256::ZERO.len() { + let hash = node.blind(); + hash.encode(out); + } else { + node.encode(out); + } + } + Self::Branch { stack } => { + // In branch nodes, if an element is longer than 32 bytes in length, it is blinded. + // Assuming we have an open trie node, we must re-hash the elements + // that are longer than 32 bytes in length. + Header { list: true, payload_length }.encode(out); + for node in stack { + if node.length() >= B256::ZERO.len() { + let hash = node.blind(); + hash.encode(out); + } else { + node.encode(out); + } + } + } + } + } + + fn length(&self) -> usize { + match self { + Self::Empty => 1, + Self::Blinded { commitment } => commitment.length(), + Self::Leaf { .. } | Self::Extension { .. } | Self::Branch { .. } => { + let payload_length = self.payload_length(); + Header { list: true, payload_length }.length() + payload_length + } + } + } +} + +impl Decodable for TrieNode { + /// Attempts to decode the [`TrieNode`]. + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + // Peek at the header to determine the type of Trie node we're currently decoding. + let header = Header::decode(&mut (**buf).as_ref())?; + + if header.list { + // Peek at the RLP stream to determine the number of elements in the list. + let list_length = rlp_list_element_length(&mut (**buf).as_ref())?; + + match list_length { + BRANCH_LIST_LENGTH => { + let list = Vec::::decode(buf)?; + Ok(Self::Branch { stack: list }) + } + LEAF_OR_EXTENSION_LIST_LENGTH => { + // Advance the buffer to the start of the list payload. + buf.advance(header.length()); + // Decode the leaf or extension node's raw payload. 
+ Self::try_decode_leaf_or_extension_payload(buf) + .map_err(|_| alloy_rlp::Error::UnexpectedList) + } + _ => Err(alloy_rlp::Error::UnexpectedLength), + } + } else { + match header.payload_length { + 0 => { + buf.advance(header.length()); + Ok(Self::Empty) + } + 32 => { + let commitment = B256::decode(buf)?; + Ok(Self::new_blinded(commitment)) + } + _ => Err(alloy_rlp::Error::UnexpectedLength), + } + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::{ + NoopTrieHinter, NoopTrieProvider, TrieNode, ordered_trie_with_encoder, + test_util::TrieNodeProvider, + }; + use alloc::{collections::BTreeMap, vec, vec::Vec}; + use alloy_primitives::{b256, bytes, hex, keccak256}; + use alloy_rlp::{Decodable, EMPTY_STRING_CODE, Encodable}; + use alloy_trie::{HashBuilder, Nibbles}; + use rand::prelude::IteratorRandom; + + #[test] + fn test_empty_blinded() { + let trie_node = TrieNode::Empty; + assert_eq!(trie_node.blind(), EMPTY_ROOT_HASH); + } + + #[test] + fn test_decode_branch() { + const BRANCH_RLP: [u8; 83] = hex!( + "f851a0eb08a66a94882454bec899d3e82952dcc918ba4b35a09a84acd98019aef4345080808080808080a05d87a81d9bbf5aee61a6bfeab3a5643347e2c751b36789d988a5b6b163d496518080808080808080" + ); + let expected = TrieNode::Branch { + stack: vec![ + TrieNode::new_blinded(b256!( + "eb08a66a94882454bec899d3e82952dcc918ba4b35a09a84acd98019aef43450" + )), + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::new_blinded(b256!( + "5d87a81d9bbf5aee61a6bfeab3a5643347e2c751b36789d988a5b6b163d49651" + )), + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + ], + }; + + let mut rlp_buf = Vec::with_capacity(expected.length()); + expected.encode(&mut rlp_buf); + assert_eq!(rlp_buf.len(), BRANCH_RLP.len()); + assert_eq!(expected.length(), BRANCH_RLP.len()); + + assert_eq!(expected, 
TrieNode::decode(&mut BRANCH_RLP.as_slice()).unwrap()); + assert_eq!(rlp_buf.as_slice(), &BRANCH_RLP[..]); + } + + #[test] + fn test_encode_decode_extension_open_short() { + const EXTENSION_RLP: [u8; 19] = hex!("d28300646fcd308b8a74657374207468726565"); + + let opened = TrieNode::Leaf { + prefix: Nibbles::from_nibbles([0x00]), + value: bytes!("8a74657374207468726565"), + }; + let expected = + TrieNode::Extension { prefix: Nibbles::unpack(bytes!("646f")), node: Box::new(opened) }; + + let mut rlp_buf = Vec::with_capacity(expected.length()); + expected.encode(&mut rlp_buf); + + assert_eq!(expected, TrieNode::decode(&mut EXTENSION_RLP.as_slice()).unwrap()); + } + + #[test] + fn test_encode_decode_extension_blinded_long() { + const EXTENSION_RLP: [u8; 38] = + hex!("e58300646fa0f3fe8b3c5b21d3e52860f1e4a5825a6100bb341069c1e88f4ebf6bd98de0c190"); + let mut rlp_buf = Vec::new(); + + let opened = + TrieNode::Leaf { prefix: Nibbles::from_nibbles([0x00]), value: [0xFF; 64].into() }; + opened.encode(&mut rlp_buf); + let blinded = TrieNode::new_blinded(keccak256(&rlp_buf)); + + rlp_buf.clear(); + let opened_extension = + TrieNode::Extension { prefix: Nibbles::unpack(bytes!("646f")), node: Box::new(opened) }; + opened_extension.encode(&mut rlp_buf); + + let expected = TrieNode::Extension { + prefix: Nibbles::unpack(bytes!("646f")), + node: Box::new(blinded), + }; + assert_eq!(expected, TrieNode::decode(&mut EXTENSION_RLP.as_slice()).unwrap()); + } + + #[test] + fn test_decode_leaf() { + const LEAF_RLP: [u8; 11] = hex!("ca8320646f8576657262FF"); + let expected = + TrieNode::Leaf { prefix: Nibbles::unpack(bytes!("646f")), value: bytes!("76657262FF") }; + assert_eq!(expected, TrieNode::decode(&mut LEAF_RLP.as_slice()).unwrap()); + } + + #[test] + fn test_retrieve_from_trie_simple() { + const VALUES: [&str; 5] = ["yeah", "dog", ", ", "laminar", "flow"]; + + let mut trie = ordered_trie_with_encoder(&VALUES, |v, buf| { + let mut encoded_value = Vec::with_capacity(v.length()); + 
v.encode(&mut encoded_value); + TrieNode::new_blinded(keccak256(encoded_value)).encode(buf); + }); + let root = trie.root(); + + let preimages = trie.take_proof_nodes().into_inner().into_iter().fold( + BTreeMap::default(), + |mut acc, (_, value)| { + acc.insert(keccak256(value.as_ref()), value); + acc + }, + ); + let fetcher = TrieNodeProvider::new(preimages); + + let mut root_node = fetcher.trie_node_by_hash(root).unwrap(); + for (i, value) in VALUES.iter().enumerate() { + let path_nibbles = Nibbles::unpack([if i == 0 { EMPTY_STRING_CODE } else { i as u8 }]); + let v = root_node.open(&path_nibbles, &fetcher).unwrap().unwrap(); + + let mut encoded_value = Vec::with_capacity(value.length()); + value.encode(&mut encoded_value); + let mut encoded_node = Vec::new(); + TrieNode::new_blinded(keccak256(&encoded_value)).encode(&mut encoded_node); + + assert_eq!(v, encoded_node.as_slice()); + } + + let commitment = root_node.blind(); + assert_eq!(commitment, root); + } + + #[test] + fn test_insert_static() { + let mut node = TrieNode::Empty; + let noop_fetcher = NoopTrieProvider; + node.insert(&Nibbles::unpack(hex!("012345")), bytes!("01"), &noop_fetcher).unwrap(); + node.insert(&Nibbles::unpack(hex!("012346")), bytes!("02"), &noop_fetcher).unwrap(); + + let expected = TrieNode::Extension { + prefix: Nibbles::from_nibbles([0, 1, 2, 3, 4]), + node: Box::new(TrieNode::Branch { + stack: vec![ + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Leaf { prefix: Nibbles::default(), value: bytes!("01") }, + TrieNode::Leaf { prefix: Nibbles::default(), value: bytes!("02") }, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + TrieNode::Empty, + ], + }), + }; + + assert_eq!(node, expected); + } + + proptest::proptest! 
{ + /// Differential test for inserting an arbitrary number of keys into an empty `TrieNode` / `HashBuilder`. + #[test] + fn diff_hash_builder_insert(mut keys in proptest::collection::vec(proptest::prelude::any::<[u8; 32]>(), 1..4096)) { + // Ensure the keys are sorted; `HashBuilder` expects sorted keys. + keys.sort(); + + let mut hb = HashBuilder::default(); + let mut node = TrieNode::Empty; + + for key in keys { + hb.add_leaf(Nibbles::unpack(key), key.as_ref()); + node.insert(&Nibbles::unpack(key), key.into(), &NoopTrieProvider).unwrap(); + } + + assert_eq!(node.blind(), hb.root()); + } + + /// Differential test for deleting an arbitrary number of keys from a `TrieNode` / `HashBuilder`. + #[test] + fn diff_hash_builder_delete(mut keys in proptest::collection::vec(proptest::prelude::any::<[u8; 32]>(), 1..4096)) { + // Ensure the keys are sorted; `HashBuilder` expects sorted keys. + keys.sort(); + + let mut hb = HashBuilder::default(); + let mut node = TrieNode::Empty; + + let mut rng = rand::rng(); + let deleted_keys = + keys.clone().into_iter().choose_multiple(&mut rng, 5.min(keys.len())); + + // Insert the keys into the `HashBuilder` and `TrieNode`. + for key in keys { + // Don't add any keys that are to be deleted from the trie node to the `HashBuilder`. + if !deleted_keys.contains(&key) { + hb.add_leaf(Nibbles::unpack(key), key.as_ref()); + } + node.insert(&Nibbles::unpack(key), key.into(), &NoopTrieProvider).unwrap(); + } + + // Delete the keys that were randomly selected from the trie node. + for deleted_key in deleted_keys { + node.delete(&Nibbles::unpack(deleted_key), &NoopTrieProvider, &NoopTrieHinter) + .unwrap(); + } + + // Blind manually, since the single node remaining may be a leaf or empty node, and always must be blinded. 
+ let mut rlp_buf = Vec::with_capacity(node.length()); + node.encode(&mut rlp_buf); + let trie_root = keccak256(rlp_buf); + + assert_eq!(trie_root, hb.root()); + } + } +} diff --git a/kona/crates/proof/mpt/src/noop.rs b/rust/kona/crates/proof/mpt/src/noop.rs similarity index 89% rename from kona/crates/proof/mpt/src/noop.rs rename to rust/kona/crates/proof/mpt/src/noop.rs index a3930ee0315..7bd7f2ec967 100644 --- a/kona/crates/proof/mpt/src/noop.rs +++ b/rust/kona/crates/proof/mpt/src/noop.rs @@ -6,7 +6,7 @@ use alloc::string::String; use alloy_primitives::{Address, B256, U256}; use core::fmt::Debug; -/// The default, no-op implementation of the [TrieProvider] trait, used for testing. +/// The default, no-op implementation of the [`TrieProvider`] trait, used for testing. #[derive(Debug, Clone, Copy)] pub struct NoopTrieProvider; @@ -18,7 +18,7 @@ impl TrieProvider for NoopTrieProvider { } } -/// The default, no-op implementation of the [TrieHinter] trait, used for testing. +/// The default, no-op implementation of the [`TrieHinter`] trait, used for testing. #[derive(Debug, Clone, Copy)] pub struct NoopTrieHinter; diff --git a/kona/crates/proof/mpt/src/test_util.rs b/rust/kona/crates/proof/mpt/src/test_util.rs similarity index 98% rename from kona/crates/proof/mpt/src/test_util.rs rename to rust/kona/crates/proof/mpt/src/test_util.rs index c980d684ebb..188bc32e64a 100644 --- a/kona/crates/proof/mpt/src/test_util.rs +++ b/rust/kona/crates/proof/mpt/src/test_util.rs @@ -126,7 +126,7 @@ pub(crate) async fn get_live_derivable_transactions_list() Ok((root, preimages, consensus_txs)) } -/// A mock [TrieProvider] for testing that serves in-memory preimages. +/// A mock [`TrieProvider`] for testing that serves in-memory preimages. 
pub(crate) struct TrieNodeProvider { preimages: BTreeMap, } diff --git a/rust/kona/crates/proof/mpt/src/traits.rs b/rust/kona/crates/proof/mpt/src/traits.rs new file mode 100644 index 00000000000..92fd4047ef6 --- /dev/null +++ b/rust/kona/crates/proof/mpt/src/traits.rs @@ -0,0 +1,84 @@ +//! Contains the [`TrieProvider`] trait for fetching trie node preimages, contract bytecode, and +//! headers. + +use crate::TrieNode; +use alloy_primitives::{Address, B256, U256}; +use core::fmt::Display; +use op_alloy_rpc_types_engine::OpPayloadAttributes; + +/// The [`TrieProvider`] trait defines the synchronous interface for fetching trie node preimages. +pub trait TrieProvider { + /// The error type for fetching trie node preimages. + type Error: Display; + + /// Fetches the preimage for the given trie node hash. + /// + /// ## Takes + /// - `key`: The key of the trie node to fetch. + /// + /// ## Returns + /// - Ok(TrieNode): The trie node preimage. + /// - `Err(Self::Error)`: If the trie node preimage could not be fetched. + fn trie_node_by_hash(&self, key: B256) -> Result; +} + +/// The [`TrieHinter`] trait defines the synchronous interface for hinting the host to fetch trie +/// node preimages. +pub trait TrieHinter { + /// The error type for hinting trie node preimages. + type Error: Display; + + /// Hints the host to fetch the trie node preimage by hash. + /// + /// ## Takes + /// - `hash`: The hash of the trie node to hint. + /// + /// ## Returns + /// - Ok(()): If the hint was successful. + fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error>; + + /// Hints the host to fetch the trie node preimages on the path to the given address. + /// + /// ## Takes + /// - `address` - The address of the contract whose trie node preimages are to be fetched. + /// - `block_number` - The block number at which the trie node preimages are to be fetched. + /// + /// ## Returns + /// - Ok(()): If the hint was successful. + /// - `Err(Self::Error)`: If the hint was unsuccessful. 
+ fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error>; + + /// Hints the host to fetch the trie node preimages on the path to the storage slot within the + /// given account's storage trie. + /// + /// ## Takes + /// - `address` - The address of the contract whose trie node preimages are to be fetched. + /// - `slot` - The storage slot whose trie node preimages are to be fetched. + /// - `block_number` - The block number at which the trie node preimages are to be fetched. + /// + /// ## Returns + /// - Ok(()): If the hint was successful. + /// - `Err(Self::Error)`: If the hint was unsuccessful. + fn hint_storage_proof( + &self, + address: Address, + slot: U256, + block_number: u64, + ) -> Result<(), Self::Error>; + + /// Hints the host to fetch the execution witness for the [`OpPayloadAttributes`] applied on top + /// of the parent block's state. + /// + /// ## Takes + /// - `parent_hash` - The hash of the parent block. + /// - `op_payload_attributes` - The attributes of the operation payload. + /// + /// ## Returns + /// - Ok(()): If the hint was successful. + /// - `Err(Self::Error)`: If the hint was unsuccessful. + fn hint_execution_witness( + &self, + parent_hash: B256, + op_payload_attributes: &OpPayloadAttributes, + ) -> Result<(), Self::Error>; +} diff --git a/rust/kona/crates/proof/mpt/src/util.rs b/rust/kona/crates/proof/mpt/src/util.rs new file mode 100644 index 00000000000..6ac9d6df681 --- /dev/null +++ b/rust/kona/crates/proof/mpt/src/util.rs @@ -0,0 +1,90 @@ +//! Utilities for `kona-mpt` + +use alloc::vec::Vec; +use alloy_rlp::{Buf, BufMut, Encodable, Header}; +use alloy_trie::{HashBuilder, Nibbles, proof::ProofRetainer}; + +/// Compute a trie root of the collection of items with a custom encoder. 
+pub fn ordered_trie_with_encoder(items: &[T], mut encode: F) -> HashBuilder +where + F: FnMut(&T, &mut dyn BufMut), +{ + let mut index_buffer = Vec::new(); + let mut value_buffer = Vec::new(); + let items_len = items.len(); + + // Store preimages for all intermediates + let path_nibbles = (0..items_len) + .map(|i| { + let index = adjust_index_for_rlp(i, items_len); + index_buffer.clear(); + index.encode(&mut index_buffer); + Nibbles::unpack(&index_buffer) + }) + .collect::>(); + + let mut hb = HashBuilder::default().with_proof_retainer(ProofRetainer::new(path_nibbles)); + for i in 0..items_len { + let index = adjust_index_for_rlp(i, items_len); + + index_buffer.clear(); + index.encode(&mut index_buffer); + + value_buffer.clear(); + encode(&items[index], &mut value_buffer); + + hb.add_leaf(Nibbles::unpack(&index_buffer), &value_buffer); + } + + hb +} + +/// Adjust the index of an item for rlp encoding. +pub(crate) const fn adjust_index_for_rlp(i: usize, len: usize) -> usize { + if i > 0x7f { + i + } else if i == 0x7f || i + 1 == len { + 0 + } else { + i + 1 + } +} + +/// Walks through a RLP list's elements and returns the total number of elements in the list. +/// Returns [`alloy_rlp::Error::UnexpectedString`] if the RLP stream is not a list. +/// +/// ## Takes +/// - `buf` - The RLP stream to walk through +/// +/// ## Returns +/// - `Ok(usize)` - The total number of elements in the list +/// - `Err(_)` - The RLP stream is not a list +pub(crate) fn rlp_list_element_length(buf: &mut &[u8]) -> alloy_rlp::Result { + let header = Header::decode(buf)?; + if !header.list { + return Err(alloy_rlp::Error::UnexpectedString); + } + let len_after_consume = buf.len() - header.payload_length; + + let mut list_element_length = 0; + while buf.len() > len_after_consume { + let header = Header::decode(buf)?; + buf.advance(header.payload_length); + list_element_length += 1; + } + Ok(list_element_length) +} + +/// Unpack node path to nibbles. 
+/// +/// ## Takes +/// - `first` - first nibble of the path if it is odd. Must be <= 0x0F, or will create invalid +/// nibbles. +/// - `rest` - rest of the nibbles packed +/// +/// ## Returns +/// - `Nibbles` - unpacked nibbles +pub(crate) fn unpack_path_to_nibbles(first: Option, rest: &[u8]) -> Nibbles { + let rest = Nibbles::unpack(rest); + Nibbles::from_iter_unchecked(first.into_iter().chain(rest.to_vec())) +} diff --git a/kona/crates/proof/preimage/CHANGELOG.md b/rust/kona/crates/proof/preimage/CHANGELOG.md similarity index 100% rename from kona/crates/proof/preimage/CHANGELOG.md rename to rust/kona/crates/proof/preimage/CHANGELOG.md diff --git a/kona/crates/proof/preimage/Cargo.toml b/rust/kona/crates/proof/preimage/Cargo.toml similarity index 100% rename from kona/crates/proof/preimage/Cargo.toml rename to rust/kona/crates/proof/preimage/Cargo.toml diff --git a/rust/kona/crates/proof/preimage/README.md b/rust/kona/crates/proof/preimage/README.md new file mode 100644 index 00000000000..56007b84434 --- /dev/null +++ b/rust/kona/crates/proof/preimage/README.md @@ -0,0 +1,7 @@ +# `kona-preimage` + +This crate offers a high-level API over the [`Preimage Oracle`][preimage-abi-spec]. It is `no_std` compatible to be used in +`client` programs, and the `host` handles are `async` colored to allow for the `host` programs to reach out to external +data sources to populate the `Preimage Oracle`. + +[preimage-abi-spec]: https://specs.optimism.io/experimental/fault-proof/index.html#pre-image-oracle diff --git a/rust/kona/crates/proof/preimage/src/errors.rs b/rust/kona/crates/proof/preimage/src/errors.rs new file mode 100644 index 00000000000..32659fc7229 --- /dev/null +++ b/rust/kona/crates/proof/preimage/src/errors.rs @@ -0,0 +1,52 @@ +//! Errors for the `kona-preimage` crate. 
+ +use alloc::string::String; +use thiserror::Error; + +/// A [`PreimageOracleError`] is an enum that differentiates pipe-related errors from other errors +/// in the [`PreimageOracleServer`](crate::PreimageOracleServer) and +/// [`HintReaderServer`](crate::HintReaderServer) implementations. +#[derive(Error, Debug)] +pub enum PreimageOracleError { + /// The pipe has been broken. + #[error(transparent)] + IOError(#[from] ChannelError), + /// The preimage key is invalid. + #[error("Invalid preimage key.")] + InvalidPreimageKey, + /// Key not found. + #[error("Key not found.")] + KeyNotFound, + /// Timeout while waiting for preimage. + #[error("Timeout while waiting for preimage.")] + Timeout, + /// Buffer length mismatch. + #[error("Buffer length mismatch. Expected {0}, got {1}.")] + BufferLengthMismatch(usize, usize), + /// Failed to parse hint. + #[error("Failed to parse hint: {0}")] + HintParseFailed(String), + /// Other errors. + #[error("Error in preimage server: {0}")] + Other(String), +} + +/// A [Result] type for the [`PreimageOracleError`] enum. +pub type PreimageOracleResult = Result; + +/// A [`ChannelError`] is an enum that describes the error cases of a [Channel] trait +/// implementation. +/// +/// [Channel]: crate::Channel +#[derive(Error, Debug)] +pub enum ChannelError { + /// The channel is closed. + #[error("Channel is closed.")] + Closed, + /// Unexpected EOF. + #[error("Unexpected EOF in channel read operation.")] + UnexpectedEOF, +} + +/// A [Result] type for the [`ChannelError`] enum. 
+pub type ChannelResult = Result; diff --git a/rust/kona/crates/proof/preimage/src/hint.rs b/rust/kona/crates/proof/preimage/src/hint.rs new file mode 100644 index 00000000000..df821c3947f --- /dev/null +++ b/rust/kona/crates/proof/preimage/src/hint.rs @@ -0,0 +1,223 @@ +use crate::{ + Channel, HintReaderServer, + errors::{PreimageOracleError, PreimageOracleResult}, + traits::{HintRouter, HintWriterClient}, +}; +use alloc::{boxed::Box, format, string::String, vec}; +use async_trait::async_trait; + +/// A [`HintWriter`] is a high-level interface to the hint channel. It provides a way to write hints +/// to the host. +#[derive(Debug, Clone, Copy)] +pub struct HintWriter { + channel: C, +} + +impl HintWriter { + /// Create a new [`HintWriter`] from a [`Channel`]. + pub const fn new(channel: C) -> Self { + Self { channel } + } +} + +#[async_trait] +impl HintWriterClient for HintWriter +where + C: Channel + Send + Sync, +{ + /// Write a hint to the host. This will overwrite any existing hint in the channel, and block + /// until all data has been written. + async fn write(&self, hint: &str) -> PreimageOracleResult<()> { + trace!(target: "hint_writer", "Writing hint \"{hint}\""); + + // Form the hint into a byte buffer. The format is a 4-byte big-endian length prefix + // followed by the hint string. + self.channel.write(u32::to_be_bytes(hint.len() as u32).as_ref()).await?; + self.channel.write(hint.as_bytes()).await?; + + trace!(target: "hint_writer", "Successfully wrote hint"); + + // Read the hint acknowledgement from the host. + let mut hint_ack = [0u8; 1]; + self.channel.read_exact(&mut hint_ack).await?; + + trace!(target: "hint_writer", "Received hint acknowledgement"); + + Ok(()) + } +} + +/// A [`HintReader`] is a router for hints sent by the [`HintWriter`] from the client program. It +/// provides a way for the host to prepare preimages for reading. 
+#[derive(Debug, Clone, Copy)] +pub struct HintReader { + channel: C, +} + +impl HintReader +where + C: Channel, +{ + /// Create a new [`HintReader`] from a [`Channel`]. + pub const fn new(channel: C) -> Self { + Self { channel } + } +} + +#[async_trait] +impl HintReaderServer for HintReader +where + C: Channel + Send + Sync, +{ + async fn next_hint(&self, hint_router: &R) -> PreimageOracleResult<()> + where + R: HintRouter + Send + Sync, + { + // Read the length of the raw hint payload. + let mut len_buf = [0u8; 4]; + self.channel.read_exact(&mut len_buf).await?; + let len = u32::from_be_bytes(len_buf); + + // Read the raw hint payload. + let mut raw_payload = vec![0u8; len as usize]; + self.channel.read_exact(raw_payload.as_mut_slice()).await?; + let payload = match String::from_utf8(raw_payload) { + Ok(p) => p, + Err(e) => { + // Write back on error to prevent blocking the client. + self.channel.write(&[0x00]).await?; + + return Err(PreimageOracleError::Other(format!( + "Failed to decode hint payload: {e}" + ))); + } + }; + + trace!(target: "hint_reader", "Successfully read hint: \"{payload}\""); + + // Route the hint + if let Err(e) = hint_router.route_hint(payload).await { + // Write back on error to prevent blocking the client. + self.channel.write(&[0x00]).await?; + + error!(target: "hint_reader", "Failed to route hint: {e}"); + return Err(e); + } + + // Write back an acknowledgement to the client to unblock their process. 
+ self.channel.write(&[0x00]).await?; + + trace!(target: "hint_reader", "Successfully routed and acknowledged hint"); + + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::native_channel::BidirectionalChannel; + use alloc::{sync::Arc, vec::Vec}; + use tokio::sync::Mutex; + + struct TestRouter { + incoming_hints: Arc>>, + } + + #[async_trait] + impl HintRouter for TestRouter { + async fn route_hint(&self, hint: String) -> PreimageOracleResult<()> { + self.incoming_hints.lock().await.push(hint); + Ok(()) + } + } + + struct TestFailRouter; + + #[async_trait] + impl HintRouter for TestFailRouter { + async fn route_hint(&self, _hint: String) -> PreimageOracleResult<()> { + Err(PreimageOracleError::KeyNotFound) + } + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_unblock_on_bad_utf8() { + let mock_data = [0xf0, 0x90, 0x28, 0xbc]; + + let hint_channel = BidirectionalChannel::new().unwrap(); + + let client = tokio::task::spawn(async move { + let hint_writer = HintWriter::new(hint_channel.client); + + #[allow(invalid_from_utf8_unchecked)] + hint_writer.write(unsafe { alloc::str::from_utf8_unchecked(&mock_data) }).await + }); + let host = tokio::task::spawn(async move { + let router = TestRouter { incoming_hints: Default::default() }; + + let hint_reader = HintReader::new(hint_channel.host); + hint_reader.next_hint(&router).await + }); + + let (c, h) = tokio::join!(client, host); + c.unwrap().unwrap(); + assert!(h.unwrap().is_err_and(|e| { + let PreimageOracleError::Other(e) = e else { + return false; + }; + e.contains("Failed to decode hint payload") + })); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_unblock_on_fetch_failure() { + const MOCK_DATA: &str = "test-hint 0xfacade"; + + let hint_channel = BidirectionalChannel::new().unwrap(); + + let client = tokio::task::spawn(async move { + let hint_writer = HintWriter::new(hint_channel.client); + + hint_writer.write(MOCK_DATA).await 
+ }); + let host = tokio::task::spawn(async move { + let hint_reader = HintReader::new(hint_channel.host); + hint_reader.next_hint(&TestFailRouter).await + }); + + let (c, h) = tokio::join!(client, host); + c.unwrap().unwrap(); + assert!(h.unwrap().is_err_and(|e| matches!(e, PreimageOracleError::KeyNotFound))); + } + + #[tokio::test(flavor = "multi_thread", worker_threads = 2)] + async fn test_hint_client_and_host() { + const MOCK_DATA: &str = "test-hint 0xfacade"; + + let incoming_hints = Arc::new(Mutex::new(Vec::new())); + let hint_channel = BidirectionalChannel::new().unwrap(); + + let client = tokio::task::spawn(async move { + let hint_writer = HintWriter::new(hint_channel.client); + + hint_writer.write(MOCK_DATA).await + }); + let host = tokio::task::spawn({ + let incoming_hints_ref = Arc::clone(&incoming_hints); + async move { + let router = TestRouter { incoming_hints: incoming_hints_ref }; + + let hint_reader = HintReader::new(hint_channel.host); + hint_reader.next_hint(&router).await.unwrap(); + } + }); + + let _ = tokio::join!(client, host); + let mut hints = incoming_hints.lock().await; + + assert_eq!(hints.len(), 1); + let h = hints.remove(0); + assert_eq!(h, MOCK_DATA); + } +} diff --git a/kona/crates/proof/preimage/src/key.rs b/rust/kona/crates/proof/preimage/src/key.rs similarity index 88% rename from kona/crates/proof/preimage/src/key.rs rename to rust/kona/crates/proof/preimage/src/key.rs index 0ef1a383ff2..14925723c26 100644 --- a/kona/crates/proof/preimage/src/key.rs +++ b/rust/kona/crates/proof/preimage/src/key.rs @@ -1,4 +1,4 @@ -//! Contains the [PreimageKey] type, which is used to identify preimages that may be fetched from +//! Contains the [`PreimageKey`] type, which is used to identify preimages that may be fetched from //! the preimage oracle. use alloy_primitives::{B256, Keccak256, U256}; @@ -26,7 +26,7 @@ pub enum PreimageKeyType { /// low-order 31 bytes of the preimage's `keccak256` digest to the preimage itself. 
#[default] Keccak256 = 2, - /// GlobalGeneric key types are reserved for future use. + /// `GlobalGeneric` key types are reserved for future use. GlobalGeneric = 3, /// Sha256 key types are global and context independent. Preimages are mapped from the /// low-order 31 bytes of the preimage's `sha256` digest to the preimage itself. @@ -79,31 +79,31 @@ pub struct PreimageKey { } impl PreimageKey { - /// Creates a new [PreimageKey] from a 32-byte value and a [PreimageKeyType]. The 32-byte value - /// will be truncated to 31 bytes by taking the low-order 31 bytes. + /// Creates a new [`PreimageKey`] from a 32-byte value and a [`PreimageKeyType`]. The 32-byte + /// value will be truncated to 31 bytes by taking the low-order 31 bytes. pub fn new(key: [u8; 32], key_type: PreimageKeyType) -> Self { let mut data = [0u8; 31]; data.copy_from_slice(&key[1..]); Self { data, key_type } } - /// Creates a new local [PreimageKey] from a 64-bit local identifier. The local identifier will - /// be written into the low-order 8 bytes of the big-endian 31-byte data field. + /// Creates a new local [`PreimageKey`] from a 64-bit local identifier. The local identifier + /// will be written into the low-order 8 bytes of the big-endian 31-byte data field. pub fn new_local(local_ident: u64) -> Self { let mut data = [0u8; 31]; data[23..].copy_from_slice(&local_ident.to_be_bytes()); Self { data, key_type: PreimageKeyType::Local } } - /// Creates a new keccak256 [PreimageKey] from a 32-byte keccak256 digest. The digest will be + /// Creates a new keccak256 [`PreimageKey`] from a 32-byte keccak256 digest. The digest will be /// truncated to 31 bytes by taking the low-order 31 bytes. pub fn new_keccak256(digest: [u8; 32]) -> Self { Self::new(digest, PreimageKeyType::Keccak256) } - /// Creates a new precompile [PreimageKey] from a precompile address and input. 
The key will be - /// constructed as `keccak256(precompile_addr ++ input)`, and then the high-order byte of the - /// digest will be set to the type byte. + /// Creates a new precompile [`PreimageKey`] from a precompile address and input. The key will + /// be constructed as `keccak256(precompile_addr ++ input)`, and then the high-order byte of + /// the digest will be set to the type byte. pub fn new_precompile(precompile_addr: [u8; 20], input: &[u8]) -> Self { let mut data = [0u8; 31]; @@ -115,12 +115,12 @@ impl PreimageKey { Self { data, key_type: PreimageKeyType::Precompile } } - /// Returns the [PreimageKeyType] for the [PreimageKey]. + /// Returns the [`PreimageKeyType`] for the [`PreimageKey`]. pub const fn key_type(&self) -> PreimageKeyType { self.key_type } - /// Returns the value of the [PreimageKey] as a [U256]. + /// Returns the value of the [`PreimageKey`] as a [`U256`]. pub const fn key_value(&self) -> U256 { U256::from_be_slice(self.data.as_slice()) } diff --git a/rust/kona/crates/proof/preimage/src/lib.rs b/rust/kona/crates/proof/preimage/src/lib.rs new file mode 100644 index 00000000000..d778674f718 --- /dev/null +++ b/rust/kona/crates/proof/preimage/src/lib.rs @@ -0,0 +1,35 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +#[macro_use] +extern crate tracing; + +pub mod errors; + +mod key; +pub use key::{PreimageKey, PreimageKeyType}; + +mod oracle; +pub use oracle::{OracleReader, OracleServer}; + +mod hint; +pub use hint::{HintReader, HintWriter}; + +mod traits; +pub use traits::{ + Channel, CommsClient, 
HintReaderServer, HintRouter, HintWriterClient, PreimageFetcher, + PreimageOracleClient, PreimageOracleServer, PreimageServerBackend, +}; + +#[cfg(feature = "std")] +mod native_channel; +#[cfg(feature = "std")] +pub use native_channel::{BidirectionalChannel, NativeChannel}; diff --git a/kona/crates/proof/preimage/src/native_channel.rs b/rust/kona/crates/proof/preimage/src/native_channel.rs similarity index 92% rename from kona/crates/proof/preimage/src/native_channel.rs rename to rust/kona/crates/proof/preimage/src/native_channel.rs index db02e513e69..d1b93270a08 100644 --- a/kona/crates/proof/preimage/src/native_channel.rs +++ b/rust/kona/crates/proof/preimage/src/native_channel.rs @@ -1,4 +1,4 @@ -//! Native implementation of the [Channel] trait, backed by [async_channel]'s unbounded +//! Native implementation of the [Channel] trait, backed by [`async_channel`]'s unbounded //! channel primitives. use crate::{ @@ -19,7 +19,7 @@ pub struct BidirectionalChannel { } impl BidirectionalChannel { - /// Creates a [BidirectionalChannel] instance. + /// Creates a [`BidirectionalChannel`] instance. pub fn new() -> Result { let (bw, ar) = unbounded(); let (aw, br) = unbounded(); diff --git a/kona/crates/proof/preimage/src/oracle.rs b/rust/kona/crates/proof/preimage/src/oracle.rs similarity index 96% rename from kona/crates/proof/preimage/src/oracle.rs rename to rust/kona/crates/proof/preimage/src/oracle.rs index 076313049e4..f049273070f 100644 --- a/kona/crates/proof/preimage/src/oracle.rs +++ b/rust/kona/crates/proof/preimage/src/oracle.rs @@ -5,7 +5,7 @@ use crate::{ }; use alloc::{boxed::Box, vec::Vec}; -/// An [OracleReader] is a high-level interface to the preimage oracle channel. +/// An [`OracleReader`] is a high-level interface to the preimage oracle channel. #[derive(Debug, Clone, Copy)] pub struct OracleReader { channel: C, @@ -15,7 +15,7 @@ impl OracleReader where C: Channel, { - /// Create a new [OracleReader] from a [Channel]. 
+ /// Create a new [`OracleReader`] from a [`Channel`]. pub const fn new(channel: C) -> Self { Self { channel } } @@ -90,7 +90,7 @@ where } } -/// An [OracleServer] is a router for the host to serve data back to the client [OracleReader]. +/// An [`OracleServer`] is a router for the host to serve data back to the client [`OracleReader`]. #[derive(Debug, Clone, Copy)] pub struct OracleServer { channel: C, @@ -100,7 +100,7 @@ impl OracleServer where C: Channel, { - /// Create a new [OracleServer] from a [Channel]. + /// Create a new [`OracleServer`] from a [`Channel`]. pub const fn new(channel: C) -> Self { Self { channel } } diff --git a/rust/kona/crates/proof/preimage/src/traits.rs b/rust/kona/crates/proof/preimage/src/traits.rs new file mode 100644 index 00000000000..7f8f8345ba8 --- /dev/null +++ b/rust/kona/crates/proof/preimage/src/traits.rs @@ -0,0 +1,144 @@ +use crate::{ + PreimageKey, + errors::{ChannelResult, PreimageOracleResult}, +}; +use alloc::{boxed::Box, string::String, vec::Vec}; +use async_trait::async_trait; + +/// A [`PreimageOracleClient`] is a high-level interface to read data from the host, keyed by a +/// [`PreimageKey`]. +#[async_trait] +pub trait PreimageOracleClient { + /// Get the data corresponding to the currently set key from the host. Return the data in a new + /// heap allocated `Vec` + /// + /// # Returns + /// - `Ok(Vec)` if the data was successfully fetched from the host. + /// - `Err(_)` if the data could not be fetched from the host. + async fn get(&self, key: PreimageKey) -> PreimageOracleResult>; + + /// Get the data corresponding to the currently set key from the host. Writes the data into the + /// provided buffer. + /// + /// # Returns + /// - `Ok(())` if the data was successfully written into the buffer. + /// - `Err(_)` if the data could not be written into the buffer. 
+ async fn get_exact(&self, key: PreimageKey, buf: &mut [u8]) -> PreimageOracleResult<()>; +} + +/// A [`HintWriterClient`] is a high-level interface to the hint pipe. It provides a way to write +/// hints to the host. +#[async_trait] +pub trait HintWriterClient { + /// Write a hint to the host. This will overwrite any existing hint in the pipe, and block until + /// all data has been written. + /// + /// # Returns + /// - `Ok(())` if the hint was successfully written to the host. + /// - `Err(_)` if the hint could not be written to the host. + async fn write(&self, hint: &str) -> PreimageOracleResult<()>; +} + +/// A [`CommsClient`] is a trait that combines the [`PreimageOracleClient`] and [`HintWriterClient`] +pub trait CommsClient: PreimageOracleClient + Clone + HintWriterClient {} + +// Implement the super trait for any type that satisfies the bounds +impl CommsClient for T {} + +/// A [`PreimageOracleServer`] is a high-level interface to accept read requests from the client and +/// write the preimage data to the client pipe. +#[async_trait] +pub trait PreimageOracleServer { + /// Get the next preimage request and return the response to the client. + /// + /// # Returns + /// - `Ok(())` if the data was successfully written into the client pipe. + /// - `Err(_)` if the data could not be written to the client. + async fn next_preimage_request(&self, get_preimage: &F) -> PreimageOracleResult<()> + where + F: PreimageFetcher + Send + Sync; +} + +/// A [`HintReaderServer`] is a high-level interface to read preimage hints from the +/// [`HintWriterClient`] and prepare them for consumption by the client program. +#[async_trait] +pub trait HintReaderServer { + /// Get the next hint request and return the acknowledgement to the client. + /// + /// # Returns + /// - `Ok(())` if the hint was received and the client was notified of the host's + /// acknowledgement. + /// - `Err(_)` if the hint was not received correctly. 
+ async fn next_hint(&self, route_hint: &R) -> PreimageOracleResult<()> + where + R: HintRouter + Send + Sync; +} + +/// A [`HintRouter`] is a high-level interface to route hints to the appropriate handler. +#[async_trait] +pub trait HintRouter { + /// Routes a hint to the appropriate handler. + /// + /// # Arguments + /// - `hint`: The hint to route. + /// + /// # Returns + /// - `Ok(())` if the hint was successfully routed. + /// - `Err(_)` if the hint could not be routed. + async fn route_hint(&self, hint: String) -> PreimageOracleResult<()>; +} + +/// A [`PreimageFetcher`] is a high-level interface to fetch preimages during preimage requests. +#[async_trait] +pub trait PreimageFetcher { + /// Get the preimage corresponding to the given key. + /// + /// # Arguments + /// - `key`: The key to fetch the preimage for. + /// + /// # Returns + /// - `Ok(Vec)` if the preimage was successfully fetched. + /// - `Err(_)` if the preimage could not be fetched. + async fn get_preimage(&self, key: PreimageKey) -> PreimageOracleResult>; +} + +/// A [`PreimageServerBackend`] is a trait that combines the [`PreimageFetcher`] and [`HintRouter`] +/// traits. +pub trait PreimageServerBackend: PreimageFetcher + HintRouter {} + +// Implement the super trait for any type that satisfies the bounds +impl PreimageServerBackend for T {} + +/// A [`Channel`] is a high-level interface to read and write data to a counterparty. +#[async_trait] +pub trait Channel { + /// Asynchronously read data from the channel into the provided buffer. + /// + /// # Arguments + /// - `buf`: The buffer to read data into. + /// + /// # Returns + /// - `Ok(usize)`: The number of bytes read. + /// - `Err(_)` if the data could not be read. + async fn read(&self, buf: &mut [u8]) -> ChannelResult; + + /// Asynchronously read exactly `buf.len()` bytes into `buf` from the channel. + /// + /// # Arguments + /// - `buf`: The buffer to read data into. 
+ /// + /// # Returns + /// - `Ok(())` if the data was successfully read. + /// - `Err(_)` if the data could not be read. + async fn read_exact(&self, buf: &mut [u8]) -> ChannelResult; + + /// Asynchronously write the provided buffer to the channel. + /// + /// # Arguments + /// - `buf`: The buffer to write to the host. + /// + /// # Returns + /// - `Ok(usize)`: The number of bytes written. + /// - `Err(_)` if the data could not be written. + async fn write(&self, buf: &[u8]) -> ChannelResult; +} diff --git a/kona/crates/proof/proof-interop/CHANGELOG.md b/rust/kona/crates/proof/proof-interop/CHANGELOG.md similarity index 100% rename from kona/crates/proof/proof-interop/CHANGELOG.md rename to rust/kona/crates/proof/proof-interop/CHANGELOG.md diff --git a/rust/kona/crates/proof/proof-interop/Cargo.toml b/rust/kona/crates/proof/proof-interop/Cargo.toml new file mode 100644 index 00000000000..fd3fc6f5898 --- /dev/null +++ b/rust/kona/crates/proof/proof-interop/Cargo.toml @@ -0,0 +1,72 @@ +[package] +name = "kona-proof-interop" +description = "OP Stack Proof SDK with Interop support" +version = "0.2.0" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-preimage.workspace = true +kona-interop = { workspace = true, features = ["serde"] } +kona-proof.workspace = true +kona-mpt.workspace = true +kona-executor.workspace = true +kona-registry.workspace = true +kona-genesis = { workspace = true, features = ["serde"] } +kona-protocol.workspace = true + +# Alloy +alloy-rlp.workspace = true +alloy-eips.workspace = true +alloy-primitives.workspace = true +alloy-consensus.workspace = true +alloy-rpc-types-engine.workspace = true +alloy-evm = { workspace = true, features = ["op"] } + +# OP Alloy +op-alloy-consensus.workspace = true +op-alloy-rpc-types-engine.workspace = true +alloy-op-evm.workspace = true + +# revm +revm.workspace = 
true +op-revm.workspace = true + +# General +serde.workspace = true +tracing.workspace = true +serde_json.workspace = true +async-trait.workspace = true +spin.workspace = true +thiserror.workspace = true + +# Arbitrary +arbitrary = { workspace = true, features = ["derive"], optional = true } + +[dev-dependencies] +alloy-primitives = { workspace = true, features = ["rlp", "arbitrary"] } +kona-interop = { workspace = true, features = ["arbitrary"] } +arbitrary = { workspace = true, features = ["derive"] } +rand.workspace = true + +[features] +arbitrary = [ + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "alloy-rpc-types-engine/arbitrary", + "dep:arbitrary", + "kona-genesis/arbitrary", + "kona-interop/arbitrary", + "kona-protocol/arbitrary", + "op-alloy-consensus/arbitrary", + "op-alloy-rpc-types-engine/arbitrary", + "revm/arbitrary", +] diff --git a/rust/kona/crates/proof/proof-interop/README.md b/rust/kona/crates/proof/proof-interop/README.md new file mode 100644 index 00000000000..ded1b8559fc --- /dev/null +++ b/rust/kona/crates/proof/proof-interop/README.md @@ -0,0 +1,3 @@ +# `kona-proof-interop` + +`kona-proof-interop` is an OP Stack state transition proof SDK, with interop support, built on top of [`kona-proof`](../proof/) diff --git a/kona/crates/proof/proof-interop/src/boot.rs b/rust/kona/crates/proof/proof-interop/src/boot.rs similarity index 95% rename from kona/crates/proof/proof-interop/src/boot.rs rename to rust/kona/crates/proof/proof-interop/src/boot.rs index 8cd244a7c81..2f9652bfca5 100644 --- a/kona/crates/proof/proof-interop/src/boot.rs +++ b/rust/kona/crates/proof/proof-interop/src/boot.rs @@ -103,9 +103,8 @@ impl BootInfo { if l2_post == INVALID_TRANSITION_HASH { return Err(BootstrapError::InvalidToInvalid); - } else { - return Err(BootstrapError::InvalidPostState(l2_post)); } + return Err(BootstrapError::InvalidPostState(l2_post)); } let agreed_pre_state = @@ -178,19 +177,19 @@ impl BootInfo { }) } - /// 
Returns the [RollupConfig] corresponding to the [PreState::active_l2_chain_id]. + /// Returns the [`RollupConfig`] corresponding to the [`PreState::active_l2_chain_id`]. pub fn active_rollup_config(&self) -> Option { let active_l2_chain_id = self.agreed_pre_state.active_l2_chain_id()?; self.rollup_configs.get(&active_l2_chain_id).cloned() } - /// Returns the [L1ChainConfig] corresponding to the [PreState::active_l2_chain_id] through the - /// l2 [RollupConfig]. + /// Returns the [`L1ChainConfig`] corresponding to the [`PreState::active_l2_chain_id`] through + /// the l2 [`RollupConfig`]. pub fn active_l1_config(&self) -> L1ChainConfig { self.l1_config.clone() } - /// Returns the [RollupConfig] corresponding to the given `chain_id`. + /// Returns the [`RollupConfig`] corresponding to the given `chain_id`. pub fn rollup_config(&self, chain_id: u64) -> Option { self.rollup_configs.get(&chain_id).cloned() } diff --git a/kona/crates/proof/proof-interop/src/consolidation.rs b/rust/kona/crates/proof/proof-interop/src/consolidation.rs similarity index 95% rename from kona/crates/proof/proof-interop/src/consolidation.rs rename to rust/kona/crates/proof/proof-interop/src/consolidation.rs index 109735e6b41..57874b0c484 100644 --- a/kona/crates/proof/proof-interop/src/consolidation.rs +++ b/rust/kona/crates/proof/proof-interop/src/consolidation.rs @@ -23,7 +23,7 @@ use revm::context::BlockEnv; use thiserror::Error; use tracing::{error, info}; -/// The [SuperchainConsolidator] holds a [MessageGraph] and is responsible for recursively +/// The [`SuperchainConsolidator`] holds a [`MessageGraph`] and is responsible for recursively /// consolidating the blocks within the graph, per [message validity rules]. /// /// [message validity rules]: https://specs.optimism.io/interop/messaging.html#invalid-messages @@ -32,11 +32,11 @@ pub struct SuperchainConsolidator<'a, C, Evm> where C: CommsClient, { - /// The [BootInfo] of the program. + /// The [`BootInfo`] of the program. 
boot_info: &'a mut BootInfo, - /// The [OracleInteropProvider] used for the message graph. + /// The [`OracleInteropProvider`] used for the message graph. interop_provider: OracleInteropProvider, - /// The [OracleL2ChainProvider]s used for re-execution of invalid blocks, keyed by chain ID. + /// The [`OracleL2ChainProvider`]s used for re-execution of invalid blocks, keyed by chain ID. l2_providers: HashMap>, /// The inner [`EvmFactory`] to create EVM instances for re-execution of bad blocks. evm_factory: Evm, @@ -49,7 +49,7 @@ where ::Tx: FromTxWithEncoded + FromRecoveredTx + OpTxEnv, { - /// Creates a new [SuperchainConsolidator] with the given providers and [Header]s. + /// Creates a new [`SuperchainConsolidator`] with the given providers and [Header]s. /// /// [Header]: alloy_consensus::Header pub const fn new( @@ -61,7 +61,7 @@ where Self { boot_info, interop_provider, l2_providers, evm_factory } } - /// Recursively consolidates the dependencies of the blocks within the [MessageGraph]. + /// Recursively consolidates the dependencies of the blocks within the [`MessageGraph`]. /// /// This method will recurse until all invalid cross-chain dependencies have been resolved, /// re-executing deposit-only blocks for chains with invalid dependencies as needed. @@ -76,7 +76,6 @@ where } Err(ConsolidationError::MessageGraph(MessageGraphError::InvalidMessages(_))) => { // If invalid messages are still present in the graph, continue the loop. - continue; } Err(e) => { error!(target: "superchain_consolidator", "Error consolidating superchain: {:?}", e); @@ -89,8 +88,8 @@ where /// Performs a single iteration of the consolidation process. /// /// Step-wise: - /// 1. Derive a new [MessageGraph] from the current set of local safe [Header]s. - /// 2. Resolve the [MessageGraph]. + /// 1. Derive a new [`MessageGraph`] from the current set of local safe [Header]s. + /// 2. Resolve the [`MessageGraph`]. /// 3. 
If any invalid messages are found, re-execute the bad block(s) only deposit transactions, /// and bubble up the error. /// @@ -279,7 +278,7 @@ where } } -/// An error type for the [SuperchainConsolidator] struct. +/// An error type for the [`SuperchainConsolidator`] struct. #[derive(Debug, Error)] pub enum ConsolidationError { /// An invalid pre-state variant was passed to the consolidator. diff --git a/rust/kona/crates/proof/proof-interop/src/hint.rs b/rust/kona/crates/proof/proof-interop/src/hint.rs new file mode 100644 index 00000000000..cafa6ab5f5e --- /dev/null +++ b/rust/kona/crates/proof/proof-interop/src/hint.rs @@ -0,0 +1,193 @@ +//! This module contains the [`HintType`] enum. + +use alloc::{string::ToString, vec::Vec}; +use core::{fmt::Display, str::FromStr}; +use kona_proof::{Hint, errors::HintParsingError}; + +/// The [`HintType`] enum is used to specify the type of hint that was received. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum HintType { + /// A hint that specifies the block header of a layer 1 block. + L1BlockHeader, + /// A hint that specifies the transactions of a layer 1 block. + L1Transactions, + /// A hint that specifies the state node of a layer 1 block. + L1Receipts, + /// A hint that specifies a blob in the layer 1 beacon chain. + L1Blob, + /// A hint that specifies a precompile call on layer 1. + L1Precompile, + /// A hint that specifies the block header of a layer 2 block. + L2BlockHeader, + /// A hint that specifies the transactions of a layer 2 block. + L2Transactions, + /// A hint that specifies the receipts of a layer 2 block. + L2Receipts, + /// A hint that specifies the code of a contract on layer 2. + L2Code, + /// A hint that specifies the preimage of the agreed upon pre-state claim. + AgreedPreState, + /// A hint that specifies the preimage of an L2 output root within the agreed upon pre-state, + /// by chain ID. + L2OutputRoot, + /// A hint that specifies the state node in the L2 state trie. 
+ L2StateNode, + /// A hint that specifies the proof on the path to an account in the L2 state trie. + L2AccountProof, + /// A hint that specifies the proof on the path to a storage slot in an account within in the + /// L2 state trie. + L2AccountStorageProof, + /// A hint that specifies loading the payload witness for an optimistic block. + L2BlockData, + /// A hint that specifies bulk storage of all the code, state and keys generated by an + /// execution witness. + L2PayloadWitness, +} + +impl HintType { + /// Creates a new [Hint] from `self` and the specified data. The data passed will be + /// concatenated into a single byte array before being stored in the resulting [Hint]. + pub fn with_data(self, data: &[&[u8]]) -> Hint { + let total_len = data.iter().map(|d| d.len()).sum(); + let hint_data = data.iter().fold(Vec::with_capacity(total_len), |mut acc, d| { + acc.extend_from_slice(d); + acc + }); + Hint::new(self, hint_data) + } +} + +impl FromStr for HintType { + type Err = HintParsingError; + + fn from_str(value: &str) -> Result { + match value { + "l1-block-header" => Ok(Self::L1BlockHeader), + "l1-transactions" => Ok(Self::L1Transactions), + "l1-receipts" => Ok(Self::L1Receipts), + "l1-blob" => Ok(Self::L1Blob), + "l1-precompile" => Ok(Self::L1Precompile), + "l2-block-header" => Ok(Self::L2BlockHeader), + "l2-transactions" => Ok(Self::L2Transactions), + "l2-receipts" => Ok(Self::L2Receipts), + "l2-code" => Ok(Self::L2Code), + "agreed-pre-state" => Ok(Self::AgreedPreState), + "l2-output-root" => Ok(Self::L2OutputRoot), + "l2-state-node" => Ok(Self::L2StateNode), + "l2-account-proof" => Ok(Self::L2AccountProof), + "l2-account-storage-proof" => Ok(Self::L2AccountStorageProof), + "l2-block-data" => Ok(Self::L2BlockData), + "l2-payload-witness" => Ok(Self::L2PayloadWitness), + _ => Err(HintParsingError(value.to_string())), + } + } +} + +impl From for &str { + fn from(value: HintType) -> Self { + match value { + HintType::L1BlockHeader => "l1-block-header", + 
HintType::L1Transactions => "l1-transactions", + HintType::L1Receipts => "l1-receipts", + HintType::L1Blob => "l1-blob", + HintType::L1Precompile => "l1-precompile", + HintType::L2BlockHeader => "l2-block-header", + HintType::L2Transactions => "l2-transactions", + HintType::L2Receipts => "l2-receipts", + HintType::L2Code => "l2-code", + HintType::AgreedPreState => "agreed-pre-state", + HintType::L2OutputRoot => "l2-output-root", + HintType::L2StateNode => "l2-state-node", + HintType::L2AccountProof => "l2-account-proof", + HintType::L2AccountStorageProof => "l2-account-storage-proof", + HintType::L2BlockData => "l2-block-data", + HintType::L2PayloadWitness => "l2-payload-witness", + } + } +} + +impl Display for HintType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let s: &str = (*self).into(); + write!(f, "{s}") + } +} + +mod test { + #[test] + fn test_hint_type_from_str() { + use super::HintType; + use crate::alloc::string::ToString; + use core::str::FromStr; + use kona_proof::errors::HintParsingError; + + assert_eq!(HintType::from_str("l1-block-header").unwrap(), HintType::L1BlockHeader); + assert_eq!(HintType::from_str("l1-transactions").unwrap(), HintType::L1Transactions); + assert_eq!(HintType::from_str("l1-receipts").unwrap(), HintType::L1Receipts); + assert_eq!(HintType::from_str("l1-blob").unwrap(), HintType::L1Blob); + assert_eq!(HintType::from_str("l1-precompile").unwrap(), HintType::L1Precompile); + assert_eq!(HintType::from_str("l2-block-header").unwrap(), HintType::L2BlockHeader); + assert_eq!(HintType::from_str("l2-block-data").unwrap(), HintType::L2BlockData); + assert_eq!(HintType::from_str("l2-transactions").unwrap(), HintType::L2Transactions); + assert_eq!(HintType::from_str("l2-receipts").unwrap(), HintType::L2Receipts); + assert_eq!(HintType::from_str("l2-code").unwrap(), HintType::L2Code); + assert_eq!(HintType::from_str("agreed-pre-state").unwrap(), HintType::AgreedPreState); + 
assert_eq!(HintType::from_str("l2-output-root").unwrap(), HintType::L2OutputRoot); + assert_eq!(HintType::from_str("l2-account-proof").unwrap(), HintType::L2AccountProof); + assert_eq!( + HintType::from_str("l2-account-storage-proof").unwrap(), + HintType::L2AccountStorageProof + ); + assert_eq!(HintType::from_str("l2-block-data").unwrap(), HintType::L2BlockData); + assert_eq!(HintType::from_str("l2-payload-witness").unwrap(), HintType::L2PayloadWitness); + match HintType::from_str("invalid") { + Ok(_) => { + panic!("expected error"); + } + Err(parsing_err) => { + let HintParsingError(str) = parsing_err; + assert_eq!(str, "invalid".to_string()); + } + } + } + + #[test] + fn test_hint_type_to_str() { + use super::HintType; + + assert_eq!(<&str>::from(HintType::L1BlockHeader), "l1-block-header"); + assert_eq!(<&str>::from(HintType::L1Transactions), "l1-transactions"); + assert_eq!(<&str>::from(HintType::L1Receipts), "l1-receipts"); + assert_eq!(<&str>::from(HintType::L1Blob), "l1-blob"); + assert_eq!(<&str>::from(HintType::L1Precompile), "l1-precompile"); + assert_eq!(<&str>::from(HintType::L2BlockHeader), "l2-block-header"); + assert_eq!(<&str>::from(HintType::L2Transactions), "l2-transactions"); + assert_eq!(<&str>::from(HintType::L2Receipts), "l2-receipts"); + assert_eq!(<&str>::from(HintType::L2Code), "l2-code"); + assert_eq!(<&str>::from(HintType::AgreedPreState), "agreed-pre-state"); + assert_eq!(<&str>::from(HintType::L2OutputRoot), "l2-output-root"); + assert_eq!(<&str>::from(HintType::L2StateNode), "l2-state-node"); + assert_eq!(<&str>::from(HintType::L2AccountProof), "l2-account-proof"); + assert_eq!(<&str>::from(HintType::L2AccountStorageProof), "l2-account-storage-proof"); + assert_eq!(<&str>::from(HintType::L2BlockData), "l2-block-data"); + assert_eq!(<&str>::from(HintType::L2PayloadWitness), "l2-payload-witness"); + } + + #[test] + fn test_hint_with_data() { + use super::HintType; + use alloy_primitives::Bytes; + + let hint_data: &[u8] = &[1, 2]; + let 
l1_block_header = HintType::L1BlockHeader.with_data(&[hint_data]); + assert_eq!(l1_block_header.data, Bytes::from(hint_data)); + } + + #[test] + fn test_hint_fmt() { + use super::HintType; + use alloc::format; + + assert_eq!(format!("{}", HintType::L1BlockHeader), "l1-block-header"); + } +} diff --git a/rust/kona/crates/proof/proof-interop/src/lib.rs b/rust/kona/crates/proof/proof-interop/src/lib.rs new file mode 100644 index 00000000000..b36e7afe912 --- /dev/null +++ b/rust/kona/crates/proof/proof-interop/src/lib.rs @@ -0,0 +1,28 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "arbitrary"), no_std)] + +extern crate alloc; + +mod pre_state; +pub use pre_state::{ + INVALID_TRANSITION, INVALID_TRANSITION_HASH, OptimisticBlock, PreState, + TRANSITION_STATE_MAX_STEPS, TransitionState, +}; + +mod hint; +pub use hint::HintType; + +mod provider; +pub use provider::OracleInteropProvider; + +pub mod boot; +pub use boot::BootInfo; + +mod consolidation; +pub use consolidation::{ConsolidationError, SuperchainConsolidator}; diff --git a/kona/crates/proof/proof-interop/src/pre_state.rs b/rust/kona/crates/proof/proof-interop/src/pre_state.rs similarity index 92% rename from kona/crates/proof/proof-interop/src/pre_state.rs rename to rust/kona/crates/proof/proof-interop/src/pre_state.rs index 36d473802b1..8052d5088ff 100644 --- a/kona/crates/proof/proof-interop/src/pre_state.rs +++ b/rust/kona/crates/proof/proof-interop/src/pre_state.rs @@ -6,10 +6,10 @@ use alloy_rlp::{Buf, Decodable, Encodable, Header, RlpDecodable, RlpEncodable}; use kona_interop::{OutputRootWithChain, SUPER_ROOT_VERSION, 
SuperRoot}; use serde::{Deserialize, Serialize}; -/// The current [TransitionState] encoding format version. +/// The current [`TransitionState`] encoding format version. pub(crate) const TRANSITION_STATE_VERSION: u8 = 255; -/// The maximum number of steps allowed in a [TransitionState]. +/// The maximum number of steps allowed in a [`TransitionState`]. pub const TRANSITION_STATE_MAX_STEPS: u64 = 2u64.pow(7) - 1; /// The [Bytes] representation of the string "invalid". @@ -19,10 +19,10 @@ pub const INVALID_TRANSITION: Bytes = Bytes::from_static(b"invalid"); pub const INVALID_TRANSITION_HASH: B256 = b256!("ffd7db0f9d5cdeb49c4c9eba649d4dc6d852d64671e65488e57f58584992ac68"); -/// The [PreState] of the interop proof program can be one of two types: a [SuperRoot] or a -/// [TransitionState]. The [SuperRoot] is the canonical state of the superchain, while the -/// [TransitionState] is a super-structure of the [SuperRoot] that represents the progress of a -/// pending superchain state transition from one [SuperRoot] to the next. +/// The [`PreState`] of the interop proof program can be one of two types: a [`SuperRoot`] or a +/// [`TransitionState`]. The [`SuperRoot`] is the canonical state of the superchain, while the +/// [`TransitionState`] is a super-structure of the [`SuperRoot`] that represents the progress of a +/// pending superchain state transition from one [`SuperRoot`] to the next. #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] pub enum PreState { @@ -33,14 +33,14 @@ pub enum PreState { } impl PreState { - /// Hashes the encoded [PreState] using [keccak256]. + /// Hashes the encoded [`PreState`] using [keccak256]. pub fn hash(&self) -> B256 { let mut rlp_buf = Vec::with_capacity(self.length()); self.encode(&mut rlp_buf); keccak256(&rlp_buf) } - /// Returns the timestamp of the [PreState]. + /// Returns the timestamp of the [`PreState`]. 
pub const fn timestamp(&self) -> u64 { match self { Self::SuperRoot(super_root) => super_root.timestamp, @@ -48,9 +48,9 @@ impl PreState { } } - /// Returns the active L2 output root hash of the [PreState]. This is the output root that + /// Returns the active L2 output root hash of the [`PreState`]. This is the output root that /// represents the pre-state of the chain that is to be committed to in the next transition - /// step, or [None] if the [PreState] has already been fully saturated. + /// step, or [None] if the [`PreState`] has already been fully saturated. pub fn active_l2_output_root(&self) -> Option<&OutputRootWithChain> { match self { Self::SuperRoot(super_root) => super_root.output_roots.first(), @@ -60,14 +60,14 @@ impl PreState { } } - /// Returns the active L2 chain ID of the [PreState]. This is the chain ID of the output root - /// that is to be committed to in the next transition step, or [None] if the [PreState] + /// Returns the active L2 chain ID of the [`PreState`]. This is the chain ID of the output root + /// that is to be committed to in the next transition step, or [None] if the [`PreState`] /// has already been fully saturated. pub fn active_l2_chain_id(&self) -> Option { self.active_l2_output_root().map(|output_root| output_root.chain_id) } - /// Transitions to the next state, appending the [OptimisticBlock] to the pending progress. + /// Transitions to the next state, appending the [`OptimisticBlock`] to the pending progress. 
pub fn transition(self, optimistic_block: Option) -> Option { match self { Self::SuperRoot(super_root) => Some(Self::TransitionState(TransitionState::new( @@ -98,10 +98,9 @@ impl PreState { .collect(), ); return Some(Self::SuperRoot(super_root)); - } else { - transition_state.step += 1; - return Some(Self::TransitionState(transition_state)); - }; + } + transition_state.step += 1; + return Some(Self::TransitionState(transition_state)); } transition_state.pending_progress.push(optimistic_block?); @@ -146,8 +145,8 @@ impl Decodable for PreState { } } -/// The [TransitionState] is a super-structure of the [SuperRoot] that represents the progress of a -/// pending superchain state transition from one [SuperRoot] to the next. +/// The [`TransitionState`] is a super-structure of the [`SuperRoot`] that represents the progress +/// of a pending superchain state transition from one [`SuperRoot`] to the next. #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] pub struct TransitionState { @@ -160,7 +159,8 @@ pub struct TransitionState { } impl TransitionState { - /// Create a new [TransitionState] with the given pre-state, pending progress, and step number. + /// Create a new [`TransitionState`] with the given pre-state, pending progress, and step + /// number. pub const fn new( pre_state: SuperRoot, pending_progress: Vec, @@ -169,14 +169,14 @@ impl TransitionState { Self { pre_state, pending_progress, step } } - /// Hashes the encoded [TransitionState] using [keccak256]. + /// Hashes the encoded [`TransitionState`] using [keccak256]. pub fn hash(&self) -> B256 { let mut rlp_buf = Vec::with_capacity(self.length()); self.encode(&mut rlp_buf); keccak256(&rlp_buf) } - /// Returns the RLP payload length of the [TransitionState]. + /// Returns the RLP payload length of the [`TransitionState`]. 
pub fn payload_length(&self) -> usize { Header { list: false, payload_length: self.pre_state.encoded_length() }.length() + self.pre_state.encoded_length() + @@ -247,7 +247,7 @@ pub struct OptimisticBlock { } impl OptimisticBlock { - /// Create a new [OptimisticBlock] with the given block hash and output root hash. + /// Create a new [`OptimisticBlock`] with the given block hash and output root hash. pub const fn new(block_hash: B256, output_root: B256) -> Self { Self { block_hash, output_root } } @@ -311,7 +311,7 @@ mod test { assert_eq!(transition_state, TransitionState::decode(&mut rlp_buf.as_slice()).unwrap()); } - /// Helper function to create a test TransitionState with three output roots + /// Helper function to create a test `TransitionState` with three output roots fn create_test_transition_state(step: u64, chain_count: u64) -> TransitionState { const TIMESTAMP: u64 = 10; diff --git a/rust/kona/crates/proof/proof-interop/src/provider.rs b/rust/kona/crates/proof/proof-interop/src/provider.rs new file mode 100644 index 00000000000..72c314a1188 --- /dev/null +++ b/rust/kona/crates/proof/proof-interop/src/provider.rs @@ -0,0 +1,270 @@ +//! [`InteropProvider`] trait implementation using a [`CommsClient`] data source. + +use crate::{BootInfo, HintType}; +use alloc::{boxed::Box, string::ToString, sync::Arc, vec::Vec}; +use alloy_consensus::{Header, Sealed}; +use alloy_eips::eip2718::Decodable2718; +use alloy_primitives::{Address, B256}; +use alloy_rlp::Decodable; +use async_trait::async_trait; +use kona_interop::InteropProvider; +use kona_mpt::{OrderedListWalker, TrieHinter, TrieNode, TrieProvider}; +use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType, errors::PreimageOracleError}; +use kona_proof::{eip_2935_history_lookup, errors::OracleProviderError}; +use kona_registry::HashMap; +use op_alloy_consensus::OpReceiptEnvelope; +use spin::RwLock; + +/// A [`CommsClient`] backed [`InteropProvider`] implementation. 
+#[derive(Debug, Clone)] +pub struct OracleInteropProvider { + /// The oracle client. + oracle: Arc, + /// The [`BootInfo`] for the current program execution. + boot: BootInfo, + /// The local safe head block header cache. + local_safe_heads: HashMap>, + /// The chain ID for the current call context. Used to declare the chain ID for the trie hints. + chain_id: Arc>>, +} + +impl OracleInteropProvider +where + C: CommsClient + Send + Sync, +{ + /// Creates a new [`OracleInteropProvider`] with the given oracle client and [`BootInfo`]. + pub fn new( + oracle: Arc, + boot: BootInfo, + local_safe_headers: HashMap>, + ) -> Self { + Self { + oracle, + boot, + local_safe_heads: local_safe_headers, + chain_id: Arc::new(RwLock::new(None)), + } + } + + /// Returns a reference to the local safe heads map. + pub const fn local_safe_heads(&self) -> &HashMap> { + &self.local_safe_heads + } + + /// Replaces a local safe head with the given header. + pub fn replace_local_safe_head(&mut self, chain_id: u64, header: Sealed
) { + self.local_safe_heads.insert(chain_id, header); + } + + /// Fetch the [Header] for the block with the given hash. + pub async fn header_by_hash( + &self, + chain_id: u64, + block_hash: B256, + ) -> Result::Error> { + HintType::L2BlockHeader + .with_data(&[block_hash.as_slice(), chain_id.to_be_bytes().as_ref()]) + .send(self.oracle.as_ref()) + .await?; + + let header_rlp = self + .oracle + .get(PreimageKey::new(*block_hash, PreimageKeyType::Keccak256)) + .await + .map_err(OracleProviderError::Preimage)?; + + Header::decode(&mut header_rlp.as_ref()).map_err(OracleProviderError::Rlp) + } + + /// Fetch the [`OpReceiptEnvelope`]s for the block with the given hash. + async fn derive_receipts( + &self, + chain_id: u64, + block_hash: B256, + header: &Header, + ) -> Result, ::Error> { + // Send a hint for the block's receipts, and walk through the receipts trie in the header to + // verify them. + HintType::L2Receipts + .with_data(&[block_hash.as_ref(), chain_id.to_be_bytes().as_slice()]) + .send(self.oracle.as_ref()) + .await?; + let trie_walker = OrderedListWalker::try_new_hydrated(header.receipts_root, self) + .map_err(OracleProviderError::TrieWalker)?; + + // Decode the receipts within the receipts trie. + let receipts = trie_walker + .into_iter() + .map(|(_, rlp)| { + let envelope = OpReceiptEnvelope::decode_2718(&mut rlp.as_ref())?; + Ok(envelope) + }) + .collect::, _>>() + .map_err(OracleProviderError::Rlp)?; + + Ok(receipts) + } +} + +#[async_trait] +impl InteropProvider for OracleInteropProvider +where + C: CommsClient + Send + Sync, +{ + type Error = OracleProviderError; + + /// Fetch a [Header] by its number. + async fn header_by_number(&self, chain_id: u64, number: u64) -> Result { + let Some(mut header) = + self.local_safe_heads.get(&chain_id).cloned().map(|h| h.into_inner()) + else { + return Err(PreimageOracleError::Other("Missing local safe header".to_string()).into()); + }; + + // Check if the block number is in range. If not, we can fail early. 
+ if number > header.number { + return Err(OracleProviderError::BlockNumberPastHead(number, header.number)); + } + + // Set the chain ID for the trie hints, and explicitly drop the lock. + let mut chain_id_lock = self.chain_id.write(); + *chain_id_lock = Some(chain_id); + drop(chain_id_lock); + + // Walk back the block headers to the desired block number. + let rollup_config = self.boot.rollup_config(chain_id).ok_or_else(|| { + PreimageOracleError::Other("Missing rollup config for chain ID".to_string()) + })?; + let mut linear_fallback = false; + + while header.number > number { + if rollup_config.is_isthmus_active(header.timestamp) && !linear_fallback { + // If Isthmus is active, the EIP-2935 contract is used to perform leaping lookbacks + // through consulting the ring buffer within the contract. If this + // lookup fails for any reason, we fall back to linear walk back. + let block_hash = match eip_2935_history_lookup(&header, 0, self, self).await { + Ok(hash) => hash, + Err(_) => { + // If the EIP-2935 lookup fails for any reason, attempt fallback to linear + // walk back. + linear_fallback = true; + continue; + } + }; + + header = self.header_by_hash(chain_id, block_hash).await?; + } else { + // Walk back the block headers one-by-one until the desired block number is reached. + header = self.header_by_hash(chain_id, header.parent_hash).await?; + } + } + + Ok(header) + } + + /// Fetch all receipts for a given block by number. + async fn receipts_by_number( + &self, + chain_id: u64, + number: u64, + ) -> Result, Self::Error> { + let header = self.header_by_number(chain_id, number).await?; + self.derive_receipts(chain_id, header.hash_slow(), &header).await + } + + /// Fetch all receipts for a given block by hash. 
+ async fn receipts_by_hash( + &self, + chain_id: u64, + block_hash: B256, + ) -> Result, Self::Error> { + let header = self.header_by_hash(chain_id, block_hash).await?; + self.derive_receipts(chain_id, block_hash, &header).await + } +} + +impl TrieProvider for OracleInteropProvider +where + C: CommsClient + Send + Sync + Clone, +{ + type Error = OracleProviderError; + + fn trie_node_by_hash(&self, key: B256) -> Result { + kona_proof::block_on(async move { + let trie_node_rlp = self + .oracle + .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) + .await + .map_err(OracleProviderError::Preimage)?; + TrieNode::decode(&mut trie_node_rlp.as_ref()).map_err(OracleProviderError::Rlp) + }) + } +} + +impl TrieHinter for OracleInteropProvider { + type Error = OracleProviderError; + + fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error> { + kona_proof::block_on(async move { + HintType::L2StateNode + .with_data(&[hash.as_slice()]) + .with_data( + self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), + ) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error> { + kona_proof::block_on(async move { + HintType::L2AccountProof + .with_data(&[block_number.to_be_bytes().as_ref(), address.as_slice()]) + .with_data( + self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), + ) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_storage_proof( + &self, + address: alloy_primitives::Address, + slot: alloy_primitives::U256, + block_number: u64, + ) -> Result<(), Self::Error> { + kona_proof::block_on(async move { + HintType::L2AccountStorageProof + .with_data(&[ + block_number.to_be_bytes().as_ref(), + address.as_slice(), + slot.to_be_bytes::<32>().as_ref(), + ]) + .with_data( + self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), + ) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_execution_witness( + 
&self, + parent_hash: B256, + op_payload_attributes: &op_alloy_rpc_types_engine::OpPayloadAttributes, + ) -> Result<(), Self::Error> { + kona_proof::block_on(async move { + let encoded_attributes = + serde_json::to_vec(op_payload_attributes).map_err(OracleProviderError::Serde)?; + + HintType::L2PayloadWitness + .with_data(&[parent_hash.as_slice(), &encoded_attributes]) + .with_data( + self.chain_id.read().map_or_else(Vec::new, |id| id.to_be_bytes().to_vec()), + ) + .send(self.oracle.as_ref()) + .await + }) + } +} diff --git a/kona/crates/proof/proof/CHANGELOG.md b/rust/kona/crates/proof/proof/CHANGELOG.md similarity index 100% rename from kona/crates/proof/proof/CHANGELOG.md rename to rust/kona/crates/proof/proof/CHANGELOG.md diff --git a/kona/crates/proof/proof/Cargo.toml b/rust/kona/crates/proof/proof/Cargo.toml similarity index 100% rename from kona/crates/proof/proof/Cargo.toml rename to rust/kona/crates/proof/proof/Cargo.toml diff --git a/rust/kona/crates/proof/proof/README.md b/rust/kona/crates/proof/proof/README.md new file mode 100644 index 00000000000..48fa8735fa5 --- /dev/null +++ b/rust/kona/crates/proof/proof/README.md @@ -0,0 +1,3 @@ +# `kona-proof` + +`kona-proof` is an OP Stack state transition proof SDK. 
diff --git a/kona/crates/proof/proof/src/blocking_runtime.rs b/rust/kona/crates/proof/proof/src/blocking_runtime.rs similarity index 100% rename from kona/crates/proof/proof/src/blocking_runtime.rs rename to rust/kona/crates/proof/proof/src/blocking_runtime.rs diff --git a/kona/crates/proof/proof/src/boot.rs b/rust/kona/crates/proof/proof/src/boot.rs similarity index 100% rename from kona/crates/proof/proof/src/boot.rs rename to rust/kona/crates/proof/proof/src/boot.rs diff --git a/kona/crates/proof/proof/src/caching_oracle.rs b/rust/kona/crates/proof/proof/src/caching_oracle.rs similarity index 78% rename from kona/crates/proof/proof/src/caching_oracle.rs rename to rust/kona/crates/proof/proof/src/caching_oracle.rs index 25e437fabe5..a3995d23f69 100644 --- a/kona/crates/proof/proof/src/caching_oracle.rs +++ b/rust/kona/crates/proof/proof/src/caching_oracle.rs @@ -1,8 +1,6 @@ -//! Contains the [CachingOracle], which is a wrapper around an [OracleReader] and [HintWriter] that -//! stores a configurable number of responses in an [LruCache] for quick retrieval. -//! -//! [OracleReader]: kona_preimage::OracleReader -//! [HintWriter]: kona_preimage::HintWriter +//! Contains the [`CachingOracle`], which is a wrapper around an +//! [`OracleReader`](kona_preimage::OracleReader) and [`HintWriter`](kona_preimage::HintWriter) +//! that stores a configurable number of responses in an [`LruCache`] for quick retrieval. use alloc::{boxed::Box, sync::Arc, vec::Vec}; use async_trait::async_trait; @@ -13,11 +11,9 @@ use kona_preimage::{ use lru::LruCache; use spin::Mutex; -/// A wrapper around an [OracleReader] and [HintWriter] that stores a configurable number of -/// responses in an [LruCache] for quick retrieval. 
-/// -/// [OracleReader]: kona_preimage::OracleReader -/// [HintWriter]: kona_preimage::HintWriter +/// A wrapper around an [`OracleReader`](kona_preimage::OracleReader) and +/// [`HintWriter`](kona_preimage::HintWriter) that stores a configurable number of responses in an +/// [`LruCache`] for quick retrieval. #[allow(unreachable_pub)] #[derive(Debug, Clone)] pub struct CachingOracle @@ -38,10 +34,8 @@ where OR: PreimageOracleClient, HW: HintWriterClient, { - /// Creates a new [CachingOracle] that wraps the given [OracleReader] and stores up to `N` - /// responses in the cache. - /// - /// [OracleReader]: kona_preimage::OracleReader + /// Creates a new [`CachingOracle`] that wraps the given + /// [`OracleReader`](kona_preimage::OracleReader) and stores up to `N` responses in the cache. pub fn new(cache_size: usize, oracle_reader: OR, hint_writer: HW) -> Self { Self { cache: Arc::new(Mutex::new(LruCache::new( @@ -91,12 +85,11 @@ where // SAFETY: The value never enters the cache unless the preimage length matches the // buffer length, due to the checks in the OracleReader. 
buf.copy_from_slice(value.as_slice()); - Ok(()) } else { self.oracle_reader.get_exact(key, buf).await?; self.cache.lock().put(key, buf.to_vec()); - Ok(()) } + Ok(()) } } diff --git a/kona/crates/proof/proof/src/eip2935.rs b/rust/kona/crates/proof/proof/src/eip2935.rs similarity index 100% rename from kona/crates/proof/proof/src/eip2935.rs rename to rust/kona/crates/proof/proof/src/eip2935.rs diff --git a/kona/crates/proof/proof/src/errors.rs b/rust/kona/crates/proof/proof/src/errors.rs similarity index 100% rename from kona/crates/proof/proof/src/errors.rs rename to rust/kona/crates/proof/proof/src/errors.rs diff --git a/kona/crates/proof/proof/src/executor.rs b/rust/kona/crates/proof/proof/src/executor.rs similarity index 100% rename from kona/crates/proof/proof/src/executor.rs rename to rust/kona/crates/proof/proof/src/executor.rs diff --git a/rust/kona/crates/proof/proof/src/hint.rs b/rust/kona/crates/proof/proof/src/hint.rs new file mode 100644 index 00000000000..dba61d1b363 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/hint.rs @@ -0,0 +1,177 @@ +//! This module contains the [`HintType`] enum. + +use crate::errors::{HintParsingError, OracleProviderError}; +use alloc::{ + string::{String, ToString}, + vec::Vec, +}; +use alloy_primitives::{Bytes, hex}; +use core::{fmt::Display, str::FromStr}; +use kona_preimage::HintWriterClient; + +/// A [Hint] is parsed in the format ` `, where `` is a string that +/// represents the type of hint, and `` is the data associated with the hint (bytes +/// encoded as hex UTF-8). +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct Hint { + /// The type of hint. + pub ty: HT, + /// The data associated with the hint. + pub data: Bytes, +} + +impl Hint +where + HT: Display, +{ + /// Creates a new [Hint] with the specified type and data. + pub fn new>(ty: HT, data: T) -> Self { + Self { ty, data: data.into() } + } + + /// Splits the [Hint] into its components. 
+ pub fn split(self) -> (HT, Bytes) { + (self.ty, self.data) + } + + /// Appends more data to [`Hint::data`]. + pub fn with_data>(self, data: T) -> Self { + // No-op if the data is empty. + if data.as_ref().is_empty() { + return self; + } + + let mut hint_data = Vec::with_capacity(self.data.len() + data.as_ref().len()); + hint_data.extend_from_slice(self.data.as_ref()); + hint_data.extend_from_slice(data.as_ref()); + + Self { data: hint_data.into(), ..self } + } + + /// Sends the hint to the passed [`HintWriterClient`]. + pub async fn send(&self, comms: &T) -> Result<(), OracleProviderError> { + comms.write(&self.encode()).await.map_err(OracleProviderError::Preimage) + } + + /// Encodes the hint as a string. + pub fn encode(&self) -> String { + alloc::format!("{} {}", self.ty, self.data) + } +} + +impl FromStr for Hint +where + HT: FromStr, +{ + type Err = HintParsingError; + + fn from_str(s: &str) -> Result { + let mut parts = s.split(' ').collect::>(); + + if parts.len() != 2 { + return Err(HintParsingError(alloc::format!("Invalid hint format: {s}"))); + } + + let hint_type = parts.remove(0).parse::()?; + let hint_data = + hex::decode(parts.remove(0)).map_err(|e| HintParsingError(e.to_string()))?.into(); + + Ok(Self { ty: hint_type, data: hint_data }) + } +} + +/// The [`HintType`] enum is used to specify the type of hint that was received. +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub enum HintType { + /// A hint that specifies the block header of a layer 1 block. + L1BlockHeader, + /// A hint that specifies the transactions of a layer 1 block. + L1Transactions, + /// A hint that specifies the state node of a layer 1 block. + L1Receipts, + /// A hint that specifies a blob in the layer 1 beacon chain. + L1Blob, + /// A hint that specifies a precompile call on layer 1. + L1Precompile, + /// A hint that specifies the block header of a layer 2 block. + L2BlockHeader, + /// A hint that specifies the transactions of a layer 2 block. 
+ L2Transactions, + /// A hint that specifies the code of a contract on layer 2. + L2Code, + /// A hint that specifies the preimage of the starting L2 output root on layer 2. + StartingL2Output, + /// A hint that specifies the state node in the L2 state trie. + L2StateNode, + /// A hint that specifies the proof on the path to an account in the L2 state trie. + L2AccountProof, + /// A hint that specifies the proof on the path to a storage slot in an account within in the + /// L2 state trie. + L2AccountStorageProof, + /// A hint that specifies bulk storage of all the code, state and keys generated by an + /// execution witness. + L2PayloadWitness, +} + +impl HintType { + /// Creates a new [Hint] from `self` and the specified data. The data passed will be + /// concatenated into a single byte array before being stored in the resulting [Hint]. + pub fn with_data(self, data: &[&[u8]]) -> Hint { + let total_len = data.iter().map(|d| d.len()).sum(); + let hint_data = data.iter().fold(Vec::with_capacity(total_len), |mut acc, d| { + acc.extend_from_slice(d); + acc + }); + Hint::new(self, hint_data) + } +} + +impl FromStr for HintType { + type Err = HintParsingError; + + fn from_str(value: &str) -> Result { + match value { + "l1-block-header" => Ok(Self::L1BlockHeader), + "l1-transactions" => Ok(Self::L1Transactions), + "l1-receipts" => Ok(Self::L1Receipts), + "l1-blob" => Ok(Self::L1Blob), + "l1-precompile" => Ok(Self::L1Precompile), + "l2-block-header" => Ok(Self::L2BlockHeader), + "l2-transactions" => Ok(Self::L2Transactions), + "l2-code" => Ok(Self::L2Code), + "starting-l2-output" => Ok(Self::StartingL2Output), + "l2-state-node" => Ok(Self::L2StateNode), + "l2-account-proof" => Ok(Self::L2AccountProof), + "l2-account-storage-proof" => Ok(Self::L2AccountStorageProof), + "l2-payload-witness" => Ok(Self::L2PayloadWitness), + _ => Err(HintParsingError(value.to_string())), + } + } +} + +impl From for &str { + fn from(value: HintType) -> Self { + match value { + 
HintType::L1BlockHeader => "l1-block-header", + HintType::L1Transactions => "l1-transactions", + HintType::L1Receipts => "l1-receipts", + HintType::L1Blob => "l1-blob", + HintType::L1Precompile => "l1-precompile", + HintType::L2BlockHeader => "l2-block-header", + HintType::L2Transactions => "l2-transactions", + HintType::L2Code => "l2-code", + HintType::StartingL2Output => "starting-l2-output", + HintType::L2StateNode => "l2-state-node", + HintType::L2AccountProof => "l2-account-proof", + HintType::L2AccountStorageProof => "l2-account-storage-proof", + HintType::L2PayloadWitness => "l2-payload-witness", + } + } +} + +impl Display for HintType { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let s: &str = (*self).into(); + write!(f, "{s}") + } +} diff --git a/rust/kona/crates/proof/proof/src/l1/blob_provider.rs b/rust/kona/crates/proof/proof/src/l1/blob_provider.rs new file mode 100644 index 00000000000..afbfbdf2468 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/l1/blob_provider.rs @@ -0,0 +1,216 @@ +//! Contains the concrete implementation of the [`BlobProvider`] trait for the client program. + +use crate::{HintType, errors::OracleProviderError}; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloy_consensus::Blob; +use alloy_eips::eip4844::{FIELD_ELEMENTS_PER_BLOB, IndexedBlobHash}; +use alloy_primitives::keccak256; +use ark_bls12_381::Fr; +use ark_ff::{AdditiveGroup, BigInteger, BigInteger256, Field, PrimeField}; +use async_trait::async_trait; +use core::str::FromStr; +use kona_derive::BlobProvider; +use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; +use kona_protocol::BlockInfo; +use spin::Lazy; + +/// An oracle-backed blob provider. +#[derive(Debug, Clone)] +pub struct OracleBlobProvider { + oracle: Arc, +} + +impl OracleBlobProvider { + /// Constructs a new `OracleBlobProvider`. + pub const fn new(oracle: Arc) -> Self { + Self { oracle } + } + + /// Retrieves a blob from the oracle. 
+ /// + /// ## Takes + /// - `block_ref`: The block reference. + /// - `blob_hash`: The blob hash. + /// + /// ## Returns + /// - `Ok(blob)`: The blob. + /// - `Err(e)`: The blob could not be retrieved. + #[allow(clippy::large_stack_frames)] + async fn get_blob( + &self, + block_ref: &BlockInfo, + blob_hash: &IndexedBlobHash, + ) -> Result { + let mut blob_req_meta = [0u8; 48]; + blob_req_meta[0..32].copy_from_slice(blob_hash.hash.as_ref()); + blob_req_meta[32..40].copy_from_slice((blob_hash.index).to_be_bytes().as_ref()); + blob_req_meta[40..48].copy_from_slice(block_ref.timestamp.to_be_bytes().as_ref()); + + // Send a hint for the blob commitment and field elements. + HintType::L1Blob.with_data(&[blob_req_meta.as_ref()]).send(self.oracle.as_ref()).await?; + + // Fetch the blob commitment. + let mut commitment = [0u8; 48]; + self.oracle + .get_exact(PreimageKey::new(*blob_hash.hash, PreimageKeyType::Sha256), &mut commitment) + .await + .map_err(OracleProviderError::Preimage)?; + + // Reconstruct the blob from the 4096 field elements. + let mut blob = Blob::default(); + let mut field_element_key = [0u8; 80]; + field_element_key[..48].copy_from_slice(commitment.as_ref()); + for i in 0..FIELD_ELEMENTS_PER_BLOB { + field_element_key[48..] 
+ .copy_from_slice(ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().as_ref()); + + let mut field_element = [0u8; 32]; + self.oracle + .get_exact( + PreimageKey::new(*keccak256(field_element_key), PreimageKeyType::Blob), + &mut field_element, + ) + .await + .map_err(OracleProviderError::Preimage)?; + blob[(i as usize) << 5..(i as usize + 1) << 5].copy_from_slice(field_element.as_ref()); + } + + tracing::info!( + target: "client_blob_oracle", + index = blob_hash.index, + hash = ?blob_hash.hash, + "Retrieved blob" + ); + + Ok(blob) + } +} + +#[async_trait] +impl BlobProvider for OracleBlobProvider { + type Error = OracleProviderError; + + #[allow(clippy::large_stack_frames)] + async fn get_and_validate_blobs( + &mut self, + block_ref: &BlockInfo, + blob_hashes: &[IndexedBlobHash], + ) -> Result>, Self::Error> { + let mut blobs = Vec::with_capacity(blob_hashes.len()); + for hash in blob_hashes { + blobs.push(Box::new(self.get_blob(block_ref, hash).await?)); + } + Ok(blobs) + } +} + +/// The 4096th bit-reversed roots of unity used in EIP-4844 as predefined evaluation points. +/// +/// See `generate_roots_of_unity` for details on how these roots of unity are generated. +pub static ROOTS_OF_UNITY: Lazy<[Fr; FIELD_ELEMENTS_PER_BLOB as usize]> = + Lazy::new(generate_roots_of_unity); + +/// Generates the 4096th bit-reversed roots of unity used in EIP-4844 as predefined evaluation +/// points. To compute the field element at index i in a blob, the blob polynomial is evaluated at +/// the i'th root of unity. Based on go-kzg-4844: +/// Also, see the consensus specs: +/// - `compute_roots_of_unity` +/// - bit-reversal permutation: +fn generate_roots_of_unity() -> [Fr; FIELD_ELEMENTS_PER_BLOB as usize] { + const MAX_ORDER_ROOT: u64 = 32; + + let mut roots_of_unity = [Fr::ZERO; FIELD_ELEMENTS_PER_BLOB as usize]; + + // Generator of the largest 2-adic subgroup of order 2^32. 
+ let root_of_unity = Fr::new( + BigInteger256::from_str( + "10238227357739495823651030575849232062558860180284477541189508159991286009131", + ) + .expect("Failed to initialize root of unity"), + ); + + // Find generator subgroup of order x. + // This can be constructed by powering a generator of the largest 2-adic subgroup of order 2^32 + // by an exponent of (2^32)/x, provided x is <= 2^32. + let log_x = FIELD_ELEMENTS_PER_BLOB.trailing_zeros() as u64; + let expo = 1u64 << (MAX_ORDER_ROOT - log_x); + + // Generator has order x now + let generator = root_of_unity.pow([expo]); + + // Compute all relevant roots of unity, i.e. the multiplicative subgroup of size x + let mut current = Fr::ONE; + (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { + roots_of_unity[i as usize] = current; + current *= generator; + }); + + let shift_correction = 64 - FIELD_ELEMENTS_PER_BLOB.trailing_zeros(); + (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { + // Find index irev, such that i and irev get swapped + let irev = i.reverse_bits() >> shift_correction; + if irev > i { + roots_of_unity.swap(i as usize, irev as usize); + } + }); + + roots_of_unity +} + +#[cfg(test)] +mod test { + use super::ROOTS_OF_UNITY; + use alloy_eips::eip4844::{FIELD_ELEMENTS_PER_BLOB, env_settings::EnvKzgSettings}; + use ark_ff::{BigInteger, PrimeField}; + use c_kzg::{BYTES_PER_BLOB, Blob, Bytes32, Bytes48}; + use rand::Rng; + use rayon::iter::{IntoParallelIterator, ParallelIterator}; + + #[test] + fn test_roots_of_unity() { + // Initiate the default Ethereum KZG settings. + let kzg = EnvKzgSettings::default(); + + // Create a blob with random data + let mut bytes = [0u8; BYTES_PER_BLOB]; + rand::rng().fill(bytes.as_mut_slice()); + + // Ensure the blob is valid by keeping each field element within range. 
+ (0..FIELD_ELEMENTS_PER_BLOB).for_each(|i| { + bytes[(i as usize) << 5] = 0; + }); + + let blob = Blob::new(bytes); + let blob_commitment = { + let raw = kzg.get().blob_to_kzg_commitment(&blob).unwrap(); + Bytes48::new(raw.as_slice().try_into().unwrap()) + }; + + // Validate each field element in the blob + (0..FIELD_ELEMENTS_PER_BLOB).into_par_iter().for_each(|i| { + let field_element = { + let mut fe = [0u8; 32]; + fe.copy_from_slice(&blob[(i as usize) << 5..(i as usize + 1) << 5]); + Bytes32::new(fe) + }; + + let z_bytes = Bytes32::new(ROOTS_OF_UNITY[i as usize].into_bigint().to_bytes_be().try_into().unwrap()); + let (proof, fe) = kzg.get().compute_kzg_proof(&blob, &z_bytes).unwrap(); + + // Ensure the field element matches the expected value + assert_eq!( + fe.as_slice(), + field_element.as_slice(), + "Field element {i} does not match the expected value. Expected: {field_element:?}, Got: {fe:?}" + ); + + // Ensure the proof can be verified + let proof_bytes = Bytes48::new(proof.as_slice().try_into().unwrap()); + let is_valid = kzg.get().verify_kzg_proof(&blob_commitment, &z_bytes, &field_element, &proof_bytes).unwrap(); + assert!( + is_valid, + "KZG proof verification failed for field element {i}. Commitment: {blob_commitment:?}, Z: {z_bytes:?}, Field Element: {field_element:?}, Proof: {proof_bytes:?}" + ); + }); + } +} diff --git a/rust/kona/crates/proof/proof/src/l1/chain_provider.rs b/rust/kona/crates/proof/proof/src/l1/chain_provider.rs new file mode 100644 index 00000000000..8eaf2c7eb65 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/l1/chain_provider.rs @@ -0,0 +1,141 @@ +//! Contains the concrete implementation of the [`ChainProvider`] trait for the proof. 
+ +use crate::{HintType, errors::OracleProviderError}; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloy_consensus::{Header, Receipt, ReceiptEnvelope, TxEnvelope}; +use alloy_eips::eip2718::Decodable2718; +use alloy_primitives::B256; +use alloy_rlp::Decodable; +use async_trait::async_trait; +use kona_derive::ChainProvider; +use kona_mpt::{OrderedListWalker, TrieNode, TrieProvider}; +use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; +use kona_protocol::BlockInfo; + +/// The oracle-backed L1 chain provider for the client program. +#[derive(Debug, Clone)] +pub struct OracleL1ChainProvider { + /// The L1 head hash. + pub l1_head: B256, + /// The preimage oracle client. + pub oracle: Arc, +} + +impl OracleL1ChainProvider { + /// Creates a new [`OracleL1ChainProvider`] with the given boot information and oracle client. + pub const fn new(l1_head: B256, oracle: Arc) -> Self { + Self { l1_head, oracle } + } +} + +#[async_trait] +impl ChainProvider for OracleL1ChainProvider { + type Error = OracleProviderError; + + async fn header_by_hash(&mut self, hash: B256) -> Result { + // Fetch the header RLP from the oracle. + HintType::L1BlockHeader.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; + let header_rlp = self.oracle.get(PreimageKey::new_keccak256(*hash)).await?; + + // Decode the header RLP into a Header. + Header::decode(&mut header_rlp.as_slice()).map_err(OracleProviderError::Rlp) + } + + async fn block_info_by_number(&mut self, block_number: u64) -> Result { + // Fetch the starting block header. + let mut header = self.header_by_hash(self.l1_head).await?; + + // Check if the block number is in range. If not, we can fail early. + if block_number > header.number { + return Err(OracleProviderError::BlockNumberPastHead(block_number, header.number)); + } + + // Walk back the block headers to the desired block number. 
+ while header.number > block_number { + header = self.header_by_hash(header.parent_hash).await?; + } + + Ok(BlockInfo { + hash: header.hash_slow(), + number: header.number, + parent_hash: header.parent_hash, + timestamp: header.timestamp, + }) + } + + async fn receipts_by_hash(&mut self, hash: B256) -> Result, Self::Error> { + // Fetch the block header to find the receipts root. + let header = self.header_by_hash(hash).await?; + + // Send a hint for the block's receipts, and walk through the receipts trie in the header to + // verify them. + HintType::L1Receipts.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; + let trie_walker = OrderedListWalker::try_new_hydrated(header.receipts_root, self) + .map_err(OracleProviderError::TrieWalker)?; + + // Decode the receipts within the receipts trie. + let receipts = trie_walker + .into_iter() + .map(|(_, rlp)| { + let envelope = ReceiptEnvelope::decode_2718(&mut rlp.as_ref())?; + Ok(envelope.as_receipt().expect("Infallible").clone()) + }) + .collect::, _>>() + .map_err(OracleProviderError::Rlp)?; + + Ok(receipts) + } + + async fn block_info_and_transactions_by_hash( + &mut self, + hash: B256, + ) -> Result<(BlockInfo, Vec), Self::Error> { + // Fetch the block header to construct the block info. + let header = self.header_by_hash(hash).await?; + let block_info = BlockInfo { + hash, + number: header.number, + parent_hash: header.parent_hash, + timestamp: header.timestamp, + }; + + // Send a hint for the block's transactions, and walk through the transactions trie in the + // header to verify them. + HintType::L1Transactions.with_data(&[hash.as_ref()]).send(self.oracle.as_ref()).await?; + let trie_walker = OrderedListWalker::try_new_hydrated(header.transactions_root, self) + .map_err(OracleProviderError::TrieWalker)?; + + // Decode the transactions within the transactions trie. + let transactions = trie_walker + .into_iter() + .map(|(_, rlp)| { + // note: not short-handed for error type coercion w/ `?`. 
+ let rlp = TxEnvelope::decode_2718(&mut rlp.as_ref())?; + Ok(rlp) + }) + .collect::, _>>() + .map_err(OracleProviderError::Rlp)?; + + Ok((block_info, transactions)) + } +} + +impl TrieProvider for OracleL1ChainProvider { + type Error = OracleProviderError; + + fn trie_node_by_hash(&self, key: B256) -> Result { + // On L1, trie node preimages are stored as keccak preimage types in the oracle. We assume + // that a hint for these preimages has already been sent, prior to this call. + crate::block_on(async move { + TrieNode::decode( + &mut self + .oracle + .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) + .await + .map_err(OracleProviderError::Preimage)? + .as_ref(), + ) + .map_err(OracleProviderError::Rlp) + }) + } +} diff --git a/kona/crates/proof/proof/src/l1/mod.rs b/rust/kona/crates/proof/proof/src/l1/mod.rs similarity index 100% rename from kona/crates/proof/proof/src/l1/mod.rs rename to rust/kona/crates/proof/proof/src/l1/mod.rs diff --git a/rust/kona/crates/proof/proof/src/l1/pipeline.rs b/rust/kona/crates/proof/proof/src/l1/pipeline.rs new file mode 100644 index 00000000000..5d954d5e8c0 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/l1/pipeline.rs @@ -0,0 +1,183 @@ +//! Contains an oracle-backed pipeline. + +use crate::FlushableCache; +use alloc::{boxed::Box, sync::Arc}; +use async_trait::async_trait; +use core::fmt::Debug; +use kona_derive::{ + ChainProvider, DataAvailabilityProvider, DerivationPipeline, L2ChainProvider, OriginProvider, + Pipeline, PipelineBuilder, PipelineErrorKind, PipelineResult, PolledAttributesQueueStage, + ResetSignal, Signal, SignalReceiver, StatefulAttributesBuilder, StepResult, +}; +use kona_driver::{DriverPipeline, PipelineCursor}; +use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; +use kona_preimage::CommsClient; +use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; +use spin::RwLock; + +/// An oracle-backed derivation pipeline. 
+pub type ProviderDerivationPipeline = DerivationPipeline< + PolledAttributesQueueStage>, + L2, +>; + +/// An oracle-backed payload attributes builder for the `AttributesQueue` stage of the derivation +/// pipeline. +pub type ProviderAttributesBuilder = StatefulAttributesBuilder; + +/// An oracle-backed derivation pipeline. +#[derive(Debug)] +pub struct OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// The internal derivation pipeline. + pub pipeline: ProviderDerivationPipeline, + /// The caching oracle. + pub caching_oracle: Arc, +} + +impl OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// Constructs a new oracle-backed derivation pipeline. + pub async fn new( + cfg: Arc, + l1_cfg: Arc, + sync_start: Arc>, + caching_oracle: Arc, + da_provider: DA, + chain_provider: L1, + mut l2_chain_provider: L2, + ) -> PipelineResult { + let attributes = StatefulAttributesBuilder::new( + cfg.clone(), + l1_cfg, + l2_chain_provider.clone(), + chain_provider.clone(), + ); + + let cfg_for_reset = cfg.clone(); + + let mut pipeline = PipelineBuilder::new() + .rollup_config(cfg) + .dap_source(da_provider) + .l2_chain_provider(l2_chain_provider.clone()) + .chain_provider(chain_provider) + .builder(attributes) + .origin(sync_start.read().origin()) + .build_polled(); + + // Reset the pipeline to populate the initial system configuration in L1 Traversal. 
+ let l2_safe_head = *sync_start.read().l2_safe_head(); + pipeline + .signal( + ResetSignal { + l2_safe_head, + l1_origin: sync_start.read().origin(), + system_config: l2_chain_provider + .system_config_by_number(l2_safe_head.block_info.number, cfg_for_reset) + .await + .ok(), + } + .signal(), + ) + .await?; + + Ok(Self { pipeline, caching_oracle }) + } +} + +impl DriverPipeline> + for OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// Flushes the cache on re-org. + fn flush(&mut self) { + self.caching_oracle.flush(); + } +} + +#[async_trait] +impl SignalReceiver for OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// Receives a signal from the driver. + async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { + self.pipeline.signal(signal).await + } +} + +impl OriginProvider for OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// Returns the optional L1 [`BlockInfo`] origin. 
+ fn origin(&self) -> Option { + self.pipeline.origin() + } +} + +impl Iterator for OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + type Item = OpAttributesWithParent; + + fn next(&mut self) -> Option { + self.pipeline.next() + } +} + +#[async_trait] +impl Pipeline for OraclePipeline +where + O: CommsClient + FlushableCache + Send + Sync + Debug, + L1: ChainProvider + Send + Sync + Debug + Clone, + L2: L2ChainProvider + Send + Sync + Debug + Clone, + DA: DataAvailabilityProvider + Send + Sync + Debug + Clone, +{ + /// Peeks at the next [`OpAttributesWithParent`] from the pipeline. + fn peek(&self) -> Option<&OpAttributesWithParent> { + self.pipeline.peek() + } + + /// Attempts to progress the pipeline. + async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { + self.pipeline.step(cursor).await + } + + /// Returns the rollup config. + fn rollup_config(&self) -> &RollupConfig { + self.pipeline.rollup_config() + } + + /// Returns the [`SystemConfig`] by L2 number. + async fn system_config_by_number( + &mut self, + number: u64, + ) -> Result { + self.pipeline.system_config_by_number(number).await + } +} diff --git a/rust/kona/crates/proof/proof/src/l2/chain_provider.rs b/rust/kona/crates/proof/proof/src/l2/chain_provider.rs new file mode 100644 index 00000000000..678f2fea665 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/l2/chain_provider.rs @@ -0,0 +1,281 @@ +//! Contains the concrete implementation of the [`L2ChainProvider`] trait for the client program. 
+ +use crate::{HintType, eip2935::eip_2935_history_lookup, errors::OracleProviderError}; +use alloc::{boxed::Box, sync::Arc, vec::Vec}; +use alloy_consensus::{BlockBody, Header}; +use alloy_eips::eip2718::Decodable2718; +use alloy_primitives::{Address, B256, Bytes}; +use alloy_rlp::Decodable; +use async_trait::async_trait; +use kona_derive::L2ChainProvider; +use kona_driver::PipelineCursor; +use kona_executor::TrieDBProvider; +use kona_genesis::{RollupConfig, SystemConfig}; +use kona_mpt::{OrderedListWalker, TrieHinter, TrieNode, TrieProvider}; +use kona_preimage::{CommsClient, PreimageKey, PreimageKeyType}; +use kona_protocol::{BatchValidationProvider, L2BlockInfo, to_system_config}; +use op_alloy_consensus::{OpBlock, OpTxEnvelope}; +use spin::RwLock; + +/// The oracle-backed L2 chain provider for the client program. +#[derive(Debug, Clone)] +pub struct OracleL2ChainProvider { + /// The L2 safe head block hash. + l2_head: B256, + /// The rollup configuration. + rollup_config: Arc, + /// The preimage oracle client. + oracle: Arc, + /// The derivation pipeline cursor + cursor: Option>>, + /// The L2 chain ID to use for the provider's hints. + chain_id: Option, +} + +impl OracleL2ChainProvider { + /// Creates a new [`OracleL2ChainProvider`] with the given boot information and oracle client. + pub const fn new(l2_head: B256, rollup_config: Arc, oracle: Arc) -> Self { + Self { l2_head, rollup_config, oracle, cursor: None, chain_id: None } + } + + /// Sets the L2 chain ID to use for the provider's hints. + pub const fn set_chain_id(&mut self, chain_id: Option) { + self.chain_id = chain_id; + } + + /// Updates the derivation pipeline cursor + pub fn set_cursor(&mut self, cursor: Arc>) { + self.cursor = Some(cursor); + } + + /// Fetches the latest known safe head block hash according to the derivation pipeline cursor + /// or uses the initial `l2_head` value if no cursor is set. 
+ pub async fn l2_safe_head(&self) -> Result { + self.cursor + .as_ref() + .map_or(Ok(self.l2_head), |cursor| Ok(cursor.read().l2_safe_head().block_info.hash)) + } +} + +impl OracleL2ChainProvider { + /// Returns a [Header] corresponding to the given L2 block number, by walking back from the + /// L2 safe head. + async fn header_by_number(&self, block_number: u64) -> Result { + // Fetch the starting block header. + let mut header = self.header_by_hash(self.l2_safe_head().await?)?; + + // Check if the block number is in range. If not, we can fail early. + if block_number > header.number { + return Err(OracleProviderError::BlockNumberPastHead(block_number, header.number)); + } + + let mut linear_fallback = false; + while header.number > block_number { + if self.rollup_config.is_isthmus_active(header.timestamp) && !linear_fallback { + // If Isthmus is active, the EIP-2935 contract is used to perform leaping lookbacks + // through consulting the ring buffer within the contract. If this + // lookup fails for any reason, we fall back to linear walk back. + let block_hash = + match eip_2935_history_lookup(&header, block_number, self, self).await { + Ok(hash) => hash, + Err(_) => { + // If the EIP-2935 lookup fails for any reason, attempt fallback to + // linear walk back. + linear_fallback = true; + continue; + } + }; + + header = self.header_by_hash(block_hash)?; + } else { + // Walk back the block headers one-by-one until the desired block number is reached. + header = self.header_by_hash(header.parent_hash)?; + } + } + + Ok(header) + } +} + +#[async_trait] +impl BatchValidationProvider for OracleL2ChainProvider { + type Error = OracleProviderError; + + async fn l2_block_info_by_number(&mut self, number: u64) -> Result { + // Get the block at the given number. + let block = self.block_by_number(number).await?; + + // Construct the system config from the payload. 
+ L2BlockInfo::from_block_and_genesis(&block, &self.rollup_config.genesis) + .map_err(OracleProviderError::BlockInfo) + } + + async fn block_by_number(&mut self, number: u64) -> Result { + // Fetch the header for the given block number. + let header @ Header { transactions_root, timestamp, .. } = + self.header_by_number(number).await?; + let header_hash = header.hash_slow(); + + // Fetch the transactions in the block. + HintType::L2Transactions + .with_data(&[header_hash.as_ref()]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await?; + let trie_walker = OrderedListWalker::try_new_hydrated(transactions_root, self) + .map_err(OracleProviderError::TrieWalker)?; + + // Decode the transactions within the transactions trie. + let transactions = trie_walker + .into_iter() + .map(|(_, rlp)| { + let res = OpTxEnvelope::decode_2718(&mut rlp.as_ref())?; + Ok(res) + }) + .collect::, _>>() + .map_err(OracleProviderError::Rlp)?; + + let optimism_block = OpBlock { + header, + body: BlockBody { + transactions, + ommers: Vec::new(), + withdrawals: self + .rollup_config + .is_canyon_active(timestamp) + .then(|| alloy_eips::eip4895::Withdrawals::new(Vec::new())), + }, + }; + Ok(optimism_block) + } +} + +#[async_trait] +impl L2ChainProvider for OracleL2ChainProvider { + type Error = OracleProviderError; + + async fn system_config_by_number( + &mut self, + number: u64, + rollup_config: Arc, + ) -> Result::Error> { + // Get the block at the given number. + let block = self.block_by_number(number).await?; + + // Construct the system config from the payload. + to_system_config(&block, rollup_config.as_ref()) + .map_err(OracleProviderError::OpBlockConversion) + } +} + +impl TrieProvider for OracleL2ChainProvider { + type Error = OracleProviderError; + + fn trie_node_by_hash(&self, key: B256) -> Result { + // On L2, trie node preimages are stored as keccak preimage types in the oracle. 
We assume + // that a hint for these preimages has already been sent, prior to this call. + crate::block_on(async move { + TrieNode::decode( + &mut self + .oracle + .get(PreimageKey::new(*key, PreimageKeyType::Keccak256)) + .await + .map_err(OracleProviderError::Preimage)? + .as_ref(), + ) + .map_err(OracleProviderError::Rlp) + }) + } +} + +impl TrieDBProvider for OracleL2ChainProvider { + fn bytecode_by_hash(&self, hash: B256) -> Result { + // Fetch the bytecode preimage from the caching oracle. + crate::block_on(async move { + HintType::L2Code + .with_data(&[hash.as_slice()]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await?; + self.oracle + .get(PreimageKey::new_keccak256(*hash)) + .await + .map(Into::into) + .map_err(OracleProviderError::Preimage) + }) + } + + fn header_by_hash(&self, hash: B256) -> Result { + // Fetch the header from the caching oracle. + crate::block_on(async move { + HintType::L2BlockHeader + .with_data(&[hash.as_slice()]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await?; + let header_bytes = self.oracle.get(PreimageKey::new_keccak256(*hash)).await?; + + Header::decode(&mut header_bytes.as_slice()).map_err(OracleProviderError::Rlp) + }) + } +} + +impl TrieHinter for OracleL2ChainProvider { + type Error = OracleProviderError; + + fn hint_trie_node(&self, hash: B256) -> Result<(), Self::Error> { + crate::block_on(async move { + HintType::L2StateNode + .with_data(&[hash.as_slice()]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_account_proof(&self, address: Address, block_number: u64) -> Result<(), Self::Error> { + crate::block_on(async move { + HintType::L2AccountProof + .with_data(&[block_number.to_be_bytes().as_ref(), address.as_slice()]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| 
id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_storage_proof( + &self, + address: alloy_primitives::Address, + slot: alloy_primitives::U256, + block_number: u64, + ) -> Result<(), Self::Error> { + crate::block_on(async move { + HintType::L2AccountStorageProof + .with_data(&[ + block_number.to_be_bytes().as_ref(), + address.as_slice(), + slot.to_be_bytes::<32>().as_ref(), + ]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await + }) + } + + fn hint_execution_witness( + &self, + parent_hash: B256, + op_payload_attributes: &op_alloy_rpc_types_engine::OpPayloadAttributes, + ) -> Result<(), Self::Error> { + crate::block_on(async move { + let encoded_attributes = + serde_json::to_vec(op_payload_attributes).map_err(OracleProviderError::Serde)?; + + HintType::L2PayloadWitness + .with_data(&[parent_hash.as_slice(), &encoded_attributes]) + .with_data(self.chain_id.map_or_else(Vec::new, |id| id.to_be_bytes().to_vec())) + .send(self.oracle.as_ref()) + .await + }) + } +} diff --git a/kona/crates/proof/proof/src/l2/mod.rs b/rust/kona/crates/proof/proof/src/l2/mod.rs similarity index 100% rename from kona/crates/proof/proof/src/l2/mod.rs rename to rust/kona/crates/proof/proof/src/l2/mod.rs diff --git a/rust/kona/crates/proof/proof/src/lib.rs b/rust/kona/crates/proof/proof/src/lib.rs new file mode 100644 index 00000000000..4fc669150e5 --- /dev/null +++ b/rust/kona/crates/proof/proof/src/lib.rs @@ -0,0 +1,38 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![no_std] + +extern crate alloc; + +#[macro_use] 
+extern crate tracing; + +pub mod l1; + +pub mod l2; + +pub mod sync; + +pub mod errors; + +pub mod executor; + +mod hint; +pub use hint::{Hint, HintType}; + +pub mod boot; +pub use boot::BootInfo; + +mod caching_oracle; +pub use caching_oracle::{CachingOracle, FlushableCache}; + +mod blocking_runtime; +pub use blocking_runtime::block_on; + +mod eip2935; +pub use eip2935::eip_2935_history_lookup; diff --git a/kona/crates/proof/proof/src/sync.rs b/rust/kona/crates/proof/proof/src/sync.rs similarity index 100% rename from kona/crates/proof/proof/src/sync.rs rename to rust/kona/crates/proof/proof/src/sync.rs diff --git a/kona/crates/proof/std-fpvm-proc/CHANGELOG.md b/rust/kona/crates/proof/std-fpvm-proc/CHANGELOG.md similarity index 100% rename from kona/crates/proof/std-fpvm-proc/CHANGELOG.md rename to rust/kona/crates/proof/std-fpvm-proc/CHANGELOG.md diff --git a/rust/kona/crates/proof/std-fpvm-proc/Cargo.toml b/rust/kona/crates/proof/std-fpvm-proc/Cargo.toml new file mode 100644 index 00000000000..994e81b926a --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm-proc/Cargo.toml @@ -0,0 +1,27 @@ +[package] +name = "kona-std-fpvm-proc" +description = "Proc macro entry point for `kona-std-fpvm` targeted programs." 
+version = "0.2.0" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true + +[lib] +proc-macro = true + +[dependencies] +# General +cfg-if.workspace = true + +# Workspace +kona-std-fpvm.workspace = true + +# Proc Macros +quote.workspace = true +proc-macro2.workspace = true +syn = { workspace = true, features = ["full"] } + +[package.metadata.cargo-udeps.ignore] +normal = ["kona-std-fpvm"] diff --git a/kona/crates/proof/std-fpvm-proc/src/lib.rs b/rust/kona/crates/proof/std-fpvm-proc/src/lib.rs similarity index 100% rename from kona/crates/proof/std-fpvm-proc/src/lib.rs rename to rust/kona/crates/proof/std-fpvm-proc/src/lib.rs diff --git a/kona/crates/proof/std-fpvm/CHANGELOG.md b/rust/kona/crates/proof/std-fpvm/CHANGELOG.md similarity index 100% rename from kona/crates/proof/std-fpvm/CHANGELOG.md rename to rust/kona/crates/proof/std-fpvm/CHANGELOG.md diff --git a/kona/crates/proof/std-fpvm/Cargo.toml b/rust/kona/crates/proof/std-fpvm/Cargo.toml similarity index 100% rename from kona/crates/proof/std-fpvm/Cargo.toml rename to rust/kona/crates/proof/std-fpvm/Cargo.toml diff --git a/rust/kona/crates/proof/std-fpvm/README.md b/rust/kona/crates/proof/std-fpvm/README.md new file mode 100644 index 00000000000..afcb69870f7 --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm/README.md @@ -0,0 +1,5 @@ +# `kona-std-fpvm` + +Platform specific [Fault Proof VM][g-fault-proof-vm] kernel APIs. + +[g-fault-proof-vm]: https://specs.optimism.io/experimental/fault-proof/index.html#fault-proof-vm diff --git a/rust/kona/crates/proof/std-fpvm/src/channel.rs b/rust/kona/crates/proof/std-fpvm/src/channel.rs new file mode 100644 index 00000000000..80fe03fd05b --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm/src/channel.rs @@ -0,0 +1,158 @@ +//! This module contains a rudimentary channel between two file descriptors, using [`crate::io`] +//! for reading and writing from the file descriptors. 
+ +use crate::{FileDescriptor, io}; +use alloc::boxed::Box; +use async_trait::async_trait; +use core::{ + cell::RefCell, + cmp::Ordering, + future::Future, + pin::Pin, + task::{Context, Poll}, +}; +use kona_preimage::{ + Channel, + errors::{ChannelError, ChannelResult}, +}; + +/// [`FileChannel`] is a handle for one end of a bidirectional channel. +#[derive(Debug, Clone, Copy)] +pub struct FileChannel { + /// File descriptor to read from + read_handle: FileDescriptor, + /// File descriptor to write to + write_handle: FileDescriptor, +} + +impl FileChannel { + /// Create a new [`FileChannel`] from two file descriptors. + pub const fn new(read_handle: FileDescriptor, write_handle: FileDescriptor) -> Self { + Self { read_handle, write_handle } + } + + /// Returns a copy of the [`FileDescriptor`] used for the read end of the channel. + pub const fn read_handle(&self) -> FileDescriptor { + self.read_handle + } + + /// Returns a copy of the [`FileDescriptor`] used for the write end of the channel. + pub const fn write_handle(&self) -> FileDescriptor { + self.write_handle + } +} + +#[async_trait] +impl Channel for FileChannel { + async fn read(&self, buf: &mut [u8]) -> ChannelResult { + io::read(self.read_handle, buf).map_err(|_| ChannelError::Closed) + } + + async fn read_exact(&self, buf: &mut [u8]) -> ChannelResult { + ReadFuture::new(*self, buf).await.map_err(|_| ChannelError::Closed) + } + + async fn write(&self, buf: &[u8]) -> ChannelResult { + WriteFuture::new(*self, buf).await.map_err(|_| ChannelError::Closed) + } +} + +/// A future that reads from a channel, returning [`Poll::Ready`] when the buffer is full. +struct ReadFuture<'a> { + /// The channel to read from + channel: FileChannel, + /// The buffer to read into + buf: RefCell<&'a mut [u8]>, + /// The number of bytes read so far + read: usize, +} + +impl<'a> ReadFuture<'a> { + /// Create a new [`ReadFuture`] from a channel and a buffer. 
+ #[allow(clippy::missing_const_for_fn)] + fn new(channel: FileChannel, buf: &'a mut [u8]) -> Self { + Self { channel, buf: RefCell::new(buf), read: 0 } + } +} + +impl Future for ReadFuture<'_> { + type Output = ChannelResult; + + fn poll(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll { + let mut buf = self.buf.borrow_mut(); + let buf_len = buf.len(); + let chunk_read = io::read(self.channel.read_handle, &mut buf[self.read..]) + .map_err(|_| ChannelError::Closed)?; + + // Drop the borrow on self. + drop(buf); + + self.read += chunk_read; + + match self.read.cmp(&buf_len) { + Ordering::Greater | Ordering::Equal => Poll::Ready(Ok(self.read)), + Ordering::Less => { + // Register the current task to be woken up when it can make progress + ctx.waker().wake_by_ref(); + Poll::Pending + } + } + } +} + +/// A future that writes to a channel, returning [`Poll::Ready`] when the full buffer has been +/// written. +struct WriteFuture<'a> { + /// The channel to write to + channel: FileChannel, + /// The buffer to write + buf: &'a [u8], + /// The number of bytes written so far + written: usize, +} + +impl<'a> WriteFuture<'a> { + /// Create a new [`WriteFuture`] from a channel and a buffer. 
+    const fn new(channel: FileChannel, buf: &'a [u8]) -> Self {
+        Self { channel, buf, written: 0 }
+    }
+}
+
+impl Future for WriteFuture<'_> {
+    type Output = ChannelResult<usize>;
+
+    fn poll(mut self: Pin<&mut Self>, ctx: &mut Context<'_>) -> Poll<Self::Output> {
+        match io::write(self.channel.write_handle(), &self.buf[self.written..]) {
+            Ok(n) => {
+                self.written += n;
+
+                match self.written.cmp(&self.buf.len()) {
+                    Ordering::Equal | Ordering::Greater => {
+                        // Finished writing
+                        Poll::Ready(Ok(self.written))
+                    }
+                    Ordering::Less => {
+                        // Register the current task to be woken up when it can make progress
+                        ctx.waker().wake_by_ref();
+                        Poll::Pending
+                    }
+                }
+            }
+            Err(_) => Poll::Ready(Err(ChannelError::Closed)),
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_get_read_handle() {
+        let read_handle = FileDescriptor::StdIn;
+        let write_handle = FileDescriptor::StdOut;
+        let chan = FileChannel::new(read_handle, write_handle);
+        let ref_read_handle = chan.read_handle();
+        assert_eq!(read_handle, ref_read_handle);
+    }
+}
diff --git a/rust/kona/crates/proof/std-fpvm/src/errors.rs b/rust/kona/crates/proof/std-fpvm/src/errors.rs
new file mode 100644
index 00000000000..4f9d7e59b45
--- /dev/null
+++ b/rust/kona/crates/proof/std-fpvm/src/errors.rs
@@ -0,0 +1,11 @@
+//! Errors for the `kona-std-fpvm` crate.
+
+use thiserror::Error;
+
+/// An error that can occur when reading from or writing to a file descriptor.
+#[derive(Error, Debug, PartialEq, Eq)]
+#[error("IO error (errno: {_0})")]
+pub struct IOError(pub i32);
+
+/// A [Result] type for the [`IOError`].
+pub type IOResult<T> = Result<T, IOError>;
diff --git a/rust/kona/crates/proof/std-fpvm/src/io.rs b/rust/kona/crates/proof/std-fpvm/src/io.rs
new file mode 100644
index 00000000000..da45a8e6f9f
--- /dev/null
+++ b/rust/kona/crates/proof/std-fpvm/src/io.rs
@@ -0,0 +1,93 @@
+//! This module contains the `ClientIO` struct, which is a system call interface for the kernel.
+
+use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult};
+use cfg_if::cfg_if;
+
+cfg_if! {
+    if #[cfg(target_arch = "mips64")] {
+        #[doc = "Concrete implementation of the [`BasicKernelInterface`] trait for the `MIPS64r2` target architecture."]
+        pub(crate) type ClientIO = crate::mips64::io::Mips64IO;
+    } else if #[cfg(target_arch = "riscv64")] {
+        #[doc = "Concrete implementation of the [`BasicKernelInterface`] trait for the `riscv64` target architecture."]
+        pub(crate) type ClientIO = crate::riscv64::io::RiscV64IO;
+    } else {
+        use std::{fs::File, os::fd::FromRawFd, io::{Read, Write}};
+        use crate::errors::IOError;
+
+        #[doc = "Native implementation of the [`BasicKernelInterface`] trait."]
+        pub(crate) struct NativeClientIO;
+
+        impl BasicKernelInterface for NativeClientIO {
+            fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> {
+                unsafe {
+                    let mut file = File::from_raw_fd(fd as i32);
+                    file.write_all(buf).map_err(|_| IOError(-9))?;
+                    std::mem::forget(file);
+                    Ok(buf.len())
+                }
+            }
+
+            fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> {
+                unsafe {
+                    let mut file = File::from_raw_fd(fd as i32);
+                    file.read_exact(buf).map_err(|_| IOError(-9))?;
+                    std::mem::forget(file);
+                    Ok(buf.len())
+                }
+            }
+
+            fn mmap(_size: usize) -> IOResult<usize> {
+                unimplemented!("mmap is unimplemented for the native target; The default global allocator is favored.");
+            }
+
+            fn exit(code: usize) -> ! {
+                std::process::exit(code as i32)
+            }
+        }
+
+        #[doc = "Native implementation of the [`BasicKernelInterface`] trait."]
+        pub(crate) type ClientIO = NativeClientIO;
+    }
+}
+
+/// Print the passed string to the standard output [`FileDescriptor`].
+///
+/// # Panics
+/// Panics if the write operation fails.
+#[inline]
+pub fn print(s: &str) {
+    ClientIO::write(FileDescriptor::StdOut, s.as_bytes()).expect("Error writing to stdout.");
+}
+
+/// Print the passed string to the standard error [`FileDescriptor`].
+///
+/// # Panics
+/// Panics if the write operation fails.
+#[inline]
+pub fn print_err(s: &str) {
+    ClientIO::write(FileDescriptor::StdErr, s.as_bytes()).expect("Error writing to stderr.");
+}
+
+/// Write the passed buffer to the given [`FileDescriptor`].
+#[inline]
+pub fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> {
+    ClientIO::write(fd, buf)
+}
+
+/// Read from the given [`FileDescriptor`] into the passed buffer.
+#[inline]
+pub fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> {
+    ClientIO::read(fd, buf)
+}
+
+/// Map new memory of block size `size`. Returns the new heap pointer.
+#[inline]
+pub fn mmap(size: usize) -> IOResult<usize> {
+    ClientIO::mmap(size)
+}
+
+/// Exit the process with the given exit code.
+#[inline]
+pub fn exit(code: usize) -> ! {
+    ClientIO::exit(code)
+}
diff --git a/rust/kona/crates/proof/std-fpvm/src/lib.rs b/rust/kona/crates/proof/std-fpvm/src/lib.rs
new file mode 100644
index 00000000000..6f6c83d0305
--- /dev/null
+++ b/rust/kona/crates/proof/std-fpvm/src/lib.rs
@@ -0,0 +1,37 @@
+#![doc = include_str!("../README.md")]
+#![doc(
+    html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png",
+    html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico",
+    issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/"
+)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+#![cfg_attr(target_arch = "mips64", feature(asm_experimental_arch))]
+#![cfg_attr(any(target_arch = "mips64", target_arch = "riscv64"), no_std)]
+
+extern crate alloc;
+
+pub mod errors;
+
+pub mod io;
+
+#[cfg(feature = "tracing")]
+pub mod tracing;
+
+pub mod malloc;
+
+mod traits;
+pub use traits::BasicKernelInterface;
+
+mod types;
+pub use types::FileDescriptor;
+
+mod channel;
+pub use channel::FileChannel;
+
+pub(crate) mod linux;
+
+#[cfg(target_arch = "mips64")]
+pub(crate) mod mips64;
+
+#[cfg(target_arch = "riscv64")]
+pub(crate) mod riscv64;
diff --git 
a/kona/crates/proof/std-fpvm/src/linux.rs b/rust/kona/crates/proof/std-fpvm/src/linux.rs
similarity index 91%
rename from kona/crates/proof/std-fpvm/src/linux.rs
rename to rust/kona/crates/proof/std-fpvm/src/linux.rs
index 8bb87e9652d..b2173cdccb3 100644
--- a/kona/crates/proof/std-fpvm/src/linux.rs
+++ b/rust/kona/crates/proof/std-fpvm/src/linux.rs
@@ -2,7 +2,7 @@
 
 use crate::errors::{IOError, IOResult};
 
-/// Converts a return value from a syscall into a [IOResult] type.
+/// Converts a return value from a syscall into a [`IOResult`] type.
 #[inline(always)]
 #[allow(unused)]
 pub(crate) const fn from_ret(value: usize) -> IOResult<usize> {
diff --git a/kona/crates/proof/std-fpvm/src/malloc.rs b/rust/kona/crates/proof/std-fpvm/src/malloc.rs
similarity index 95%
rename from kona/crates/proof/std-fpvm/src/malloc.rs
rename to rust/kona/crates/proof/std-fpvm/src/malloc.rs
index a97a85eeef3..2654e2958da 100644
--- a/kona/crates/proof/std-fpvm/src/malloc.rs
+++ b/rust/kona/crates/proof/std-fpvm/src/malloc.rs
@@ -16,7 +16,7 @@ pub mod global_allocator {
     #[global_allocator]
     static ALLOCATOR: LockedHeap = LockedHeap::empty();
 
-    /// Initialize the [SpinLockedAllocator] with the following parameters:
+    /// Initialize the [`SpinLockedAllocator`] with the following parameters:
     /// * `heap_start_addr` is the starting address of the heap memory region,
     /// * `heap_size` is the size of the heap memory region in bytes.
     ///
@@ -36,7 +36,7 @@ pub mod global_allocator {
 /// # Safety
 #[cfg_attr(
     any(target_arch = "mips64", target_arch = "riscv64"),
-    doc = "See [global_allocator::init_allocator] safety comment."
+    doc = "See [`global_allocator::init_allocator`] safety comment."
)] #[cfg_attr( not(any(target_arch = "mips64", target_arch = "riscv64")), diff --git a/rust/kona/crates/proof/std-fpvm/src/mips64/io.rs b/rust/kona/crates/proof/std-fpvm/src/mips64/io.rs new file mode 100644 index 00000000000..be382d19f63 --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm/src/mips64/io.rs @@ -0,0 +1,68 @@ +use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult, mips64::syscall}; + +/// Concrete implementation of the [`BasicKernelInterface`] trait for the `MIPS64r2` target +/// architecture. Exposes a safe interface for performing IO operations within the kernel. +#[derive(Debug)] +pub(crate) struct Mips64IO; + +/// Relevant system call numbers for the `MIPS64r2` target architecture. +/// +/// See [Cannon System Call Specification](https://specs.optimism.io/experimental/fault-proof/cannon-fault-proof-vm.html#syscalls) +/// +/// **Note**: This is not an exhaustive list of system calls available to the `client` program, +/// only the ones necessary for the [`BasicKernelInterface`] trait implementation. If an extension +/// trait for the [`BasicKernelInterface`] trait is created for the `Cannon` kernel, this list +/// should be extended accordingly. +#[repr(usize)] +pub(crate) enum SyscallNumber { + /// Sets the Exited and `ExitCode` states to true and $a0 respectively. + Exit = 5205, + /// Similar behavior as Linux/MIPS with support for unaligned reads. + Read = 5000, + /// Similar behavior as Linux/MIPS with support for unaligned writes. + Write = 5001, + /// Similar behavior as Linux/MIPS for mapping memory on the host machine. Only accepts 2 + /// arguments for cannon. 
+    Mmap = 5009,
+}
+
+impl BasicKernelInterface for Mips64IO {
+    fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> {
+        unsafe {
+            crate::linux::from_ret(syscall::syscall3(
+                SyscallNumber::Write as usize,
+                fd.into(),
+                buf.as_ptr() as usize,
+                buf.len(),
+            ))
+        }
+    }
+
+    fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> {
+        unsafe {
+            crate::linux::from_ret(syscall::syscall3(
+                SyscallNumber::Read as usize,
+                fd.into(),
+                buf.as_ptr() as usize,
+                buf.len(),
+            ))
+        }
+    }
+
+    fn mmap(size: usize) -> IOResult<usize> {
+        unsafe {
+            crate::linux::from_ret(syscall::syscall2(
+                SyscallNumber::Mmap as usize,
+                0usize, // anonymous map
+                size,
+            ))
+        }
+    }
+
+    fn exit(code: usize) -> ! {
+        unsafe {
+            let _ = syscall::syscall1(SyscallNumber::Exit as usize, code);
+            panic!("exit syscall returned unexpectedly with code: {}", code)
+        }
+    }
+}
diff --git a/rust/kona/crates/proof/std-fpvm/src/mips64/mod.rs b/rust/kona/crates/proof/std-fpvm/src/mips64/mod.rs
new file mode 100644
index 00000000000..3505bc3b693
--- /dev/null
+++ b/rust/kona/crates/proof/std-fpvm/src/mips64/mod.rs
@@ -0,0 +1,5 @@
+//! This module contains raw syscall bindings for the `MIPS64r2` target architecture, as well as a
+//! high-level implementation of the [`crate::BasicKernelInterface`] trait for the `Cannon` kernel.
+ +pub(crate) mod io; +mod syscall; diff --git a/kona/crates/proof/std-fpvm/src/mips64/syscall.rs b/rust/kona/crates/proof/std-fpvm/src/mips64/syscall.rs similarity index 100% rename from kona/crates/proof/std-fpvm/src/mips64/syscall.rs rename to rust/kona/crates/proof/std-fpvm/src/mips64/syscall.rs diff --git a/rust/kona/crates/proof/std-fpvm/src/riscv64/io.rs b/rust/kona/crates/proof/std-fpvm/src/riscv64/io.rs new file mode 100644 index 00000000000..cf0b4a4d9e8 --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm/src/riscv64/io.rs @@ -0,0 +1,71 @@ +use crate::{BasicKernelInterface, FileDescriptor, errors::IOResult, riscv64::syscall}; + +/// Concrete implementation of the [`KernelIO`] trait for the `riscv64` target architecture. +#[derive(Debug)] +pub(crate) struct RiscV64IO; + +/// Relevant system call numbers for the `riscv64` target architecture. +/// +/// See +/// +/// **Note**: This is not an exhaustive list of system calls available to the `client` program, +/// only the ones necessary for the [`BasicKernelInterface`] trait implementation. If an extension +/// trait for the [`BasicKernelInterface`] trait is created for the linux kernel, this list +/// should be extended accordingly. +#[repr(usize)] +pub(crate) enum SyscallNumber { + /// Sets the Exited and `ExitCode` states to true and $a0 respectively. + Exit = 93, + /// Similar behavior as Linux with support for unaligned reads. + Read = 63, + /// Similar behavior as Linux with support for unaligned writes. + Write = 64, + /// Similar behavior as Linux for mapping memory on the host machine. 
+    Mmap = 222,
+}
+
+impl BasicKernelInterface for RiscV64IO {
+    fn write(fd: FileDescriptor, buf: &[u8]) -> IOResult<usize> {
+        unsafe {
+            crate::linux::from_ret(syscall::syscall3(
+                SyscallNumber::Write as usize,
+                fd.into(),
+                buf.as_ptr() as usize,
+                buf.len(),
+            ))
+        }
+    }
+
+    fn read(fd: FileDescriptor, buf: &mut [u8]) -> IOResult<usize> {
+        unsafe {
+            crate::linux::from_ret(syscall::syscall3(
+                SyscallNumber::Read as usize,
+                fd.into(),
+                buf.as_ptr() as usize,
+                buf.len(),
+            ))
+        }
+    }
+
+    fn mmap(size: usize) -> IOResult<usize> {
+        // https://github.com/ethereum-optimism/asterisc/blob/master/rvgo/fast/vm.go#L360-L398
+        unsafe {
+            crate::linux::from_ret(syscall::syscall6(
+                SyscallNumber::Mmap as usize,
+                0usize,            // address hint - 0 for anonymous maps
+                size,              // block size
+                0usize,            // prot, ignored.
+                0x20,              // flags - set MAP_ANONYMOUS
+                u64::MAX as usize, // fd = -1, anonymous memory maps only.
+                0usize,            // offset - ignored, anonymous memory maps only.
+            ))
+        }
+    }
+
+    fn exit(code: usize) -> ! {
+        unsafe {
+            let _ = syscall::syscall1(SyscallNumber::Exit as usize, code);
+            panic!()
+        }
+    }
+}
diff --git a/rust/kona/crates/proof/std-fpvm/src/riscv64/mod.rs b/rust/kona/crates/proof/std-fpvm/src/riscv64/mod.rs
new file mode 100644
index 00000000000..9b7cde55f6f
--- /dev/null
+++ b/rust/kona/crates/proof/std-fpvm/src/riscv64/mod.rs
@@ -0,0 +1,5 @@
+//! This module contains raw syscall bindings for the `riscv64imac` target architecture, as well as
+//! a high-level implementation of the [`crate::BasicKernelInterface`] trait for the kernel.
+ +pub(crate) mod io; +mod syscall; diff --git a/kona/crates/proof/std-fpvm/src/riscv64/syscall.rs b/rust/kona/crates/proof/std-fpvm/src/riscv64/syscall.rs similarity index 100% rename from kona/crates/proof/std-fpvm/src/riscv64/syscall.rs rename to rust/kona/crates/proof/std-fpvm/src/riscv64/syscall.rs diff --git a/rust/kona/crates/proof/std-fpvm/src/tracing.rs b/rust/kona/crates/proof/std-fpvm/src/tracing.rs new file mode 100644 index 00000000000..f8ca17072ce --- /dev/null +++ b/rust/kona/crates/proof/std-fpvm/src/tracing.rs @@ -0,0 +1,97 @@ +//! This module contains + +use crate::io; +use alloc::{ + format, + string::{String, ToString}, + vec::Vec, +}; +use tracing::{ + Event, Level, Metadata, Subscriber, + field::{Field, Visit}, + span::{Attributes, Id, Record}, +}; + +/// Custom [`Subscriber`] implementation that uses [`crate::io`] to write log entries to +/// [`crate::FileDescriptor::StdOut`]. +#[derive(Debug, Clone)] +pub struct FpvmTracingSubscriber { + min_level: Level, +} + +impl FpvmTracingSubscriber { + /// Create a new [`FpvmTracingSubscriber`] with the specified minimum log level. + pub const fn new(min_level: Level) -> Self { + Self { min_level } + } +} + +impl Subscriber for FpvmTracingSubscriber { + fn enabled(&self, _metadata: &Metadata<'_>) -> bool { + true + } + + fn new_span(&self, _span: &Attributes<'_>) -> Id { + Id::from_u64(1) + } + + fn record(&self, _span: &Id, _values: &Record<'_>) {} + + fn record_follows_from(&self, _span: &Id, _follows: &Id) {} + + fn event(&self, event: &Event<'_>) { + let metadata = event.metadata(); + // Comparisons for the [Level] type are inverted. See the [Level] documentation for more + // information. 
+        if *metadata.level() > self.min_level {
+            return;
+        }
+
+        let mut visitor = FieldVisitor::new();
+        event.record(&mut visitor);
+
+        let formatted_message = if visitor.fields.is_empty() {
+            visitor.message
+        } else if visitor.message.is_empty() {
+            visitor.fields.join(", ")
+        } else {
+            format!("{} {}", visitor.message, visitor.fields.join(", "))
+        };
+
+        io::print(&format!("[{}] {}: {}", metadata.level(), metadata.target(), formatted_message));
+    }
+
+    fn enter(&self, _span: &Id) {}
+
+    fn exit(&self, _span: &Id) {}
+}
+
+/// Custom [`Visit`] implementation to extract log field values.
+struct FieldVisitor {
+    message: String,
+    fields: Vec<String>,
+}
+
+impl FieldVisitor {
+    const fn new() -> Self {
+        Self { message: String::new(), fields: Vec::new() }
+    }
+}
+
+impl Visit for FieldVisitor {
+    fn record_debug(&mut self, field: &Field, value: &dyn core::fmt::Debug) {
+        if field.name() == "message" {
+            self.message = format!("{value:?}");
+        } else {
+            self.fields.push(format!("{}={:?}", field.name(), value));
+        }
+    }
+
+    fn record_str(&mut self, field: &Field, value: &str) {
+        if field.name() == "message" {
+            self.message = value.to_string();
+        } else {
+            self.fields.push(format!("{}={}", field.name(), value));
+        }
+    }
+}
diff --git a/kona/crates/proof/std-fpvm/src/traits/basic.rs b/rust/kona/crates/proof/std-fpvm/src/traits/basic.rs
similarity index 82%
rename from kona/crates/proof/std-fpvm/src/traits/basic.rs
rename to rust/kona/crates/proof/std-fpvm/src/traits/basic.rs
index a5763ec418b..8e4394ab0fb 100644
--- a/kona/crates/proof/std-fpvm/src/traits/basic.rs
+++ b/rust/kona/crates/proof/std-fpvm/src/traits/basic.rs
@@ -1,10 +1,10 @@
-//! Defines the [BasicKernelInterface] trait, which describes the functionality of several system
+//! Defines the [`BasicKernelInterface`] trait, which describes the functionality of several system
 //! calls inside of the kernel.
use crate::{FileDescriptor, errors::IOResult}; -/// The [BasicKernelInterface] trait describes the functionality of several core system calls inside -/// of the kernel. +/// The [`BasicKernelInterface`] trait describes the functionality of several core system calls +/// inside of the kernel. /// /// Commonly, embedded proving environments delegate IO operations to custom file descriptors. /// This trait is a safe wrapper around the raw system calls available to the `client` program diff --git a/kona/crates/proof/std-fpvm/src/traits/mod.rs b/rust/kona/crates/proof/std-fpvm/src/traits/mod.rs similarity index 100% rename from kona/crates/proof/std-fpvm/src/traits/mod.rs rename to rust/kona/crates/proof/std-fpvm/src/traits/mod.rs diff --git a/kona/crates/proof/std-fpvm/src/types.rs b/rust/kona/crates/proof/std-fpvm/src/types.rs similarity index 100% rename from kona/crates/proof/std-fpvm/src/types.rs rename to rust/kona/crates/proof/std-fpvm/src/types.rs diff --git a/kona/crates/protocol/derive/CHANGELOG.md b/rust/kona/crates/protocol/derive/CHANGELOG.md similarity index 100% rename from kona/crates/protocol/derive/CHANGELOG.md rename to rust/kona/crates/protocol/derive/CHANGELOG.md diff --git a/kona/crates/protocol/derive/Cargo.toml b/rust/kona/crates/protocol/derive/Cargo.toml similarity index 100% rename from kona/crates/protocol/derive/Cargo.toml rename to rust/kona/crates/protocol/derive/Cargo.toml diff --git a/rust/kona/crates/protocol/derive/README.md b/rust/kona/crates/protocol/derive/README.md new file mode 100644 index 00000000000..0ec3b9bd9b6 --- /dev/null +++ b/rust/kona/crates/protocol/derive/README.md @@ -0,0 +1,61 @@ +# `kona-derive` + +A `no_std` compatible implementation of the OP Stack's [derivation pipeline][derive]. + +[derive]: (https://specs.optimism.io/protocol/derivation.html#l2-chain-derivation-specification). 
+ +## Usage + +The intended way of working with `kona-derive` is to use the [`DerivationPipeline`][dp] which implements the [`Pipeline`][p] trait. To create an instance of the [`DerivationPipeline`][dp], it's recommended to use the [`PipelineBuilder`][pb] as follows. + +```rust,ignore +use std::sync::Arc; +use kona_genesis::RollupConfig; +use kona_derive::EthereumDataSource; +use kona_derive::PipelineBuilder; +use kona_derive::StatefulAttributesBuilder; + +let chain_provider = todo!(); +let l2_chain_provider = todo!(); +let blob_provider = todo!(); +let l1_origin = todo!(); + +let cfg = Arc::new(RollupConfig::default()); +let attributes = StatefulAttributesBuilder::new( + cfg.clone(), + l2_chain_provider.clone(), + chain_provider.clone(), +); +let dap = EthereumDataSource::new( + chain_provider.clone(), + blob_provider, + cfg.as_ref() +); + +// Construct a new derivation pipeline. +let pipeline = PipelineBuilder::new() + .rollup_config(cfg) + .dap_source(dap) + .l2_chain_provider(l2_chain_provider) + .chain_provider(chain_provider) + .builder(attributes) + .origin(l1_origin) + .build(); +``` + +[p]: ./src/traits/pipeline.rs +[pb]: ./src/pipeline/builder.rs +[dp]: ./src/pipeline/core.rs + +## Features + +The most up-to-date feature list will be available on the [docs.rs `Feature Flags` tab][ff] of the `kona-derive` crate. + +Some features include the following. +- `serde`: Serialization and Deserialization support for `kona-derive` types. +- `test-utils`: Test utilities for downstream libraries. + +By default, `kona-derive` enables the `serde` feature. + +[ap]: https://docs.rs/crate/alloy-providers/latest +[ff]: https://docs.rs/crate/kona-derive/latest/features diff --git a/rust/kona/crates/protocol/derive/src/attributes/mod.rs b/rust/kona/crates/protocol/derive/src/attributes/mod.rs new file mode 100644 index 00000000000..7f50337e41b --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/attributes/mod.rs @@ -0,0 +1,5 @@ +//! 
Module containing the [`AttributesBuilder`](crate::traits::AttributesBuilder) trait +//! implementations. + +mod stateful; +pub use stateful::StatefulAttributesBuilder; diff --git a/kona/crates/protocol/derive/src/attributes/stateful.rs b/rust/kona/crates/protocol/derive/src/attributes/stateful.rs similarity index 96% rename from kona/crates/protocol/derive/src/attributes/stateful.rs rename to rust/kona/crates/protocol/derive/src/attributes/stateful.rs index f874c51a6a8..8a9f9b74437 100644 --- a/kona/crates/protocol/derive/src/attributes/stateful.rs +++ b/rust/kona/crates/protocol/derive/src/attributes/stateful.rs @@ -79,7 +79,20 @@ where // If the L1 origin changed in this block, then we are in the first block of the epoch. // In this case we need to fetch all transaction receipts from the L1 origin block so // we can scan for user deposits. - let sequence_number = if l2_parent.l1_origin.number != epoch.number { + let sequence_number = if l2_parent.l1_origin.number == epoch.number { + #[allow(clippy::collapsible_else_if)] + if l2_parent.l1_origin.hash != epoch.hash { + return Err(PipelineErrorKind::Reset( + BuilderError::BlockMismatch(epoch, l2_parent.l1_origin).into(), + )); + } + + let header = + self.receipts_fetcher.header_by_hash(epoch.hash).await.map_err(Into::into)?; + l1_header = header; + deposit_transactions = vec![]; + l2_parent.seq_num + 1 + } else { let header = self.receipts_fetcher.header_by_hash(epoch.hash).await.map_err(Into::into)?; if l2_parent.l1_origin.hash != header.parent_hash { @@ -108,19 +121,6 @@ where l1_header = header; deposit_transactions = deposits; 0 - } else { - #[allow(clippy::collapsible_else_if)] - if l2_parent.l1_origin.hash != epoch.hash { - return Err(PipelineErrorKind::Reset( - BuilderError::BlockMismatch(epoch, l2_parent.l1_origin).into(), - )); - } - - let header = - self.receipts_fetcher.header_by_hash(epoch.hash).await.map_err(Into::into)?; - l1_header = header; - deposit_transactions = vec![]; - l2_parent.seq_num + 1 }; 
// Sanity check the L1 origin was correctly selected to maintain the time invariant @@ -138,12 +138,14 @@ where )); } - let mut upgrade_transactions: Vec = vec![]; - if self.rollup_cfg.is_ecotone_active(next_l2_time) && - !self.rollup_cfg.is_ecotone_active(l2_parent.block_info.timestamp) - { - upgrade_transactions = Hardforks::ECOTONE.txs().collect(); - } + let mut upgrade_transactions: Vec = + if self.rollup_cfg.is_ecotone_active(next_l2_time) && + !self.rollup_cfg.is_ecotone_active(l2_parent.block_info.timestamp) + { + Hardforks::ECOTONE.txs().collect() + } else { + vec![] + }; if self.rollup_cfg.is_fjord_active(next_l2_time) && !self.rollup_cfg.is_fjord_active(l2_parent.block_info.timestamp) { @@ -186,16 +188,12 @@ where txs.extend(deposit_transactions); txs.extend(upgrade_transactions); - let mut withdrawals = None; - if self.rollup_cfg.is_canyon_active(next_l2_time) { - withdrawals = Some(Vec::default()); - } + let withdrawals = self.rollup_cfg.is_canyon_active(next_l2_time).then(Vec::default); - let mut parent_beacon_root = None; - if self.rollup_cfg.is_ecotone_active(next_l2_time) { - // if the parent beacon root is not available, default to zero hash - parent_beacon_root = Some(l1_header.parent_beacon_block_root.unwrap_or_default()); - } + let parent_beacon_root = self + .rollup_cfg + .is_ecotone_active(next_l2_time) + .then(|| l1_header.parent_beacon_block_root.unwrap_or_default()); Ok(OpPayloadAttributes { payload_attributes: PayloadAttributes { @@ -236,11 +234,11 @@ async fn derive_deposits( ) -> Result, PipelineEncodingError> { let mut global_index = 0; let mut res = Vec::new(); - for r in receipts.iter() { + for r in receipts { if Eip658Value::Eip658(false) == r.status { continue; } - for l in r.logs.iter() { + for l in &r.logs { let curr_index = global_index; global_index += 1; if l.data.topics().first().is_none_or(|i| *i != DEPOSIT_EVENT_ABI_HASH) { diff --git a/kona/crates/protocol/derive/src/errors/attributes.rs 
b/rust/kona/crates/protocol/derive/src/errors/attributes.rs similarity index 100% rename from kona/crates/protocol/derive/src/errors/attributes.rs rename to rust/kona/crates/protocol/derive/src/errors/attributes.rs diff --git a/kona/crates/protocol/derive/src/errors/mod.rs b/rust/kona/crates/protocol/derive/src/errors/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/errors/mod.rs rename to rust/kona/crates/protocol/derive/src/errors/mod.rs diff --git a/rust/kona/crates/protocol/derive/src/errors/pipeline.rs b/rust/kona/crates/protocol/derive/src/errors/pipeline.rs new file mode 100644 index 00000000000..15b332c504c --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/errors/pipeline.rs @@ -0,0 +1,440 @@ +//! This module contains derivation errors thrown within the pipeline. + +use crate::BuilderError; +use alloc::string::String; +use alloy_primitives::B256; +use kona_genesis::SystemConfigUpdateError; +use kona_protocol::{DepositError, SpanBatchError}; +use thiserror::Error; + +/// [`crate::ensure`] is a short-hand for bubbling up errors in the case of a condition not being +/// met. +#[macro_export] +macro_rules! ensure { + ($cond:expr, $err:expr) => { + if !($cond) { + return Err($err); + } + }; +} + +/// A top-level severity filter for [`PipelineError`] that categorizes errors by handling strategy. +/// +/// The [`PipelineErrorKind`] wrapper provides a severity classification system that enables +/// sophisticated error handling in the derivation pipeline. 
Different error types require +/// different response strategies: +/// +/// - **Temporary**: Retry-able errors that may resolve with more data +/// - **Critical**: Fatal errors that require external intervention +/// - **Reset**: Errors that require pipeline state reset but allow continued operation +/// +/// # Error Handling Strategy +/// ```text +/// Temporary -> Retry operation, may succeed with more data +/// Critical -> Stop derivation, external intervention required +/// Reset -> Reset pipeline state, continue with clean slate +/// ``` +/// +/// # Usage in Pipeline +/// Error kinds are used by pipeline stages to determine appropriate error handling: +/// - Temporary errors trigger retries in the main derivation loop +/// - Critical errors halt derivation and bubble up to the caller +/// - Reset errors trigger pipeline resets with appropriate recovery logic +#[derive(Error, Debug, PartialEq, Eq)] +pub enum PipelineErrorKind { + /// A temporary error that may resolve with additional data or time. + /// + /// Temporary errors indicate transient conditions such as insufficient data, + /// network timeouts, or resource unavailability. These errors suggest that + /// retrying the operation may succeed once the underlying condition resolves. + /// + /// # Examples + /// - Not enough L1 data available yet + /// - Network communication timeouts + /// - Insufficient channel data for frame assembly + /// + /// # Handling + /// The pipeline typically retries temporary errors in a loop, waiting for + /// conditions to improve or for additional data to become available. + #[error("Temporary error: {0}")] + Temporary(#[source] PipelineError), + /// A critical error that requires external intervention to resolve. + /// + /// Critical errors indicate fundamental issues that cannot be resolved through + /// retries or pipeline resets. These errors require external intervention such + /// as updated L1 data, configuration changes, or system fixes. 
+ /// + /// # Examples + /// - Data source completely exhausted + /// - Fundamental configuration errors + /// - Irrecoverable data corruption + /// + /// # Handling + /// Critical errors halt the derivation process and are returned to the caller + /// for external resolution. The pipeline cannot continue without intervention. + #[error("Critical error: {0}")] + Critical(#[source] PipelineError), + /// A reset error that requires pipeline state reset but allows continued operation. + /// + /// Reset errors indicate conditions that invalidate the current pipeline state + /// but can be resolved by resetting to a known good state and continuing + /// derivation. These typically occur due to chain reorganizations or state + /// inconsistencies. + /// + /// # Examples + /// - L1 chain reorganization detected + /// - Block hash mismatches indicating reorg + /// - Hard fork activation requiring state reset + /// + /// # Handling + /// Reset errors trigger pipeline state cleanup and reset to a safe state, + /// after which derivation can continue with fresh state. + #[error("Pipeline reset: {0}")] + Reset(#[from] ResetError), +} + +/// An error encountered during derivation pipeline processing. +/// +/// [`PipelineError`] represents specific error conditions that can occur during the +/// various stages of L2 block derivation from L1 data. Each error variant provides +/// detailed context about the failure mode and suggests appropriate recovery strategies. 
+/// +/// # Error Categories +/// +/// ## Data Availability Errors +/// - [`Self::Eof`]: No more data available from source +/// - [`Self::NotEnoughData`]: Insufficient data for current operation +/// - [`Self::MissingL1Data`]: Required L1 data not available +/// - [`Self::EndOfSource`]: Data source completely exhausted +/// +/// ## Stage-Specific Errors +/// - [`Self::ChannelProviderEmpty`]: No channels available for processing +/// - [`Self::ChannelReaderEmpty`]: Channel reader has no data +/// - [`Self::BatchQueueEmpty`]: No batches available for processing +/// +/// ## Validation Errors +/// - [`Self::InvalidBatchType`]: Unsupported or malformed batch type +/// - [`Self::InvalidBatchValidity`]: Batch failed validation checks +/// - [`Self::BadEncoding`]: Data decoding/encoding failures +/// +/// ## System Errors +/// - [`Self::SystemConfigUpdate`]: System configuration update failures +/// - [`Self::AttributesBuilder`]: Block attribute construction failures +/// - [`Self::Provider`]: External provider communication failures +#[derive(Error, Debug, PartialEq, Eq)] +pub enum PipelineError { + /// End of file: no more data available from the channel bank. + /// + /// This error indicates that the channel bank has been completely drained + /// and no additional frame data is available for processing. It typically + /// occurs at the end of a derivation sequence when all available L1 data + /// has been consumed. + /// + /// # Recovery + /// Usually indicates completion of derivation for available data. May + /// require waiting for new L1 blocks to provide additional frame data. + #[error("EOF")] + Eof, + /// Insufficient data available to complete the current processing stage. + /// + /// This error indicates that the current operation requires more data than + /// is currently available, but additional data may become available in the + /// future. It suggests that retrying the operation later may succeed. 
+ /// + /// # Common Scenarios + /// - Partial frame received, waiting for completion + /// - Channel assembly requires more frames + /// - Batch construction needs additional channel data + /// + /// # Recovery + /// Retry the operation after more L1 data becomes available or after + /// waiting for network propagation delays. + #[error("Not enough data")] + NotEnoughData, + /// No channels are available in the [`ChannelProvider`]. + /// + /// This error occurs when the channel provider stage has no assembled + /// channels ready for reading. It typically indicates that frame assembly + /// is still in progress or that no valid channels have been constructed + /// from available L1 data. + /// + /// [`ChannelProvider`]: crate::stages::ChannelProvider + #[error("The channel provider is empty")] + ChannelProviderEmpty, + /// The channel has already been fully processed by the [`ChannelAssembler`] stage. + /// + /// This error indicates an attempt to reprocess a channel that has already + /// been assembled and consumed. It suggests a logic error in channel tracking + /// or an attempt to double-process the same channel data. + /// + /// [`ChannelAssembler`]: crate::stages::ChannelAssembler + #[error("Channel already built")] + ChannelAlreadyBuilt, + /// Failed to locate the requested channel in the [`ChannelProvider`]. + /// + /// This error occurs when attempting to access a specific channel that + /// is not available in the channel provider's cache or storage. It may + /// indicate a channel ID mismatch or premature channel eviction. + /// + /// [`ChannelProvider`]: crate::stages::ChannelProvider + #[error("Channel not found in channel provider")] + ChannelNotFound, + /// No channel data returned by the [`ChannelReader`] stage. + /// + /// This error indicates that the channel reader stage has no channels + /// available for reading. It typically occurs when all channels have + /// been consumed or when no valid channels have been assembled yet. 
+ /// + /// [`ChannelReader`]: crate::stages::ChannelReader + #[error("The channel reader has no channel available")] + ChannelReaderEmpty, + /// The [`BatchQueue`] contains no batches ready for processing. + /// + /// This error occurs when the batch queue stage has no assembled batches + /// available for attribute generation. It indicates that batch assembly + /// is still in progress or that no valid batches have been constructed. + /// + /// [`BatchQueue`]: crate::stages::BatchQueue + #[error("The batch queue has no batches available")] + BatchQueueEmpty, + /// Required L1 origin information is missing from the previous pipeline stage. + /// + /// This error indicates a pipeline stage dependency violation where a stage + /// expects L1 origin information that wasn't provided by the preceding stage. + /// It suggests a configuration or sequencing issue in the pipeline setup. + #[error("Missing L1 origin from previous stage")] + MissingOrigin, + /// Required L1 data is missing from the [`L1Retrieval`] stage. + /// + /// This error occurs when the L1 retrieval stage cannot provide the + /// requested L1 block data, transactions, or receipts. It may indicate + /// network issues, data availability problems, or L1 node synchronization lag. + /// + /// [`L1Retrieval`]: crate::stages::L1Retrieval + #[error("L1 Retrieval missing data")] + MissingL1Data, + /// Invalid or unsupported batch type encountered during processing. + /// + /// This error occurs when a pipeline stage receives a batch type that + /// it cannot process or that violates the expected batch format. It + /// indicates either malformed L1 data or unsupported batch versions. + #[error("Invalid batch type passed to stage")] + InvalidBatchType, + /// Batch failed validation checks during processing. + /// + /// This error indicates that a batch contains invalid data that fails + /// validation rules such as timestamp constraints, parent hash checks, + /// or format requirements. 
It suggests potentially malicious or corrupted L1 data. + #[error("Invalid batch validity")] + InvalidBatchValidity, + /// [`SystemConfig`] update operation failed. + /// + /// This error occurs when attempting to update the system configuration + /// fails due to invalid parameters, version mismatches, or other + /// configuration-related issues. + /// + /// [`SystemConfig`]: kona_genesis::SystemConfig + #[error("Error updating system config: {0}")] + SystemConfigUpdate(SystemConfigUpdateError), + /// Block attributes construction failed with detailed error information. + /// + /// This error wraps [`BuilderError`] variants that occur during the + /// construction of block attributes from batch data. It indicates issues + /// with attribute validation, formatting, or consistency checks. + #[error("Attributes builder error: {0}")] + AttributesBuilder(#[from] BuilderError), + /// Data encoding or decoding operation failed. + /// + /// This error wraps [`PipelineEncodingError`] variants that occur during + /// serialization or deserialization of pipeline data structures. It + /// indicates malformed input data or encoding format violations. + #[error("Decode error: {0}")] + BadEncoding(#[from] PipelineEncodingError), + /// The data source has been completely exhausted and cannot provide more data. + /// + /// This error indicates that the underlying L1 data source has reached + /// its end and no additional data will become available. It typically + /// occurs when derivation has caught up to the L1 chain head. + #[error("Data source exhausted")] + EndOfSource, + /// External provider communication or operation failed. + /// + /// This error wraps failures from external data providers such as L1 + /// nodes, blob providers, or other data sources. It includes network + /// failures, API errors, and provider-specific issues. + #[error("Provider error: {0}")] + Provider(String), + /// The pipeline received an unsupported signal type. 
+ /// + /// This error occurs when a pipeline stage receives a signal that it + /// cannot process or that is not supported in the current configuration. + /// It indicates a protocol version mismatch or configuration issue. + #[error("Unsupported signal")] + UnsupportedSignal, +} + +impl PipelineError { + /// Wraps this [`PipelineError`] as a [`PipelineErrorKind::Critical`]. + /// + /// Critical errors indicate fundamental issues that cannot be resolved through + /// retries or pipeline resets. They require external intervention to resolve. + /// + /// # Usage + /// Use this method when an error condition is unrecoverable and requires + /// halting the derivation process for external intervention. + /// + /// # Example + /// ```rust,ignore + /// if data_source_corrupted { + /// return Err(PipelineError::Provider("corrupted data".to_string()).crit()); + /// } + /// ``` + pub const fn crit(self) -> PipelineErrorKind { + PipelineErrorKind::Critical(self) + } + + /// Wraps this [`PipelineError`] as a [`PipelineErrorKind::Temporary`]. + /// + /// Temporary errors indicate transient conditions that may resolve with + /// additional data, time, or retries. The pipeline can attempt to recover + /// by retrying the operation. + /// + /// # Usage + /// Use this method when an error condition might resolve if the operation + /// is retried, particularly for data availability or network issues. + /// + /// # Example + /// ```rust,ignore + /// if insufficient_data { + /// return Err(PipelineError::NotEnoughData.temp()); + /// } + /// ``` + pub const fn temp(self) -> PipelineErrorKind { + PipelineErrorKind::Temporary(self) + } +} + +/// A reset error +#[derive(Error, Clone, Debug, Eq, PartialEq)] +pub enum ResetError { + /// The batch has a bad parent hash. + /// The first argument is the expected parent hash, and the second argument is the actual + /// parent hash. 
+ #[error("Bad parent hash: expected {0}, got {1}")] + BadParentHash(B256, B256), + /// The batch has a bad timestamp. + /// The first argument is the expected timestamp, and the second argument is the actual + /// timestamp. + #[error("Bad timestamp: expected {0}, got {1}")] + BadTimestamp(u64, u64), + /// L1 origin mismatch. + #[error("L1 origin mismatch. Expected {0:?}, got {1:?}")] + L1OriginMismatch(u64, u64), + /// The stage detected a block reorg. + /// The first argument is the expected block hash. + /// The second argument is the `parent_hash` of the next l1 origin block. + #[error("L1 reorg detected: expected {0}, got {1}")] + ReorgDetected(B256, B256), + /// Attributes builder error variant, with [`BuilderError`]. + #[error("Attributes builder error: {0}")] + AttributesBuilder(#[from] BuilderError), + /// A Holocene activation temporary error. + #[error("Holocene activation reset")] + HoloceneActivation, + /// The next l1 block provided to the managed traversal stage is not the expected one. + #[error("Next L1 block hash mismatch: expected {0}, got {1}")] + NextL1BlockHashMismatch(B256, B256), +} + +impl ResetError { + /// Wrap [`ResetError`] as a [`PipelineErrorKind::Reset`]. + pub const fn reset(self) -> PipelineErrorKind { + PipelineErrorKind::Reset(self) + } +} + +/// A decoding error. +#[derive(Error, Debug, PartialEq, Eq)] +pub enum PipelineEncodingError { + /// The buffer is empty. + #[error("Empty buffer")] + EmptyBuffer, + /// Deposit decoding error. + #[error("Error decoding deposit: {0}")] + DepositError(#[from] DepositError), + /// Alloy RLP Encoding Error. + #[error("RLP error: {0}")] + AlloyRlpError(alloy_rlp::Error), + /// Span Batch Error. 
+ #[error("{0}")] + SpanBatchError(#[from] SpanBatchError), +} + +#[cfg(test)] +mod tests { + use super::*; + use core::error::Error; + + #[test] + fn test_pipeline_error_kind_source() { + let err = PipelineErrorKind::Temporary(PipelineError::Eof); + assert!(err.source().is_some()); + + let err = PipelineErrorKind::Critical(PipelineError::Eof); + assert!(err.source().is_some()); + + let err = PipelineErrorKind::Reset(ResetError::BadParentHash( + Default::default(), + Default::default(), + )); + assert!(err.source().is_some()); + } + + #[test] + fn test_pipeline_error_source() { + let err = PipelineError::AttributesBuilder(BuilderError::BlockMismatch( + Default::default(), + Default::default(), + )); + assert!(err.source().is_some()); + + let encoding_err = PipelineEncodingError::EmptyBuffer; + let err: PipelineError = encoding_err.into(); + assert!(err.source().is_some()); + + let err = PipelineError::Eof; + assert!(err.source().is_none()); + } + + #[test] + fn test_pipeline_encoding_error_source() { + let err = PipelineEncodingError::DepositError(DepositError::UnexpectedTopicsLen(0)); + assert!(err.source().is_some()); + + let err = SpanBatchError::TooBigSpanBatchSize; + let err: PipelineEncodingError = err.into(); + assert!(err.source().is_some()); + + let err = PipelineEncodingError::EmptyBuffer; + assert!(err.source().is_none()); + } + + #[test] + fn test_reset_error_kinds() { + let reset_errors = [ + ResetError::BadParentHash(Default::default(), Default::default()), + ResetError::BadTimestamp(0, 0), + ResetError::L1OriginMismatch(0, 0), + ResetError::ReorgDetected(Default::default(), Default::default()), + ResetError::AttributesBuilder(BuilderError::BlockMismatch( + Default::default(), + Default::default(), + )), + ResetError::HoloceneActivation, + ]; + for error in reset_errors { + let expected = PipelineErrorKind::Reset(error.clone()); + assert_eq!(error.reset(), expected); + } + } +} diff --git a/kona/crates/protocol/derive/src/errors/sources.rs 
b/rust/kona/crates/protocol/derive/src/errors/sources.rs similarity index 90% rename from kona/crates/protocol/derive/src/errors/sources.rs rename to rust/kona/crates/protocol/derive/src/errors/sources.rs index 2a16c80db09..ab752eae72d 100644 --- a/kona/crates/protocol/derive/src/errors/sources.rs +++ b/rust/kona/crates/protocol/derive/src/errors/sources.rs @@ -41,10 +41,8 @@ pub enum BlobProviderError { impl From for PipelineErrorKind { fn from(val: BlobProviderError) -> Self { match val { - BlobProviderError::SidecarLengthMismatch(_, _) => { - PipelineError::Provider(val.to_string()).crit() - } - BlobProviderError::SlotDerivation => PipelineError::Provider(val.to_string()).crit(), + BlobProviderError::SidecarLengthMismatch(_, _) | + BlobProviderError::SlotDerivation | BlobProviderError::BlobDecoding(_) => PipelineError::Provider(val.to_string()).crit(), BlobProviderError::Backend(_) => PipelineError::Provider(val.to_string()).temp(), } diff --git a/kona/crates/protocol/derive/src/errors/stages.rs b/rust/kona/crates/protocol/derive/src/errors/stages.rs similarity index 100% rename from kona/crates/protocol/derive/src/errors/stages.rs rename to rust/kona/crates/protocol/derive/src/errors/stages.rs diff --git a/rust/kona/crates/protocol/derive/src/lib.rs b/rust/kona/crates/protocol/derive/src/lib.rs new file mode 100644 index 00000000000..945f22d3ec9 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/lib.rs @@ -0,0 +1,56 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "metrics"), no_std)] + +extern crate alloc; + +#[macro_use] +extern crate tracing; + +mod attributes; 
+pub use attributes::StatefulAttributesBuilder; + +mod errors; +pub use errors::{ + BatchDecompressionError, BlobDecodingError, BlobProviderError, BuilderError, + PipelineEncodingError, PipelineError, PipelineErrorKind, ResetError, +}; + +mod pipeline; +pub use pipeline::{ + AttributesQueueStage, BatchProviderStage, BatchStreamStage, ChannelProviderStage, + ChannelReaderStage, DerivationPipeline, FrameQueueStage, IndexedAttributesQueueStage, + L1RetrievalStage, PipelineBuilder, PolledAttributesQueueStage, +}; + +mod sources; +pub use sources::{BlobData, BlobSource, CalldataSource, EthereumDataSource}; + +mod stages; +pub use stages::{ + AttributesQueue, BatchProvider, BatchQueue, BatchStream, BatchStreamProvider, BatchValidator, + ChannelAssembler, ChannelBank, ChannelProvider, ChannelReader, ChannelReaderProvider, + FrameQueue, FrameQueueProvider, IndexedTraversal, L1Retrieval, L1RetrievalProvider, + NextBatchProvider, NextFrameProvider, PollingTraversal, TraversalStage, +}; + +mod traits; +pub use traits::{ + AttributesBuilder, AttributesProvider, BatchValidationProviderDerive, BlobProvider, + ChainProvider, DataAvailabilityProvider, L2ChainProvider, NextAttributes, OriginAdvancer, + OriginProvider, Pipeline, ResetProvider, SignalReceiver, +}; + +mod types; +pub use types::{ActivationSignal, PipelineResult, ResetSignal, Signal, StepResult}; + +mod metrics; +pub use metrics::Metrics; + +#[cfg(any(test, feature = "test-utils"))] +pub mod test_utils; diff --git a/kona/crates/protocol/derive/src/metrics/mod.rs b/rust/kona/crates/protocol/derive/src/metrics/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/metrics/mod.rs rename to rust/kona/crates/protocol/derive/src/metrics/mod.rs diff --git a/kona/crates/protocol/derive/src/pipeline/builder.rs b/rust/kona/crates/protocol/derive/src/pipeline/builder.rs similarity index 100% rename from kona/crates/protocol/derive/src/pipeline/builder.rs rename to 
rust/kona/crates/protocol/derive/src/pipeline/builder.rs diff --git a/rust/kona/crates/protocol/derive/src/pipeline/core.rs b/rust/kona/crates/protocol/derive/src/pipeline/core.rs new file mode 100644 index 00000000000..379581aa508 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/pipeline/core.rs @@ -0,0 +1,365 @@ +//! Contains the core derivation pipeline. + +use crate::{ + ActivationSignal, L2ChainProvider, NextAttributes, OriginAdvancer, OriginProvider, Pipeline, + PipelineError, PipelineErrorKind, PipelineResult, ResetSignal, Signal, SignalReceiver, + StepResult, +}; +use alloc::{boxed::Box, collections::VecDeque, sync::Arc}; +use async_trait::async_trait; +use core::fmt::Debug; +use kona_genesis::{RollupConfig, SystemConfig}; +use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; + +/// The derivation pipeline is responsible for deriving L2 inputs from L1 data. +#[derive(Debug)] +pub struct DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, + P: L2ChainProvider + Send + Sync + Debug, +{ + /// A handle to the next attributes. + pub attributes: S, + /// Reset provider for the pipeline. + /// A list of prepared [`OpAttributesWithParent`] to be used by the derivation pipeline + /// consumer. + pub prepared: VecDeque, + /// The rollup config. + pub rollup_config: Arc, + /// The L2 Chain Provider used to fetch the system config on reset. + pub l2_chain_provider: P, +} + +impl DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, + P: L2ChainProvider + Send + Sync + Debug, +{ + /// Creates a new instance of the [`DerivationPipeline`]. 
+ pub const fn new( + attributes: S, + rollup_config: Arc, + l2_chain_provider: P, + ) -> Self { + Self { attributes, prepared: VecDeque::new(), rollup_config, l2_chain_provider } + } +} + +impl OriginProvider for DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send, + P: L2ChainProvider + Send + Sync + Debug, +{ + fn origin(&self) -> Option { + self.attributes.origin() + } +} + +impl Iterator for DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, + P: L2ChainProvider + Send + Sync + Debug, +{ + type Item = OpAttributesWithParent; + + fn next(&mut self) -> Option { + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_PAYLOAD_ATTRIBUTES_BUFFER, + self.prepared.len().saturating_sub(1) as f64 + ); + self.prepared.pop_front() + } +} + +#[async_trait] +impl SignalReceiver for DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, + P: L2ChainProvider + Send + Sync + Debug, +{ + /// Signals the pipeline by calling the [`SignalReceiver::signal`] method. + /// + /// During a [`Signal::Reset`], each stage is recursively called from the top-level + /// [`crate::stages::AttributesQueue`] to the bottom [`crate::PollingTraversal`] + /// with a head-recursion pattern. This effectively clears the internal state + /// of each stage in the pipeline from bottom on up. + /// + /// [`Signal::Activation`] does a similar thing to the reset, with different + /// holocene-specific reset rules. + /// + /// ### Parameters + /// + /// The `signal` is contains the signal variant with any necessary parameters. + async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { + match signal { + mut s @ (Signal::Reset(ResetSignal { l2_safe_head, .. }) | + Signal::Activation(ActivationSignal { l2_safe_head, .. 
})) => { + let system_config = self + .l2_chain_provider + .system_config_by_number( + l2_safe_head.block_info.number, + Arc::clone(&self.rollup_config), + ) + .await + .map_err(Into::into)?; + s = s.with_system_config(system_config); + match self.attributes.signal(s).await { + Ok(()) => trace!(target: "pipeline", "Stages reset"), + Err(err) => { + if err == PipelineErrorKind::Temporary(PipelineError::Eof) { + trace!(target: "pipeline", "Stages reset with EOF"); + } else { + error!(target: "pipeline", "Stage reset errored: {:?}", err); + return Err(err); + } + } + } + } + Signal::FlushChannel | Signal::ProvideBlock(_) => { + self.attributes.signal(signal).await?; + } + } + kona_macros::inc!( + gauge, + crate::metrics::Metrics::PIPELINE_SIGNALS, + "type" => signal.to_string(), + ); + Ok(()) + } +} + +#[async_trait] +impl Pipeline for DerivationPipeline +where + S: NextAttributes + SignalReceiver + OriginProvider + OriginAdvancer + Debug + Send + Sync, + P: L2ChainProvider + Send + Sync + Debug, +{ + /// Peeks at the next prepared [`OpAttributesWithParent`] from the pipeline. + fn peek(&self) -> Option<&OpAttributesWithParent> { + self.prepared.front() + } + + /// Returns the rollup config. + fn rollup_config(&self) -> &RollupConfig { + &self.rollup_config + } + + /// Returns the [`SystemConfig`] by L2 number. + async fn system_config_by_number( + &mut self, + number: u64, + ) -> Result { + self.l2_chain_provider + .system_config_by_number(number, self.rollup_config.clone()) + .await + .map_err(Into::into) + } + + /// Attempts to progress the pipeline. + /// + /// ## Returns + /// + /// A [`PipelineError::Eof`] is returned if the pipeline is blocked by waiting for new L1 data. + /// Any other error is critical and the derivation pipeline should be reset. + /// An error is expected when the underlying source closes. + /// + /// When [`DerivationPipeline::step`] returns [Ok(())], it should be called again, to continue + /// the derivation process. 
+ /// + /// [`PipelineError`]: crate::errors::PipelineError + async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { + kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_STEPS); + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_STEP_BLOCK, + cursor.block_info.number as f64 + ); + match self.attributes.next_attributes(cursor).await { + Ok(a) => { + trace!(target: "pipeline", "Prepared L2 attributes: {:?}", a); + kona_macros::inc!( + gauge, + crate::metrics::Metrics::PIPELINE_PAYLOAD_ATTRIBUTES_BUFFER + ); + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_LATEST_PAYLOAD_TX_COUNT, + a.attributes.transactions.as_ref().map_or(0.0, |txs| txs.len() as f64) + ); + if a.is_last_in_span { + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_DERIVED_SPAN_SIZE, + 0 + ); + } else { + kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_DERIVED_SPAN_SIZE); + } + self.prepared.push_back(a); + kona_macros::inc!(gauge, crate::metrics::Metrics::PIPELINE_PREPARED_ATTRIBUTES); + StepResult::PreparedAttributes + } + Err(err) => match err { + PipelineErrorKind::Temporary(PipelineError::Eof) => { + trace!(target: "pipeline", "Pipeline advancing origin"); + if let Err(e) = self.attributes.advance_origin().await { + return StepResult::OriginAdvanceErr(e); + } + StepResult::AdvancedOrigin + } + PipelineErrorKind::Temporary(_) => { + trace!(target: "pipeline", "Attributes queue step failed due to temporary error: {:?}", err); + StepResult::StepFailed(err) + } + _ => { + warn!(target: "pipeline", "Attributes queue step failed: {:?}", err); + StepResult::StepFailed(err) + } + }, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{DerivationPipeline, test_utils::*}; + use alloc::{string::ToString, sync::Arc}; + use alloy_rpc_types_engine::PayloadAttributes; + use kona_genesis::{RollupConfig, SystemConfig}; + use kona_protocol::{L2BlockInfo, OpAttributesWithParent}; + use 
op_alloy_rpc_types_engine::OpPayloadAttributes; + + fn default_test_payload_attributes() -> OpAttributesWithParent { + OpAttributesWithParent { + attributes: OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: 0, + prev_randao: Default::default(), + suggested_fee_recipient: Default::default(), + withdrawals: None, + parent_beacon_block_root: None, + }, + transactions: None, + no_tx_pool: None, + gas_limit: None, + eip_1559_params: None, + min_base_fee: None, + }, + parent: Default::default(), + derived_from: Default::default(), + is_last_in_span: false, + } + } + + #[test] + fn test_pipeline_next_attributes_empty() { + let mut pipeline = new_test_pipeline(); + let result = pipeline.next(); + assert_eq!(result, None); + } + + #[test] + fn test_pipeline_next_attributes_with_peek() { + let mut pipeline = new_test_pipeline(); + let expected = default_test_payload_attributes(); + pipeline.prepared.push_back(expected.clone()); + + let result = pipeline.peek(); + assert_eq!(result, Some(&expected)); + + let result = pipeline.next(); + assert_eq!(result, Some(expected)); + } + + #[tokio::test] + async fn test_derivation_pipeline_missing_block() { + let mut pipeline = new_test_pipeline(); + let cursor = L2BlockInfo::default(); + let result = pipeline.step(cursor).await; + assert_eq!( + result, + StepResult::OriginAdvanceErr( + PipelineError::Provider("Block not found".to_string()).temp() + ) + ); + } + + #[tokio::test] + async fn test_derivation_pipeline_prepared_attributes() { + let rollup_config = Arc::new(RollupConfig::default()); + let l2_chain_provider = TestL2ChainProvider::default(); + let expected = default_test_payload_attributes(); + let attributes = TestNextAttributes { next_attributes: Some(expected) }; + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Step on the pipeline and expect the result. 
+ let cursor = L2BlockInfo::default(); + let result = pipeline.step(cursor).await; + assert_eq!(result, StepResult::PreparedAttributes); + } + + #[tokio::test] + async fn test_derivation_pipeline_advance_origin() { + let rollup_config = Arc::new(RollupConfig::default()); + let l2_chain_provider = TestL2ChainProvider::default(); + let attributes = TestNextAttributes::default(); + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Step on the pipeline and expect the result. + let cursor = L2BlockInfo::default(); + let result = pipeline.step(cursor).await; + assert_eq!(result, StepResult::AdvancedOrigin); + } + + #[tokio::test] + async fn test_derivation_pipeline_signal_activation() { + let rollup_config = Arc::new(RollupConfig::default()); + let mut l2_chain_provider = TestL2ChainProvider::default(); + l2_chain_provider.system_configs.insert(0, SystemConfig::default()); + let attributes = TestNextAttributes::default(); + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Signal the pipeline to reset. + let result = pipeline.signal(ActivationSignal::default().signal()).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_derivation_pipeline_flush_channel() { + let rollup_config = Arc::new(RollupConfig::default()); + let l2_chain_provider = TestL2ChainProvider::default(); + let attributes = TestNextAttributes::default(); + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Signal the pipeline to reset. 
+ let result = pipeline.signal(Signal::FlushChannel).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_derivation_pipeline_signal_reset_missing_sys_config() { + let rollup_config = Arc::new(RollupConfig::default()); + let l2_chain_provider = TestL2ChainProvider::default(); + let attributes = TestNextAttributes::default(); + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Signal the pipeline to reset. + let result = pipeline.signal(ResetSignal::default().signal()).await.unwrap_err(); + assert_eq!(result, PipelineError::Provider("System config not found".to_string()).temp()); + } + + #[tokio::test] + async fn test_derivation_pipeline_signal_reset_ok() { + let rollup_config = Arc::new(RollupConfig::default()); + let mut l2_chain_provider = TestL2ChainProvider::default(); + l2_chain_provider.system_configs.insert(0, SystemConfig::default()); + let attributes = TestNextAttributes::default(); + let mut pipeline = DerivationPipeline::new(attributes, rollup_config, l2_chain_provider); + + // Signal the pipeline to reset. 
+ let result = pipeline.signal(ResetSignal::default().signal()).await; + assert!(result.is_ok()); + } +} diff --git a/kona/crates/protocol/derive/src/pipeline/mod.rs b/rust/kona/crates/protocol/derive/src/pipeline/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/pipeline/mod.rs rename to rust/kona/crates/protocol/derive/src/pipeline/mod.rs diff --git a/kona/crates/protocol/derive/src/pipeline/types.rs b/rust/kona/crates/protocol/derive/src/pipeline/types.rs similarity index 100% rename from kona/crates/protocol/derive/src/pipeline/types.rs rename to rust/kona/crates/protocol/derive/src/pipeline/types.rs diff --git a/kona/crates/protocol/derive/src/sources/blob_data.rs b/rust/kona/crates/protocol/derive/src/sources/blob_data.rs similarity index 100% rename from kona/crates/protocol/derive/src/sources/blob_data.rs rename to rust/kona/crates/protocol/derive/src/sources/blob_data.rs diff --git a/rust/kona/crates/protocol/derive/src/sources/blobs.rs b/rust/kona/crates/protocol/derive/src/sources/blobs.rs new file mode 100644 index 00000000000..14b2f42db51 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/sources/blobs.rs @@ -0,0 +1,355 @@ +//! Blob Data Source + +use crate::{ + BlobData, BlobProvider, BlobProviderError, ChainProvider, DataAvailabilityProvider, + PipelineError, PipelineResult, +}; +use alloc::{boxed::Box, string::ToString, vec::Vec}; +use alloy_consensus::{ + Transaction, TxEip4844Variant, TxEnvelope, TxType, transaction::SignerRecoverable, +}; +use alloy_eips::eip4844::IndexedBlobHash; +use alloy_primitives::{Address, Bytes}; +use async_trait::async_trait; +use kona_protocol::BlockInfo; + +/// A data iterator that reads from a blob. +#[derive(Debug, Clone)] +pub struct BlobSource +where + F: ChainProvider + Send, + B: BlobProvider + Send, +{ + /// Chain provider. + pub chain_provider: F, + /// Fetches blobs. + pub blob_fetcher: B, + /// The address of the batcher contract. + pub batcher_address: Address, + /// Data. 
+ pub data: Vec, + /// Whether the source is open. + pub open: bool, +} + +impl BlobSource +where + F: ChainProvider + Send, + B: BlobProvider + Send, +{ + /// Creates a new blob source. + pub const fn new(chain_provider: F, blob_fetcher: B, batcher_address: Address) -> Self { + Self { chain_provider, blob_fetcher, batcher_address, data: Vec::new(), open: false } + } + + fn extract_blob_data( + &self, + txs: Vec, + batcher_address: Address, + ) -> (Vec, Vec) { + let mut index: u64 = 0; + let mut data = Vec::new(); + let mut hashes = Vec::new(); + for tx in txs { + let (tx_kind, calldata, blob_hashes) = match &tx { + TxEnvelope::Legacy(tx) => (tx.tx().to(), tx.tx().input.clone(), None), + TxEnvelope::Eip2930(tx) => (tx.tx().to(), tx.tx().input.clone(), None), + TxEnvelope::Eip1559(tx) => (tx.tx().to(), tx.tx().input.clone(), None), + TxEnvelope::Eip4844(blob_tx_wrapper) => match blob_tx_wrapper.tx() { + TxEip4844Variant::TxEip4844(tx) => { + (tx.to(), tx.input.clone(), Some(tx.blob_versioned_hashes.clone())) + } + TxEip4844Variant::TxEip4844WithSidecar(tx) => { + let tx = tx.tx(); + (tx.to(), tx.input.clone(), Some(tx.blob_versioned_hashes.clone())) + } + }, + _ => continue, + }; + let Some(to) = tx_kind else { continue }; + + if to != self.batcher_address { + index += blob_hashes.map_or(0, |h| h.len() as u64); + continue; + } + if tx.recover_signer().unwrap_or_default() != batcher_address { + index += blob_hashes.map_or(0, |h| h.len() as u64); + continue; + } + if tx.tx_type() != TxType::Eip4844 { + let blob_data = BlobData { data: None, calldata: Some(calldata.to_vec().into()) }; + data.push(blob_data); + continue; + } + if !calldata.is_empty() { + let hash = match &tx { + TxEnvelope::Legacy(tx) => Some(tx.hash()), + TxEnvelope::Eip2930(tx) => Some(tx.hash()), + TxEnvelope::Eip1559(tx) => Some(tx.hash()), + TxEnvelope::Eip4844(blob_tx_wrapper) => Some(blob_tx_wrapper.hash()), + _ => None, + }; + warn!(target: "blob_source", "Blob tx has calldata, which will be 
ignored: {hash:?}"); + } + let blob_hashes = if let Some(b) = blob_hashes { + b + } else { + continue; + }; + for hash in blob_hashes { + let indexed = IndexedBlobHash { hash, index }; + hashes.push(indexed); + data.push(BlobData::default()); + index += 1; + } + } + #[cfg(feature = "metrics")] + metrics::gauge!( + crate::metrics::Metrics::PIPELINE_DATA_AVAILABILITY_PROVIDER, + "source" => "blobs", + ) + .increment(data.len() as f64); + (data, hashes) + } + + /// Loads blob data into the source if it is not open. + async fn load_blobs( + &mut self, + block_ref: &BlockInfo, + batcher_address: Address, + ) -> Result<(), BlobProviderError> { + if self.open { + return Ok(()); + } + + let info = self + .chain_provider + .block_info_and_transactions_by_hash(block_ref.hash) + .await + .map_err(|e| BlobProviderError::Backend(e.to_string()))?; + + let (mut data, blob_hashes) = self.extract_blob_data(info.1, batcher_address); + + // If there are no hashes, set the calldata and return. + if blob_hashes.is_empty() { + self.open = true; + self.data = data; + return Ok(()); + } + + let blobs = + self.blob_fetcher.get_and_validate_blobs(block_ref, &blob_hashes).await.map_err( + |e| { + warn!(target: "blob_source", "Failed to fetch blobs: {e}"); + BlobProviderError::Backend(e.to_string()) + }, + )?; + + // Fill the blob pointers. + let mut blob_index = 0; + for blob in &mut data { + match blob.fill(&blobs, blob_index) { + Ok(should_increment) => { + if should_increment { + blob_index += 1; + } + } + Err(e) => { + return Err(e.into()); + } + } + } + + self.open = true; + self.data = data; + Ok(()) + } + + /// Extracts the next data from the source. 
+ fn next_data(&mut self) -> PipelineResult { + if self.data.is_empty() { + return Err(PipelineError::Eof.temp()); + } + + Ok(self.data.remove(0)) + } +} + +#[async_trait] +impl DataAvailabilityProvider for BlobSource +where + F: ChainProvider + Sync + Send, + B: BlobProvider + Sync + Send, +{ + type Item = Bytes; + + async fn next( + &mut self, + block_ref: &BlockInfo, + batcher_address: Address, + ) -> PipelineResult { + self.load_blobs(block_ref, batcher_address).await?; + + let next_data = self.next_data()?; + if let Some(c) = next_data.calldata { + return Ok(c); + } + + // Decode the blob data to raw bytes. + // Otherwise, ignore blob and recurse next. + match next_data.decode() { + Ok(d) => Ok(d), + Err(_) => { + warn!(target: "blob_source", "Failed to decode blob data, skipping"); + self.next(block_ref, batcher_address).await + } + } + } + + fn clear(&mut self) { + self.data.clear(); + self.open = false; + } +} + +#[cfg(test)] +pub(crate) mod tests { + use super::*; + use crate::{ + errors::PipelineErrorKind, + test_utils::{TestBlobProvider, TestChainProvider}, + }; + use alloc::vec; + use alloy_rlp::Decodable; + + pub(crate) fn default_test_blob_source() -> BlobSource { + let chain_provider = TestChainProvider::default(); + let blob_fetcher = TestBlobProvider::default(); + let batcher_address = Address::default(); + BlobSource::new(chain_provider, blob_fetcher, batcher_address) + } + + pub(crate) fn valid_blob_txs() -> Vec { + // https://sepolia.etherscan.io/getRawTx?tx=0x9a22ccb0029bc8b0ddd073be1a1d923b7ae2b2ea52100bae0db4424f9107e9c0 + let raw_tx = 
alloy_primitives::hex::decode("0x03f9011d83aa36a7820fa28477359400852e90edd0008252089411e9ca82a3a762b4b5bd264d4173a242e7a770648080c08504a817c800f8a5a0012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921aa00152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4a0013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7a001148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1a0011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e654901a0c8de4cced43169f9aa3d36506363b2d2c44f6c49fc1fd91ea114c86f3757077ea01e11fdd0d1934eda0492606ee0bb80a7bf8f35cc5f86ec60fe5031ba48bfd544").unwrap(); + let eip4844 = TxEnvelope::decode(&mut raw_tx.as_slice()).unwrap(); + vec![eip4844] + } + + #[tokio::test] + async fn test_load_blobs_open() { + let mut source = default_test_blob_source(); + source.open = true; + assert!(source.load_blobs(&BlockInfo::default(), Address::ZERO).await.is_ok()); + } + + #[tokio::test] + async fn test_load_blobs_chain_provider_err() { + let mut source = default_test_blob_source(); + assert!(matches!( + source.load_blobs(&BlockInfo::default(), Address::ZERO).await, + Err(BlobProviderError::Backend(_)) + )); + } + + #[tokio::test] + async fn test_load_blobs_chain_provider_empty_txs() { + let mut source = default_test_blob_source(); + let block_info = BlockInfo::default(); + source.chain_provider.insert_block_with_transactions(0, block_info, Vec::new()); + assert!(!source.open); // Source is not open by default. 
+ assert!(source.load_blobs(&BlockInfo::default(), Address::ZERO).await.is_ok()); + assert!(source.data.is_empty()); + assert!(source.open); + } + + #[tokio::test] + async fn test_load_blobs_chain_provider_4844_txs_blob_fetch_error() { + let mut source = default_test_blob_source(); + let block_info = BlockInfo::default(); + let batcher_address = + alloy_primitives::address!("A83C816D4f9b2783761a22BA6FADB0eB0606D7B2"); + source.batcher_address = + alloy_primitives::address!("11E9CA82A3a762b4B5bd264d4173a242e7a77064"); + let txs = valid_blob_txs(); + source.blob_fetcher.should_error = true; + source.chain_provider.insert_block_with_transactions(1, block_info, txs); + assert!(matches!( + source.load_blobs(&BlockInfo::default(), batcher_address).await, + Err(BlobProviderError::Backend(_)) + )); + } + + #[tokio::test] + async fn test_load_blobs_chain_provider_4844_txs_succeeds() { + use alloy_consensus::Blob; + + let mut source = default_test_blob_source(); + let block_info = BlockInfo::default(); + let batcher_address = + alloy_primitives::address!("A83C816D4f9b2783761a22BA6FADB0eB0606D7B2"); + source.batcher_address = + alloy_primitives::address!("11E9CA82A3a762b4B5bd264d4173a242e7a77064"); + let txs = valid_blob_txs(); + source.chain_provider.insert_block_with_transactions(1, block_info, txs); + let hashes = [ + alloy_primitives::b256!( + "012ec3d6f66766bedb002a190126b3549fce0047de0d4c25cffce0dc1c57921a" + ), + alloy_primitives::b256!( + "0152d8e24762ff22b1cfd9f8c0683786a7ca63ba49973818b3d1e9512cd2cec4" + ), + alloy_primitives::b256!( + "013b98c6c83e066d5b14af2b85199e3d4fc7d1e778dd53130d180f5077e2d1c7" + ), + alloy_primitives::b256!( + "01148b495d6e859114e670ca54fb6e2657f0cbae5b08063605093a4b3dc9f8f1" + ), + alloy_primitives::b256!( + "011ac212f13c5dff2b2c6b600a79635103d6f580a4221079951181b25c7e6549" + ), + ]; + for hash in hashes { + source.blob_fetcher.insert_blob(hash, Blob::with_last_byte(1u8)); + } + source.load_blobs(&BlockInfo::default(), 
batcher_address).await.unwrap(); + assert!(source.open); + assert!(!source.data.is_empty()); + } + + #[tokio::test] + async fn test_open_empty_data_eof() { + let mut source = default_test_blob_source(); + source.open = true; + + let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); + assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Eof))); + } + + #[tokio::test] + async fn test_open_calldata() { + let mut source = default_test_blob_source(); + source.open = true; + source.data.push(BlobData { data: None, calldata: Some(Bytes::default()) }); + + let data = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap(); + assert_eq!(data, Bytes::default()); + } + + #[tokio::test] + async fn test_open_blob_data_decode_missing_data() { + let mut source = default_test_blob_source(); + source.open = true; + source.data.push(BlobData { data: Some(Bytes::from(&[1; 32])), calldata: None }); + + let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); + assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Eof))); + } + + #[tokio::test] + async fn test_blob_source_pipeline_error() { + let mut source = default_test_blob_source(); + let err = source.next(&BlockInfo::default(), Address::ZERO).await.unwrap_err(); + assert!(matches!(err, PipelineErrorKind::Temporary(PipelineError::Provider(_)))); + } +} diff --git a/kona/crates/protocol/derive/src/sources/calldata.rs b/rust/kona/crates/protocol/derive/src/sources/calldata.rs similarity index 99% rename from kona/crates/protocol/derive/src/sources/calldata.rs rename to rust/kona/crates/protocol/derive/src/sources/calldata.rs index 2475fe9a860..be9ad3675b4 100644 --- a/kona/crates/protocol/derive/src/sources/calldata.rs +++ b/rust/kona/crates/protocol/derive/src/sources/calldata.rs @@ -1,4 +1,4 @@ -//! CallData Source +//! 
`CallData` Source use crate::{ChainProvider, DataAvailabilityProvider, PipelineError, PipelineResult}; use alloc::{boxed::Box, collections::VecDeque}; diff --git a/kona/crates/protocol/derive/src/sources/ethereum.rs b/rust/kona/crates/protocol/derive/src/sources/ethereum.rs similarity index 97% rename from kona/crates/protocol/derive/src/sources/ethereum.rs rename to rust/kona/crates/protocol/derive/src/sources/ethereum.rs index 6b434d86e8e..991ba41cddf 100644 --- a/kona/crates/protocol/derive/src/sources/ethereum.rs +++ b/rust/kona/crates/protocol/derive/src/sources/ethereum.rs @@ -1,5 +1,5 @@ -//! Contains the [EthereumDataSource], which is a concrete implementation of the -//! [DataAvailabilityProvider] trait for the Ethereum protocol. +//! Contains the [`EthereumDataSource`], which is a concrete implementation of the +//! [`DataAvailabilityProvider`] trait for the Ethereum protocol. use crate::{ BlobProvider, BlobSource, CalldataSource, ChainProvider, DataAvailabilityProvider, diff --git a/rust/kona/crates/protocol/derive/src/sources/mod.rs b/rust/kona/crates/protocol/derive/src/sources/mod.rs new file mode 100644 index 00000000000..5077c1f9bc3 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/sources/mod.rs @@ -0,0 +1,18 @@ +//! The data source module. +//! +//! Data sources are data providers for the kona derivation pipeline. +//! They implement the [`DataAvailabilityProvider`](crate::traits::DataAvailabilityProvider) trait, +//! providing a way to iterate over data for a given (L2) +//! [`BlockInfo`](kona_protocol::BlockInfo). 
+ +mod blob_data; +pub use blob_data::BlobData; + +mod ethereum; +pub use ethereum::EthereumDataSource; + +mod blobs; +pub use blobs::BlobSource; + +mod calldata; +pub use calldata::CalldataSource; diff --git a/kona/crates/protocol/derive/src/sources/variant.rs b/rust/kona/crates/protocol/derive/src/sources/variant.rs similarity index 100% rename from kona/crates/protocol/derive/src/sources/variant.rs rename to rust/kona/crates/protocol/derive/src/sources/variant.rs diff --git a/kona/crates/protocol/derive/src/stages/attributes_queue.rs b/rust/kona/crates/protocol/derive/src/stages/attributes_queue.rs similarity index 99% rename from kona/crates/protocol/derive/src/stages/attributes_queue.rs rename to rust/kona/crates/protocol/derive/src/stages/attributes_queue.rs index 2fc54e022f2..72e3e720626 100644 --- a/kona/crates/protocol/derive/src/stages/attributes_queue.rs +++ b/rust/kona/crates/protocol/derive/src/stages/attributes_queue.rs @@ -63,7 +63,7 @@ where self.batch = Some(batch); self.is_last_in_span = self.prev.is_last_in_span(); } - self.batch.as_ref().cloned().ok_or(PipelineError::Eof.temp()) + self.batch.clone().ok_or(PipelineError::Eof.temp()) } /// Returns the next [`OpAttributesWithParent`] from the current batch. 
@@ -187,7 +187,7 @@ where { async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { match signal { - s @ Signal::Reset(_) | s @ Signal::Activation(_) => { + s @ (Signal::Reset(_) | Signal::Activation(_)) => { self.prev.signal(s).await?; self.batch = None; self.is_last_in_span = false; diff --git a/kona/crates/protocol/derive/src/stages/batch/batch_provider.rs b/rust/kona/crates/protocol/derive/src/stages/batch/batch_provider.rs similarity index 100% rename from kona/crates/protocol/derive/src/stages/batch/batch_provider.rs rename to rust/kona/crates/protocol/derive/src/stages/batch/batch_provider.rs diff --git a/kona/crates/protocol/derive/src/stages/batch/batch_queue.rs b/rust/kona/crates/protocol/derive/src/stages/batch/batch_queue.rs similarity index 98% rename from kona/crates/protocol/derive/src/stages/batch/batch_queue.rs rename to rust/kona/crates/protocol/derive/src/stages/batch/batch_queue.rs index 99b6ff674e6..d3e506358d6 100644 --- a/kona/crates/protocol/derive/src/stages/batch/batch_queue.rs +++ b/rust/kona/crates/protocol/derive/src/stages/batch/batch_queue.rs @@ -80,9 +80,7 @@ where /// The parent is used to set the parent hash of the batch. /// The parent is verified when the batch is later validated. pub fn pop_next_batch(&mut self, parent: L2BlockInfo) -> Option { - if self.next_spans.is_empty() { - panic!("Invalid state: must have next spans to pop"); - } + assert!(!self.next_spans.is_empty(), "Invalid state: must have next spans to pop"); let mut next = self.next_spans.remove(0); next.parent_hash = parent.block_info.hash; Some(next) @@ -91,7 +89,7 @@ where /// Derives the next batch to apply on top of the current L2 safe head. /// Follows the validity rules imposed on consecutive batches. /// Based on currently available buffered batch and L1 origin information. - /// A [PipelineError::Eof] is returned if no batch can be derived yet. + /// A [`PipelineError::Eof`] is returned if no batch can be derived yet. 
pub async fn derive_next_batch( &mut self, empty: bool, @@ -137,11 +135,11 @@ where // Drop Future batches post-holocene. // // See: - if !self.cfg.is_holocene_active(origin.timestamp) { - remaining.push(batch.clone()); - } else { + if self.cfg.is_holocene_active(origin.timestamp) { self.prev.flush(); warn!(target: "batch_queue", "[HOLOCENE] Dropping future batch with parent: {}", parent.block_info.number); + } else { + remaining.push(batch.clone()); } } BatchValidity::Drop(reason) => { @@ -149,7 +147,6 @@ where // stage. self.prev.flush(); warn!(target: "batch_queue", "Dropping batch with parent: {}, reason: {}", parent.block_info, reason); - continue; } BatchValidity::Accept => { next_batch = Some(batch.clone()); @@ -170,7 +167,6 @@ where } warn!(target: "batch_queue", "[HOLOCENE] Dropping outdated batch with parent: {}", parent.block_info.number); - continue; } } } @@ -324,7 +320,13 @@ where // Batches prior to the l1 origin of the l2 safe head are not accepted. if self.origin != self.prev.origin() { self.origin = self.prev.origin(); - if !origin_behind { + if origin_behind { + // This is to handle the special case of startup. + // At startup, the batch queue is reset and includes the + // l1 origin. That is the only time where immediately after + // reset is called, the origin behind is false. + self.l1_blocks.clear(); + } else { let origin = match self.origin.as_ref().ok_or(PipelineError::MissingOrigin.crit()) { Ok(o) => o, Err(e) => { @@ -332,12 +334,6 @@ where } }; self.l1_blocks.push(*origin); - } else { - // This is to handle the special case of startup. - // At startup, the batch queue is reset and includes the - // l1 origin. That is the only time where immediately after - // reset is called, the origin behind is false. 
- self.l1_blocks.clear(); } info!(target: "batch_queue", "Advancing batch queue origin: {:?}", self.origin); } @@ -346,14 +342,14 @@ where let mut out_of_data = false; match self.prev.next_batch(parent, &self.l1_blocks).await { Ok(b) => { - if !origin_behind { - self.add_batch(b, parent).await.ok(); - } else { + if origin_behind { warn!(target: "batch_queue", "Dropping batch: Origin is behind"); + } else { + self.add_batch(b, parent).await.ok(); } } Err(e) => { - if let PipelineErrorKind::Temporary(PipelineError::Eof) = e { + if e == PipelineErrorKind::Temporary(PipelineError::Eof) { out_of_data = true; } else { return Err(e); @@ -447,7 +443,7 @@ where self.l1_blocks.push(l1_origin); self.next_spans.clear(); } - s @ Signal::Activation(_) | s @ Signal::FlushChannel => { + s @ (Signal::Activation(_) | Signal::FlushChannel) => { self.prev.signal(s).await?; self.batches.clear(); self.next_spans.clear(); diff --git a/kona/crates/protocol/derive/src/stages/batch/batch_stream.rs b/rust/kona/crates/protocol/derive/src/stages/batch/batch_stream.rs similarity index 100% rename from kona/crates/protocol/derive/src/stages/batch/batch_stream.rs rename to rust/kona/crates/protocol/derive/src/stages/batch/batch_stream.rs diff --git a/kona/crates/protocol/derive/src/stages/batch/batch_validator.rs b/rust/kona/crates/protocol/derive/src/stages/batch/batch_validator.rs similarity index 99% rename from kona/crates/protocol/derive/src/stages/batch/batch_validator.rs rename to rust/kona/crates/protocol/derive/src/stages/batch/batch_validator.rs index 45c8d3c51fd..9c641e1855c 100644 --- a/kona/crates/protocol/derive/src/stages/batch/batch_validator.rs +++ b/rust/kona/crates/protocol/derive/src/stages/batch/batch_validator.rs @@ -1,4 +1,4 @@ -//! Contains the [BatchValidator] stage. +//! Contains the [`BatchValidator`] stage. use super::NextBatchProvider; use crate::{ @@ -77,15 +77,15 @@ where // Batches prior to the l1 origin of the l2 safe head are not accepted. 
if self.origin != self.prev.origin() { self.origin = self.prev.origin(); - if !origin_behind { - let origin = self.origin.as_ref().ok_or(PipelineError::MissingOrigin.crit())?; - self.l1_blocks.push(*origin); - } else { + if origin_behind { // This is to handle the special case of startup. // At startup, the batch validator is reset and includes the // l1 origin. That is the only time when immediately after // reset is called, the origin behind is false. self.l1_blocks.clear(); + } else { + let origin = self.origin.as_ref().ok_or(PipelineError::MissingOrigin.crit())?; + self.l1_blocks.push(*origin); } debug!( target: "batch_validator", @@ -315,7 +315,7 @@ where self.l1_blocks.clear(); self.l1_blocks.push(l1_origin); } - s @ Signal::Activation(_) | s @ Signal::FlushChannel | s @ Signal::ProvideBlock(_) => { + s @ (Signal::Activation(_) | Signal::FlushChannel | Signal::ProvideBlock(_)) => { self.prev.signal(s).await?; } } diff --git a/rust/kona/crates/protocol/derive/src/stages/batch/mod.rs b/rust/kona/crates/protocol/derive/src/stages/batch/mod.rs new file mode 100644 index 00000000000..2cc9b630c86 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/stages/batch/mod.rs @@ -0,0 +1,53 @@ +//! Contains stages pertaining to the processing of [`Batch`]es. +//! +//! Sitting after the [`ChannelReader`](crate::stages::channel::ChannelReader) stage, the +//! [`BatchStream`] and [`BatchProvider`] stages are responsible for validating and ordering the +//! [`Batch`]es. The [`BatchStream`] stage is responsible for streaming +//! [`SingleBatch`](kona_protocol::SingleBatch)es from [`SpanBatch`](kona_protocol::SpanBatch)es, +//! while the [`BatchProvider`] stage is responsible for ordering and validating the [`Batch`]es +//! for the [`AttributesQueue`](crate::stages::attributes_queue::AttributesQueue) stage. 
+ +use crate::types::PipelineResult; +use alloc::boxed::Box; +use async_trait::async_trait; +use kona_protocol::{Batch, BlockInfo, L2BlockInfo}; + +mod batch_stream; +pub use batch_stream::{BatchStream, BatchStreamProvider}; + +mod batch_queue; +pub use batch_queue::BatchQueue; + +mod batch_validator; +pub use batch_validator::BatchValidator; + +mod batch_provider; +pub use batch_provider::BatchProvider; + +/// Provides [`Batch`]es for the [`BatchQueue`] and [`BatchValidator`] stages. +#[async_trait] +pub trait NextBatchProvider { + /// Returns the next [`Batch`] in the [`ChannelReader`] stage, if the stage is not complete. + /// This function can only be called once while the stage is in progress, and will return + /// [`None`] on subsequent calls unless the stage is reset or complete. If the stage is + /// complete and the batch has been consumed, an [PipelineError::Eof] error is returned. + /// + /// [`ChannelReader`]: crate::stages::ChannelReader + /// [PipelineError::Eof]: crate::errors::PipelineError::Eof + async fn next_batch( + &mut self, + parent: L2BlockInfo, + l1_origins: &[BlockInfo], + ) -> PipelineResult; + + /// Returns the number of [`SingleBatch`]es that are currently buffered in the [`BatchStream`] + /// from a [`SpanBatch`]. + /// + /// [`SpanBatch`]: kona_protocol::SpanBatch + /// [`SingleBatch`]: kona_protocol::SingleBatch + fn span_buffer_size(&self) -> usize; + + /// Allows the stage to flush the buffer in the [`crate::stages::BatchStream`] + /// if an invalid single batch is found. Pre-holocene hardfork, this will be a no-op. 
+ fn flush(&mut self); +} diff --git a/kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs b/rust/kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs similarity index 95% rename from kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs rename to rust/kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs index 248149bd9db..9741e7743d6 100644 --- a/kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs +++ b/rust/kona/crates/protocol/derive/src/stages/channel/channel_assembler.rs @@ -1,4 +1,4 @@ -//! This module contains the [ChannelAssembler] stage. +//! This module contains the [`ChannelAssembler`] stage. use super::{ChannelReaderProvider, NextFrameProvider}; use crate::{ @@ -46,13 +46,9 @@ where /// Returns whether or not the channel currently being assembled has timed out. pub fn is_timed_out(&self) -> PipelineResult { let origin = self.origin().ok_or(PipelineError::MissingOrigin.crit())?; - let is_timed_out = self - .channel - .as_ref() - .map(|c| { - c.open_block_number() + self.cfg.channel_timeout(origin.timestamp) < origin.number - }) - .unwrap_or_default(); + let is_timed_out = self.channel.as_ref().is_some_and(|c| { + c.open_block_number() + self.cfg.channel_timeout(origin.timestamp) < origin.number + }); Ok(is_timed_out) } @@ -67,17 +63,19 @@ where let origin = self.origin().ok_or(PipelineError::MissingOrigin.crit())?; // Time out the channel if it has timed out. - if let Some(channel) = self.channel.as_ref() { - if self.is_timed_out()? { - warn!( - target: "channel_assembler", - "Channel (ID: {}) timed out at L1 origin #{}, open block #{}. Discarding channel.", - hex::encode(channel.id()), - origin.number, - channel.open_block_number() - ); - self.channel = None; - } + if let Some(channel) = self.channel.as_ref() && + self.is_timed_out()? 
+ { + let channel_id = hex::encode(channel.id()); + let open_block = channel.open_block_number(); + warn!( + target: "channel_assembler", + "Channel (ID: {}) timed out at L1 origin #{}, open block #{}. Discarding channel.", + channel_id, + origin.number, + open_block + ); + self.channel = None; } // Grab the next frame from the previous stage. diff --git a/kona/crates/protocol/derive/src/stages/channel/channel_bank.rs b/rust/kona/crates/protocol/derive/src/stages/channel/channel_bank.rs similarity index 100% rename from kona/crates/protocol/derive/src/stages/channel/channel_bank.rs rename to rust/kona/crates/protocol/derive/src/stages/channel/channel_bank.rs diff --git a/rust/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs b/rust/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs new file mode 100644 index 00000000000..01feda60c1d --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/stages/channel/channel_provider.rs @@ -0,0 +1,364 @@ +//! This module contains the [`ChannelProvider`] stage. + +use super::{ChannelAssembler, ChannelBank, ChannelReaderProvider, NextFrameProvider}; +use crate::{ + errors::PipelineError, + traits::{OriginAdvancer, OriginProvider, SignalReceiver}, + types::{PipelineResult, Signal}, +}; +use alloc::{boxed::Box, sync::Arc}; +use alloy_primitives::Bytes; +use async_trait::async_trait; +use core::fmt::Debug; +use kona_genesis::RollupConfig; +use kona_protocol::BlockInfo; + +/// The [`ChannelProvider`] stage is a mux between the [`ChannelBank`] and [`ChannelAssembler`] +/// stages. +/// +/// Rules: +/// When Holocene is not active, the [`ChannelBank`] is used. +/// When Holocene is active, the [`ChannelAssembler`] is used. +#[derive(Debug)] +pub struct ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + /// The rollup configuration. + pub cfg: Arc, + /// The previous stage of the derivation pipeline. + /// + /// If this is set to [`None`], the multiplexer has been activated and the active stage + /// owns the previous stage. + /// + /// Must be [`None`] if `channel_bank` or `channel_assembler` is [`Some`]. + pub prev: Option

, + /// The channel bank stage of the provider. + /// + /// Must be [`None`] if `prev` or `channel_assembler` is [`Some`]. + pub channel_bank: Option>, + /// The channel assembler stage of the provider. + /// + /// Must be [`None`] if `prev` or `channel_bank` is [`Some`]. + pub channel_assembler: Option>, +} + +impl

ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + /// Creates a new [`ChannelProvider`] with the given configuration and previous stage. + pub const fn new(cfg: Arc, prev: P) -> Self { + Self { cfg, prev: Some(prev), channel_bank: None, channel_assembler: None } + } + + /// Attempts to update the active stage of the mux. + pub(crate) fn attempt_update(&mut self) -> PipelineResult<()> { + let origin = self.origin().ok_or(PipelineError::MissingOrigin.crit())?; + if let Some(prev) = self.prev.take() { + // On the first call to `attempt_update`, we need to determine the active stage to + // initialize the mux with. + if self.cfg.is_holocene_active(origin.timestamp) { + self.channel_assembler = Some(ChannelAssembler::new(self.cfg.clone(), prev)); + } else { + self.channel_bank = Some(ChannelBank::new(self.cfg.clone(), prev)); + } + } else if self.channel_bank.is_some() && self.cfg.is_holocene_active(origin.timestamp) { + // If the channel bank is active and Holocene is also active, transition to the channel + // assembler. + let channel_bank = self.channel_bank.take().expect("Must have channel bank"); + self.channel_assembler = + Some(ChannelAssembler::new(self.cfg.clone(), channel_bank.prev)); + } else if self.channel_assembler.is_some() && !self.cfg.is_holocene_active(origin.timestamp) + { + // If the channel assembler is active, and Holocene is not active, it indicates an L1 + // reorg around Holocene activation. Transition back to the channel bank + // until Holocene re-activates. + let channel_assembler = + self.channel_assembler.take().expect("Must have channel assembler"); + self.channel_bank = Some(ChannelBank::new(self.cfg.clone(), channel_assembler.prev)); + } + Ok(()) + } +} + +#[async_trait] +impl

OriginAdvancer for ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, +{ + async fn advance_origin(&mut self) -> PipelineResult<()> { + self.attempt_update()?; + + if let Some(channel_assembler) = self.channel_assembler.as_mut() { + channel_assembler.advance_origin().await + } else if let Some(channel_bank) = self.channel_bank.as_mut() { + channel_bank.advance_origin().await + } else { + Err(PipelineError::NotEnoughData.temp()) + } + } +} + +impl

OriginProvider for ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + fn origin(&self) -> Option { + self.channel_assembler.as_ref().map_or_else( + || { + self.channel_bank.as_ref().map_or_else( + || self.prev.as_ref().and_then(|prev| prev.origin()), + |channel_bank| channel_bank.origin(), + ) + }, + |channel_assembler| channel_assembler.origin(), + ) + } +} + +#[async_trait] +impl

SignalReceiver for ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, +{ + async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { + self.attempt_update()?; + + if let Some(channel_assembler) = self.channel_assembler.as_mut() { + channel_assembler.signal(signal).await + } else if let Some(channel_bank) = self.channel_bank.as_mut() { + channel_bank.signal(signal).await + } else { + Err(PipelineError::NotEnoughData.temp()) + } + } +} + +#[async_trait] +impl

ChannelReaderProvider for ChannelProvider

+where + P: NextFrameProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, +{ + async fn next_data(&mut self) -> PipelineResult> { + self.attempt_update()?; + + if let Some(channel_assembler) = self.channel_assembler.as_mut() { + channel_assembler.next_data().await + } else if let Some(channel_bank) = self.channel_bank.as_mut() { + channel_bank.next_data().await + } else { + Err(PipelineError::NotEnoughData.temp()) + } + } +} + +#[cfg(test)] +mod test { + use crate::{ + ChannelProvider, ChannelReaderProvider, OriginProvider, PipelineError, ResetSignal, + SignalReceiver, test_utils::TestNextFrameProvider, + }; + use alloc::{sync::Arc, vec}; + use kona_genesis::{HardForkConfig, RollupConfig}; + use kona_protocol::BlockInfo; + + #[test] + fn test_channel_provider_assembler_active() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + assert!(channel_provider.attempt_update().is_ok()); + assert!(channel_provider.prev.is_none()); + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_some()); + } + + #[test] + fn test_channel_provider_bank_active() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig::default()); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + assert!(channel_provider.attempt_update().is_ok()); + assert!(channel_provider.prev.is_none()); + assert!(channel_provider.channel_bank.is_some()); + assert!(channel_provider.channel_assembler.is_none()); + } + + #[test] + fn test_channel_provider_retain_current_bank() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig::default()); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + // Assert the multiplexer hasn't been 
initialized. + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_none()); + assert!(channel_provider.prev.is_some()); + + // Load in the active stage. + channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_some()); + assert!(channel_provider.channel_assembler.is_none()); + assert!(channel_provider.prev.is_none()); + // Ensure the active stage is retained on the second call. + channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_some()); + assert!(channel_provider.channel_assembler.is_none()); + assert!(channel_provider.prev.is_none()); + } + + #[test] + fn test_channel_provider_retain_current_assembler() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + // Assert the multiplexer hasn't been initialized. + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_none()); + assert!(channel_provider.prev.is_some()); + + // Load in the active stage. + channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_some()); + assert!(channel_provider.prev.is_none()); + // Ensure the active stage is retained on the second call. 
+ channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_some()); + assert!(channel_provider.prev.is_none()); + } + + #[test] + fn test_channel_provider_transition_stage() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(2), ..Default::default() }, + ..Default::default() + }); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + channel_provider.attempt_update().unwrap(); + + // Update the L1 origin to Holocene activation. + let Some(ref mut stage) = channel_provider.channel_bank else { + panic!("Expected ChannelBank"); + }; + stage.prev.block_info = Some(BlockInfo { number: 1, timestamp: 2, ..Default::default() }); + + // Transition to the ChannelAssembler stage. + channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_some()); + + assert_eq!(channel_provider.origin().unwrap().number, 1); + } + + #[test] + fn test_channel_provider_transition_stage_backwards() { + let provider = TestNextFrameProvider::new(vec![]); + let cfg = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(2), ..Default::default() }, + ..Default::default() + }); + let mut channel_provider = ChannelProvider::new(cfg, provider); + + channel_provider.attempt_update().unwrap(); + + // Update the L1 origin to Holocene activation. + let Some(ref mut stage) = channel_provider.channel_bank else { + panic!("Expected ChannelBank"); + }; + stage.prev.block_info = Some(BlockInfo { number: 1, timestamp: 2, ..Default::default() }); + + // Transition to the ChannelAssembler stage. 
+ channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_none()); + assert!(channel_provider.channel_assembler.is_some()); + + // Update the L1 origin to before Holocene activation, to simulate a re-org. + let Some(ref mut stage) = channel_provider.channel_assembler else { + panic!("Expected ChannelAssembler"); + }; + stage.prev.block_info = Some(BlockInfo::default()); + + channel_provider.attempt_update().unwrap(); + assert!(channel_provider.channel_bank.is_some()); + assert!(channel_provider.channel_assembler.is_none()); + } + + #[tokio::test] + async fn test_channel_provider_reset_bank() { + let frames = [ + crate::frame!(0xFF, 0, vec![0xDD; 50], false), + crate::frame!(0xFF, 1, vec![0xDD; 50], true), + ]; + let provider = TestNextFrameProvider::new(frames.into_iter().rev().map(Ok).collect()); + let cfg = Arc::new(RollupConfig::default()); + let mut channel_provider = ChannelProvider::new(cfg.clone(), provider); + + // Load in the first frame. + assert_eq!( + channel_provider.next_data().await.unwrap_err(), + PipelineError::NotEnoughData.temp() + ); + let Some(channel_bank) = channel_provider.channel_bank.as_mut() else { + panic!("Expected ChannelBank"); + }; + // Ensure a channel is in the queue. + assert!(channel_bank.channel_queue.len() == 1); + + // Reset the channel provider. + channel_provider.signal(ResetSignal::default().signal()).await.unwrap(); + + // Ensure the channel queue is empty after reset. 
+ let Some(channel_bank) = channel_provider.channel_bank.as_mut() else { + panic!("Expected ChannelBank"); + }; + assert!(channel_bank.channel_queue.is_empty()); + } + + #[tokio::test] + async fn test_channel_provider_reset_assembler() { + let frames = [ + crate::frame!(0xFF, 0, vec![0xDD; 50], false), + crate::frame!(0xFF, 1, vec![0xDD; 50], true), + ]; + let provider = TestNextFrameProvider::new(frames.into_iter().rev().map(Ok).collect()); + let cfg = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }); + let mut channel_provider = ChannelProvider::new(cfg.clone(), provider); + + // Load in the first frame. + assert_eq!( + channel_provider.next_data().await.unwrap_err(), + PipelineError::NotEnoughData.temp() + ); + let Some(channel_assembler) = channel_provider.channel_assembler.as_mut() else { + panic!("Expected ChannelAssembler"); + }; + // Ensure a channel is being built. + assert!(channel_assembler.channel.is_some()); + + // Reset the channel provider. + channel_provider.signal(ResetSignal::default().signal()).await.unwrap(); + + // Ensure the channel assembler is empty after reset. + let Some(channel_assembler) = channel_provider.channel_assembler.as_mut() else { + panic!("Expected ChannelAssembler"); + }; + assert!(channel_assembler.channel.is_none()); + } +} diff --git a/rust/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs b/rust/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs new file mode 100644 index 00000000000..c80b474540d --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/stages/channel/channel_reader.rs @@ -0,0 +1,294 @@ +//! This module contains the `ChannelReader` struct. 
+ +use crate::{ + BatchStreamProvider, OriginAdvancer, OriginProvider, PipelineError, PipelineResult, Signal, + SignalReceiver, +}; +use alloc::{boxed::Box, sync::Arc}; +use alloy_primitives::Bytes; +use async_trait::async_trait; +use core::fmt::Debug; +use kona_genesis::{ + MAX_RLP_BYTES_PER_CHANNEL_BEDROCK, MAX_RLP_BYTES_PER_CHANNEL_FJORD, RollupConfig, +}; +use kona_protocol::{Batch, BatchReader, BlockInfo}; +use tracing::{debug, warn}; + +/// The [`ChannelReader`] provider trait. +#[async_trait] +pub trait ChannelReaderProvider { + /// Pulls the next piece of data from the channel bank. Note that it attempts to pull data out + /// of the channel bank prior to loading data in (unlike most other stages). This is to + /// ensure maintain consistency around channel bank pruning which depends upon the order + /// of operations. + async fn next_data(&mut self) -> PipelineResult>; +} + +/// [`ChannelReader`] is a stateful stage that reads [`Batch`]es from `Channel`s. +/// +/// The [`ChannelReader`] pulls `Channel`s from the channel bank as raw data +/// and pipes it into a `BatchReader`. Since the raw data is compressed, +/// the `BatchReader` first decompresses the data using the first bytes as +/// a compression algorithm identifier. +/// +/// Once the data is decompressed, it is decoded into a `Batch` and passed +/// to the next stage in the pipeline. +#[derive(Debug)] +pub struct ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + /// The previous stage of the derivation pipeline. + pub prev: P, + /// The batch reader. + pub next_batch: Option, + /// The rollup configuration. + pub cfg: Arc, +} + +impl

ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + /// Create a new [`ChannelReader`] stage. + pub const fn new(prev: P, cfg: Arc) -> Self { + Self { prev, next_batch: None, cfg } + } + + /// Creates the batch reader from available channel data. + async fn set_batch_reader(&mut self) -> PipelineResult<()> { + if self.next_batch.is_none() { + let channel = + self.prev.next_data().await?.ok_or(PipelineError::ChannelReaderEmpty.temp())?; + + let origin = self.prev.origin().ok_or(PipelineError::MissingOrigin.crit())?; + let max_rlp_bytes_per_channel = if self.cfg.is_fjord_active(origin.timestamp) { + MAX_RLP_BYTES_PER_CHANNEL_FJORD + } else { + MAX_RLP_BYTES_PER_CHANNEL_BEDROCK + }; + + self.next_batch = + Some(BatchReader::new(&channel[..], max_rlp_bytes_per_channel as usize)); + kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 1); + } + Ok(()) + } + + /// Forces the read to continue with the next channel, resetting any + /// decoding / decompression state to a fresh start. + pub fn next_channel(&mut self) { + self.next_batch = None; + kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 0); + } +} + +#[async_trait] +impl

OriginAdvancer for ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, +{ + async fn advance_origin(&mut self) -> PipelineResult<()> { + self.prev.advance_origin().await + } +} + +#[async_trait] +impl

BatchStreamProvider for ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Send + Debug, +{ + /// This method is called by the `BatchStream` if an invalid span batch is found. + /// In the case of an invalid span batch, the associated channel must be flushed. + /// + /// See: + /// + /// SAFETY: Only called post-holocene activation. + fn flush(&mut self) { + debug!(target: "channel_reader", "[POST-HOLOCENE] Flushing channel"); + self.next_channel(); + } + + async fn next_batch(&mut self) -> PipelineResult { + if let Err(e) = self.set_batch_reader().await { + debug!(target: "channel_reader", "Failed to set batch reader: {:?}", e); + self.next_channel(); + return Err(e); + } + + // SAFETY: The batch reader must be set above. + let next_batch = self.next_batch.as_mut().expect("Batch reader must be set"); + match next_batch.decompress() { + Ok(()) => { + // Record the decompressed size and type. + let size = next_batch.decompressed.len() as f64; + let ty = if next_batch.brotli_used { + BatchReader::CHANNEL_VERSION_BROTLI + } else { + BatchReader::ZLIB_DEFLATE_COMPRESSION_METHOD + }; + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_LATEST_DECOMPRESSED_BATCH_SIZE, + size + ); + kona_macros::set!( + gauge, + crate::metrics::Metrics::PIPELINE_LATEST_DECOMPRESSED_BATCH_TYPE, + ty as f64 + ); + } + Err(err) => { + debug!(target: "channel_reader", ?err, "Failed to decompress batch"); + self.next_channel(); + return Err(PipelineError::NotEnoughData.temp()); + } + } + + // Read the next batch from the reader's decompressed data + match next_batch.next_batch(self.cfg.as_ref()).ok_or(PipelineError::NotEnoughData.temp()) { + Ok(batch) => { + kona_macros::inc!( + gauge, + crate::metrics::Metrics::PIPELINE_READ_BATCHES, + "type" => batch.to_string(), + ); + Ok(batch) + } + Err(e) => { + self.next_channel(); + Err(e) + } + } + } +} + +impl

OriginProvider for ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug, +{ + fn origin(&self) -> Option { + self.prev.origin() + } +} + +#[async_trait] +impl

SignalReceiver for ChannelReader

+where + P: ChannelReaderProvider + OriginAdvancer + OriginProvider + SignalReceiver + Debug + Send, +{ + async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { + match signal { + Signal::FlushChannel => { + // Drop the current in-progress channel. + warn!(target: "channel_reader", "Flushed channel"); + self.next_batch = None; + kona_macros::set!(gauge, crate::metrics::Metrics::PIPELINE_BATCH_READER_SET, 0); + } + s => { + self.prev.signal(s).await?; + self.next_channel(); + } + } + Ok(()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::{ + errors::PipelineErrorKind, test_utils::TestChannelReaderProvider, types::ResetSignal, + }; + use alloc::vec; + use kona_genesis::HardForkConfig; + + fn new_compressed_batch_data() -> Bytes { + let file_contents = + alloc::string::String::from_utf8_lossy(include_bytes!("../../../testdata/batch.hex")); + let file_contents = &(&*file_contents)[..file_contents.len() - 1]; + let data = alloy_primitives::hex::decode(file_contents).unwrap(); + data.into() + } + + #[tokio::test] + async fn test_flush_channel_reader() { + let mock = TestChannelReaderProvider::new(vec![Ok(Some(new_compressed_batch_data()))]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + reader.next_batch = Some(BatchReader::new( + new_compressed_batch_data(), + MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize, + )); + reader.signal(Signal::FlushChannel).await.unwrap(); + assert!(reader.next_batch.is_none()); + } + + #[tokio::test] + async fn test_reset_channel_reader() { + let mock = TestChannelReaderProvider::new(vec![Ok(None)]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + reader.next_batch = Some(BatchReader::new( + vec![0x00, 0x01, 0x02], + MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize, + )); + assert!(!reader.prev.reset); + reader.signal(ResetSignal::default().signal()).await.unwrap(); + assert!(reader.next_batch.is_none()); + assert!(reader.prev.reset); + } + + #[tokio::test] + 
async fn test_next_batch_batch_reader_set_fails() { + let mock = TestChannelReaderProvider::new(vec![Err(PipelineError::Eof.temp())]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + assert_eq!(reader.next_batch().await, Err(PipelineError::Eof.temp())); + assert!(reader.next_batch.is_none()); + } + + #[tokio::test] + async fn test_next_batch_batch_reader_no_data() { + let mock = TestChannelReaderProvider::new(vec![Ok(None)]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + assert!(matches!( + reader.next_batch().await.unwrap_err(), + PipelineErrorKind::Temporary(PipelineError::ChannelReaderEmpty) + )); + assert!(reader.next_batch.is_none()); + } + + #[tokio::test] + async fn test_next_batch_batch_reader_not_enough_data() { + let mut first = new_compressed_batch_data(); + let second = first.split_to(first.len() / 2); + let mock = TestChannelReaderProvider::new(vec![Ok(Some(first)), Ok(Some(second))]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + assert_eq!(reader.next_batch().await, Err(PipelineError::NotEnoughData.temp())); + assert!(reader.next_batch.is_none()); + } + + #[tokio::test] + async fn test_next_batch_succeeds() { + let raw = new_compressed_batch_data(); + let mock = TestChannelReaderProvider::new(vec![Ok(Some(raw))]); + let mut reader = ChannelReader::new(mock, Arc::new(RollupConfig::default())); + let res = reader.next_batch().await.unwrap(); + matches!(res, Batch::Span(_)); + assert!(reader.next_batch.is_some()); + } + + #[tokio::test] + async fn test_flush_post_holocene() { + let raw = new_compressed_batch_data(); + let config = Arc::new(RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }); + let mock = TestChannelReaderProvider::new(vec![Ok(Some(raw))]); + let mut reader = ChannelReader::new(mock, config); + let res = reader.next_batch().await.unwrap(); + matches!(res, 
Batch::Span(_)); + assert!(reader.next_batch.is_some()); + reader.flush(); + assert!(reader.next_batch.is_none()); + } +} diff --git a/rust/kona/crates/protocol/derive/src/stages/channel/mod.rs b/rust/kona/crates/protocol/derive/src/stages/channel/mod.rs new file mode 100644 index 00000000000..73b0aaa821f --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/stages/channel/mod.rs @@ -0,0 +1,34 @@ +//! Stages pertaining to the reading and decoding of channels. +//! +//! Sitting after the [`FrameQueue`](crate::stages::FrameQueue) stage, the [`ChannelBank`] and +//! [`ChannelAssembler`] stages are responsible for reading and decoding the [`Frame`]s into +//! [`Channel`](kona_protocol::Channel)s. The [`ChannelReader`] stage is responsible for decoding +//! the [`Channel`](kona_protocol::Channel)s into [`Batch`](kona_protocol::Batch)es, forwarding +//! the [`Batch`](kona_protocol::Batch)es to the +//! [`BatchQueue`](crate::stages::BatchQueue) stage. + +use crate::types::PipelineResult; +use alloc::boxed::Box; +use async_trait::async_trait; +use kona_protocol::Frame; + +pub(crate) mod channel_provider; +pub use channel_provider::ChannelProvider; + +pub(crate) mod channel_bank; +pub use channel_bank::ChannelBank; + +pub(crate) mod channel_assembler; +pub use channel_assembler::ChannelAssembler; + +pub(crate) mod channel_reader; +pub use channel_reader::{ChannelReader, ChannelReaderProvider}; + +/// Provides frames for the [`ChannelBank`] and [`ChannelAssembler`] stages. +#[async_trait] +pub trait NextFrameProvider { + /// Retrieves the next [`Frame`] from the [`FrameQueue`] stage. 
+ /// + /// [`FrameQueue`]: crate::stages::FrameQueue + async fn next_frame(&mut self) -> PipelineResult; +} diff --git a/kona/crates/protocol/derive/src/stages/frame_queue.rs b/rust/kona/crates/protocol/derive/src/stages/frame_queue.rs similarity index 99% rename from kona/crates/protocol/derive/src/stages/frame_queue.rs rename to rust/kona/crates/protocol/derive/src/stages/frame_queue.rs index 4a993a65c9d..92518579a55 100644 --- a/kona/crates/protocol/derive/src/stages/frame_queue.rs +++ b/rust/kona/crates/protocol/derive/src/stages/frame_queue.rs @@ -1,4 +1,4 @@ -//! This module contains the [FrameQueue] stage of the derivation pipeline. +//! This module contains the [`FrameQueue`] stage of the derivation pipeline. use crate::{ NextFrameProvider, OriginAdvancer, OriginProvider, PipelineError, PipelineResult, Signal, diff --git a/kona/crates/protocol/derive/src/stages/l1_retrieval.rs b/rust/kona/crates/protocol/derive/src/stages/l1_retrieval.rs similarity index 96% rename from kona/crates/protocol/derive/src/stages/l1_retrieval.rs rename to rust/kona/crates/protocol/derive/src/stages/l1_retrieval.rs index f4c7541a955..bc302550ec1 100644 --- a/kona/crates/protocol/derive/src/stages/l1_retrieval.rs +++ b/rust/kona/crates/protocol/derive/src/stages/l1_retrieval.rs @@ -1,4 +1,4 @@ -//! Contains the [L1Retrieval] stage of the derivation pipeline. +//! Contains the [`L1Retrieval`] stage of the derivation pipeline. use crate::{ ActivationSignal, DataAvailabilityProvider, FrameQueueProvider, OriginAdvancer, OriginProvider, @@ -16,13 +16,13 @@ pub trait L1RetrievalProvider { /// Returns the next L1 [`BlockInfo`] in the [`PollingTraversal`] stage, if the stage is not /// complete. This function can only be called once while the stage is in progress, and will /// return [`None`] on subsequent calls unless the stage is reset or complete. 
If the stage - /// is complete and the [`BlockInfo`] has been consumed, an [PipelineError::Eof] error is + /// is complete and the [`BlockInfo`] has been consumed, a [`PipelineError::Eof`] error is /// returned. /// /// [`PollingTraversal`]: crate::PollingTraversal async fn next_l1_block(&mut self) -> PipelineResult>; - /// Returns the batcher [`Address`] from the [kona_genesis::SystemConfig]. + /// Returns the batcher [`Address`] from the [`kona_genesis::SystemConfig`]. fn batcher_addr(&self) -> Address; } @@ -94,7 +94,7 @@ where match self.provider.next(next, self.prev.batcher_addr()).await { Ok(data) => Ok(data), Err(e) => { - if let PipelineErrorKind::Temporary(PipelineError::Eof) = e { + if e == PipelineErrorKind::Temporary(PipelineError::Eof) { self.next = None; self.provider.clear(); } diff --git a/kona/crates/protocol/derive/src/stages/mod.rs b/rust/kona/crates/protocol/derive/src/stages/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/stages/mod.rs rename to rust/kona/crates/protocol/derive/src/stages/mod.rs diff --git a/kona/crates/protocol/derive/src/stages/traversal/indexed.rs b/rust/kona/crates/protocol/derive/src/stages/traversal/indexed.rs similarity index 99% rename from kona/crates/protocol/derive/src/stages/traversal/indexed.rs rename to rust/kona/crates/protocol/derive/src/stages/traversal/indexed.rs index c763788f0fc..5b940d8a3b0 100644 --- a/kona/crates/protocol/derive/src/stages/traversal/indexed.rs +++ b/rust/kona/crates/protocol/derive/src/stages/traversal/indexed.rs @@ -38,11 +38,11 @@ impl L1RetrievalProvider for IndexedTraversal { } async fn next_l1_block(&mut self) -> PipelineResult> { - if !self.done { + if self.done { + Err(PipelineError::Eof.temp()) + } else { self.done = true; Ok(self.block) - } else { - Err(PipelineError::Eof.temp()) } } } diff --git a/kona/crates/protocol/derive/src/stages/traversal/mod.rs b/rust/kona/crates/protocol/derive/src/stages/traversal/mod.rs similarity index 100% rename from 
kona/crates/protocol/derive/src/stages/traversal/mod.rs rename to rust/kona/crates/protocol/derive/src/stages/traversal/mod.rs diff --git a/kona/crates/protocol/derive/src/stages/traversal/polling.rs b/rust/kona/crates/protocol/derive/src/stages/traversal/polling.rs similarity index 99% rename from kona/crates/protocol/derive/src/stages/traversal/polling.rs rename to rust/kona/crates/protocol/derive/src/stages/traversal/polling.rs index 1c6fcdd4c2b..c31160fa250 100644 --- a/kona/crates/protocol/derive/src/stages/traversal/polling.rs +++ b/rust/kona/crates/protocol/derive/src/stages/traversal/polling.rs @@ -38,11 +38,11 @@ impl L1RetrievalProvider for PollingTraversal { } async fn next_l1_block(&mut self) -> PipelineResult> { - if !self.done { + if self.done { + Err(PipelineError::Eof.temp()) + } else { self.done = true; Ok(self.block) - } else { - Err(PipelineError::Eof.temp()) } } } diff --git a/kona/crates/protocol/derive/src/test_utils/attributes_queue.rs b/rust/kona/crates/protocol/derive/src/test_utils/attributes_queue.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/attributes_queue.rs rename to rust/kona/crates/protocol/derive/src/test_utils/attributes_queue.rs diff --git a/kona/crates/protocol/derive/src/test_utils/batch_provider.rs b/rust/kona/crates/protocol/derive/src/test_utils/batch_provider.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/batch_provider.rs rename to rust/kona/crates/protocol/derive/src/test_utils/batch_provider.rs diff --git a/kona/crates/protocol/derive/src/test_utils/batch_stream.rs b/rust/kona/crates/protocol/derive/src/test_utils/batch_stream.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/batch_stream.rs rename to rust/kona/crates/protocol/derive/src/test_utils/batch_stream.rs diff --git a/rust/kona/crates/protocol/derive/src/test_utils/blob_provider.rs b/rust/kona/crates/protocol/derive/src/test_utils/blob_provider.rs new file mode 
100644 index 00000000000..2fbeda03347 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/test_utils/blob_provider.rs @@ -0,0 +1,51 @@ +//! An implementation of the [`BlobProvider`] trait for tests. + +use crate::{BlobProvider, errors::BlobProviderError}; +use alloc::{boxed::Box, vec::Vec}; +use alloy_eips::eip4844::{Blob, IndexedBlobHash}; +use alloy_primitives::{B256, map::HashMap}; +use async_trait::async_trait; +use kona_protocol::BlockInfo; + +/// A mock blob provider for testing. +#[derive(Debug, Clone, Default)] +pub struct TestBlobProvider { + /// Maps block hashes to blob data. + pub blobs: HashMap, + /// whether the blob provider should return an error. + pub should_error: bool, +} + +impl TestBlobProvider { + /// Insert a blob into the mock blob provider. + pub fn insert_blob(&mut self, hash: B256, blob: Blob) { + self.blobs.insert(hash, blob); + } + + /// Clears blobs from the mock blob provider. + pub fn clear(&mut self) { + self.blobs.clear(); + } +} + +#[async_trait] +impl BlobProvider for TestBlobProvider { + type Error = BlobProviderError; + + async fn get_and_validate_blobs( + &mut self, + _block_ref: &BlockInfo, + blob_hashes: &[IndexedBlobHash], + ) -> Result>, Self::Error> { + if self.should_error { + return Err(BlobProviderError::SlotDerivation); + } + let mut blobs = Vec::new(); + for blob_hash in blob_hashes { + if let Some(data) = self.blobs.get(&blob_hash.hash) { + blobs.push(Box::new(*data)); + } + } + Ok(blobs) + } +} diff --git a/kona/crates/protocol/derive/src/test_utils/chain_providers.rs b/rust/kona/crates/protocol/derive/src/test_utils/chain_providers.rs similarity index 99% rename from kona/crates/protocol/derive/src/test_utils/chain_providers.rs rename to rust/kona/crates/protocol/derive/src/test_utils/chain_providers.rs index 090eeb9804f..57d40ffeef4 100644 --- a/kona/crates/protocol/derive/src/test_utils/chain_providers.rs +++ b/rust/kona/crates/protocol/derive/src/test_utils/chain_providers.rs @@ -185,7 +185,7 @@ impl 
BatchValidationProvider for TestL2ChainProvider { self.blocks .iter() .find(|b| b.block_info.number == number) - .cloned() + .copied() .ok_or_else(|| TestProviderError::BlockNotFound) } diff --git a/rust/kona/crates/protocol/derive/src/test_utils/channel_provider.rs b/rust/kona/crates/protocol/derive/src/test_utils/channel_provider.rs new file mode 100644 index 00000000000..e38732957ad --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/test_utils/channel_provider.rs @@ -0,0 +1,63 @@ +//! Mock testing utilities for the [`ChannelBank`](crate::stages::ChannelBank) stage. + +use crate::{ + errors::PipelineError, + stages::NextFrameProvider, + traits::{OriginAdvancer, OriginProvider, SignalReceiver}, + types::{PipelineResult, Signal}, +}; +use alloc::{boxed::Box, vec::Vec}; +use async_trait::async_trait; +use kona_protocol::{BlockInfo, Frame}; + +/// A mock [`NextFrameProvider`] for testing the [`ChannelBank`] stage. +/// +/// [`ChannelBank`]: crate::stages::ChannelBank +#[derive(Debug, Default)] +pub struct TestNextFrameProvider { + /// The data to return. + pub data: Vec>, + /// The block info + pub block_info: Option, + /// Tracks if the channel bank provider has been reset. + pub reset: bool, +} + +impl TestNextFrameProvider { + /// Creates a new [`TestNextFrameProvider`] with the given data. 
+ pub fn new(data: Vec>) -> Self { + Self { data, block_info: Some(BlockInfo::default()), reset: false } + } +} + +impl OriginProvider for TestNextFrameProvider { + fn origin(&self) -> Option { + self.block_info + } +} + +#[async_trait] +impl OriginAdvancer for TestNextFrameProvider { + async fn advance_origin(&mut self) -> PipelineResult<()> { + self.block_info = self.block_info.map(|mut bi| { + bi.number += 1; + bi + }); + Ok(()) + } +} + +#[async_trait] +impl NextFrameProvider for TestNextFrameProvider { + async fn next_frame(&mut self) -> PipelineResult { + self.data.pop().unwrap_or(Err(PipelineError::Eof.temp())) + } +} + +#[async_trait] +impl SignalReceiver for TestNextFrameProvider { + async fn signal(&mut self, _: Signal) -> PipelineResult<()> { + self.reset = true; + Ok(()) + } +} diff --git a/rust/kona/crates/protocol/derive/src/test_utils/channel_reader.rs b/rust/kona/crates/protocol/derive/src/test_utils/channel_reader.rs new file mode 100644 index 00000000000..ae66a2c4a37 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/test_utils/channel_reader.rs @@ -0,0 +1,58 @@ +//! Test utilities for the [`ChannelReader`](crate::stages::ChannelReader) stage. + +use crate::{ + ChannelReaderProvider, OriginAdvancer, OriginProvider, PipelineError, PipelineResult, Signal, + SignalReceiver, +}; +use alloc::{boxed::Box, vec::Vec}; +use alloy_primitives::Bytes; +use async_trait::async_trait; +use kona_protocol::BlockInfo; + +/// A mock [`ChannelReaderProvider`] for testing the [`ChannelReader`] stage. +/// +/// [`ChannelReader`]: crate::stages::ChannelReader +#[derive(Debug, Default)] +pub struct TestChannelReaderProvider { + /// The data to return. + pub data: Vec>>, + /// The origin block info + pub block_info: Option, + /// Tracks if the channel reader provider has been reset. + pub reset: bool, +} + +impl TestChannelReaderProvider { + /// Creates a new [`TestChannelReaderProvider`] with the given data. 
+ pub fn new(data: Vec>>) -> Self { + Self { data, block_info: Some(BlockInfo::default()), reset: false } + } +} + +impl OriginProvider for TestChannelReaderProvider { + fn origin(&self) -> Option { + self.block_info + } +} + +#[async_trait] +impl OriginAdvancer for TestChannelReaderProvider { + async fn advance_origin(&mut self) -> PipelineResult<()> { + Ok(()) + } +} + +#[async_trait] +impl ChannelReaderProvider for TestChannelReaderProvider { + async fn next_data(&mut self) -> PipelineResult> { + self.data.pop().unwrap_or(Err(PipelineError::Eof.temp())) + } +} + +#[async_trait] +impl SignalReceiver for TestChannelReaderProvider { + async fn signal(&mut self, _: Signal) -> PipelineResult<()> { + self.reset = true; + Ok(()) + } +} diff --git a/kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs b/rust/kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs similarity index 90% rename from kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs rename to rust/kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs index d4445e71750..93ab9e6dcce 100644 --- a/kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs +++ b/rust/kona/crates/protocol/derive/src/test_utils/data_availability_provider.rs @@ -1,4 +1,4 @@ -//! An implementation of the [DataAvailabilityProvider] trait for tests. +//! An implementation of the [`DataAvailabilityProvider`] trait for tests. 
use crate::{errors::PipelineError, traits::DataAvailabilityProvider, types::PipelineResult}; use alloc::{boxed::Box, vec::Vec}; diff --git a/kona/crates/protocol/derive/src/test_utils/frame_queue.rs b/rust/kona/crates/protocol/derive/src/test_utils/frame_queue.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/frame_queue.rs rename to rust/kona/crates/protocol/derive/src/test_utils/frame_queue.rs diff --git a/kona/crates/protocol/derive/src/test_utils/frames.rs b/rust/kona/crates/protocol/derive/src/test_utils/frames.rs similarity index 97% rename from kona/crates/protocol/derive/src/test_utils/frames.rs rename to rust/kona/crates/protocol/derive/src/test_utils/frames.rs index aee7e783624..987bfb9bb16 100644 --- a/kona/crates/protocol/derive/src/test_utils/frames.rs +++ b/rust/kona/crates/protocol/derive/src/test_utils/frames.rs @@ -22,7 +22,7 @@ pub struct FrameQueueBuilder { fn encode_frames(frames: &[Frame]) -> Bytes { let mut bytes = Vec::new(); bytes.extend_from_slice(&[DERIVATION_VERSION_0]); - for frame in frames.iter() { + for frame in frames { bytes.extend_from_slice(&frame.encode()); } Bytes::from(bytes) @@ -107,10 +107,10 @@ impl FrameQueueAsserter { /// Asserts that holocene is active. pub fn holocene_active(&self, active: bool) { let holocene = self.inner.is_holocene_active(self.inner.origin().unwrap_or_default()); - if !active { - assert!(!holocene); - } else { + if active { assert!(holocene); + } else { + assert!(!holocene); } } @@ -122,7 +122,7 @@ impl FrameQueueAsserter { /// Asserts that the frame queue produces the expected frames. 
pub async fn next_frames(mut self) { - for eframe in self.expected_frames.into_iter() { + for eframe in self.expected_frames { let frame = self.inner.next_frame().await.expect("unexpected frame"); assert_eq!(frame, eframe); } diff --git a/kona/crates/protocol/derive/src/test_utils/macros.rs b/rust/kona/crates/protocol/derive/src/test_utils/macros.rs similarity index 77% rename from kona/crates/protocol/derive/src/test_utils/macros.rs rename to rust/kona/crates/protocol/derive/src/test_utils/macros.rs index 4fbe58e5ff7..559fc2d1a07 100644 --- a/kona/crates/protocol/derive/src/test_utils/macros.rs +++ b/rust/kona/crates/protocol/derive/src/test_utils/macros.rs @@ -1,6 +1,6 @@ //! Macros used across test utilities. -/// A shorthand syntax for constructing [kona_protocol::Frame]s. +/// A shorthand syntax for constructing [`kona_protocol::Frame`]s. #[macro_export] macro_rules! frame { ($id:expr, $number:expr, $data:expr, $is_last:expr) => { @@ -8,7 +8,7 @@ macro_rules! frame { }; } -/// A shorthand syntax for constructing a list of [kona_protocol::Frame]s. +/// A shorthand syntax for constructing a list of [`kona_protocol::Frame`]s. #[macro_export] macro_rules! 
frames { ($id:expr, $number:expr, $data:expr, $count:expr) => {{ diff --git a/kona/crates/protocol/derive/src/test_utils/mod.rs b/rust/kona/crates/protocol/derive/src/test_utils/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/mod.rs rename to rust/kona/crates/protocol/derive/src/test_utils/mod.rs diff --git a/kona/crates/protocol/derive/src/test_utils/pipeline.rs b/rust/kona/crates/protocol/derive/src/test_utils/pipeline.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/pipeline.rs rename to rust/kona/crates/protocol/derive/src/test_utils/pipeline.rs diff --git a/kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs b/rust/kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs similarity index 99% rename from kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs rename to rust/kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs index 65f1d7d121a..c10d25144cd 100644 --- a/kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs +++ b/rust/kona/crates/protocol/derive/src/test_utils/sys_config_fetcher.rs @@ -69,7 +69,7 @@ impl L2ChainProvider for TestSystemConfigL2Fetcher { ) -> Result::Error> { self.system_configs .get(&number) - .cloned() + .copied() .ok_or_else(|| TestSystemConfigL2FetcherError::NotFound(number)) } } diff --git a/rust/kona/crates/protocol/derive/src/test_utils/tracing.rs b/rust/kona/crates/protocol/derive/src/test_utils/tracing.rs new file mode 100644 index 00000000000..d335ad9de67 --- /dev/null +++ b/rust/kona/crates/protocol/derive/src/test_utils/tracing.rs @@ -0,0 +1,58 @@ +//! This module contains a subscriber layer for `tracing-subscriber` that collects traces and their +//! log levels. + +use alloc::{format, string::String, sync::Arc, vec::Vec}; +use spin::Mutex; +use tracing::{Event, Level, Subscriber}; +use tracing_subscriber::{Layer, layer::Context}; + +/// The storage for the collected traces. 
+#[derive(Debug, Default, Clone)] +pub struct TraceStorage(pub Arc>>); + +impl TraceStorage { + /// Returns the items in the storage that match the specified level. + pub fn get_by_level(&self, level: Level) -> Vec { + self.0 + .lock() + .iter() + .filter(|&(l, _message)| *l == level) + .map(|(_l, message)| message.clone()) + .collect() + } + + /// Locks the storage and returns the items. + pub fn lock(&self) -> spin::MutexGuard<'_, Vec<(Level, String)>> { + self.0.lock() + } + + /// Returns if the storage is empty. + pub fn is_empty(&self) -> bool { + self.0.lock().is_empty() + } +} + +/// A subscriber layer that collects traces and their log levels. +#[derive(Debug, Default)] +pub struct CollectingLayer { + /// The storage for the collected traces. + pub storage: TraceStorage, +} + +impl CollectingLayer { + /// Creates a new collecting layer with the specified storage. + pub const fn new(storage: TraceStorage) -> Self { + Self { storage } + } +} + +impl Layer for CollectingLayer { + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + let metadata = event.metadata(); + let level = *metadata.level(); + let message = format!("{event:?}"); + + let mut storage = self.storage.0.lock(); + storage.push((level, message)); + } +} diff --git a/kona/crates/protocol/derive/src/test_utils/traversal.rs b/rust/kona/crates/protocol/derive/src/test_utils/traversal.rs similarity index 100% rename from kona/crates/protocol/derive/src/test_utils/traversal.rs rename to rust/kona/crates/protocol/derive/src/test_utils/traversal.rs diff --git a/kona/crates/protocol/derive/src/traits/attributes.rs b/rust/kona/crates/protocol/derive/src/traits/attributes.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/attributes.rs rename to rust/kona/crates/protocol/derive/src/traits/attributes.rs diff --git a/kona/crates/protocol/derive/src/traits/data_sources.rs b/rust/kona/crates/protocol/derive/src/traits/data_sources.rs similarity index 93% rename from 
kona/crates/protocol/derive/src/traits/data_sources.rs rename to rust/kona/crates/protocol/derive/src/traits/data_sources.rs index 6368a72fe04..c9b68d1d1eb 100644 --- a/kona/crates/protocol/derive/src/traits/data_sources.rs +++ b/rust/kona/crates/protocol/derive/src/traits/data_sources.rs @@ -9,7 +9,7 @@ use async_trait::async_trait; use core::fmt::Display; use kona_protocol::BlockInfo; -/// The BlobProvider trait specifies the functionality of a data source that can provide blobs. +/// The `BlobProvider` trait specifies the functionality of a data source that can provide blobs. #[async_trait] pub trait BlobProvider { /// The error type for the [`BlobProvider`]. diff --git a/kona/crates/protocol/derive/src/traits/mod.rs b/rust/kona/crates/protocol/derive/src/traits/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/mod.rs rename to rust/kona/crates/protocol/derive/src/traits/mod.rs diff --git a/kona/crates/protocol/derive/src/traits/pipeline.rs b/rust/kona/crates/protocol/derive/src/traits/pipeline.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/pipeline.rs rename to rust/kona/crates/protocol/derive/src/traits/pipeline.rs diff --git a/kona/crates/protocol/derive/src/traits/providers.rs b/rust/kona/crates/protocol/derive/src/traits/providers.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/providers.rs rename to rust/kona/crates/protocol/derive/src/traits/providers.rs diff --git a/kona/crates/protocol/derive/src/traits/reset.rs b/rust/kona/crates/protocol/derive/src/traits/reset.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/reset.rs rename to rust/kona/crates/protocol/derive/src/traits/reset.rs diff --git a/kona/crates/protocol/derive/src/traits/stages.rs b/rust/kona/crates/protocol/derive/src/traits/stages.rs similarity index 100% rename from kona/crates/protocol/derive/src/traits/stages.rs rename to rust/kona/crates/protocol/derive/src/traits/stages.rs 
diff --git a/kona/crates/protocol/derive/src/types/mod.rs b/rust/kona/crates/protocol/derive/src/types/mod.rs similarity index 100% rename from kona/crates/protocol/derive/src/types/mod.rs rename to rust/kona/crates/protocol/derive/src/types/mod.rs diff --git a/kona/crates/protocol/derive/src/types/results.rs b/rust/kona/crates/protocol/derive/src/types/results.rs similarity index 100% rename from kona/crates/protocol/derive/src/types/results.rs rename to rust/kona/crates/protocol/derive/src/types/results.rs diff --git a/kona/crates/protocol/derive/src/types/signals.rs b/rust/kona/crates/protocol/derive/src/types/signals.rs similarity index 96% rename from kona/crates/protocol/derive/src/types/signals.rs rename to rust/kona/crates/protocol/derive/src/types/signals.rs index 0f91294aa8f..35185f90d24 100644 --- a/kona/crates/protocol/derive/src/types/signals.rs +++ b/rust/kona/crates/protocol/derive/src/types/signals.rs @@ -56,7 +56,7 @@ pub struct ResetSignal { } impl ResetSignal { - /// Creates a new [Signal::Reset] from the [`ResetSignal`]. + /// Creates a new [`Signal::Reset`] from the [`ResetSignal`]. pub const fn signal(self) -> Signal { Signal::Reset(self) } @@ -79,7 +79,7 @@ pub struct ActivationSignal { } impl ActivationSignal { - /// Creates a new [Signal::Activation] from the [`ActivationSignal`]. + /// Creates a new [`Signal::Activation`] from the [`ActivationSignal`]. 
pub const fn signal(self) -> Signal { Signal::Activation(self) } diff --git a/kona/crates/protocol/derive/testdata/batch.hex b/rust/kona/crates/protocol/derive/testdata/batch.hex similarity index 100% rename from kona/crates/protocol/derive/testdata/batch.hex rename to rust/kona/crates/protocol/derive/testdata/batch.hex diff --git a/kona/crates/protocol/derive/testdata/raw_batcher_tx.hex b/rust/kona/crates/protocol/derive/testdata/raw_batcher_tx.hex similarity index 100% rename from kona/crates/protocol/derive/testdata/raw_batcher_tx.hex rename to rust/kona/crates/protocol/derive/testdata/raw_batcher_tx.hex diff --git a/rust/kona/crates/protocol/genesis/Cargo.toml b/rust/kona/crates/protocol/genesis/Cargo.toml new file mode 100644 index 00000000000..87446eca090 --- /dev/null +++ b/rust/kona/crates/protocol/genesis/Cargo.toml @@ -0,0 +1,89 @@ +[package] +name = "kona-genesis" +version = "0.4.5" +description = "Optimism genesis types" + +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] +workspace = true + +[dependencies] +# Alloy +alloy-sol-types.workspace = true +alloy-primitives.workspace = true +alloy-consensus.workspace = true +alloy-eips.workspace = true +alloy-hardforks.workspace = true +alloy-op-hardforks.workspace = true +alloy-chains.workspace = true +alloy-genesis.workspace = true + +# Misc +thiserror.workspace = true +derive_more = { workspace = true, features = ["from", "try_from"] } + +# `revm` feature +op-revm = { workspace = true, optional = true } + +# `arbitrary` feature +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# `serde` feature +serde = { workspace = true, optional = true } +serde_repr = { workspace = true, optional = true } + +# `tabled` feature +tabled = { workspace = true, features = ["derive"], optional = true } + +[dev-dependencies] +toml = { workspace = true, 
features = ["parse", "serde"] } +rand = { workspace = true, features = ["thread_rng"] } +serde_json.workspace = true +arbitrary = { workspace = true, features = ["derive"] } +alloy-primitives = { workspace = true, features = ["rand", "arbitrary"] } + +[features] +default = [] +revm = [ "dep:op-revm" ] +tabled = [ "dep:tabled", "std" ] +std = [ + "alloy-chains/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-sol-types/std", + "derive_more/std", + "op-revm?/std", + "serde?/std", + "thiserror/std", + "tabled?/std" +] +arbitrary = [ + "alloy-chains/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "alloy-primitives/rand", + "alloy-sol-types/arbitrary", + "dep:arbitrary", + "std", +] +serde = [ + "alloy-chains/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-hardforks/serde", + "alloy-op-hardforks/serde", + "alloy-primitives/serde", + "dep:serde", + "dep:serde_repr", + "op-revm?/serde", +] diff --git a/rust/kona/crates/protocol/genesis/README.md b/rust/kona/crates/protocol/genesis/README.md new file mode 100644 index 00000000000..1c8c7ce66d0 --- /dev/null +++ b/rust/kona/crates/protocol/genesis/README.md @@ -0,0 +1,24 @@ +## `kona-genesis` + +Genesis types for Optimism. + +### Usage + +_By default, `kona-genesis` enables both `std` and `serde` features._ + +If you're working in a `no_std` environment (like [`kona`][kona]), disable default features like so. + +```toml +[dependencies] +kona-genesis = { version = "x.y.z", default-features = false, features = ["serde"] } +``` + +#### Rollup Config + +`kona-genesis` exports a `RollupConfig`, the primary genesis type for Optimism Consensus. 
+ + + + +[alloy-genesis]: https://github.com/alloy-rs +[kona]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/kona/Cargo.toml#L137 diff --git a/kona/crates/protocol/genesis/src/chain/addresses.rs b/rust/kona/crates/protocol/genesis/src/chain/addresses.rs similarity index 100% rename from kona/crates/protocol/genesis/src/chain/addresses.rs rename to rust/kona/crates/protocol/genesis/src/chain/addresses.rs diff --git a/kona/crates/protocol/genesis/src/chain/altda.rs b/rust/kona/crates/protocol/genesis/src/chain/altda.rs similarity index 90% rename from kona/crates/protocol/genesis/src/chain/altda.rs rename to rust/kona/crates/protocol/genesis/src/chain/altda.rs index 42dfcd869b1..a3cf4815a7a 100644 --- a/kona/crates/protocol/genesis/src/chain/altda.rs +++ b/rust/kona/crates/protocol/genesis/src/chain/altda.rs @@ -1,9 +1,9 @@ -//! Contains the AltDA config type. +//! Contains the `AltDA` config type. use alloc::string::String; use alloy_primitives::Address; -/// AltDA configuration. +/// `AltDA` configuration. /// /// See: #[derive(Debug, Clone, Default, Hash, Eq, PartialEq)] @@ -11,14 +11,14 @@ use alloy_primitives::Address; #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "serde", serde(deny_unknown_fields))] pub struct AltDAConfig { - /// AltDA challenge address + /// `AltDA` challenge address #[cfg_attr(feature = "serde", serde(alias = "da_challenge_contract_address"))] pub da_challenge_address: Option

, - /// AltDA challenge window time (in seconds) + /// `AltDA` challenge window time (in seconds) pub da_challenge_window: Option, - /// AltDA resolution window time (in seconds) + /// `AltDA` resolution window time (in seconds) pub da_resolve_window: Option, - /// AltDA commitment type + /// `AltDA` commitment type pub da_commitment_type: Option, } diff --git a/rust/kona/crates/protocol/genesis/src/chain/config.rs b/rust/kona/crates/protocol/genesis/src/chain/config.rs new file mode 100644 index 00000000000..e15b892f2f9 --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/chain/config.rs @@ -0,0 +1,357 @@ +//! Contains the chain config type. + +use alloc::string::String; +use alloy_chains::Chain; +use alloy_eips::eip1559::BaseFeeParams; +use alloy_primitives::Address; + +use crate::{ + AddressList, AltDAConfig, BaseFeeConfig, ChainGenesis, GRANITE_CHANNEL_TIMEOUT, HardForkConfig, + Roles, RollupConfig, SuperchainLevel, base_fee_params, base_fee_params_canyon, + params::base_fee_config, rollup::DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, +}; + +/// L1 chain configuration from the `alloy-genesis` crate. +pub type L1ChainConfig = alloy_genesis::ChainConfig; + +/// Defines core blockchain settings per block. +/// +/// Tailors unique settings for each network based on +/// its genesis block and superchain configuration. +/// +/// This struct bridges the interface between the [`ChainConfig`][ccr] +/// defined in the [`superchain-registry`][scr] and the [`ChainConfig`][ccg] +/// defined in [`op-geth`][opg]. 
+/// +/// [opg]: https://github.com/ethereum-optimism/op-geth +/// [scr]: https://github.com/ethereum-optimism/superchain-registry +/// [ccg]: https://github.com/ethereum-optimism/op-geth/blob/optimism/params/config.go#L342 +/// [ccr]: https://github.com/ethereum-optimism/superchain-registry/blob/main/ops/internal/config/superchain.go#L70 +#[derive(Debug, Clone, Default, Eq, PartialEq)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct ChainConfig { + /// Chain name (e.g. "Base") + #[cfg_attr(feature = "serde", serde(rename = "Name", alias = "name"))] + pub name: String, + /// L1 chain ID + #[cfg_attr(feature = "serde", serde(skip))] + pub l1_chain_id: u64, + /// Chain public RPC endpoint + #[cfg_attr(feature = "serde", serde(rename = "PublicRPC", alias = "public_rpc"))] + pub public_rpc: String, + /// Chain sequencer RPC endpoint + #[cfg_attr(feature = "serde", serde(rename = "SequencerRPC", alias = "sequencer_rpc"))] + pub sequencer_rpc: String, + /// Chain explorer HTTP endpoint + #[cfg_attr(feature = "serde", serde(rename = "Explorer", alias = "explorer"))] + pub explorer: String, + /// Level of integration with the superchain. + #[cfg_attr(feature = "serde", serde(rename = "SuperchainLevel", alias = "superchain_level"))] + pub superchain_level: SuperchainLevel, + /// Whether the chain is governed by optimism. + #[cfg_attr( + feature = "serde", + serde(rename = "GovernedByOptimism", alias = "governed_by_optimism") + )] + #[cfg_attr(feature = "serde", serde(default))] + pub governed_by_optimism: bool, + /// Time of when a given chain is opted in to the Superchain. + /// If set, hardforks times after the superchain time + /// will be inherited from the superchain-wide config. + #[cfg_attr(feature = "serde", serde(rename = "SuperchainTime", alias = "superchain_time"))] + pub superchain_time: Option, + /// Data availability type. 
+ #[cfg_attr( + feature = "serde", + serde(rename = "DataAvailabilityType", alias = "data_availability_type") + )] + pub data_availability_type: String, + /// Chain ID + #[cfg_attr(feature = "serde", serde(rename = "l2_chain_id", alias = "chain_id"))] + pub chain_id: u64, + /// Chain-specific batch inbox address + #[cfg_attr( + feature = "serde", + serde(rename = "batch_inbox_address", alias = "batch_inbox_addr") + )] + #[cfg_attr(feature = "serde", serde(default))] + pub batch_inbox_addr: Address, + /// The block time in seconds. + #[cfg_attr(feature = "serde", serde(rename = "block_time"))] + pub block_time: u64, + /// The sequencer window size in seconds. + #[cfg_attr(feature = "serde", serde(rename = "seq_window_size"))] + pub seq_window_size: u64, + /// The maximum sequencer drift in seconds. + #[cfg_attr(feature = "serde", serde(rename = "max_sequencer_drift"))] + pub max_sequencer_drift: u64, + /// Gas paying token metadata. Not consumed by downstream `OPStack` components. + #[cfg_attr(feature = "serde", serde(rename = "GasPayingToken", alias = "gas_paying_token"))] + pub gas_paying_token: Option
, + /// Hardfork Config. These values may override the superchain-wide defaults. + #[cfg_attr(feature = "serde", serde(rename = "hardfork_configuration", alias = "hardforks"))] + pub hardfork_config: HardForkConfig, + /// Optimism configuration + #[cfg_attr(feature = "serde", serde(rename = "optimism"))] + pub optimism: Option, + /// Alternative DA configuration + #[cfg_attr(feature = "serde", serde(rename = "alt_da"))] + pub alt_da: Option, + /// Chain-specific genesis information + pub genesis: ChainGenesis, + /// Roles + #[cfg_attr(feature = "serde", serde(rename = "Roles", alias = "roles"))] + pub roles: Option, + /// Addresses + #[cfg_attr(feature = "serde", serde(rename = "Addresses", alias = "addresses"))] + pub addresses: Option, +} + +impl ChainConfig { + /// Returns the base fee params for the chain. + pub fn base_fee_params(&self) -> BaseFeeParams { + self.optimism + .as_ref() + .map(|op| op.pre_canyon_params()) + .unwrap_or_else(|| base_fee_params(self.chain_id)) + } + + /// Returns the canyon base fee params for the chain. + pub fn canyon_base_fee_params(&self) -> BaseFeeParams { + self.optimism + .as_ref() + .map(|op| op.post_canyon_params()) + .unwrap_or_else(|| base_fee_params_canyon(self.chain_id)) + } + + /// Returns the base fee config for the chain. + pub fn base_fee_config(&self) -> BaseFeeConfig { + self.optimism.as_ref().map(|op| *op).unwrap_or_else(|| base_fee_config(self.chain_id)) + } + + /// Loads the rollup config for the OP-Stack chain given the chain config and address list. + #[deprecated(since = "0.2.1", note = "please use `as_rollup_config` instead")] + pub fn load_op_stack_rollup_config(&self) -> RollupConfig { + self.as_rollup_config() + } + + /// Loads the rollup config for the OP-Stack chain given the chain config and address list. 
+ pub fn as_rollup_config(&self) -> RollupConfig { + RollupConfig { + genesis: self.genesis, + l1_chain_id: self.l1_chain_id, + l2_chain_id: Chain::from(self.chain_id), + block_time: self.block_time, + seq_window_size: self.seq_window_size, + max_sequencer_drift: self.max_sequencer_drift, + hardforks: self.hardfork_config, + batch_inbox_address: self.batch_inbox_addr, + deposit_contract_address: self + .addresses + .as_ref() + .and_then(|a| a.optimism_portal_proxy) + .unwrap_or_default(), + l1_system_config_address: self + .addresses + .as_ref() + .and_then(|a| a.system_config_proxy) + .unwrap_or_default(), + protocol_versions_address: self + .addresses + .as_ref() + .and_then(|a| a.address_manager) + .unwrap_or_default(), + superchain_config_address: None, + blobs_enabled_l1_timestamp: None, + da_challenge_address: self + .alt_da + .as_ref() + .and_then(|alt_da| alt_da.da_challenge_address), + + // The below chain parameters can be different per OP-Stack chain, + // but since none of the superchain chains differ, it's not represented in the + // superchain-registry yet. This restriction on superchain-chains may change in the + // future. Test/Alt configurations can still load custom rollup-configs when + // necessary. 
+ channel_timeout: 300, + granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, + interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, + chain_op_config: self.base_fee_config(), + alt_da_config: self.alt_da.clone(), + } + } +} + +#[cfg(test)] +#[cfg(feature = "serde")] +mod tests { + use super::*; + + #[test] + fn test_chain_config_json() { + let raw: &str = r#" + { + "Name": "Base", + "PublicRPC": "https://mainnet.base.org", + "SequencerRPC": "https://mainnet-sequencer.base.org", + "Explorer": "https://explorer.base.org", + "SuperchainLevel": 1, + "GovernedByOptimism": false, + "SuperchainTime": 0, + "DataAvailabilityType": "eth-da", + "l2_chain_id": 8453, + "batch_inbox_address": "0xff00000000000000000000000000000000008453", + "block_time": 2, + "seq_window_size": 3600, + "max_sequencer_drift": 600, + "GasPayingToken": null, + "hardfork_configuration": { + "canyon_time": 1704992401, + "delta_time": 1708560000, + "ecotone_time": 1710374401, + "fjord_time": 1720627201, + "granite_time": 1726070401, + "holocene_time": 1736445601 + }, + "optimism": { + "eip1559Elasticity": 6, + "eip1559Denominator": 50, + "eip1559DenominatorCanyon": 250 + }, + "alt_da": null, + "genesis": { + "l1": { + "number": 17481768, + "hash": "0x5c13d307623a926cd31415036c8b7fa14572f9dac64528e857a470511fc30771" + }, + "l2": { + "number": 0, + "hash": "0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd" + }, + "l2_time": 1686789347, + "system_config": { + "batcherAddress": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9", + "overhead": "0xbc", + "scalar": "0xa6fe0", + "gasLimit": 30000000 + } + }, + "Roles": { + "SystemConfigOwner": "0x14536667cd30e52c0b458baaccb9fada7046e056", + "ProxyAdminOwner": "0x7bb41c3008b3f03fe483b28b8db90e19cf07595c", + "Guardian": "0x09f7150d8c019bef34450d6920f6b3608cefdaf2", + "Challenger": "0x6f8c5ba3f59ea3e76300e3becdc231d656017824", + "Proposer": "0x642229f238fb9de03374be34b0ed8d9de80752c5", + "UnsafeBlockSigner": 
"0xaf6e19be0f9ce7f8afd49a1824851023a8249e8a", + "BatchSubmitter": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9" + }, + "Addresses": { + "AddressManager": "0x8efb6b5c4767b09dc9aa6af4eaa89f749522bae2", + "L1CrossDomainMessengerProxy": "0x866e82a600a1414e583f7f13623f1ac5d58b0afa", + "L1Erc721BridgeProxy": "0x608d94945a64503e642e6370ec598e519a2c1e53", + "L1StandardBridgeProxy": "0x3154cf16ccdb4c6d922629664174b904d80f2c35", + "L2OutputOracleProxy": "0x56315b90c40730925ec5485cf004d835058518a0", + "OptimismMintableErc20FactoryProxy": "0x05cc379ebd9b30bba19c6fa282ab29218ec61d84", + "OptimismPortalProxy": "0x49048044d57e1c92a77f79988d21fa8faf74e97e", + "SystemConfigProxy": "0x73a79fab69143498ed3712e519a88a918e1f4072", + "ProxyAdmin": "0x0475cbcaebd9ce8afa5025828d5b98dfb67e059e", + "AnchorStateRegistryProxy": "0xdb9091e48b1c42992a1213e6916184f9ebdbfedf", + "DelayedWethProxy": "0xa2f2ac6f5af72e494a227d79db20473cf7a1ffe8", + "DisputeGameFactoryProxy": "0x43edb88c4b80fdd2adff2412a7bebf9df42cb40e", + "FaultDisputeGame": "0xcd3c0194db74c23807d4b90a5181e1b28cf7007c", + "Mips": "0x16e83ce5ce29bf90ad9da06d2fe6a15d5f344ce4", + "PermissionedDisputeGame": "0x19009debf8954b610f207d5925eede827805986e", + "PreimageOracle": "0x9c065e11870b891d214bc2da7ef1f9ddfa1be277" + } + } + "#; + + let deserialized: ChainConfig = serde_json::from_str(raw).unwrap(); + assert_eq!(deserialized.name, "Base"); + } + + #[test] + fn test_chain_config_unknown_field_json() { + let raw: &str = r#" + { + "Name": "Base", + "PublicRPC": "https://mainnet.base.org", + "SequencerRPC": "https://mainnet-sequencer.base.org", + "Explorer": "https://explorer.base.org", + "SuperchainLevel": 1, + "GovernedByOptimism": false, + "SuperchainTime": 0, + "DataAvailabilityType": "eth-da", + "l2_chain_id": 8453, + "batch_inbox_address": "0xff00000000000000000000000000000000008453", + "block_time": 2, + "seq_window_size": 3600, + "max_sequencer_drift": 600, + "GasPayingToken": null, + "hardfork_configuration": { + "canyon_time": 
1704992401, + "delta_time": 1708560000, + "ecotone_time": 1710374401, + "fjord_time": 1720627201, + "granite_time": 1726070401, + "holocene_time": 1736445601 + }, + "optimism": { + "eip1559Elasticity": "0x6", + "eip1559Denominator": "0x32", + "eip1559DenominatorCanyon": "0xfa" + }, + "alt_da": null, + "genesis": { + "l1": { + "number": 17481768, + "hash": "0x5c13d307623a926cd31415036c8b7fa14572f9dac64528e857a470511fc30771" + }, + "l2": { + "number": 0, + "hash": "0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd" + }, + "l2_time": 1686789347, + "system_config": { + "batcherAddress": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9", + "overhead": "0xbc", + "scalar": "0xa6fe0", + "gasLimit": 30000000 + } + }, + "Roles": { + "SystemConfigOwner": "0x14536667cd30e52c0b458baaccb9fada7046e056", + "ProxyAdminOwner": "0x7bb41c3008b3f03fe483b28b8db90e19cf07595c", + "Guardian": "0x09f7150d8c019bef34450d6920f6b3608cefdaf2", + "Challenger": "0x6f8c5ba3f59ea3e76300e3becdc231d656017824", + "Proposer": "0x642229f238fb9de03374be34b0ed8d9de80752c5", + "UnsafeBlockSigner": "0xaf6e19be0f9ce7f8afd49a1824851023a8249e8a", + "BatchSubmitter": "0x5050f69a9786f081509234f1a7f4684b5e5b76c9" + }, + "Addresses": { + "AddressManager": "0x8efb6b5c4767b09dc9aa6af4eaa89f749522bae2", + "L1CrossDomainMessengerProxy": "0x866e82a600a1414e583f7f13623f1ac5d58b0afa", + "L1Erc721BridgeProxy": "0x608d94945a64503e642e6370ec598e519a2c1e53", + "L1StandardBridgeProxy": "0x3154cf16ccdb4c6d922629664174b904d80f2c35", + "L2OutputOracleProxy": "0x56315b90c40730925ec5485cf004d835058518a0", + "OptimismMintableErc20FactoryProxy": "0x05cc379ebd9b30bba19c6fa282ab29218ec61d84", + "OptimismPortalProxy": "0x49048044d57e1c92a77f79988d21fa8faf74e97e", + "SystemConfigProxy": "0x73a79fab69143498ed3712e519a88a918e1f4072", + "ProxyAdmin": "0x0475cbcaebd9ce8afa5025828d5b98dfb67e059e", + "AnchorStateRegistryProxy": "0xdb9091e48b1c42992a1213e6916184f9ebdbfedf", + "DelayedWethProxy": 
"0xa2f2ac6f5af72e494a227d79db20473cf7a1ffe8", + "DisputeGameFactoryProxy": "0x43edb88c4b80fdd2adff2412a7bebf9df42cb40e", + "FaultDisputeGame": "0xcd3c0194db74c23807d4b90a5181e1b28cf7007c", + "Mips": "0x16e83ce5ce29bf90ad9da06d2fe6a15d5f344ce4", + "PermissionedDisputeGame": "0x19009debf8954b610f207d5925eede827805986e", + "PreimageOracle": "0x9c065e11870b891d214bc2da7ef1f9ddfa1be277" + }, + "unknown_field": "unknown" + } + "#; + + let err = serde_json::from_str::(raw).unwrap_err(); + assert_eq!(err.classify(), serde_json::error::Category::Data); + } +} diff --git a/kona/crates/protocol/genesis/src/chain/hardfork.rs b/rust/kona/crates/protocol/genesis/src/chain/hardfork.rs similarity index 95% rename from kona/crates/protocol/genesis/src/chain/hardfork.rs rename to rust/kona/crates/protocol/genesis/src/chain/hardfork.rs index b63de5eccce..7bd2e2bf001 100644 --- a/kona/crates/protocol/genesis/src/chain/hardfork.rs +++ b/rust/kona/crates/protocol/genesis/src/chain/hardfork.rs @@ -14,37 +14,37 @@ pub struct HardForkConfig { /// `regolith_time` sets the activation time of the Regolith network-upgrade: /// a pre-mainnet Bedrock change that addresses findings of the Sherlock contest related to /// deposit attributes. "Regolith" is the loose deposited rock that sits on top of Bedrock. - /// Active if regolith_time != None && L2 block timestamp >= Some(regolith_time), inactive + /// Active if `regolith_time` != None && L2 block timestamp >= `Some(regolith_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub regolith_time: Option, /// `canyon_time` sets the activation time of the Canyon network upgrade. - /// Active if `canyon_time` != None && L2 block timestamp >= Some(canyon_time), inactive + /// Active if `canyon_time` != None && L2 block timestamp >= `Some(canyon_time)`, inactive /// otherwise. 
#[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub canyon_time: Option, /// `delta_time` sets the activation time of the Delta network upgrade. - /// Active if `delta_time` != None && L2 block timestamp >= Some(delta_time), inactive + /// Active if `delta_time` != None && L2 block timestamp >= `Some(delta_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub delta_time: Option, /// `ecotone_time` sets the activation time of the Ecotone network upgrade. - /// Active if `ecotone_time` != None && L2 block timestamp >= Some(ecotone_time), inactive + /// Active if `ecotone_time` != None && L2 block timestamp >= `Some(ecotone_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub ecotone_time: Option, /// `fjord_time` sets the activation time of the Fjord network upgrade. - /// Active if `fjord_time` != None && L2 block timestamp >= Some(fjord_time), inactive + /// Active if `fjord_time` != None && L2 block timestamp >= `Some(fjord_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub fjord_time: Option, /// `granite_time` sets the activation time for the Granite network upgrade. - /// Active if `granite_time` != None && L2 block timestamp >= Some(granite_time), inactive + /// Active if `granite_time` != None && L2 block timestamp >= `Some(granite_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub granite_time: Option, /// `holocene_time` sets the activation time for the Holocene network upgrade. - /// Active if `holocene_time` != None && L2 block timestamp >= Some(holocene_time), inactive + /// Active if `holocene_time` != None && L2 block timestamp >= `Some(holocene_time)`, inactive /// otherwise. 
#[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub holocene_time: Option, @@ -54,21 +54,21 @@ pub struct HardForkConfig { /// <=v1.11.1 sequencing the network. /// /// Active if `pectra_blob_schedule_time` != None && L2 block timestamp >= - /// Some(pectra_blob_schedule_time), inactive otherwise. + /// `Some(pectra_blob_schedule_time)`, inactive otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub pectra_blob_schedule_time: Option, /// `isthmus_time` sets the activation time for the Isthmus network upgrade. - /// Active if `isthmus_time` != None && L2 block timestamp >= Some(isthmus_time), inactive + /// Active if `isthmus_time` != None && L2 block timestamp >= `Some(isthmus_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub isthmus_time: Option, /// `jovian_time` sets the activation time for the Jovian network upgrade. - /// Active if `jovian_time` != None && L2 block timestamp >= Some(jovian_time), inactive + /// Active if `jovian_time` != None && L2 block timestamp >= `Some(jovian_time)`, inactive /// otherwise. #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub jovian_time: Option, /// `interop_time` sets the activation time for the Interop network upgrade. - /// Active if `interop_time` != None && L2 block timestamp >= Some(interop_time), inactive + /// Active if `interop_time` != None && L2 block timestamp >= `Some(interop_time)`, inactive /// otherwise. 
#[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] pub interop_time: Option, diff --git a/kona/crates/protocol/genesis/src/chain/mod.rs b/rust/kona/crates/protocol/genesis/src/chain/mod.rs similarity index 100% rename from kona/crates/protocol/genesis/src/chain/mod.rs rename to rust/kona/crates/protocol/genesis/src/chain/mod.rs diff --git a/kona/crates/protocol/genesis/src/chain/roles.rs b/rust/kona/crates/protocol/genesis/src/chain/roles.rs similarity index 100% rename from kona/crates/protocol/genesis/src/chain/roles.rs rename to rust/kona/crates/protocol/genesis/src/chain/roles.rs diff --git a/kona/crates/protocol/genesis/src/genesis.rs b/rust/kona/crates/protocol/genesis/src/genesis.rs similarity index 100% rename from kona/crates/protocol/genesis/src/genesis.rs rename to rust/kona/crates/protocol/genesis/src/genesis.rs diff --git a/rust/kona/crates/protocol/genesis/src/lib.rs b/rust/kona/crates/protocol/genesis/src/lib.rs new file mode 100644 index 00000000000..25ef675f0d9 --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/lib.rs @@ -0,0 +1,66 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +mod params; +pub use params::{ + BASE_MAINNET_BASE_FEE_CONFIG, BASE_MAINNET_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, + BASE_MAINNET_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, + BASE_MAINNET_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, BASE_SEPOLIA_BASE_FEE_CONFIG, + BASE_SEPOLIA_BASE_FEE_PARAMS, BASE_SEPOLIA_BASE_FEE_PARAMS_CANYON, + BASE_SEPOLIA_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, + 
BASE_SEPOLIA_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, + BASE_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, BaseFeeConfig, OP_MAINNET_BASE_FEE_CONFIG, + OP_MAINNET_BASE_FEE_PARAMS, OP_MAINNET_BASE_FEE_PARAMS_CANYON, + OP_MAINNET_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, + OP_MAINNET_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, + OP_MAINNET_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, OP_SEPOLIA_BASE_FEE_CONFIG, + OP_SEPOLIA_BASE_FEE_PARAMS, OP_SEPOLIA_BASE_FEE_PARAMS_CANYON, + OP_SEPOLIA_EIP1559_BASE_FEE_MAX_CHANGE_DENOMINATOR_CANYON, + OP_SEPOLIA_EIP1559_DEFAULT_BASE_FEE_MAX_CHANGE_DENOMINATOR, + OP_SEPOLIA_EIP1559_DEFAULT_ELASTICITY_MULTIPLIER, base_fee_config, base_fee_params, + base_fee_params_canyon, +}; + +mod superchain; +pub use superchain::{ + Chain, ChainList, FaultProofs, Superchain, SuperchainConfig, SuperchainL1Info, SuperchainLevel, + SuperchainParent, Superchains, +}; + +mod updates; +pub use updates::{ + BatcherUpdate, DaFootprintGasScalarUpdate, Eip1559Update, GasConfigUpdate, GasLimitUpdate, + MinBaseFeeUpdate, OperatorFeeUpdate, UnsafeBlockSignerUpdate, +}; + +mod system; +pub use system::{ + BatcherUpdateError, CONFIG_UPDATE_EVENT_VERSION_0, CONFIG_UPDATE_TOPIC, + DaFootprintGasScalarUpdateError, EIP1559UpdateError, GasConfigUpdateError, GasLimitUpdateError, + LogProcessingError, MinBaseFeeUpdateError, OperatorFeeUpdateError, SystemConfig, + SystemConfigLog, SystemConfigUpdate, SystemConfigUpdateError, SystemConfigUpdateKind, + UnsafeBlockSignerUpdateError, +}; + +mod chain; +pub use chain::{ + AddressList, AltDAConfig, BASE_MAINNET_CHAIN_ID, BASE_SEPOLIA_CHAIN_ID, ChainConfig, + HardForkConfig, L1ChainConfig, OP_MAINNET_CHAIN_ID, OP_SEPOLIA_CHAIN_ID, Roles, +}; + +mod genesis; +pub use genesis::ChainGenesis; + +mod rollup; +pub use rollup::{ + DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, FJORD_MAX_SEQUENCER_DRIFT, GRANITE_CHANNEL_TIMEOUT, + MAX_RLP_BYTES_PER_CHANNEL_BEDROCK, MAX_RLP_BYTES_PER_CHANNEL_FJORD, RollupConfig, +}; diff --git 
a/kona/crates/protocol/genesis/src/params.rs b/rust/kona/crates/protocol/genesis/src/params.rs similarity index 95% rename from kona/crates/protocol/genesis/src/params.rs rename to rust/kona/crates/protocol/genesis/src/params.rs index 5b44dd9a8cf..e16dd3b6332 100644 --- a/kona/crates/protocol/genesis/src/params.rs +++ b/rust/kona/crates/protocol/genesis/src/params.rs @@ -2,9 +2,7 @@ use alloy_eips::eip1559::BaseFeeParams; -use crate::{ - BASE_MAINNET_CHAIN_ID, BASE_SEPOLIA_CHAIN_ID, OP_MAINNET_CHAIN_ID, OP_SEPOLIA_CHAIN_ID, -}; +use crate::{BASE_MAINNET_CHAIN_ID, BASE_SEPOLIA_CHAIN_ID, OP_SEPOLIA_CHAIN_ID}; /// Base fee max change denominator for Optimism Mainnet as defined in the Optimism /// [transaction costs](https://docs.optimism.io/app-developers/transactions/fees) doc. @@ -93,9 +91,7 @@ pub const OP_MAINNET_BASE_FEE_PARAMS_CANYON: BaseFeeParams = BaseFeeParams { /// Returns the [`BaseFeeParams`] for the given chain id. pub const fn base_fee_params(chain_id: u64) -> BaseFeeParams { match chain_id { - OP_MAINNET_CHAIN_ID => OP_MAINNET_BASE_FEE_PARAMS, OP_SEPOLIA_CHAIN_ID => OP_SEPOLIA_BASE_FEE_PARAMS, - BASE_MAINNET_CHAIN_ID => OP_MAINNET_BASE_FEE_PARAMS, BASE_SEPOLIA_CHAIN_ID => BASE_SEPOLIA_BASE_FEE_PARAMS, _ => OP_MAINNET_BASE_FEE_PARAMS, } @@ -104,9 +100,7 @@ pub const fn base_fee_params(chain_id: u64) -> BaseFeeParams { /// Returns the [`BaseFeeParams`] for the given chain id, for canyon hardfork. pub const fn base_fee_params_canyon(chain_id: u64) -> BaseFeeParams { match chain_id { - OP_MAINNET_CHAIN_ID => OP_MAINNET_BASE_FEE_PARAMS_CANYON, OP_SEPOLIA_CHAIN_ID => OP_SEPOLIA_BASE_FEE_PARAMS_CANYON, - BASE_MAINNET_CHAIN_ID => OP_MAINNET_BASE_FEE_PARAMS_CANYON, BASE_SEPOLIA_CHAIN_ID => BASE_SEPOLIA_BASE_FEE_PARAMS_CANYON, _ => OP_MAINNET_BASE_FEE_PARAMS_CANYON, } @@ -115,7 +109,6 @@ pub const fn base_fee_params_canyon(chain_id: u64) -> BaseFeeParams { /// Returns the [`BaseFeeConfig`] for the given chain id. 
pub const fn base_fee_config(chain_id: u64) -> BaseFeeConfig { match chain_id { - OP_MAINNET_CHAIN_ID => OP_MAINNET_BASE_FEE_CONFIG, OP_SEPOLIA_CHAIN_ID => OP_SEPOLIA_BASE_FEE_CONFIG, BASE_MAINNET_CHAIN_ID => BASE_MAINNET_BASE_FEE_CONFIG, BASE_SEPOLIA_CHAIN_ID => BASE_SEPOLIA_BASE_FEE_CONFIG, @@ -186,7 +179,7 @@ impl BaseFeeConfig { } } - /// Returns the [BaseFeeParams] before Canyon hardfork. + /// Returns the [`BaseFeeParams`] before Canyon hardfork. pub const fn pre_canyon_params(&self) -> BaseFeeParams { BaseFeeParams { max_change_denominator: self.eip1559_denominator as u128, @@ -194,7 +187,7 @@ impl BaseFeeConfig { } } - /// Returns the [BaseFeeParams] since Canyon hardfork. + /// Returns the [`BaseFeeParams`] since Canyon hardfork. pub const fn post_canyon_params(&self) -> BaseFeeParams { BaseFeeParams { max_change_denominator: self.eip1559_denominator_canyon as u128, @@ -206,6 +199,7 @@ impl BaseFeeConfig { #[cfg(test)] mod tests { use super::*; + use crate::OP_MAINNET_CHAIN_ID; #[test] fn test_base_fee_params_from_chain_id() { diff --git a/rust/kona/crates/protocol/genesis/src/rollup.rs b/rust/kona/crates/protocol/genesis/src/rollup.rs new file mode 100644 index 00000000000..8691197f5cd --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/rollup.rs @@ -0,0 +1,950 @@ +//! Rollup Config Types + +use crate::{AltDAConfig, BaseFeeConfig, ChainGenesis, HardForkConfig, OP_MAINNET_BASE_FEE_CONFIG}; +use alloy_chains::Chain; +use alloy_hardforks::{EthereumHardfork, EthereumHardforks, ForkCondition}; +use alloy_op_hardforks::{OpHardfork, OpHardforks}; +use alloy_primitives::Address; + +/// The max rlp bytes per channel for the Bedrock hardfork. +pub const MAX_RLP_BYTES_PER_CHANNEL_BEDROCK: u64 = 10_000_000; + +/// The max rlp bytes per channel for the Fjord hardfork. +pub const MAX_RLP_BYTES_PER_CHANNEL_FJORD: u64 = 100_000_000; + +/// The max sequencer drift when the Fjord hardfork is active. 
+pub const FJORD_MAX_SEQUENCER_DRIFT: u64 = 1800; + +/// The channel timeout once the Granite hardfork is active. +pub const GRANITE_CHANNEL_TIMEOUT: u64 = 50; + +/// The default interop message expiry window. (1 hour, in seconds) +pub const DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW: u64 = 60 * 60; + +#[cfg(feature = "serde")] +const fn default_granite_channel_timeout() -> u64 { + GRANITE_CHANNEL_TIMEOUT +} + +#[cfg(feature = "serde")] +const fn default_interop_message_expiry_window() -> u64 { + DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW +} + +/// The Rollup configuration. +#[derive(Debug, Clone, Eq, PartialEq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(deny_unknown_fields))] +pub struct RollupConfig { + /// The genesis state of the rollup. + pub genesis: ChainGenesis, + /// The block time of the L2, in seconds. + pub block_time: u64, + /// Sequencer batches may not be more than `MaxSequencerDrift` seconds after + /// the L1 timestamp of the sequencing window end. + /// + /// Note: When L1 has many 1 second consecutive blocks, and L2 grows at fixed 2 seconds, + /// the L2 time may still grow beyond this difference. + /// + /// Note: After the Fjord hardfork, this value becomes a constant of `1800`. + pub max_sequencer_drift: u64, + /// The sequencer window size. + pub seq_window_size: u64, + /// Number of L1 blocks between when a channel can be opened and when it can be closed. + pub channel_timeout: u64, + /// The channel timeout after the Granite hardfork. + #[cfg_attr(feature = "serde", serde(default = "default_granite_channel_timeout"))] + pub granite_channel_timeout: u64, + /// The L1 chain ID + pub l1_chain_id: u64, + /// The L2 chain ID + pub l2_chain_id: Chain, + /// Hardfork timestamps. + #[cfg_attr(feature = "serde", serde(flatten))] + pub hardforks: HardForkConfig, + /// `batch_inbox_address` is the L1 address that batches are sent to. 
+ pub batch_inbox_address: Address, + /// `deposit_contract_address` is the L1 address that deposits are sent to. + pub deposit_contract_address: Address, + /// `l1_system_config_address` is the L1 address that the system config is stored at. + pub l1_system_config_address: Address, + /// `protocol_versions_address` is the L1 address that the protocol versions are stored at. + pub protocol_versions_address: Address, + /// The superchain config address. + #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] + pub superchain_config_address: Option
, + /// `blobs_enabled_l1_timestamp` is the timestamp to start reading blobs as a batch data + /// source. Optional. + #[cfg_attr( + feature = "serde", + serde(rename = "blobs_data", skip_serializing_if = "Option::is_none") + )] + pub blobs_enabled_l1_timestamp: Option, + /// `da_challenge_address` is the L1 address that the data availability challenge contract is + /// stored at. + #[cfg_attr(feature = "serde", serde(skip_serializing_if = "Option::is_none"))] + pub da_challenge_address: Option
, + /// `interop_message_expiry_window` is the maximum time (in seconds) that an initiating message + /// can be referenced on a remote chain before it expires. + #[cfg_attr(feature = "serde", serde(default = "default_interop_message_expiry_window"))] + pub interop_message_expiry_window: u64, + /// `alt_da_config` is the chain-specific DA config for the rollup. + #[cfg_attr(feature = "serde", serde(rename = "alt_da"))] + pub alt_da_config: Option, + /// `chain_op_config` is the chain-specific EIP1559 config for the rollup. + #[cfg_attr(feature = "serde", serde(default = "BaseFeeConfig::optimism"))] + pub chain_op_config: BaseFeeConfig, +} + +#[cfg(feature = "arbitrary")] +impl<'a> arbitrary::Arbitrary<'a> for RollupConfig { + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + use crate::{ + BASE_SEPOLIA_BASE_FEE_CONFIG, OP_MAINNET_BASE_FEE_CONFIG, OP_SEPOLIA_BASE_FEE_CONFIG, + }; + let chain_op_config = match u32::arbitrary(u)? % 3 { + 0 => OP_MAINNET_BASE_FEE_CONFIG, + 1 => OP_SEPOLIA_BASE_FEE_CONFIG, + _ => BASE_SEPOLIA_BASE_FEE_CONFIG, + }; + + Ok(Self { + genesis: ChainGenesis::arbitrary(u)?, + block_time: u.arbitrary()?, + max_sequencer_drift: u.arbitrary()?, + seq_window_size: u.arbitrary()?, + channel_timeout: u.arbitrary()?, + granite_channel_timeout: u.arbitrary()?, + l1_chain_id: u.arbitrary()?, + l2_chain_id: u.arbitrary()?, + hardforks: HardForkConfig::arbitrary(u)?, + batch_inbox_address: Address::arbitrary(u)?, + deposit_contract_address: Address::arbitrary(u)?, + l1_system_config_address: Address::arbitrary(u)?, + protocol_versions_address: Address::arbitrary(u)?, + superchain_config_address: Option::
::arbitrary(u)?, + blobs_enabled_l1_timestamp: Option::::arbitrary(u)?, + da_challenge_address: Option::
::arbitrary(u)?, + interop_message_expiry_window: u.arbitrary()?, + chain_op_config, + alt_da_config: Option::::arbitrary(u)?, + }) + } +} + +// Need to manually implement Default because [`BaseFeeParams`] has no Default impl. +impl Default for RollupConfig { + fn default() -> Self { + Self { + genesis: ChainGenesis::default(), + block_time: 0, + max_sequencer_drift: 0, + seq_window_size: 0, + channel_timeout: 0, + granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, + l1_chain_id: 0, + l2_chain_id: Chain::from_id(0), + hardforks: HardForkConfig::default(), + batch_inbox_address: Address::ZERO, + deposit_contract_address: Address::ZERO, + l1_system_config_address: Address::ZERO, + protocol_versions_address: Address::ZERO, + superchain_config_address: None, + blobs_enabled_l1_timestamp: None, + da_challenge_address: None, + interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, + alt_da_config: None, + chain_op_config: OP_MAINNET_BASE_FEE_CONFIG, + } + } +} + +#[cfg(feature = "revm")] +impl RollupConfig { + /// Returns the active [`op_revm::OpSpecId`] for the executor. + /// + /// ## Takes + /// - `timestamp`: The timestamp of the executing block. + /// + /// ## Returns + /// The active [`op_revm::OpSpecId`] for the executor. 
+ pub fn spec_id(&self, timestamp: u64) -> op_revm::OpSpecId { + if self.is_interop_active(timestamp) { + op_revm::OpSpecId::INTEROP + } else if self.is_jovian_active(timestamp) { + op_revm::OpSpecId::JOVIAN + } else if self.is_isthmus_active(timestamp) { + op_revm::OpSpecId::ISTHMUS + } else if self.is_holocene_active(timestamp) { + op_revm::OpSpecId::HOLOCENE + } else if self.is_fjord_active(timestamp) { + op_revm::OpSpecId::FJORD + } else if self.is_ecotone_active(timestamp) { + op_revm::OpSpecId::ECOTONE + } else if self.is_canyon_active(timestamp) { + op_revm::OpSpecId::CANYON + } else if self.is_regolith_active(timestamp) { + op_revm::OpSpecId::REGOLITH + } else { + op_revm::OpSpecId::BEDROCK + } + } +} + +impl RollupConfig { + /// Returns true if Regolith is active at the given timestamp. + pub fn is_regolith_active(&self, timestamp: u64) -> bool { + self.hardforks.regolith_time.is_some_and(|t| timestamp >= t) || + self.is_canyon_active(timestamp) + } + + /// Returns true if the timestamp marks the first Regolith block. + pub fn is_first_regolith_block(&self, timestamp: u64) -> bool { + self.is_regolith_active(timestamp) && + !self.is_regolith_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Canyon is active at the given timestamp. + pub fn is_canyon_active(&self, timestamp: u64) -> bool { + self.hardforks.canyon_time.is_some_and(|t| timestamp >= t) || + self.is_delta_active(timestamp) + } + + /// Returns true if the timestamp marks the first Canyon block. + pub fn is_first_canyon_block(&self, timestamp: u64) -> bool { + self.is_canyon_active(timestamp) && + !self.is_canyon_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Delta is active at the given timestamp. + pub fn is_delta_active(&self, timestamp: u64) -> bool { + self.hardforks.delta_time.is_some_and(|t| timestamp >= t) || + self.is_ecotone_active(timestamp) + } + + /// Returns true if the timestamp marks the first Delta block. 
+ pub fn is_first_delta_block(&self, timestamp: u64) -> bool { + self.is_delta_active(timestamp) && + !self.is_delta_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Ecotone is active at the given timestamp. + pub fn is_ecotone_active(&self, timestamp: u64) -> bool { + self.hardforks.ecotone_time.is_some_and(|t| timestamp >= t) || + self.is_fjord_active(timestamp) + } + + /// Returns true if the timestamp marks the first Ecotone block. + pub fn is_first_ecotone_block(&self, timestamp: u64) -> bool { + self.is_ecotone_active(timestamp) && + !self.is_ecotone_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Fjord is active at the given timestamp. + pub fn is_fjord_active(&self, timestamp: u64) -> bool { + self.hardforks.fjord_time.is_some_and(|t| timestamp >= t) || + self.is_granite_active(timestamp) + } + + /// Returns true if the timestamp marks the first Fjord block. + pub fn is_first_fjord_block(&self, timestamp: u64) -> bool { + self.is_fjord_active(timestamp) && + !self.is_fjord_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Granite is active at the given timestamp. + pub fn is_granite_active(&self, timestamp: u64) -> bool { + self.hardforks.granite_time.is_some_and(|t| timestamp >= t) || + self.is_holocene_active(timestamp) + } + + /// Returns true if the timestamp marks the first Granite block. + pub fn is_first_granite_block(&self, timestamp: u64) -> bool { + self.is_granite_active(timestamp) && + !self.is_granite_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Holocene is active at the given timestamp. + pub fn is_holocene_active(&self, timestamp: u64) -> bool { + self.hardforks.holocene_time.is_some_and(|t| timestamp >= t) || + self.is_isthmus_active(timestamp) + } + + /// Returns true if the timestamp marks the first Holocene block. 
+ pub fn is_first_holocene_block(&self, timestamp: u64) -> bool { + self.is_holocene_active(timestamp) && + !self.is_holocene_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if the pectra blob schedule is active at the given timestamp. + pub fn is_pectra_blob_schedule_active(&self, timestamp: u64) -> bool { + self.hardforks.pectra_blob_schedule_time.is_some_and(|t| timestamp >= t) + } + + /// Returns true if the timestamp marks the first pectra blob schedule block. + pub fn is_first_pectra_blob_schedule_block(&self, timestamp: u64) -> bool { + self.is_pectra_blob_schedule_active(timestamp) && + !self.is_pectra_blob_schedule_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Isthmus is active at the given timestamp. + pub fn is_isthmus_active(&self, timestamp: u64) -> bool { + self.hardforks.isthmus_time.is_some_and(|t| timestamp >= t) || + self.is_jovian_active(timestamp) + } + + /// Returns true if the timestamp marks the first Isthmus block. + pub fn is_first_isthmus_block(&self, timestamp: u64) -> bool { + self.is_isthmus_active(timestamp) && + !self.is_isthmus_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Jovian is active at the given timestamp. + pub fn is_jovian_active(&self, timestamp: u64) -> bool { + self.hardforks.jovian_time.is_some_and(|t| timestamp >= t) || + self.is_interop_active(timestamp) + } + + /// Returns true if the timestamp marks the first Jovian block. + pub fn is_first_jovian_block(&self, timestamp: u64) -> bool { + self.is_jovian_active(timestamp) && + !self.is_jovian_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if Interop is active at the given timestamp. + pub fn is_interop_active(&self, timestamp: u64) -> bool { + self.hardforks.interop_time.is_some_and(|t| timestamp >= t) + } + + /// Returns true if the timestamp marks the first Interop block. 
+ pub fn is_first_interop_block(&self, timestamp: u64) -> bool { + self.is_interop_active(timestamp) && + !self.is_interop_active(timestamp.saturating_sub(self.block_time)) + } + + /// Returns true if a DA Challenge proxy Address is provided in the rollup config and the + /// address is not zero. + pub fn is_alt_da_enabled(&self) -> bool { + self.da_challenge_address.is_some_and(|addr| !addr.is_zero()) + } + + /// Returns the max sequencer drift for the given timestamp. + pub fn max_sequencer_drift(&self, timestamp: u64) -> u64 { + if self.is_fjord_active(timestamp) { + FJORD_MAX_SEQUENCER_DRIFT + } else { + self.max_sequencer_drift + } + } + + /// Returns the max rlp bytes per channel for the given timestamp. + pub fn max_rlp_bytes_per_channel(&self, timestamp: u64) -> u64 { + if self.is_fjord_active(timestamp) { + MAX_RLP_BYTES_PER_CHANNEL_FJORD + } else { + MAX_RLP_BYTES_PER_CHANNEL_BEDROCK + } + } + + /// Returns the channel timeout for the given timestamp. + pub fn channel_timeout(&self, timestamp: u64) -> u64 { + if self.is_granite_active(timestamp) { + self.granite_channel_timeout + } else { + self.channel_timeout + } + } + + /// Returns the [`HardForkConfig`] using [`RollupConfig`] timestamps. + #[deprecated(since = "0.1.0", note = "Use the `hardforks` field instead.")] + pub const fn hardfork_config(&self) -> HardForkConfig { + self.hardforks + } + + /// Computes a block number from a timestamp, relative to the L2 genesis time and the block + /// time. + /// + /// This function assumes that the timestamp is aligned with the block time, and uses floor + /// division in its computation. + pub const fn block_number_from_timestamp(&self, timestamp: u64) -> u64 { + timestamp.saturating_sub(self.genesis.l2_time).saturating_div(self.block_time) + } + + /// Checks the scalar value in Ecotone. 
+ pub fn check_ecotone_l1_system_config_scalar(scalar: [u8; 32]) -> Result<(), &'static str> { + let version_byte = scalar[0]; + match version_byte { + 0 => { + if scalar[1..28] != [0; 27] { + return Err("Bedrock scalar padding not empty"); + } + Ok(()) + } + 1 => { + if scalar[1..24] != [0; 23] { + return Err("Invalid version 1 scalar padding"); + } + Ok(()) + } + _ => { + // ignore the event if it's an unknown scalar format + Err("Unrecognized scalar version") + } + } + } +} + +impl EthereumHardforks for RollupConfig { + fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { + if fork <= EthereumHardfork::Berlin { + // We assume that OP chains were launched with all forks before Berlin activated. + ForkCondition::Block(0) + } else if fork <= EthereumHardfork::Paris { + // Bedrock activates all hardforks up to Paris. + self.op_fork_activation(OpHardfork::Bedrock) + } else if fork <= EthereumHardfork::Shanghai { + // Canyon activates Shanghai hardfork. + self.op_fork_activation(OpHardfork::Canyon) + } else if fork <= EthereumHardfork::Cancun { + // Ecotone activates Cancun hardfork. + self.op_fork_activation(OpHardfork::Ecotone) + } else if fork <= EthereumHardfork::Prague { + // Isthmus activates Prague hardfork. 
+ self.op_fork_activation(OpHardfork::Isthmus) + } else { + ForkCondition::Never + } + } +} + +impl OpHardforks for RollupConfig { + fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { + match fork { + OpHardfork::Bedrock => ForkCondition::Block(0), + OpHardfork::Regolith => self + .hardforks + .regolith_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Canyon)), + OpHardfork::Canyon => self + .hardforks + .canyon_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Ecotone)), + OpHardfork::Ecotone => self + .hardforks + .ecotone_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Fjord)), + OpHardfork::Fjord => self + .hardforks + .fjord_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Granite)), + OpHardfork::Granite => self + .hardforks + .granite_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Holocene)), + OpHardfork::Holocene => self + .hardforks + .holocene_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Isthmus)), + OpHardfork::Isthmus => self + .hardforks + .isthmus_time + .map(ForkCondition::Timestamp) + .unwrap_or_else(|| self.op_fork_activation(OpHardfork::Jovian)), + OpHardfork::Jovian => self + .hardforks + .jovian_time + .map(ForkCondition::Timestamp) + .unwrap_or(ForkCondition::Never), + OpHardfork::Interop => self + .hardforks + .interop_time + .map(ForkCondition::Timestamp) + .unwrap_or(ForkCondition::Never), + _ => ForkCondition::Never, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + #[cfg(feature = "serde")] + use alloy_eips::BlockNumHash; + use alloy_primitives::address; + #[cfg(feature = "serde")] + use alloy_primitives::{U256, b256}; + + #[test] + #[cfg(feature = "arbitrary")] + fn test_arbitrary_rollup_config() { + use arbitrary::Arbitrary; + use 
rand::Rng; + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + RollupConfig::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); + } + + #[test] + #[cfg(feature = "revm")] + fn test_revm_spec_id() { + // By default, the spec ID should be BEDROCK. + let mut config = RollupConfig { + hardforks: HardForkConfig { regolith_time: Some(10), ..Default::default() }, + ..Default::default() + }; + assert_eq!(config.spec_id(0), op_revm::OpSpecId::BEDROCK); + assert_eq!(config.spec_id(10), op_revm::OpSpecId::REGOLITH); + config.hardforks.canyon_time = Some(20); + assert_eq!(config.spec_id(20), op_revm::OpSpecId::CANYON); + config.hardforks.ecotone_time = Some(30); + assert_eq!(config.spec_id(30), op_revm::OpSpecId::ECOTONE); + config.hardforks.fjord_time = Some(40); + assert_eq!(config.spec_id(40), op_revm::OpSpecId::FJORD); + config.hardforks.holocene_time = Some(50); + assert_eq!(config.spec_id(50), op_revm::OpSpecId::HOLOCENE); + config.hardforks.isthmus_time = Some(60); + assert_eq!(config.spec_id(60), op_revm::OpSpecId::ISTHMUS); + } + + #[test] + fn test_regolith_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_regolith_active(0)); + config.hardforks.regolith_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(!config.is_regolith_active(9)); + } + + #[test] + fn test_canyon_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_canyon_active(0)); + config.hardforks.canyon_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(!config.is_canyon_active(9)); + } + + #[test] + fn test_delta_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_delta_active(0)); + config.hardforks.delta_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(!config.is_delta_active(9)); + } + + #[test] + fn test_ecotone_active() { 
+ let mut config = RollupConfig::default(); + assert!(!config.is_ecotone_active(0)); + config.hardforks.ecotone_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(!config.is_ecotone_active(9)); + } + + #[test] + fn test_fjord_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_fjord_active(0)); + config.hardforks.fjord_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(!config.is_fjord_active(9)); + } + + #[test] + fn test_granite_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_granite_active(0)); + config.hardforks.granite_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(config.is_granite_active(10)); + assert!(!config.is_granite_active(9)); + } + + #[test] + fn test_holocene_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_holocene_active(0)); + config.hardforks.holocene_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(config.is_granite_active(10)); + assert!(config.is_holocene_active(10)); + assert!(!config.is_holocene_active(9)); + } + + #[test] + fn test_pectra_blob_schedule_active() { + let mut config = RollupConfig::default(); + config.hardforks.pectra_blob_schedule_time = Some(10); + // Pectra blob schedule is a unique fork, not included in the hierarchical ordering. 
Its + // activation does not imply the activation of any other forks. + assert!(!config.is_regolith_active(10)); + assert!(!config.is_canyon_active(10)); + assert!(!config.is_delta_active(10)); + assert!(!config.is_ecotone_active(10)); + assert!(!config.is_fjord_active(10)); + assert!(!config.is_granite_active(10)); + assert!(!config.is_holocene_active(0)); + assert!(config.is_pectra_blob_schedule_active(10)); + assert!(!config.is_pectra_blob_schedule_active(9)); + } + + #[test] + fn test_isthmus_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_isthmus_active(0)); + config.hardforks.isthmus_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(config.is_granite_active(10)); + assert!(config.is_holocene_active(10)); + assert!(!config.is_pectra_blob_schedule_active(10)); + assert!(config.is_isthmus_active(10)); + assert!(!config.is_isthmus_active(9)); + } + + #[test] + fn test_jovian_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_interop_active(0)); + config.hardforks.jovian_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(config.is_granite_active(10)); + assert!(config.is_holocene_active(10)); + assert!(!config.is_pectra_blob_schedule_active(10)); + assert!(config.is_isthmus_active(10)); + assert!(config.is_jovian_active(10)); + assert!(!config.is_jovian_active(9)); + } + + #[test] + fn test_interop_active() { + let mut config = RollupConfig::default(); + assert!(!config.is_interop_active(0)); + config.hardforks.interop_time = Some(10); + assert!(config.is_regolith_active(10)); + assert!(config.is_canyon_active(10)); + assert!(config.is_delta_active(10)); + 
assert!(config.is_ecotone_active(10)); + assert!(config.is_fjord_active(10)); + assert!(config.is_granite_active(10)); + assert!(config.is_holocene_active(10)); + assert!(!config.is_pectra_blob_schedule_active(10)); + assert!(config.is_isthmus_active(10)); + assert!(config.is_interop_active(10)); + assert!(!config.is_interop_active(9)); + } + + #[test] + fn test_is_first_fork_block() { + let cfg = RollupConfig { + hardforks: HardForkConfig { + regolith_time: Some(10), + canyon_time: Some(20), + delta_time: Some(30), + ecotone_time: Some(40), + fjord_time: Some(50), + granite_time: Some(60), + holocene_time: Some(70), + pectra_blob_schedule_time: Some(80), + isthmus_time: Some(90), + jovian_time: Some(100), + interop_time: Some(110), + }, + block_time: 2, + ..Default::default() + }; + + // Regolith + assert!(!cfg.is_first_regolith_block(8)); + assert!(cfg.is_first_regolith_block(10)); + assert!(!cfg.is_first_regolith_block(12)); + + // Canyon + assert!(!cfg.is_first_canyon_block(18)); + assert!(cfg.is_first_canyon_block(20)); + assert!(!cfg.is_first_canyon_block(22)); + + // Delta + assert!(!cfg.is_first_delta_block(28)); + assert!(cfg.is_first_delta_block(30)); + assert!(!cfg.is_first_delta_block(32)); + + // Ecotone + assert!(!cfg.is_first_ecotone_block(38)); + assert!(cfg.is_first_ecotone_block(40)); + assert!(!cfg.is_first_ecotone_block(42)); + + // Fjord + assert!(!cfg.is_first_fjord_block(48)); + assert!(cfg.is_first_fjord_block(50)); + assert!(!cfg.is_first_fjord_block(52)); + + // Granite + assert!(!cfg.is_first_granite_block(58)); + assert!(cfg.is_first_granite_block(60)); + assert!(!cfg.is_first_granite_block(62)); + + // Holocene + assert!(!cfg.is_first_holocene_block(68)); + assert!(cfg.is_first_holocene_block(70)); + assert!(!cfg.is_first_holocene_block(72)); + + // Pectra blob schedule + assert!(!cfg.is_first_pectra_blob_schedule_block(78)); + assert!(cfg.is_first_pectra_blob_schedule_block(80)); + assert!(!cfg.is_first_pectra_blob_schedule_block(82)); 
+ + // Isthmus + assert!(!cfg.is_first_isthmus_block(88)); + assert!(cfg.is_first_isthmus_block(90)); + assert!(!cfg.is_first_isthmus_block(92)); + + // Jovian + assert!(!cfg.is_first_jovian_block(98)); + assert!(cfg.is_first_jovian_block(100)); + assert!(!cfg.is_first_jovian_block(102)); + + // Interop + assert!(!cfg.is_first_interop_block(108)); + assert!(cfg.is_first_interop_block(110)); + assert!(!cfg.is_first_interop_block(112)); + } + + #[test] + fn test_alt_da_enabled() { + let mut config = RollupConfig::default(); + assert!(!config.is_alt_da_enabled()); + config.da_challenge_address = Some(Address::ZERO); + assert!(!config.is_alt_da_enabled()); + config.da_challenge_address = Some(address!("0000000000000000000000000000000000000001")); + assert!(config.is_alt_da_enabled()); + } + + #[test] + fn test_granite_channel_timeout() { + let mut config = RollupConfig { + channel_timeout: 100, + hardforks: HardForkConfig { granite_time: Some(10), ..Default::default() }, + ..Default::default() + }; + assert_eq!(config.channel_timeout(0), 100); + assert_eq!(config.channel_timeout(10), GRANITE_CHANNEL_TIMEOUT); + config.hardforks.granite_time = None; + assert_eq!(config.channel_timeout(10), 100); + } + + #[test] + fn test_max_sequencer_drift() { + let mut config = RollupConfig { max_sequencer_drift: 100, ..Default::default() }; + assert_eq!(config.max_sequencer_drift(0), 100); + config.hardforks.fjord_time = Some(10); + assert_eq!(config.max_sequencer_drift(0), 100); + assert_eq!(config.max_sequencer_drift(10), FJORD_MAX_SEQUENCER_DRIFT); + } + + #[test] + #[cfg(feature = "serde")] + fn test_deserialize_reference_rollup_config() { + use crate::{OP_MAINNET_BASE_FEE_CONFIG, SystemConfig}; + + let raw: &str = r#" + { + "genesis": { + "l1": { + "hash": "0x481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54", + "number": 10 + }, + "l2": { + "hash": "0x88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631", + "number": 0 + }, + "l2_time": 1725557164, + 
"system_config": { + "batcherAddr": "0xc81f87a644b41e49b3221f41251f15c6cb00ce03", + "overhead": "0x0000000000000000000000000000000000000000000000000000000000000000", + "scalar": "0x00000000000000000000000000000000000000000000000000000000000f4240", + "gasLimit": 30000000, + "baseFeeScalar": 1234, + "blobBaseFeeScalar": 5678, + "eip1559Denominator": 10, + "eip1559Elasticity": 20, + "operatorFeeScalar": 30, + "operatorFeeConstant": 40, + "minBaseFee": 50, + "daFootprintGasScalar": 10 + } + }, + "block_time": 2, + "max_sequencer_drift": 600, + "seq_window_size": 3600, + "channel_timeout": 300, + "l1_chain_id": 3151908, + "l2_chain_id": 1337, + "regolith_time": 0, + "canyon_time": 0, + "delta_time": 0, + "ecotone_time": 0, + "fjord_time": 0, + "batch_inbox_address": "0xff00000000000000000000000000000000042069", + "deposit_contract_address": "0x08073dc48dde578137b8af042bcbc1c2491f1eb2", + "l1_system_config_address": "0x94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710", + "protocol_versions_address": "0x0000000000000000000000000000000000000000", + "chain_op_config": { + "eip1559Elasticity": 6, + "eip1559Denominator": 50, + "eip1559DenominatorCanyon": 250 + }, + "alt_da": null + } + "#; + + let expected = RollupConfig { + genesis: ChainGenesis { + l1: BlockNumHash { + hash: b256!("481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54"), + number: 10, + }, + l2: BlockNumHash { + hash: b256!("88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631"), + number: 0, + }, + l2_time: 1725557164, + system_config: Some(SystemConfig { + batcher_address: address!("c81f87a644b41e49b3221f41251f15c6cb00ce03"), + overhead: U256::ZERO, + scalar: U256::from(0xf4240), + gas_limit: 30_000_000, + base_fee_scalar: Some(1234), + blob_base_fee_scalar: Some(5678), + eip1559_denominator: Some(10), + eip1559_elasticity: Some(20), + operator_fee_scalar: Some(30), + operator_fee_constant: Some(40), + min_base_fee: Some(50), + da_footprint_gas_scalar: Some(10), + }), + }, + block_time: 2, 
+ max_sequencer_drift: 600, + seq_window_size: 3600, + channel_timeout: 300, + granite_channel_timeout: GRANITE_CHANNEL_TIMEOUT, + l1_chain_id: 3151908, + l2_chain_id: Chain::from_id(1337), + hardforks: HardForkConfig { + regolith_time: Some(0), + canyon_time: Some(0), + delta_time: Some(0), + ecotone_time: Some(0), + fjord_time: Some(0), + ..Default::default() + }, + batch_inbox_address: address!("ff00000000000000000000000000000000042069"), + deposit_contract_address: address!("08073dc48dde578137b8af042bcbc1c2491f1eb2"), + l1_system_config_address: address!("94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710"), + protocol_versions_address: Address::ZERO, + superchain_config_address: None, + blobs_enabled_l1_timestamp: None, + da_challenge_address: None, + interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, + chain_op_config: OP_MAINNET_BASE_FEE_CONFIG, + alt_da_config: None, + }; + + let deserialized: RollupConfig = serde_json::from_str(raw).unwrap(); + assert_eq!(deserialized, expected); + } + + #[test] + fn test_rollup_config_unknown_field() { + let raw: &str = r#" + { + "genesis": { + "l1": { + "hash": "0x481724ee99b1f4cb71d826e2ec5a37265f460e9b112315665c977f4050b0af54", + "number": 10 + }, + "l2": { + "hash": "0x88aedfbf7dea6bfa2c4ff315784ad1a7f145d8f650969359c003bbed68c87631", + "number": 0 + }, + "l2_time": 1725557164, + "system_config": { + "batcherAddr": "0xc81f87a644b41e49b3221f41251f15c6cb00ce03", + "overhead": "0x0000000000000000000000000000000000000000000000000000000000000000", + "scalar": "0x00000000000000000000000000000000000000000000000000000000000f4240", + "gasLimit": 30000000 + } + }, + "block_time": 2, + "max_sequencer_drift": 600, + "seq_window_size": 3600, + "channel_timeout": 300, + "l1_chain_id": 3151908, + "l2_chain_id": 1337, + "regolith_time": 0, + "canyon_time": 0, + "delta_time": 0, + "ecotone_time": 0, + "fjord_time": 0, + "batch_inbox_address": "0xff00000000000000000000000000000000042069", + "deposit_contract_address": 
"0x08073dc48dde578137b8af042bcbc1c2491f1eb2", + "l1_system_config_address": "0x94ee52a9d8edd72a85dea7fae3ba6d75e4bf1710", + "protocol_versions_address": "0x0000000000000000000000000000000000000000", + "chain_op_config": { + "eip1559_elasticity": 100, + "eip1559_denominator": 100, + "eip1559_denominator_canyon": 100 + }, + "unknown_field": "unknown" + } + "#; + + let err = serde_json::from_str::(raw).unwrap_err(); + assert_eq!(err.classify(), serde_json::error::Category::Data); + } + + #[test] + fn test_compute_block_number_from_time() { + let cfg = RollupConfig { + genesis: ChainGenesis { l2_time: 10, ..Default::default() }, + block_time: 2, + ..Default::default() + }; + + assert_eq!(cfg.block_number_from_timestamp(20), 5); + assert_eq!(cfg.block_number_from_timestamp(30), 10); + } +} diff --git a/kona/crates/protocol/genesis/src/superchain/chain.rs b/rust/kona/crates/protocol/genesis/src/superchain/chain.rs similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/chain.rs rename to rust/kona/crates/protocol/genesis/src/superchain/chain.rs diff --git a/kona/crates/protocol/genesis/src/superchain/chain_list.rs b/rust/kona/crates/protocol/genesis/src/superchain/chain_list.rs similarity index 98% rename from kona/crates/protocol/genesis/src/superchain/chain_list.rs rename to rust/kona/crates/protocol/genesis/src/superchain/chain_list.rs index 5ec99d04900..8115ba9ec69 100644 --- a/kona/crates/protocol/genesis/src/superchain/chain_list.rs +++ b/rust/kona/crates/protocol/genesis/src/superchain/chain_list.rs @@ -27,7 +27,7 @@ impl ChainList { self.chains.iter().find(|c| c.chain_id == chain_id) } - /// Fetch a [Chain] by the corresponding [AlloyChain] + /// Fetch a [Chain] by the corresponding [`AlloyChain`] pub fn get_chain_by_alloy_ident(&self, chain: &AlloyChain) -> Option<&Chain> { self.get_chain_by_id(chain.id()) } diff --git a/kona/crates/protocol/genesis/src/superchain/chains.rs b/rust/kona/crates/protocol/genesis/src/superchain/chains.rs 
similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/chains.rs rename to rust/kona/crates/protocol/genesis/src/superchain/chains.rs diff --git a/kona/crates/protocol/genesis/src/superchain/config.rs b/rust/kona/crates/protocol/genesis/src/superchain/config.rs similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/config.rs rename to rust/kona/crates/protocol/genesis/src/superchain/config.rs diff --git a/kona/crates/protocol/genesis/src/superchain/info.rs b/rust/kona/crates/protocol/genesis/src/superchain/info.rs similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/info.rs rename to rust/kona/crates/protocol/genesis/src/superchain/info.rs diff --git a/kona/crates/protocol/genesis/src/superchain/level.rs b/rust/kona/crates/protocol/genesis/src/superchain/level.rs similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/level.rs rename to rust/kona/crates/protocol/genesis/src/superchain/level.rs diff --git a/kona/crates/protocol/genesis/src/superchain/mod.rs b/rust/kona/crates/protocol/genesis/src/superchain/mod.rs similarity index 100% rename from kona/crates/protocol/genesis/src/superchain/mod.rs rename to rust/kona/crates/protocol/genesis/src/superchain/mod.rs diff --git a/rust/kona/crates/protocol/genesis/src/system/config.rs b/rust/kona/crates/protocol/genesis/src/system/config.rs new file mode 100644 index 00000000000..2a6b93445dd --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/system/config.rs @@ -0,0 +1,584 @@ +//! Contains the [`SystemConfig`] type. + +use crate::{ + CONFIG_UPDATE_TOPIC, RollupConfig, SystemConfigLog, SystemConfigUpdateError, + SystemConfigUpdateKind, +}; +use alloy_consensus::{Eip658Value, Receipt}; +use alloy_primitives::{Address, B64, Log, U256}; + +/// System configuration. 
+#[derive(Debug, Copy, Clone, Default, Hash, Eq, PartialEq)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[cfg_attr(feature = "serde", serde(deny_unknown_fields))] +pub struct SystemConfig { + /// Batcher address + #[cfg_attr(feature = "serde", serde(rename = "batcherAddr"))] + pub batcher_address: Address, + /// Fee overhead value + #[cfg_attr(feature = "serde", serde(serialize_with = "serialize_u256_full"))] + pub overhead: U256, + /// Fee scalar value + #[cfg_attr(feature = "serde", serde(serialize_with = "serialize_u256_full"))] + pub scalar: U256, + /// Gas limit value + pub gas_limit: u64, + /// Base fee scalar value + pub base_fee_scalar: Option, + /// Blob base fee scalar value + pub blob_base_fee_scalar: Option, + /// EIP-1559 denominator + pub eip1559_denominator: Option, + /// EIP-1559 elasticity + pub eip1559_elasticity: Option, + /// The operator fee scalar (isthmus hardfork) + pub operator_fee_scalar: Option, + /// The operator fee constant (isthmus hardfork) + pub operator_fee_constant: Option, + /// Min base fee (jovian hardfork) + /// Note: according to the [spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/system-config.md#initialization), as long as the `MinBaseFee` is not + /// explicitly set, the default value (`0`) will be systematically applied. + pub min_base_fee: Option, + /// DA footprint gas scalar (Jovian hardfork) + /// Note: according to the [spec](https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/system-config.md#initialization), as long as the `DAFootprintGasScalar` is not + /// explicitly set, the default value (`400`) will be systematically applied. + pub da_footprint_gas_scalar: Option, +} + +/// Custom EIP-1559 parameter decoding is needed here for holocene encoding. +/// +/// This is used by the Optimism monorepo [here][here]. 
+/// +/// [here]: https://github.com/ethereum-optimism/optimism/blob/cf28bffc7d880292794f53bb76bfc4df7898307b/op-service/eth/types.go#L519 +#[cfg(feature = "serde")] +impl<'a> serde::Deserialize<'a> for SystemConfig { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'a>, + { + use alloy_primitives::B256; + // An alias struct that is identical to `SystemConfig`. + // We use the alias to decode the eip1559 params as their u32 values. + #[derive(serde::Deserialize)] + #[serde(rename_all = "camelCase")] + #[serde(deny_unknown_fields)] + struct SystemConfigAlias { + #[serde(rename = "batcherAddress", alias = "batcherAddr")] + batcher_address: Address, + overhead: U256, + scalar: U256, + gas_limit: u64, + base_fee_scalar: Option, + blob_base_fee_scalar: Option, + eip1559_params: Option, + eip1559_denominator: Option, + eip1559_elasticity: Option, + operator_fee_params: Option, + operator_fee_scalar: Option, + operator_fee_constant: Option, + min_base_fee: Option, + da_footprint_gas_scalar: Option, + } + + let mut alias = SystemConfigAlias::deserialize(deserializer)?; + if let Some(params) = alias.eip1559_params { + alias.eip1559_denominator = + Some(u32::from_be_bytes(params.as_slice().get(0..4).unwrap().try_into().unwrap())); + alias.eip1559_elasticity = + Some(u32::from_be_bytes(params.as_slice().get(4..8).unwrap().try_into().unwrap())); + } + if let Some(params) = alias.operator_fee_params { + alias.operator_fee_scalar = Some(u32::from_be_bytes( + params.as_slice().get(20..24).unwrap().try_into().unwrap(), + )); + alias.operator_fee_constant = Some(u64::from_be_bytes( + params.as_slice().get(24..32).unwrap().try_into().unwrap(), + )); + } + + Ok(Self { + batcher_address: alias.batcher_address, + overhead: alias.overhead, + scalar: alias.scalar, + gas_limit: alias.gas_limit, + base_fee_scalar: alias.base_fee_scalar, + blob_base_fee_scalar: alias.blob_base_fee_scalar, + eip1559_denominator: alias.eip1559_denominator, + eip1559_elasticity: 
alias.eip1559_elasticity, + operator_fee_scalar: alias.operator_fee_scalar, + operator_fee_constant: alias.operator_fee_constant, + min_base_fee: alias.min_base_fee, + da_footprint_gas_scalar: alias.da_footprint_gas_scalar, + }) + } +} + +impl SystemConfig { + /// Filters all L1 receipts to find config updates and applies the config updates. + /// + /// Returns `true` if any config updates were applied, `false` otherwise. + pub fn update_with_receipts( + &mut self, + receipts: &[Receipt], + l1_system_config_address: Address, + ecotone_active: bool, + ) -> Result { + let mut updated = false; + for receipt in receipts { + if Eip658Value::Eip658(false) == receipt.status { + continue; + } + + receipt.logs.iter().try_for_each(|log| { + let topics = log.topics(); + if log.address == l1_system_config_address && + !topics.is_empty() && + topics[0] == CONFIG_UPDATE_TOPIC + { + // Safety: Error is bubbled up by the trailing `?` + self.process_config_update_log(log, ecotone_active)?; + updated = true; + } + Ok::<(), SystemConfigUpdateError>(()) + })?; + } + Ok(updated) + } + + /// Returns the eip1559 parameters from a [`SystemConfig`] encoded as a [B64]. + pub fn eip_1559_params( + &self, + rollup_config: &RollupConfig, + parent_timestamp: u64, + next_timestamp: u64, + ) -> Option { + let is_holocene = rollup_config.is_holocene_active(next_timestamp); + + // For the first holocene block, a zero'd out B64 is returned to signal the + // execution layer to use the canyon base fee parameters. Else, the system + // config's eip1559 parameters are encoded as a B64. + if is_holocene && !rollup_config.is_holocene_active(parent_timestamp) { + Some(B64::ZERO) + } else { + is_holocene.then_some(B64::from_slice( + &[ + self.eip1559_denominator.unwrap_or_default().to_be_bytes(), + self.eip1559_elasticity.unwrap_or_default().to_be_bytes(), + ] + .concat(), + )) + } + } + + /// Decodes an EVM log entry emitted by the system config contract and applies it as a + /// [`SystemConfig`] change. 
+ /// + /// Parse log data for: + /// + /// ```text + /// event ConfigUpdate( + /// uint256 indexed version, + /// UpdateType indexed updateType, + /// bytes data + /// ); + /// ``` + fn process_config_update_log( + &mut self, + log: &Log, + ecotone_active: bool, + ) -> Result { + // Construct the system config log from the log. + let log = SystemConfigLog::new(log.clone(), ecotone_active); + + // Construct the update type from the log. + let update = log.build()?; + + // Apply the update to the system config. + update.apply(self); + + // Return the update type. + Ok(update.kind()) + } +} + +/// Compatibility helper function to serialize a [`U256`] as a [`B256`]. +/// +/// [`B256`]: alloy_primitives::B256 +#[cfg(feature = "serde")] +fn serialize_u256_full(ts: &U256, ser: S) -> Result +where + S: serde::Serializer, +{ + use serde::Serialize; + + alloy_primitives::B256::from(ts.to_be_bytes::<32>()).serialize(ser) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::{CONFIG_UPDATE_EVENT_VERSION_0, HardForkConfig}; + use alloc::vec; + use alloy_primitives::{B256, LogData, address, b256, hex}; + + #[test] + #[cfg(feature = "serde")] + fn test_system_config_da_footprint_gas_scalar() { + let raw = r#"{ + "batcherAddress": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", + "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", + "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", + "gasLimit": 30000000, + "eip1559Params": "0x000000ab000000cd", + "daFootprintGasScalar": 10 + }"#; + let system_config: SystemConfig = serde_json::from_str(raw).unwrap(); + assert_eq!(system_config.da_footprint_gas_scalar, Some(10), "da_footprint_gas_scalar"); + } + + #[test] + #[cfg(feature = "serde")] + fn test_system_config_eip1559_params() { + let raw = r#"{ + "batcherAddress": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", + "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", + "scalar": 
"0x00000000000000000000000000000000000000000000000000000000000a6fe0", + "gasLimit": 30000000, + "eip1559Params": "0x000000ab000000cd" + }"#; + let system_config: SystemConfig = serde_json::from_str(raw).unwrap(); + assert_eq!(system_config.eip1559_denominator, Some(0xab_u32), "eip1559_denominator"); + assert_eq!(system_config.eip1559_elasticity, Some(0xcd_u32), "eip1559_elasticity"); + } + + #[test] + #[cfg(feature = "serde")] + fn test_system_config_serde() { + let raw = r#"{ + "batcherAddr": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", + "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", + "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", + "gasLimit": 30000000 + }"#; + let expected = SystemConfig { + batcher_address: address!("6887246668a3b87F54DeB3b94Ba47a6f63F32985"), + overhead: U256::from(0xbc), + scalar: U256::from(0xa6fe0), + gas_limit: 30000000, + ..Default::default() + }; + + let deserialized: SystemConfig = serde_json::from_str(raw).unwrap(); + assert_eq!(deserialized, expected); + } + + #[test] + #[cfg(feature = "serde")] + fn test_system_config_unknown_field() { + let raw = r#"{ + "batcherAddr": "0x6887246668a3b87F54DeB3b94Ba47a6f63F32985", + "overhead": "0x00000000000000000000000000000000000000000000000000000000000000bc", + "scalar": "0x00000000000000000000000000000000000000000000000000000000000a6fe0", + "gasLimit": 30000000, + "unknown": 0 + }"#; + let err = serde_json::from_str::(raw).unwrap_err(); + assert_eq!(err.classify(), serde_json::error::Category::Data); + } + + #[test] + #[cfg(feature = "arbitrary")] + fn test_arbitrary_system_config() { + use arbitrary::Arbitrary; + use rand::Rng; + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + SystemConfig::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); + } + + #[test] + fn test_eip_1559_params_from_system_config_none() { + let rollup_config = RollupConfig::default(); + let sys_config = 
SystemConfig::default(); + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), None); + } + + #[test] + fn test_eip_1559_params_from_system_config_some() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }; + let sys_config = SystemConfig { + eip1559_denominator: Some(1), + eip1559_elasticity: None, + ..Default::default() + }; + let expected = Some(B64::from_slice(&[1u32.to_be_bytes(), 0u32.to_be_bytes()].concat())); + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); + } + + #[test] + fn test_eip_1559_params_from_system_config() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }; + let sys_config = SystemConfig { + eip1559_denominator: Some(1), + eip1559_elasticity: Some(2), + ..Default::default() + }; + let expected = Some(B64::from_slice(&[1u32.to_be_bytes(), 2u32.to_be_bytes()].concat())); + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); + } + + #[test] + fn test_default_eip_1559_params_from_system_config() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { holocene_time: Some(0), ..Default::default() }, + ..Default::default() + }; + let sys_config = SystemConfig { + eip1559_denominator: None, + eip1559_elasticity: None, + ..Default::default() + }; + let expected = Some(B64::ZERO); + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), expected); + } + + #[test] + fn test_default_eip_1559_params_from_system_config_pre_holocene() { + let rollup_config = RollupConfig::default(); + let sys_config = SystemConfig { + eip1559_denominator: Some(1), + eip1559_elasticity: Some(2), + ..Default::default() + }; + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 0), None); + } + + #[test] + fn test_default_eip_1559_params_first_block_holocene() { + let rollup_config = RollupConfig { + hardforks: 
HardForkConfig { holocene_time: Some(2), ..Default::default() }, + ..Default::default() + }; + let sys_config = SystemConfig { + eip1559_denominator: Some(1), + eip1559_elasticity: Some(2), + ..Default::default() + }; + assert_eq!(sys_config.eip_1559_params(&rollup_config, 0, 2), Some(B64::ZERO)); + } + + #[test] + fn test_system_config_update_with_receipts_unchanged() { + let mut system_config = SystemConfig::default(); + let receipts = vec![]; + let l1_system_config_address = Address::ZERO; + let ecotone_active = false; + + let updated = system_config + .update_with_receipts(&receipts, l1_system_config_address, ecotone_active) + .unwrap(); + assert!(!updated); + + assert_eq!(system_config, SystemConfig::default()); + } + + #[test] + fn test_system_config_update_with_receipts_batcher_address() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000000"); + let mut system_config = SystemConfig::default(); + let l1_system_config_address = Address::ZERO; + let ecotone_active = false; + + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() + ) + }; + + let receipt = Receipt { + logs: vec![update_log], + status: Eip658Value::Eip658(true), + cumulative_gas_used: 0, + }; + + let updated = system_config + .update_with_receipts(&[receipt], l1_system_config_address, ecotone_active) + .unwrap(); + assert!(updated); + + assert_eq!( + system_config.batcher_address, + address!("000000000000000000000000000000000000bEEF"), + ); + } + + #[test] + fn test_system_config_update_batcher_log() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000000"); + + let mut system_config 
= SystemConfig::default(); + + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() + ) + }; + + // Update the batcher address. + system_config.process_config_update_log(&update_log, false).unwrap(); + + assert_eq!( + system_config.batcher_address, + address!("000000000000000000000000000000000000bEEF") + ); + } + + #[test] + fn test_system_config_update_gas_config_log() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000001"); + + let mut system_config = SystemConfig::default(); + + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000babe000000000000000000000000000000000000000000000000000000000000beef").into() + ) + }; + + // Update the batcher address. 
+ system_config.process_config_update_log(&update_log, false).unwrap(); + + assert_eq!(system_config.overhead, U256::from(0xbabe)); + assert_eq!(system_config.scalar, U256::from(0xbeef)); + } + + #[test] + fn test_system_config_update_gas_config_log_ecotone() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000001"); + + let mut system_config = SystemConfig::default(); + + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000babe000000000000000000000000000000000000000000000000000000000000beef").into() + ) + }; + + // Update the gas limit. + system_config.process_config_update_log(&update_log, true).unwrap(); + + assert_eq!(system_config.overhead, U256::from(0)); + assert_eq!(system_config.scalar, U256::from(0xbeef)); + } + + #[test] + fn test_system_config_update_gas_limit_log() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000002"); + + let mut system_config = SystemConfig::default(); + + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000beef").into() + ) + }; + + // Update the gas limit. 
+ system_config.process_config_update_log(&update_log, false).unwrap(); + + assert_eq!(system_config.gas_limit, 0xbeef_u64); + } + + #[test] + fn test_system_config_update_eip1559_params_log() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000004"); + + let mut system_config = SystemConfig::default(); + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000babe0000beef").into() + ) + }; + + // Update the EIP-1559 parameters. + system_config.process_config_update_log(&update_log, false).unwrap(); + + assert_eq!(system_config.eip1559_denominator, Some(0xbabe_u32)); + assert_eq!(system_config.eip1559_elasticity, Some(0xbeef_u32)); + } + + #[test] + fn test_system_config_update_operator_fee_log() { + const UPDATE_TYPE: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000005"); + + let mut system_config = SystemConfig::default(); + let update_log = Log { + address: Address::ZERO, + data: LogData::new_unchecked( + vec![ + CONFIG_UPDATE_TOPIC, + CONFIG_UPDATE_EVENT_VERSION_0, + UPDATE_TYPE, + ], + hex!("0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000babe000000000000beef").into() + ) + }; + + // Update the operator fee. 
+ system_config.process_config_update_log(&update_log, false).unwrap(); + + assert_eq!(system_config.operator_fee_scalar, Some(0xbabe_u32)); + assert_eq!(system_config.operator_fee_constant, Some(0xbeef_u64)); + } +} diff --git a/rust/kona/crates/protocol/genesis/src/system/errors.rs b/rust/kona/crates/protocol/genesis/src/system/errors.rs new file mode 100644 index 00000000000..54e65f4a798 --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/system/errors.rs @@ -0,0 +1,257 @@ +//! Contains error types for system config updates. + +use alloy_primitives::B256; +use derive_more::From; + +/// An error for processing the [`crate::SystemConfig`] update log. +#[derive(Debug, From, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum SystemConfigUpdateError { + /// An error occurred while processing the update log. + #[error("Log processing error: {0}")] + LogProcessing(LogProcessingError), + /// A batcher update error. + #[error("Batcher update error: {0}")] + Batcher(BatcherUpdateError), + /// A gas config update error. + #[error("Gas config update error: {0}")] + GasConfig(GasConfigUpdateError), + /// A gas limit update error. + #[error("Gas limit update error: {0}")] + GasLimit(GasLimitUpdateError), + /// An EIP-1559 parameter update error. + #[error("EIP-1559 parameter update error: {0}")] + Eip1559(EIP1559UpdateError), + /// An operator fee parameter update error. + #[error("Operator fee parameter update error: {0}")] + OperatorFee(OperatorFeeUpdateError), + /// An unsafe block signer update error. + #[error("Unsafe block signer update error: {0}")] + UnsafeBlockSigner(UnsafeBlockSignerUpdateError), + /// A min base fee parameter update error. + #[error("Min base fee parameter update error: {0}")] + MinBaseFee(MinBaseFeeUpdateError), + /// A da footprint gas scalar update error. 
+ #[error("DA footprint gas scalar update error: {0}")] + DaFootprintGasScalar(DaFootprintGasScalarUpdateError), +} + +/// An error occurred while processing the update log. +#[derive(Debug, From, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum LogProcessingError { + /// Received an incorrect number of log topics. + #[error("Invalid config update log: invalid topic length: {0}")] + InvalidTopicLen(usize), + /// The log topic is invalid. + #[error("Invalid config update log: invalid topic")] + InvalidTopic, + /// The config update log version is unsupported. + #[error("Invalid config update log: unsupported version: {0}")] + UnsupportedVersion(B256), + /// Failed to decode the update type from the config update log. + #[error("Failed to decode config update log: update type")] + UpdateTypeDecodingError, + /// An invalid system config update type. + #[error("Invalid system config update type: {0}")] + InvalidSystemConfigUpdateType(u64), +} + +/// An error for updating the batcher address on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum BatcherUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the batcher update log. + #[error("Failed to decode batcher update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the batcher update log. + #[error("Failed to decode batcher update log: data length")] + LengthDecodingError, + /// The data length is invalid. 
+ #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the batcher address argument from the batcher update log. + #[error("Failed to decode batcher update log: batcher address")] + BatcherAddressDecodingError, +} + +/// An error for updating the unsafe block signer address on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum UnsafeBlockSignerUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the update log. + #[error("Failed to decode unsafe block signer update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the update log. + #[error("Failed to decode unsafe block signer update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the unsafe block signer address argument from the update log. + #[error("Failed to decode unsafe block signer update log: unsafe block signer address")] + UnsafeBlockSignerAddressDecodingError, +} + +/// An error for updating the gas config on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum GasConfigUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the gas config update log. 
+ #[error("Failed to decode gas config update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the gas config update log. + #[error("Failed to decode gas config update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the overhead argument from the gas config update log. + #[error("Failed to decode gas config update log: overhead")] + OverheadDecodingError, + /// Failed to decode the scalar argument from the gas config update log. + #[error("Failed to decode gas config update log: scalar")] + ScalarDecodingError, +} + +/// An error for updating the min base fee on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum MinBaseFeeUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the min base fee update log. + #[error("Failed to decode gas limit update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the min base fee update log. + #[error("Failed to decode gas limit update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the min base fee argument from the min base fee update log. 
+ #[error("Failed to decode min base fee update log: min base fee")] + MinBaseFeeDecodingError, +} + +/// An error for updating the da footprint gas scalar on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum DaFootprintGasScalarUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the min base fee update log. + #[error("Failed to decode gas limit update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the min base fee update log. + #[error("Failed to decode gas limit update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the da footprint gas scalar argument from the da footprint gas scalar + /// update log. + #[error("Failed to decode da footprint gas scalar update log: da footprint gas scalar")] + DaFootprintGasScalarDecodingError, +} + +/// An error for updating the gas limit on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum GasLimitUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the gas limit update log. + #[error("Failed to decode gas limit update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. 
+ #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the gas limit update log. + #[error("Failed to decode gas limit update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the gas limit argument from the gas limit update log. + #[error("Failed to decode gas limit update log: gas limit")] + GasLimitDecodingError, +} + +/// An error for updating the EIP-1559 parameters on the [`crate::SystemConfig`]. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum EIP1559UpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the eip 1559 update log. + #[error("Failed to decode eip1559 parameter update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the eip 1559 update log. + #[error("Failed to decode eip1559 parameter update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the eip1559 params argument from the eip 1559 update log. + #[error("Failed to decode eip1559 parameter update log: eip1559 parameters")] + EIP1559DecodingError, +} + +/// An error for updating the operator fee parameters on the [`crate::SystemConfig`]. 
+#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum OperatorFeeUpdateError { + /// Invalid data length. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLen(usize), + /// Failed to decode the data pointer argument from the operator fee update log. + #[error("Failed to decode operator fee parameter update log: data pointer")] + PointerDecodingError, + /// The data pointer is invalid. + #[error("Invalid config update log: invalid data pointer: {0}")] + InvalidDataPointer(u64), + /// Failed to decode the data length argument from the operator fee update log. + #[error("Failed to decode operator fee parameter update log: data length")] + LengthDecodingError, + /// The data length is invalid. + #[error("Invalid config update log: invalid data length: {0}")] + InvalidDataLength(u64), + /// Failed to decode the scalar argument from the update log. + #[error("Failed to decode operator fee parameter update log: scalar")] + ScalarDecodingError, + /// Failed to decode the constant argument from the update log. 
+ #[error("Failed to decode operator fee parameter update log: constant")] + ConstantDecodingError, +} diff --git a/kona/crates/protocol/genesis/src/system/kind.rs b/rust/kona/crates/protocol/genesis/src/system/kind.rs similarity index 100% rename from kona/crates/protocol/genesis/src/system/kind.rs rename to rust/kona/crates/protocol/genesis/src/system/kind.rs diff --git a/kona/crates/protocol/genesis/src/system/log.rs b/rust/kona/crates/protocol/genesis/src/system/log.rs similarity index 100% rename from kona/crates/protocol/genesis/src/system/log.rs rename to rust/kona/crates/protocol/genesis/src/system/log.rs diff --git a/kona/crates/protocol/genesis/src/system/mod.rs b/rust/kona/crates/protocol/genesis/src/system/mod.rs similarity index 100% rename from kona/crates/protocol/genesis/src/system/mod.rs rename to rust/kona/crates/protocol/genesis/src/system/mod.rs diff --git a/kona/crates/protocol/genesis/src/system/update.rs b/rust/kona/crates/protocol/genesis/src/system/update.rs similarity index 100% rename from kona/crates/protocol/genesis/src/system/update.rs rename to rust/kona/crates/protocol/genesis/src/system/update.rs diff --git a/kona/crates/protocol/genesis/src/updates/batcher.rs b/rust/kona/crates/protocol/genesis/src/updates/batcher.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/batcher.rs rename to rust/kona/crates/protocol/genesis/src/updates/batcher.rs diff --git a/kona/crates/protocol/genesis/src/updates/common.rs b/rust/kona/crates/protocol/genesis/src/updates/common.rs similarity index 82% rename from kona/crates/protocol/genesis/src/updates/common.rs rename to rust/kona/crates/protocol/genesis/src/updates/common.rs index 959aee4ece0..3dbd157efbd 100644 --- a/kona/crates/protocol/genesis/src/updates/common.rs +++ b/rust/kona/crates/protocol/genesis/src/updates/common.rs @@ -1,20 +1,20 @@ -//! Common validation utilities for SystemConfig updates. +//! Common validation utilities for `SystemConfig` updates. //! -//! 
This module provides shared validation logic for decoding SystemConfigLog data +//! This module provides shared validation logic for decoding `SystemConfigLog` data //! that is used across multiple update types. use alloy_sol_types::{SolType, sol}; -/// The expected data length for a standard SystemConfigLog update. +/// The expected data length for a standard `SystemConfigLog` update. pub(crate) const STANDARD_UPDATE_DATA_LEN: usize = 96; -/// The expected pointer value for a standard SystemConfigLog update. +/// The expected pointer value for a standard `SystemConfigLog` update. pub(crate) const EXPECTED_POINTER: u64 = 32; -/// The expected data length value for a standard SystemConfigLog update. +/// The expected data length value for a standard `SystemConfigLog` update. pub(crate) const EXPECTED_DATA_LENGTH: u64 = 32; -/// Validated SystemConfig update data. +/// Validated `SystemConfig` update data. /// /// After validation, this struct provides access to the validated pointer, length, /// and the payload data starting at byte offset 64. @@ -31,7 +31,7 @@ impl<'a> ValidatedUpdateData<'a> { } } -/// Common validation errors for SystemConfig updates. +/// Common validation errors for `SystemConfig` updates. #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) enum ValidationError { /// Invalid data length. Contains (expected, actual). @@ -46,7 +46,7 @@ pub(crate) enum ValidationError { InvalidDataLength(u64), } -/// Validates the common structure of a SystemConfig update log data. +/// Validates the common structure of a `SystemConfig` update log data. /// /// This function performs the following validations: /// 1. 
Checks that the data length is exactly 96 bytes diff --git a/kona/crates/protocol/genesis/src/updates/da_footprint_gas_scalar.rs b/rust/kona/crates/protocol/genesis/src/updates/da_footprint_gas_scalar.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/da_footprint_gas_scalar.rs rename to rust/kona/crates/protocol/genesis/src/updates/da_footprint_gas_scalar.rs diff --git a/kona/crates/protocol/genesis/src/updates/eip1559.rs b/rust/kona/crates/protocol/genesis/src/updates/eip1559.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/eip1559.rs rename to rust/kona/crates/protocol/genesis/src/updates/eip1559.rs diff --git a/kona/crates/protocol/genesis/src/updates/gas_config.rs b/rust/kona/crates/protocol/genesis/src/updates/gas_config.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/gas_config.rs rename to rust/kona/crates/protocol/genesis/src/updates/gas_config.rs diff --git a/kona/crates/protocol/genesis/src/updates/gas_limit.rs b/rust/kona/crates/protocol/genesis/src/updates/gas_limit.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/gas_limit.rs rename to rust/kona/crates/protocol/genesis/src/updates/gas_limit.rs diff --git a/kona/crates/protocol/genesis/src/updates/min_base_fee.rs b/rust/kona/crates/protocol/genesis/src/updates/min_base_fee.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/min_base_fee.rs rename to rust/kona/crates/protocol/genesis/src/updates/min_base_fee.rs diff --git a/rust/kona/crates/protocol/genesis/src/updates/mod.rs b/rust/kona/crates/protocol/genesis/src/updates/mod.rs new file mode 100644 index 00000000000..dd94bccb12d --- /dev/null +++ b/rust/kona/crates/protocol/genesis/src/updates/mod.rs @@ -0,0 +1,27 @@ +//! Contains all updates to the [`crate::SystemConfig`] type. 
+ +mod common; + +mod batcher; +pub use batcher::BatcherUpdate; + +mod signer; +pub use signer::UnsafeBlockSignerUpdate; + +mod gas_config; +pub use gas_config::GasConfigUpdate; + +mod gas_limit; +pub use gas_limit::GasLimitUpdate; + +mod eip1559; +pub use eip1559::Eip1559Update; + +mod operator_fee; +pub use operator_fee::OperatorFeeUpdate; + +mod min_base_fee; +pub use min_base_fee::MinBaseFeeUpdate; + +mod da_footprint_gas_scalar; +pub use da_footprint_gas_scalar::DaFootprintGasScalarUpdate; diff --git a/kona/crates/protocol/genesis/src/updates/operator_fee.rs b/rust/kona/crates/protocol/genesis/src/updates/operator_fee.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/operator_fee.rs rename to rust/kona/crates/protocol/genesis/src/updates/operator_fee.rs diff --git a/kona/crates/protocol/genesis/src/updates/signer.rs b/rust/kona/crates/protocol/genesis/src/updates/signer.rs similarity index 100% rename from kona/crates/protocol/genesis/src/updates/signer.rs rename to rust/kona/crates/protocol/genesis/src/updates/signer.rs diff --git a/rust/kona/crates/protocol/hardforks/Cargo.toml b/rust/kona/crates/protocol/hardforks/Cargo.toml new file mode 100644 index 00000000000..0210f18d8c1 --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/Cargo.toml @@ -0,0 +1,42 @@ +[package] +name = "kona-hardforks" +version = "0.4.5" +description = "Consensus hardfork types for the OP Stack" + +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-protocol.workspace = true + +# Alloy +alloy-eips.workspace = true +alloy-primitives = { workspace = true, features = ["rlp"] } + +# OP Alloy +op-alloy-consensus.workspace = true + +[dev-dependencies] +alloy-primitives = { workspace = true, features = ["rand", "arbitrary"] } +revm = { version = "34.0.0", 
default-features = false } +op-revm = { version = "15.0.0", default-features = false } + +[features] +default = [] +std = [ + "alloy-eips/std", + "alloy-primitives/std", + "kona-protocol/std", + "op-alloy-consensus/std", +] +k256 = [ "alloy-primitives/k256", "op-alloy-consensus/k256" ] +kzg = [ "alloy-eips/kzg", "op-alloy-consensus/kzg", "std" ] diff --git a/rust/kona/crates/protocol/hardforks/README.md b/rust/kona/crates/protocol/hardforks/README.md new file mode 100644 index 00000000000..84c8e0705f9 --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/README.md @@ -0,0 +1,9 @@ +# `kona-hardforks` + +Consensus layer hardfork types for the OP Stack including network upgrade transactions. + +### Provenance + +This code was ported from [op-alloy] as part of `kona` monorepo migrations. + +[op-alloy]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy diff --git a/kona/crates/protocol/hardforks/src/bytecode/crossl2inbox_interop.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/crossl2inbox_interop.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/crossl2inbox_interop.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/crossl2inbox_interop.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_0.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_0.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_0.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_0.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_1.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_1.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_1.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_1.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_2.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_2.hex
similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_2.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_2.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_3.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_3.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_3.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_3.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_4.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_4.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_4.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_4.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_5.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_5.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_5.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ecotone_tx_5.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/eip2935_isthmus.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/eip2935_isthmus.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/eip2935_isthmus.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/eip2935_isthmus.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/eip4788_ecotone.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/eip4788_ecotone.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/eip4788_ecotone.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/eip4788_ecotone.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_0.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_0.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/fjord_tx_0.hex rename 
to rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_0.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_1.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_1.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/fjord_tx_1.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_1.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_2.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_2.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/fjord_tx_2.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/fjord_tx_2.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/gpo_ecotone.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/gpo_ecotone.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/gpo_ecotone.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/gpo_ecotone.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/gpo_fjord.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/gpo_fjord.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/gpo_fjord.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/gpo_fjord.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/gpo_isthmus.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/gpo_isthmus.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/gpo_isthmus.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/gpo_isthmus.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/interop_tx_0.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_0.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/interop_tx_0.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_0.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/interop_tx_1.hex 
b/rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_1.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/interop_tx_1.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_1.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/interop_tx_2.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_2.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/interop_tx_2.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_2.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/interop_tx_3.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_3.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/interop_tx_3.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/interop_tx_3.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_0.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_0.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_0.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_0.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_1.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_1.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_1.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_1.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_2.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_2.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_2.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_2.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_3.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_3.hex similarity index 100% rename from 
kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_3.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_3.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_4.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_4.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_4.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_4.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_5.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_5.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_5.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_5.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_6.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_6.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_6.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_6.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_7.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_7.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_7.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/isthmus_tx_7.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/jovian-gas-price-oracle-deployment.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/jovian-gas-price-oracle-deployment.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/jovian-gas-price-oracle-deployment.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/jovian-gas-price-oracle-deployment.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/jovian-l1-block-deployment.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/jovian-l1-block-deployment.hex similarity index 100% rename from 
kona/crates/protocol/hardforks/src/bytecode/jovian-l1-block-deployment.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/jovian-l1-block-deployment.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/l1_block_ecotone.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/l1_block_ecotone.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/l1_block_ecotone.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/l1_block_ecotone.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/l1_block_isthmus.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/l1_block_isthmus.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/l1_block_isthmus.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/l1_block_isthmus.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/l2tol2_xdm_interop.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/l2tol2_xdm_interop.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/l2tol2_xdm_interop.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/l2tol2_xdm_interop.hex diff --git a/kona/crates/protocol/hardforks/src/bytecode/ofv_isthmus.hex b/rust/kona/crates/protocol/hardforks/src/bytecode/ofv_isthmus.hex similarity index 100% rename from kona/crates/protocol/hardforks/src/bytecode/ofv_isthmus.hex rename to rust/kona/crates/protocol/hardforks/src/bytecode/ofv_isthmus.hex diff --git a/rust/kona/crates/protocol/hardforks/src/ecotone.rs b/rust/kona/crates/protocol/hardforks/src/ecotone.rs new file mode 100644 index 00000000000..0302987e9ec --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/ecotone.rs @@ -0,0 +1,307 @@ +//! Module containing a [`TxDeposit`] builder for the Ecotone network upgrade transactions. 
+ +use alloc::{string::String, vec::Vec}; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex}; +use kona_protocol::Predeploys; +use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; + +use crate::Hardfork; + +/// The Ecotone network upgrade transactions. +#[derive(Debug, Default, Clone, Copy)] +pub struct Ecotone; + +impl Ecotone { + /// The Gas Price Oracle Address + /// This is computed by using go-ethereum's `crypto.CreateAddress` function, + /// with the Gas Price Oracle Deployer Address and nonce 0. + pub const GAS_PRICE_ORACLE: Address = address!("b528d11cc114e026f138fe568744c6d45ce6da7a"); + + /// The depositor account address. + pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); + + /// The Enable Ecotone Input Method 4Byte Signature + pub const ENABLE_ECOTONE_INPUT: [u8; 4] = hex!("22b90ab3"); + + /// L1 Block Deployer Address + pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000000"); + + /// The Gas Price Oracle Deployer Address + pub const GAS_PRICE_ORACLE_DEPLOYER: Address = + address!("4210000000000000000000000000000000000001"); + + /// The new L1 Block Address + /// This is computed by using go-ethereum's `crypto.CreateAddress` function, + /// with the L1 Block Deployer Address and nonce 0. 
+ pub const NEW_L1_BLOCK: Address = address!("07dbe8500fc591d1852b76fee44d5a05e13097ff"); + + /// EIP-4788 From Address + pub const EIP4788_FROM: Address = address!("0B799C86a49DEeb90402691F1041aa3AF2d3C875"); + + /// The L1 Block Deployer Code Hash + /// See: + pub const L1_BLOCK_DEPLOYER_CODE_HASH: B256 = alloy_primitives::b256!( + "0xc88a313aa75dc4fbf0b6850d9f9ae41e04243b7008cf3eadb29256d4a71c1dfd" + ); + /// The Gas Price Oracle Code Hash + /// See: + pub const GAS_PRICE_ORACLE_CODE_HASH: B256 = alloy_primitives::b256!( + "0x8b71360ea773b4cfaf1ae6d2bd15464a4e1e2e360f786e475f63aeaed8da0ae5" + ); + + /// Returns the source hash for the deployment of the l1 block contract. + pub fn deploy_l1_block_source() -> B256 { + UpgradeDepositSource { intent: String::from("Ecotone: L1 Block Deployment") }.source_hash() + } + + /// Returns the source hash for the deployment of the gas price oracle contract. + pub fn deploy_gas_price_oracle_source() -> B256 { + UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Deployment") } + .source_hash() + } + + /// Returns the source hash for the update of the l1 block proxy. + pub fn update_l1_block_source() -> B256 { + UpgradeDepositSource { intent: String::from("Ecotone: L1 Block Proxy Update") } + .source_hash() + } + + /// Returns the source hash for the update of the gas price oracle proxy. + pub fn update_gas_price_oracle_source() -> B256 { + UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Proxy Update") } + .source_hash() + } + + /// Returns the source hash for the Ecotone Beacon Block Roots Contract deployment. + pub fn beacon_roots_source() -> B256 { + UpgradeDepositSource { + intent: String::from("Ecotone: beacon block roots contract deployment"), + } + .source_hash() + } + + /// Returns the source hash for the Ecotone Gas Price Oracle activation. 
+ pub fn enable_ecotone_source() -> B256 { + UpgradeDepositSource { intent: String::from("Ecotone: Gas Price Oracle Set Ecotone") } + .source_hash() + } + + /// Returns the EIP-4788 creation data. + pub fn eip4788_creation_data() -> Bytes { + hex::decode(include_str!("./bytecode/eip4788_ecotone.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the raw bytecode for the L1 Block deployment. + pub fn l1_block_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/l1_block_ecotone.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the gas price oracle deployment bytecode. + pub fn ecotone_gas_price_oracle_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/gpo_ecotone.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the list of [`TxDeposit`]s for the Ecotone network upgrade. + pub fn deposits() -> impl Iterator { + ([ + // Deploy the L1 Block contract for Ecotone. + // See: + TxDeposit { + source_hash: Self::deploy_l1_block_source(), + from: Self::L1_BLOCK_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 375_000, + is_system_transaction: false, + input: Self::l1_block_deployment_bytecode(), + }, + // Deploy the Gas Price Oracle contract for Ecotone. + // See: + TxDeposit { + source_hash: Self::deploy_gas_price_oracle_source(), + from: Self::GAS_PRICE_ORACLE_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 1_000_000, + is_system_transaction: false, + input: Self::ecotone_gas_price_oracle_deployment_bytecode(), + }, + // Updates the l1 block proxy to point to the new L1 Block contract. 
+ // See: + TxDeposit { + source_hash: Self::update_l1_block_source(), + from: Address::ZERO, + to: TxKind::Call(Predeploys::L1_BLOCK_INFO), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::NEW_L1_BLOCK), + }, + // Updates the gas price oracle proxy to point to the new Gas Price Oracle contract. + // See: + TxDeposit { + source_hash: Self::update_gas_price_oracle_source(), + from: Address::ZERO, + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::GAS_PRICE_ORACLE), + }, + // Enables the Ecotone Gas Price Oracle. + // See: + TxDeposit { + source_hash: Self::enable_ecotone_source(), + from: Self::DEPOSITOR_ACCOUNT, + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 80_000, + is_system_transaction: false, + input: Self::ENABLE_ECOTONE_INPUT.into(), + }, + // Deploys the beacon block roots contract. + // See: + TxDeposit { + source_hash: Self::beacon_roots_source(), + from: Self::EIP4788_FROM, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 250_000, + is_system_transaction: false, + input: Self::eip4788_creation_data(), + }, + ]) + .into_iter() + } +} + +impl Hardfork for Ecotone { + /// Constructs the Ecotone network upgrade transactions. 
+ fn txs(&self) -> impl Iterator + '_ { + Self::deposits().map(|tx| { + let mut encoded = Vec::new(); + tx.encode_2718(&mut encoded); + Bytes::from(encoded) + }) + } +} + +#[cfg(test)] +mod tests { + use crate::test_utils::check_deployment_code; + + use super::*; + use alloc::vec; + + #[test] + fn test_deploy_l1_block_source() { + assert_eq!( + Ecotone::deploy_l1_block_source(), + hex!("877a6077205782ea15a6dc8699fa5ebcec5e0f4389f09cb8eda09488231346f8") + ); + } + #[test] + fn test_verify_ecotone_l1_deployment_code_hash() { + let txs = Ecotone::deposits().collect::>(); + + check_deployment_code( + txs[0].clone(), + Ecotone::NEW_L1_BLOCK, + Ecotone::L1_BLOCK_DEPLOYER_CODE_HASH, + ); + } + + #[test] + fn test_verify_ecotone_gas_price_oracle_deployment_code_hash() { + let txs = Ecotone::deposits().collect::>(); + + check_deployment_code( + txs[1].clone(), + Ecotone::GAS_PRICE_ORACLE, + Ecotone::GAS_PRICE_ORACLE_CODE_HASH, + ); + } + + #[test] + fn test_deploy_gas_price_oracle_source() { + assert_eq!( + Ecotone::deploy_gas_price_oracle_source(), + hex!("a312b4510adf943510f05fcc8f15f86995a5066bd83ce11384688ae20e6ecf42") + ); + } + + #[test] + fn test_update_l1_block_source() { + assert_eq!( + Ecotone::update_l1_block_source(), + hex!("18acb38c5ff1c238a7460ebc1b421fa49ec4874bdf1e0a530d234104e5e67dbc") + ); + } + + #[test] + fn test_update_gas_price_oracle_source() { + assert_eq!( + Ecotone::update_gas_price_oracle_source(), + hex!("ee4f9385eceef498af0be7ec5862229f426dec41c8d42397c7257a5117d9230a") + ); + } + + #[test] + fn test_enable_ecotone_source() { + assert_eq!( + Ecotone::enable_ecotone_source(), + hex!("0c1cb38e99dbc9cbfab3bb80863380b0905290b37eb3d6ab18dc01c1f3e75f93") + ); + } + + #[test] + fn test_beacon_block_roots_source() { + assert_eq!( + Ecotone::beacon_roots_source(), + hex!("69b763c48478b9dc2f65ada09b3d92133ec592ea715ec65ad6e7f3dc519dc00c") + ); + } + + #[test] + fn test_ecotone_txs_encoded() { + let ecotone_upgrade_tx = Ecotone.txs().collect::>(); + 
assert_eq!(ecotone_upgrade_tx.len(), 6); + + let expected_txs: Vec = vec![ + hex::decode(include_str!("./bytecode/ecotone_tx_0.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/ecotone_tx_1.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/ecotone_tx_2.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/ecotone_tx_3.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/ecotone_tx_4.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/ecotone_tx_5.hex").replace('\n', "")) + .unwrap() + .into(), + ]; + for (i, expected) in expected_txs.iter().enumerate() { + assert_eq!(ecotone_upgrade_tx[i], *expected); + } + } +} diff --git a/kona/crates/protocol/hardforks/src/fjord.rs b/rust/kona/crates/protocol/hardforks/src/fjord.rs similarity index 97% rename from kona/crates/protocol/hardforks/src/fjord.rs rename to rust/kona/crates/protocol/hardforks/src/fjord.rs index 8068d4364b1..f71f7fe8670 100644 --- a/kona/crates/protocol/hardforks/src/fjord.rs +++ b/rust/kona/crates/protocol/hardforks/src/fjord.rs @@ -50,7 +50,7 @@ impl Fjord { .source_hash() } - /// [UpgradeDepositSource] for setting the Fjord Gas Price Oracle. + /// [`UpgradeDepositSource`] for setting the Fjord Gas Price Oracle. pub fn enable_fjord_source() -> B256 { UpgradeDepositSource { intent: String::from("Fjord: Gas Price Oracle Set Fjord") } .source_hash() @@ -58,7 +58,7 @@ impl Fjord { /// Returns the fjord gas price oracle deployment bytecode. 
pub fn gas_price_oracle_deployment_bytecode() -> alloy_primitives::Bytes { - hex::decode(include_str!("./bytecode/gpo_fjord.hex").replace("\n", "")) + hex::decode(include_str!("./bytecode/gpo_fjord.hex").replace('\n', "")) .expect("Expected hex byte string") .into() } @@ -155,13 +155,13 @@ mod tests { assert_eq!(fjord_upgrade_tx.len(), 3); let expected_txs: Vec = vec![ - hex::decode(include_str!("./bytecode/fjord_tx_0.hex").replace("\n", "")) + hex::decode(include_str!("./bytecode/fjord_tx_0.hex").replace('\n', "")) .unwrap() .into(), - hex::decode(include_str!("./bytecode/fjord_tx_1.hex").replace("\n", "")) + hex::decode(include_str!("./bytecode/fjord_tx_1.hex").replace('\n', "")) .unwrap() .into(), - hex::decode(include_str!("./bytecode/fjord_tx_2.hex").replace("\n", "")) + hex::decode(include_str!("./bytecode/fjord_tx_2.hex").replace('\n', "")) .unwrap() .into(), ]; diff --git a/kona/crates/protocol/hardforks/src/forks.rs b/rust/kona/crates/protocol/hardforks/src/forks.rs similarity index 97% rename from kona/crates/protocol/hardforks/src/forks.rs rename to rust/kona/crates/protocol/hardforks/src/forks.rs index aaf111dbcd0..c8c368ac525 100644 --- a/kona/crates/protocol/hardforks/src/forks.rs +++ b/rust/kona/crates/protocol/hardforks/src/forks.rs @@ -1,4 +1,4 @@ -//! Contains all hardforks represented in the [crate::Hardfork] type. +//! Contains all hardforks represented in the [`crate::Hardfork`] type. use crate::{Ecotone, Fjord, Interop, Isthmus, Jovian}; diff --git a/rust/kona/crates/protocol/hardforks/src/interop.rs b/rust/kona/crates/protocol/hardforks/src/interop.rs new file mode 100644 index 00000000000..10a821907cf --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/interop.rs @@ -0,0 +1,223 @@ +//! Module containing a [`TxDeposit`] builder for the Interop network upgrade transactions. +//! +//! Interop network upgrade transactions are defined in the [OP Stack Specs][specs]. +//! +//! 
[specs]: https://specs.optimism.io/interop/derivation.html#network-upgrade-transactions + +use alloc::string::String; +use alloy_eips::Encodable2718; +use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, b256, hex}; +use kona_protocol::Predeploys; +use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; + +use crate::Hardfork; + +/// The Interop network upgrade transactions. +#[derive(Debug, Default, Clone, Copy)] +pub struct Interop; + +impl Interop { + /// The deployer of the `CrossL2Inbox` contract. + pub const CROSS_L2_INBOX_DEPLOYER: Address = + address!("0x4220000000000000000000000000000000000000"); + + /// The deployer of the `L2ToL2CrossDomainMessenger` contract. + pub const L2_TO_L2_XDM_DEPLOYER: Address = + address!("0x4220000000000000000000000000000000000001"); + + /// The deployed address of the `CrossL2Inbox` implementation contract. + pub const NEW_CROSS_L2_INBOX_IMPL: Address = + address!("0x691300f512e48B463C2617b34Eef1A9f82EE7dBf"); + + /// The code hash of the deployed `CrossL2Inbox` implementation contract. + pub const CROSS_L2_INBOX_IMPL_CODE_HASH: B256 = + b256!("0x0e7d028dd71bac22d1fb28966043c8d35c3232c78b7fb99fd1db112b5b60d9dd"); + + /// The deployment address of the `L2ToL2CrossDomainMessenger` implementation contract. + pub const NEW_L2_TO_L2_XDM_IMPL: Address = + address!("0x0D0eDd0ebd0e94d218670a8De867Eb5C4d37cadD"); + + /// The code hash of the deployed `L2ToL2CrossDomainMessenger` implementation contract. + pub const L2_TO_L2_XDM_IMPL_CODE_HASH: B256 = + b256!("0x458925c90ec70736600bef3d6529643a0e7a0a848e62626d61314c057b4a71a9"); + + /// Returns the source hash for the `CrossL2Inbox` contract deployment transaction. + pub fn deploy_cross_l2_inbox_source() -> B256 { + UpgradeDepositSource { intent: String::from("Interop: CrossL2Inbox Deployment") } + .source_hash() + } + + /// Returns the source hash for the `CrossL2Inbox` proxy upgrade transaction. 
+ pub fn upgrade_cross_l2_inbox_proxy_source() -> B256 { + UpgradeDepositSource { intent: String::from("Interop: CrossL2Inbox Proxy Update") } + .source_hash() + } + + /// Returns the source hash for the `L2ToL2CrossDomainMessenger` deployment transaction. + pub fn deploy_l2_to_l2_xdm_source() -> B256 { + UpgradeDepositSource { + intent: String::from("Interop: L2ToL2CrossDomainMessenger Deployment"), + } + .source_hash() + } + + /// Returns the source hash for the `L2ToL2CrossDomainMessenger` proxy upgrade transaction. + pub fn upgrade_l2_to_l2_xdm_proxy_source() -> B256 { + UpgradeDepositSource { + intent: String::from("Interop: L2ToL2CrossDomainMessenger Proxy Update"), + } + .source_hash() + } + + /// Returns the `CrossL2Inbox` deployment bytecode. + pub fn cross_l2_inbox_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/crossl2inbox_interop.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the `L2ToL2CrossDomainMessenger` proxy upgrade bytecode. + pub fn l2_to_l2_xdm_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/l2tol2_xdm_interop.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the list of [`TxDeposit`]s for the network upgrade. 
+ pub fn deposits() -> impl Iterator { + ([ + TxDeposit { + source_hash: Self::deploy_cross_l2_inbox_source(), + from: Self::CROSS_L2_INBOX_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 420_000, + is_system_transaction: false, + input: Self::cross_l2_inbox_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::upgrade_cross_l2_inbox_proxy_source(), + from: Address::ZERO, + to: TxKind::Call(Predeploys::CROSS_L2_INBOX), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::NEW_CROSS_L2_INBOX_IMPL), + }, + TxDeposit { + source_hash: Self::deploy_l2_to_l2_xdm_source(), + from: Self::L2_TO_L2_XDM_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 1_100_000, + is_system_transaction: false, + input: Self::l2_to_l2_xdm_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::upgrade_l2_to_l2_xdm_proxy_source(), + from: Address::ZERO, + to: TxKind::Call(Predeploys::L2_TO_L2_XDM), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::NEW_L2_TO_L2_XDM_IMPL), + }, + ]) + .into_iter() + } +} + +impl Hardfork for Interop { + /// Constructs the network upgrade transactions. 
+ fn txs(&self) -> impl Iterator { + Self::deposits().map(|tx| tx.encoded_2718().into()) + } +} + +#[cfg(test)] +mod test { + use alloc::{vec, vec::Vec}; + + use super::*; + use crate::test_utils::check_deployment_code; + + #[test] + fn test_deploy_cross_l2_inbox_source() { + assert_eq!( + Interop::deploy_cross_l2_inbox_source(), + b256!("0x6e5e214f73143df8fe6f6054a3ed7eb472d373376458a9c8aecdf23475beb616") + ); + } + + #[test] + fn test_upgrade_cross_l2_inbox_proxy_source() { + assert_eq!( + Interop::upgrade_cross_l2_inbox_proxy_source(), + b256!("0x88c6b48354c367125a59792a93a7b60ad7cd66e516157dbba16558c68a46d3cb") + ); + } + + #[test] + fn test_deploy_l2_to_l2_xdm_source() { + assert_eq!( + Interop::deploy_l2_to_l2_xdm_source(), + b256!("0xf5484697c7a9a791db32a3bf0763bf2ba686c77ae7d4c0a5ee8c222a92a8dcc2") + ); + } + + #[test] + fn test_upgrade_l2_to_l2_xdm_proxy_source() { + assert_eq!( + Interop::upgrade_l2_to_l2_xdm_proxy_source(), + b256!("0xe54b4d06bbcc857f41ae00e89d820339ac5ce0034aac722c817b2873e03a7e68") + ); + } + + #[test] + fn test_deploy_cross_l2_inbox_address_and_code() { + let txs = Interop::deposits().collect::>(); + check_deployment_code( + txs[0].clone(), + Interop::NEW_CROSS_L2_INBOX_IMPL, + Interop::CROSS_L2_INBOX_IMPL_CODE_HASH, + ); + } + + #[test] + fn test_deploy_l2_to_l2_xdm_address_and_code() { + let txs = Interop::deposits().collect::>(); + check_deployment_code( + txs[2].clone(), + Interop::NEW_L2_TO_L2_XDM_IMPL, + Interop::L2_TO_L2_XDM_IMPL_CODE_HASH, + ); + } + + #[test] + fn test_interop_txs_encoded() { + let interop_upgrade_tx = Interop.txs().collect::>(); + assert_eq!(interop_upgrade_tx.len(), 4); + + let expected_txs: Vec = vec![ + hex::decode(include_str!("./bytecode/interop_tx_0.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/interop_tx_1.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/interop_tx_2.hex").replace('\n', "")) + .unwrap() + .into(), + 
hex::decode(include_str!("./bytecode/interop_tx_3.hex").replace('\n', "")) + .unwrap() + .into(), + ]; + for (i, expected) in expected_txs.iter().enumerate() { + assert_eq!(interop_upgrade_tx[i], *expected); + } + } +} diff --git a/rust/kona/crates/protocol/hardforks/src/isthmus.rs b/rust/kona/crates/protocol/hardforks/src/isthmus.rs new file mode 100644 index 00000000000..47c05d2ef5a --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/isthmus.rs @@ -0,0 +1,361 @@ +//! Module containing a [`TxDeposit`] builder for the Isthmus network upgrade transactions. +//! +//! Isthmus network upgrade transactions are defined in the [OP Stack Specs][specs]. +//! +//! [specs]: https://specs.optimism.io/protocol/isthmus/derivation.html#network-upgrade-automation-transactions + +use alloc::{string::String, vec::Vec}; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex}; +use kona_protocol::Predeploys; +use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; + +use crate::Hardfork; + +/// The Isthmus network upgrade transactions. +#[derive(Debug, Default, Clone, Copy)] +pub struct Isthmus; + +impl Isthmus { + /// The depositor account address. + pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); + + /// The Enable Isthmus Input Method 4Byte Signature. + /// + /// Derive this by running `cast sig "setIsthmus()"`. 
+ pub const ENABLE_ISTHMUS_INPUT: [u8; 4] = hex!("291b0383"); + + /// EIP-2935 From Address + pub const EIP2935_FROM: Address = address!("3462413Af4609098e1E27A490f554f260213D685"); + + /// L1 Block Deployer Address + pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000003"); + + /// The Gas Price Oracle Deployer Address + pub const GAS_PRICE_ORACLE_DEPLOYER: Address = + address!("4210000000000000000000000000000000000004"); + + /// The Operator Fee Vault Deployer Address + pub const OPERATOR_FEE_VAULT_DEPLOYER: Address = + address!("4210000000000000000000000000000000000005"); + + /// The new L1 Block Address + /// This is computed by using go-ethereum's `crypto.CreateAddress` function, + /// with the L1 Block Deployer Address and nonce 0. + pub const NEW_L1_BLOCK: Address = address!("ff256497d61dcd71a9e9ff43967c13fde1f72d12"); + + /// The Gas Price Oracle Address + /// This is computed by using go-ethereum's `crypto.CreateAddress` function, + /// with the Gas Price Oracle Deployer Address and nonce 0. + pub const GAS_PRICE_ORACLE: Address = address!("93e57a196454cb919193fa9946f14943cf733845"); + + /// The Operator Fee Vault Address + /// This is computed by using go-ethereum's `crypto.CreateAddress` function, + /// with the Operator Fee Vault Deployer Address and nonce 0. 
+ pub const OPERATOR_FEE_VAULT: Address = address!("4fa2be8cd41504037f1838bce3bcc93bc68ff537"); + + /// The Isthmus L1 Block Deployer Code Hash + /// See: + pub const L1_BLOCK_DEPLOYER_CODE_HASH: B256 = alloy_primitives::b256!( + "0x8e3fe7a416d3e5f3b7be74ddd4e7e58e516fa3f80b67c6d930e3cd7297da4a4b" + ); + + /// The Isthmus Gas Price Oracle Code Hash + /// See: + pub const GAS_PRICE_ORACLE_CODE_HASH: B256 = alloy_primitives::b256!( + "0x4d195a9d7caf9fb6d4beaf80de252c626c853afd5868c4f4f8d19c9d301c2679" + ); + /// The Isthmus Operator Fee Vault Code Hash + /// See: + pub const OPERATOR_FEE_VAULT_CODE_HASH: B256 = alloy_primitives::b256!( + "0x57dc55c9c09ca456fa728f253fe7b895d3e6aae0706104935fe87c7721001971" + ); + /// Returns the source hash for the Isthmus Gas Price Oracle activation. + pub fn enable_isthmus_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Set Isthmus") } + .source_hash() + } + + /// Returns the source hash for the EIP-2935 block hash history contract deployment. + pub fn block_hash_history_contract_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: EIP-2935 Contract Deployment") } + .source_hash() + } + + /// Returns the source hash for the deployment of the gas price oracle contract. + pub fn deploy_gas_price_oracle_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Deployment") } + .source_hash() + } + + /// Returns the source hash for the deployment of the l1 block contract. + pub fn deploy_l1_block_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: L1 Block Deployment") }.source_hash() + } + + /// Returns the source hash for the deployment of the operator fee vault contract. + pub fn deploy_operator_fee_vault_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: Operator Fee Vault Deployment") } + .source_hash() + } + + /// Returns the source hash for the update of the l1 block proxy. 
+ pub fn update_l1_block_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: L1 Block Proxy Update") } + .source_hash() + } + + /// Returns the source hash for the update of the gas price oracle proxy. + pub fn update_gas_price_oracle_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: Gas Price Oracle Proxy Update") } + .source_hash() + } + + /// Returns the source hash for the update of the operator fee vault proxy. + pub fn update_operator_fee_vault_source() -> B256 { + UpgradeDepositSource { intent: String::from("Isthmus: Operator Fee Vault Proxy Update") } + .source_hash() + } + + /// Returns the raw bytecode for the L1 Block deployment. + pub fn l1_block_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/l1_block_isthmus.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the gas price oracle deployment bytecode. + pub fn gas_price_oracle_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/gpo_isthmus.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the gas price oracle deployment bytecode. + pub fn operator_fee_vault_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/ofv_isthmus.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the EIP-2935 creation data. + pub fn eip2935_creation_data() -> Bytes { + hex::decode(include_str!("./bytecode/eip2935_isthmus.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the list of [`TxDeposit`]s for the network upgrade. 
+ pub fn deposits() -> impl Iterator { + ([ + TxDeposit { + source_hash: Self::deploy_l1_block_source(), + from: Self::L1_BLOCK_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 425_000, + is_system_transaction: false, + input: Self::l1_block_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::deploy_gas_price_oracle_source(), + from: Self::GAS_PRICE_ORACLE_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 1_625_000, + is_system_transaction: false, + input: Self::gas_price_oracle_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::deploy_operator_fee_vault_source(), + from: Self::OPERATOR_FEE_VAULT_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 500_000, + is_system_transaction: false, + input: Self::operator_fee_vault_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::update_l1_block_source(), + from: Address::default(), + to: TxKind::Call(Predeploys::L1_BLOCK_INFO), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::NEW_L1_BLOCK), + }, + TxDeposit { + source_hash: Self::update_gas_price_oracle_source(), + from: Address::default(), + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::GAS_PRICE_ORACLE), + }, + TxDeposit { + source_hash: Self::update_operator_fee_vault_source(), + from: Address::default(), + to: TxKind::Call(Predeploys::OPERATOR_FEE_VAULT), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: super::upgrade_to_calldata(Self::OPERATOR_FEE_VAULT), + }, + TxDeposit { + source_hash: Self::enable_isthmus_source(), + from: Self::DEPOSITOR_ACCOUNT, + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 90_000, + is_system_transaction: false, + input: 
Self::ENABLE_ISTHMUS_INPUT.into(), + }, + TxDeposit { + source_hash: Self::block_hash_history_contract_source(), + from: Self::EIP2935_FROM, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 250_000, + is_system_transaction: false, + input: Self::eip2935_creation_data(), + }, + ]) + .into_iter() + } +} + +impl Hardfork for Isthmus { + /// Constructs the network upgrade transactions. + fn txs(&self) -> impl Iterator + '_ { + Self::deposits().map(|tx| { + let mut encoded = Vec::new(); + tx.encode_2718(&mut encoded); + Bytes::from(encoded) + }) + } +} + +#[cfg(test)] +mod tests { + use crate::test_utils::check_deployment_code; + + use super::*; + use alloc::vec; + use alloy_primitives::b256; + + #[test] + fn test_l1_block_source_hash() { + let expected = b256!("3b2d0821ca2411ad5cd3595804d1213d15737188ae4cbd58aa19c821a6c211bf"); + assert_eq!(Isthmus::deploy_l1_block_source(), expected); + } + + #[test] + fn test_gas_price_oracle_source_hash() { + let expected = b256!("fc70b48424763fa3fab9844253b4f8d508f91eb1f7cb11a247c9baec0afb8035"); + assert_eq!(Isthmus::deploy_gas_price_oracle_source(), expected); + } + + #[test] + fn test_operator_fee_vault_source_hash() { + let expected = b256!("107a570d3db75e6110817eb024f09f3172657e920634111ce9875d08a16daa96"); + assert_eq!(Isthmus::deploy_operator_fee_vault_source(), expected); + } + + #[test] + fn test_l1_block_update_source_hash() { + let expected = b256!("ebe8b5cb10ca47e0d8bda8f5355f2d66711a54ddeb0ef1d30e29418c9bf17a0e"); + assert_eq!(Isthmus::update_l1_block_source(), expected); + } + + #[test] + fn test_gas_price_oracle_update_source_hash() { + let expected = b256!("ecf2d9161d26c54eda6b7bfdd9142719b1e1199a6e5641468d1bf705bc531ab0"); + assert_eq!(Isthmus::update_gas_price_oracle_source(), expected); + } + + #[test] + fn test_operator_fee_vault_update_source_hash() { + let expected = b256!("ad74e1adb877ccbe176b8fa1cc559388a16e090ddbe8b512f5b37d07d887a927"); + 
assert_eq!(Isthmus::update_operator_fee_vault_source(), expected); + } + + #[test] + fn test_enable_isthmus_source() { + let expected = b256!("3ddf4b1302548dd92939826e970f260ba36167f4c25f18390a5e8b194b295319"); + assert_eq!(Isthmus::enable_isthmus_source(), expected); + } + + #[test] + fn test_isthmus_txs_encoded() { + let isthmus_upgrade_tx = Isthmus.txs().collect::>(); + assert_eq!(isthmus_upgrade_tx.len(), 8); + + let expected_txs: Vec = vec![ + hex::decode(include_str!("./bytecode/isthmus_tx_0.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_1.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_2.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_3.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_4.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_5.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_6.hex").replace('\n', "")) + .unwrap() + .into(), + hex::decode(include_str!("./bytecode/isthmus_tx_7.hex").replace('\n', "")) + .unwrap() + .into(), + ]; + for (i, expected) in expected_txs.iter().enumerate() { + assert_eq!(isthmus_upgrade_tx[i], *expected); + } + } + #[test] + fn test_verify_isthmus_l1_block_deployment_code_hash() { + let txs = Isthmus::deposits().collect::>(); + check_deployment_code( + txs[0].clone(), + Isthmus::NEW_L1_BLOCK, + Isthmus::L1_BLOCK_DEPLOYER_CODE_HASH, + ); + } + #[test] + fn test_verify_isthmus_gas_price_oracle_deployment_code_hash() { + let txs = Isthmus::deposits().collect::>(); + + check_deployment_code( + txs[1].clone(), + Isthmus::GAS_PRICE_ORACLE, + Isthmus::GAS_PRICE_ORACLE_CODE_HASH, + ); + } + #[test] + fn test_verify_isthmus_operator_fee_vault_deployment_code_hash() { + let txs = Isthmus::deposits().collect::>(); + + check_deployment_code( + 
txs[2].clone(), + Isthmus::OPERATOR_FEE_VAULT, + Isthmus::OPERATOR_FEE_VAULT_CODE_HASH, + ); + } +} diff --git a/rust/kona/crates/protocol/hardforks/src/jovian.rs b/rust/kona/crates/protocol/hardforks/src/jovian.rs new file mode 100644 index 00000000000..43c75101de0 --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/jovian.rs @@ -0,0 +1,246 @@ +//! Module containing a [`TxDeposit`] builder for the Jovian network upgrade transactions. +//! +//! Jovian network upgrade transactions are defined in the [OP Stack Specs][specs]. +//! +//! [specs]: https://specs.optimism.io/protocol/jovian/derivation.html#network-upgrade-automation-transactions + +use alloc::{string::String, vec::Vec}; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{Address, B256, Bytes, TxKind, U256, address, hex, keccak256}; +use kona_protocol::Predeploys; +use op_alloy_consensus::{TxDeposit, UpgradeDepositSource}; + +use crate::{Hardfork, upgrade_to_calldata}; + +/// The Jovian network upgrade transactions. +#[derive(Debug, Default, Clone, Copy)] +pub struct Jovian; + +impl Jovian { + /// The depositor account address. + pub const DEPOSITOR_ACCOUNT: Address = address!("DeaDDEaDDeAdDeAdDEAdDEaddeAddEAdDEAd0001"); + + /// L1 Block Deployer Address + pub const L1_BLOCK_DEPLOYER: Address = address!("4210000000000000000000000000000000000006"); + + /// Zero address + pub const ZERO_ADDRESS: Address = address!("0x0000000000000000000000000000000000000000"); + + /// The Gas Price Oracle Deployer Address + pub const GAS_PRICE_ORACLE_DEPLOYER: Address = + address!("4210000000000000000000000000000000000007"); + + /// Returns the source hash for the deployment of the l1 block contract. + pub fn deploy_l1_block_source() -> B256 { + UpgradeDepositSource { intent: String::from("Jovian: L1 Block Deployment") }.source_hash() + } + + /// Returns the source hash for the deployment of the gas price oracle contract. 
+ pub fn l1_block_proxy_update() -> B256 { + UpgradeDepositSource { intent: String::from("Jovian: L1 Block Proxy Update") }.source_hash() + } + + /// Returns the source hash for the deployment of the operator fee vault contract. + pub fn gas_price_oracle() -> B256 { + UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Deployment") } + .source_hash() + } + + /// Returns the source hash for the update of the l1 block proxy. + pub fn gas_price_oracle_proxy_update() -> B256 { + UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Proxy Update") } + .source_hash() + } + + /// The Jovian L1 Block Address + /// This is computed by using `Address::create` function, + /// with the L1 Block Deployer Address and nonce 0. + pub fn l1_block_address() -> Address { + Self::L1_BLOCK_DEPLOYER.create(0) + } + + /// The Jovian Gas Price Oracle Address + /// This is computed by using `Address::create` function, + /// with the Gas Price Oracle Deployer Address and nonce 0. + pub fn gas_price_oracle_address() -> Address { + Self::GAS_PRICE_ORACLE_DEPLOYER.create(0) + } + + /// Returns the source hash to the enable the gas price oracle for Jovian. + pub fn gas_price_oracle_enable_jovian() -> B256 { + UpgradeDepositSource { intent: String::from("Jovian: Gas Price Oracle Set Jovian") } + .source_hash() + } + + /// Returns the raw bytecode for the L1 Block deployment. + pub fn l1_block_deployment_bytecode() -> Bytes { + hex::decode(include_str!("./bytecode/jovian-l1-block-deployment.hex").replace('\n', "")) + .expect("Expected hex byte string") + .into() + } + + /// Returns the gas price oracle deployment bytecode. + pub fn gas_price_oracle_deployment_bytecode() -> Bytes { + hex::decode( + include_str!("./bytecode/jovian-gas-price-oracle-deployment.hex").replace('\n', ""), + ) + .expect("Expected hex byte string") + .into() + } + + /// Returns the bytecode to enable the gas price oracle for Jovian. 
+ pub fn gas_price_oracle_enable_jovian_bytecode() -> Bytes { + let mut bytes = Vec::new(); + bytes.extend_from_slice(&keccak256("setJovian()")[..4]); + bytes.into() + } + + /// Returns the list of [`TxDeposit`]s for the network upgrade. + pub fn deposits() -> impl Iterator { + ([ + TxDeposit { + source_hash: Self::deploy_l1_block_source(), + from: Self::L1_BLOCK_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 447_315, + is_system_transaction: false, + input: Self::l1_block_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::l1_block_proxy_update(), + from: Self::ZERO_ADDRESS, + to: TxKind::Call(Predeploys::L1_BLOCK_INFO), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: upgrade_to_calldata(Self::l1_block_address()), + }, + TxDeposit { + source_hash: Self::gas_price_oracle(), + from: Self::GAS_PRICE_ORACLE_DEPLOYER, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 1_750_714, + is_system_transaction: false, + input: Self::gas_price_oracle_deployment_bytecode(), + }, + TxDeposit { + source_hash: Self::gas_price_oracle_proxy_update(), + from: Self::ZERO_ADDRESS, + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 50_000, + is_system_transaction: false, + input: upgrade_to_calldata(Self::gas_price_oracle_address()), + }, + TxDeposit { + source_hash: Self::gas_price_oracle_enable_jovian(), + from: Self::DEPOSITOR_ACCOUNT, + to: TxKind::Call(Predeploys::GAS_PRICE_ORACLE), + mint: 0, + value: U256::ZERO, + gas_limit: 90_000, + is_system_transaction: false, + input: Self::gas_price_oracle_enable_jovian_bytecode(), + }, + ]) + .into_iter() + } +} + +impl Hardfork for Jovian { + /// Constructs the network upgrade transactions. 
+ fn txs(&self) -> impl Iterator + '_ { + Self::deposits().map(|tx| { + let mut encoded = Vec::new(); + tx.encode_2718(&mut encoded); + Bytes::from(encoded) + }) + } +} + +#[cfg(test)] +mod tests { + use crate::test_utils::check_deployment_code; + + use super::*; + use alloy_primitives::b256; + + #[test] + fn test_l1_block_source_hash() { + let expected = b256!("bb1a656f65401240fac3db12e7a79ebb954b11e62f7626eb11691539b798d3bf"); + assert_eq!(Jovian::deploy_l1_block_source(), expected); + } + + #[test] + fn test_l1_block_proxy_update_source_hash() { + let expected = b256!("f3275f829340521028f9ad5bce4ecb1c64a45d448794effa2a77674627338e76"); + assert_eq!(Jovian::l1_block_proxy_update(), expected); + } + + #[test] + fn test_gas_price_oracle_source_hash() { + let expected = b256!("239b7021a6c2cf3a918481242bbb5a9499057f24501539467536c691bb133962"); + assert_eq!(Jovian::gas_price_oracle(), expected); + } + + #[test] + fn test_upgrade_to_calldata_for_gas_price_oracle() { + assert_eq!( + **upgrade_to_calldata(Jovian::gas_price_oracle_address()), + hex!("0x3659cfe60000000000000000000000004f1db3c6abd250ba86e0928471a8f7db3afd88f1") + ); + } + + #[test] + fn test_upgrade_to_calldata_for_l1_block_proxy_update() { + assert_eq!( + **upgrade_to_calldata(Jovian::l1_block_address()), + hex!("0x3659cfe60000000000000000000000003ba4007f5c922fbb33c454b41ea7a1f11e83df2c") + ); + } + + #[test] + fn test_gas_price_oracle_proxy_update_source_hash() { + let expected = b256!("a70c60aa53b8c1c0d52b39b1e901e7d7c09f7819595cb24048a6bb1983b401ff"); + assert_eq!(Jovian::gas_price_oracle_proxy_update(), expected); + } + + #[test] + fn test_gas_price_oracle_enable_jovian_source_hash() { + let expected = b256!("e836db6a959371756f8941be3e962d000f7e12a32e49e2c9ca42ba177a92716c"); + assert_eq!(Jovian::gas_price_oracle_enable_jovian(), expected); + } + + #[test] + fn test_verify_jovian_l1_block_deployment_code_hash() { + let txs = Jovian::deposits().collect::>(); + check_deployment_code( + txs[0].clone(), + 
Jovian::l1_block_address(), + hex!("5f885ca815d2cf27a203123e50b8ae204fdca910b6995d90b2d7700cbb9240d1").into(), + ); + } + + #[test] + fn test_verify_set_jovian() { + let hash = &keccak256("setJovian()")[..4]; + assert_eq!(hash, hex!("0xb3d72079")) + } + + #[test] + fn test_verify_jovian_gas_price_oracle_deployment_code_hash() { + let txs = Jovian::deposits().collect::>(); + + check_deployment_code( + txs[2].clone(), + Jovian::gas_price_oracle_address(), + hex!("e9fc7c96c4db0d6078e3d359d7e8c982c350a513cb2c31121adf5e1e8a446614").into(), + ); + } +} diff --git a/rust/kona/crates/protocol/hardforks/src/lib.rs b/rust/kona/crates/protocol/hardforks/src/lib.rs new file mode 100644 index 00000000000..044e92c83a4 --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/lib.rs @@ -0,0 +1,37 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +mod traits; +pub use traits::Hardfork; + +mod forks; +pub use forks::Hardforks; + +mod fjord; +pub use fjord::Fjord; + +mod ecotone; +pub use ecotone::Ecotone; + +mod isthmus; +pub use isthmus::Isthmus; + +mod interop; +pub use interop::Interop; + +mod jovian; +pub use jovian::Jovian; + +mod utils; +pub(crate) use utils::upgrade_to_calldata; + +#[cfg(test)] +mod test_utils; diff --git a/kona/crates/protocol/hardforks/src/test_utils.rs b/rust/kona/crates/protocol/hardforks/src/test_utils.rs similarity index 100% rename from kona/crates/protocol/hardforks/src/test_utils.rs rename to rust/kona/crates/protocol/hardforks/src/test_utils.rs diff --git a/kona/crates/protocol/hardforks/src/traits.rs 
b/rust/kona/crates/protocol/hardforks/src/traits.rs similarity index 100% rename from kona/crates/protocol/hardforks/src/traits.rs rename to rust/kona/crates/protocol/hardforks/src/traits.rs diff --git a/rust/kona/crates/protocol/hardforks/src/utils.rs b/rust/kona/crates/protocol/hardforks/src/utils.rs new file mode 100644 index 00000000000..55e64505b01 --- /dev/null +++ b/rust/kona/crates/protocol/hardforks/src/utils.rs @@ -0,0 +1,54 @@ +//! Utilities for creating hardforks. + +use alloy_primitives::{Address, Bytes, hex}; + +/// `UpgradeTo` Function 4Byte Signature +pub(crate) const UPGRADE_TO_FUNC_BYTES_4: [u8; 4] = hex!("3659cfe6"); + +/// Turns the given address into calldata for the `upgradeTo` function. +pub(crate) fn upgrade_to_calldata(addr: Address) -> Bytes { + let mut v = UPGRADE_TO_FUNC_BYTES_4.to_vec(); + v.extend_from_slice(addr.into_word().as_slice()); + v.into() +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{Ecotone, Fjord, Isthmus}; + use alloy_primitives::keccak256; + + #[test] + fn test_upgrade_to_selector_is_valid() { + let expected_selector = &keccak256("upgradeTo(address)")[..4]; + assert_eq!(UPGRADE_TO_FUNC_BYTES_4, expected_selector); + } + + #[test] + fn test_upgrade_to_calldata_format() { + let test_addr = Address::from([0x42; 20]); + let calldata = upgrade_to_calldata(test_addr); + + assert_eq!(calldata.len(), 36); + assert_eq!(&calldata[..4], UPGRADE_TO_FUNC_BYTES_4); + assert_eq!(&calldata[4..36], test_addr.into_word().as_slice()); + } + + #[test] + fn test_ecotone_selector_is_valid() { + let expected_selector = &keccak256("setEcotone()")[..4]; + assert_eq!(Ecotone::ENABLE_ECOTONE_INPUT, expected_selector); + } + + #[test] + fn test_fjord_selector_is_valid() { + let expected_selector = &keccak256("setFjord()")[..4]; + assert_eq!(Fjord::SET_FJORD_METHOD_SIGNATURE, expected_selector); + } + + #[test] + fn test_isthmus_selector_is_valid() { + let expected_selector = &keccak256("setIsthmus()")[..4]; + 
assert_eq!(Isthmus::ENABLE_ISTHMUS_INPUT, expected_selector); + } +} diff --git a/kona/crates/protocol/interop/CHANGELOG.md b/rust/kona/crates/protocol/interop/CHANGELOG.md similarity index 100% rename from kona/crates/protocol/interop/CHANGELOG.md rename to rust/kona/crates/protocol/interop/CHANGELOG.md diff --git a/rust/kona/crates/protocol/interop/Cargo.toml b/rust/kona/crates/protocol/interop/Cargo.toml new file mode 100644 index 00000000000..59f982443f6 --- /dev/null +++ b/rust/kona/crates/protocol/interop/Cargo.toml @@ -0,0 +1,90 @@ +[package] +name = "kona-interop" +description = "Core functionality and primitives for the Interop feature of the OP Stack." +version = "0.4.5" +edition.workspace = true +authors.workspace = true +license.workspace = true +repository.workspace = true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-genesis.workspace = true +kona-registry.workspace = true +kona-protocol.workspace = true + +# General +thiserror.workspace = true +async-trait.workspace = true +tracing.workspace = true +derive_more = { workspace = true, features = ["from", "as_ref", "constructor"] } + +# Alloy +alloy-serde = { workspace = true, optional = true } +alloy-rlp.workspace = true +alloy-eips.workspace = true +alloy-sol-types.workspace = true +alloy-consensus.workspace = true +alloy-primitives = { workspace = true, features = ["rlp"] } +op-alloy-consensus.workspace = true + +# Arbitrary +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# Serde +serde = { workspace = true, optional = true } + +[dev-dependencies] +serde_json.workspace = true +tokio = { workspace = true, features = ["full"] } +alloy-primitives = { workspace = true, features = ["rlp", "arbitrary"] } +arbitrary = { workspace = true, features = ["derive"] } +rand = { workspace = true, features = ["thread_rng"] } + +[features] +default = [] +std = [ + "alloy-consensus/std", + "alloy-eips/std", + "alloy-primitives/std", + 
"alloy-rlp/std", + "alloy-serde?/std", + "alloy-sol-types/std", + "derive_more/display", + "derive_more/std", + "kona-genesis/std", + "kona-protocol/std", + "kona-registry/std", + "op-alloy-consensus/std", + "serde?/std", + "thiserror/std", + "tracing/std", +] +arbitrary = [ + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "alloy-serde?/arbitrary", + "alloy-sol-types/arbitrary", + "dep:arbitrary", + "kona-genesis/arbitrary", + "kona-protocol/arbitrary", + "op-alloy-consensus/arbitrary", + "std", +] +serde = [ + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "dep:alloy-serde", + "dep:serde", + "kona-genesis/serde", + "kona-protocol/serde", + "op-alloy-consensus/serde", +] +test-utils = [ "kona-protocol/test-utils", "std" ] + diff --git a/rust/kona/crates/protocol/interop/README.md b/rust/kona/crates/protocol/interop/README.md new file mode 100644 index 00000000000..7d14506a883 --- /dev/null +++ b/rust/kona/crates/protocol/interop/README.md @@ -0,0 +1,3 @@ +# `kona-interop` + +Core functionality and primitives for the [Interop feature](https://specs.optimism.io/interop/overview.html) of the OP Stack. diff --git a/kona/crates/protocol/interop/src/access_list.rs b/rust/kona/crates/protocol/interop/src/access_list.rs similarity index 100% rename from kona/crates/protocol/interop/src/access_list.rs rename to rust/kona/crates/protocol/interop/src/access_list.rs diff --git a/kona/crates/protocol/interop/src/constants.rs b/rust/kona/crates/protocol/interop/src/constants.rs similarity index 75% rename from kona/crates/protocol/interop/src/constants.rs rename to rust/kona/crates/protocol/interop/src/constants.rs index 567b33d46c6..ab62c27e5a7 100644 --- a/kona/crates/protocol/interop/src/constants.rs +++ b/rust/kona/crates/protocol/interop/src/constants.rs @@ -4,7 +4,5 @@ /// pub const MESSAGE_EXPIRY_WINDOW: u64 = 7 * 24 * 60 * 60; -/// The current version of the [SuperRoot] encoding format. 
-/// -/// [SuperRoot]: crate::SuperRoot +/// The current version of the [`SuperRoot`](crate::SuperRoot) encoding format. pub const SUPER_ROOT_VERSION: u8 = 1; diff --git a/kona/crates/protocol/interop/src/control.rs b/rust/kona/crates/protocol/interop/src/control.rs similarity index 100% rename from kona/crates/protocol/interop/src/control.rs rename to rust/kona/crates/protocol/interop/src/control.rs diff --git a/kona/crates/protocol/interop/src/depset.rs b/rust/kona/crates/protocol/interop/src/depset.rs similarity index 96% rename from kona/crates/protocol/interop/src/depset.rs rename to rust/kona/crates/protocol/interop/src/depset.rs index 86b821dfec0..5b9b29e7efc 100644 --- a/kona/crates/protocol/interop/src/depset.rs +++ b/rust/kona/crates/protocol/interop/src/depset.rs @@ -12,6 +12,7 @@ pub struct ChainDependency {} #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[allow(clippy::zero_sized_map_values)] pub struct DependencySet { /// Dependencies information per chain. pub dependencies: HashMap, @@ -31,6 +32,7 @@ impl DependencySet { } #[cfg(test)] +#[allow(clippy::zero_sized_map_values)] mod tests { use super::*; use alloy_primitives::ChainId; diff --git a/kona/crates/protocol/interop/src/derived.rs b/rust/kona/crates/protocol/interop/src/derived.rs similarity index 100% rename from kona/crates/protocol/interop/src/derived.rs rename to rust/kona/crates/protocol/interop/src/derived.rs diff --git a/rust/kona/crates/protocol/interop/src/errors.rs b/rust/kona/crates/protocol/interop/src/errors.rs new file mode 100644 index 00000000000..2184fbe4ff7 --- /dev/null +++ b/rust/kona/crates/protocol/interop/src/errors.rs @@ -0,0 +1,130 @@ +//! Error types for the `kona-interop` crate. 
+ +use crate::InteropProvider; +use alloy_primitives::{Address, B256}; +use core::fmt::Debug; +use kona_registry::HashMap; +use thiserror::Error; + +/// An error type for the [`MessageGraph`](crate::MessageGraph) struct. +#[derive(Debug, Clone, PartialEq, Eq, Error)] +pub enum MessageGraphError { + /// Dependency set is impossibly empty + #[error("Dependency set is impossibly empty")] + EmptyDependencySet, + /// Missing a [`RollupConfig`](kona_genesis::RollupConfig) for a chain ID + #[error("Missing a RollupConfig for chain ID {0}")] + MissingRollupConfig(u64), + /// Interop provider error + #[error("Interop provider: {0}")] + InteropProviderError(#[from] E), + /// Remote message not found + #[error("Remote message not found on chain ID {chain_id} with message hash {message_hash}")] + RemoteMessageNotFound { + /// The remote chain ID + chain_id: u64, + /// The message hash + message_hash: B256, + }, + /// Invalid message origin + #[error("Invalid message origin. Expected {expected}, got {actual}")] + InvalidMessageOrigin { + /// The expected message origin + expected: Address, + /// The actual message origin + actual: Address, + }, + /// Invalid message payload hash + #[error("Invalid message hash. Expected {expected}, got {actual}")] + InvalidMessageHash { + /// The expected message hash + expected: B256, + /// The actual message hash + actual: B256, + }, + /// Invalid message timestamp + #[error("Invalid message timestamp. Expected {expected}, got {actual}")] + InvalidMessageTimestamp { + /// The expected timestamp + expected: u64, + /// The actual timestamp + actual: u64, + }, + /// Interop has not been activated for at least one block on the initiating message's chain. + #[error( + "Interop has not been active for at least one block on initiating message's chain. 
Activation time: {activation_time}, initiating message time: {initiating_message_time}" + )] + InitiatedTooEarly { + /// The timestamp of the interop activation + activation_time: u64, + /// The timestamp of the initiating message + initiating_message_time: u64, + }, + /// Message is in the future + #[error("Message is in the future. Expected timestamp to be <= {max}, got {actual}")] + MessageInFuture { + /// The expected max timestamp + max: u64, + /// The actual timestamp + actual: u64, + }, + /// Message has exceeded the expiry window. + #[error( + "Message has exceeded the expiry window. Initiating Timestamp: {initiating_timestamp}, Executing Timestamp: {executing_timestamp}" + )] + MessageExpired { + /// The timestamp of the initiating message + initiating_timestamp: u64, + /// The timestamp of the executing message + executing_timestamp: u64, + }, + /// Invalid messages were found + #[error("Invalid messages found on chains: {0:?}")] + InvalidMessages(HashMap), +} + +/// A [Result] alias for the [`MessageGraphError`] type. +#[allow(type_alias_bounds)] +pub type MessageGraphResult = + core::result::Result>; + +/// An error type for the [`SuperRoot`](crate::SuperRoot) struct's serialization and +/// deserialization. +#[derive(Debug, Clone, Error)] +pub enum SuperRootError { + /// Invalid super root version byte + #[error("Invalid super root version byte")] + InvalidVersionByte, + /// Unexpected encoded super root length + #[error("Unexpected encoded super root length")] + UnexpectedLength, + /// Slice conversion error + #[error("Slice conversion error: {0}")] + SliceConversionError(#[from] core::array::TryFromSliceError), +} + +/// A [Result] alias for the [`SuperRootError`] type. +pub type SuperRootResult = core::result::Result; + +/// Errors that can occur during interop validation. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum InteropValidationError { + /// Interop is not enabled on one or both chains at the required timestamp. 
+ #[error("interop not enabled")] + InteropNotEnabled, + + /// Executing timestamp is earlier than the initiating timestamp. + #[error( + "executing timestamp is earlier than initiating timestamp, executing: {executing}, initiating: {initiating}" + )] + InvalidTimestampInvariant { + /// Executing timestamp of the message + executing: u64, + /// Initiating timestamp of the message + initiating: u64, + }, + + /// Timestamp is outside the allowed interop expiry window. + #[error("timestamp outside allowed interop window, timestamp: {0}")] + InvalidInteropTimestamp(u64), +} diff --git a/rust/kona/crates/protocol/interop/src/event.rs b/rust/kona/crates/protocol/interop/src/event.rs new file mode 100644 index 00000000000..a5bdd511a70 --- /dev/null +++ b/rust/kona/crates/protocol/interop/src/event.rs @@ -0,0 +1,65 @@ +//! Contains the managed node event. + +use crate::{BlockReplacement, DerivedRefPair}; +use alloc::{format, string::String, vec::Vec}; +use derive_more::Constructor; +use kona_protocol::BlockInfo; + +/// Event sent by the node to the supervisor to share updates. +/// +/// This struct is used to communicate various events that occur within the node. +/// At least one of the fields will be `Some`, and the rest will be `None`. +/// +/// See: +#[derive(Debug, Clone, Default, PartialEq, Eq, Constructor)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct ManagedEvent { + /// This is emitted when the node has determined that it needs a reset. + /// It tells the supervisor to send the `interop_reset` event with the + /// required parameters. + pub reset: Option, + + /// New L2 unsafe block was processed, updating local-unsafe head. + pub unsafe_block: Option, + + /// Signals that an L2 block is considered local-safe. + pub derivation_update: Option, + + /// Emitted when no more L1 Blocks are available. + /// Ready to take new L1 blocks from supervisor. 
+ pub exhaust_l1: Option, + + /// Emitted when a block gets replaced for any reason. + pub replace_block: Option, + + /// Signals that an L2 block is now local-safe because of the given L1 traversal. + /// This would be accompanied with [`Self::derivation_update`]. + pub derivation_origin_update: Option, +} + +impl core::fmt::Display for ManagedEvent { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let mut parts = Vec::new(); + if let Some(ref reset) = self.reset { + parts.push(format!("reset: {reset}")); + } + if let Some(ref block) = self.unsafe_block { + parts.push(format!("unsafe_block: {block}")); + } + if let Some(ref pair) = self.derivation_update { + parts.push(format!("derivation_update: {pair}")); + } + if let Some(ref pair) = self.exhaust_l1 { + parts.push(format!("exhaust_l1: {pair}")); + } + if let Some(ref replacement) = self.replace_block { + parts.push(format!("replace_block: {replacement}")); + } + if let Some(ref origin) = self.derivation_origin_update { + parts.push(format!("derivation_origin_update: {origin}")); + } + + if parts.is_empty() { write!(f, "none") } else { write!(f, "{}", parts.join(", ")) } + } +} diff --git a/kona/crates/protocol/interop/src/graph.rs b/rust/kona/crates/protocol/interop/src/graph.rs similarity index 99% rename from kona/crates/protocol/interop/src/graph.rs rename to rust/kona/crates/protocol/interop/src/graph.rs index 5316ac6067d..92012c05cee 100644 --- a/kona/crates/protocol/interop/src/graph.rs +++ b/rust/kona/crates/protocol/interop/src/graph.rs @@ -58,7 +58,7 @@ where ); let mut messages = Vec::with_capacity(blocks.len()); - for (chain_id, header) in blocks.iter() { + for (chain_id, header) in blocks { let receipts = provider.receipts_by_hash(*chain_id, header.hash()).await?; let executing_messages = extract_executing_messages(receipts.as_slice()); @@ -99,7 +99,7 @@ where // Prune all valid messages, collecting errors for any chain whose block contains an invalid // message. 
Errors are de-duplicated by chain ID in a map, since a single invalid // message is cause for invalidating a block. - for message in self.messages.iter() { + for message in &self.messages { if let Err(e) = self.check_single_dependency(message).await { warn!( target: "message_graph", diff --git a/rust/kona/crates/protocol/interop/src/lib.rs b/rust/kona/crates/protocol/interop/src/lib.rs new file mode 100644 index 00000000000..dc15696475e --- /dev/null +++ b/rust/kona/crates/protocol/interop/src/lib.rs @@ -0,0 +1,65 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +mod graph; +pub use graph::MessageGraph; + +mod event; +pub use event::ManagedEvent; + +mod control; +pub use control::ControlEvent; + +mod replacement; +pub use replacement::BlockReplacement; + +mod traits; +pub use traits::{InteropProvider, InteropValidator}; + +mod safety; +pub use safety::SafetyLevelParseError; + +mod errors; +pub use errors::{ + InteropValidationError, MessageGraphError, MessageGraphResult, SuperRootError, SuperRootResult, +}; + +mod root; +pub use root::{ChainRootInfo, OutputRootWithChain, SuperRoot, SuperRootOutput}; + +mod message; +pub use message::{ + EnrichedExecutingMessage, ExecutingDescriptor, ExecutingMessage, MessageIdentifier, + RawMessagePayload, extract_executing_messages, parse_log_to_executing_message, + parse_logs_to_executing_msgs, +}; + +mod depset; +pub use depset::{ChainDependency, DependencySet}; + +pub use op_alloy_consensus::interop::SafetyLevel; + +mod access_list; +pub use access_list::{ + parse_access_list_item_to_inbox_entries, parse_access_list_items_to_inbox_entries, +}; +mod derived; +pub use 
derived::{DerivedIdPair, DerivedRefPair}; + +mod constants; +pub use constants::{MESSAGE_EXPIRY_WINDOW, SUPER_ROOT_VERSION}; + +#[cfg(any(test, feature = "test-utils"))] +mod test_util; +#[cfg(any(test, feature = "test-utils"))] +pub use test_util::{ + ChainBuilder, ExecutingMessageBuilder, InteropProviderError, MockInteropProvider, + SuperchainBuilder, +}; diff --git a/rust/kona/crates/protocol/interop/src/message.rs b/rust/kona/crates/protocol/interop/src/message.rs new file mode 100644 index 00000000000..3ff0d530078 --- /dev/null +++ b/rust/kona/crates/protocol/interop/src/message.rs @@ -0,0 +1,241 @@ +//! Interop message primitives. +//! +//! +//! + +use alloc::{vec, vec::Vec}; +use alloy_primitives::{Bytes, ChainId, Log, keccak256}; +use alloy_sol_types::{SolEvent, sol}; +use derive_more::{AsRef, Constructor, From}; +use kona_protocol::Predeploys; +use op_alloy_consensus::OpReceiptEnvelope; + +sol! { + /// @notice The struct for a pointer to a message payload in a remote (or local) chain. + #[derive(Default, Debug, PartialEq, Eq)] + #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + struct MessageIdentifier { + address origin; + uint256 blockNumber; + uint256 logIndex; + uint256 timestamp; + #[cfg_attr(feature = "serde", serde(rename = "chainID"))] + uint256 chainId; + } + + /// @notice Emitted when a cross chain message is being executed. + /// @param payloadHash Hash of message payload being executed. + /// @param identifier Encoded Identifier of the message. + /// + /// Parameter names are derived from the `op-supervisor` JSON field names. + /// See the relevant definition in the Optimism repository: + /// [Ethereum-Optimism/op-supervisor](https://github.com/ethereum-optimism/optimism/blob/4ba2eb00eafc3d7de2c8ceb6fd83913a8c0a2c0d/op-supervisor/supervisor/types/types.go#L61-L64). 
+ #[derive(Default, Debug, PartialEq, Eq)] + #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] + event ExecutingMessage(bytes32 indexed payloadHash, MessageIdentifier identifier); + + /// @notice Executes a cross chain message on the destination chain. + /// @param _id Identifier of the message. + /// @param _target Target address to call. + /// @param _message Message payload to call target with. + function executeMessage( + MessageIdentifier calldata _id, + address _target, + bytes calldata _message + ) external; +} + +/// A [`RawMessagePayload`] is the raw payload of an initiating message. +#[derive(Debug, Clone, From, AsRef, PartialEq, Eq)] +pub struct RawMessagePayload(Bytes); + +impl From<&Log> for RawMessagePayload { + fn from(log: &Log) -> Self { + let mut data = vec![0u8; log.topics().len() * 32 + log.data.data.len()]; + for (i, topic) in log.topics().iter().enumerate() { + data[i * 32..(i + 1) * 32].copy_from_slice(topic.as_ref()); + } + data[(log.topics().len() * 32)..].copy_from_slice(log.data.data.as_ref()); + data.into() + } +} + +impl From> for RawMessagePayload { + fn from(data: Vec) -> Self { + Self(Bytes::from(data)) + } +} + +impl From for ExecutingMessage { + fn from(call: executeMessageCall) -> Self { + Self { identifier: call._id, payloadHash: keccak256(call._message.as_ref()) } + } +} + +/// An [`ExecutingDescriptor`] is a part of the payload to `supervisor_checkAccessList` +/// Spec: +#[derive(Default, Debug, PartialEq, Eq, Clone, Constructor)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct ExecutingDescriptor { + /// The timestamp used to enforce timestamp [invariant](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/derivation.md#invariants) + #[cfg_attr(feature = "serde", serde(with = "alloy_serde::quantity"))] + pub timestamp: u64, + /// The timeout that requests verification to still hold at `timestamp+timeout` + /// (message expiry may drop 
previously valid messages). + #[cfg_attr( + feature = "serde", + serde( + default, + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + ) + )] + pub timeout: Option, + /// Chain ID of the chain that the message was executed on. + #[cfg_attr( + feature = "serde", + serde( + default, + rename = "chainID", + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + ) + )] + pub chain_id: Option, +} + +/// A wrapper type for [`ExecutingMessage`] containing the chain ID of the chain that the message +/// was executed on. +#[derive(Debug)] +pub struct EnrichedExecutingMessage { + /// The inner [`ExecutingMessage`]. + pub inner: ExecutingMessage, + /// The chain ID of the chain that the message was executed on. + pub executing_chain_id: u64, + /// The timestamp of the block that the executing message was included in. + pub executing_timestamp: u64, +} + +impl EnrichedExecutingMessage { + /// Create a new [`EnrichedExecutingMessage`] from an [`ExecutingMessage`] and a chain ID. + pub const fn new( + inner: ExecutingMessage, + executing_chain_id: u64, + executing_timestamp: u64, + ) -> Self { + Self { inner, executing_chain_id, executing_timestamp } + } +} + +/// Extracts all [`ExecutingMessage`] events from list of [`OpReceiptEnvelope`]s. +/// +/// See [`parse_log_to_executing_message`]. +/// +/// Note: filters out logs that don't contain executing message events. +pub fn extract_executing_messages(receipts: &[OpReceiptEnvelope]) -> Vec { + receipts.iter().fold(Vec::new(), |mut acc, envelope| { + let executing_messages = envelope.logs().iter().filter_map(parse_log_to_executing_message); + + acc.extend(executing_messages); + acc + }) +} + +/// Parses [`Log`]s to [`ExecutingMessage`]s. +/// +/// See [`parse_log_to_executing_message`] for more details. Return iterator maps 1-1 with input. 
+pub fn parse_logs_to_executing_msgs<'a>( + logs: impl Iterator, +) -> impl Iterator> { + logs.map(parse_log_to_executing_message) +} + +/// Parse [`Log`] to [`ExecutingMessage`], if any. +/// +/// Max one [`ExecutingMessage`] event can exist per log. Returns `None` if log doesn't contain +/// executing message event. +pub fn parse_log_to_executing_message(log: &Log) -> Option { + (log.address == Predeploys::CROSS_L2_INBOX && log.topics().len() == 2) + .then(|| ExecutingMessage::decode_log_data(&log.data).ok()) + .flatten() +} + +#[cfg(test)] +mod tests { + use alloy_primitives::{Address, B256, LogData, U256}; + + use super::*; + + // Test the serialization of ExecutingDescriptor + #[cfg(feature = "serde")] + #[test] + fn test_serialize_executing_descriptor() { + let descriptor = ExecutingDescriptor { + timestamp: 1234567890, + timeout: Some(3600), + chain_id: Some(1000), + }; + let serialized = serde_json::to_string(&descriptor).unwrap(); + let expected = r#"{"timestamp":"0x499602d2","timeout":"0xe10","chainID":"0x3e8"}"#; + assert_eq!(serialized, expected); + + let deserialized: ExecutingDescriptor = serde_json::from_str(&serialized).unwrap(); + assert_eq!(descriptor, deserialized); + } + + #[cfg(feature = "serde")] + #[test] + fn test_deserialize_executing_descriptor_missing_chain_id() { + let json = r#"{ + "timestamp": "0x499602d2", + "timeout": "0xe10" + }"#; + + let expected = + ExecutingDescriptor { timestamp: 1234567890, timeout: Some(3600), chain_id: None }; + + let deserialized: ExecutingDescriptor = serde_json::from_str(json).unwrap(); + assert_eq!(deserialized, expected); + } + + #[cfg(feature = "serde")] + #[test] + fn test_deserialize_executing_descriptor_missing_timeout() { + let json = r#"{ + "timestamp": "0x499602d2", + "chainID": "0x3e8" + }"#; + + let expected = + ExecutingDescriptor { timestamp: 1234567890, timeout: None, chain_id: Some(1000) }; + + let deserialized: ExecutingDescriptor = serde_json::from_str(json).unwrap(); + 
assert_eq!(deserialized, expected); + } + + #[test] + fn test_parse_logs_to_executing_msgs_iterator() { + // One valid, one invalid log + let identifier = MessageIdentifier { + origin: Address::repeat_byte(0x77), + blockNumber: U256::from(200), + logIndex: U256::from(3), + timestamp: U256::from(777777), + chainId: U256::from(12), + }; + let payload_hash = B256::repeat_byte(0x88); + let event = ExecutingMessage { payloadHash: payload_hash, identifier }; + let data = ExecutingMessage::encode_log_data(&event); + + let valid_log = Log { address: Predeploys::CROSS_L2_INBOX, data }; + let invalid_log = Log { + address: Address::repeat_byte(0x99), + data: LogData::new_unchecked([B256::ZERO, B256::ZERO].to_vec(), Bytes::default()), + }; + + let logs = vec![&valid_log, &invalid_log]; + let mut iter = parse_logs_to_executing_msgs(logs.into_iter()); + assert_eq!(iter.next().unwrap().unwrap(), event); + assert!(iter.next().unwrap().is_none()); + } +} diff --git a/kona/crates/protocol/interop/src/replacement.rs b/rust/kona/crates/protocol/interop/src/replacement.rs similarity index 100% rename from kona/crates/protocol/interop/src/replacement.rs rename to rust/kona/crates/protocol/interop/src/replacement.rs diff --git a/kona/crates/protocol/interop/src/root.rs b/rust/kona/crates/protocol/interop/src/root.rs similarity index 94% rename from kona/crates/protocol/interop/src/root.rs rename to rust/kona/crates/protocol/interop/src/root.rs index 6ea6217d5ab..a5f525bccc8 100644 --- a/kona/crates/protocol/interop/src/root.rs +++ b/rust/kona/crates/protocol/interop/src/root.rs @@ -1,4 +1,4 @@ -//! The [SuperRoot] type. +//! The [`SuperRoot`] type. //! //! Represents a snapshot of the state of the superchain at a given integer timestamp. @@ -8,7 +8,7 @@ use alloy_eips::BlockNumHash; use alloy_primitives::{B256, Bytes, U256, keccak256}; use alloy_rlp::{Buf, BufMut}; -/// The [SuperRoot] is the snapshot of the superchain at a given timestamp. 
+/// The [`SuperRoot`] is the snapshot of the superchain at a given timestamp. #[derive(Debug, Clone, Eq, PartialEq)] #[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] @@ -20,14 +20,14 @@ pub struct SuperRoot { } impl SuperRoot { - /// Create a new [SuperRoot] with the given timestamp and output roots. + /// Create a new [`SuperRoot`] with the given timestamp and output roots. pub fn new(timestamp: u64, mut output_roots: Vec) -> Self { // Guarantee that the output roots are sorted by chain ID. output_roots.sort_by_key(|r| r.chain_id); Self { timestamp, output_roots } } - /// Decodes a [SuperRoot] from the given buffer. + /// Decodes a [`SuperRoot`] from the given buffer. pub fn decode(buf: &mut &[u8]) -> SuperRootResult { if buf.is_empty() { return Err(SuperRootError::UnexpectedLength); @@ -61,7 +61,7 @@ impl SuperRoot { Ok(Self { timestamp, output_roots }) } - /// Encode the [SuperRoot] into the given buffer. + /// Encode the [`SuperRoot`] into the given buffer. pub fn encode(&self, out: &mut dyn BufMut) { out.put_u8(SUPER_ROOT_VERSION); @@ -72,12 +72,12 @@ impl SuperRoot { } } - /// Returns the encoded length of the [SuperRoot]. + /// Returns the encoded length of the [`SuperRoot`]. pub const fn encoded_length(&self) -> usize { 1 + 8 + 64 * self.output_roots.len() } - /// Hashes the encoded [SuperRoot] using [keccak256]. + /// Hashes the encoded [`SuperRoot`] using [keccak256]. pub fn hash(&self) -> B256 { let mut rlp_buf = Vec::with_capacity(self.encoded_length()); self.encode(&mut rlp_buf); @@ -135,7 +135,7 @@ pub struct OutputRootWithChain { } impl OutputRootWithChain { - /// Create a new [OutputRootWithChain] with the given chain ID and output root hash. + /// Create a new [`OutputRootWithChain`] with the given chain ID and output root hash. 
pub const fn new(chain_id: u64, output_root: B256) -> Self { Self { chain_id, output_root } } diff --git a/kona/crates/protocol/interop/src/safety.rs b/rust/kona/crates/protocol/interop/src/safety.rs similarity index 97% rename from kona/crates/protocol/interop/src/safety.rs rename to rust/kona/crates/protocol/interop/src/safety.rs index 9fc61743633..f1860438562 100644 --- a/kona/crates/protocol/interop/src/safety.rs +++ b/rust/kona/crates/protocol/interop/src/safety.rs @@ -2,7 +2,7 @@ use alloc::string::String; use thiserror::Error; -/// Error when parsing SafetyLevel from string. +/// Error when parsing `SafetyLevel` from string. #[derive(Error, Debug)] #[error("Invalid SafetyLevel, error: {0}")] pub struct SafetyLevelParseError(pub String); diff --git a/kona/crates/protocol/interop/src/test_util.rs b/rust/kona/crates/protocol/interop/src/test_util.rs similarity index 100% rename from kona/crates/protocol/interop/src/test_util.rs rename to rust/kona/crates/protocol/interop/src/test_util.rs diff --git a/kona/crates/protocol/interop/src/traits.rs b/rust/kona/crates/protocol/interop/src/traits.rs similarity index 100% rename from kona/crates/protocol/interop/src/traits.rs rename to rust/kona/crates/protocol/interop/src/traits.rs diff --git a/rust/kona/crates/protocol/protocol/Cargo.toml b/rust/kona/crates/protocol/protocol/Cargo.toml new file mode 100644 index 00000000000..3362e5bfbab --- /dev/null +++ b/rust/kona/crates/protocol/protocol/Cargo.toml @@ -0,0 +1,132 @@ +[package] +name = "kona-protocol" +version = "0.4.5" +description = "Optimism protocol-specific types" + +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-genesis.workspace = true + +# OP Alloy +op-alloy-consensus.workspace = true +op-alloy-rpc-types.workspace = true 
+op-alloy-rpc-types-engine.workspace = true + +# Alloy +alloy-primitives = { workspace = true, features = ["map"] } +alloy-rlp.workspace = true +alloy-hardforks.workspace = true +alloy-eips.workspace = true +alloy-consensus.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-rpc-types-engine.workspace = true + +# Misc +tracing.workspace = true +thiserror.workspace = true +async-trait.workspace = true +unsigned-varint.workspace = true +derive_more = { workspace = true, features = ["display"] } + +# Compression +brotli.workspace = true +miniz_oxide.workspace = true +alloc-no-stdlib.workspace = true + +# `arbitrary` feature +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# `serde` feature +serde = { workspace = true, optional = true } +alloy-serde = { workspace = true, optional = true } + +# `test-utils` feature +spin = { workspace = true, optional = true } +tracing-subscriber = { workspace = true, features = ["fmt"], optional = true } +ambassador.workspace = true + +[dev-dependencies] +brotli = { workspace = true, features = ["std"] } +spin.workspace = true +rand = { workspace = true, features = ["std", "std_rng"] } +rstest.workspace = true +proptest.workspace = true +serde_json.workspace = true +alloy-sol-types.workspace = true +tokio = { workspace = true, features = ["full"] } +arbitrary = { workspace = true, features = ["derive"] } +tracing-subscriber = { workspace = true, features = ["fmt"] } +alloy-primitives = { workspace = true, features = ["arbitrary"] } +op-alloy-consensus.workspace = true +alloy-rpc-types-eth.workspace = true +op-alloy-rpc-types.workspace = true + +kona-registry.workspace = true + +[features] +default = [] +std = [ + "alloy-consensus/std", + "alloy-eips/std", + "alloy-primitives/std", + "alloy-rlp/std", + "alloy-rpc-types-engine/std", + "alloy-rpc-types-eth/std", + "alloy-serde?/std", + "brotli/std", + "derive_more/std", + "kona-genesis/std", + "miniz_oxide/std", + "op-alloy-consensus/std", + 
"op-alloy-rpc-types-engine/std", + "op-alloy-rpc-types/std", + "serde?/std", + "spin?/std", + "thiserror/std", + "tracing/std", + "unsigned-varint/std", + "tracing-subscriber?/std" +] +test-utils = [ "dep:spin", "dep:tracing-subscriber" ] +arbitrary = [ + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/arbitrary", + "alloy-primitives/rand", + "alloy-rpc-types-engine/arbitrary", + "alloy-rpc-types-eth/arbitrary", + "alloy-serde?/arbitrary", + "dep:arbitrary", + "kona-genesis/arbitrary", + "op-alloy-consensus/arbitrary", + "op-alloy-rpc-types-engine/arbitrary", + "op-alloy-rpc-types/arbitrary", + "std", +] +serde = [ + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-hardforks/serde", + "alloy-primitives/serde", + "alloy-rpc-types-engine/serde", + "alloy-rpc-types-eth/serde", + "dep:alloy-serde", + "dep:serde", + "kona-genesis/serde", + "op-alloy-consensus/serde", + "op-alloy-rpc-types-engine/serde", + "op-alloy-rpc-types/serde", + "tracing-subscriber?/serde", + "miniz_oxide/serde" +] diff --git a/rust/kona/crates/protocol/protocol/README.md b/rust/kona/crates/protocol/protocol/README.md new file mode 100644 index 00000000000..96274461e01 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/README.md @@ -0,0 +1,5 @@ +## `kona-protocol` + +Core protocol types for Optimism. + +These include types, constants, and methods for derivation as well as batch-submission. diff --git a/kona/crates/protocol/protocol/examples/frames_to_batch.rs b/rust/kona/crates/protocol/protocol/examples/frames_to_batch.rs similarity index 99% rename from kona/crates/protocol/protocol/examples/frames_to_batch.rs rename to rust/kona/crates/protocol/protocol/examples/frames_to_batch.rs index 0b7535f9d3c..5d079289df4 100644 --- a/kona/crates/protocol/protocol/examples/frames_to_batch.rs +++ b/rust/kona/crates/protocol/protocol/examples/frames_to_batch.rs @@ -1,4 +1,4 @@ -//! 
This example decodes raw [Frame]s and reads them into a [Channel] and into a [SingleBatch]. +//! This example decodes raw [Frame]s and reads them into a [Channel] and into a [`SingleBatch`]. use alloy_consensus::{SignableTransaction, TxEip1559, TxEnvelope}; use alloy_eips::eip2718::{Decodable2718, Encodable2718}; diff --git a/rust/kona/crates/protocol/protocol/src/attributes.rs b/rust/kona/crates/protocol/protocol/src/attributes.rs new file mode 100644 index 00000000000..21374eb7fc7 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/attributes.rs @@ -0,0 +1,233 @@ +//! Optimism Payload attributes that reference the parent L2 block. + +use crate::{BlockInfo, L2BlockInfo}; +use op_alloy_consensus::OpTxType; +use op_alloy_rpc_types_engine::OpPayloadAttributes; + +/// Optimism Payload Attributes with parent block reference and the L1 origin block. +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpAttributesWithParent { + /// The payload attributes. + pub attributes: OpPayloadAttributes, + /// The parent block reference. + pub parent: L2BlockInfo, + /// The L1 block that the attributes were derived from. + pub derived_from: Option, + /// Whether the current batch is the last in its span. + pub is_last_in_span: bool, +} + +impl OpAttributesWithParent { + /// Create a new [`OpAttributesWithParent`] instance. + pub const fn new( + attributes: OpPayloadAttributes, + parent: L2BlockInfo, + derived_from: Option, + is_last_in_span: bool, + ) -> Self { + Self { attributes, parent, derived_from, is_last_in_span } + } + + /// Returns the L2 block number for the payload attributes if made canonical. + /// Derived as the parent block height plus one. + pub const fn block_number(&self) -> u64 { + self.parent.block_info.number.saturating_add(1) + } + + /// Consumes `self` and returns the inner [`OpPayloadAttributes`]. 
+ pub fn take_inner(self) -> OpPayloadAttributes { + self.attributes + } + + /// Returns the payload attributes. + pub const fn attributes(&self) -> &OpPayloadAttributes { + &self.attributes + } + + /// Returns the parent block reference. + pub const fn parent(&self) -> &L2BlockInfo { + &self.parent + } + + /// Returns the L1 origin block reference. + pub const fn derived_from(&self) -> Option<&BlockInfo> { + self.derived_from.as_ref() + } + + /// Returns whether the current batch is the last in its span. + pub const fn is_last_in_span(&self) -> bool { + self.is_last_in_span + } + + /// Returns `true` if all transactions in the payload are deposits. + pub fn is_deposits_only(&self) -> bool { + self.attributes + .transactions + .iter() + .all(|tx| tx.first().is_some_and(|tx| tx[0] == OpTxType::Deposit as u8)) + } + + /// Converts the [`OpAttributesWithParent`] into a deposits-only payload. + pub fn as_deposits_only(&self) -> Self { + let mut attributes = self.attributes.clone(); + + attributes + .transactions + .iter_mut() + .for_each(|txs| txs.retain(|tx| tx.first().copied() == Some(OpTxType::Deposit as u8))); + + Self { + attributes, + parent: self.parent, + derived_from: self.derived_from, + is_last_in_span: self.is_last_in_span, + } + } + + /// Returns the number of transactions in the attributes. 
+ pub fn count_transactions(&self) -> u64 { + self.attributes().decoded_transactions().count().try_into().unwrap() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + + #[test] + fn test_op_attributes_with_parent() { + let attributes = OpPayloadAttributes::default(); + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes.clone(), parent, None, is_last_in_span); + + assert_eq!(op_attributes_with_parent.attributes(), &attributes); + assert_eq!(op_attributes_with_parent.parent(), &parent); + assert_eq!(op_attributes_with_parent.is_last_in_span(), is_last_in_span); + assert_eq!(op_attributes_with_parent.derived_from(), None); + } + + /// Test that the [`OpAttributesWithParent::as_deposits_only`] method strips out all + /// transactions that are not deposits. + #[test] + fn test_op_attributes_with_parent_as_deposits_only() { + let attributes = OpPayloadAttributes { + transactions: Some(vec![ + vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), + vec![OpTxType::Legacy as u8, 0x0, 0x11, 0x21].into(), + vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), + vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), + vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), + vec![].into(), + ]), + ..OpPayloadAttributes::default() + }; + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); + let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); + + assert_eq!( + deposits_only_attributes.attributes().transactions, + Some(vec![vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into()]) + ); + } + + #[test] + fn test_op_attributes_with_parent_as_deposits_multi_deposits() { + let attributes = OpPayloadAttributes { + transactions: Some(vec![ + vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), + vec![OpTxType::Legacy as u8, 0x0, 
0x11, 0x21].into(), + vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), + vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), + vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), + vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), + vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), + vec![].into(), + ]), + ..OpPayloadAttributes::default() + }; + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); + let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); + + assert_eq!( + deposits_only_attributes.attributes().transactions, + Some(vec![ + vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), + vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), + vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), + ]) + ); + } + + /// Test that the [`OpAttributesWithParent::as_deposits_only`] method strips out all + /// transactions that are not deposits. 
+ #[test] + fn test_op_attributes_with_parent_as_deposits_no_deposits() { + let attributes = OpPayloadAttributes { + transactions: Some(vec![ + vec![OpTxType::Legacy as u8, 0x0, 0x11, 0x21].into(), + vec![OpTxType::Eip2930 as u8, 0x0, 0x12, 0x22].into(), + vec![OpTxType::Eip1559 as u8, 0x0, 0x13, 0x23].into(), + vec![OpTxType::Eip7702 as u8, 0x0, 0x14, 0x24].into(), + vec![].into(), + ]), + ..OpPayloadAttributes::default() + }; + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); + let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); + + assert_eq!(deposits_only_attributes.attributes().transactions, Some(vec![])); + } + + #[test] + fn test_op_attributes_with_parent_as_deposits_only_deposits() { + let attributes = OpPayloadAttributes { + transactions: Some(vec![ + vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), + vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), + vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), + vec![].into(), + ]), + ..OpPayloadAttributes::default() + }; + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes, parent, None, is_last_in_span); + let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); + + assert_eq!( + deposits_only_attributes.attributes().transactions, + Some(vec![ + vec![OpTxType::Deposit as u8, 0x0, 0x10, 0x20].into(), + vec![OpTxType::Deposit as u8, 0x98, 0x21, 0x31].into(), + vec![OpTxType::Deposit as u8, 0x56, 0x31, 0x41].into(), + ]) + ); + } + + #[test] + fn test_op_attributes_with_parent_as_deposits_no_txs() { + let attributes = + OpPayloadAttributes { transactions: None, ..OpPayloadAttributes::default() }; + let parent = L2BlockInfo::default(); + let is_last_in_span = true; + let op_attributes_with_parent = + OpAttributesWithParent::new(attributes, 
parent, None, is_last_in_span); + let deposits_only_attributes = op_attributes_with_parent.as_deposits_only(); + + assert_eq!(deposits_only_attributes.attributes().transactions, None); + } +} diff --git a/kona/crates/protocol/protocol/src/batch/bits.rs b/rust/kona/crates/protocol/protocol/src/batch/bits.rs similarity index 95% rename from kona/crates/protocol/protocol/src/batch/bits.rs rename to rust/kona/crates/protocol/protocol/src/batch/bits.rs index b25158fb47f..815051b659c 100644 --- a/kona/crates/protocol/protocol/src/batch/bits.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/bits.rs @@ -26,7 +26,7 @@ impl SpanBatchBits { /// The bitlist is encoded as big-endian integer, left-padded with zeroes to a multiple of 8 /// bits. The encoded bitlist cannot be longer than `bit_length`. pub fn decode(b: &mut &[u8], bit_length: usize) -> Result { - let buffer_len = bit_length / 8 + if !bit_length.is_multiple_of(8) { 1 } else { 0 }; + let buffer_len = bit_length / 8 + if bit_length.is_multiple_of(8) { 0 } else { 1 }; let bits = if b.len() < buffer_len { let mut bits = vec![0; buffer_len]; bits[..b.len()].copy_from_slice(b); @@ -60,7 +60,7 @@ impl SpanBatchBits { // Round up, ensure enough bytes when number of bits is not a multiple of 8. // Alternative of (L+7)/8 is not overflow-safe. 
- let buf_len = bit_length / 8 + if !bit_length.is_multiple_of(8) { 1 } else { 0 }; + let buf_len = bit_length / 8 + if bit_length.is_multiple_of(8) { 0 } else { 1 }; let mut buf = vec![0; buf_len]; buf[buf_len - bits.0.len()..].copy_from_slice(bits.as_ref()); w.put_slice(&buf); @@ -73,18 +73,15 @@ impl SpanBatchBits { let bit_index = index % 8; // Check if the byte index is within the bounds of the bitlist - if byte_index < self.0.len() { + (byte_index < self.0.len()).then(|| { // Retrieve the specific byte that contains the bit we're interested in let byte = self.0[self.0.len() - byte_index - 1]; // Shift the bits of the byte to the right, based on the bit index, and // mask it with 1 to isolate the bit we're interested in. // If the result is not zero, the bit is set to 1, otherwise it's 0. - Some(if byte & (1 << bit_index) != 0 { 1 } else { 0 }) - } else { - // Return None if the index is out of bounds - None - } + if byte & (1 << bit_index) != 0 { 1 } else { 0 } + }) } /// Sets a bit in the [`SpanBatchBits`] bitlist. 
diff --git a/kona/crates/protocol/protocol/src/batch/core.rs b/rust/kona/crates/protocol/protocol/src/batch/core.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/core.rs rename to rust/kona/crates/protocol/protocol/src/batch/core.rs diff --git a/kona/crates/protocol/protocol/src/batch/element.rs b/rust/kona/crates/protocol/protocol/src/batch/element.rs similarity index 95% rename from kona/crates/protocol/protocol/src/batch/element.rs rename to rust/kona/crates/protocol/protocol/src/batch/element.rs index f5e3cf1da3c..1ae78b14bbe 100644 --- a/kona/crates/protocol/protocol/src/batch/element.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/element.rs @@ -4,7 +4,7 @@ use crate::SingleBatch; use alloc::vec::Vec; use alloy_primitives::Bytes; -/// MAX_SPAN_BATCH_ELEMENTS is the maximum number of blocks, transactions in total, +/// `MAX_SPAN_BATCH_ELEMENTS` is the maximum number of blocks, transactions in total, /// or transaction per block allowed in a span batch. pub const MAX_SPAN_BATCH_ELEMENTS: u64 = 10_000_000; diff --git a/kona/crates/protocol/protocol/src/batch/errors.rs b/rust/kona/crates/protocol/protocol/src/batch/errors.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/errors.rs rename to rust/kona/crates/protocol/protocol/src/batch/errors.rs diff --git a/kona/crates/protocol/protocol/src/batch/inclusion.rs b/rust/kona/crates/protocol/protocol/src/batch/inclusion.rs similarity index 94% rename from kona/crates/protocol/protocol/src/batch/inclusion.rs rename to rust/kona/crates/protocol/protocol/src/batch/inclusion.rs index 2c6bb9f1f82..28d6cf27dee 100644 --- a/kona/crates/protocol/protocol/src/batch/inclusion.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/inclusion.rs @@ -19,8 +19,8 @@ impl BatchWithInclusionBlock { } /// Validates the batch can be applied on top of the specified L2 safe head. - /// The first entry of the l1_blocks should match the origin of the l2_safe_head. 
- /// One or more consecutive l1_blocks should be provided. + /// The first entry of the `l1_blocks` should match the origin of the `l2_safe_head`. + /// One or more consecutive `l1_blocks` should be provided. /// In case of only a single L1 block, the decision whether a batch is valid may have to stay /// undecided. pub async fn check_batch( diff --git a/kona/crates/protocol/protocol/src/batch/mod.rs b/rust/kona/crates/protocol/protocol/src/batch/mod.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/mod.rs rename to rust/kona/crates/protocol/protocol/src/batch/mod.rs diff --git a/rust/kona/crates/protocol/protocol/src/batch/payload.rs b/rust/kona/crates/protocol/protocol/src/batch/payload.rs new file mode 100644 index 00000000000..4185867521a --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/batch/payload.rs @@ -0,0 +1,197 @@ +//! Raw Span Batch Payload + +use super::MAX_SPAN_BATCH_ELEMENTS; +use crate::{SpanBatchBits, SpanBatchError, SpanBatchTransactions, SpanDecodingError}; +use alloc::vec::Vec; +use alloy_primitives::bytes; + +/// Span Batch Payload +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct SpanBatchPayload { + /// Number of L2 block in the span + pub block_count: u64, + /// Standard span-batch bitlist of blockCount bits. Each bit indicates if the L1 origin is + /// changed at the L2 block. + pub origin_bits: SpanBatchBits, + /// List of transaction counts for each L2 block + pub block_tx_counts: Vec, + /// Transactions encoded in `SpanBatch` specs + pub txs: SpanBatchTransactions, +} + +impl SpanBatchPayload { + /// Decodes a [`SpanBatchPayload`] from a reader. + pub fn decode_payload(r: &mut &[u8]) -> Result { + let mut payload = Self::default(); + payload.decode_block_count(r)?; + payload.decode_origin_bits(r)?; + payload.decode_block_tx_counts(r)?; + payload.decode_txs(r)?; + Ok(payload) + } + + /// Encodes a [`SpanBatchPayload`] into a writer. 
+ pub fn encode_payload(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { + self.encode_block_count(w); + self.encode_origin_bits(w)?; + self.encode_block_tx_counts(w); + self.encode_txs(w) + } + + /// Decodes the origin bits from a reader. + pub fn decode_origin_bits(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { + if self.block_count > MAX_SPAN_BATCH_ELEMENTS { + return Err(SpanBatchError::TooBigSpanBatchSize); + } + + self.origin_bits = SpanBatchBits::decode(r, self.block_count as usize)?; + Ok(()) + } + + /// Decode a block count from a reader. + pub fn decode_block_count(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { + let (block_count, remaining) = unsigned_varint::decode::u64(r) + .map_err(|_| SpanBatchError::Decoding(SpanDecodingError::BlockCount))?; + // The number of transactions in a single L2 block cannot be greater than + // [MAX_SPAN_BATCH_ELEMENTS]. + if block_count > MAX_SPAN_BATCH_ELEMENTS { + return Err(SpanBatchError::TooBigSpanBatchSize); + } + if block_count == 0 { + return Err(SpanBatchError::EmptySpanBatch); + } + self.block_count = block_count; + *r = remaining; + Ok(()) + } + + /// Decode block transaction counts from a reader. + pub fn decode_block_tx_counts(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { + // Initially allocate the vec with the block count, to reduce re-allocations in the first + // few blocks. + let mut block_tx_counts = Vec::with_capacity(self.block_count as usize); + + for _ in 0..self.block_count { + let (block_tx_count, remaining) = unsigned_varint::decode::u64(r) + .map_err(|_| SpanBatchError::Decoding(SpanDecodingError::BlockTxCounts))?; + + // The number of transactions in a single L2 block cannot be greater than + // [MAX_SPAN_BATCH_ELEMENTS]. 
+ if block_tx_count > MAX_SPAN_BATCH_ELEMENTS { + return Err(SpanBatchError::TooBigSpanBatchSize); + } + block_tx_counts.push(block_tx_count); + *r = remaining; + } + self.block_tx_counts = block_tx_counts; + Ok(()) + } + + /// Decode transactions from a reader. + pub fn decode_txs(&mut self, r: &mut &[u8]) -> Result<(), SpanBatchError> { + if self.block_tx_counts.is_empty() { + return Err(SpanBatchError::EmptySpanBatch); + } + + let total_block_tx_count = + self.block_tx_counts.iter().try_fold(0u64, |acc, block_tx_count| { + acc.checked_add(*block_tx_count).ok_or(SpanBatchError::TooBigSpanBatchSize) + })?; + + // The total number of transactions in a span batch cannot be greater than + // [MAX_SPAN_BATCH_ELEMENTS]. + if total_block_tx_count > MAX_SPAN_BATCH_ELEMENTS { + return Err(SpanBatchError::TooBigSpanBatchSize); + } + self.txs.total_block_tx_count = total_block_tx_count; + self.txs.decode(r)?; + Ok(()) + } + + /// Encode the origin bits into a writer. + pub fn encode_origin_bits(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { + SpanBatchBits::encode(w, self.block_count as usize, &self.origin_bits) + } + + /// Encode the block count into a writer. + pub fn encode_block_count(&self, w: &mut dyn bytes::BufMut) { + let mut u64_varint_buf = [0u8; 10]; + w.put_slice(unsigned_varint::encode::u64(self.block_count, &mut u64_varint_buf)); + } + + /// Encode the block transaction counts into a writer. + pub fn encode_block_tx_counts(&self, w: &mut dyn bytes::BufMut) { + let mut u64_varint_buf = [0u8; 10]; + for block_tx_count in &self.block_tx_counts { + u64_varint_buf.fill(0); + w.put_slice(unsigned_varint::encode::u64(*block_tx_count, &mut u64_varint_buf)); + } + } + + /// Encode the transactions into a writer. 
+ pub fn encode_txs(&self, w: &mut dyn bytes::BufMut) -> Result<(), SpanBatchError> { + self.txs.encode(w) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + + #[test] + fn test_decode_origin_bits() { + let block_count = 10; + let encoded = vec![2; block_count / 8 + 1]; + let mut payload = + SpanBatchPayload { block_count: block_count as u64, ..Default::default() }; + payload.decode_origin_bits(&mut encoded.as_slice()).unwrap(); + assert_eq!(payload.origin_bits, SpanBatchBits::new(vec![2; block_count / 8 + 1])); + } + + #[test] + fn test_zero_block_count() { + let mut u64_varint_buf = [0; 10]; + let mut encoded = unsigned_varint::encode::u64(0, &mut u64_varint_buf); + let mut payload = SpanBatchPayload::default(); + let err = payload.decode_block_count(&mut encoded).unwrap_err(); + assert_eq!(err, SpanBatchError::EmptySpanBatch); + } + + #[test] + fn test_decode_block_count() { + let block_count = MAX_SPAN_BATCH_ELEMENTS; + let mut u64_varint_buf = [0; 10]; + let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); + let mut payload = SpanBatchPayload::default(); + payload.decode_block_count(&mut encoded).unwrap(); + assert_eq!(payload.block_count, block_count); + } + + #[test] + fn test_decode_block_count_errors() { + let block_count = MAX_SPAN_BATCH_ELEMENTS + 1; + let mut u64_varint_buf = [0; 10]; + let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); + let mut payload = SpanBatchPayload::default(); + let err = payload.decode_block_count(&mut encoded).unwrap_err(); + assert_eq!(err, SpanBatchError::TooBigSpanBatchSize); + } + + #[test] + fn test_decode_block_tx_counts() { + let block_count = 2; + let mut u64_varint_buf = [0; 10]; + let mut encoded = unsigned_varint::encode::u64(block_count, &mut u64_varint_buf); + let mut payload = SpanBatchPayload::default(); + payload.decode_block_count(&mut encoded).unwrap(); + let mut r: Vec = Vec::new(); + for _ in 0..2 { + let mut buf = [0u8; 10]; + 
let encoded = unsigned_varint::encode::u64(2, &mut buf); + r.append(&mut encoded.to_vec()); + } + payload.decode_block_tx_counts(&mut r.as_slice()).unwrap(); + assert_eq!(payload.block_tx_counts, vec![2, 2]); + } +} diff --git a/kona/crates/protocol/protocol/src/batch/prefix.rs b/rust/kona/crates/protocol/protocol/src/batch/prefix.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/prefix.rs rename to rust/kona/crates/protocol/protocol/src/batch/prefix.rs diff --git a/kona/crates/protocol/protocol/src/batch/raw.rs b/rust/kona/crates/protocol/protocol/src/batch/raw.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/raw.rs rename to rust/kona/crates/protocol/protocol/src/batch/raw.rs diff --git a/kona/crates/protocol/protocol/src/batch/reader.rs b/rust/kona/crates/protocol/protocol/src/batch/reader.rs similarity index 98% rename from kona/crates/protocol/protocol/src/batch/reader.rs rename to rust/kona/crates/protocol/protocol/src/batch/reader.rs index 3c06ab4ec16..25582968b4f 100644 --- a/kona/crates/protocol/protocol/src/batch/reader.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/reader.rs @@ -28,7 +28,7 @@ pub enum DecompressionError { } /// Batch Reader provides a function that iteratively consumes batches from the reader. -/// The L1Inclusion block is also provided at creation time. +/// The `L1Inclusion` block is also provided at creation time. /// Warning: the batch reader can read every batch-type. /// The caller of the batch-reader should filter the results. 
#[derive(Debug)] diff --git a/kona/crates/protocol/protocol/src/batch/single.rs b/rust/kona/crates/protocol/protocol/src/batch/single.rs similarity index 99% rename from kona/crates/protocol/protocol/src/batch/single.rs rename to rust/kona/crates/protocol/protocol/src/batch/single.rs index 7d2d62aac54..aff2d263f68 100644 --- a/kona/crates/protocol/protocol/src/batch/single.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/single.rs @@ -146,7 +146,9 @@ impl SingleBatch { let next_origin = l1_blocks[1]; // Check if the next L1 Origin could have been adopted if self.timestamp >= next_origin.timestamp { - return BatchValidity::Drop(BatchDropReason::SequencerDriftNotAdoptedNextOrigin); + return BatchValidity::Drop( + BatchDropReason::SequencerDriftNotAdoptedNextOrigin, + ); } } } @@ -164,7 +166,7 @@ impl SingleBatch { } // We can do this check earlier, but it's intensive so we do it last for the sad-path. - for tx in self.transactions.iter() { + for tx in &self.transactions { if tx.is_empty() { return BatchValidity::Drop(BatchDropReason::EmptyTransaction); } diff --git a/kona/crates/protocol/protocol/src/batch/span.rs b/rust/kona/crates/protocol/protocol/src/batch/span.rs similarity index 99% rename from kona/crates/protocol/protocol/src/batch/span.rs rename to rust/kona/crates/protocol/protocol/src/batch/span.rs index 096d498557e..5caaa34ea8b 100644 --- a/kona/crates/protocol/protocol/src/batch/span.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/span.rs @@ -341,9 +341,10 @@ impl SpanBatch { seq_num: u64, ) -> Result<(), SpanBatchError> { // If the new element is not ordered with respect to the last element, panic. - if !self.batches.is_empty() && self.peek(0).timestamp > singular_batch.timestamp { - panic!("Batch is not ordered"); - } + assert!( + self.batches.is_empty() || self.peek(0).timestamp <= singular_batch.timestamp, + "Batch is not ordered" + ); let SingleBatch { epoch_hash, parent_hash, .. 
} = singular_batch; @@ -468,12 +469,11 @@ impl SpanBatch { return BatchValidity::Drop( BatchDropReason::SequencerDriftNotAdoptedNextOrigin, ); - } else { - info!( - target: "batch_span", - "continuing with empty batch before late L1 block to preserve L2 time invariant" - ); } + info!( + target: "batch_span", + "continuing with empty batch before late L1 block to preserve L2 time invariant" + ); } } else { // If the sequencer is ignoring the time drift rule, then drop the batch and @@ -660,6 +660,7 @@ impl SpanBatch { // If the span batch does not overlap the current safe chain, parent block should be the L2 // safe head. let mut parent_num = l2_safe_head.block_info.number; + #[allow(clippy::useless_let_if_seq)] let mut parent_block = l2_safe_head; if self.starting_timestamp() < next_timestamp { if self.starting_timestamp() > l2_safe_head.block_info.timestamp { diff --git a/kona/crates/protocol/protocol/src/batch/testdata/raw_batch.hex b/rust/kona/crates/protocol/protocol/src/batch/testdata/raw_batch.hex similarity index 100% rename from kona/crates/protocol/protocol/src/batch/testdata/raw_batch.hex rename to rust/kona/crates/protocol/protocol/src/batch/testdata/raw_batch.hex diff --git a/kona/crates/protocol/protocol/src/batch/traits.rs b/rust/kona/crates/protocol/protocol/src/batch/traits.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/traits.rs rename to rust/kona/crates/protocol/protocol/src/batch/traits.rs diff --git a/kona/crates/protocol/protocol/src/batch/transactions.rs b/rust/kona/crates/protocol/protocol/src/batch/transactions.rs similarity index 98% rename from kona/crates/protocol/protocol/src/batch/transactions.rs rename to rust/kona/crates/protocol/protocol/src/batch/transactions.rs index 852bcf8f070..3938d31aa1e 100644 --- a/kona/crates/protocol/protocol/src/batch/transactions.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/transactions.rs @@ -251,7 +251,9 @@ impl SpanBatchTransactions { 
.ok_or(SpanBatchError::Decoding(SpanDecodingError::InvalidTransactionData))?; let to = if bit == 0 { if self.tx_tos.len() <= to_idx { - return Err(SpanBatchError::Decoding(SpanDecodingError::InvalidTransactionData)); + return Err(SpanBatchError::Decoding( + SpanDecodingError::InvalidTransactionData, + )); } to_idx += 1; Some(self.tx_tos[to_idx - 1]) @@ -311,7 +313,9 @@ impl SpanBatchTransactions { (sig, tx.to(), tx.nonce(), tx.gas_limit(), tx.chain_id()) } _ => { - return Err(SpanBatchError::Decoding(SpanDecodingError::InvalidTransactionData)); + return Err(SpanBatchError::Decoding( + SpanDecodingError::InvalidTransactionData, + )); } }; diff --git a/rust/kona/crates/protocol/protocol/src/batch/tx.rs b/rust/kona/crates/protocol/protocol/src/batch/tx.rs new file mode 100644 index 00000000000..67a3f3bf9a6 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/batch/tx.rs @@ -0,0 +1,57 @@ +//! Transaction Types + +use crate::Frame; +use alloc::vec::Vec; +use alloy_primitives::Bytes; + +/// `BatchTransaction` is a set of [`Frame`]s that can be [`Into::into`] [`Bytes`]. +/// if the size exceeds the desired threshold. +#[derive(Debug, Clone)] +pub struct BatchTransaction { + /// The frames in the batch. + pub frames: Vec, + /// The size of the potential transaction. + pub size: usize, +} + +impl BatchTransaction { + /// Returns the size of the transaction. + pub const fn size(&self) -> usize { + self.size + } + + /// Returns if the transaction has reached the max frame count. + pub const fn is_full(&self, max_frames: u16) -> bool { + self.frames.len() as u16 >= max_frames + } + + /// Returns the [`BatchTransaction`] as a [`Bytes`]. 
+ pub fn to_bytes(&self) -> Bytes { + self.frames + .iter() + .fold(Vec::new(), |mut acc, frame| { + acc.append(&mut frame.encode()); + acc + }) + .into() + } +} + +#[cfg(test)] +mod test { + use super::*; + use alloc::vec; + + #[test] + fn test_batch_transaction() { + let frame = Frame { id: [0xFF; 16], number: 0xEE, data: vec![0xDD; 50], is_last: true }; + let batch = BatchTransaction { frames: vec![frame.clone(); 5], size: 5 * frame.size() }; + let bytes: Bytes = batch.to_bytes(); + let bytes = std::iter::once(&crate::DERIVATION_VERSION_0) + .chain(bytes.iter()) + .copied() + .collect::>(); + let frames = Frame::parse_frames(&bytes).unwrap(); + assert_eq!(frames, vec![frame; 5]); + } +} diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/eip1559.rs b/rust/kona/crates/protocol/protocol/src/batch/tx_data/eip1559.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/tx_data/eip1559.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/eip1559.rs diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/eip2930.rs b/rust/kona/crates/protocol/protocol/src/batch/tx_data/eip2930.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/tx_data/eip2930.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/eip2930.rs diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/eip7702.rs b/rust/kona/crates/protocol/protocol/src/batch/tx_data/eip7702.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/tx_data/eip7702.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/eip7702.rs diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/legacy.rs b/rust/kona/crates/protocol/protocol/src/batch/tx_data/legacy.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/tx_data/legacy.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/legacy.rs diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/mod.rs 
b/rust/kona/crates/protocol/protocol/src/batch/tx_data/mod.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/tx_data/mod.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/mod.rs diff --git a/kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs b/rust/kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs similarity index 97% rename from kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs rename to rust/kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs index 23a08b07e12..8ade66108d0 100644 --- a/kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/tx_data/wrapper.rs @@ -162,7 +162,9 @@ impl SpanBatchTransactionData { } Self::Eip7702(data) => { let Some(addr) = to else { - return Err(SpanBatchError::Decoding(SpanDecodingError::InvalidTransactionData)); + return Err(SpanBatchError::Decoding( + SpanDecodingError::InvalidTransactionData, + )); }; TxEnvelope::Eip7702(data.to_signed_tx(nonce, gas, addr, chain_id, signature)?) } diff --git a/kona/crates/protocol/protocol/src/batch/type.rs b/rust/kona/crates/protocol/protocol/src/batch/type.rs similarity index 100% rename from kona/crates/protocol/protocol/src/batch/type.rs rename to rust/kona/crates/protocol/protocol/src/batch/type.rs diff --git a/kona/crates/protocol/protocol/src/batch/validity.rs b/rust/kona/crates/protocol/protocol/src/batch/validity.rs similarity index 98% rename from kona/crates/protocol/protocol/src/batch/validity.rs rename to rust/kona/crates/protocol/protocol/src/batch/validity.rs index b26e5d6914a..8d671f174ff 100644 --- a/kona/crates/protocol/protocol/src/batch/validity.rs +++ b/rust/kona/crates/protocol/protocol/src/batch/validity.rs @@ -28,7 +28,7 @@ pub enum BatchDropReason { // === Timestamp/origin relationship drops === /// Batch timestamp is before the L1 origin timestamp. TimestampBeforeL1Origin, - /// Sequencer drift overflow (checked_add failed). 
+ /// Sequencer drift overflow (`checked_add` failed). SequencerDriftOverflow, /// Batch exceeded sequencer time drift with non-empty transactions. SequencerDriftExceeded, @@ -62,7 +62,7 @@ pub enum BatchDropReason { OverlappedTxCountMismatch, /// Overlapped block's transaction does not match. OverlappedTxMismatch, - /// Failed to extract L2BlockInfo from execution payload. + /// Failed to extract `L2BlockInfo` from execution payload. L2BlockInfoExtractionFailed, /// Overlapped block's L1 origin number does not match. OverlappedL1OriginMismatch, diff --git a/rust/kona/crates/protocol/protocol/src/block.rs b/rust/kona/crates/protocol/protocol/src/block.rs new file mode 100644 index 00000000000..832b6e1173f --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/block.rs @@ -0,0 +1,586 @@ +//! Block Types for Optimism. + +use crate::{DecodeError, L1BlockInfoTx}; +use alloc::vec::Vec; +use alloy_consensus::{Block, Transaction, Typed2718}; +use alloy_eips::{BlockNumHash, eip2718::Eip2718Error, eip7685::EMPTY_REQUESTS_HASH}; +use alloy_primitives::B256; +use alloy_rpc_types_engine::{CancunPayloadFields, PraguePayloadFields}; +use alloy_rpc_types_eth::Block as RpcBlock; +use derive_more::Display; +use kona_genesis::ChainGenesis; +use op_alloy_consensus::{OpBlock, OpTxEnvelope}; +use op_alloy_rpc_types_engine::{OpExecutionPayload, OpExecutionPayloadSidecar, OpPayloadError}; + +/// Block Header Info +#[derive(Debug, Clone, Display, Copy, Eq, Hash, PartialEq, Default)] +#[display( + "BlockInfo {{ hash: {hash}, number: {number}, parent_hash: {parent_hash}, timestamp: {timestamp} }}" +)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct BlockInfo { + /// The block hash + pub hash: B256, + /// The block number + pub number: u64, + /// The parent block hash + pub parent_hash: B256, + /// The block timestamp 
+ pub timestamp: u64, +} + +impl BlockInfo { + /// Instantiates a new [`BlockInfo`]. + pub const fn new(hash: B256, number: u64, parent_hash: B256, timestamp: u64) -> Self { + Self { hash, number, parent_hash, timestamp } + } + + /// Returns the block ID. + pub const fn id(&self) -> BlockNumHash { + BlockNumHash { hash: self.hash, number: self.number } + } + + /// Returns `true` if this [`BlockInfo`] is the direct parent of the given block. + pub fn is_parent_of(&self, block: &Self) -> bool { + self.number + 1 == block.number && self.hash == block.parent_hash + } +} + +impl From> for BlockInfo { + fn from(block: Block) -> Self { + Self::from(&block) + } +} + +impl From<&Block> for BlockInfo { + fn from(block: &Block) -> Self { + Self { + hash: block.header.hash_slow(), + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + } + } +} + +impl From> for BlockInfo { + fn from(block: RpcBlock) -> Self { + Self { + hash: block.header.hash_slow(), + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + } + } +} + +impl From<&RpcBlock> for BlockInfo { + fn from(block: &RpcBlock) -> Self { + Self { + hash: block.header.hash_slow(), + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + } + } +} + +/// L2 Block Header Info +#[derive(Debug, Display, Clone, Copy, Hash, Eq, PartialEq, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +#[display( + "L2BlockInfo {{ block_info: {block_info}, l1_origin: {l1_origin:?}, seq_num: {seq_num} }}" +)] +pub struct L2BlockInfo { + /// The base [`BlockInfo`] + #[cfg_attr(feature = "serde", serde(flatten))] + pub block_info: BlockInfo, + /// The L1 origin [`BlockNumHash`] + #[cfg_attr(feature = "serde", serde(rename = "l1origin", alias = "l1Origin"))] + pub l1_origin: 
BlockNumHash, + /// The sequence number of the L2 block + #[cfg_attr(feature = "serde", serde(rename = "sequenceNumber", alias = "seqNum"))] + pub seq_num: u64, +} + +impl L2BlockInfo { + /// Returns the block hash. + pub const fn hash(&self) -> B256 { + self.block_info.hash + } +} + +#[cfg(feature = "arbitrary")] +impl arbitrary::Arbitrary<'_> for L2BlockInfo { + fn arbitrary(g: &mut arbitrary::Unstructured<'_>) -> arbitrary::Result { + Ok(Self { + block_info: g.arbitrary()?, + l1_origin: BlockNumHash { number: g.arbitrary()?, hash: g.arbitrary()? }, + seq_num: g.arbitrary()?, + }) + } +} + +/// An error that can occur when converting an OP [`Block`] to [`L2BlockInfo`]. +#[derive(Debug, thiserror::Error)] +pub enum FromBlockError { + /// The genesis block hash does not match the expected value. + #[error("Invalid genesis hash")] + InvalidGenesisHash, + /// The L2 block is missing the L1 info deposit transaction. + #[error("L2 block is missing L1 info deposit transaction ({0})")] + MissingL1InfoDeposit(B256), + /// The first payload transaction has an unexpected type. + #[error("First payload transaction has unexpected type: {0}")] + UnexpectedTxType(u8), + /// Failed to decode the first transaction into an OP transaction. + #[error("Failed to decode the first transaction into an OP transaction: {0}")] + TxEnvelopeDecodeError(Eip2718Error), + /// The first payload transaction is not a deposit transaction. + #[error("First payload transaction is not a deposit transaction, type: {0}")] + FirstTxNonDeposit(u8), + /// Failed to decode the [`L1BlockInfoTx`] from the deposit transaction. + #[error("Failed to decode the L1BlockInfoTx from the deposit transaction: {0}")] + BlockInfoDecodeError(#[from] DecodeError), + /// Failed to convert [`OpExecutionPayload`] to [`OpBlock`]. 
+ #[error(transparent)] + OpPayload(#[from] OpPayloadError), +} + +impl PartialEq for FromBlockError { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::InvalidGenesisHash, Self::InvalidGenesisHash) | + (Self::TxEnvelopeDecodeError(_), Self::TxEnvelopeDecodeError(_)) => true, + (Self::MissingL1InfoDeposit(a), Self::MissingL1InfoDeposit(b)) => a == b, + (Self::UnexpectedTxType(a), Self::UnexpectedTxType(b)) | + (Self::FirstTxNonDeposit(a), Self::FirstTxNonDeposit(b)) => a == b, + (Self::BlockInfoDecodeError(a), Self::BlockInfoDecodeError(b)) => a == b, + _ => false, + } + } +} + +impl From for FromBlockError { + fn from(value: Eip2718Error) -> Self { + Self::TxEnvelopeDecodeError(value) + } +} + +impl L2BlockInfo { + /// Instantiates a new [`L2BlockInfo`]. + pub const fn new(block_info: BlockInfo, l1_origin: BlockNumHash, seq_num: u64) -> Self { + Self { block_info, l1_origin, seq_num } + } + + /// Constructs an [`L2BlockInfo`] from a given OP [`Block`] and [`ChainGenesis`]. + pub fn from_block_and_genesis>( + block: &Block, + genesis: &ChainGenesis, + ) -> Result { + let block_info = BlockInfo::from(block); + + let (l1_origin, sequence_number) = if block_info.number == genesis.l2.number { + if block_info.hash != genesis.l2.hash { + return Err(FromBlockError::InvalidGenesisHash); + } + (genesis.l1, 0) + } else { + if block.body.transactions.is_empty() { + return Err(FromBlockError::MissingL1InfoDeposit(block_info.hash)); + } + + let tx = block.body.transactions[0].as_ref(); + let Some(tx) = tx.as_deposit() else { + return Err(FromBlockError::FirstTxNonDeposit(tx.ty())); + }; + + let l1_info = L1BlockInfoTx::decode_calldata(tx.input().as_ref()) + .map_err(FromBlockError::BlockInfoDecodeError)?; + (l1_info.id(), l1_info.sequence_number()) + }; + + Ok(Self { block_info, l1_origin, seq_num: sequence_number }) + } + + /// Constructs an [`L2BlockInfo`] From a given [`OpExecutionPayload`] and [`ChainGenesis`]. 
+ pub fn from_payload_and_genesis( + payload: OpExecutionPayload, + parent_beacon_block_root: Option, + genesis: &ChainGenesis, + ) -> Result { + let block: OpBlock = match payload { + OpExecutionPayload::V4(_) => { + let sidecar = OpExecutionPayloadSidecar::v4( + CancunPayloadFields::new( + parent_beacon_block_root.unwrap_or_default(), + Vec::new(), + ), + PraguePayloadFields::new(EMPTY_REQUESTS_HASH), + ); + payload.try_into_block_with_sidecar(&sidecar)? + } + OpExecutionPayload::V3(_) => { + let sidecar = OpExecutionPayloadSidecar::v3(CancunPayloadFields::new( + parent_beacon_block_root.unwrap_or_default(), + Vec::new(), + )); + payload.try_into_block_with_sidecar(&sidecar)? + } + _ => payload.try_into_block()?, + }; + Self::from_block_and_genesis(&block, genesis) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::string::ToString; + use alloy_consensus::{Header, TxEnvelope}; + use alloy_primitives::b256; + use op_alloy_consensus::OpBlock; + + #[test] + fn test_rpc_block_into_info() { + let block: alloy_rpc_types_eth::Block = alloy_rpc_types_eth::Block { + header: alloy_rpc_types_eth::Header { + hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), + inner: alloy_consensus::Header { + number: 1, + parent_hash: b256!( + "0202020202020202020202020202020202020202020202020202020202020202" + ), + timestamp: 1, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + let expected = BlockInfo { + hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), + number: 1, + parent_hash: b256!("0202020202020202020202020202020202020202020202020202020202020202"), + timestamp: 1, + }; + let block = block.into_consensus(); + assert_eq!(BlockInfo::from(block), expected); + } + + #[test] + fn test_from_block_and_genesis() { + use crate::test_utils::RAW_BEDROCK_INFO_TX; + use alloc::vec; + let genesis = ChainGenesis { + l1: BlockNumHash { hash: B256::from([4; 32]), number: 2 }, + l2: 
BlockNumHash { hash: B256::from([5; 32]), number: 1 }, + ..Default::default() + }; + let tx_env = alloy_rpc_types_eth::Transaction { + inner: alloy_consensus::transaction::Recovered::new_unchecked( + op_alloy_consensus::OpTxEnvelope::Deposit(alloy_primitives::Sealed::new( + op_alloy_consensus::TxDeposit { + input: alloy_primitives::Bytes::from(&RAW_BEDROCK_INFO_TX), + ..Default::default() + }, + )), + Default::default(), + ), + block_hash: None, + block_number: Some(1), + effective_gas_price: Some(1), + transaction_index: Some(0), + }; + let block: alloy_rpc_types_eth::Block = + alloy_rpc_types_eth::Block { + header: alloy_rpc_types_eth::Header { + hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), + inner: alloy_consensus::Header { + number: 3, + parent_hash: b256!( + "0202020202020202020202020202020202020202020202020202020202020202" + ), + timestamp: 1, + ..Default::default() + }, + ..Default::default() + }, + transactions: alloy_rpc_types_eth::BlockTransactions::Full(vec![ + op_alloy_rpc_types::Transaction { + inner: tx_env, + deposit_nonce: None, + deposit_receipt_version: None, + }, + ]), + ..Default::default() + }; + let expected = L2BlockInfo { + block_info: BlockInfo { + hash: b256!("e65ecd961cee8e4d2d6e1d424116f6fe9a794df0244578b6d5860a3d2dfcd97e"), + number: 3, + parent_hash: b256!( + "0202020202020202020202020202020202020202020202020202020202020202" + ), + timestamp: 1, + }, + l1_origin: BlockNumHash { + hash: b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), + number: 18334955, + }, + seq_num: 4, + }; + let block = block.into_consensus(); + let derived = L2BlockInfo::from_block_and_genesis(&block, &genesis).unwrap(); + assert_eq!(derived, expected); + } + + #[test] + fn test_from_block_error_partial_eq() { + assert_eq!(FromBlockError::InvalidGenesisHash, FromBlockError::InvalidGenesisHash); + assert_eq!( + FromBlockError::MissingL1InfoDeposit(b256!( + 
"04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b" + )), + FromBlockError::MissingL1InfoDeposit(b256!( + "04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b" + )), + ); + assert_eq!(FromBlockError::UnexpectedTxType(1), FromBlockError::UnexpectedTxType(1)); + assert_eq!( + FromBlockError::TxEnvelopeDecodeError(Eip2718Error::UnexpectedType(1)), + FromBlockError::TxEnvelopeDecodeError(Eip2718Error::UnexpectedType(1)) + ); + assert_eq!(FromBlockError::FirstTxNonDeposit(1), FromBlockError::FirstTxNonDeposit(1)); + assert_eq!( + FromBlockError::BlockInfoDecodeError(DecodeError::InvalidSelector), + FromBlockError::BlockInfoDecodeError(DecodeError::InvalidSelector) + ); + } + + #[test] + fn test_l2_block_info_invalid_genesis_hash() { + let genesis = ChainGenesis { + l1: BlockNumHash { hash: B256::from([4; 32]), number: 2 }, + l2: BlockNumHash { hash: B256::from([5; 32]), number: 1 }, + ..Default::default() + }; + let op_block = OpBlock { + header: Header { + number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + ..Default::default() + }, + body: Default::default(), + }; + let err = L2BlockInfo::from_block_and_genesis(&op_block, &genesis).unwrap_err(); + assert_eq!(err, FromBlockError::InvalidGenesisHash); + } + + #[test] + fn test_from_block() { + let block: Block = Block { + header: Header { + number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + ..Default::default() + }, + body: Default::default(), + }; + let block_info = BlockInfo::from(&block); + assert_eq!( + block_info, + BlockInfo { + hash: b256!("04d6fefc87466405ba0e5672dcf5c75325b33e5437da2a42423080aab8be889b"), + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + } + ); + } + + #[test] + fn test_block_info_display() { + let hash = B256::from([1; 32]); + let parent_hash = B256::from([2; 32]); + let block_info = BlockInfo::new(hash, 1, parent_hash, 1); + assert_eq!( + block_info.to_string(), + "BlockInfo { 
hash: 0x0101010101010101010101010101010101010101010101010101010101010101, number: 1, parent_hash: 0x0202020202020202020202020202020202020202020202020202020202020202, timestamp: 1 }" + ); + } + + #[test] + #[cfg(feature = "arbitrary")] + fn test_arbitrary_block_info() { + use arbitrary::Arbitrary; + use rand::Rng; + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + BlockInfo::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); + } + + #[test] + #[cfg(feature = "arbitrary")] + fn test_arbitrary_l2_block_info() { + use arbitrary::Arbitrary; + use rand::Rng; + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + L2BlockInfo::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(); + } + + #[test] + fn test_block_id_bounds() { + let block_info = BlockInfo { + hash: B256::from([1; 32]), + number: 0, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }; + let expected = BlockNumHash { hash: B256::from([1; 32]), number: 0 }; + assert_eq!(block_info.id(), expected); + + let block_info = BlockInfo { + hash: B256::from([1; 32]), + number: u64::MAX, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }; + let expected = BlockNumHash { hash: B256::from([1; 32]), number: u64::MAX }; + assert_eq!(block_info.id(), expected); + } + + #[test] + #[cfg(feature = "serde")] + fn test_deserialize_block_info() { + let block_info = BlockInfo { + hash: B256::from([1; 32]), + number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }; + + let json = r#"{ + "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", + "number": 1, + "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", + "timestamp": 1 + }"#; + + let deserialized: BlockInfo = serde_json::from_str(json).unwrap(); + assert_eq!(deserialized, block_info); + } + + #[test] + #[cfg(feature = "serde")] + fn test_deserialize_block_info_with_hex() { + let block_info = BlockInfo { + hash: B256::from([1; 32]), + 
number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }; + + let json = r#"{ + "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", + "number": 1, + "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", + "timestamp": 1 + }"#; + + let deserialized: BlockInfo = serde_json::from_str(json).unwrap(); + assert_eq!(deserialized, block_info); + } + + #[test] + #[cfg(feature = "serde")] + fn test_deserialize_l2_block_info() { + let l2_block_info = L2BlockInfo { + block_info: BlockInfo { + hash: B256::from([1; 32]), + number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }, + l1_origin: BlockNumHash { hash: B256::from([3; 32]), number: 2 }, + seq_num: 3, + }; + + let json = r#"{ + "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", + "number": 1, + "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", + "timestamp": 1, + "l1origin": { + "hash": "0x0303030303030303030303030303030303030303030303030303030303030303", + "number": 2 + }, + "sequenceNumber": 3 + }"#; + + let deserialized: L2BlockInfo = serde_json::from_str(json).unwrap(); + assert_eq!(deserialized, l2_block_info); + } + + #[test] + #[cfg(feature = "serde")] + fn test_deserialize_l2_block_info_hex() { + let l2_block_info = L2BlockInfo { + block_info: BlockInfo { + hash: B256::from([1; 32]), + number: 1, + parent_hash: B256::from([2; 32]), + timestamp: 1, + }, + l1_origin: BlockNumHash { hash: B256::from([3; 32]), number: 2 }, + seq_num: 3, + }; + + let json = r#"{ + "hash": "0x0101010101010101010101010101010101010101010101010101010101010101", + "number": 1, + "parentHash": "0x0202020202020202020202020202020202020202020202020202020202020202", + "timestamp": 1, + "l1origin": { + "hash": "0x0303030303030303030303030303030303030303030303030303030303030303", + "number": 2 + }, + "sequenceNumber": 3 + }"#; + + let deserialized: L2BlockInfo = serde_json::from_str(json).unwrap(); + 
assert_eq!(deserialized, l2_block_info); + } + + #[test] + fn test_is_parent_of() { + let parent = BlockInfo { + hash: B256::from([1u8; 32]), + number: 10, + parent_hash: B256::from([0u8; 32]), + timestamp: 1000, + }; + let child = BlockInfo { + hash: B256::from([2u8; 32]), + number: 11, + parent_hash: parent.hash, + timestamp: 1010, + }; + let unrelated = BlockInfo { + hash: B256::from([3u8; 32]), + number: 12, + parent_hash: B256::from([9u8; 32]), + timestamp: 1020, + }; + + assert!(parent.is_parent_of(&child)); + assert!(!child.is_parent_of(&parent)); + assert!(!parent.is_parent_of(&unrelated)); + } +} diff --git a/rust/kona/crates/protocol/protocol/src/brotli.rs b/rust/kona/crates/protocol/protocol/src/brotli.rs new file mode 100644 index 00000000000..ea5d0c43358 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/brotli.rs @@ -0,0 +1,105 @@ +//! Contains brotli decompression utilities. + +use alloc::{vec, vec::Vec}; +use alloc_no_stdlib::*; +use brotli::*; +use core::ops; + +use crate::MAX_SPAN_BATCH_ELEMENTS; + +/// A frame decompression error. +#[derive(thiserror::Error, Debug, PartialEq, Eq)] +pub enum BrotliDecompressionError { + /// The buffer exceeds the [`MAX_SPAN_BATCH_ELEMENTS`] protocol parameter. + #[error("The batch exceeds the maximum number of elements: {max_size}", max_size = MAX_SPAN_BATCH_ELEMENTS)] + BatchTooLarge, +} + +/// Decompresses the given bytes data using the Brotli decompressor implemented +/// in the [`brotli`](https://crates.io/crates/brotli) crate. 
+#[allow(clippy::large_stack_frames)] +pub fn decompress_brotli( + data: &[u8], + max_rlp_bytes_per_channel: usize, +) -> Result, BrotliDecompressionError> { + declare_stack_allocator_struct!(MemPool, 4096, stack); + + let mut u8_buffer = vec![0; 32 * 1024 * 1024].into_boxed_slice(); + let mut u32_buffer = vec![0; 1024 * 1024].into_boxed_slice(); + let mut hc_buffer = vec![HuffmanCode::default(); 4 * 1024 * 1024].into_boxed_slice(); + let u8_allocator = MemPool::::new_allocator(&mut u8_buffer, bzero); + let u32_allocator = MemPool::::new_allocator(&mut u32_buffer, bzero); + let hc_allocator = MemPool::::new_allocator(&mut hc_buffer, bzero); + let mut brotli_state = BrotliState::new(u8_allocator, u32_allocator, hc_allocator); + + // Setup the decompressor inputs and outputs + let mut output = vec![0; data.len()]; + let mut available_in = data.len(); + let mut input_offset = 0; + let mut available_out = output.len(); + let mut output_offset = 0; + let mut written = 0; + + // Decompress the data stream until success or failure + while matches!( + brotli::BrotliDecompressStream( + &mut available_in, + &mut input_offset, + data, + &mut available_out, + &mut output_offset, + &mut output, + &mut written, + &mut brotli_state, + ), + brotli::BrotliResult::NeedsMoreOutput + ) { + // Resize the output buffer to double the size, following standard + // practice for buffer resizing in streams. 
+ let old_len = output.len(); + let new_len = old_len * 2; + + if new_len > max_rlp_bytes_per_channel { + return Err(BrotliDecompressionError::BatchTooLarge); + } + + output.resize(new_len, 0); + available_out += old_len; + } + + // Truncate the output buffer to the written bytes + output.truncate(written); + + Ok(output) +} + +#[cfg(test)] +mod test { + use super::*; + use alloy_primitives::hex; + use kona_genesis::MAX_RLP_BYTES_PER_CHANNEL_FJORD; + + #[test] + fn test_decompress_brotli() { + let expected = hex!("75ed184249e9bc19675e"); + let compressed = hex!("8b048075ed184249e9bc19675e03"); + + let decompressed = + decompress_brotli(&compressed, MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize).unwrap(); + assert_eq!(decompressed, expected); + } + + #[test] + fn test_decompress_batch_brotli() { + let raw_batch_decompressed = hex!( + "b930d700f930d3a0a8d01076e1235e0c33674a449c13fc37ee57f9ea065bf41af3aa03d5981f1432833bd0b0a0652a19cd927ae4a22e8f8069385002252d78e1c3cc91a59ac188708b7074449184766cbcf3f93085b903ee02f903ea82014d884062b70d4e215ee885019d47a37c8543ae9f382a8310c97b9451294f5cd6e52c003ecfb412ca8b42705c618d29883782dace9d900000b903690d669b0cd98174ac3b57393839029ac04ad36454109851443b4f6580664fe06766a7dea5b1ed31e14e7c11aa738eecb86e979f874873cd3d7ca9481681b4b17d134316e7bbe828ef69339ef85c6f0e9dcdfe1dc85309effb487569383d5464b519bdc1c85fffc72bfe93d4081a3e1b75e5dd39f95a91df0997a22d8fbdeca57a8b35b4f0e277ec8502cc55581a94eec1d1000b2921b4d7c3985ace205713641d03c3975e4049e13b3d2c5926b224684e38beb3b8d2e5d4060b109aafc3f2d144783aadf6086aa1d5a931d21282711484a9c0537bd4981fc222444f2c057211708e70dc4223063cbf39e4af0b795d3ec0dfba32391611d151145c1b6bb33d53ce2bb7983bd7b6c1516f7a1a719fd876f4b20910aba76c16dbfc57199a60e2ab938bc285613c3802c17aa03cb9654f5142d607bac01293c9aaf4e58b422c543f7e5e458af0b7cf57f33109558bef71e8b5506da723d996eb8e2c265b1cae43dba571d07d3ea1bcfdcb73089597e3744344e049bf21b4244d5aff60d559010b69a6335f4bb21178de504f50808204da652c7767dbf11f2a34b4fb710e6df9ad8810aa75dcdb2c99dfe9bf898
912817e490b4982d44fe09f8adb43e0da2a0c824a9069ce8cc36b5fb0074c2db895ee92d92fa6b7efdf5c97ae05ae27556bc07ddc9d9d6261a53e3a10c350c3b1da26b27b345768e17da7dabfe6e30e019c88ef4a0e8df840bbd3fbbb639edf775449d8be7510cc811564789b861372fe97f7b5b1389f20c9872517634e9225669ee80cf077f9c8606cdbad53819a875ecd9f7b6d778c1dc302ca19ae67ffb054eb99206fc90eacbac8177712d0b4c72700df3f5e2c88fb4e9c8284cefa66390a78605ad9320aee34f72f3cb263020204393d9359a65f48b0e6e942b016a1f2c5bd6579f0a65997635ab15fa38db76ae8a5d3be516441499819bfaf730ebaec389db082e41443660dcc6280315154888b9e726b971237fae5e06b01958aac081398c814e446a003039dd090c0efa5d39735ed0ab46c7b4e4c960ae414b045fd19117089e65aaf3779cc9045d6e62538b1b75c2689d23ba3c08ceed46d4fdf9b969b34a1903ebd96a3a6b091842480e638b095c1ec11bb5c599668ea1b0a5a714d13462edb39dfd992b569897ac8f45c587182770631c262fc459afa6f23d5670eee2aac2ddaa89314607d30c6bfd408980c082749ad6b48a5310ac75b880cc080a00b5d23a075615f50233ce278d11b7b0ba0ad6a01486dbf31c54aae096f0f066aa02d9feeb4771b5a37d1247a4cc58a64d392f3916b5602d9d41d97b52b391ffd47b9011801f9011482014d88a793ab3f17510b308821f5d9030532aae9831708c1940b6f262f685c8d0ff7dfc9ba9686d8f75b78923c80b89f7644852b70713a788b69f191c54ec8368a7f2675623b2369f9078516605d0d4550ff9f5b92b9da2147fa3a24cc17605f30cccedc5bacafb2bb86e2640db6654a514b8eb13d3c3ab6b5e344498de0c709dd9bef58a8af16d3efcd2c0b2cb69d6089d0af8d42baab434dea885253e42050aeec01f233e64289b2e894c680fbab4f25a653745dbd89edb19d97e35bdd4293794c69503b0e60ed9cffe7e9ab3cbbc080a0dd08ebab0802fc61ccf26c357b638a55cbcd6b366251c17e2fa52d328d9d59e5a027d334772553048d6b76fc39ddee5f85363810c235219356cb4c5c3dbf9661d5b90298f9029588e383f18817bb0d1c882c58aa6b12de88f3830a7831945c1c1314ed944220436fad3742023cba2a71c4a2886124fee993bc0000b90219fb039c014cd76a327bb9b3f59e8176f377249385e67cb1681f8eacff1dee5a5a949511438ce370f8ad6618f3af81cb1f775a0b365546dd7791b0ad71fb1f2f29154265a8175b7e518580732a5a46dae3752e1234ff779d4eb614af2c66beec964181ecd0cfd1640bb2ca2b860649c41930a60de0cc754884a780488f05d1d5833a381670b368c85bf08d6650e26
122f6714056382a006fcd5f9c97f55a98d68dd9293bb1be24823eaa8cb007481dc78a7a670123976e7b6e81fc223f42637759a0c933b73ba89a1d902c0874fedeb0a97dfab298972a18378539c2894ca6df9c0a423c2e98df4c133e5e808809849785b069e323640bf93d4b82a0917aaea8fda9a3072ab9a00a4b8b9b7b3a3eb326e54231d0f6a064cdf4a1fc06c961e5087359c029b13e229fb477d6651bad52c75e503ac45002a803a7457488966cc16bbc9be5c1c9a797d0377710c028e4f05a6cb929cc1fd4018912929252e04e107ffbcbd4c81ba01ab4b11faa90be0f9f9a6a22c87257e4a2aa8283e6f71d7b9e03b5308b16525c4d79705bb0906be0e947e8075ac6ce2235356aa0a66bec39e918e47a6220b322e326bf8fd65e47778e14074c47cb62b7ef8ef956c996097d2919df7aac8ea2ed69c1fd9f1d96b6b82b411c524cacec0f4a4269821fd6766d24954b8870fb1d85f5cda0528ae18419915a8b30b25baf6a162978a4bec86009cece83017d50667a202b3fad18f8ed8b5140c97fa74e91be608fdb788202bea05f469660e363ec580825d1e2bf753c01db044279f862720a27831744b91494f5a050fa7445e0e6156dfdb712a647ef73a2dd35b73d5cc988430c831352d4ac7e8bb90458f9045588a106e4c16d06833a881973c4c642fba1bb83068f2294050c84206ba9d32d93d144884644e5bd36fc92d0883782dace9d900000b903d9b303f8efb68766822d7eea21ca4b7c5dd79dce832c4893247f6784fe47cd7a18caea7b5b4d8bdf02da0276aca185add01fa2d16c2f1188ff7cbf6fb8c6308999037b2b92d725094d8faed86f0b1a45b55de4f36dbb71dcbf4be12fe624077213e0c170afbbbb546a343ac3f2a1333a7a7a7db7be46640a73d61b3aabc805b022be416198d809b62f99d26cf4a3bf555d40686f4b8970ec15386462bec5f2b728de0da047d6b3f3ea51f571507f32f047322fa204f0c5697cbb56b4b5c7792acaa40f02926651fa715a40e1f212c78cd4ecca285ada2c8cbb6e5dcfa3823725b44e29aacbeb9b6224f90fbc895a5980d63da46688832e9776b0666e90deacbcf8a4c559b625cf004cd04c686aaf9d7d6e2d394f5d36311f7afdcec5033daccc63c0540935f59514c9aa8ac3c2aeff48f624f2dbd38062fcd046651e92fc7ffce4dd914bb0dae704e5b26a8b73b3baef8ea022881e15666fada8e43fd621793713cb8c867775b9cdcf3b066582fc9baa705a0e1dc61a4b33b1b33ad3ba3bd0cc41b5850cadc04654dec222178709910209c6ac3db9054ef91facae2d729d7ee54898a18411b6d20d599a3de14d5375e5a9c90f3bce78479cb0f20afca895e40b576940e063587f451a8828ec2dd4a8538b4bebc39f72a6c54e379a0
7b7d5e0c02ccd57dbff13729bbfe5e78498c01cea12e830944fd0a123b7383fdcda97d8d9cc831e542ab6d9b36774d540b180c2bd52d46ca7f0e17d400cf3cd559b1b4e51ba93cd954777ba27a9f0327eb6c68aafe74fabca4610210db7498aecffd3164c5eef8cede655e1b42d5f54f5a52b4f5fe9698a4463f30f20693263d41074d0403a737c4d4986f0ee7fee828fb7072a80603613fb4d6c219dfa47adad433af6b437dd199f3bbc651487718b2e6d42728034c242672a98a9f36fab6d4162f4e8eb7bf2a9868cead8ad657a67f0aa50286113db972936260323d7b11353328151e80691d551bbe1f7f11774e15db4f175aeac5b91668a712c3c2399a977abb9fd9c2b53c5ba68f2c0ea353028416b36a47028f78918e2b205bf9b3bce6f1a08bd4448abc3f12a240482b4be98dcb77c74fff47e92d833735e802465e50b79d51de5a7fe45a95b650b051c61a529d5f51cd0c603a2de67a3123be1c52263e1c9167765b13ad1e01cfb27531c9203f39e8913fe0cab9d8c14b17bad0100b76c41d41d68ae3b7aeef5f6af4f66d113fd29eb9c4bf994f04decad13880d9d1eb3865a30e2540e86923b36369c121ef2a6a43a618aa4b15560fa806601a85be361468bd09c6dca39ad7ec44809adc0907dd0458177343a7c23330605b802f3ffd3ae61b3be952ca2effae8222e9ed0b6ea4240728a7800e4882efa7dd1ef8202bea05db690cab7dc8c52c2c375428c0aa9ead02bf44e2b1f8ee06e1cf7af25eecc13a07d967fb12e1f0073adac46e0676a6006b30d780e6a1387afec76cbd1f07016e3b9012401f9012082014d88df6f092495b7f4148840c5b5541d013c63830408e194aef36f2041e560a641af89e0ba2799ea630a9592881bc16d674ec80000b8a3afb9380f9228224c1aa59eab115ed4172b471aa2ee11b3d4ac93f4b6a33518007a798170801f4f582e188b489005d8f108e2a4acd6f7ac28852580e73b6a1590ea1af1443666f1d14affb0a9d0655a5c57cd4190b2a00c07276054641ee4204ed8a806ded2b3aaa7453c24e442992434d060b51d2255c1cc2a002264b5dadb32057f4a5d52626e0ff453e2f05f1e0d8294614916c00110853462d51d9ab7e03b7019c6c001a06028ddc42f0d3e1cd6cb1ed7377d518480626d56c80e6d15eacd42ecf2f30957a03f6e1098b300b6329997bacc5e667eeed72a38f6c4e1db7199483bc9a18267d8b90222f9021f88c0988653bce0e07388fbc67f04e5c6772e8311bd5c94eeecd6da1ee441093ef70d8c86a26f4dc4da11588853444835ec580000b901a349e745c1cca19957c43f15309935f7bf49547884332dfe6d5b8b9d61542dd88ecc61187fda813a7f700ca96e8847a33bf8552690d91ec8e8fa70c21b380c9c681
b54e859add36c3c19e7fda3075ec1a3cf47ed39c89241bb73f206d7497f93c47db9a85be7135948e19809c195ccd4c9a379ed464bf77ec562e360c52b9225f103d323364a72e8a725ad2b34a355928acc6aa563b67d120ddf54cf68f710624499ddeb30b0c94b8722ef2d641ae49f17f4a916d54350ec483ec5bcfd9748e0a228c3e73cee9ea248ad85060ac51b3e6834e1f771f725a466affa28453ad3726d794caab223fa76c8b994ac5d3a1e8ee830e4fadfe0786174364af3109c04d7d607aca17933c4366d44d9c5376ca34febaaa612707eec4e2fc5c6b1668b3450340938d17e5552df96ae84a905d069f9e3455bccab30640a0720f9b4598d8f82ebd19bd32b7e82165303123a0ed80c57375174c08d32ad3ae354251c97316b2977f3a2fdf2dba1c595093c88275badc54e3aad65f77c56f55d04b1e6d668406058ea01da2364fc207659b028d9c55371c776f732e63255dd177b95f857e3cbdb4c66fabd8202bda060830662664d96755362addcc0908287c99c60761cf9c7a613058894eab6e599a059cd2461d4a89458dc68adf287fee71a783dab0aaa05587a21b4aba1ca4f5efeb9017801f9017482014d88d15c09b7ee8f9562880ae58585f383aacc831e72f6808853444835ec580000b9010a2e818d2c4fa7a974f5c3acf3c0f9439f4c83721b2bb9df4fa290c7fa57bc1f9f77e4b80866845a8bbbf8030b707b1f07a54a0ab901188eb2e1262a45618a08517f943cb032eeec926e4343d5d3089c145da1d53128ae901ce91a813c205c615bc1ce9b8658a9da4c2d258fe36f6ffb6289df910566386dd1a9f73b44053bb64523d8faf7b9055c592695fc426c360479c1e2d1f68ca5c7965dd20b6879989606cea7c0db28f27ead4a591ee264f755b7358146586c6a1a8530ec463dd754f100fac603ec3360c0440874c12bb179c43a23e40957bd446f2573af413f3314e9f0668af2491de96156a9bf35bc469d51935305f4df051580b84e98ec8395fbd42fc0c3f3e7410ac4719af4c080a09a774db7e3a26966edb91c1f7956a091425044ead1589f435c8d04aac9533764a04325d5543464929773cc6ac555f5ce1830c997f4d26f2dad5a7e056db6f0a2e6b9032d02f9032982014d88828a67bc288355d78498c2cc318542aa1a60df8305fbb6808853444835ec580000b902bd082cb3f3fa41ebf06fbb17afeed9ccdcf3d2999e2fdd1e1171e0b1549c06de17dffc4ee7785232184a698311c7487fdf090e34b9954a41affc0d0ad44104f70750f6a896b1b2b5ff1024de66ba877c5494e67735cdfd45f9ec0df1c198b357b60e4d840abaa72c5667074c43bfa5e1f07b5970f018820db6fc2bf84341cd024cefe455c92426f876e51aec0fedded8d4aa4003aaf6970
c48d898d8d82a8411990e73c8ec792a2cc4a129e526d0fa34a54c37ac13ecf4e3c597304cdbd327704fc97f2ba0b110afee78da5c3f46d3354bd20f56cb91b7ba8d302422428082748faf8b4828ba925ab1a02ba695e686da4d1e759b6456b0388ac8fd769f3b726332be36d3153ebee040b5d822fe62d73b629a6251c8e49a988cdfe599762759df03c9100db5f7a87ce7102ddd21831e0736924f230ffe6aaf6b012423e351627e118f2bc12736a3694b5468858ec6310017b10de24fe75ff0abc060b1e60271dc5274b4bbf0b755a0a617bc23f57ee2286c805086d5824ca4bb6297545c5c1ccaf03be03b7df33c953ddb183730313f09c88392e4bdf688f1d2b730318cc9b148e488c2f1e383505a383672755a221ee7dffec5a4f77e7efe66043d686a126480ea01a8ef0f72f9a5799e03e863a85b7aa56c88b7575d6ebb9df809a240969d3a2b2e086e742130e38cfe7870db79bbd281849912fa611e04b8dd0dea9b7da5d16a66969e54ab9def159b9c1d351d719a93821c40ad6c6014644c5f77374cbd486d6a7cfe75d7d849ce240ac86a1c0843aab27fba4d317c725eb101752803ea67d3e12b784bb424eee6f766e33d6664ca113af63c54ba27b8a8e904c572dc3fd09848cca3499c403a1c601db77a7f36d244024ceacfd9d6ae494b7e7e0f92fa5f83458d5da139eb127709e3dd75c88fd5f75244e15f1bb8cdbd3056bfa56139442c0bacbf3263f29ef34946e928b9a4f1c085e5df3b09f31c6e87397bd939c001a08b9ac3bc299eff8eedc51ed3ff077e49da6fb145a0c495f430964581fd4d230ba05fef2837a800e231a3178226f59a981d2c4bcebc4b4cfba9680371da1e2c1a61b9042bf904288821c649ab1ae8ea668896d6c78054ad7a6583121a8994e3294b628e98892fc56ae3fcbce852265aa657e7884563918244f40000b903ac0177c66fecad5135344e89f45ec7e083130a3e5eab1abb75bab0aa357cf044c0582542047a3f9985d3439a6f850466061142af44a9208656e278b7ad1bd0e03539cc019d6ebf8758bde3e0489ba540c523f178a0b055c1fedc3627fee427467ab67545c154106bb9e0c12a7120c175d66f9e3eb9183ae5c7640d4cb4bd3dc94c7b4e0c9fe70e692c3fd027e0ebb46bb32b73a269037a76731a9f114343ea0584c3f7e9cb4530d086609b59ab6b72e7dc6c2c0c95699091e06a33af5ba200a168ef483fe11056330e84da4f2a59db72d5d697d262b9565fe81a738a48d24a9f1c8c49a671101bb7db5eb64deb454a117eb00f4ccc31bc93c061e975ab6d375967544a2a06ff8b9d59bfe1ecb1dc47d5536c645d764028c5de77f3f34d6c7999785b70b187d9ec4631e83cc69499a4ff8ace98a6f17b77f648ab7a07d5ee0558a8e
fc19d4601573156a0264d2e6574e867c1eca423eac1fdbfe0967bb8f02524cc2d9933141acf619ffe99483305fbdd6913f1e1feb78a17fc6b81c705c81eb08d5602b097ddec64f6c334509caeed7525e3e34845b21e56e4424aa9609f4df8bb13f31c5448b6bdede84d9a9aeba9fcc38a3c8eb1f3f31b80918e045266c7d69b252c86f8b5711b2cf7136e2c3d86d1301608c7c16655c3ffe6d04014dfd55a9563c2a307525088fd017486ffeaeed45873013a7940a7a91442b975065c765c32546aee9b001ba78d8563e039c8edc24a92f9f457ae28172eb29e16cc588d52c8e75a565aad1a8f9d6d341189a24718c26c19a83c6cfe1bbec2f4b878759a7dbeb4ffc0568b902b1dfb18af00c7014f2822965ddfb56d7aec508822531834ad2c869affba1f95bf3dfdf1d1dd1c2994d904b9c5133900962c8137d7fce9f0b9a7d0474dff9173edbcefb4bf355539dfa791241031e90770c8f09af595eb1aa0d083bac4fb9b929ad7e23c0fc8d3ecc7458a0790929cf7588cc255916a6c16811f09d0c972b294dee6e1f739c5e9d3eab8016b565c8570e41bcddeef2dfbbf95910ae6a46a2834919742ec599b9ed204d1f86ce6baa534039ed308d8be0d289824303deb54af5f9f50d88807134b8f42485cec121432e58b83c8aecb32fc62623b06c39c3f1e0e921b1bb880d2eb017578e5f33a25a335a813f02259e1b12b8a76a90a65d015bb214032a095cd8918b78003d310a06a246ac95c126188911bda8a6623407c0dad308e25a438f78c7409267b729413b7d248a6a88cd64c73118999f00981aa4f6b639e4252d39b1706c686c7763ae9c41aea7b46fdd48bc490502ae876175e5aff8361ccc530ad8202bea0b0209fabc8a5c0e2a5bd08e9a6b532d51670f41513cf007781f27e49b070ccdba0795755f4fe231840196d847d100e7cf1e5650ae172890c469428269cb105c16cb9031ef9031b882565c357c3279f0c88e90114422a470a4682e988808829a2241af62c0000b902b424fb91666edaa16addea67f72c9e0bc7a8053bda59776ede2a0ec3f7c78ffac0eee97ff259f92b21378193aeeadd0253b08897a14f10ab537db63202a4c9f78eb4b399d55c5a256a8414f58f45b109e6228a75ed1eb09627f44b56eb539c334df412b30ee6f4ea39a04aa671aee9e7157b9cb69aad4ab1d9d75c6d90f3488342b29bb59c97ecfd2bec4f991b095038b9e20eeb591b641f64e32e5020130f8a8daf7c51caf93ca460a4e60132835119f99d0484529cf541ab9f922bf15a782521a0f6739c1edb8d4bc26a07e63790087b4c098e4df74534340bf7815039326d1bdcafa53932deeaff03a31e97c6733cc702cdd42be18e4716dd0d014f3e916b0cee3a16bd52cf717f5efb59fb7e41c8e
4c0d7eee8ba92ee5b293b25612ee9a3b0043664e918a2aa2b602accd357c8f22f382b16f637b57f2fedb7d8f66172f22e67cc04f230e28ec96b928f449fba63b7862bc3102181d6c7bf063d9376363b8be8200169aa88c46732c5ab1e19dcbd8abeb34f1e1cbc632484d9864e630c4567c0f04a2bf5895d3cafae1b0e70e4c1ea28d4d9578a82611f09ddb22c3c4440e8236be2bf9cecd3fa64b19930af8664d78d6f10aa9c913be537bf2b539e3a9042d5744eb3d1bbc16d98564488a51ba45edb2713b466beac560789c4eda3c0961bab002b95eba9f512108dee2e39a8759c04b18a923f2f2aab2e1ca30ec7361b25ae71923027c950c089469820a4ec3ec60529f1509b92ef04fb7fac70f25d3e5ea5c6a28226fe19317bd4d0f42085884020a2b22dcb0ed8e5600ac969b4f910e54f617597a84b05774776d694ba38ccd3d1055a7245334cddb1ca20d7e001285a57001d03b2fc1ff893ab044612dba9b311247528d7490a9a7f3e7c3ed8531844d3b829de3604e8546ee8d4c3d7a308d32035159aecfa20ae4660e6dc94b6a155aa78150a01fb0e6c48b660a0f051ab59accaf4508202bda080d51bfef036fd4c4ebe7151b2755d6606122e565323878701113b84fc86548fa06fb34b02deb66359ae8095d3c339673ab2a8b138fcf9aed2d4276c8a16435a60b88801f88582014d88bbd39acc70c3229d884ec80fa5565439d283119a84942d89ae04c33fcbd75e3c6c43b826b266625b854f883782dace9d9000008911d1f14d3a721904f1c001a046bf61e70c69943c277ef7d09ce5e779a10e3671cfec81423e0f951254dfaad2a012fa75748afaa79673d94a17d35666009001775a2b868b9b839c77065649bbebb90143f9014088e1cba06e2ce482dc8804b98caf86fcf0898305c61980880de0b6b3a7640000b8d9854e530ac567b7d29eedd91690a0d2397591c6a1b1f5068bc292b740f6aa5d38003a933c0560971d4701b31d537fb7c1ff68c40ef07221089f37671b101309000e0eccbc42284732aa002f2cb3197def9947c2b2fe47d3fea2efc71b1f3cd681082d043dbc1471a56a5d0a5c757b8c115277a2af2e044e56e5e3c2cf8756dbe51a347096a4ead46fe53f4c03fc100fe0009f6b2fd6ade28fc89230602e9221962f4512740857b87f415f134a224c5149e374fe22f3048f0620f1bddbc9acdc268a5de1296d265bac65fc2650b3de55e6bcbc26bc4d01dbf7548202bda03e35d4429ee24e44134f7f51b32fb69691a16c60a0347d9283a8e593d5a095baa01c590af4c1fcd3aca728bb5aaf03f48aca22c756a87607b4153a5ac6be59ebb5b9029002f9028c82014d88aab881c6fe3d0b7484b0da2b368542c231bfe483115994808829a2241af62c0000b9022
0a8317aae8cca53d039d79f09934b9c5d0b07bf13ceeffacf1011fda22a85505eb7c717168c18d8fb230a7a3f166a4e93326fa82884ad3093b5e07b4edee095d98bb92f357fd4a98201be26960d4253da6fcd09874b364595a47b95d2b50f8cd45921931469a302be9699779775b59f27deea2aaae41a010a47b825a46103b7d355f1c154b3422b4fbe4e62c71c5b6b98b627beb82014ad990bda2b6c06ddd237543b3652c7a029928153a8cec540311406260fd3a55cc5788610321d66c29f168ffe5d93f92378359231ff89492db2bd2e90a4d9c28263d75b77842584d253fd7316e61c27f71771ac7e7a3c8ae6921ff2280c459c36348e0a098fe8da94c1546c15db7968d6b2821b24edced45a7ca8f2bfb2b9bb7a497b950bdaaf771bd777e918887c0d2d6ad3b72c168228f49fae155862e0baef308ace6952606a660beee10da3fd2d29b5ac31f2d55e34da94a4274e1bd679fa42bccc5db074a070b899e28948680d82c7229223d846a1a2c19143dd99c78bc42c33490b85be5067a25f6361d6b803b315519de254191557ec691967ccc3d087b8799dfa5888ad748b7a6e164da0c726bc1f916110b6fe6a013ce0e28b79bee045d250657a70211dc11a5dee69a2c05e9eedde536a9911883e5ef2ee76729ff8fbc3aae0fa13a36daf01199a7ac60b21c7fcac00d7c6a80f5ce10b79f4666d69a1a45b3ec864a57f1f6fd492223c539351326d7a25b18bcfd8697f55e972607b9675b1d40dea3ba4c0b3c080a0e69a3802e5dbe5284f817eaa05c76127a3898633d4524f3da9ba8d7e7b98af23a05a2672729a0136c572a68b494cdd49ce47c2c0e33582b601632b3a1d15f3cc38b9016001f9015c82014d889e607b89f9d2717488ee3a5d83a713a9fa831ab7e68080b8fb754cefe26136c37abae044d7be8e1a3b8aa3ff230de4579b08bf12020e9ea66a2f282ef549cd7f72d056ded10c2fa21fe339fe56715960a4bacb65525bde1671a0a691f44c0ed582e64d3799c4ee453a4fbb700cc130eef66cc66913d919b6a96bd31efc3d77e4accf3a7c695275188ed2e5a76526e4706bea7df44cf6a36fb9e43d0e37cf5d6e3c5b984062e57ceeb1c5e6a9d0c418a5a83b77c4c99e8799fba27bd884e51d5df3db1562fa0b13cb1051ef5d5269b4215078384fa84cbcdd93cd7e67d166ebfb88eadc77cfab6a09fd1ea8f82f530ecf62d60d176d3bdf4f2eebf57b45b532ba6471fb53312e32c3452ac69c7b0ce227a61e69cac080a0434df311dffabb4af9df6fd81f48814ad8f5363567d421c5466423bf3bdacc05a0032341e2314432f05701cb222c2868894039e6e156ee6872ebc8739a4c45a43db9027d01f9027982014d880843386325d71bf988456fca4e1ec42cda830601c99
4c5e72917d21e4aa0f724ed1cbe014171f1be66ff80b90203e082cfea48d8bbd73dc4f299c37a26fcfe1286a62d17e6bfd13084a47fbccd302a44770baa03092d7aa3bf8f15281bde3418b5a6f610199a7ca97fc11df8058de81fdc05527047d32e0e4527db10cddaa2e1a190d7dde1987c0501a200df8eea07d61ea0028930e7422451b44295ce91f79de155d6169bd64c0cadae791e59b67544023e5fcde77eb509d6418daa17dba99d0f09c23c7df78d609f4af7c1ad95b01c26edae2080556b8e63ac632d78b87eb57ef23791c2336775ccf12f62dba46b65a5b5c7017068194fd2b7bff11923ac2dba3ba0d7e28c1ed2ef1c5d2069e189c09bc51efb571c63f2891acacd6a327dc810180290f9699541f4b65bdd8935e074f80887d3f6f4c3ecd75a54c95476b26b42f02964c16ae02532433d48fb5b5f779562224d1bc099f51d332c67cecb1e619bcda1aee26011a463952719987f705b12fbbbf34e3989d6b5c5182bddc569fb545de391ef10031bf1b0f673f0ea1a9763f652624852bee8f09dd517250da77dd194f8310086ba52032212ed38e014a9bb3f47d8a16cd463a977a443ee02d5548ebb5c518e5a0125c6645f2ad2d52f99aec5c88cf4aba79167cb8f7012386916fe2b863da27d16a7c3c350442ebf9b54a569ccfcfe4f4e64853fd810e6a5b3b3cba9ac8525a260505d12492b99437309f94b91dd68c7658291052e2c4d414f87c1d7b7bde565791fdf99004316f02ef4d7c001a05044b928ccada6036e32565da0b9ac1b51d4a0eb5d702efb781a832c120665aca027befe34f4cf0deb37ef259882c20be1af0efa2ab726e06eb33736ab2f0b34e5b90186f90183881a09a2f1c8cde2c488c2eb098e1a51326d83159c2580884563918244f40000b9011b643c223acabd55c37efc426850758db45eb7a0ccb908d9e2ab6a122d812921618aaf4e30c377ed8c7c5b829846b473702496e87f2fac0a78fe92a7602239414117ba9d42c354b05e5561f234e4fc76ecf8285abc17060e980e1713a3f0ab031a53c6757c972e363485581436b20fcb4aa524281e6765ae59362fe284cb6c9c26e3980cec0a9b2f61d1446e9a1679fd055fca089b838872a26f866cb09ceaa5a57a061440ba3a342807d83a5a83589a7297afba2c456c628954a3daa451cb42207f9de22fd5dad066647b8e8ed43fccd3f335298291601fd8737a2ed69cb89e0573fc8eef594568c236f8f976870f2da93c65f77aeda9ae17d812e16dae936ca069e489d3d820580c636f12164c73795e287db92ddcc73dd6b341408202bda0b8ad8ad3d5218e0e27145286459b952ffce119c42b7b143d3ae68f08991c6198a07bd60b6dd3efcb39d42fbd3b15f2f65f9561ed6106484285f3a9d235d2962c2
cb903a9f903a6883c0753f96351f096886eb111ddc0775d1c8308a6ae80881bc16d674ec80000b9033e6cc26ae2edabe8f726535a61e77b09496c76d81407ade4466993d4785c16ae669c39a5f9ee18875389a6004576a39465d66329e18646036b9ff5657ba1ec659bb2acedda2862458a642949d15f2108c9c9a712216e2d9d13077a134a69c64daa48018d835b542cfa7861a12febf7b79023af48f860377d4d8bf99639ba627ae9844ddd982438e2a508b6cb89c87d4b78f31e42f842f62af9cd59a69f4e899720156f7a2adf1d348e9b665481165af600a3f781aceea0589215f06dc022fd28fc6025ff85e3d4b7c25c358f35ed5f5f025eb2b0ec5511634494515a197f3e06f4e8a2fef699f33f58ab71376581b455cbf592e1e657115448db5237d010399045e023d0d69797131720de65ffba81c41037657951db3bd5fcc555b8bf6944a67f1fc0ae9ddecbdbb955743a86d2ca82b6239a47f0d37759cb3bcca9d95d7ad084bd8269d06f6cee9effb2173096ef22875db79714328f2d80beac6cff4b3f8fbde3ea1a1040b6885d86bc92390ed2efa52181d3fcf6b761c0a14b8417ea3878d311d3690f93258e57848e926364fc0a60dcaa161a1cd9ea4fda657c5e868f59bc6d2ded1e264a100ff752fbc32d30728f13d74f60a1931cf1cd302aec02f4ca94541335c0f0717cda44c966db4c2c1e522794e0cc5a9dd84ed6355f979c4931231225096d3f651aa1970fd8a6de80325a6b7b3362b11eeeb3401df138bf8742bb94fca940ed45f8b4937d1645c98adad12836b19e09b59dd1e4cf020a2d4efeae49aff02a0c92537dfbcd4a560e876d0a3da71a38302efd5986e70a0592c02c4a8e5638869db811e47ce514bbe71acb864580d9f3be29e73f8af1584130a448b85c0a4a790d750a3d67a4f1c3e52b0db1c7ec28b891c66570c894b9955f0914981f28efef48616b004ca747fcdb448d0a1b6d7196e2ca002e17cfe65e7bb08027b95bea17ba0dd5b9a479726b5cd32a0fe24052c2afb163e60733e6ab77f8d1d2f606de15a31a2db1c8b7827434b64f794b808287f612854c7df802822340442cb00b8c508eb8d74a6334da415319557d4a8cb58247a7e65c74ef2238843fd02d24d6a859f02c547fab6e35903f69394659a2b1bb02fb89a613733cce7c4af817f6b8cf2ce38f425fa8b59b3fea76273664b8215d0503198393443c926b578202bda0115d2f3409265aaa2d214d11e19f314193884ce34c3274f4258d5f09a97172fca0418e2cf579d94373b0a81e66636160ad2f1de4597445af60d0ec37e9a97770deb882f880880f511ab07ca9dce1889745de5325aa780e8311fec19424eb7935928d6e5fc275944276ee070e90b9619e8853444835ec58000086428a3
6f8feba8202bda0d3d221e5abc91d1bf4721d9f51100bdb7e25f4e1b2eb363d200aa1b0c09727bba07688424185824dde9b365f31e258987ffcdbf3c850f9992ed80d0e71e54712ffb902d702f902d382014d88e4400f9aa703b1f98501db23a8d88543ec7b3d868309954b94e59842fa49a842609ce51ec1a4e9f75a00da8e1280b9025a30fadb0cd19a05ca7d20dbd28ffd1ec743d59a1169a730091be383f6c571c51a8514f9ddf9961a588f38bd388786c9e7efc5d0e71ca89e7f24a73201839f40e9378e5305f4174752c6eef07273a2c51009f04350abed1b6dbfff400ac6f790013028b56aa08f5090e4483b7bfd1b08042b8651dfb27520b3167e9b912e37bbefe7f13153571ef8ae23f2034df09ae737e672bd09d896bb01cc035322407ab3ca2a026f1d8d5beab70178c580a650874a57787d92b6f31f7f86ee939bf8fac22b23c6b6666b5e0241fb55dd4d397f1c78fe6da9fc3e66c2e34058e223a4567d259e3e1a3560bae9f5e2e3e7df1b7384b6af9a4155f1eeb61a6bf4b5e149db22109c635cbe9a4266ef48c211fe1236becc472cb7869906e27166f3f017ce75d188fa708e037fe1a5729b43892460458478cdaa91af1f9367cd1164204b240212101e631cbd027c814efd1e46368b37041836964dc6a76701c38810f36cc02ae93eddd5ebe83c24527244a55eceec6d47ec8df4b158fd1166a7d0d7bbee043632852ecd8e5aab24d71717a232eae9facb45b534f75103fc57f5cd8f978a362249a16e6b3783443bc5100bd1d8bbbd45144b7c63393f5d8169c4381f645bbbabc899e022d58e7b4293125d6c4d7ef75436b4542618636fb247b48ff823f52f416348fb767f6146c1f443147baeea5c6ca7fdcfe3795e09112224301f87c5667027b74b54dcc0f3c4e149a1e67aa6f8a940e1f2891980a6e565821a1f06d522eee5803650f6c0b8c8f5452804f9c456550cb8f1d4827c7fd1c8fe77b71aca3aef9be16494a4bf7d40b274d28ed9cd92a2169b6de5fdfa3ed1b6ef8318c080a008c406d42212f12e384b8f8bb7bb40d0c4660b67026646436ca589d143edc5a9a055fb6596377274cd6af52d95a127c503c0af5b7df6df59ec493d2bf15cf02bcbb9046102f9045d82014d8822e3c64dba5192b7843cffd35685424e576804831aa2e894b002add3a6fe3cfc260c378a187213b6bac436f3887ce66c50e2840000b903dd35dffee48e5855b9f4e7d47630f215334f242c738b2aaccc6e4a815ad70d29a94bd5fea67cd0cc855835ab9bf81c789806e311f744dfc370960d5246099d70e509571437c3c61e11c2971782d7ebbe3dd231c3025966d5ae37fea256ab601339db76c325884b7939ac8e772ff54c8196d35cb823cd42287ccad89e0f1a8092caae926
12bc897cee16c73c18a39a5b1ba5bc5df73beb108cf5c896a420837ff53f6e601052ec017e75d3554c0ada83b7874ded4edab8b1a25e39c56c4666ae2812fe82f65f5f7d423ab3a173261ff29495a5ed0851171d1c261129b2062fffa4fc682cb41394f5ebe335bc2220abe7e950d9afa85f305eac439eec8eba9227352f592804f5b47208c262b220c1eb39d6ef89a92ec3ef051e9cca642658a8d8e55b35e78583d7a6cfc01bc5b9d579a1514c201d34230684e4385a1774f8b5f38b5191682a8b91b536ccd3821ee409028180d0f5eabf6e1e2e3dcbeeae0d92cd83e52ae68842bf781824cb7dc8c1507361d7d03b03bb15f7f7a0a9bf12171e01408f60b35722a5a819d7d9107fcea1b94184160cd9890f1f510207d47752fc27f58729ca8490b81ea720d5fcae71db92a9b140099047f45526d26af5da8bfe3e41beffe14d5d1cbe31bd1e50b9c38b9b393ef4b1b5514050e4a934d9501fc70d9ee3720a22fe18533b420cda21aea8c483e5bd3cb4786d6ce2d0f97d1a653253efd1c0283772e8ae43013dba4990bb6c7d9c7087c0d9b2fd3b79decd9a775989c81b87ccbb1e2d6b3c4df6dbe1b7e3a147dd8ff6998a0dcbe3f517899f2dbbbc788d5004d2de3d23224268406d02fecb0ba553123528c6b41f6f55aeaf8f32aa767a9f3113ca91d92e2dcf656cdef77f966a6b2cba83340658aa5c26aa0cb8ce54ae3a55b1eaafef66763ff4de971cd6a0b65a680169837dac945b0a7f13864795670922c99dfc6b5a5465e5043ad1b3205e4579cfc0e037f0b4e0a8b22b5d6ddba7d24b31388620d4aba83f84c5a1334261955d52294bd8b56d7175afbae015933ab1e0ef91e8161468f8eaa76a6f7a9bb8c8fc1195b9d8ff5dc4a51ff73a74b0640999bebcecb6036ef676c65e9fa5b1be22872082989c55a789fc4c2252452f786a13c4e868b85fbcd09bab689bb66dfae14c2ea7024647ad97728deed03314b007dbe461c1836e97f928308d39e5afc43ee3ae22ff47fff183553f56711880cc5ef72c5d66b4e2c6f651c57311d48fcc0aec762fae6444a5be11793be04c85ba97450673687734e681a1f3c64699686880d32d4cf87202b49ce13fbc8771fcf30d5593b41ffa61462c64061449b2c0a24ad8a03d280500bc86049bd55a27a05d70b12c7fd700454dbf3869b329a1ffa9994ecc2a6ec9572e3adaa0056c080a013fed42f6ecae05ccdb9bd8dc88ed44579b6a8871118710058f72c29f6db3b8ea03d200c0fb3e4416a51538d2ba41be88cfe830fa74c280e8b4b66cc3fad24ec06" + ); + let raw_batch = hex!( + 
"1bd930e08e94a89daf73710d130fc039db221fa427e3e9d10b5ff602fca4577fc203ad9313f493c51668a017c2a4ed1260401ae0dd8967eb390d13f2fab12f43bdb0cf432a6630bc76a84c50bedb2a48e562bff35eeabe9cc219de13de55412f6692e1708609ce3440ac1909a693fdf68b581342ecf8d480342c3e3b435349a5d903609718170fa9a4702fc7df772fec119dd097c017e8531040192c66d18eaa4261721c01c8932d0e8890ac2be0630cc398f04f556750355a3a608612f9d782f52746c2c5c83c8e01cc0b5afb9b97080505da0ed526076535d4a34650979f8f1f98ddaf306fa58591a92e25a86a1d62a3ba6d6b53be59da78c1b1a3128059e51e7fdef133a3e0979cfbb47040a51c6e684b6320b624ee51f731fd95ddf7fc672367b4bce94f92714dc4ab37394f3b3e612dab56829e8171d3af31a6cf940504421122cf830dfe1783a42dc48c2296849ef352bf18ee96eb5deff308e094b61e61eae5c02c14320345cbf250a6c15f725d6c2b12e8a10c1331f91d4161667dda26ea1f2a7cbdcd1d73070b70c818d9f543b7b3523e02b58f08f6858b951c735820579cf0ca7e4dff854cb2414a29556658374c977897ff125470427dfcfbf5c8bec622fd5b5d9cfcb898b3ea3846440ecdc29a7f99da330597db06d49dfd085d0b56bcee9b1031aacb1d71d7df7509b2cd76ab53620623cc85f880037e10a14e6b55758925f8ae7eac9489aafb831809662dd12013e9e8ebf67fba771c88da3157aec7ad6a4ee554abe967f1ccb486c47592eba5ae33812285bf3f26dd11d232f63c24a5b6e5fb285aa8950dbecc16f501c87665df4d159b307d36d554d54240306bd6ccdeb6eb37648c5c2d6fae684e2fb5608c2acfffebcc595b277d515158a141f2c8f2a005d5ed82e875c9ed3546149042a2dddfd82107d3067825968eb4cbe455b6b2f6ab2da38c3ad83a3a6d87fec0ff797916e6a5220218436a438d6bb44dfe5cba3f7602cbd7fa0ef7d000b9e02b05b4b867b1eef9b76ecbfc2d6f2df9955e4f8ca9d06f563e3991d86e9f194fad8d7c05e413bf68f02c5592696cf28f51aebd5fc6cd1cd76b3543b37f994c17f83b79c7920c01ff10d4d97e35689d65913b4fa0d5748de37963cdb48cd1416d899a3083df547241e17f5f6df8917ccc0c5639912eb99ed8849a2c8140187ee114fd3253b986c3138906dcc2db911e6bdfeb32fd0c4b8346d3e2b876fbe3d2f95e752b71f94c82be7a77b4ae73bebc06d03e8ea40dea94450887ba163826dfcd21038bf7f560db0190165d83809d398eb32f038186ce9b49ecbf2a9dcfe0be406a71f457514a47dac76990fe20c074893a34a8e7f59d4a945e3aa4e16b6c37a28d9a132cee8fbd5c7052ddca49cfe12a4c14e9492f2e6
b480aa70e39e46b481b38c7ec36d24fff714a8464e0aa8c2dc3bacebfb59adc6a17e5377e6fa4e70af286e318b47897ce7e75a65ab445bb64ac6159ab48c1310b641fed5b40c84441a093af75902be5401a3304a3f48740908da9209ee6a66a5442bb3eb344fec8905a7b809c531fc788421da2333a9c3d84a5e0b2c59bc8807796da4f6924da6a3ef92ec94107b8ba4092d1cac44ff621db09c007bc007040006570794ab5289e3a323b98e261151a96b3ea240c0f612015d99996ed87511cfad3d644577ae4ca93a14fb250484781975404938bab804f8cdd4dd288ca384f7430ada7852095dd0b7c04ae9931aab4da57816172e71a85ecab00f5149e9929fbd4dfff8635f54ddd91bb56a86dd60aea8af18dc242026dad7b52f271db63881b39577a15f5b8f357d3ccc8cc6d79665133f571125dd592caa7600dcd7d72b5ba73c0edf74389a8a6e3d4d190b76a559a324d0fe39ea88bc6bc8c3dc30d89145f253b354134b38bdcafa3936aa1eefe10c806c2593502f0dd7cead691dbdf325a7b72da81c7427d2088ad9485332e4fff004237cfe54da30913e7e0f5cebf71691ac1c38731c84d91a233a96424dc976ebed809cc7c01a681f7c26ec078dda8c46066bd2a07ac4df05d18920f47aa113136ce45aa04b9a4732daf0450a88bd175b8086c4efd7992f21b0a0a90e00d3a17a0b46ccfe9dfd9fc901fea75e74d9d127118d0f8832cbee68be4d2c020350d533276cfe5b9d606ffae3e7492ccdb0099475b66c33ba9a1d6f58d8c8de19b8475059e61907a44883ba381ccda9e272b16d797779e4a1b4e3db34def79ba78e8f9ccbf592be4a63f4c9170f2c304ec65a8db539e72e1e5217209b0b38b61027cb82ecd3fc60dafe36cd476cd291f5dc574f818a19ca74d73331e0c3297e25619041b7ba9412255b10df0722463d17eb600aa8c9ffe3f43df2945252cbdf52113dfdb052bb2491299113c3e371b2a035f9b323318f17923f807a394cab6729124845833b794b0454c42c088e119110d767b5456c82fc28a2048925f5dc54765313c632704493126c75f40a499f6408263e61162357d5ff80e37617e80e0aedfcfd0284259d0e2bd644d54ab3166a22630ac06ac802e97f600a73b0e38fcce39189828cf98e1f5c6e8a7dfbf3670ec6498225b00446125276b6cab6004bf4d2e8c1341085b1ac9aa127bd10bb2ed29c7dd74f78baa4061874f24fef9d0adec31b81a46cabe2e860d890edb27b2c7f006a37f29b9b9ed21650ee7fc27f8fb7e16e4cd947bb47d094b26b2def138f04ab29316ed57f12f3a13e988810c045b7e35f1451776031f0524e96d1d4ce2c41a4a35e7e80a127620b2252f27ea3445b0cb1b49c4c33444237a279c20c92086bdc9b0de1e97c1a7a477dc0
cf1efdf3040a09a8d1f3993682dfef3458cbad84470b94a52af59c2ba0f08d80b31954937dbb33cd743a099ddedf31402acc348f83e5bb821d185e14975e2a43e40d45e3da4b70fbf397db46395c95eb9176d70b70b1b4d802551c2b035166a82623a61f45e60b4c18570fb034e7061026002f7e15189b7c2ee30b804ca545894707287ca7996945929b08cd4410fcf7bf28c385be9abcdd0cf576dbf6c402c41a7147f14038c97f3fe8631cba55007db867fca4efbe1ff39f537548ed902ae01bd6a0a236a67c88a661dd930c15f017dce1da3ec5159d0fe4cc9cb3488ca09752bcec884d2adc6fb774eddaefffb1477d80ea9e1ddb0b7075ceabbbbb5ecb904866e0bbf0bf8f905b6f7ca5821b92f1109548fc33650f68a9b67ae20b6b165cd39de17f7691b8bfd70568c7239ffc66765d13b72db4ebf890a915d6abe3b557f70550be6bc96e5642b82b91eb10be8d669691df365fc53820e4cb6517f753510dbb9c51a8b5d38ff436fb0c61cdbfdd3f85f318897a64585a16af22cc782fa05fd7794817ec89270890d388c35c3abc1e667e266cdefe79211fd369a7f504a334a3fecebf3027fb2f0ab1af37090f97dfc1d8116ae99b2ecd742e47e48c399a88a1e1aacfb927ba4be5d9f0fb1789f91b1264d7e0f7edfdf48526c583b823968b28f716feeba8a87508249bfd938d756ec8b2e51f8f2624fc6467a7b764eff1384b306bde754b918a0918c122a7e6f6c1698ef129c99126f8d40a9ed97d1da1ca4c4fb859804441cad11ee84557921aba96371cb0b3a90cb2c0cc76c9b43d5cf16de51d6f43ca89c4017fceb239bdb708bf45e91b68fac6b27b66da9172c4d08a63f6759a8d08c513c1b2a702b1b51e1cd866f5fdcee679ed65dffc276cbe93b380acfec273ec53a664f559d29a46ae713fdbf96b1b23a1546aac5d8b6da6cebb128d61832d8a3b1e0587ebd1328867237ad9d43a4a2de95329d26ebdd455779cd19d4361a5d7fa45afd47068302b55d3efafc6b1e57c9e42af6e2507ba785c554eba19449d5f4c42e5acaf20e9ddc8ed37201c363464cc03d40593ef2fa32f81294d00ecf1862c683fda6ec4891f72a5b5b2b29f0d8c2bb415020f8db1ae7976b0cab93845b08d7a0842d6366e59d73b593b8c5fdf199ff6d6564ece94aadb59fed75951abd39f67a06030f2d34d57223b62667a8fa315cd2a27af7ced30d9ec78e71cb8d675d8d61924db42bb3105556a57775e7472e93e648d78fdbfe536e767a71079e1217faa728fcdd26d8be1cc1bfce84083d5272d543378cd430a096deccffed011e5ff741c92bdfdd4d42a8ad0f907d17490eca3fa52b0dad916189cd4b19161f886746a18b366d8bb1047746282d772670bdad1b0566b789dfe8348993a1eff2a3b03
f51aaf362711afd6b0150ed8ee20b243fea04fd2e1f1eeb556d66b13f18ce72155f52af95cf6bb1c1a879a4cd9106ecbb5a6891c9823c3cb958a4b7652502e6d1258dda66af2136800ac33d739998995ca73ffcb541c37288b5fd898133d2a1de5c020154dfe1603b80775ff375e6cdbd69cc4557afc794acf9336da712626ed13e50fb60d6d7c0d92b10b01762dc96f8a7fd7facc6e090a7442c52e5e90cd3bd0a1359fcf64fe2a77a9acb296c48607a70232b19947b6d8dccb6adbd195c33aa0f9a3df6affa73afc9d96b17dcbd4e0035e005400e022883b79c11a9d3daef71c06223ad5a240021cb3018849dd4ba3b6772f103b332f1faa8ed2ebaac534ba4b46430d18093adca381454c5f59d7ce8c9f4944a84a5f9d598260b784cb284459798cd0b3529f76dc5dcf8507ebea12e2164aa7aacf8317289b02b3708bb25354b4f35f41134214782f6df124f096fa4786c6e6615be1a2a67ac0d8c74a7c5139b2028f074665a56a4fbe42a2b15709b73cd55e5d242d4fb1259d45c3366ad2494da03538c509456ad6beb9cb0c10ac61a163fd1ef3577af4d495141a9e6f2b8fd008c082e8b4592ecf66d411782d17e00c48c7e63980d5584786992749937503d3cc4c249671ccde9dbe9b4c4f9ed1da22e44f427466633541b675646d794894dd0e53223dfe3f0ceba6b969ce04421c876a51348f9022403f767466afedede7607bf8d06c31c8c7ab38661f618a55e9e2fad91ee8b238a3ca1c64616392b0faf61ea8135a5e4b8cff5a0a0008ae58fa407a60ab3748745bfb167713ff5c96bf9847f67f974328cc933d76259899f32c70f5e0b15087641a9fc09962d167cd6a64d5c251d3f7e751924e243c9fd41a475ac5f3bef284470f4510c6f3250fc4ff6827f3c59bcdbfd166e593e386538b0b3c2f0085b5f6e271371206d6a61a2d8f74246f12968c462cf6c842999e6067a9e8a47c1edb89ca69689ab583b397acabed4b22d100b754bebdf8f270c0ba9ac8d33f68609c55f94572c5684fb0578f795b88b926ae7722223bf3f32e4b68be8878e842ef38be46a23e0904688447e70ed3cb93ed194d8d4bfd24b0bccbb39f92a553551bd7a8d77a6d6180b90c61fb3efbf6e6dfb987bf028dc61e4c22c2fc1d714fa7e1fe671925a1de1752c563dab2ac372093a57611b196db489e152e342e49b0dd2d6d84aaf0baf849db17bd993369caa66b74282277f69d18f4b009dcde6cc3305817035a1b104d056507479d53dfae3386b05f6b4688833381c18bcef8a3e6ed70b47d21085c07486b5232a02b5d64f013a0fc6308d874b3fc4ccf44e016b5456efe45efa0df4ab239aae635e4f9c879cda1b78fe69cfba7b93eb4a36af3d20600fc42c0ccec24639dd53d3a2f67f7f22e8d744a
e9917f1cb5819362c38f5b4ed200ba23f4d6dbe5091aaf7ff47ededafcf23421fe16aa42a583d3f8a96eac23faa269f9d001fc00bc003045006cf1a21b65f26a45980910e2222eec2aaa6c248dd1e433ae25f22b186c631ab96577a3c0cd5dcf5bf48162885b91131756ea916258ebdeafe262bf0deef40b0093788e97e864676f127832f5540ea04e0c737edd0324a9b4723a807a70a35705e9e27ff94945c9c47c8c5312e5ce4a0af4b243e210c15223732371cf89b13a957b9a6c44293b0e7ecfc6611b595046bc3e7345bf92428052bd8264db5f2fad4096ba44f9bf62ee1c803e33bb03bfb185b3a966e3c87fcc337331dee6f79ff3afd6d50ad823ee9aed593763b77a88c9ea33d6104fbb98cf0b2d60dd4eb28f4f977b37e29048f01a646df6101aa7d44dd1e29671af77a71d1ef3827d736d1b7f22427e63a957ddcbf65f2d4533461efb760bf8574a8649e87a5bd2db0f50fdd1d89230dbb66dff78740b2bd95dbf78aec6c2e3a89c97c752049126a52a7b37a059246713055139abc5610499a452d2eabe40cf729fb11ed87bff8ec1319f773ce2cb50641b04e6dd745879dd02cc01768061040190c8ab6fd4d1fa6bd1c9e3938c51121514568b61506fbd696f91b12600f0273f3ddabf8d9b573375efde5ead4ffbbb9ac7cb60d524cd7ed46ad5cb84dbcad7795231f0d4e7c05bb30cc31b9e02d4434aece3405f1fa7754a40571982778b5c78af4c6a6d62f0cea4d9bae5f015aa987dedcd31fd22fe7a8370399cdba6d68cae1485de5cc3ab6f04a927da53bd7fefa2ed7f820d4b677a66749f169a0d2d5bef60435edb3d701e139fac5e6ca42951874d563068adc4ae6ca0a633866169afbf8b92f23f37021c301edcc2b57a9126f0df6f9fdde4806bbd2fa3c9d8bea443013a411a3fed267cd4854669e5b710e5d6732a9bd2b8e9d9a522204e491501f2347df956cd008612a4b3b8c5c5326f5ccb1d269e08b1efff02a1074b3e4ece599ff26d2bb2dd6ba42f969b12c68916da13ebe9f9d19bb7590e545a7bf053d8181dafa54117084c1b24111460acf93ac4a85fe695fef00a0a6da53b708c24c601aa0e329b653d4fa11113fca0185d788baab7a647a5ddd6fd6780874fdafe1d1d27dddae0d29c3fb4df510b44bef18a216b908522ae9b6c8d0323222fe732db82d1878279426bc8ecfcbce218a381e96bcdff308be996b67e7889d6894db070fdeec85a919f0f1b8791a50921e6d7d8e943c05057ddac008ffb0c7b20a3905545ca1bbbd94fae6431f5b5618fa953a82db758d7f76e73d231689a5e70930b122fcf4a060df8bfdf47159f7ed9e0b0dcfc27a352785e9d8403dcd092c9db5b749cfd7aacebbfa96934bc24de29a9d022216ab7534c3b15232f5e655ea917
3b20ff8f45c5e91ff4b8d346e4f8c2059d514dca5cc11e066d208f0a4873eb59ddf61f2516ca1be3c7cb2d913b6b1fa8329f028a4d545d751710233e2f65f7426536eaa583e574c80d88ca4dd2f98674e0aa874fa6f75a94e5e3128083df9d5344c3aceb890ff0ccb1b716fc3733c61f149436ac794a863ba875da7afd49c5f8a19b9a68fd3f236ff4e5ee684beb3e4a63fe2604b10f18ef8e72f7eff55fe7e0024267be83743fe57fcc508e9fc177c90fc9a73a3346438ed9e3d5d3af443990a19627a45cf5b01b5cb518c07a27dc8ce246156fcfb5b51e9adf207b4eb1a2933a179270cd30b0c3d986254be9af0f8d4069cbe3416a255eb671d86451895bac7a068119f19c53662bff7fefb5883d6a04cf7082c6d990492ba8782025d03f01e753eaf55e7e65289ba3719db0ec3461231a926ecf6ec6aa8e20eb896ead7a39180f113cd8a9897cc768e80b181c394a897aa248fd4d9f569af259ad9e6e69f02e4fdecfba5d7b3b72d97532a364275e30369d01ef8fccf43f7b94f27e3d7e6293da085e1d0b93dc0e84a3ee0b9e49c2fd2892f70306685aad4d2233ca1e4af8252708466c72c3a43b77dd6e2d0cce45e6407ede7e54e58802929790a1b3ef4743229cd3e136996a35fede076f4df911925cd2e3169dbfe7bbd611154e18f2b39d11d0c9def68e16baa8cfaeb6e8b4b1973169d3aa6c784eed172730a05c4b1f265ae1844edeb266dca67d20a98410de84a531cbf53facd4f3cab9d78f56db51418e1be62f2f4fb76ce1bffcb2e6a3a5a197b89d18f6c7adfdd293bfa66f918ba34fe5a3d97e138161a4dcd2af98afe9b5976e3effd2857ed07bf7809ab577135902703d0e5d081d02ab35a7b1cdb0e9c97509d0e7cf46da7fb775cd3504fb1647dc721fc675ef09925f71df66dc30efb66e7b33d1aefdd21740c769cb4214e07d890b1716ef538c4a5965b77e149b3b72727dd44aab32fa1506956a0fcdc8d7d47ac25d7d67371ac9c9d7d56f93e142d14df7877471492140fa36133b69443c31cf9dcea4ac4fc84fd93593872961d17616cc0467be8eb70460c676bd120cd72b0185e430dfc01f088fc3abd5cd0730708f88a9557e248747ac2197919716ad95fe6401195c745586ef38f5f0c2a24bfdcebd6d1e3b136e5e34ee9c5698c1f19e818d41226e43971614615c9e20f3a125408397e12f50ede77f8786607f6b67cf5ebc4243291bce1d7438d0154e929d38db75a9dfcced5c0949af85cb5cc91d95f5d64697dc21f37b31bc40ca9ab309d23d8fc50e9cca1bccbef27d79de533b2ca5f49ee17bfaf5afab8b5f9b7ca93a831384ee05dc6afb31fd2ce082133615dc36f39c9d9cbbb42e8e3f3e763d2d1f089c9b94f7ab183da49f68eb8a1648833136e4da99b873b4
ffc2327f3a71d00071da308977da2e9cd2b96b7beb424a4c3127b7aaea40c8973fd9cfc3998d967c7c3ae522ee8fc7984955e54fa4c6a76e133ad7ad302b515303cb66282849cd139160ee7414cd878dd24e7bb858520dc50ae28295a32115147c8dc19c0d3e7e04e80a698bb02fb9a527fa79129daab12c97ae65b37851827246d3a0abf3d047a1e03624f6d3f6184650e4e225a8bb6a1120b40ad658fa729e17b8af540a4f5774bc56e9f932bab885d5272c78ccaba460cad5275b0cc97d098cfc1831b8d1cf3123819263cf597f95888194e54633cf6c23331f80a339f1a61af05017b210de405d5e3a5fdbba53d082765ad9c8bb82ef7dfb0ff417987de06c937b84cad437c75b5ef3fa9f0c5089cc20331d0026e0eac9176dca2506452e969731b61071c3ba1495fa089c034d643ba43740528e013008e04a32c920ce8041c026628a2267c648682026ca17e4bb2f9b95668bf716afbc49c8f3c56012bb8a6effd7393116de8692ce1b5fff224f856ff8589823734a5ee7403a8d900ce2854c5a8d60c6ce304964c3cc5b734672d1a19d0d887e33c244837221e52467b5e9036a4d3dd2bea9c69e67e57bec76a463bbc3fe5872894b9d69d1c7df3cf6dcbae55685c5d36724abc930b9368ca69cdcd38ff603a57cd224254e3ebdc453bd327b222b3da635523c7468f8eab0f50fff3225462567208e00c532778c98309d7c87d10af2e4866ba31f0a1a1803cbae792aec7290edac31ff22622f82b21c62b3f497371213f85aaf1733a11fdaf2fe5e7dc3cfd822e26cd1875171a034e2f30edc4cbe26ea0025445921c502e05707b34feb9069bbce9bf05898feff72f5f1e77255f1a3208d298b39e1437c0f0589de017553199314ecdeb8edfb2f131e13ef2b606b35db4af3c9abbddb2da4ec1dcb2efc64f38242748157459b647320d6150842e8df5c109a778f108c61c9303ecaac0c3b69c23d5a404ff7ba27b9b5549897f5b5287af46aa58a248dbf65f2b303d44190bd5d711a1b9ec0f9cd22facea4683ccc910379a9885a4c48ea91c76fb72cfe75fad1ab3d8eb23ad96e39c31aa7293040f78fb9834f3051225163d549a67bb079c3275a4c0a5442526eb74d82d36bd353af051c317c5fde944d5504c6967950f58187197acfc159eb9308dd1c9a26c8cd5acc4c568633c443475aec9ec74136afd513299e425e722a3b00c376a39c957306fc1352eb7c62226a5a34520da4f020eff85997bf208b018795113cb24daca8119d2845dcb0bc681aab967468522acb7acd7526a17dfd4fc2cac819bf477a58dc63fe4cbdb007a035d2812e8a677b2e7946a1819acf5ca664c6a4ffa6579a4ec60910091154d7ca9f90e864d1e9863ad9fc70b43cbc508f3e4dcdfb2cf5fc9eb64cc0ef
fa7b6156a57f97c4302bca139cab59941aed5abf56bfedcab81803d909045a2cf6b9e0f25955e57f5264f631b382c561d4daa5fbf009882e1ef915a0910e76645e0669ba57e5d48dafc10bfad40534523dffb4bdddc029d6334aea481590718f01c01022883bbe7b3a75c8628f3c02ae3e8a53a5afc736198d9e1a92c51753043a293cb26428e921db44d36168611aaffb96e38e6ec8db2801b01cc4d3f0022d3677e8462972a4417f434937b70e45b88c6e3faef3c5442043d0d4b6bab6a0e82f5eae911fd5a9eeebeaa8037af63039508f036608a8cc909cbf586d391ef3eeb0448be00c4c03b93909fccfba0ff6098ced8fac8f7eba830d851821030ea765b73b9151454ab112a9a4823b6ed73f917abb88990397ecfa4d1c2c607b898c1e476b1c72a633e2881142158b30c12594033896670fbc0d78f61b46b370a84025e5b220c6c442834b4a9df12f4b29c55506ccccd04815759b2834d9fb2f39f4557634464424ba1082c30c2bf715c4bed8d918c3cfd633135bc8bea596154740ef606fffdd2593f20e472492f395d703e1055827ec740df862a70605baadd4d184f6637634da6486793c6f240d0ed081637c556a0545297dff3f8a4bc83498023bfe9599fa8f94f1b6dbcc3e0446b5863fd4eabd6bca97df8fb37ed6f65c0fa9356316944b81724f27755a4b05583d59bd9dad2930a1dcd205c81c9611507298b90b42e08b13ed2fdc0fb7c4d397db7413df47df41fca319d0a2ff8966f0206a3bdfa67ad9dc044e00b301699aa8ed0d14f61648ff08635269e0889418ebe7d04fdd4a1e711915770f8d5c5fed19ce15f2e404c51cc354686efc3fe7bf5fa0f03f3a3883142cda47d0c0f37167fe58d0ac94f14d75e2585d3b9823ebc963da575db5f65733b6d35a6938d3b78a11204e8a54d4795d05c739e46fca5c8239d56e29f36d78a1ebba04f918263570cec4dcff2cef06c2da0db3c65acda270420c976d0949843b7cf6bdf0c68354b30a9f6aa588c111b3a64b6d1f57690e3d46621af3139c26dccf16a09f2688c41189243352bfe8e8871e0c0d1a2cf971a8df844627092d80c16e0267c1aa7bc50f97027737c45c9b334f5b02696ac0e822c970dbdc369c2e7343fdc710f89e99ef05c6b82fc84a20f93c96ee951a47379b8e29110138ed75207b41cbfadfbfe586a0211515ffc5d3008a8b8ed6beaecf693f74f435eabf7265af63ec10707a0b2d8cfb733e382e8ef0beabee9596c775db147ffb5d330b3b741bedc412dfe606168ade1b85d34e15a4b2da153215af27d95f83d65dce00171e9c8da8d92fb810a1aa34ca65a91292c1a4892dcc81a8b1966fe2e8f1cd1ab665b646a69bed401ddfc4d3d6f578be09beeb91d81edd4d0ccaf0edfbc573d70ad478cdf4f5
c65c818ed6fb224738cb64f7b80d0f66e8c6fefb6f49c9ab59f0b05c900a1f1a55d51bf49fec5a6a67d162658c4e4f6d2cbade0f96da86afb15bbd8a91e4090ed378a4c31f65e03b53c5a816eb483ec7b6fe36457586228326e551a4ce6bc904c29a499a2cee9e447d318f36fc52e58fbc4cdcc3fddb37101f554b0a4bfb93f047298cd073c583fa0570d0daa821d33a72b8e8afcbea0a12a5cd91517e49f594f0531a07573cb06f08cb895c5c82b6dc8ff951decdfe306b5012c990448bedd4df17502cc002f00231040199c6fe61ff532e7ea01be14300e2bc7ae7c0d236c3f0c09e978f354bada35e719f2ebda965aee8d27148c2efea242ddd7cd41e8c302a2e597d9b3d1b33ec84f07bbcde86ccea2b01591edf17feab8ea9b95744d5a6b186ee2ba42ca92ee95d0164187cabc59c397d202aadd2e2803ec978f8ea376d8ab046d950ef3a4efc2defb35ff0402ec343cf1e3e70ecaad69f75d1c4e03ef951e8b9d3bf785d178ff19ff1432cc14b33808b86c1c39ac9c19c62fad10f41e9ec8ff95f556e4bf127e40627cb7fecde215197b1243fdca58c3ae8542cd874fb542e9f746ca7490edaccdd91bf8f4bff7bf6a7bc40fd28a67364db47f164ba8784e825baaac670ffed2ad9c5d56ae6f9a1cac9c43d28ca3b9fe28bb7465a4767ffa432092ca77985bafdd0a2f5bce2b6472a10a2b0f3cdcb60b14233256547d826b53b010682af15d0e29ce6b5dc0242533fd8f2831fec31d9dcb1f6e67e3eed94ad225c29dc040c00bd0170450062348f259d22c13bd80a59dffa8900e33af85c1012652478e18f0e64815204fd417c4bd0071d79b5e9baf904e20f436e8dfa9dc4af7b2f0f06dd6901fedfb275664190bde61df2c7b7849f0ef697646296fe42a684416bfe2be846ff4449b5cf8ad658f1804d90195c10324cfb071c764ff61355c64e759d1a4e9d631a6d78a760a139737763203600145505bf1a7f04ba4106014fb9104b57a8b44dadfa4a7bc1ded25dc9c252594da3a5f52fd364f29a088e9f451502be292785c15de7a651e3ae2a050e0539c5981c2d3406d5a0331ed451d7988b643bc658d258b4f47506bd02d6fd2e0775bcfa91b368bf51207ebd2d63180cb0f02d5b9f6be1b02aa1a962e41c8f26e2ce9dc15b131b9dd4e547fce08e99b2eb1e56d14e19f697bcec0710c7c60e28b5d9af87d9be14614f7b6c733c2aff9c7fba1f36503ad092daf2607896b06ab01fb6d1a4e4961b9374353ef340b4a65a2feac0792efccb67f2749cd73a60beb76cd304b2cd3e80832835d0cb1debaef54f8a3965a47f0993646ecdeb48cf792ae30a0896e1a1eddaf4f09332c1f352ce4347a8faff316f16850cb0367539f39a022bb34a029b12ef8b6712abb4565570a1d172c2bb
d4b242f818b5af1dd46eaf106009a512b53c6b945b6acba91f1d8fbeaf224dbc904172c3e3bdc4fa648e1a240dcf2a1213529d6be1cad52bba9f74f5515ab08d1158cc3d2e6e6c9ca9a089a223335632c79a62c4977c417c5a48d1f63d6a0245856666571d55f03cbed3d07d6be645b595092b8d7acf7cbfd00889a5427fd546d19f44f4e1d6348670d91fa02e4ccd885f5cd87308c190bceba0642d7fbc975ff0ff58cf78a26133488bc538ff6cad84ebbfdb39997a79a0d99eba01310f9020803132216dd8c4fef0e8307cf10e309d5399dc2bee5d2845cdfd30320b212a214f8d3a33d14f42ef143cd33aec5a41d32d589b0ba5b6d8fc512ca40611e5dafc23ee47d111b6008ca94697177a14e3f0e66ab41f2f94c2e37a3e41717c7ebca9318d26a30d136bfe5da7ff73a7fa637f88d0787968986875d7c5d0d4da839ea1990c1cac315a187c3d3843ea9504a4d4f6a6b5da7cfc3b61b3ee9984bbb9789728e94c3663e2bf5331bd7f703d6f40f424e18d8adc839d2b121f7b4b4d40f0e47ac4b808b1e7e45c0204c2fdb2da3be8b59dad1224aab78ad447d52823c386f976d716dc6c6ca3f3e7e41746afe8e9b01946446b6e2eec7ba94db910febfe1e7fa52ffd6390e7f9c5eb173a4ba590f593df45651dde0ad68e535d8a23c46e3f6a7e855c0fc5d2190b57c9ddd7843eb093e5ff98f052b3b81808d803e9a88d9e5fa48847a3c3d18894ce49637bdf211866f2c71116384c40c82236cf84f82f213bcd5f4df22fb0f5087ebb7d344d33bf3087939388b8ab9ce39e4b6766ee84ae7c812e030bc16bbe58aa5fa7837f36626ef47b1b13872194d585381f3b17b488e3a0fdee45f5f113ada681f9913fa2bca3d70a0e7cedbe8c5dca828f116e9d4d2e7fe9cb25fe2fcda8322869afe254eb254b869d4819688782076bffc273eb9ca69a8b357a0be9682b06f959530a848989722c8a9f2c79d8f07ed80a76a3ca557280b830432de6571ff342b6b3a7f644aa7ab96733de40e5989774fe2e0bbf9370e58d4c6abedd5284b6c9a8f140459f3b5289678947c69769fdb20295a80cf26fce7e8c5b245cf139365aa1b8068f50c54fa1359710bde46fd74efd941ff4ef66345f117b0bed8346dc09dc17a6a64093a686736d4c4ba9547503826011b8fbe3a2cfc7ac3b51bd6a575eebd016edc987e49d435d4c2db9750dde190cfe44e96daa99a58c0c6c3cf24debafaed0610c5e6b5ff575c1c5f711ed8e3e3ba9b260b2febabccdc9f54e6d0f81c8b93fa036aa6cd9eb796ffd49c1a9ca5563bf99f01e90dd8cb3e365fe57131e7bfa15e52fe3f60c1cce049690de1ac72f45d1849ebd53420cb136b071e4ef647eb4b9ab528ad0f9f7c776f3fe42c570bc533e9f79cc793bf4149a78ddf
0f8644bab7724b3ff55b884c4aba7aa6bd601269109fabf9d582e48691530d09f34de8658d8dd12b09755cd0a53886fa6d919cf81f77f52b5b0b9fbaf5d03d6a4266cd695984935e852b7a70cf2565496b84d3372e539a8b068109d44ad090b33d057ca3643380d1cfcbf5b34925e368b91dcf0f5fd92e84e7daf14bb907e6e4909c4959e885e9ba5e769cc476ccd9bddff07446251f9ac93afbf664449d60c7c9b56d041fbe584245c4ec8c7cefaa11f7984049bdccfac10afc31799d781b91f7080d37d443819291db27ad7cb70241a7da327ce5e22d76184a4e08bea89246c5b723374c084da38764edf91346aed329eda99668a889349467f752567ee00a5542efbbe2e158744e4e49abefb078a15efdfa1897f43085da7e1295e17ea626789af9b83d13c23faae98c6607da3e521fcf7c36aefe7d9b947b8cd6fc5842c8de3ed200fae36555fc510d0af47ac08a5c06720884a4c8ab90139562dbe6359c1926b4d5c93403b5021b615245b7e68e47145c9172e3ac342bd54a17fcdac155cfd933b51d48e5f46bfa8b11bf8165586ed2ef43740e119efb1e31ff35e828469456b8ee8a9171d8f550785312c3441588a9450b2832e08d803c13466e342a435a862a150c6dc29e0104f012bed29717adcd3c4992256bababd43c4e3991f7c5725dd1b2a486d2ccdcd6ad948f6f53da4ebcd66ce794f2abd5d363b40cf21607475c28680caff3be00ce94d4d9a2a1fb430cccf8154ea335feee4b89fa6839ea9125e97f068899de6f916004e229ae7f9b32b009af9398a83ea0912a27b379202750ce4f5209afb9da6331e6172a4c286fe0cba6e881758423c02a4bc99c363cab1ab9719fcbe7e37aef692c5ba828ae67a208bf5d5095c06be00e7b786da7d31f4ec72e8c69708c03a55c54a84e4b9bf706418629a62ea41a6c4ab7c858459ee01e940c9c99301d45a3c16b5c980fd751d65361bf64f20f9fa5cc207e998e46236a65d393b22d15ed8e388eb086104cecbc64b3aa15e025f0fdfafc889a3ab919923e28afc60ea724e405881d8096fbc26ec3d9eacc6e8e5b59b7bd10806e2b5a45af5059153df9718d2e85322809dbed51e92a096b82f27e8ff418400c314a9bed5e449de8c1b9493c4cadb7d7574c3a1a94bfa5c47750bad7c9d7dc47be8d683b892de0d9882d6a414f36bfec308742689333c6d07f88c8494a9fd52d0f5094c6ebb230b8ebc4cc9797e1d64a21a6db37130018abf696f28f30713fb7c3a55be8bd80cee89e9ed5295e804d2e48b10729b759d3cecee1d6d11b987b7d5678b6bdf5cb6113adcb7eec8af5362e45a1af5664bd85cc90d627e62f1f44ec932b74766c1edbadadc5de3fdf59c05bfbac44307ef94bae54846519b4987fb4c5725d593a5d84e635a9
12b5203b130482d897b8001a12a1fa4323c31bc30f83ce9caa3e5b6802130c69d633fe389c8c6e2d9b110b5869b54a9c9df7327d9f3b8fb46bd0f4c9bf299e5ee4b181ece08d6e978836aea653cbc22ced393d749e956ae2775e877dd87e9848c681e4af9c29f0ebc6152822318c8b32bdd3dd2388a0196fca2c6a176c37c645686ddd5e359db948bf1fe122958c68eca414f5a3c2d5ab4f896ce4db22d09cf540f6ce296726f5ec1e63203f79238fe75a7468ee51ffff67c4d103129a9d9c97e8dd8b8d0b52b6afdefbf1bde912f3cf42b7bae14dbb98d2208293bb0061192c12d525e1e84f0a83df6778c3f48d3c3bc0ceb68a374dc2c80028267c73fbddb09ff085ce5ec58a596f4058a3579ccc5af4e2717e1e6381d7cbc8accb65d85e1f787401086e11628b16e58f9141362dabbc566866d906d813632928ea551b39217239510ed37eb745e378f69fb0796b442ba11e8fe7ac3c0c72dfd737961a61ba36ba6c94e1873e00b8c3108a00ca6dc1b55ee524f6e0f17fa9ad7899050d1fd01134658749cda00ac9d2ffb147aa745e18dc677c36eeb1ae6b903071c3aaaed860ba4c06f706f7deec7eb6977de1f2d78b1df7efbae4acdec1ec35833f55321d4601995a15271f1b32c60662a428fbb3ae799d827136e0ff3496a6bc8251d55430631cfe500511787776894147330030fb47cd62b3cc73104d4b759ace3fd2cd2a936c3e65ff71aa2012bfaf2d7c47bb33d2885a6cf1b75504d4bd007fc59c947270c49fe53976cce349ef177c7d17d209abfb4b1cb7064cbbdb711e19f5194bd0402ef97e6e3210096b51fefc8985babbfb642d0c76373e1a23a8690662f5767d8c67e3794ed98cdfae16981aa5a008fd3fc8b41dec0642602d37576d01c2b87dce2eb5575143429ceaf6ad2fbdd709012a937280d35fb35e20ef67498ff72fcac92d25de3213944d550963c9696891285b439efe77376f2b9c8b5fac954998475745cbc76b3898f9eb09fe33be0b7619e6ef6379c41c0bc04fb0f15c988426fd51853be56025c50452791a6e3341fc5a558223d3e2aba49f5e3ceeedeffded3ed55e615118dba1fe14c4fa120a5f6ffb1dfe0794802a11b041b4d83fd90726e285cb771101e91b9dd180d42f30293e0df4f8952f1c5cda633136f1e30c803653dd90683f5dc722be491434fdd504dff1c917432e6e04065c1044b6b38d1d61b57f4eded135d7de22cce4eee11cd1e20e7f27536a75c291269e3c0a229a428a701de5d562f79c98bd87622beb7904f17119ec6ca8918ce4fca462efd6541cf982dd3a411f920068679b346efb363af976421b78dad8e2104a0e6b0cdb7e79daf967b66e68676044c36ee2e350f6f39f5120509e004ae7cd96542fef78aaafb64ddae778f8117a19
459f6e638a969c3e166d8ce1bbab439a834621dc41f1f0c4e9fef18cb6d2bef30852a499277ff3fea4c5f79bfd894354d567c17b38e2e1db4874cc61e28ec951a92567d3eda5a7e299fb84edb235b9785e066f2ae4d483794dff059f9eab82433676d8db696ca98a849d61271c2eeebe6bea3410723ab20c550b62e6d7405523763832d5015bac29e950cb0b96809b41729537b627496f10cdeea00fadfab49d15e4843cd6512e2abbcca9e2abb631306080cf3121efe2ba87fb9972bc28965e59cfd9d34e3b9b275b43e793524daa5774360881a31f029181ea4a1d6788a2c1452898c89789b46ec6a8beab6d9aac3193c75a1b6f25bf5a6dfbc80e650840aec9521c6e739094e0e4398cf377897bc14d865bb0ffec2e7d67cb0c504a38c5cb98d2c39a8303b5b13eb7290c8bdf7d78d59bc1e4a2918eee0a5f4c28c1b567aa8d9f2fc7257f94148266465971e0946e55cf8f78b9c49fe2ff6dbb837d93ff6457d41f1af321c8b513173a91c6624eada68e8b91035e47133f91eed223fd86564acb10f1718adf5bbc81cce6cb2d7acd4f3c1b2f334b7bdda2a289dfe1008f6e702dbcf3fdb46d39d3d71e3f10bd2be6d15bf30f15da1f49e98191ed705e321c2e428e8cdfbe2f6ea9a714c2544c7b19f61e8e54af468318a3653a2b5d4e770" + ); + + let decompressed = + decompress_brotli(&raw_batch, MAX_RLP_BYTES_PER_CHANNEL_FJORD as usize).unwrap(); + assert_eq!(decompressed, raw_batch_decompressed); + } +} diff --git a/rust/kona/crates/protocol/protocol/src/channel.rs b/rust/kona/crates/protocol/protocol/src/channel.rs new file mode 100644 index 00000000000..65e53b1e7aa --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/channel.rs @@ -0,0 +1,343 @@ +//! Channel Types + +use alloc::vec::Vec; +use alloy_primitives::{Bytes, map::HashMap}; + +use crate::{BlockInfo, Frame}; + +/// [`CHANNEL_ID_LENGTH`] is the length of the channel ID. +pub const CHANNEL_ID_LENGTH: usize = 16; + +/// [`ChannelId`] is an opaque identifier for a channel. +pub type ChannelId = [u8; CHANNEL_ID_LENGTH]; + +/// [`MAX_RLP_BYTES_PER_CHANNEL`] is the maximum amount of bytes that will be read from +/// a channel. This limit is set when decoding the RLP. 
+pub const MAX_RLP_BYTES_PER_CHANNEL: u64 = 10_000_000; + +/// [`FJORD_MAX_RLP_BYTES_PER_CHANNEL`] is the maximum amount of bytes that will be read from +/// a channel when the Fjord Hardfork is activated. This limit is set when decoding the RLP. +pub const FJORD_MAX_RLP_BYTES_PER_CHANNEL: u64 = 100_000_000; + +/// An error returned when adding a frame to a channel. +#[derive(Debug, thiserror::Error, Clone, Copy, PartialEq, Eq, Hash)] +pub enum ChannelError { + /// The frame id does not match the channel id. + #[error("Frame id does not match channel id")] + FrameIdMismatch, + /// The channel is closed. + #[error("Channel is closed")] + ChannelClosed, + /// The frame number is already in the channel. + #[error("Frame number {0} already exists")] + FrameNumberExists(usize), + /// The frame number is beyond the end frame. + #[error("Frame number {0} is beyond end frame")] + FrameBeyondEndFrame(usize), +} + +/// A Channel is a set of batches that are split into at least one, but possibly multiple frames. +/// +/// Frames are allowed to be ingested out of order. +/// Each frame is ingested one by one. 
Once a frame with `closed` is added to the channel, the +/// channel may mark itself as ready for reading once all intervening frames have been added +#[derive(Debug, Clone, Default)] +pub struct Channel { + /// The unique identifier for this channel + pub id: ChannelId, + /// The block that the channel is currently open at + pub open_block: BlockInfo, + /// Estimated memory size, used to drop the channel if we have too much data + pub estimated_size: usize, + /// True if the last frame has been buffered + pub closed: bool, + /// The highest frame number that has been ingested + pub highest_frame_number: u16, + /// The frame number of the frame where `is_last` is true + /// No other frame number may be higher than this + pub last_frame_number: u16, + /// Store a map of frame number to frame for constant time ordering + pub inputs: HashMap, + /// The highest L1 inclusion block that a frame was included in + pub highest_l1_inclusion_block: BlockInfo, +} + +impl Channel { + /// Create a new [`Channel`] with the given [`ChannelId`] and [`BlockInfo`]. + pub fn new(id: ChannelId, open_block: BlockInfo) -> Self { + Self { id, open_block, inputs: HashMap::default(), ..Default::default() } + } + + /// Returns the current [`ChannelId`] for the channel. + pub const fn id(&self) -> ChannelId { + self.id + } + + /// Returns the number of frames ingested. + pub fn len(&self) -> usize { + self.inputs.len() + } + + /// Returns if the channel is empty. + pub fn is_empty(&self) -> bool { + self.inputs.is_empty() + } + + /// Add a frame to the channel. + /// + /// ## Takes + /// - `frame`: The frame to add to the channel + /// - `l1_inclusion_block`: The block that the frame was included in + /// + /// ## Returns + /// - `Ok(()):` If the frame was successfully buffered + /// - `Err(_):` If the frame was invalid + pub fn add_frame( + &mut self, + frame: Frame, + l1_inclusion_block: BlockInfo, + ) -> Result<(), ChannelError> { + // Ensure that the frame ID is equal to the channel ID. 
+ if frame.id != self.id { + return Err(ChannelError::FrameIdMismatch); + } + if frame.is_last && self.closed { + return Err(ChannelError::ChannelClosed); + } + if self.inputs.contains_key(&frame.number) { + return Err(ChannelError::FrameNumberExists(frame.number as usize)); + } + if self.closed && frame.number >= self.last_frame_number { + return Err(ChannelError::FrameBeyondEndFrame(frame.number as usize)); + } + + // Guaranteed to succeed at this point. Update the channel state. + if frame.is_last { + self.last_frame_number = frame.number; + self.closed = true; + + // Prune frames with a higher number than the last frame number when we receive a + // closing frame. + if self.last_frame_number < self.highest_frame_number { + self.inputs.retain(|id, frame| { + self.estimated_size -= frame.size(); + *id < self.last_frame_number + }); + self.highest_frame_number = self.last_frame_number; + } + } + + // Update the highest frame number. + if frame.number > self.highest_frame_number { + self.highest_frame_number = frame.number; + } + + if self.highest_l1_inclusion_block.number < l1_inclusion_block.number { + self.highest_l1_inclusion_block = l1_inclusion_block; + } + + self.estimated_size += frame.size(); + self.inputs.insert(frame.number, frame); + Ok(()) + } + + /// Returns the block number of the L1 block that contained the first [`Frame`] in this channel. + pub const fn open_block_number(&self) -> u64 { + self.open_block.number + } + + /// Returns the estimated size of the channel including [`Frame`] overhead. + pub const fn size(&self) -> usize { + self.estimated_size + } + + /// Returns `true` if the channel is ready to be read. + pub fn is_ready(&self) -> bool { + // Must have buffered the last frame before the channel is ready. + if !self.closed { + return false; + } + + // Must have the possibility of contiguous frames. + if self.inputs.len() != (self.last_frame_number + 1) as usize { + return false; + } + + // Check for contiguous frames. 
+ for i in 0..=self.last_frame_number { + if !self.inputs.contains_key(&i) { + return false; + } + } + + true + } + + /// Returns all of the channel's [`Frame`]s concatenated together. + /// + /// ## Returns + /// + /// - `Some(Bytes)`: The concatenated frame data + /// - `None`: If the channel is missing frames + pub fn frame_data(&self) -> Option { + if self.is_empty() { + return None; + } + let mut data = Vec::with_capacity(self.size()); + (0..=self.last_frame_number).try_for_each(|i| { + let frame = self.inputs.get(&i)?; + data.extend_from_slice(&frame.data); + Some(()) + })?; + Some(data.into()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use alloc::{ + string::{String, ToString}, + vec, + }; + + struct FrameValidityTestCase { + #[allow(dead_code)] + name: String, + frames: Vec, + should_error: Vec, + sizes: Vec, + frame_data: Option, + } + + fn run_frame_validity_test(test_case: FrameValidityTestCase) { + // #[cfg(feature = "std")] + // println!("Running test: {}", test_case.name); + + let id = [0xFF; 16]; + let block = BlockInfo::default(); + let mut channel = Channel::new(id, block); + + if test_case.frames.len() != test_case.should_error.len() || + test_case.frames.len() != test_case.sizes.len() + { + panic!("Test case length mismatch"); + } + + for (i, frame) in test_case.frames.iter().enumerate() { + let result = channel.add_frame(frame.clone(), block); + if test_case.should_error[i] { + assert!(result.is_err()); + } else { + assert!(result.is_ok()); + } + assert_eq!(channel.size(), test_case.sizes[i] as usize); + } + + if let Some(test_frame_data) = test_case.frame_data { + assert_eq!(channel.frame_data().unwrap(), test_frame_data); + } + } + + #[test] + fn test_channel_accessors() { + let id = [0xFF; 16]; + let block = BlockInfo { number: 42, timestamp: 0, ..Default::default() }; + let channel = Channel::new(id, block); + + assert_eq!(channel.id(), id); + assert_eq!(channel.open_block_number(), block.number); + assert_eq!(channel.size(), 0); + 
assert_eq!(channel.len(), 0); + assert!(channel.is_empty()); + assert!(!channel.is_ready()); + } + + #[test] + fn test_frame_validity() { + let id = [0xFF; 16]; + let test_cases = [ + FrameValidityTestCase { + name: "wrong channel".to_string(), + frames: vec![Frame { id: [0xEE; 16], ..Default::default() }], + should_error: vec![true], + sizes: vec![0], + frame_data: None, + }, + FrameValidityTestCase { + name: "double close".to_string(), + frames: vec![ + Frame { id, is_last: true, number: 2, data: b"four".to_vec() }, + Frame { id, is_last: true, number: 1, ..Default::default() }, + ], + should_error: vec![false, true], + sizes: vec![204, 204], + frame_data: None, + }, + FrameValidityTestCase { + name: "duplicate frame".to_string(), + frames: vec![ + Frame { id, number: 2, data: b"four".to_vec(), ..Default::default() }, + Frame { id, number: 2, data: b"seven".to_vec(), ..Default::default() }, + ], + should_error: vec![false, true], + sizes: vec![204, 204], + frame_data: None, + }, + FrameValidityTestCase { + name: "duplicate closing frames".to_string(), + frames: vec![ + Frame { id, number: 2, is_last: true, data: b"four".to_vec() }, + Frame { id, number: 2, is_last: true, data: b"seven".to_vec() }, + ], + should_error: vec![false, true], + sizes: vec![204, 204], + frame_data: None, + }, + FrameValidityTestCase { + name: "frame past closing".to_string(), + frames: vec![ + Frame { id, number: 2, is_last: true, data: b"four".to_vec() }, + Frame { id, number: 10, data: b"seven".to_vec(), ..Default::default() }, + ], + should_error: vec![false, true], + sizes: vec![204, 204], + frame_data: None, + }, + FrameValidityTestCase { + name: "prune after close frame".to_string(), + frames: vec![ + Frame { id, number: 0, is_last: false, data: b"seven".to_vec() }, + Frame { id, number: 1, is_last: true, data: b"four".to_vec() }, + ], + should_error: vec![false, false], + sizes: vec![205, 409], + frame_data: Some(b"sevenfour".to_vec().into()), + }, + FrameValidityTestCase { + 
name: "multiple valid frames, no data".to_string(), + frames: vec![ + Frame { id, number: 1, data: b"seven__".to_vec(), ..Default::default() }, + Frame { id, number: 2, data: b"four".to_vec(), ..Default::default() }, + ], + should_error: vec![false, false], + sizes: vec![207, 411], + // Notice: this is none because there is no frame at index 0, + // which causes the frame_data to short-circuit to None. + frame_data: None, + }, + FrameValidityTestCase { + name: "multiple valid frames".to_string(), + frames: vec![ + Frame { id, number: 0, data: b"seven__".to_vec(), ..Default::default() }, + Frame { id, number: 1, data: b"four".to_vec(), ..Default::default() }, + ], + should_error: vec![false, false], + sizes: vec![207, 411], + frame_data: Some(b"seven__".to_vec().into()), + }, + ]; + + test_cases.into_iter().for_each(run_frame_validity_test); + } +} diff --git a/kona/crates/protocol/protocol/src/deposits.rs b/rust/kona/crates/protocol/protocol/src/deposits.rs similarity index 99% rename from kona/crates/protocol/protocol/src/deposits.rs rename to rust/kona/crates/protocol/protocol/src/deposits.rs index a1485cd3265..7ed34b53112 100644 --- a/kona/crates/protocol/protocol/src/deposits.rs +++ b/rust/kona/crates/protocol/protocol/src/deposits.rs @@ -172,7 +172,7 @@ pub fn decode_deposit(block_hash: B256, index: usize, log: &Log) -> Result u64; - /// The address of the batch new_from_l1_base_feesubmitter + /// The address of the batch `new_from_l1_base_feesubmitter` fn batcher_address(&self) -> Address; } diff --git a/rust/kona/crates/protocol/protocol/src/info/ecotone.rs b/rust/kona/crates/protocol/protocol/src/info/ecotone.rs new file mode 100644 index 00000000000..e578a28896f --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/info/ecotone.rs @@ -0,0 +1,256 @@ +//! Contains ecotone-specific L1 block info types. 
+ +use crate::{ + DecodeError, + info::{ + L1BlockInfoEcotoneBaseFields, + bedrock_base::{ + L1BlockInfoBedrockBaseFields, ambassador_impl_L1BlockInfoBedrockBaseFields, + }, + ecotone_base::{L1BlockInfoEcotoneBase, ambassador_impl_L1BlockInfoEcotoneBaseFields}, + }, +}; +use alloc::vec::Vec; +use alloy_primitives::{Address, B256, Bytes, U256}; +use ambassador::Delegate; + +/// Represents the fields within an Ecotone L1 block info transaction. +/// +/// Ecotone Binary Format +/// +---------+--------------------------+ +/// | Bytes | Field | +/// +---------+--------------------------+ +/// | 4 | Function signature | +/// | 4 | `BaseFeeScalar` | +/// | 4 | `BlobBaseFeeScalar` | +/// | 8 | `SequenceNumber` | +/// | 8 | Timestamp | +/// | 8 | `L1BlockNumber` | +/// | 32 | `BaseFee` | +/// | 32 | `BlobBaseFee` | +/// | 32 | `BlockHash` | +/// | 32 | `BatcherHash` | +/// +---------+--------------------------+ +#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] +#[allow(clippy::duplicated_attributes)] +#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] +#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct L1BlockInfoEcotone { + #[cfg_attr(feature = "serde", serde(flatten))] + base: L1BlockInfoEcotoneBase, + /// Indicates that the scalars are empty. + /// This is an edge case where the first block in ecotone has no scalars, + /// so the bedrock tx l1 cost function needs to be used. + pub empty_scalars: bool, + /// The l1 fee overhead used along with the `empty_scalars` field for the + /// bedrock tx l1 cost function. + /// + /// This field is deprecated in the Ecotone Hardfork. + pub l1_fee_overhead: U256, +} + +/// Accessors to fields deprecated in later Isthmus. +pub trait L1BlockInfoEcotoneOnlyFields { + /// Indicates that the scalars are empty. 
+ /// This is an edge case where the first block in ecotone has no scalars, + /// so the bedrock tx l1 cost function needs to be used. + fn empty_scalars(&self) -> bool; + + /// The l1 fee overhead used along with the `empty_scalars` field for the + /// bedrock tx l1 cost function. + /// + /// This field is deprecated in the Ecotone Hardfork. + fn l1_fee_overhead(&self) -> U256; +} + +impl L1BlockInfoEcotoneOnlyFields for L1BlockInfoEcotone { + fn empty_scalars(&self) -> bool { + self.empty_scalars + } + + fn l1_fee_overhead(&self) -> U256 { + self.l1_fee_overhead + } +} + +/// Accessors for all Ecotone fields. +pub trait L1BlockInfoEcotoneFields: + L1BlockInfoBedrockBaseFields + L1BlockInfoEcotoneOnlyFields +{ +} + +impl L1BlockInfoEcotoneFields for L1BlockInfoEcotone {} + +impl L1BlockInfoEcotone { + /// The type byte identifier for the L1 scalar format in Ecotone. + pub const L1_SCALAR: u8 = 1; + + /// The length of an L1 info transaction in Ecotone. + pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5; + + /// The 4 byte selector of "`setL1BlockValuesEcotone()`" + pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x44, 0x0a, 0x5e, 0x20]; + + /// Encodes the [`L1BlockInfoEcotone`] object into Ethereum transaction calldata. + pub fn encode_calldata(&self) -> Bytes { + let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); + self.encode_ecotone_header(&mut buf); + self.base.encode_calldata_body(&mut buf); + // Notice: do not include the `empty_scalars` field in the calldata. + // Notice: do not include the `l1_fee_overhead` field in the calldata. + buf.into() + } + + /// Encodes the header part of the [`L1BlockInfoEcotone`] object. + pub fn encode_ecotone_header(&self, buf: &mut Vec) { + buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()) + } + + /// Decodes the [`L1BlockInfoEcotone`] object from ethereum transaction calldata. 
+ pub fn decode_calldata(r: &[u8]) -> Result { + if r.len() != Self::L1_INFO_TX_LEN { + return Err(DecodeError::InvalidEcotoneLength(Self::L1_INFO_TX_LEN, r.len())); + } + // SAFETY: For all below slice operations, the full + // length is validated above to be `164`. + let base = L1BlockInfoEcotoneBase::decode_calldata_body(r); + + Ok(Self::new( + base.number(), + base.time(), + base.base_fee(), + base.block_hash(), + base.sequence_number(), + base.batcher_address(), + base.blob_base_fee, + base.blob_base_fee_scalar, + base.base_fee_scalar, + // Notice: the `empty_scalars` field is not included in the calldata. + // This is used by the evm to indicate that the bedrock tx l1 cost function + // needs to be used. + false, + // Notice: the `l1_fee_overhead` field is not included in the calldata. + U256::ZERO, + )) + } + + /// Construct from all values. + #[allow(clippy::too_many_arguments)] + pub(crate) const fn new( + number: u64, + time: u64, + base_fee: u64, + block_hash: B256, + sequence_number: u64, + batcher_address: Address, + blob_base_fee: u128, + blob_base_fee_scalar: u32, + base_fee_scalar: u32, + empty_scalars: bool, + l1_fee_overhead: U256, + ) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new( + number, + time, + base_fee, + block_hash, + sequence_number, + batcher_address, + blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + ), + empty_scalars, + l1_fee_overhead, + } + } + /// Construct from default values and `base_fee`. + pub fn new_from_base_fee(base_fee: u64) -> Self { + Self { base: L1BlockInfoEcotoneBase::new_from_base_fee(base_fee), ..Default::default() } + } + /// Construct from default values and `block_hash`. + pub fn new_from_block_hash(block_hash: B256) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_block_hash(block_hash); + Self { base, ..Default::default() } + } + /// Construct from default values and `sequence_number`. 
+ pub fn new_from_sequence_number(sequence_number: u64) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new_from_sequence_number(sequence_number), + ..Default::default() + } + } + /// Construct from default values and `batcher_address`. + pub fn new_from_batcher_address(batcher_address: Address) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new_from_batcher_address(batcher_address), + ..Default::default() + } + } + /// Construct from default values and `blob_base_fee`. + pub fn new_from_blob_base_fee(blob_base_fee: u128) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee(blob_base_fee); + Self { base, ..Default::default() } + } + /// Construct from default values and `blob_base_fee_scalar`. + pub fn new_from_blob_base_fee_scalar(base_fee_scalar: u32) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee_scalar(base_fee_scalar); + Self { base, ..Default::default() } + } + /// Construct from default values and `base_fee_scalar`. + pub fn new_from_base_fee_scalar(base_fee: u32) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_base_fee_scalar(base_fee); + Self { base, ..Default::default() } + } + /// Construct from default values and `l1_fee_overhead`. + pub fn new_from_l1_fee_overhead(l1_fee_overhead: U256) -> Self { + Self { l1_fee_overhead, ..Default::default() } + } + /// Construct from default values and `empty_scalars`. + pub fn new_from_empty_scalars(empty_scalars: bool) -> Self { + Self { empty_scalars, ..Default::default() } + } + /// Construct from default values, `number` and `block_hash`. 
+ pub fn new_from_number_and_block_hash(number: u64, block_hash: B256) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_number_and_block_hash(number, block_hash); + Self { base, ..Default::default() } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + + #[test] + fn test_decode_calldata_ecotone_invalid_length() { + let r = vec![0u8; 1]; + assert_eq!( + L1BlockInfoEcotone::decode_calldata(&r), + Err(DecodeError::InvalidEcotoneLength(L1BlockInfoEcotone::L1_INFO_TX_LEN, r.len(),)) + ); + } + + #[test] + fn test_l1_block_info_ecotone_roundtrip_calldata_encoding() { + let info = L1BlockInfoEcotone::new( + 1, + 2, + 3, + B256::from([4u8; 32]), + 5, + Address::from([6u8; 20]), + 7, + 8, + 9, + false, + U256::ZERO, + ); + + let calldata = info.encode_calldata(); + let decoded_info = L1BlockInfoEcotone::decode_calldata(&calldata).unwrap(); + assert_eq!(info, decoded_info); + } +} diff --git a/kona/crates/protocol/protocol/src/info/ecotone_base.rs b/rust/kona/crates/protocol/protocol/src/info/ecotone_base.rs similarity index 100% rename from kona/crates/protocol/protocol/src/info/ecotone_base.rs rename to rust/kona/crates/protocol/protocol/src/info/ecotone_base.rs diff --git a/kona/crates/protocol/protocol/src/info/errors.rs b/rust/kona/crates/protocol/protocol/src/info/errors.rs similarity index 100% rename from kona/crates/protocol/protocol/src/info/errors.rs rename to rust/kona/crates/protocol/protocol/src/info/errors.rs diff --git a/rust/kona/crates/protocol/protocol/src/info/isthmus.rs b/rust/kona/crates/protocol/protocol/src/info/isthmus.rs new file mode 100644 index 00000000000..3d34d067391 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/info/isthmus.rs @@ -0,0 +1,265 @@ +//! Isthmus L1 Block Info transaction types. 
+ +use crate::info::{ + bedrock_base::ambassador_impl_L1BlockInfoBedrockBaseFields, + ecotone_base::ambassador_impl_L1BlockInfoEcotoneBaseFields, +}; +use alloc::vec::Vec; +use alloy_primitives::{Address, B256, Bytes}; +use ambassador::{Delegate, delegatable_trait}; + +use crate::{ + DecodeError, + info::{ + bedrock_base::L1BlockInfoBedrockBaseFields, + ecotone_base::{L1BlockInfoEcotoneBase, L1BlockInfoEcotoneBaseFields}, + }, +}; + +/// Represents the fields within an Isthmus L1 block info transaction. +/// +/// Isthmus Binary Format +/// +---------+--------------------------+ +/// | Bytes | Field | +/// +---------+--------------------------+ +/// | 4 | Function signature | +/// | 4 | `BaseFeeScalar` | +/// | 4 | `BlobBaseFeeScalar` | +/// | 8 | `SequenceNumber` | +/// | 8 | Timestamp | +/// | 8 | `L1BlockNumber` | +/// | 32 | `BaseFee` | +/// | 32 | `BlobBaseFee` | +/// | 32 | `BlockHash` | +/// | 32 | `BatcherHash` | +/// | 4 | `OperatorFeeScalar` | +/// | 8 | `OperatorFeeConstant` | +/// +---------+--------------------------+ +#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] +#[allow(clippy::duplicated_attributes)] +#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] +#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct L1BlockInfoIsthmus { + #[cfg_attr(feature = "serde", serde(flatten))] + base: L1BlockInfoEcotoneBase, + /// The operator fee scalar + pub operator_fee_scalar: u32, + /// The operator fee constant + pub operator_fee_constant: u64, +} + +/// Accessors for fields in Isthmus and later. 
+#[delegatable_trait] +pub trait L1BlockInfoIsthmusBaseFields: L1BlockInfoEcotoneBaseFields { + /// The operator fee scalar + fn operator_fee_scalar(&self) -> u32; + /// The operator fee constant + fn operator_fee_constant(&self) -> u64; +} + +impl L1BlockInfoIsthmusBaseFields for L1BlockInfoIsthmus { + /// The operator fee scalar + fn operator_fee_scalar(&self) -> u32 { + self.operator_fee_scalar + } + /// The operator fee constant + fn operator_fee_constant(&self) -> u64 { + self.operator_fee_constant + } +} + +/// Accessors for all Isthmus fields. +pub trait L1BlockInfoIsthmusFields: + L1BlockInfoEcotoneBaseFields + L1BlockInfoIsthmusBaseFields +{ +} + +impl L1BlockInfoIsthmusFields for L1BlockInfoIsthmus {} + +impl L1BlockInfoIsthmus { + /// The type byte identifier for the L1 scalar format in Isthmus. + pub const L1_SCALAR: u8 = 2; + + /// The length of an L1 info transaction in Isthmus. + pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5 + 4 + 8; + + /// The 4 byte selector of "`setL1BlockValuesIsthmus()`" + pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x09, 0x89, 0x99, 0xbe]; + + /// Encodes the [`L1BlockInfoIsthmus`] object into Ethereum transaction calldata. + pub fn encode_calldata(&self) -> Bytes { + let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); + self.encode_calldata_header(&mut buf); + self.encode_calldata_body(&mut buf); + buf.into() + } + + /// Encodes the header of the [`L1BlockInfoIsthmus`] object. + pub fn encode_calldata_header(&self, buf: &mut Vec) { + buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()); + } + + /// Encodes the base of the [`L1BlockInfoIsthmus`] object. + pub fn encode_calldata_body(&self, buf: &mut Vec) { + self.base.encode_calldata_body(buf); + + // Encode Isthmus-specific fields + buf.extend_from_slice(self.operator_fee_scalar.to_be_bytes().as_ref()); + buf.extend_from_slice(self.operator_fee_constant.to_be_bytes().as_ref()); + } + + /// Decodes the [`L1BlockInfoIsthmus`] object from ethereum transaction calldata. 
+ pub fn decode_calldata(r: &[u8]) -> Result { + if r.len() != Self::L1_INFO_TX_LEN { + return Err(DecodeError::InvalidIsthmusLength(Self::L1_INFO_TX_LEN, r.len())); + } + // SAFETY: For all below slice operations, the full + // length is validated above to be `176`. + Self::decode_calldata_body(r) + } + + /// Decodes the body of the [`L1BlockInfoIsthmus`] object. + pub fn decode_calldata_body(r: &[u8]) -> Result { + let base = L1BlockInfoEcotoneBase::decode_calldata_body(r); + + // Decode Isthmus-specific fields + // SAFETY: 4 bytes are copied directly into the array + let mut operator_fee_scalar = [0u8; 4]; + operator_fee_scalar.copy_from_slice(&r[164..168]); + let operator_fee_scalar = u32::from_be_bytes(operator_fee_scalar); + + // SAFETY: 8 bytes are copied directly into the array + let mut operator_fee_constant = [0u8; 8]; + operator_fee_constant.copy_from_slice(&r[168..176]); + let operator_fee_constant = u64::from_be_bytes(operator_fee_constant); + + Ok(Self::new( + base.number(), + base.time(), + base.base_fee(), + base.block_hash(), + base.sequence_number(), + base.batcher_address(), + base.blob_base_fee(), + base.blob_base_fee_scalar(), + base.base_fee_scalar(), + operator_fee_scalar, + operator_fee_constant, + )) + } + /// Construct from all values. + #[allow(clippy::too_many_arguments)] + pub const fn new( + number: u64, + time: u64, + base_fee: u64, + block_hash: alloy_primitives::FixedBytes<32>, + sequence_number: u64, + batcher_address: Address, + blob_base_fee: u128, + blob_base_fee_scalar: u32, + base_fee_scalar: u32, + operator_fee_scalar: u32, + operator_fee_constant: u64, + ) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new( + number, + time, + base_fee, + block_hash, + sequence_number, + batcher_address, + blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + ), + operator_fee_scalar, + operator_fee_constant, + } + } + /// Construct from default values and `base_fee`. 
+ pub fn new_from_base_fee(base_fee: u64) -> Self { + Self { base: L1BlockInfoEcotoneBase::new_from_base_fee(base_fee), ..Default::default() } + } + /// Construct from default values and `sequence_number`. + pub fn new_from_sequence_number(sequence_number: u64) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new_from_sequence_number(sequence_number), + ..Default::default() + } + } + /// Construct from default values and `batcher_address`. + pub fn new_from_batcher_address(batcher_address: Address) -> Self { + Self { + base: L1BlockInfoEcotoneBase::new_from_batcher_address(batcher_address), + ..Default::default() + } + } + /// Construct from default values and `base_fee_scalar`. + pub fn new_from_base_fee_scalar(base_fee: u32) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_base_fee_scalar(base_fee); + Self { base, ..Default::default() } + } + /// Construct from default values and `blob_base_fee`. + pub fn new_from_blob_base_fee(blob_base_fee: u128) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee(blob_base_fee); + Self { base, ..Default::default() } + } + /// Construct from default values and `blob_base_fee_scalar`. + pub fn new_from_blob_base_fee_scalar(base_fee_scalar: u32) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_blob_base_fee_scalar(base_fee_scalar); + Self { base, ..Default::default() } + } + /// Construct from default values and `operator_fee_scalar`. + pub fn new_from_operator_fee_scalar(operator_fee_scalar: u32) -> Self { + Self { operator_fee_scalar, ..Default::default() } + } + /// Construct from default values and `operator_fee_constant`. + pub fn new_from_operator_fee_constant(operator_fee_constant: u64) -> Self { + Self { operator_fee_constant, ..Default::default() } + } + /// Construct from default values, `number` and `block_hash`. 
+ pub fn new_from_number_and_block_hash(number: u64, block_hash: B256) -> Self { + let base = L1BlockInfoEcotoneBase::new_from_number_and_block_hash(number, block_hash); + Self { base, ..Default::default() } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + + #[test] + fn test_decode_calldata_isthmus_invalid_length() { + let r = vec![0u8; 1]; + assert_eq!( + L1BlockInfoIsthmus::decode_calldata(&r), + Err(DecodeError::InvalidIsthmusLength(L1BlockInfoIsthmus::L1_INFO_TX_LEN, r.len())) + ); + } + + #[test] + fn test_l1_block_info_isthmus_roundtrip_calldata_encoding() { + let info = L1BlockInfoIsthmus::new( + 1, + 2, + 3, + B256::from([4; 32]), + 5, + Address::from_slice(&[6; 20]), + 7, + 8, + 9, + 10, + 11, + ); + + let calldata = info.encode_calldata(); + let decoded_info = L1BlockInfoIsthmus::decode_calldata(&calldata).unwrap(); + + assert_eq!(info, decoded_info); + } +} diff --git a/rust/kona/crates/protocol/protocol/src/info/jovian.rs b/rust/kona/crates/protocol/protocol/src/info/jovian.rs new file mode 100644 index 00000000000..0375b506c7b --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/info/jovian.rs @@ -0,0 +1,225 @@ +//! Jovian L1 Block Info transaction types. + +use crate::{ + DecodeError, L1BlockInfoIsthmus, + info::{ + L1BlockInfoBedrockBaseFields, L1BlockInfoEcotoneBaseFields, + bedrock_base::ambassador_impl_L1BlockInfoBedrockBaseFields, + ecotone_base::ambassador_impl_L1BlockInfoEcotoneBaseFields, + isthmus::{L1BlockInfoIsthmusBaseFields, ambassador_impl_L1BlockInfoIsthmusBaseFields}, + }, +}; +use alloc::vec::Vec; +use alloy_primitives::{Address, B256, Bytes}; +use ambassador::{self, Delegate}; + +/// Represents the fields within an Jovian L1 block info transaction. 
+/// +/// Jovian Binary Format +/// +---------+--------------------------+ +/// | Bytes | Field | +/// +---------+--------------------------+ +/// | 4 | Function signature | +/// | 4 | `BaseFeeScalar` | +/// | 4 | `BlobBaseFeeScalar` | +/// | 8 | `SequenceNumber` | +/// | 8 | Timestamp | +/// | 8 | `L1BlockNumber` | +/// | 32 | `BaseFee` | +/// | 32 | `BlobBaseFee` | +/// | 32 | `BlockHash` | +/// | 32 | `BatcherHash` | +/// | 4 | `OperatorFeeScalar` | +/// | 8 | `OperatorFeeConstant` | +/// | 2 | `DAFootprintGasScalar` | +/// +---------+--------------------------+ +#[derive(Debug, Clone, Hash, Eq, PartialEq, Default, Copy, Delegate)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[allow(clippy::duplicated_attributes)] +#[delegate(L1BlockInfoBedrockBaseFields, target = "base")] +#[delegate(L1BlockInfoEcotoneBaseFields, target = "base")] +#[delegate(L1BlockInfoIsthmusBaseFields, target = "base")] +pub struct L1BlockInfoJovian { + /// Fields inherited from Isthmus. + #[cfg_attr(feature = "serde", serde(flatten))] + pub base: L1BlockInfoIsthmus, + /// The DA footprint gas scalar + pub da_footprint_gas_scalar: u16, +} +/// Accessors to fields available in Jovian and later. +pub trait L1BlockInfoJovianBaseFields: L1BlockInfoIsthmusBaseFields { + /// The DA footprint gas scalar + fn da_footprint_gas_scalar(&self) -> u16; +} + +impl L1BlockInfoJovianBaseFields for L1BlockInfoJovian { + fn da_footprint_gas_scalar(&self) -> u16 { + self.da_footprint_gas_scalar + } +} + +/// Accessors for all Jovian fields. +pub trait L1BlockInfoJovianFields: + L1BlockInfoIsthmusBaseFields + L1BlockInfoJovianBaseFields +{ +} + +impl L1BlockInfoJovianFields for L1BlockInfoJovian {} + +impl L1BlockInfoJovian { + /// The default DA footprint gas scalar + /// + pub const DEFAULT_DA_FOOTPRINT_GAS_SCALAR: u16 = 400; + + /// The type byte identifier for the L1 scalar format in Jovian. 
+ pub const L1_SCALAR: u8 = 2; + + /// The length of an L1 info transaction in Jovian. + pub const L1_INFO_TX_LEN: usize = 4 + 32 * 5 + 4 + 8 + 2; + + /// The 4 byte selector of "`setL1BlockValuesJovian()`" + /// Those are the first 4 calldata bytes -> `` + pub const L1_INFO_TX_SELECTOR: [u8; 4] = [0x3d, 0xb6, 0xbe, 0x2b]; + + /// Encodes the [`L1BlockInfoJovian`] object into Ethereum transaction calldata. + pub fn encode_calldata(&self) -> Bytes { + let mut buf = Vec::with_capacity(Self::L1_INFO_TX_LEN); + self.encode_calldata_header(&mut buf); + self.encode_calldata_body(&mut buf); + buf.into() + } + + /// Encodes the header part of the [`L1BlockInfoJovian`] object. + pub fn encode_calldata_header(&self, buf: &mut Vec) { + buf.extend_from_slice(Self::L1_INFO_TX_SELECTOR.as_ref()); + } + + /// Encodes the base part of the [`L1BlockInfoJovian`] object. + pub fn encode_calldata_body(&self, buf: &mut Vec) { + self.base.encode_calldata_body(buf); + buf.extend_from_slice(self.da_footprint_gas_scalar.to_be_bytes().as_ref()); + } + + /// Decodes the [`L1BlockInfoJovian`] object from ethereum transaction calldata. + pub fn decode_calldata(r: &[u8]) -> Result { + if r.len() != Self::L1_INFO_TX_LEN { + return Err(DecodeError::InvalidJovianLength(Self::L1_INFO_TX_LEN, r.len())); + } + Self::decode_calldata_body(r) + } + + /// Decodes the body of the [`L1BlockInfoJovian`] object. + pub fn decode_calldata_body(r: &[u8]) -> Result { + // SAFETY: For all below slice operations, the full + // length is validated above to be `178`. 
+ + let base = L1BlockInfoIsthmus::decode_calldata_body(r)?; + + // SAFETY: 2 bytes are copied directly into the array + let mut da_footprint_gas_scalar = [0u8; 2]; + da_footprint_gas_scalar.copy_from_slice(&r[176..178]); + let mut da_footprint_gas_scalar = u16::from_be_bytes(da_footprint_gas_scalar); + + // If the da footprint gas scalar is 0, use the default value (`https://github.com/ethereum-optimism/specs/blob/664cba65ab9686b0e70ad19fdf2ad054d6295986/specs/protocol/jovian/l1-attributes.md#overview`). + if da_footprint_gas_scalar == 0 { + da_footprint_gas_scalar = Self::DEFAULT_DA_FOOTPRINT_GAS_SCALAR; + } + + Ok(Self::new( + base.number(), + base.time(), + base.base_fee(), + base.block_hash(), + base.sequence_number(), + base.batcher_address(), + base.blob_base_fee(), + base.blob_base_fee_scalar(), + base.base_fee_scalar(), + base.operator_fee_scalar(), + base.operator_fee_constant(), + da_footprint_gas_scalar, + )) + } + + /// Construct from all values. + #[allow(clippy::too_many_arguments)] + pub const fn new( + number: u64, + time: u64, + base_fee: u64, + block_hash: B256, + sequence_number: u64, + batcher_address: Address, + blob_base_fee: u128, + blob_base_fee_scalar: u32, + base_fee_scalar: u32, + operator_fee_scalar: u32, + operator_fee_constant: u64, + da_footprint_gas_scalar: u16, + ) -> Self { + Self { + base: L1BlockInfoIsthmus::new( + number, + time, + base_fee, + block_hash, + sequence_number, + batcher_address, + blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + ), + da_footprint_gas_scalar, + } + } +} + +#[cfg(test)] +mod tests { + + use super::*; + use alloc::vec; + use alloy_primitives::keccak256; + + #[test] + fn test_decode_calldata_jovian_invalid_length() { + let r = vec![0u8; 1]; + assert_eq!( + L1BlockInfoJovian::decode_calldata(&r), + Err(DecodeError::InvalidJovianLength(L1BlockInfoJovian::L1_INFO_TX_LEN, r.len())) + ); + } + + #[test] + fn test_function_selector() { + assert_eq!( 
+ keccak256("setL1BlockValuesJovian()")[..4].to_vec(), + L1BlockInfoJovian::L1_INFO_TX_SELECTOR + ); + } + + #[test] + fn test_l1_block_info_jovian_roundtrip_calldata_encoding() { + let info = L1BlockInfoJovian::new( + 1, + 2, + 3, + B256::from([4; 32]), + 5, + Address::from_slice(&[6; 20]), + 7, + 8, + 9, + 10, + 11, + 12, + ); + + let calldata = info.encode_calldata(); + let decoded_info = L1BlockInfoJovian::decode_calldata(&calldata).unwrap(); + + assert_eq!(info, decoded_info); + } +} diff --git a/rust/kona/crates/protocol/protocol/src/info/mod.rs b/rust/kona/crates/protocol/protocol/src/info/mod.rs new file mode 100644 index 00000000000..67eeb37b315 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/info/mod.rs @@ -0,0 +1,45 @@ +//! Module containing L1 Attributes types (aka the L1 block info transaction). +//! +//! # Developer notes +//! +//! The structs implemented throughout this module form three chains of +//! embedding to emulate inheritance. By `a < b` we denote that the fields of +//! struct `a` are a subset of the fields of struct `b`. Delegation is +//! implemented through accessors and by help of the `ambassador` crate. The +//! hardforks `Bedrock` and `Ecotone` each contain both fields that are used by +//! all later hardforks and some that are not. They are implemented by +//! splitting them in two, e.g. `L1BlockInfoBedrockBase` and +//! `L1BlockInfoBedrock`, where the former contains exactly the fields that are used +//! by later hardforks and the latter embeds the former and then adds some +//! fields. +//! +//! The chains of embedding are: +//! +//! 1. `L1BlockInfoBedrockBase` < `L1BlockInfoEcotoneBase` < `L1BlockInfoIsthmus` < +//! `L1BlockInfoJovian` +//! 2. `L1BlockInfoBedrockBase` < `L1BlockInfoBedrock` +//! 3. 
`L1BlockInfoEcotoneBase` < `L1BlockInfoEcotone` + +mod variant; +pub use variant::L1BlockInfoTx; + +mod bedrock; +pub use bedrock::{L1BlockInfoBedrock, L1BlockInfoBedrockFields, L1BlockInfoBedrockOnlyFields}; + +mod bedrock_base; +pub use bedrock_base::L1BlockInfoBedrockBaseFields; + +mod ecotone; +pub use ecotone::{L1BlockInfoEcotone, L1BlockInfoEcotoneFields, L1BlockInfoEcotoneOnlyFields}; + +mod ecotone_base; +pub use ecotone_base::L1BlockInfoEcotoneBaseFields; + +mod isthmus; +pub use isthmus::{L1BlockInfoIsthmus, L1BlockInfoIsthmusBaseFields, L1BlockInfoIsthmusFields}; + +mod jovian; +pub use jovian::{L1BlockInfoJovian, L1BlockInfoJovianBaseFields, L1BlockInfoJovianFields}; + +mod errors; +pub use errors::{BlockInfoError, DecodeError}; diff --git a/rust/kona/crates/protocol/protocol/src/info/variant.rs b/rust/kona/crates/protocol/protocol/src/info/variant.rs new file mode 100644 index 00000000000..be82398a2cf --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/info/variant.rs @@ -0,0 +1,1058 @@ +//! Contains the `L1BlockInfoTx` enum, containing different variants of the L1 block info +//! transaction. 
+ +use alloy_consensus::Header; +use alloy_eips::{BlockNumHash, eip7840::BlobParams}; +use alloy_primitives::{Address, B256, Bytes, Sealable, Sealed, TxKind, U256, address}; +use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; +use op_alloy_consensus::{DepositSourceDomain, L1InfoDepositSource, TxDeposit}; + +use crate::{ + BlockInfoError, DecodeError, L1BlockInfoBedrock, L1BlockInfoEcotone, L1BlockInfoIsthmus, + Predeploys, + info::{ + L1BlockInfoBedrockBaseFields, L1BlockInfoEcotoneBaseFields as _, L1BlockInfoJovian, + bedrock::L1BlockInfoBedrockOnlyFields as _, ecotone::L1BlockInfoEcotoneOnlyFields as _, + isthmus::L1BlockInfoIsthmusBaseFields as _, + }, +}; + +/// The system transaction gas limit post-Regolith +const REGOLITH_SYSTEM_TX_GAS: u64 = 1_000_000; + +/// The depositor address of the L1 info transaction +pub(crate) const L1_INFO_DEPOSITOR_ADDRESS: Address = + address!("deaddeaddeaddeaddeaddeaddeaddeaddead0001"); + +/// The [`L1BlockInfoTx`] enum contains variants for the different versions of the L1 block info +/// transaction on OP Stack chains. +/// +/// This transaction always sits at the top of the block, and alters the `L1 Block` contract's +/// knowledge of the L1 chain. +#[derive(Debug, Clone, Eq, PartialEq, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum L1BlockInfoTx { + /// A Bedrock L1 info transaction + Bedrock(L1BlockInfoBedrock), + /// An Ecotone L1 info transaction + Ecotone(L1BlockInfoEcotone), + /// An Isthmus L1 info transaction + Isthmus(L1BlockInfoIsthmus), + /// A Jovian L1 info transaction + Jovian(L1BlockInfoJovian), +} + +impl L1BlockInfoTx { + /// Creates a new [`L1BlockInfoTx`] from the given information. 
+ pub fn try_new( + rollup_config: &RollupConfig, + l1_config: &L1ChainConfig, + system_config: &SystemConfig, + sequence_number: u64, + l1_header: &Header, + l2_block_time: u64, + ) -> Result { + // In the first block of Ecotone, the L1Block contract has not been upgraded yet due to the + // upgrade transactions being placed after the L1 info transaction. Because of this, + // for the first block of Ecotone, we send a Bedrock style L1 block info transaction + if !rollup_config.is_ecotone_active(l2_block_time) || + rollup_config.is_first_ecotone_block(l2_block_time) + { + return Ok(Self::Bedrock(L1BlockInfoBedrock::new( + l1_header.number, + l1_header.timestamp, + l1_header.base_fee_per_gas.unwrap_or(0), + l1_header.hash_slow(), + sequence_number, + system_config.batcher_address, + system_config.overhead, + system_config.scalar, + ))); + } + + // --- Post-Ecotone Operations --- + + let scalar = system_config.scalar.to_be_bytes::<32>(); + let blob_base_fee_scalar = (scalar[0] == L1BlockInfoEcotone::L1_SCALAR) + .then(|| { + Ok::(u32::from_be_bytes( + scalar[24..28].try_into().map_err(|_| BlockInfoError::L1BlobBaseFeeScalar)?, + )) + }) + .transpose()? + .unwrap_or_default(); + let base_fee_scalar = u32::from_be_bytes( + scalar[28..32].try_into().map_err(|_| BlockInfoError::BaseFeeScalar)?, + ); + + // Determine the blob fee configuration based on the timestamp. + // We start with the scheduled blob fee parameters, and then check for the osaka and prague + // parameters. 
+ let blob_fee_params = l1_config.blob_schedule_blob_params(); + + let blob_fee_config = + match blob_fee_params.active_scheduled_params_at_timestamp(l1_header.timestamp) { + Some(blob_fee_param) => *blob_fee_param, + None if l1_config.osaka_time.is_some_and(|time| time <= l1_header.timestamp) => { + BlobParams::osaka() + } + None if l1_config + .prague_time.is_some_and(|time| time <= l1_header.timestamp) && + // There was an incident on OP Stack Sepolia chains (03-05-2025) when L1 activated pectra, + // where the sequencer followed the incorrect chain, using the legacy Cancun blob fee + // schedule instead of the new Prague blob fee schedule. This portion of the chain was + // chosen to be canonicalized in favor of the prospect of a deep reorg imposed by the + // sequencers of the testnet chains. An optional hardfork was introduced for Sepolia only, + // where if present, activates the use of the Prague blob fee schedule. If the hardfork is + // not present, and L1 has activated pectra, the Prague blob fee schedule is used + // immediately. 
+ (rollup_config.hardforks.pectra_blob_schedule_time.is_none() || + rollup_config.is_pectra_blob_schedule_active(l1_header.timestamp)) => + { + BlobParams::prague() + } + _ => BlobParams::cancun(), + }; + + let blob_base_fee = l1_header.blob_fee(blob_fee_config).unwrap_or(1); + let block_hash = l1_header.hash_slow(); + let base_fee = l1_header.base_fee_per_gas.unwrap_or(0); + + if rollup_config.is_jovian_active(l2_block_time) && + !rollup_config.is_first_jovian_block(l2_block_time) + { + let operator_fee_scalar = system_config.operator_fee_scalar.unwrap_or_default(); + let operator_fee_constant = system_config.operator_fee_constant.unwrap_or_default(); + let mut da_footprint_gas_scalar = system_config + .da_footprint_gas_scalar + .unwrap_or(L1BlockInfoJovian::DEFAULT_DA_FOOTPRINT_GAS_SCALAR); + + if da_footprint_gas_scalar == 0 { + da_footprint_gas_scalar = L1BlockInfoJovian::DEFAULT_DA_FOOTPRINT_GAS_SCALAR; + } + + return Ok(Self::Jovian(L1BlockInfoJovian::new( + l1_header.number, + l1_header.timestamp, + base_fee, + block_hash, + sequence_number, + system_config.batcher_address, + blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + da_footprint_gas_scalar, + ))); + } + + if rollup_config.is_isthmus_active(l2_block_time) && + !rollup_config.is_first_isthmus_block(l2_block_time) + { + let operator_fee_scalar = system_config.operator_fee_scalar.unwrap_or_default(); + let operator_fee_constant = system_config.operator_fee_constant.unwrap_or_default(); + return Ok(Self::Isthmus(L1BlockInfoIsthmus::new( + l1_header.number, + l1_header.timestamp, + base_fee, + block_hash, + sequence_number, + system_config.batcher_address, + blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + ))); + } + + Ok(Self::Ecotone(L1BlockInfoEcotone::new( + l1_header.number, + l1_header.timestamp, + base_fee, + block_hash, + sequence_number, + system_config.batcher_address, + 
blob_base_fee, + blob_base_fee_scalar, + base_fee_scalar, + false, + U256::ZERO, + ))) + } + + /// Creates a new [`L1BlockInfoTx`] from the given information and returns a typed [`TxDeposit`] + /// to include at the top of a block. + pub fn try_new_with_deposit_tx( + rollup_config: &RollupConfig, + l1_config: &L1ChainConfig, + system_config: &SystemConfig, + sequence_number: u64, + l1_header: &Header, + l2_block_time: u64, + ) -> Result<(Self, Sealed), BlockInfoError> { + let l1_info = Self::try_new( + rollup_config, + l1_config, + system_config, + sequence_number, + l1_header, + l2_block_time, + )?; + + let source = DepositSourceDomain::L1Info(L1InfoDepositSource { + l1_block_hash: l1_info.block_hash(), + seq_number: sequence_number, + }); + + let mut deposit_tx = TxDeposit { + source_hash: source.source_hash(), + from: L1_INFO_DEPOSITOR_ADDRESS, + to: TxKind::Call(Predeploys::L1_BLOCK_INFO), + mint: 0, + value: U256::ZERO, + gas_limit: 150_000_000, + is_system_transaction: true, + input: l1_info.encode_calldata(), + }; + + // With the regolith hardfork, system transactions were deprecated, and we allocate + // a constant amount of gas for special transactions like L1 block info. + if rollup_config.is_regolith_active(l2_block_time) { + deposit_tx.is_system_transaction = false; + deposit_tx.gas_limit = REGOLITH_SYSTEM_TX_GAS; + } + + Ok((l1_info, deposit_tx.seal_slow())) + } + + /// Decodes the [`L1BlockInfoTx`] object from Ethereum transaction calldata. + pub fn decode_calldata(r: &[u8]) -> Result { + if r.len() < 4 { + return Err(DecodeError::MissingSelector); + } + // SAFETY: The length of `r` must be at least 4 bytes. 
+ let mut selector = [0u8; 4]; + selector.copy_from_slice(&r[0..4]); + match selector { + L1BlockInfoBedrock::L1_INFO_TX_SELECTOR => { + L1BlockInfoBedrock::decode_calldata(r).map(Self::Bedrock) + } + L1BlockInfoEcotone::L1_INFO_TX_SELECTOR => { + L1BlockInfoEcotone::decode_calldata(r).map(Self::Ecotone) + } + L1BlockInfoIsthmus::L1_INFO_TX_SELECTOR => { + L1BlockInfoIsthmus::decode_calldata(r).map(Self::Isthmus) + } + L1BlockInfoJovian::L1_INFO_TX_SELECTOR => { + L1BlockInfoJovian::decode_calldata(r).map(Self::Jovian) + } + _ => Err(DecodeError::InvalidSelector), + } + } + + /// Returns whether the scalars are empty. + pub fn empty_scalars(&self) -> bool { + match self { + Self::Bedrock(_) | Self::Isthmus(..) | Self::Jovian(_) => false, + Self::Ecotone(info) => info.empty_scalars(), + } + } + + /// Returns the block hash for the [`L1BlockInfoTx`]. + pub fn block_hash(&self) -> B256 { + match self { + Self::Bedrock(tx) => tx.block_hash(), + Self::Ecotone(tx) => tx.block_hash(), + Self::Isthmus(tx) => tx.block_hash(), + Self::Jovian(tx) => tx.block_hash(), + } + } + + /// Encodes the [`L1BlockInfoTx`] object into Ethereum transaction calldata. + pub fn encode_calldata(&self) -> Bytes { + match self { + Self::Bedrock(bedrock_tx) => bedrock_tx.encode_calldata(), + Self::Ecotone(ecotone_tx) => ecotone_tx.encode_calldata(), + Self::Isthmus(isthmus_tx) => isthmus_tx.encode_calldata(), + Self::Jovian(jovian_tx) => jovian_tx.encode_calldata(), + } + } + + /// Returns the L1 [`BlockNumHash`] for the info transaction. + pub fn id(&self) -> BlockNumHash { + match self { + Self::Bedrock(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, + Self::Ecotone(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, + Self::Isthmus(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, + Self::Jovian(tx) => BlockNumHash { number: tx.number(), hash: tx.block_hash() }, + } + } + + /// Returns the operator fee scalar. 
+ pub fn operator_fee_scalar(&self) -> u32 { + match self { + Self::Jovian(block_info) => block_info.operator_fee_scalar(), + Self::Isthmus(block_info) => block_info.operator_fee_scalar(), + _ => 0, + } + } + + /// Returns the operator fee constant. + pub fn operator_fee_constant(&self) -> u64 { + match self { + Self::Jovian(block_info) => block_info.operator_fee_constant(), + Self::Isthmus(block_info) => block_info.operator_fee_constant(), + _ => 0, + } + } + + /// Returns the da footprint + pub const fn da_footprint(&self) -> Option { + match self { + Self::Jovian(L1BlockInfoJovian { da_footprint_gas_scalar, .. }) => { + Some(*da_footprint_gas_scalar) + } + _ => None, + } + } + + /// Returns the l1 base fee. + pub fn l1_base_fee(&self) -> U256 { + match self { + Self::Bedrock(block_info) => U256::from(block_info.base_fee()), + Self::Ecotone(block_info) => U256::from(block_info.base_fee()), + Self::Isthmus(block_info) => U256::from(block_info.base_fee()), + Self::Jovian(block_info) => U256::from(block_info.base_fee()), + } + } + + /// Returns the l1 fee scalar. + pub fn l1_fee_scalar(&self) -> U256 { + match self { + Self::Bedrock(block) => U256::from(block.l1_fee_scalar()), + Self::Ecotone(block) => U256::from(block.base_fee_scalar()), + Self::Isthmus(block) => U256::from(block.base_fee_scalar()), + Self::Jovian(block) => U256::from(block.base_fee_scalar()), + } + } + + /// Returns the blob base fee. + pub fn blob_base_fee(&self) -> U256 { + match self { + Self::Bedrock(_) => U256::ZERO, + Self::Ecotone(block) => U256::from(block.blob_base_fee()), + Self::Isthmus(block) => U256::from(block.blob_base_fee()), + Self::Jovian(block) => U256::from(block.blob_base_fee()), + } + } + + /// Returns the blob base fee scalar. 
+ pub fn blob_base_fee_scalar(&self) -> U256 { + match self { + Self::Bedrock(_) => U256::ZERO, + Self::Ecotone(block_info) => U256::from(block_info.blob_base_fee_scalar()), + Self::Isthmus(block_info) => U256::from(block_info.blob_base_fee_scalar()), + Self::Jovian(block_info) => U256::from(block_info.blob_base_fee_scalar()), + } + } + + /// Returns the L1 fee overhead for the info transaction. After ecotone, this value is ignored. + pub fn l1_fee_overhead(&self) -> U256 { + match self { + Self::Bedrock(block_info) => block_info.l1_fee_overhead(), + Self::Ecotone(block_info) => block_info.l1_fee_overhead(), + Self::Isthmus(_) | Self::Jovian(_) => U256::ZERO, + } + } + + /// Returns the batcher address for the info transaction + pub fn batcher_address(&self) -> Address { + match self { + Self::Bedrock(block) => block.batcher_address(), + Self::Ecotone(block) => block.batcher_address(), + Self::Isthmus(block) => block.batcher_address(), + Self::Jovian(block) => block.batcher_address(), + } + } + + /// Returns the sequence number for the info transaction + pub fn sequence_number(&self) -> u64 { + match self { + Self::Bedrock(block) => block.sequence_number(), + Self::Ecotone(block) => block.sequence_number(), + Self::Isthmus(block) => block.sequence_number(), + Self::Jovian(block) => block.sequence_number(), + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::test_utils::{RAW_BEDROCK_INFO_TX, RAW_ECOTONE_INFO_TX, RAW_ISTHMUS_INFO_TX}; + use alloc::{string::ToString, vec::Vec}; + use alloy_primitives::{address, b256}; + use kona_genesis::HardForkConfig; + use kona_registry::L1Config; + use rstest::rstest; + + #[test] + fn test_l1_block_info_missing_selector() { + let err = L1BlockInfoTx::decode_calldata(&[]); + assert_eq!(err, Err(DecodeError::MissingSelector)); + } + + #[test] + fn test_l1_block_info_tx_invalid_len() { + let calldata = L1BlockInfoBedrock::L1_INFO_TX_SELECTOR + .into_iter() + .chain([0xde, 0xad]) + .collect::>(); + let err = 
L1BlockInfoTx::decode_calldata(&calldata); + assert!(err.is_err()); + assert_eq!( + err.err().unwrap().to_string(), + "Invalid bedrock data length. Expected 260, got 6" + ); + + let calldata = L1BlockInfoEcotone::L1_INFO_TX_SELECTOR + .into_iter() + .chain([0xde, 0xad]) + .collect::>(); + let err = L1BlockInfoTx::decode_calldata(&calldata); + assert!(err.is_err()); + assert_eq!( + err.err().unwrap().to_string(), + "Invalid ecotone data length. Expected 164, got 6" + ); + + let calldata = L1BlockInfoIsthmus::L1_INFO_TX_SELECTOR + .into_iter() + .chain([0xde, 0xad]) + .collect::>(); + let err = L1BlockInfoTx::decode_calldata(&calldata); + assert!(err.is_err()); + assert_eq!( + err.err().unwrap().to_string(), + "Invalid isthmus data length. Expected 176, got 6" + ); + } + + #[test] + fn test_l1_block_info_tx_block_hash() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_block_hash(b256!( + "392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc" + ))); + assert_eq!( + bedrock.block_hash(), + b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc") + ); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_block_hash(b256!( + "1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3" + ))); + assert_eq!( + ecotone.block_hash(), + b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3") + ); + } + + #[test] + fn test_decode_calldata_invalid_selector() { + let err = L1BlockInfoTx::decode_calldata(&[0xde, 0xad, 0xbe, 0xef]); + assert_eq!(err, Err(DecodeError::InvalidSelector)); + } + + #[test] + fn test_l1_block_info_id() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_number_and_block_hash( + 123, + b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), + )); + assert_eq!( + bedrock.id(), + BlockNumHash { + number: 123, + hash: b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc") + } + ); + + let ecotone = 
L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_number_and_block_hash( + 456, + b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), + )); + + assert_eq!( + ecotone.id(), + BlockNumHash { + number: 456, + hash: b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3") + } + ); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_number_and_block_hash( + 101112, + b256!("4f98b83baf52c498b49bfff33e59965b27da7febbea9a2fcc4719d06dc06932a"), + )); + assert_eq!( + isthmus.id(), + BlockNumHash { + number: 101112, + hash: b256!("4f98b83baf52c498b49bfff33e59965b27da7febbea9a2fcc4719d06dc06932a") + } + ); + } + + #[test] + fn test_l1_block_info_sequence_number() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_sequence_number(123)); + assert_eq!(bedrock.sequence_number(), 123); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_sequence_number(456)); + assert_eq!(ecotone.sequence_number(), 456); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_sequence_number(101112)); + assert_eq!(isthmus.sequence_number(), 101112); + } + + #[test] + fn test_operator_fee_constant() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); + assert_eq!(bedrock.operator_fee_constant(), 0); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); + assert_eq!(ecotone.operator_fee_constant(), 0); + + let isthmus = + L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_operator_fee_constant(123)); + assert_eq!(isthmus.operator_fee_constant(), 123); + } + + #[test] + fn test_operator_fee_scalar() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); + assert_eq!(bedrock.operator_fee_scalar(), 0); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); + assert_eq!(ecotone.operator_fee_scalar(), 0); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_operator_fee_scalar(123)); + 
assert_eq!(isthmus.operator_fee_scalar(), 123); + } + + #[test] + fn test_l1_base_fee() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_base_fee(123)); + assert_eq!(bedrock.l1_base_fee(), U256::from(123)); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_base_fee(456)); + assert_eq!(ecotone.l1_base_fee(), U256::from(456)); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_base_fee(101112)); + assert_eq!(isthmus.l1_base_fee(), U256::from(101112)); + } + + #[test] + fn test_l1_fee_overhead() { + let bedrock = + L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_l1_fee_overhead(U256::from(123))); + assert_eq!(bedrock.l1_fee_overhead(), U256::from(123)); + + let ecotone = + L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_l1_fee_overhead(U256::from(456))); + assert_eq!(ecotone.l1_fee_overhead(), U256::from(456)); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::default()); + assert_eq!(isthmus.l1_fee_overhead(), U256::ZERO); + } + + #[test] + fn test_batcher_address() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_batcher_address( + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + )); + assert_eq!(bedrock.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_batcher_address( + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + )); + assert_eq!(ecotone.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_batcher_address( + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + )); + assert_eq!(isthmus.batcher_address(), address!("6887246668a3b87f54deb3b94ba47a6f63f32985")); + } + + #[test] + fn test_l1_fee_scalar() { + let bedrock = + L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::new_from_l1_fee_scalar(U256::from(123))); + assert_eq!(bedrock.l1_fee_scalar(), U256::from(123)); + + 
let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_base_fee_scalar(456)); + assert_eq!(ecotone.l1_fee_scalar(), U256::from(456)); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_base_fee_scalar(101112)); + assert_eq!(isthmus.l1_fee_scalar(), U256::from(101112)); + } + + #[test] + fn test_blob_base_fee() { + let bedrock = L1BlockInfoTx::Bedrock(Default::default()); + assert_eq!(bedrock.blob_base_fee(), U256::ZERO); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_blob_base_fee(456)); + assert_eq!(ecotone.blob_base_fee(), U256::from(456)); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_blob_base_fee(101112)); + assert_eq!(isthmus.blob_base_fee(), U256::from(101112)); + } + + #[test] + fn test_blob_base_fee_scalar() { + let bedrock = L1BlockInfoTx::Bedrock(L1BlockInfoBedrock::default()); + assert_eq!(bedrock.blob_base_fee_scalar(), U256::ZERO); + + let ecotone = + L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_blob_base_fee_scalar(456)); + //dbg!("{}", ecotone); + assert_eq!(ecotone.blob_base_fee_scalar(), U256::from(456)); + + let isthmus = + L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new_from_blob_base_fee_scalar(101112)); + assert_eq!(isthmus.blob_base_fee_scalar(), U256::from(101112)); + } + + #[test] + fn test_empty_scalars() { + let bedrock = L1BlockInfoTx::Bedrock(Default::default()); + assert!(!bedrock.empty_scalars()); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::new_from_empty_scalars(true)); + assert!(ecotone.empty_scalars()); + + let ecotone = L1BlockInfoTx::Ecotone(L1BlockInfoEcotone::default()); + assert!(!ecotone.empty_scalars()); + + let isthmus = L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::default()); + assert!(!isthmus.empty_scalars()); + } + + #[test] + fn test_isthmus_l1_block_info_tx_roundtrip() { + let expected = L1BlockInfoIsthmus::new( + 19655712, + 1713121139, + 10445852825, + 
b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), + 5, + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + 1, + 810949, + 1368, + 0xabcd, + 0xdcba, + ); + + let L1BlockInfoTx::Isthmus(decoded) = + L1BlockInfoTx::decode_calldata(RAW_ISTHMUS_INFO_TX.as_ref()).unwrap() + else { + panic!("Wrong fork"); + }; + assert_eq!(expected, decoded); + assert_eq!(L1BlockInfoTx::Isthmus(decoded).encode_calldata().as_ref(), RAW_ISTHMUS_INFO_TX); + } + + #[test] + fn test_bedrock_l1_block_info_tx_roundtrip() { + let expected = L1BlockInfoBedrock::new( + 18334955, + 1697121143, + 10419034451, + b256!("392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc"), + 4, + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + U256::from(0xbc), + U256::from(0xa6fe0), + ); + + let L1BlockInfoTx::Bedrock(decoded) = + L1BlockInfoTx::decode_calldata(RAW_BEDROCK_INFO_TX.as_ref()).unwrap() + else { + panic!("Wrong fork"); + }; + assert_eq!(expected, decoded); + assert_eq!(L1BlockInfoTx::Bedrock(decoded).encode_calldata().as_ref(), RAW_BEDROCK_INFO_TX); + } + + #[test] + fn test_ecotone_l1_block_info_tx_roundtrip() { + let expected = L1BlockInfoEcotone::new( + 19655712, + 1713121139, + 10445852825, + b256!("1c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add3"), + 5, + address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + 1, + 810949, + 1368, + false, + U256::ZERO, + ); + + let L1BlockInfoTx::Ecotone(decoded) = + L1BlockInfoTx::decode_calldata(RAW_ECOTONE_INFO_TX.as_ref()).unwrap() + else { + panic!("Wrong fork"); + }; + assert_eq!(expected, decoded); + assert_eq!(L1BlockInfoTx::Ecotone(decoded).encode_calldata().as_ref(), RAW_ECOTONE_INFO_TX); + } + + #[test] + fn test_try_new_bedrock() { + let rollup_config = RollupConfig::default(); + let l1_config = L1Config::sepolia(); + let system_config = SystemConfig::default(); + let sequence_number = 0; + let l1_header = Header::default(); + let l2_block_time = 0; + + let l1_info = 
L1BlockInfoTx::try_new( + &rollup_config, + &l1_config, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + let L1BlockInfoTx::Bedrock(l1_info) = l1_info else { + panic!("Wrong fork"); + }; + + assert_eq!(l1_info.number(), l1_header.number); + assert_eq!(l1_info.time(), l1_header.timestamp); + assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); + assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); + assert_eq!(l1_info.sequence_number(), sequence_number); + assert_eq!(l1_info.batcher_address(), system_config.batcher_address); + assert_eq!(l1_info.l1_fee_overhead(), system_config.overhead); + assert_eq!(l1_info.l1_fee_scalar(), system_config.scalar); + } + + #[test] + fn test_try_new_ecotone() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { ecotone_time: Some(1), ..Default::default() }, + ..Default::default() + }; + let l1_config = L1Config::sepolia(); + let system_config = SystemConfig::default(); + let sequence_number = 0; + let l1_header = Header::default(); + let l2_block_time = 0xFF; + + let l1_info = L1BlockInfoTx::try_new( + &rollup_config, + &l1_config, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + let L1BlockInfoTx::Ecotone(l1_info) = l1_info else { + panic!("Wrong fork"); + }; + + assert_eq!(l1_info.number(), l1_header.number); + assert_eq!(l1_info.time(), l1_header.timestamp); + assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); + assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); + assert_eq!(l1_info.sequence_number(), sequence_number); + assert_eq!(l1_info.batcher_address(), system_config.batcher_address); + assert_eq!(l1_info.blob_base_fee(), l1_header.blob_fee(BlobParams::cancun()).unwrap_or(1)); + + let scalar = system_config.scalar.to_be_bytes::<32>(); + let blob_base_fee_scalar = if scalar[0] == L1BlockInfoEcotone::L1_SCALAR { + { + u32::from_be_bytes( + 
scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), + ) + } + } else { + Default::default() + }; + let base_fee_scalar = + u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); + assert_eq!(l1_info.blob_base_fee_scalar(), blob_base_fee_scalar); + assert_eq!(l1_info.base_fee_scalar(), base_fee_scalar); + } + + #[rstest] + #[case::fork_active(true, false)] + #[case::fork_inactive(false, false)] + #[should_panic] + #[case::fork_active_wrong_params(true, true)] + #[should_panic] + #[case::fork_inactive_wrong_params(false, true)] + fn test_try_new_ecotone_with_optional_prague_fee_fork( + #[case] fork_active: bool, + #[case] use_wrong_params: bool, + ) { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { + ecotone_time: Some(1), + pectra_blob_schedule_time: Some(2), + ..Default::default() + }, + ..Default::default() + }; + let mut l1_genesis: L1ChainConfig = L1Config::sepolia().into(); + l1_genesis.prague_time = Some(2); + + let system_config = SystemConfig::default(); + let sequence_number = 0; + let l1_header = Header { + timestamp: if fork_active { 2 } else { 1 }, + excess_blob_gas: Some(0x5080000), + blob_gas_used: Some(0x100000), + requests_hash: Some(B256::ZERO), + ..Default::default() + }; + let l2_block_time = 0xFF; + + let l1_info = L1BlockInfoTx::try_new( + &rollup_config, + &l1_genesis, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + let L1BlockInfoTx::Ecotone(l1_info) = l1_info else { + panic!("Wrong fork"); + }; + + assert_eq!(l1_info.number(), l1_header.number); + assert_eq!(l1_info.time(), l1_header.timestamp); + assert_eq!(l1_info.base_fee(), { l1_header.base_fee_per_gas.unwrap_or(0) }); + assert_eq!(l1_info.block_hash(), l1_header.hash_slow()); + assert_eq!(l1_info.sequence_number(), sequence_number); + assert_eq!(l1_info.batcher_address(), system_config.batcher_address); + assert_eq!( + l1_info.blob_base_fee(), + l1_header + .blob_fee(if 
fork_active == use_wrong_params { + BlobParams::cancun() + } else { + BlobParams::prague() + }) + .unwrap_or(1) + ); + + let scalar = system_config.scalar.to_be_bytes::<32>(); + let blob_base_fee_scalar = if scalar[0] == L1BlockInfoEcotone::L1_SCALAR { + { + u32::from_be_bytes( + scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), + ) + } + } else { + Default::default() + }; + let base_fee_scalar = + u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); + assert_eq!(l1_info.blob_base_fee_scalar(), blob_base_fee_scalar); + assert_eq!(l1_info.base_fee_scalar(), base_fee_scalar); + } + + #[test] + fn test_try_new_isthmus_before_pectra_blob_schedule() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { + isthmus_time: Some(1), + pectra_blob_schedule_time: Some(1713121140), + ..Default::default() + }, + ..Default::default() + }; + let l1_config = L1Config::sepolia(); + let system_config = SystemConfig { + batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + operator_fee_scalar: Some(0xabcd), + operator_fee_constant: Some(0xdcba), + ..Default::default() + }; + let sequence_number = 0; + let l1_header = Header { + number: 19655712, + timestamp: 1713121139, + base_fee_per_gas: Some(10445852825), + // Assume Pectra is active on L1 + requests_hash: Some(B256::ZERO), + ..Default::default() + }; + let l2_block_time = 0xFF; + + let l1_info = L1BlockInfoTx::try_new( + &rollup_config, + &l1_config, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); + + let scalar = system_config.scalar.to_be_bytes::<32>(); + let blob_base_fee_scalar = if scalar[0] == L1BlockInfoIsthmus::L1_SCALAR { + { + u32::from_be_bytes( + scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), + ) + } + } else { + Default::default() + }; + let base_fee_scalar = + 
u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); + + assert_eq!( + l1_info, + L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new( + l1_header.number, + l1_header.timestamp, + l1_header.base_fee_per_gas.unwrap_or(0), + l1_header.hash_slow(), + sequence_number, + system_config.batcher_address, + // Expect cancun blob schedule to be used, since pectra blob schedule is scheduled + // but not active yet. + l1_header.blob_fee(BlobParams::cancun()).unwrap_or(1), + blob_base_fee_scalar, + base_fee_scalar, + system_config.operator_fee_scalar.unwrap_or_default(), + system_config.operator_fee_constant.unwrap_or_default(), + )) + ); + } + + #[test] + fn test_try_new_isthmus() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { isthmus_time: Some(1), ..Default::default() }, + ..Default::default() + }; + let l1_config = L1Config::sepolia(); + let system_config = SystemConfig { + batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + operator_fee_scalar: Some(0xabcd), + operator_fee_constant: Some(0xdcba), + ..Default::default() + }; + let sequence_number = 0; + let l1_header = Header { + number: 19655712, + timestamp: 1713121139, + base_fee_per_gas: Some(10445852825), + ..Default::default() + }; + let l2_block_time = 0xFF; + + let l1_info = L1BlockInfoTx::try_new( + &rollup_config, + &l1_config, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); + + let scalar = system_config.scalar.to_be_bytes::<32>(); + let blob_base_fee_scalar = if scalar[0] == L1BlockInfoIsthmus::L1_SCALAR { + { + u32::from_be_bytes( + scalar[24..28].try_into().expect("Failed to parse L1 blob base fee scalar"), + ) + } + } else { + Default::default() + }; + let base_fee_scalar = + u32::from_be_bytes(scalar[28..32].try_into().expect("Failed to parse base fee scalar")); + + assert_eq!( + l1_info, + L1BlockInfoTx::Isthmus(L1BlockInfoIsthmus::new( + 
l1_header.number, + l1_header.timestamp, + l1_header.base_fee_per_gas.unwrap_or(0), + l1_header.hash_slow(), + sequence_number, + system_config.batcher_address, + l1_header.blob_fee(BlobParams::prague()).unwrap_or(1), + blob_base_fee_scalar, + base_fee_scalar, + system_config.operator_fee_scalar.unwrap_or_default(), + system_config.operator_fee_constant.unwrap_or_default(), + )) + ); + } + + #[test] + fn test_try_new_with_deposit_tx() { + let rollup_config = RollupConfig { + hardforks: HardForkConfig { isthmus_time: Some(1), ..Default::default() }, + ..Default::default() + }; + let l1_config = L1Config::sepolia(); + let system_config = SystemConfig { + batcher_address: address!("6887246668a3b87f54deb3b94ba47a6f63f32985"), + operator_fee_scalar: Some(0xabcd), + operator_fee_constant: Some(0xdcba), + ..Default::default() + }; + let sequence_number = 0; + let l1_header = Header { + number: 19655712, + timestamp: 1713121139, + base_fee_per_gas: Some(10445852825), + ..Default::default() + }; + let l2_block_time = 0xFF; + + let (l1_info, deposit_tx) = L1BlockInfoTx::try_new_with_deposit_tx( + &rollup_config, + &l1_config, + &system_config, + sequence_number, + &l1_header, + l2_block_time, + ) + .unwrap(); + + assert!(matches!(l1_info, L1BlockInfoTx::Isthmus(_))); + assert_eq!(deposit_tx.from, L1_INFO_DEPOSITOR_ADDRESS); + assert_eq!(deposit_tx.to, TxKind::Call(Predeploys::L1_BLOCK_INFO)); + assert_eq!(deposit_tx.mint, 0); + assert_eq!(deposit_tx.value, U256::ZERO); + assert_eq!(deposit_tx.gas_limit, REGOLITH_SYSTEM_TX_GAS); + assert!(!deposit_tx.is_system_transaction); + assert_eq!(deposit_tx.input, l1_info.encode_calldata()); + } +} diff --git a/rust/kona/crates/protocol/protocol/src/lib.rs b/rust/kona/crates/protocol/protocol/src/lib.rs new file mode 100644 index 00000000000..7a8c05facc6 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/lib.rs @@ -0,0 +1,74 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = 
"https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +mod batch; +pub use batch::{ + Batch, BatchDecodingError, BatchDropReason, BatchEncodingError, BatchReader, BatchTransaction, + BatchType, BatchValidationProvider, BatchValidity, BatchWithInclusionBlock, DecompressionError, + MAX_SPAN_BATCH_ELEMENTS, RawSpanBatch, SINGLE_BATCH_TYPE, SPAN_BATCH_TYPE, SingleBatch, + SpanBatch, SpanBatchBits, SpanBatchEip1559TransactionData, SpanBatchEip2930TransactionData, + SpanBatchEip7702TransactionData, SpanBatchElement, SpanBatchError, + SpanBatchLegacyTransactionData, SpanBatchPayload, SpanBatchPrefix, SpanBatchTransactionData, + SpanBatchTransactions, SpanDecodingError, +}; + +mod brotli; +pub use brotli::{BrotliDecompressionError, decompress_brotli}; + +mod sync; +pub use sync::SyncStatus; + +mod attributes; +pub use attributes::OpAttributesWithParent; + +mod errors; +pub use errors::OpBlockConversionError; + +mod block; +pub use block::{BlockInfo, FromBlockError, L2BlockInfo}; + +mod frame; +pub use frame::{ + DERIVATION_VERSION_0, FRAME_OVERHEAD, Frame, FrameDecodingError, FrameParseError, MAX_FRAME_LEN, +}; + +mod utils; +pub use utils::{read_tx_data, to_system_config}; + +mod channel; +pub use channel::{ + CHANNEL_ID_LENGTH, Channel, ChannelError, ChannelId, FJORD_MAX_RLP_BYTES_PER_CHANNEL, + MAX_RLP_BYTES_PER_CHANNEL, +}; + +mod deposits; +pub use deposits::{ + DEPOSIT_EVENT_ABI, DEPOSIT_EVENT_ABI_HASH, DEPOSIT_EVENT_VERSION_0, DepositError, + decode_deposit, +}; + +mod info; +pub use info::{ + BlockInfoError, DecodeError, L1BlockInfoBedrock, L1BlockInfoBedrockBaseFields, + L1BlockInfoBedrockFields, 
L1BlockInfoBedrockOnlyFields, L1BlockInfoEcotone, + L1BlockInfoEcotoneBaseFields, L1BlockInfoEcotoneFields, L1BlockInfoEcotoneOnlyFields, + L1BlockInfoIsthmus, L1BlockInfoIsthmusBaseFields, L1BlockInfoIsthmusFields, L1BlockInfoJovian, + L1BlockInfoJovianBaseFields, L1BlockInfoJovianFields, L1BlockInfoTx, +}; + +mod predeploys; +pub use predeploys::Predeploys; + +mod output_root; +pub use output_root::OutputRoot; + +#[cfg(any(test, feature = "test-utils"))] +pub mod test_utils; diff --git a/kona/crates/protocol/protocol/src/output_root.rs b/rust/kona/crates/protocol/protocol/src/output_root.rs similarity index 94% rename from kona/crates/protocol/protocol/src/output_root.rs rename to rust/kona/crates/protocol/protocol/src/output_root.rs index ce076d2951f..5011524565f 100644 --- a/kona/crates/protocol/protocol/src/output_root.rs +++ b/rust/kona/crates/protocol/protocol/src/output_root.rs @@ -4,12 +4,11 @@ use alloy_primitives::{B256, keccak256}; use derive_more::Display; /// The [`OutputRoot`] is a high-level commitment to an L2 block. It lifts the state root from the -/// block header as well as the storage root of the [Predeploys::L2_TO_L1_MESSAGE_PASSER] account +/// block header as well as the storage root of the +/// [`Predeploys::L2_TO_L1_MESSAGE_PASSER`](crate::Predeploys::L2_TO_L1_MESSAGE_PASSER) account /// into the top-level commitment construction. 
/// /// -/// -/// [Predeploys::L2_TO_L1_MESSAGE_PASSER]: crate::Predeploys::L2_TO_L1_MESSAGE_PASSER #[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Hash)] #[display("OutputRootV0({}, {}, {})", state_root, bridge_storage_root, block_hash)] pub struct OutputRoot { diff --git a/kona/crates/protocol/protocol/src/predeploys.rs b/rust/kona/crates/protocol/protocol/src/predeploys.rs similarity index 92% rename from kona/crates/protocol/protocol/src/predeploys.rs rename to rust/kona/crates/protocol/protocol/src/predeploys.rs index 6d30ee0ad60..5921ebe7718 100644 --- a/kona/crates/protocol/protocol/src/predeploys.rs +++ b/rust/kona/crates/protocol/protocol/src/predeploys.rs @@ -39,19 +39,19 @@ impl Predeploys { Self::L2_TO_L2_XDM, ]; - /// The LegacyMessagePasser contract stores commitments to withdrawal transactions before the + /// The `LegacyMessagePasser` contract stores commitments to withdrawal transactions before the /// Bedrock upgrade. /// pub const LEGACY_MESSAGE_PASSER: Address = address!("0x4200000000000000000000000000000000000000"); - /// The DeployerWhitelist was used to provide additional safety during initial phases of + /// The `DeployerWhitelist` was used to provide additional safety during initial phases of /// Optimism. /// pub const DEPLOYER_WHITELIST: Address = address!("0x4200000000000000000000000000000000000002"); - /// The LegacyERC20ETH predeploy represented all ether in the system before the Bedrock upgrade. - /// + /// The `LegacyERC20ETH` predeploy represented all ether in the system before the Bedrock + /// upgrade. pub const LEGACY_ERC20_ETH: Address = address!("0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"); /// The WETH9 predeploy address. @@ -133,9 +133,9 @@ impl Predeploys { /// The Operator Fee Vault proxy address. pub const OPERATOR_FEE_VAULT: Address = address!("0x420000000000000000000000000000000000001B"); - /// The CrossL2Inbox proxy address. + /// The `CrossL2Inbox` proxy address. 
pub const CROSS_L2_INBOX: Address = address!("0x4200000000000000000000000000000000000022"); - /// The L2ToL2CrossDomainMessenger proxy address. + /// The `L2ToL2CrossDomainMessenger` proxy address. pub const L2_TO_L2_XDM: Address = address!("0x4200000000000000000000000000000000000023"); } diff --git a/kona/crates/protocol/protocol/src/sync.rs b/rust/kona/crates/protocol/protocol/src/sync.rs similarity index 100% rename from kona/crates/protocol/protocol/src/sync.rs rename to rust/kona/crates/protocol/protocol/src/sync.rs diff --git a/rust/kona/crates/protocol/protocol/src/test_utils.rs b/rust/kona/crates/protocol/protocol/src/test_utils.rs new file mode 100644 index 00000000000..38455129065 --- /dev/null +++ b/rust/kona/crates/protocol/protocol/src/test_utils.rs @@ -0,0 +1,132 @@ +//! Test utilities for the protocol crate. + +use alloc::{boxed::Box, format, string::String, sync::Arc, vec::Vec}; +use alloy_primitives::hex; +use async_trait::async_trait; +use op_alloy_consensus::OpBlock; +use spin::Mutex; +use tracing::{Event, Level, Subscriber}; +use tracing_subscriber::{Layer, layer::Context}; + +use crate::{ + BatchValidationProvider, L1BlockInfoBedrock, L1BlockInfoEcotone, L1BlockInfoIsthmus, + L2BlockInfo, +}; + +/// Raw encoded bedrock L1 block info transaction. +pub const RAW_BEDROCK_INFO_TX: [u8; L1BlockInfoBedrock::L1_INFO_TX_LEN] = hex!( + "015d8eb9000000000000000000000000000000000000000000000000000000000117c4eb0000000000000000000000000000000000000000000000000000000065280377000000000000000000000000000000000000000000000000000000026d05d953392012032675be9f94aae5ab442de73c5f4fb1bf30fa7dd0d2442239899a40fc00000000000000000000000000000000000000000000000000000000000000040000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f3298500000000000000000000000000000000000000000000000000000000000000bc00000000000000000000000000000000000000000000000000000000000a6fe0" +); + +/// Raw encoded ecotone L1 block info transaction. 
+pub const RAW_ECOTONE_INFO_TX: [u8; L1BlockInfoEcotone::L1_INFO_TX_LEN] = hex!( + "440a5e2000000558000c5fc5000000000000000500000000661c277300000000012bec20000000000000000000000000000000000000000000000000000000026e9f109900000000000000000000000000000000000000000000000000000000000000011c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add30000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985" +); + +/// Raw encoded isthmus L1 block info transaction. +pub const RAW_ISTHMUS_INFO_TX: [u8; L1BlockInfoIsthmus::L1_INFO_TX_LEN] = hex!( + "098999be00000558000c5fc5000000000000000500000000661c277300000000012bec20000000000000000000000000000000000000000000000000000000026e9f109900000000000000000000000000000000000000000000000000000000000000011c4c84c50740386c7dc081efddd644405f04cde73e30a2e381737acce9f5add30000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f329850000abcd000000000000dcba" +); + +/// An error for implementations of the [`BatchValidationProvider`] trait. +#[derive(Debug, thiserror::Error)] +pub enum TestBatchValidatorError { + /// The block was not found. + #[error("Block not found")] + BlockNotFound, + /// The L2 block was not found. + #[error("L2 Block not found")] + L2BlockNotFound, +} + +/// An [`TestBatchValidator`] implementation for testing. +#[derive(Default, Debug, Clone)] +pub struct TestBatchValidator { + /// Blocks + pub blocks: Vec, + /// Short circuit the block return to be the first block. + pub short_circuit: bool, + /// Blocks + pub op_blocks: Vec, +} + +impl TestBatchValidator { + /// Creates a new [`TestBatchValidator`] with the given origin and batches. 
+ pub const fn new(blocks: Vec, op_blocks: Vec) -> Self { + Self { blocks, short_circuit: false, op_blocks } + } +} + +#[async_trait] +impl BatchValidationProvider for TestBatchValidator { + type Error = TestBatchValidatorError; + + async fn l2_block_info_by_number(&mut self, number: u64) -> Result { + if self.short_circuit { + return self + .blocks + .first() + .copied() + .ok_or_else(|| TestBatchValidatorError::BlockNotFound); + } + self.blocks + .iter() + .find(|b| b.block_info.number == number) + .copied() + .ok_or_else(|| TestBatchValidatorError::BlockNotFound) + } + + async fn block_by_number(&mut self, number: u64) -> Result { + self.op_blocks + .iter() + .find(|p| p.header.number == number) + .cloned() + .ok_or_else(|| TestBatchValidatorError::L2BlockNotFound) + } +} + +/// The storage for the collected traces. +#[derive(Debug, Default, Clone)] +pub struct TraceStorage(pub Arc>>); + +impl TraceStorage { + /// Returns the items in the storage that match the specified level. + pub fn get_by_level(&self, level: Level) -> Vec { + self.0 + .lock() + .iter() + .filter(|&(l, _message)| *l == level) + .map(|(_l, message)| message.clone()) + .collect() + } + + /// Returns if the storage is empty. + pub fn is_empty(&self) -> bool { + self.0.lock().is_empty() + } +} + +/// A subscriber layer that collects traces and their log levels. +#[derive(Debug, Default)] +pub struct CollectingLayer { + /// The storage for the collected traces. + pub storage: TraceStorage, +} + +impl CollectingLayer { + /// Creates a new collecting layer with the specified storage. 
+ pub const fn new(storage: TraceStorage) -> Self { + Self { storage } + } +} + +impl Layer for CollectingLayer { + fn on_event(&self, event: &Event<'_>, _ctx: Context<'_, S>) { + let metadata = event.metadata(); + let level = *metadata.level(); + let message = format!("{event:?}"); + + let mut storage = self.storage.0.lock(); + storage.push((level, message)); + } +} diff --git a/kona/crates/protocol/protocol/src/utils.rs b/rust/kona/crates/protocol/protocol/src/utils.rs similarity index 100% rename from kona/crates/protocol/protocol/src/utils.rs rename to rust/kona/crates/protocol/protocol/src/utils.rs diff --git a/kona/crates/protocol/protocol/testdata/batch.hex b/rust/kona/crates/protocol/protocol/testdata/batch.hex similarity index 100% rename from kona/crates/protocol/protocol/testdata/batch.hex rename to rust/kona/crates/protocol/protocol/testdata/batch.hex diff --git a/rust/kona/crates/protocol/registry/Cargo.toml b/rust/kona/crates/protocol/registry/Cargo.toml new file mode 100644 index 00000000000..ffe2511d58d --- /dev/null +++ b/rust/kona/crates/protocol/registry/Cargo.toml @@ -0,0 +1,61 @@ +[package] +name = "kona-registry" +version = "0.4.5" +description = "A registry of superchain configs" + +edition.workspace = true +rust-version.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +exclude.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-genesis = { workspace = true, features = ["serde"] } + +# Alloy +alloy-primitives = { workspace = true, features = ["map"] } +alloy-genesis.workspace = true +alloy-eips.workspace = true +alloy-hardforks.workspace = true +alloy-chains = { workspace = true, features = ["serde"] } +alloy-op-hardforks = { workspace = true } + +# `serde` +serde = { workspace = true, features = ["derive", "alloc"] } +serde_json = { workspace = true, features = ["raw_value"] } + +# misc +lazy_static = { workspace = true, features = 
["spin_no_std"] } + +# `tabled` feature +tabled = { workspace = true, features = ["derive"], optional = true } + +[build-dependencies] +toml = { workspace = true, features = ["parse", "serde"] } +serde = { workspace = true } +serde_json = { workspace = true, features = ["raw_value"] } +kona-genesis = { workspace = true, features = ["serde"] } + +[dev-dependencies] +alloy-eips.workspace = true + +[features] +default = [] +tabled = [ "dep:tabled", "std" ] +std = [ + "alloy-chains/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "kona-genesis/std", + "serde/std", + "serde_json/std", + "tabled?/std", + "toml/std" +] diff --git a/rust/kona/crates/protocol/registry/README.md b/rust/kona/crates/protocol/registry/README.md new file mode 100644 index 00000000000..9f4d92a5952 --- /dev/null +++ b/rust/kona/crates/protocol/registry/README.md @@ -0,0 +1,123 @@ +## `kona-registry` + +[`kona-registry`][sc] is a `no_std` crate that exports rust type definitions for chains +in the [`superchain-registry`][osr]. Since it reads static files to read configurations for +various chains into instantiated objects, the [`kona-registry`][sc] crate requires +[`serde`][serde] as a dependency. To use the [`kona-registry`][sc] crate, add the crate +as a dependency to a `Cargo.toml`. + +```toml +kona-registry = "0.1.0" +``` + +[`kona-registry`][sc] declares lazy evaluated statics that expose `ChainConfig`s, `RollupConfig`s, +and `Chain` objects for all chains with static definitions in the superchain registry. The way this works +is the golang side of the superchain registry contains an "internal code generation" script that has +been modified to output configuration files to the [`crates/registry`][s] directory in the +`etc` folder that are read by the [`kona-registry`][sc] rust crate. These static config files +contain an up-to-date list of all superchain configurations with their chain configs. 
It is expected +that if the commit hash of the [`superchain-registry`][osr] pulled in as a git submodule has breaking +changes, the tests in this crate (`kona-registry`) will break and updates will need to be made. + +There are three core statics exposed by the [`kona-registry`][sc]. +- `CHAINS`: A list of chain objects containing the superchain metadata for this chain. +- `OPCHAINS`: A map from chain id to `ChainConfig`. +- `ROLLUP_CONFIGS`: A map from chain id to `RollupConfig`. + +[`kona-registry`][sc] exports the _complete_ list of chains within the superchain, as well as each +chain's `RollupConfig`s and `ChainConfig`s. + +### Custom chain configurations + +`kona-registry` embeds a frozen snapshot of the upstream superchain registry, but downstream +users can extend that snapshot at build time. This is useful when you need bespoke test chains or +partner networks that are not yet part of the public registry but still want to rely on the crate's +lazy statics. + +1. Produce JSON files that follow the same schema as the generated artifacts in `etc/`: + - `chainList.json` containing additional [`Chain`][chains] entries. + - `configs.json` containing [`Superchain`][superchains] structures with matching `ChainConfig`s and + `RollupConfig`s for the new chain ids. +2. Point the build to those files by setting the following environment variables during `cargo build` + (or `cargo test`): + ```sh + export KONA_CUSTOM_CONFIGS=true + export KONA_CUSTOM_CONFIGS_DIR=/absolute/path/to/custom-configs + cargo build -p kona-registry + ``` +3. The build script merges the custom files into the generated `etc/chainList.json` and + `etc/configs.json` before compiling the crate. Attempting to override existing chain ids will + result in build failures. + +Both JSON files must stay in lockstep: every chain listed in `configs.json` must also appear in +`chainList.json`, and chain identifiers must map to a single chain id. 
The build script validates +those invariants and will fail fast if it detects duplicates or mismatches. When publishing another +crate that depends on `kona-registry`, you can check the custom artifacts into your workspace and set +`KONA_CUSTOM_CONFIGS_DIR` via a build script or `just` recipe so that consumers automatically embed +the additional definitions. + +### Usage + +Add the following to your `Cargo.toml`. + +```toml +[dependencies] +kona-registry = "0.1.0" +``` + +To make `kona-registry` `no_std`, toggle `default-features` off like so. + +```toml +[dependencies] +kona-registry = { version = "0.1.0", default-features = false } +``` + +Below demonstrates getting the `RollupConfig` for OP Mainnet (Chain ID `10`). + +```rust +use kona_registry::ROLLUP_CONFIGS; + +let op_chain_id = 10; +let op_rollup_config = ROLLUP_CONFIGS.get(&op_chain_id); +println!("OP Mainnet Rollup Config: {:?}", op_rollup_config); +``` + +A mapping from chain id to `ChainConfig` is also available. + +```rust +use kona_registry::OPCHAINS; + +let op_chain_id = 10; +let op_chain_config = OPCHAINS.get(&op_chain_id); +println!("OP Mainnet Chain Config: {:?}", op_chain_config); +``` + + +### Feature Flags + +- `std`: Uses the standard library to pull in environment variables. + + +### Credits + +[superchain-registry][osr] contributors for building and maintaining superchain types. + +[alloy] and [op-alloy] for creating and maintaining high quality Ethereum and Optimism types in rust. 
+ + + + +[serde]: https://crates.io/crates/serde +[alloy]: https://github.com/alloy-rs/alloy +[op-alloy]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/op-alloy +[op-superchain]: https://docs.optimism.io/stack/explainer +[osr]: https://github.com/ethereum-optimism/superchain-registry + +[s]: ./crates/registry +[sc]: https://crates.io/crates/kona-registry +[g]: https://crates.io/crates/kona-genesis + +[chains]: https://docs.rs/kona-registry/latest/kona_registry/struct.CHAINS.html +[opchains]: https://docs.rs/kona-registry/latest/kona_registry/struct.OPCHAINS.html +[rollups]: https://docs.rs/kona-registry/latest/kona_registry/struct.ROLLUP_CONFIGS.html +[superchains]: https://docs.rs/kona-genesis/latest/kona_genesis/struct.Superchain.html diff --git a/rust/kona/crates/protocol/registry/build.rs b/rust/kona/crates/protocol/registry/build.rs new file mode 100644 index 00000000000..3e5b3ab8975 --- /dev/null +++ b/rust/kona/crates/protocol/registry/build.rs @@ -0,0 +1,313 @@ +//! Build script that generates a `configs.json` file from the configs. + +use std::{ + collections::{BTreeMap, BTreeSet, btree_map::Entry}, + fs, + path::{Path, PathBuf}, +}; + +use kona_genesis::{Chain, ChainConfig, ChainList, Superchain, SuperchainConfig, Superchains}; +use serde::de::DeserializeOwned; + +fn main() { + // If the `KONA_BIND` environment variable is _not_ set, then return early. + let kona_bind: bool = + std::env::var("KONA_BIND").unwrap_or_else(|_| "false".to_string()) == "true"; + println!("cargo:rerun-if-env-changed=KONA_BIND"); + if !kona_bind { + merge_custom_configs(); + return; + } + + // Get the directory of this file from the environment + let src_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + + // Check if the `superchain-registry` directory exists + let superchain_registry = format!("{src_dir}/superchain-registry"); + assert!( + std::path::Path::new(&superchain_registry).exists(), + "Git Submodule missing. 
Please run `just source` to initialize the submodule." + ); + + // Copy the `superchain-registry/chainList.json` file to `etc/chainList.json` + let chain_list = format!("{src_dir}/superchain-registry/chainList.json"); + let etc_dir = std::path::Path::new("etc"); + if !etc_dir.exists() { + std::fs::create_dir_all(etc_dir).unwrap(); + } + std::fs::copy(chain_list, "etc/chainList.json").unwrap(); + + // Get the `superchain-registry/superchain/configs` directory` + let configs_dir = format!("{src_dir}/superchain-registry/superchain/configs"); + let configs = std::fs::read_dir(configs_dir).unwrap(); + + // Get all the directories in the `configs` directory + let mut superchains = Superchains::default(); + for config in configs { + let config = config.unwrap(); + let config_path = config.path(); + let superchain_name = config.file_name().into_string().unwrap(); + let mut superchain = + Superchain { name: superchain_name, chains: Vec::new(), ..Default::default() }; + if config_path.is_dir() { + let config_files = std::fs::read_dir(&config_path).unwrap(); + for config_file in config_files { + let config_file = config_file.unwrap(); + let config_file_path = config_file.path(); + + // Read the `superchain.toml` as the `SuperchainConfig` + let config_file_name = config_file.file_name().into_string().unwrap(); + if config_file_name == "superchain.toml" { + let config = std::fs::read_to_string(config_file_path).unwrap(); + let config: SuperchainConfig = toml::from_str(&config).unwrap(); + superchain.config = config; + continue; + } + + // Read the config file as a `ChainConfig` + let config = std::fs::read_to_string(config_file_path).unwrap(); + let config: ChainConfig = toml::from_str(&config).unwrap(); + superchain.chains.push(config); + } + superchains.superchains.push(superchain); + } + } + + // Sort the superchains by name. + superchains.superchains.sort_by(|a, b| a.name.cmp(&b.name)); + + // For each superchain, sort the list of chains by chain id. 
+ for superchain in &mut superchains.superchains { + superchain.chains.sort_by(|a, b| a.chain_id.cmp(&b.chain_id)); + } + + let output_path = std::path::Path::new("etc/configs.json"); + std::fs::write(output_path, serde_json::to_string_pretty(&superchains).unwrap()).unwrap(); + merge_custom_configs(); +} + +fn merge_custom_configs() { + let kona_custom_configs = + std::env::var("KONA_CUSTOM_CONFIGS").unwrap_or_else(|_| "false".to_string()) == "true"; + println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS"); + println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS_TEST"); + + // if we're running tests, bust the cache if the base etc configs are updated. This ensures that + // the test build can be repeated after modifying the base configs + if std::env::var("KONA_CUSTOM_CONFIGS_TEST") == Ok("true".to_string()) { + println!("cargo:rerun-if-changed=etc/chainList.json"); + println!("cargo:rerun-if-changed=etc/configs.json"); + } + + if !kona_custom_configs { + return; + } + + let custom_configs_dir = std::env::var("KONA_CUSTOM_CONFIGS_DIR") + .expect("KONA_CUSTOM_CONFIGS_DIR must be set when KONA_CUSTOM_CONFIGS is enabled"); + println!("cargo:rerun-if-env-changed=KONA_CUSTOM_CONFIGS_DIR"); + let custom_configs_dir = PathBuf::from(custom_configs_dir); + assert!( + custom_configs_dir.exists(), + "Custom configs directory {} does not exist", + custom_configs_dir.display() + ); + + let custom_chain_list_path = custom_configs_dir.join("chainList.json"); + let custom_configs_path = custom_configs_dir.join("configs.json"); + + println!("cargo:rerun-if-changed={}", custom_chain_list_path.display()); + println!("cargo:rerun-if-changed={}", custom_configs_path.display()); + + let target_chain_list = Path::new("etc/chainList.json"); + let target_superchains = Path::new("etc/configs.json"); + + validate_chain_configs(&custom_chain_list_path, &custom_configs_path); + + merge_chain_list(&custom_chain_list_path, target_chain_list); + 
merge_superchain_configs(&custom_configs_path, target_superchains); + validate_chain_configs(target_chain_list, target_superchains); +} + +fn merge_chain_list(custom_path: &Path, target_path: &Path) { + assert!(custom_path.exists(), "Custom chain list {} does not exist", custom_path.display()); + assert!(target_path.exists(), "Target chain list {} does not exist", target_path.display()); + + let mut merged_chain_list: ChainList = read_json(target_path); + let custom_chain_list: ChainList = read_json(custom_path); + + let mut chains_by_id: BTreeMap = BTreeMap::new(); + let mut identifiers: BTreeMap = BTreeMap::new(); + + for chain in &merged_chain_list.chains { + let ident_key = chain.identifier.to_ascii_lowercase(); + identifiers.insert(ident_key, chain.clone()); + chains_by_id.insert(chain.chain_id, chain.clone()); + } + // preserve ordering of chains in etc/chainList.json + for chain in &custom_chain_list.chains { + let ident_key = chain.identifier.to_ascii_lowercase(); + if let Some(existing_chain) = identifiers.get(&ident_key) { + if existing_chain == chain { + continue; + } + panic!( + "Chain identifier `{}` in {} already exists in the registry with a different config", + chain.identifier, + custom_path.display() + ); + } + if let Some(existing_chain) = chains_by_id.get(&chain.chain_id) { + if existing_chain == chain { + continue; + } + panic!( + "Chain id {} in {} already exists in the registry with a different config for identifier `{}`", + chain.chain_id, + custom_path.display(), + existing_chain.identifier + ); + } + identifiers.insert(ident_key, chain.clone()); + chains_by_id.insert(chain.chain_id, chain.clone()); + merged_chain_list.chains.push(chain.clone()); + } + + write_pretty_json(target_path, &merged_chain_list); +} + +fn merge_superchain_configs(custom_path: &Path, target_path: &Path) { + assert!(custom_path.exists(), "Custom configs {} does not exist", custom_path.display()); + assert!(target_path.exists(), "Target configs {} does not exist", 
target_path.display()); + + let mut superchains: BTreeMap = read_json::(target_path) + .superchains + .into_iter() + .map(|sc| (sc.name.clone(), sc)) + .collect(); + + let custom_superchains: Superchains = read_json(custom_path); + + for custom in custom_superchains.superchains { + match superchains.entry(custom.name.clone()) { + Entry::Occupied(mut entry) => { + println!( + "cargo:warning=debug: merging custom chains {}: [{}]", + custom.name, + custom.chains.iter().map(|c| c.name.as_str()).collect::>().join(",") + ); + let existing = entry.get_mut(); + *existing = merge_superchain_entry(std::mem::take(existing), custom); + } + Entry::Vacant(entry) => { + println!( + "cargo:warning=debug: inserting new custom chain {}: [{}]", + custom.name, + custom.chains.iter().map(|c| c.name.as_str()).collect::>().join(",") + ); + entry.insert(custom); + } + } + } + + let mut merged: Vec = superchains.into_values().collect(); + merged.sort_by(|a, b| a.name.cmp(&b.name)); + for superchain in &mut merged { + superchain.chains.sort_by(|a, b| a.chain_id.cmp(&b.chain_id)); + } + + let merged = Superchains { superchains: merged }; + write_pretty_json(target_path, &merged); +} + +/// Merges the custom chains to the chains in the superchain-registry, panicking on conflicts +fn merge_superchain_entry(base: Superchain, custom: Superchain) -> Superchain { + let mut merged = base; + + // maintain the ordering of chains in base + let mut chain_map: BTreeMap = + merged.chains.clone().into_iter().map(|chain| (chain.chain_id, chain)).collect(); + for chain in custom.chains { + if let Some(existing_config) = chain_map.get(&chain.chain_id) { + if existing_config == &chain { + continue; + } + panic!( + "conflict merging superchain `{}`: chain id {} has differing configs", + merged.name, chain.chain_id + ); + } + chain_map.insert(chain.chain_id, chain.clone()); + merged.chains.push(chain.clone()); + } + merged +} + +fn validate_chain_configs(chain_list_path: &Path, superchains_path: &Path) { + if 
!chain_list_path.exists() || !superchains_path.exists() { + return; + } + + let chain_list: ChainList = read_json(chain_list_path); + let superchains: Superchains = read_json(superchains_path); + + let mut list_chain_ids = BTreeSet::new(); + for chain in &chain_list.chains { + assert!( + list_chain_ids.insert(chain.chain_id), + "Duplicate chain id {} (identifier `{}`) detected in {}", + chain.chain_id, + chain.identifier, + chain_list_path.display() + ); + } + + let mut config_chain_ids = BTreeSet::new(); + for superchain in &superchains.superchains { + for chain in &superchain.chains { + assert!( + config_chain_ids.insert(chain.chain_id), + "Duplicate chain id {} detected across superchain configs in {}", + chain.chain_id, + superchains_path.display() + ); + } + } + + for chain_id in &config_chain_ids { + assert!( + list_chain_ids.contains(chain_id), + "Chain id {} present in {} but missing from {}", + chain_id, + superchains_path.display(), + chain_list_path.display() + ); + } + + for chain in chain_list.chains { + assert!( + config_chain_ids.contains(&chain.chain_id), + "Chain `{}` (chain id {}) present in {} but missing from {}", + chain.identifier, + chain.chain_id, + chain_list_path.display(), + superchains_path.display() + ); + } +} + +fn read_json(path: &Path) -> T { + let contents = fs::read_to_string(path) + .unwrap_or_else(|e| panic!("Failed to read {}: {e}", path.display())); + serde_json::from_str(&contents) + .unwrap_or_else(|e| panic!("Failed to parse {}: {e}", path.display())) +} + +fn write_pretty_json(path: &Path, value: &T) { + fs::write( + path, + serde_json::to_string_pretty(value) + .unwrap_or_else(|e| panic!("Failed to serialize {}: {e}", path.display())), + ) + .unwrap_or_else(|e| panic!("Failed to write {}: {e}", path.display())); +} diff --git a/kona/crates/protocol/registry/etc/chainList.json b/rust/kona/crates/protocol/registry/etc/chainList.json similarity index 100% rename from kona/crates/protocol/registry/etc/chainList.json rename to 
rust/kona/crates/protocol/registry/etc/chainList.json diff --git a/kona/crates/protocol/registry/etc/configs.json b/rust/kona/crates/protocol/registry/etc/configs.json similarity index 100% rename from kona/crates/protocol/registry/etc/configs.json rename to rust/kona/crates/protocol/registry/etc/configs.json diff --git a/kona/crates/protocol/registry/justfile b/rust/kona/crates/protocol/registry/justfile similarity index 100% rename from kona/crates/protocol/registry/justfile rename to rust/kona/crates/protocol/registry/justfile diff --git a/kona/crates/protocol/registry/src/l1/mod.rs b/rust/kona/crates/protocol/registry/src/l1/mod.rs similarity index 100% rename from kona/crates/protocol/registry/src/l1/mod.rs rename to rust/kona/crates/protocol/registry/src/l1/mod.rs diff --git a/rust/kona/crates/protocol/registry/src/lib.rs b/rust/kona/crates/protocol/registry/src/lib.rs new file mode 100644 index 00000000000..dd51ece79ae --- /dev/null +++ b/rust/kona/crates/protocol/registry/src/lib.rs @@ -0,0 +1,195 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] + +extern crate alloc; + +pub use alloy_primitives::map::HashMap; +use kona_genesis::L1ChainConfig; +pub use kona_genesis::{Chain, ChainConfig, ChainList, RollupConfig}; + +pub mod superchain; +pub use superchain::Registry; + +/// L1 chain configurations. +pub mod l1; +pub use l1::L1Config; + +#[cfg(test)] +pub mod test_utils; + +lazy_static::lazy_static! { + /// Private initializer that loads the superchain configurations. 
+ static ref _INIT: Registry = Registry::from_chain_list(); + + /// Chain configurations exported from the registry + pub static ref CHAINS: ChainList = _INIT.chain_list.clone(); + + /// OP Chain configurations exported from the registry + pub static ref OPCHAINS: HashMap = _INIT.op_chains.clone(); + + /// Rollup configurations exported from the registry + pub static ref ROLLUP_CONFIGS: HashMap = _INIT.rollup_configs.clone(); + + /// L1 chain configurations exported from the registry + /// Note: the l1 chain configurations are not exported from the superchain registry but rather from a genesis dump file. + pub static ref L1_CONFIGS: HashMap = _INIT.l1_configs.clone(); +} + +/// Returns a [`RollupConfig`] by its identifier. +pub fn scr_rollup_config_by_ident(ident: &str) -> Option<&RollupConfig> { + let chain_id = CHAINS.get_chain_by_ident(ident)?.chain_id; + ROLLUP_CONFIGS.get(&chain_id) +} + +/// Returns a [`RollupConfig`] by its identifier. +pub fn scr_rollup_config_by_alloy_ident(chain: &alloy_chains::Chain) -> Option<&RollupConfig> { + ROLLUP_CONFIGS.get(&chain.id()) +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_chains::Chain as AlloyChain; + use alloy_hardforks::{ + holesky::{HOLESKY_BPO1_TIMESTAMP, HOLESKY_BPO2_TIMESTAMP}, + sepolia::{SEPOLIA_BPO1_TIMESTAMP, SEPOLIA_BPO2_TIMESTAMP}, + }; + use alloy_op_hardforks::{ + BASE_MAINNET_JOVIAN_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, + OP_SEPOLIA_JOVIAN_TIMESTAMP, + }; + + #[test] + fn test_hardcoded_rollup_configs() { + let test_cases = [ + (10, test_utils::OP_MAINNET_CONFIG), + (8453, test_utils::BASE_MAINNET_CONFIG), + (11155420, test_utils::OP_SEPOLIA_CONFIG), + (84532, test_utils::BASE_SEPOLIA_CONFIG), + ] + .to_vec(); + + for (chain_id, expected) in test_cases { + let derived = super::ROLLUP_CONFIGS.get(&chain_id).unwrap(); + assert_eq!(expected, *derived); + } + } + + #[test] + fn test_chain_by_ident() { + const ALLOY_BASE: AlloyChain = AlloyChain::base_mainnet(); + 
+ let chain_by_ident = CHAINS.get_chain_by_ident("mainnet/base").unwrap(); + let chain_by_alloy_ident = CHAINS.get_chain_by_alloy_ident(&ALLOY_BASE).unwrap(); + let chain_by_id = CHAINS.get_chain_by_id(8453).unwrap(); + + assert_eq!(chain_by_ident, chain_by_id); + assert_eq!(chain_by_alloy_ident, chain_by_id); + } + + #[test] + fn test_rollup_config_by_ident() { + const ALLOY_BASE: AlloyChain = AlloyChain::base_mainnet(); + + let rollup_config_by_ident = scr_rollup_config_by_ident("mainnet/base").unwrap(); + let rollup_config_by_alloy_ident = scr_rollup_config_by_alloy_ident(&ALLOY_BASE).unwrap(); + let rollup_config_by_id = ROLLUP_CONFIGS.get(&8453).unwrap(); + + assert_eq!(rollup_config_by_ident, rollup_config_by_id); + assert_eq!(rollup_config_by_alloy_ident, rollup_config_by_id); + } + + #[test] + fn test_jovian_timestamps() { + let base_mainnet_config_by_ident = scr_rollup_config_by_ident("mainnet/base").unwrap(); + assert_eq!( + base_mainnet_config_by_ident.hardforks.jovian_time, + Some(BASE_MAINNET_JOVIAN_TIMESTAMP) + ); + + let base_sepolia_config_by_ident = scr_rollup_config_by_ident("sepolia/base").unwrap(); + assert_eq!( + base_sepolia_config_by_ident.hardforks.jovian_time, + Some(BASE_SEPOLIA_JOVIAN_TIMESTAMP) + ); + + let op_mainnet_config_by_ident = scr_rollup_config_by_ident("mainnet/op").unwrap(); + assert_eq!( + op_mainnet_config_by_ident.hardforks.jovian_time, + Some(OP_MAINNET_JOVIAN_TIMESTAMP) + ); + + let op_sepolia_config_by_ident = scr_rollup_config_by_ident("sepolia/op").unwrap(); + assert_eq!( + op_sepolia_config_by_ident.hardforks.jovian_time, + Some(OP_SEPOLIA_JOVIAN_TIMESTAMP) + ); + } + + #[test] + fn test_bpo_timestamps() { + let sepolia_config = L1_CONFIGS.get(&11155111).unwrap(); + assert_eq!(sepolia_config.bpo1_time, Some(SEPOLIA_BPO1_TIMESTAMP)); + assert_eq!(sepolia_config.bpo2_time, Some(SEPOLIA_BPO2_TIMESTAMP)); + + let holesky_config = L1_CONFIGS.get(&17000).unwrap(); + assert_eq!(holesky_config.bpo1_time, 
Some(HOLESKY_BPO1_TIMESTAMP)); + assert_eq!(holesky_config.bpo2_time, Some(HOLESKY_BPO2_TIMESTAMP)); + } + + const CUSTOM_CONFIGS_TEST_ENABLED: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS_TEST"); + const CUSTOM_CONFIGS: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS"); + const CUSTOM_CONFIGS_DIR: Option<&str> = option_env!("KONA_CUSTOM_CONFIGS_DIR"); + + #[test] + fn custom_chain_is_loaded_when_enabled() { + if CUSTOM_CONFIGS_TEST_ENABLED != Some("true") { + return; + }; + assert!( + CUSTOM_CONFIGS == Some("true"), + "KONA_CUSTOM_CONFIGS is required when KONA_CUSTOM_CONFIGS_TEST is set" + ); + assert!( + CUSTOM_CONFIGS_DIR.is_some(), + "KONA_CUSTOM_CONFIGS_DIR is required when KONA_CUSTOM_CONFIGS_TEST is set" + ); + + let test1_chain_id = 123999119; + let test2_chain_id = 223999119; + let test1_ident = "test1/testnet"; + let test2_ident = "test2/testnet"; + + let chain1 = CHAINS + .get_chain_by_ident(test1_ident) + .unwrap_or_else(|| panic!("custom chain `{test1_ident}` missing")); + assert_eq!(chain1.chain_id, test1_chain_id); + let chain2 = CHAINS + .get_chain_by_ident(test2_ident) + .unwrap_or_else(|| panic!("custom chain `{test2_ident}` missing")); + assert_eq!(chain2.chain_id, test2_chain_id); + + assert!( + OPCHAINS.contains_key(&test1_chain_id), + "chain config missing for {test1_chain_id}" + ); + assert!( + ROLLUP_CONFIGS.contains_key(&test1_chain_id), + "rollup config missing for {test1_chain_id}" + ); + assert!( + OPCHAINS.contains_key(&test2_chain_id), + "chain config missing for {test2_chain_id}" + ); + assert!( + ROLLUP_CONFIGS.contains_key(&test2_chain_id), + "rollup config missing for {test2_chain_id}" + ); + } +} diff --git a/kona/crates/protocol/registry/src/superchain.rs b/rust/kona/crates/protocol/registry/src/superchain.rs similarity index 100% rename from kona/crates/protocol/registry/src/superchain.rs rename to rust/kona/crates/protocol/registry/src/superchain.rs diff --git a/kona/crates/protocol/registry/src/test_utils/base_mainnet.rs 
b/rust/kona/crates/protocol/registry/src/test_utils/base_mainnet.rs similarity index 98% rename from kona/crates/protocol/registry/src/test_utils/base_mainnet.rs rename to rust/kona/crates/protocol/registry/src/test_utils/base_mainnet.rs index 7d0c73e77bd..0082ad633ba 100644 --- a/kona/crates/protocol/registry/src/test_utils/base_mainnet.rs +++ b/rust/kona/crates/protocol/registry/src/test_utils/base_mainnet.rs @@ -13,7 +13,7 @@ use kona_genesis::{ HardForkConfig, RollupConfig, SystemConfig, }; -/// The [RollupConfig] for Base Mainnet. +/// The [`RollupConfig`] for Base Mainnet. pub const BASE_MAINNET_CONFIG: RollupConfig = RollupConfig { genesis: ChainGenesis { l1: BlockNumHash { diff --git a/rust/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs b/rust/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs new file mode 100644 index 00000000000..5f5b667d475 --- /dev/null +++ b/rust/kona/crates/protocol/registry/src/test_utils/base_sepolia.rs @@ -0,0 +1,73 @@ +//! Base Sepolia Rollup Config. + +use alloy_chains::Chain; +use alloy_eips::BlockNumHash; +use alloy_op_hardforks::{ + BASE_SEPOLIA_CANYON_TIMESTAMP, BASE_SEPOLIA_ECOTONE_TIMESTAMP, BASE_SEPOLIA_FJORD_TIMESTAMP, + BASE_SEPOLIA_GRANITE_TIMESTAMP, BASE_SEPOLIA_HOLOCENE_TIMESTAMP, + BASE_SEPOLIA_ISTHMUS_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, +}; +use alloy_primitives::{address, b256, uint}; +use kona_genesis::{ + BASE_SEPOLIA_BASE_FEE_CONFIG, ChainGenesis, DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, + HardForkConfig, RollupConfig, SystemConfig, +}; + +/// The [`RollupConfig`] for Base Sepolia. 
+pub const BASE_SEPOLIA_CONFIG: RollupConfig = RollupConfig { + genesis: ChainGenesis { + l1: BlockNumHash { + hash: b256!("cac9a83291d4dec146d6f7f69ab2304f23f5be87b1789119a0c5b1e4482444ed"), + number: 4370868, + }, + l2: BlockNumHash { + hash: b256!("0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4"), + number: 0, + }, + l2_time: 1695768288, + system_config: Some(SystemConfig { + batcher_address: address!("6cdebe940bc0f26850285caca097c11c33103e47"), + overhead: uint!(0x834_U256), + scalar: uint!(0xf4240_U256), + gas_limit: 25000000, + base_fee_scalar: None, + blob_base_fee_scalar: None, + eip1559_denominator: None, + eip1559_elasticity: None, + operator_fee_scalar: None, + operator_fee_constant: None, + min_base_fee: None, + da_footprint_gas_scalar: None, + }), + }, + block_time: 2, + max_sequencer_drift: 600, + seq_window_size: 3600, + channel_timeout: 300, + granite_channel_timeout: 50, + l1_chain_id: 11155111, + l2_chain_id: Chain::base_sepolia(), + chain_op_config: BASE_SEPOLIA_BASE_FEE_CONFIG, + alt_da_config: None, + hardforks: HardForkConfig { + regolith_time: None, + canyon_time: Some(BASE_SEPOLIA_CANYON_TIMESTAMP), + delta_time: Some(1703203200), + ecotone_time: Some(BASE_SEPOLIA_ECOTONE_TIMESTAMP), + fjord_time: Some(BASE_SEPOLIA_FJORD_TIMESTAMP), + granite_time: Some(BASE_SEPOLIA_GRANITE_TIMESTAMP), + holocene_time: Some(BASE_SEPOLIA_HOLOCENE_TIMESTAMP), + pectra_blob_schedule_time: Some(1742486400), + isthmus_time: Some(BASE_SEPOLIA_ISTHMUS_TIMESTAMP), + jovian_time: Some(BASE_SEPOLIA_JOVIAN_TIMESTAMP), + interop_time: None, + }, + batch_inbox_address: address!("ff00000000000000000000000000000000084532"), + deposit_contract_address: address!("49f53e41452c74589e85ca1677426ba426459e85"), + l1_system_config_address: address!("f272670eb55e895584501d564afeb048bed26194"), + protocol_versions_address: address!("79add5713b383daa0a138d3c4780c7a1804a8090"), + superchain_config_address: Some(address!("C2Be75506d5724086DEB7245bd260Cc9753911Be")), + 
da_challenge_address: None, + blobs_enabled_l1_timestamp: None, + interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, +}; diff --git a/kona/crates/protocol/registry/src/test_utils/mod.rs b/rust/kona/crates/protocol/registry/src/test_utils/mod.rs similarity index 100% rename from kona/crates/protocol/registry/src/test_utils/mod.rs rename to rust/kona/crates/protocol/registry/src/test_utils/mod.rs diff --git a/kona/crates/protocol/registry/src/test_utils/op_mainnet.rs b/rust/kona/crates/protocol/registry/src/test_utils/op_mainnet.rs similarity index 98% rename from kona/crates/protocol/registry/src/test_utils/op_mainnet.rs rename to rust/kona/crates/protocol/registry/src/test_utils/op_mainnet.rs index 1bb0cbba4e7..b6e3eea7417 100644 --- a/kona/crates/protocol/registry/src/test_utils/op_mainnet.rs +++ b/rust/kona/crates/protocol/registry/src/test_utils/op_mainnet.rs @@ -13,7 +13,7 @@ use kona_genesis::{ OP_MAINNET_BASE_FEE_CONFIG, RollupConfig, SystemConfig, }; -/// The [RollupConfig] for OP Mainnet. +/// The [`RollupConfig`] for OP Mainnet. pub const OP_MAINNET_CONFIG: RollupConfig = RollupConfig { genesis: ChainGenesis { l1: BlockNumHash { diff --git a/rust/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs b/rust/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs new file mode 100644 index 00000000000..9c42a29c8c8 --- /dev/null +++ b/rust/kona/crates/protocol/registry/src/test_utils/op_sepolia.rs @@ -0,0 +1,73 @@ +//! OP Sepolia Rollup Config. 
+ +use alloy_chains::Chain; +use alloy_eips::BlockNumHash; +use alloy_op_hardforks::{ + OP_SEPOLIA_CANYON_TIMESTAMP, OP_SEPOLIA_ECOTONE_TIMESTAMP, OP_SEPOLIA_FJORD_TIMESTAMP, + OP_SEPOLIA_GRANITE_TIMESTAMP, OP_SEPOLIA_HOLOCENE_TIMESTAMP, OP_SEPOLIA_ISTHMUS_TIMESTAMP, + OP_SEPOLIA_JOVIAN_TIMESTAMP, +}; +use alloy_primitives::{address, b256, uint}; +use kona_genesis::{ + ChainGenesis, DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, HardForkConfig, + OP_SEPOLIA_BASE_FEE_CONFIG, RollupConfig, SystemConfig, +}; + +/// The [`RollupConfig`] for OP Sepolia. +pub const OP_SEPOLIA_CONFIG: RollupConfig = RollupConfig { + genesis: ChainGenesis { + l1: BlockNumHash { + hash: b256!("48f520cf4ddaf34c8336e6e490632ea3cf1e5e93b0b2bc6e917557e31845371b"), + number: 4071408, + }, + l2: BlockNumHash { + hash: b256!("102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d"), + number: 0, + }, + l2_time: 1691802540, + system_config: Some(SystemConfig { + batcher_address: address!("8f23bb38f531600e5d8fddaaec41f13fab46e98c"), + overhead: uint!(0xbc_U256), + scalar: uint!(0xa6fe0_U256), + gas_limit: 30_000_000, + base_fee_scalar: None, + blob_base_fee_scalar: None, + eip1559_denominator: None, + eip1559_elasticity: None, + operator_fee_scalar: None, + operator_fee_constant: None, + min_base_fee: None, + da_footprint_gas_scalar: None, + }), + }, + block_time: 2, + max_sequencer_drift: 600, + seq_window_size: 3600, + channel_timeout: 300, + granite_channel_timeout: 50, + l1_chain_id: 11155111, + l2_chain_id: Chain::optimism_sepolia(), + chain_op_config: OP_SEPOLIA_BASE_FEE_CONFIG, + alt_da_config: None, + hardforks: HardForkConfig { + regolith_time: None, + canyon_time: Some(OP_SEPOLIA_CANYON_TIMESTAMP), + delta_time: Some(1703203200), + ecotone_time: Some(OP_SEPOLIA_ECOTONE_TIMESTAMP), + fjord_time: Some(OP_SEPOLIA_FJORD_TIMESTAMP), + granite_time: Some(OP_SEPOLIA_GRANITE_TIMESTAMP), + holocene_time: Some(OP_SEPOLIA_HOLOCENE_TIMESTAMP), + pectra_blob_schedule_time: Some(1742486400), + 
isthmus_time: Some(OP_SEPOLIA_ISTHMUS_TIMESTAMP), + jovian_time: Some(OP_SEPOLIA_JOVIAN_TIMESTAMP), + interop_time: None, + }, + batch_inbox_address: address!("ff00000000000000000000000000000011155420"), + deposit_contract_address: address!("16fc5058f25648194471939df75cf27a2fdc48bc"), + l1_system_config_address: address!("034edd2a225f7f429a63e0f1d2084b9e0a93b538"), + protocol_versions_address: address!("79add5713b383daa0a138d3c4780c7a1804a8090"), + superchain_config_address: Some(address!("C2Be75506d5724086DEB7245bd260Cc9753911Be")), + da_challenge_address: None, + blobs_enabled_l1_timestamp: None, + interop_message_expiry_window: DEFAULT_INTEROP_MESSAGE_EXPIRY_WINDOW, +}; diff --git a/kona/crates/protocol/registry/tests/fixtures/custom/chainList.json b/rust/kona/crates/protocol/registry/tests/fixtures/custom/chainList.json similarity index 100% rename from kona/crates/protocol/registry/tests/fixtures/custom/chainList.json rename to rust/kona/crates/protocol/registry/tests/fixtures/custom/chainList.json diff --git a/kona/crates/protocol/registry/tests/fixtures/custom/configs.json b/rust/kona/crates/protocol/registry/tests/fixtures/custom/configs.json similarity index 100% rename from kona/crates/protocol/registry/tests/fixtures/custom/configs.json rename to rust/kona/crates/protocol/registry/tests/fixtures/custom/configs.json diff --git a/rust/kona/crates/providers/providers-alloy/Cargo.toml b/rust/kona/crates/providers/providers-alloy/Cargo.toml new file mode 100644 index 00000000000..0eb62c6f63c --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/Cargo.toml @@ -0,0 +1,61 @@ +[package] +name = "kona-providers-alloy" +version = "0.3.3" +description = "Alloy Backed Providers" + +edition.workspace = true +authors.workspace = true +license.workspace = true +homepage.workspace = true +keywords.workspace = true +categories.workspace = true +repository.workspace = true +rust-version.workspace = true + +[lints] +workspace = true + +[dependencies] +# Kona 
+kona-macros.workspace = true +kona-genesis.workspace = true +kona-protocol.workspace = true +kona-derive.workspace = true + +# Alloy +alloy-serde.workspace = true +alloy-eips = { workspace = true, features = ["kzg"] } +alloy-transport.workspace = true +alloy-transport-http = { workspace = true, features = ["reqwest", "reqwest-rustls-tls", "hyper", "hyper-tls", "jwt-auth"] } +alloy-consensus.workspace = true +alloy-rpc-types-beacon.workspace = true +alloy-rpc-types-engine.workspace = true +alloy-rpc-client.workspace = true +alloy-provider = { workspace = true, features = ["ipc", "ws", "reqwest"] } +alloy-primitives = { workspace = true, features = ["map"] } + +# Op Alloy +op-alloy-consensus.workspace = true +op-alloy-network.workspace = true + +# Misc +lru.workspace = true +serde.workspace = true +thiserror.workspace = true +async-trait.workspace = true +tower.workspace = true +http-body-util.workspace = true + +c-kzg.workspace = true + +# `metrics` feature +metrics = { workspace = true, optional = true } + +[features] +default = [] +metrics = [ "dep:metrics", "kona-derive/metrics" ] + +[dev-dependencies] +tokio.workspace = true +httpmock.workspace = true +serde_json.workspace = true diff --git a/rust/kona/crates/providers/providers-alloy/README.md b/rust/kona/crates/providers/providers-alloy/README.md new file mode 100644 index 00000000000..482483b825a --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/README.md @@ -0,0 +1,3 @@ +# `kona-providers-alloy` + +Alloy-backed providers for `kona`. 
diff --git a/kona/crates/providers/providers-alloy/src/beacon_client.rs b/rust/kona/crates/providers/providers-alloy/src/beacon_client.rs similarity index 97% rename from kona/crates/providers/providers-alloy/src/beacon_client.rs rename to rust/kona/crates/providers/providers-alloy/src/beacon_client.rs index cacaf9d5112..ba06919ba69 100644 --- a/kona/crates/providers/providers-alloy/src/beacon_client.rs +++ b/rust/kona/crates/providers/providers-alloy/src/beacon_client.rs @@ -6,9 +6,9 @@ use crate::blobs::BoxedBlobWithIndex; use alloy_eips::eip4844::{IndexedBlobHash, env_settings::EnvKzgSettings, kzg_to_versioned_hash}; use alloy_primitives::{B256, FixedBytes}; use alloy_rpc_types_beacon::sidecar::GetBlobsResponse; +use alloy_transport_http::reqwest::{self, Client}; use async_trait::async_trait; use c_kzg::Blob; -use reqwest::Client; use std::{boxed::Box, collections::HashMap, format, string::String, vec::Vec}; use thiserror::Error; @@ -67,10 +67,10 @@ impl APIGenesisResponse { } } -/// The [BeaconClient] is a thin wrapper around the Beacon API. +/// The [`BeaconClient`] is a thin wrapper around the Beacon API. #[async_trait] pub trait BeaconClient { - /// The error type for [BeaconClient] implementations. + /// The error type for [`BeaconClient`] implementations. type Error: core::fmt::Display; /// Returns the slot interval in seconds. @@ -114,7 +114,7 @@ pub enum BeaconClientError { KZG(#[from] c_kzg::Error), } -/// An online implementation of the [BeaconClient] trait. +/// An online implementation of the [`BeaconClient`] trait. #[derive(Debug, Clone)] pub struct OnlineBeaconClient { /// The base URL of the beacon API. @@ -127,10 +127,10 @@ pub struct OnlineBeaconClient { } impl OnlineBeaconClient { - /// Creates a new [OnlineBeaconClient] from the provided base URL string. + /// Creates a new [`OnlineBeaconClient`] from the provided base URL string. 
pub fn new_http(mut base: String) -> Self { // If base ends with a slash, remove it - if base.ends_with("/") { + if base.ends_with('/') { base.remove(base.len() - 1); } Self { diff --git a/rust/kona/crates/providers/providers-alloy/src/blobs.rs b/rust/kona/crates/providers/providers-alloy/src/blobs.rs new file mode 100644 index 00000000000..1f73789e0ab --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/src/blobs.rs @@ -0,0 +1,197 @@ +//! Contains an online implementation of the `BlobProvider` trait. + +use crate::BeaconClient; +#[cfg(feature = "metrics")] +use crate::Metrics; +use alloy_eips::eip4844::{ + Blob, BlobTransactionSidecarItem, IndexedBlobHash, env_settings::EnvKzgSettings, +}; +use alloy_primitives::FixedBytes; +use async_trait::async_trait; +use kona_derive::{BlobProvider, BlobProviderError}; +use kona_protocol::BlockInfo; +use std::{boxed::Box, string::ToString, vec::Vec}; + +/// A boxed blob with index. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BoxedBlobWithIndex { + /// The index of the blob. + pub index: u64, + /// The blob data. + pub blob: Box, +} + +/// An online implementation of the [`BlobProvider`] trait. +#[derive(Debug, Clone)] +pub struct OnlineBlobProvider { + /// The Beacon API client. + pub beacon_client: B, + /// Beacon Genesis time used for the time to slot conversion. + pub genesis_time: u64, + /// Slot interval used for the time to slot conversion. + pub slot_interval: u64, +} + +impl OnlineBlobProvider { + /// Creates a new instance of the [`OnlineBlobProvider`]. + /// + /// The `genesis_time` and `slot_interval` arguments are _optional_ and the + /// [`OnlineBlobProvider`] will attempt to load them dynamically at runtime if they are not + /// provided. + /// + /// ## Panics + /// Panics if the genesis time or slot interval cannot be loaded from the beacon client. 
+ pub async fn init(beacon_client: B) -> Self { + let genesis_time = beacon_client + .genesis_time() + .await + .map(|r| r.data.genesis_time) + .map_err(|e| BlobProviderError::Backend(e.to_string())) + .expect("Failed to load genesis time from beacon client"); + let slot_interval = beacon_client + .slot_interval() + .await + .map(|r| r.data.seconds_per_slot) + .map_err(|e| BlobProviderError::Backend(e.to_string())) + .expect("Failed to load slot interval from beacon client"); + Self { beacon_client, genesis_time, slot_interval } + } + + /// Computes the slot for the given timestamp. + pub const fn slot( + genesis: u64, + slot_time: u64, + timestamp: u64, + ) -> Result { + if timestamp < genesis { + return Err(BlobProviderError::SlotDerivation); + } + Ok((timestamp - genesis) / slot_time) + } + + /// Fetches blobs for the given slot. + async fn fetch_filtered_blobs( + &self, + slot: u64, + blob_hashes: &[IndexedBlobHash], + ) -> Result, BlobProviderError> { + kona_macros::inc!(gauge, Metrics::BLOB_FETCHES); + + let result = self + .beacon_client + .filtered_beacon_blobs(slot, blob_hashes) + .await + .map_err(|e| BlobProviderError::Backend(e.to_string())); + + #[cfg(feature = "metrics")] + if result.is_err() { + kona_macros::inc!(gauge, Metrics::BLOB_FETCH_ERRORS); + } + + result + } + + /// Converts a vector of boxed blobs with index to a vector of blob transaction sidecar items. + /// + /// Note: for performance reasons, we need to transmute the blobs to the `c_kzg::Blob` type to + /// avoid the overhead of moving the blobs around or reallocating the memory. 
+ fn sidecar_from_blobs( + blobs: Vec, + ) -> Result, c_kzg::Error> { + blobs + .into_iter() + .map(|blob| { + let kzg_settings = EnvKzgSettings::Default; + + // SAFETY: all types have the same size and alignment + let kzg_blob = + unsafe { Box::from_raw(Box::::into_raw(blob.blob) as *mut c_kzg::Blob) }; + + let commitment = kzg_settings + .get() + .blob_to_kzg_commitment(&kzg_blob) + .map(|blob| blob.to_bytes())?; + let proof = kzg_settings + .get() + .compute_blob_kzg_proof(&kzg_blob, &commitment) + .map(|proof| proof.to_bytes())?; + + // SAFETY: all types have the same size and alignment + let alloy_blob = + unsafe { Box::from_raw(Box::::into_raw(kzg_blob) as *mut Blob) }; + + Ok(BlobTransactionSidecarItem { + index: blob.index, + blob: alloy_blob, + kzg_commitment: FixedBytes::from(*commitment), + kzg_proof: FixedBytes::from(*proof), + }) + }) + .collect() + } + + /// Fetches blob sidecars for the given block reference and blob hashes. + /// Does not validate the blobs. Recomputes the kzg proofs associated with the blobs. + /// + /// Use [`Self::beacon_client`] to fetch the blobs without recomputing the kzg + /// proofs/commitments. + pub async fn fetch_filtered_blob_sidecars( + &self, + block_ref: &BlockInfo, + blob_hashes: &[IndexedBlobHash], + ) -> Result, BlobProviderError> { + if blob_hashes.is_empty() { + return Ok(Default::default()); + } + + // Calculate the slot for the given timestamp. + let slot = Self::slot(self.genesis_time, self.slot_interval, block_ref.timestamp)?; + + // Fetch blobs for the slot using. + let blobs = self.fetch_filtered_blobs(slot, blob_hashes).await?; + + Self::sidecar_from_blobs(blobs) + .map_err(|e| BlobProviderError::Backend(format!("KZG commitment error: {e}"))) + } +} + +#[async_trait] +impl BlobProvider for OnlineBlobProvider +where + B: BeaconClient + Send + Sync, +{ + type Error = BlobProviderError; + + /// Fetches blobs that were confirmed in the specified L1 block with the given indexed + /// hashes. 
The blobs are validated for their index and hashes using the specified + /// [`IndexedBlobHash`]. + async fn get_and_validate_blobs( + &mut self, + block_ref: &BlockInfo, + blob_hashes: &[IndexedBlobHash], + ) -> Result>, Self::Error> { + // Fetch the blob sidecars for the given block reference and blob hashes. + let blobs = self.fetch_filtered_blob_sidecars(block_ref, blob_hashes).await?; + + // Validate the blob sidecars straight away with the num hashes. + let blobs = blobs + .into_iter() + .enumerate() + .map(|(i, sidecar)| { + let hash = blob_hashes + .get(i) + .ok_or_else(|| BlobProviderError::Backend("Missing blob hash".to_string()))? + .hash + .as_slice(); + + if sidecar.to_kzg_versioned_hash() != hash { + return Err(BlobProviderError::Backend("KZG commitment mismatch".to_string())); + } + + Ok(sidecar.blob) + }) + .collect::>, BlobProviderError>>() + .map_err(|e| BlobProviderError::Backend(e.to_string()))?; + Ok(blobs) + } +} diff --git a/rust/kona/crates/providers/providers-alloy/src/chain_provider.rs b/rust/kona/crates/providers/providers-alloy/src/chain_provider.rs new file mode 100644 index 00000000000..b36a0c715fb --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/src/chain_provider.rs @@ -0,0 +1,272 @@ +//! Providers that use alloy provider types on the backend. + +#[cfg(feature = "metrics")] +use crate::Metrics; +use alloy_consensus::{Header, Receipt, TxEnvelope}; +use alloy_eips::BlockId; +use alloy_primitives::B256; +use alloy_provider::{Provider, RootProvider}; +use alloy_transport::{RpcError, TransportErrorKind}; +use alloy_transport_http::reqwest; +use async_trait::async_trait; +use kona_derive::{ChainProvider, PipelineError, PipelineErrorKind}; +use kona_protocol::BlockInfo; +use lru::LruCache; +use std::{boxed::Box, num::NonZeroUsize, vec::Vec}; + +/// The [`AlloyChainProvider`] is a concrete implementation of the [`ChainProvider`] trait, +/// providing data over Ethereum JSON-RPC using an alloy provider as the backend. 
+#[derive(Debug, Clone)] +pub struct AlloyChainProvider { + /// The inner Ethereum JSON-RPC provider. + pub inner: RootProvider, + /// Whether to trust the RPC without verification. + pub trust_rpc: bool, + /// `header_by_hash` LRU cache. + header_by_hash_cache: LruCache, + /// `receipts_by_hash_cache` LRU cache. + receipts_by_hash_cache: LruCache>, + /// `block_info_and_transactions_by_hash` LRU cache. + block_info_and_transactions_by_hash_cache: LruCache)>, +} + +impl AlloyChainProvider { + /// Creates a new [`AlloyChainProvider`] with the given alloy provider. + /// + /// ## Panics + /// - Panics if `cache_size` is zero. + pub fn new(inner: RootProvider, cache_size: usize) -> Self { + Self::new_with_trust(inner, cache_size, true) + } + + /// Creates a new [`AlloyChainProvider`] with the given alloy provider and trust setting. + /// + /// ## Panics + /// - Panics if `cache_size` is zero. + pub fn new_with_trust(inner: RootProvider, cache_size: usize, trust_rpc: bool) -> Self { + Self { + inner, + trust_rpc, + header_by_hash_cache: LruCache::new(NonZeroUsize::new(cache_size).unwrap()), + receipts_by_hash_cache: LruCache::new(NonZeroUsize::new(cache_size).unwrap()), + block_info_and_transactions_by_hash_cache: LruCache::new( + NonZeroUsize::new(cache_size).unwrap(), + ), + } + } + + /// Creates a new [`AlloyChainProvider`] from the provided [`reqwest::Url`]. + pub fn new_http(url: reqwest::Url, cache_size: usize) -> Self { + let inner = RootProvider::new_http(url); + Self::new(inner, cache_size) + } + + /// Returns the latest L2 block number. + pub async fn latest_block_number(&mut self) -> Result> { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_number"); + + let result = self.inner.get_block_number().await; + + #[cfg(feature = "metrics")] + if result.is_err() { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_number"); + } + + result + } + + /// Returns the chain ID. 
+ pub async fn chain_id(&mut self) -> Result> { + self.inner.get_chain_id().await + } + + /// Verifies that a header's hash matches the expected hash when `trust_rpc` is false. + fn verify_header_hash( + &self, + header: &Header, + expected_hash: B256, + ) -> Result<(), AlloyChainProviderError> { + if self.trust_rpc { + return Ok(()); + } + + let actual_hash = header.hash_slow(); + if actual_hash != expected_hash { + return Err(AlloyChainProviderError::Transport(RpcError::Transport( + TransportErrorKind::Custom( + format!( + "Header hash mismatch: expected {expected_hash:?}, got {actual_hash:?}" + ) + .into(), + ), + ))); + } + + Ok(()) + } +} + +/// An error for the [`AlloyChainProvider`]. +#[allow(clippy::enum_variant_names)] +#[derive(Debug, thiserror::Error)] +pub enum AlloyChainProviderError { + /// Transport error + #[error(transparent)] + Transport(#[from] RpcError), + /// Block not found. + #[error("Block not found: {0}")] + BlockNotFound(BlockId), + /// Failed to convert RPC receipts into consensus receipts. 
+ #[error("Failed to convert RPC receipts into consensus receipts: {0}")] + ReceiptsConversion(B256), +} + +impl From for PipelineErrorKind { + fn from(e: AlloyChainProviderError) -> Self { + match e { + AlloyChainProviderError::Transport(e) => { + Self::Temporary(PipelineError::Provider(format!("Transport error: {e}"))) + } + AlloyChainProviderError::BlockNotFound(id) => { + Self::Temporary(PipelineError::Provider(format!("L1 Block not found: {id}"))) + } + AlloyChainProviderError::ReceiptsConversion(_) => { + Self::Temporary(PipelineError::Provider( + "Failed to convert RPC receipts into consensus receipts".to_string(), + )) + } + } + } +} + +#[async_trait] +impl ChainProvider for AlloyChainProvider { + type Error = AlloyChainProviderError; + + async fn header_by_hash(&mut self, hash: B256) -> Result { + if let Some(header) = self.header_by_hash_cache.get(&hash) { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "header_by_hash"); + return Ok(header.clone()); + } + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "header_by_hash"); + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "header_by_hash"); + + let block = self + .inner + .get_block_by_hash(hash) + .await + .inspect_err(|_e| { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "header_by_hash"); + })? 
+ .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))?; + let header = block.header.into_consensus(); + + // Verify the header hash matches what we requested + self.verify_header_hash(&header, hash)?; + + self.header_by_hash_cache.put(hash, header.clone()); + + kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "header_by_hash"); + + Ok(header) + } + + async fn block_info_by_number(&mut self, number: u64) -> Result { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_by_number"); + + let block = self + .inner + .get_block_by_number(number.into()) + .await + .inspect_err(|_e| { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_by_number"); + })? + .ok_or(AlloyChainProviderError::BlockNotFound(number.into()))?; + let header = block.header.into_consensus(); + + let block_info = BlockInfo { + hash: header.hash_slow(), + number, + parent_hash: header.parent_hash, + timestamp: header.timestamp, + }; + Ok(block_info) + } + + async fn receipts_by_hash(&mut self, hash: B256) -> Result, Self::Error> { + if let Some(receipts) = self.receipts_by_hash_cache.get(&hash) { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "receipts_by_hash"); + return Ok(receipts.clone()); + } + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "receipts_by_hash"); + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "receipts_by_hash"); + + let receipts = self + .inner + .get_block_receipts(hash.into()) + .await + .inspect_err(|_e| { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "receipts_by_hash"); + })? 
+ .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))?; + let consensus_receipts = receipts + .into_iter() + .map(|r| r.inner.into_primitives_receipt().as_receipt().cloned()) + .collect::>>() + .ok_or(AlloyChainProviderError::ReceiptsConversion(hash))?; + + self.receipts_by_hash_cache.put(hash, consensus_receipts.clone()); + + kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "receipts_by_hash"); + + Ok(consensus_receipts) + } + + async fn block_info_and_transactions_by_hash( + &mut self, + hash: B256, + ) -> Result<(BlockInfo, Vec), Self::Error> { + if let Some(block_info_and_txs) = self.block_info_and_transactions_by_hash_cache.get(&hash) + { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_HITS, "cache" => "block_info_and_tx"); + return Ok(block_info_and_txs.clone()); + } + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_CACHE_MISSES, "cache" => "block_info_and_tx"); + + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_CALLS, "method" => "block_by_hash"); + + let block = self + .inner + .get_block_by_hash(hash) + .full() + .await + .inspect_err(|_e| { + kona_macros::inc!(gauge, Metrics::CHAIN_PROVIDER_RPC_ERRORS, "method" => "block_by_hash"); + })? + .ok_or(AlloyChainProviderError::BlockNotFound(hash.into()))? 
+ .into_consensus() + .map_transactions(|t| t.inner.into_inner()); + + // Verify the block hash matches what we requested + self.verify_header_hash(&block.header, hash)?; + + let block_info = BlockInfo { + hash, // Use the already verified hash instead of recomputing + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + }; + + self.block_info_and_transactions_by_hash_cache + .put(hash, (block_info, block.body.transactions.clone())); + + kona_macros::inc!(gauge, Metrics::CACHE_ENTRIES, "cache" => "block_info_and_tx"); + + Ok((block_info, block.body.transactions)) + } +} diff --git a/kona/crates/providers/providers-alloy/src/l2_chain_provider.rs b/rust/kona/crates/providers/providers-alloy/src/l2_chain_provider.rs similarity index 92% rename from kona/crates/providers/providers-alloy/src/l2_chain_provider.rs rename to rust/kona/crates/providers/providers-alloy/src/l2_chain_provider.rs index d12ef30852a..d26f15e4a60 100644 --- a/kona/crates/providers/providers-alloy/src/l2_chain_provider.rs +++ b/rust/kona/crates/providers/providers-alloy/src/l2_chain_provider.rs @@ -11,6 +11,7 @@ use alloy_transport::{RpcError, TransportErrorKind}; use alloy_transport_http::{ AuthLayer, Http, HyperClient, hyper_util::{client::legacy::Client, rt::TokioExecutor}, + reqwest, }; use async_trait::async_trait; use http_body_util::Full; @@ -23,7 +24,7 @@ use op_alloy_network::Optimism; use std::{num::NonZeroUsize, sync::Arc}; use tower::ServiceBuilder; -/// The [AlloyL2ChainProvider] is a concrete implementation of the [L2ChainProvider] trait, +/// The [`AlloyL2ChainProvider`] is a concrete implementation of the [`L2ChainProvider`] trait, /// providing data over Ethereum JSON-RPC using an alloy provider as the backend. 
#[derive(Debug, Clone)] pub struct AlloyL2ChainProvider { @@ -38,7 +39,7 @@ pub struct AlloyL2ChainProvider { } impl AlloyL2ChainProvider { - /// Creates a new [AlloyL2ChainProvider] with the given alloy provider and [RollupConfig]. + /// Creates a new [`AlloyL2ChainProvider`] with the given alloy provider and [`RollupConfig`]. /// /// ## Panics /// - Panics if `cache_size` is zero. @@ -50,7 +51,7 @@ impl AlloyL2ChainProvider { Self::new_with_trust(inner, rollup_config, cache_size, true) } - /// Creates a new [AlloyL2ChainProvider] with the given alloy provider, [RollupConfig], and + /// Creates a new [`AlloyL2ChainProvider`] with the given alloy provider, [`RollupConfig`], and /// trust setting. /// /// ## Panics @@ -79,7 +80,7 @@ impl AlloyL2ChainProvider { self.inner.get_block_number().await } - /// Verifies that a block's hash matches the expected hash when trust_rpc is false. + /// Verifies that a block's hash matches the expected hash when `trust_rpc` is false. fn verify_block_hash( &self, block_hash: B256, @@ -98,7 +99,7 @@ impl AlloyL2ChainProvider { Ok(()) } - /// Returns the [L2BlockInfo] for the given [BlockId]. [None] is returned if the block + /// Returns the [`L2BlockInfo`] for the given [`BlockId`]. [None] is returned if the block /// does not exist. pub async fn block_info_by_id( &mut self, @@ -156,7 +157,7 @@ impl AlloyL2ChainProvider { result } - /// Creates a new [AlloyL2ChainProvider] from the provided [reqwest::Url]. + /// Creates a new [`AlloyL2ChainProvider`] from the provided [`reqwest::Url`]. pub fn new_http( url: reqwest::Url, rollup_config: Arc, @@ -177,7 +178,7 @@ impl AlloyL2ChainProvider { } } -/// An error for the [AlloyL2ChainProvider]. +/// An error for the [`AlloyL2ChainProvider`]. #[derive(Debug, thiserror::Error)] pub enum AlloyL2ChainProviderError { /// Transport error @@ -186,10 +187,10 @@ pub enum AlloyL2ChainProviderError { /// Failed to find a block. 
#[error("Failed to fetch block {0}")] BlockNotFound(u64), - /// Failed to construct [L2BlockInfo] from the block and genesis. + /// Failed to construct [`L2BlockInfo`] from the block and genesis. #[error("Failed to construct L2BlockInfo from block {0} and genesis")] L2BlockInfoConstruction(u64), - /// Failed to convert the block into a [SystemConfig]. + /// Failed to convert the block into a [`SystemConfig`]. #[error("Failed to convert block {0} into SystemConfig")] SystemConfigConversion(u64), } diff --git a/rust/kona/crates/providers/providers-alloy/src/lib.rs b/rust/kona/crates/providers/providers-alloy/src/lib.rs new file mode 100644 index 00000000000..97c7fe3e15b --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/src/lib.rs @@ -0,0 +1,29 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +mod metrics; +pub use beacon_client::BeaconClientError; +pub use metrics::Metrics; + +mod beacon_client; +pub use beacon_client::{ + APIConfigResponse, APIGenesisResponse, BeaconClient, OnlineBeaconClient, ReducedConfigData, + ReducedGenesisData, +}; + +mod blobs; +pub use blobs::{BoxedBlobWithIndex, OnlineBlobProvider}; + +mod chain_provider; +pub use chain_provider::{AlloyChainProvider, AlloyChainProviderError}; + +mod l2_chain_provider; +pub use l2_chain_provider::{AlloyL2ChainProvider, AlloyL2ChainProviderError}; + +mod pipeline; +pub use pipeline::OnlinePipeline; diff --git a/kona/crates/providers/providers-alloy/src/metrics.rs b/rust/kona/crates/providers/providers-alloy/src/metrics.rs similarity index 100% rename from kona/crates/providers/providers-alloy/src/metrics.rs rename to 
rust/kona/crates/providers/providers-alloy/src/metrics.rs diff --git a/rust/kona/crates/providers/providers-alloy/src/pipeline.rs b/rust/kona/crates/providers/providers-alloy/src/pipeline.rs new file mode 100644 index 00000000000..66d0dbed902 --- /dev/null +++ b/rust/kona/crates/providers/providers-alloy/src/pipeline.rs @@ -0,0 +1,230 @@ +//! Contains an online derivation pipeline. + +use crate::{AlloyChainProvider, AlloyL2ChainProvider, OnlineBeaconClient, OnlineBlobProvider}; +use async_trait::async_trait; +use core::fmt::Debug; +use kona_derive::{ + DerivationPipeline, EthereumDataSource, IndexedAttributesQueueStage, L2ChainProvider, + OriginProvider, Pipeline, PipelineBuilder, PipelineErrorKind, PipelineResult, + PolledAttributesQueueStage, ResetSignal, Signal, SignalReceiver, StatefulAttributesBuilder, + StepResult, +}; +use kona_genesis::{L1ChainConfig, RollupConfig, SystemConfig}; +use kona_protocol::{BlockInfo, L2BlockInfo, OpAttributesWithParent}; +use std::sync::Arc; + +/// An online polled derivation pipeline. +type OnlinePolledDerivationPipeline = DerivationPipeline< + PolledAttributesQueueStage< + OnlineDataProvider, + AlloyChainProvider, + AlloyL2ChainProvider, + OnlineAttributesBuilder, + >, + AlloyL2ChainProvider, +>; + +/// An online managed derivation pipeline. +type OnlineManagedDerivationPipeline = DerivationPipeline< + IndexedAttributesQueueStage< + OnlineDataProvider, + AlloyChainProvider, + AlloyL2ChainProvider, + OnlineAttributesBuilder, + >, + AlloyL2ChainProvider, +>; + +/// An RPC-backed Ethereum data source. +type OnlineDataProvider = + EthereumDataSource>; + +/// An RPC-backed payload attributes builder for the `AttributesQueue` stage of the derivation +/// pipeline. +type OnlineAttributesBuilder = StatefulAttributesBuilder; + +/// An online derivation pipeline. +#[derive(Debug)] +pub enum OnlinePipeline { + /// An online derivation pipeline that uses a polled traversal stage. 
+ Polled(OnlinePolledDerivationPipeline), + /// An online derivation pipeline that uses a managed traversal stage. + Managed(OnlineManagedDerivationPipeline), +} + +impl OnlinePipeline { + /// Constructs a new polled derivation pipeline that is initialized. + pub async fn new( + cfg: Arc, + l1_cfg: Arc, + l2_safe_head: L2BlockInfo, + l1_origin: BlockInfo, + blob_provider: OnlineBlobProvider, + chain_provider: AlloyChainProvider, + mut l2_chain_provider: AlloyL2ChainProvider, + ) -> PipelineResult { + let mut pipeline = Self::new_polled( + cfg.clone(), + l1_cfg.clone(), + blob_provider, + chain_provider, + l2_chain_provider.clone(), + ); + + // Reset the pipeline to populate the initial L1/L2 cursor and system configuration in L1 + // Traversal. + pipeline + .signal( + ResetSignal { + l2_safe_head, + l1_origin, + system_config: l2_chain_provider + .system_config_by_number(l2_safe_head.block_info.number, cfg.clone()) + .await + .ok(), + } + .signal(), + ) + .await?; + + Ok(pipeline) + } + + /// Constructs a new polled derivation pipeline that is uninitialized. + /// + /// Uses online providers as specified by the arguments. + /// + /// Before using the returned pipeline, a [`ResetSignal`] must be sent to + /// instantiate the pipeline state. [`Self::new`] is a convenience method that + /// constructs a new online pipeline and sends the reset signal. 
+ pub fn new_polled( + cfg: Arc, + l1_cfg: Arc, + blob_provider: OnlineBlobProvider, + chain_provider: AlloyChainProvider, + l2_chain_provider: AlloyL2ChainProvider, + ) -> Self { + let attributes = StatefulAttributesBuilder::new( + cfg.clone(), + l1_cfg, + l2_chain_provider.clone(), + chain_provider.clone(), + ); + let dap = EthereumDataSource::new_from_parts(chain_provider.clone(), blob_provider, &cfg); + + let pipeline = PipelineBuilder::new() + .rollup_config(cfg) + .dap_source(dap) + .l2_chain_provider(l2_chain_provider) + .chain_provider(chain_provider) + .builder(attributes) + .origin(BlockInfo::default()) + .build_polled(); + + Self::Polled(pipeline) + } + + /// Constructs a new indexed derivation pipeline that is uninitialized. + /// + /// Uses online providers as specified by the arguments. + /// + /// Before using the returned pipeline, a [`ResetSignal`] must be sent to + /// instantiate the pipeline state. [`Self::new`] is a convenience method that + /// constructs a new online pipeline and sends the reset signal. + pub fn new_indexed( + cfg: Arc, + l1_cfg: Arc, + blob_provider: OnlineBlobProvider, + chain_provider: AlloyChainProvider, + l2_chain_provider: AlloyL2ChainProvider, + ) -> Self { + let attributes = StatefulAttributesBuilder::new( + cfg.clone(), + l1_cfg, + l2_chain_provider.clone(), + chain_provider.clone(), + ); + let dap = EthereumDataSource::new_from_parts(chain_provider.clone(), blob_provider, &cfg); + + let pipeline = PipelineBuilder::new() + .rollup_config(cfg) + .dap_source(dap) + .l2_chain_provider(l2_chain_provider) + .chain_provider(chain_provider) + .builder(attributes) + .origin(BlockInfo::default()) + .build_indexed(); + + Self::Managed(pipeline) + } +} + +#[async_trait] +impl SignalReceiver for OnlinePipeline { + /// Receives a signal from the driver. 
+ async fn signal(&mut self, signal: Signal) -> PipelineResult<()> { + match self { + Self::Polled(pipeline) => pipeline.signal(signal).await, + Self::Managed(pipeline) => pipeline.signal(signal).await, + } + } +} + +impl OriginProvider for OnlinePipeline { + /// Returns the optional L1 [`BlockInfo`] origin. + fn origin(&self) -> Option { + match self { + Self::Polled(pipeline) => pipeline.origin(), + Self::Managed(pipeline) => pipeline.origin(), + } + } +} + +impl Iterator for OnlinePipeline { + type Item = OpAttributesWithParent; + + fn next(&mut self) -> Option { + match self { + Self::Polled(pipeline) => pipeline.next(), + Self::Managed(pipeline) => pipeline.next(), + } + } +} + +#[async_trait] +impl Pipeline for OnlinePipeline { + /// Peeks at the next [`OpAttributesWithParent`] from the pipeline. + fn peek(&self) -> Option<&OpAttributesWithParent> { + match self { + Self::Polled(pipeline) => pipeline.peek(), + Self::Managed(pipeline) => pipeline.peek(), + } + } + + /// Attempts to progress the pipeline. + async fn step(&mut self, cursor: L2BlockInfo) -> StepResult { + match self { + Self::Polled(pipeline) => pipeline.step(cursor).await, + Self::Managed(pipeline) => pipeline.step(cursor).await, + } + } + + /// Returns the rollup config. + fn rollup_config(&self) -> &RollupConfig { + match self { + Self::Polled(pipeline) => pipeline.rollup_config(), + Self::Managed(pipeline) => pipeline.rollup_config(), + } + } + + /// Returns the [`SystemConfig`] by L2 number. 
+ async fn system_config_by_number( + &mut self, + number: u64, + ) -> Result { + match self { + Self::Polled(pipeline) => pipeline.system_config_by_number(number).await, + Self::Managed(pipeline) => pipeline.system_config_by_number(number).await, + } + } +} diff --git a/kona/crates/providers/providers-local/Cargo.toml b/rust/kona/crates/providers/providers-local/Cargo.toml similarity index 100% rename from kona/crates/providers/providers-local/Cargo.toml rename to rust/kona/crates/providers/providers-local/Cargo.toml diff --git a/rust/kona/crates/providers/providers-local/README.md b/rust/kona/crates/providers/providers-local/README.md new file mode 100644 index 00000000000..50d76dd0cef --- /dev/null +++ b/rust/kona/crates/providers/providers-local/README.md @@ -0,0 +1,77 @@ +# `kona-providers-local` + +This crate provides a pure in-memory L2 provider implementation for the Kona OP Stack. It operates without any external RPC dependencies, serving all data from its internal cache. + +## Features + +- **`BufferedL2Provider`**: A pure in-memory L2 provider that serves data from cached blocks +- **`ChainStateBuffer`**: LRU cache for managing chain state with reorganization support +- **Chain Event Handling**: Support for processing execution extension notifications for chain events (commits, reorgs, reverts) +- **No External Dependencies**: Operates entirely from in-memory state without RPC calls + +## Architecture + +The buffered provider operates as a standalone in-memory data store: + +1. **In-Memory Storage**: Complete blocks with L2 block info are stored in memory +2. **Dual Indexing**: Blocks are indexed by both hash and number for efficient queries +3. **Reorg Handling**: Intelligent cache invalidation during chain reorganizations up to a configurable depth +4. **Event Processing**: Integration with execution extension notifications to maintain cache consistency +5. 
**Genesis Support**: Special handling for genesis blocks from the rollup configuration + +## Usage + +```rust,ignore +use kona_providers_local::{BufferedL2Provider, ChainStateEvent}; +use kona_genesis::RollupConfig; +use kona_protocol::{BatchValidationProvider, L2BlockInfo}; +use op_alloy_consensus::OpBlock; +use std::sync::Arc; + +async fn example() -> Result<(), Box> { + // Create a buffered provider with rollup configuration + let rollup_config = Arc::new(RollupConfig::default()); + let provider = BufferedL2Provider::new(rollup_config, 1000, 64); + + // Add blocks to the provider + // In practice, these would come from execution extension or other sources + let block: OpBlock = unimplemented!(); + let l2_info: L2BlockInfo = unimplemented!(); + provider.add_block(block, l2_info).await?; + + // Handle chain events from execution extension notifications + let event = ChainStateEvent::ChainCommitted { + new_head: alloy_primitives::B256::ZERO, + committed: vec![], + }; + provider.handle_chain_event(event).await?; + + // Query blocks from the cache + let mut provider_clone = provider.clone(); + let block = provider_clone.block_by_number(1).await?; + let l2_info = provider_clone.l2_block_info_by_number(1).await?; + + Ok(()) +} +``` + +## Configuration + +- `cache_size`: Number of blocks to cache (affects memory usage) +- `max_reorg_depth`: Maximum reorganization depth to handle before clearing cache + +## Provider Traits + +The `BufferedL2Provider` implements the following traits from `kona-derive`: + +- `ChainProvider`: Basic block and receipt access +- `L2ChainProvider`: L2-specific functionality including system config access +- `BatchValidationProvider`: Batch validation support + +## Error Handling + +The provider returns specific errors for different failure scenarios: +- `BlockNotFound`: When a requested block is not in the cache +- `L2BlockInfoConstruction`: When L2 block info cannot be constructed +- `SystemConfigConversion`: When a block cannot be converted 
to system config +- `Buffer` errors: For cache-related issues including deep reorgs diff --git a/kona/crates/providers/providers-local/src/buffer.rs b/rust/kona/crates/providers/providers-local/src/buffer.rs similarity index 100% rename from kona/crates/providers/providers-local/src/buffer.rs rename to rust/kona/crates/providers/providers-local/src/buffer.rs diff --git a/kona/crates/providers/providers-local/src/buffered.rs b/rust/kona/crates/providers/providers-local/src/buffered.rs similarity index 99% rename from kona/crates/providers/providers-local/src/buffered.rs rename to rust/kona/crates/providers/providers-local/src/buffered.rs index 6c1bd84a978..ae9ee8dfca9 100644 --- a/kona/crates/providers/providers-local/src/buffered.rs +++ b/rust/kona/crates/providers/providers-local/src/buffered.rs @@ -147,7 +147,7 @@ impl BufferedL2Provider { } } -/// Clone implementation for BufferedL2Provider +/// Clone implementation for `BufferedL2Provider` impl Clone for BufferedL2Provider { fn clone(&self) -> Self { Self { @@ -255,10 +255,10 @@ pub enum BufferedProviderError { /// Block not found in cache #[error("Block {0} not found in cache")] BlockNotFound(u64), - /// Failed to construct L2BlockInfo + /// Failed to construct `L2BlockInfo` #[error("Failed to construct L2BlockInfo for block {0}")] L2BlockInfoConstruction(u64), - /// Failed to convert block to SystemConfig + /// Failed to convert block to `SystemConfig` #[error("Failed to convert block {0} to SystemConfig")] SystemConfigConversion(u64), /// System config missing from genesis diff --git a/rust/kona/crates/providers/providers-local/src/lib.rs b/rust/kona/crates/providers/providers-local/src/lib.rs new file mode 100644 index 00000000000..d0a22852874 --- /dev/null +++ b/rust/kona/crates/providers/providers-local/src/lib.rs @@ -0,0 +1,18 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + 
html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +mod buffer; +pub use buffer::{CacheStats, CachedBlock, ChainBufferError, ChainStateBuffer, ChainStateEvent}; + +mod buffered; +pub use buffered::{BufferedL2Provider, BufferedProviderError}; + +#[cfg(feature = "metrics")] +mod metrics; +#[cfg(feature = "metrics")] +pub use metrics::Metrics; diff --git a/kona/crates/providers/providers-local/src/metrics.rs b/rust/kona/crates/providers/providers-local/src/metrics.rs similarity index 100% rename from kona/crates/providers/providers-local/src/metrics.rs rename to rust/kona/crates/providers/providers-local/src/metrics.rs diff --git a/kona/crates/providers/providers-local/tests/integration.rs b/rust/kona/crates/providers/providers-local/tests/integration.rs similarity index 100% rename from kona/crates/providers/providers-local/tests/integration.rs rename to rust/kona/crates/providers/providers-local/tests/integration.rs diff --git a/rust/kona/crates/supervisor/core/Cargo.toml b/rust/kona/crates/supervisor/core/Cargo.toml new file mode 100644 index 00000000000..020aeba6f94 --- /dev/null +++ b/rust/kona/crates/supervisor/core/Cargo.toml @@ -0,0 +1,65 @@ +[package] +name = "kona-supervisor-core" +version = "0.1.0" + +edition.workspace = true +license.workspace = true +rust-version.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true +exclude.workspace = true + +[dependencies] +# workspace +kona-interop.workspace = true +kona-protocol.workspace = true +kona-supervisor-types.workspace = true +kona-supervisor-rpc = { workspace = true, features = ["jsonrpsee", "client"] } +kona-supervisor-storage.workspace = true +kona-supervisor-metrics.workspace = true +kona-genesis.workspace = 
true + +# alloy +alloy-eips.workspace = true +alloy-network.workspace = true +alloy-provider = { workspace = true, features = ["reqwest"] } +alloy-primitives = { workspace = true, features = ["map", "rlp", "serde"] } +alloy-rpc-types-engine = { workspace = true, features = ["jwt", "serde"] } +alloy-rpc-client.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-consensus.workspace = true + +# op-alloy +op-alloy-rpc-types = { workspace = true, features = ["jsonrpsee"] } +op-alloy-consensus.workspace = true + +# jsonrpsee +jsonrpsee = { workspace = true, features = [ "macros", "server", "client", "ws-client" ] } + +# general +async-trait.workspace = true +serde.workspace = true +serde_json.workspace = true +tracing.workspace = true +thiserror.workspace = true +tokio = { workspace = true, features = ["sync", "macros"] } +tokio-util.workspace = true +auto_impl.workspace = true +futures = { workspace = true } +derive_more = { workspace = true, features = ["try_from"] } + +# `metrics` feature +metrics = { workspace = true } + +[dev-dependencies] +serde_json.workspace = true +tempfile.workspace = true +alloy-transport.workspace = true +kona-interop = {workspace = true, features = ["std", "test-utils"]} +mockall.workspace = true + +[lints] +workspace = true diff --git a/kona/crates/supervisor/core/src/chain_processor/chain.rs b/rust/kona/crates/supervisor/core/src/chain_processor/chain.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/chain.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/chain.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/error.rs b/rust/kona/crates/supervisor/core/src/chain_processor/error.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/error.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/error.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/cross_chain.rs 
b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/cross_chain.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/cross_chain.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/cross_chain.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/finalized.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/finalized.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/finalized.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/finalized.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/invalidation.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/invalidation.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/invalidation.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/invalidation.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/mod.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/mod.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/mod.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs similarity index 99% rename from kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs index 94f488adea8..b7c4e503e53 100644 --- a/kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs +++ b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/origin.rs @@ -219,7 +219,7 @@ mod tests { assert!(result.is_ok()); // The handler should send the reset command - if let Some(ManagedNodeCommand::Reset {}) = rx.recv().await { + 
if rx.recv().await == Some(ManagedNodeCommand::Reset {}) { // Command received successfully } else { panic!("Expected Reset command"); diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/safe_block.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/safe_block.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/safe_block.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/safe_block.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/handlers/unsafe_block.rs b/rust/kona/crates/supervisor/core/src/chain_processor/handlers/unsafe_block.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/handlers/unsafe_block.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/handlers/unsafe_block.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/metrics.rs b/rust/kona/crates/supervisor/core/src/chain_processor/metrics.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/metrics.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/metrics.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/mod.rs b/rust/kona/crates/supervisor/core/src/chain_processor/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/mod.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/mod.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/state/mod.rs b/rust/kona/crates/supervisor/core/src/chain_processor/state/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/chain_processor/state/mod.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/state/mod.rs diff --git a/kona/crates/supervisor/core/src/chain_processor/state/processor.rs b/rust/kona/crates/supervisor/core/src/chain_processor/state/processor.rs similarity index 100% rename from 
kona/crates/supervisor/core/src/chain_processor/state/processor.rs rename to rust/kona/crates/supervisor/core/src/chain_processor/state/processor.rs diff --git a/kona/crates/supervisor/core/src/config/core_config.rs b/rust/kona/crates/supervisor/core/src/config/core_config.rs similarity index 100% rename from kona/crates/supervisor/core/src/config/core_config.rs rename to rust/kona/crates/supervisor/core/src/config/core_config.rs diff --git a/kona/crates/supervisor/core/src/config/mod.rs b/rust/kona/crates/supervisor/core/src/config/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/config/mod.rs rename to rust/kona/crates/supervisor/core/src/config/mod.rs diff --git a/kona/crates/supervisor/core/src/config/rollup_config_set.rs b/rust/kona/crates/supervisor/core/src/config/rollup_config_set.rs similarity index 94% rename from kona/crates/supervisor/core/src/config/rollup_config_set.rs rename to rust/kona/crates/supervisor/core/src/config/rollup_config_set.rs index fa5d03ee8e8..66ef32a06f4 100644 --- a/kona/crates/supervisor/core/src/config/rollup_config_set.rs +++ b/rust/kona/crates/supervisor/core/src/config/rollup_config_set.rs @@ -21,7 +21,7 @@ impl Genesis { Self { l1, l2 } } - /// Creates a new Genesis from a RollupConfig. + /// Creates a new Genesis from a `RollupConfig`. pub const fn new_from_rollup_genesis(genesis: ChainGenesis, l1_block: BlockInfo) -> Self { Self { l1: l1_block, @@ -35,7 +35,7 @@ impl Genesis { } } -/// RollupConfig contains the configuration for the Optimism rollup. +/// `RollupConfig` contains the configuration for the Optimism rollup. #[derive(Debug, Default, Clone)] pub struct RollupConfig { /// Genesis anchor information for the rollup. @@ -49,7 +49,7 @@ pub struct RollupConfig { } impl RollupConfig { - /// Creates a new RollupConfig with the given genesis and block time. + /// Creates a new `RollupConfig` with the given genesis and block time. 
pub const fn new(genesis: Genesis, block_time: u64, interop_time: Option) -> Self { Self { genesis, block_time, interop_time } } @@ -106,7 +106,7 @@ impl RollupConfig { } } -/// RollupConfigSet contains the configuration for multiple Optimism rollups. +/// `RollupConfigSet` contains the configuration for multiple Optimism rollups. #[derive(Debug, Clone, Default)] pub struct RollupConfigSet { /// The rollup configurations for the Optimism rollups. @@ -114,7 +114,7 @@ pub struct RollupConfigSet { } impl RollupConfigSet { - /// Creates a new RollupConfigSet with the given rollup configurations. + /// Creates a new `RollupConfigSet` with the given rollup configurations. pub const fn new(rollups: HashMap) -> Self { Self { rollups } } @@ -124,7 +124,7 @@ impl RollupConfigSet { self.rollups.get(&chain_id) } - /// adds a new rollup configuration to the set using the provided chain ID and RollupConfig. + /// adds a new rollup configuration to the set using the provided chain ID and `RollupConfig`. pub fn add_from_rollup_config( &mut self, chain_id: u64, diff --git a/rust/kona/crates/supervisor/core/src/error.rs b/rust/kona/crates/supervisor/core/src/error.rs new file mode 100644 index 00000000000..8f188dae46e --- /dev/null +++ b/rust/kona/crates/supervisor/core/src/error.rs @@ -0,0 +1,209 @@ +//! [`SupervisorService`](crate::SupervisorService) errors. + +use crate::syncnode::ManagedNodeError; +use derive_more; +use jsonrpsee::types::{ErrorCode, ErrorObjectOwned}; +use kona_supervisor_storage::StorageError; +use kona_supervisor_types::AccessListError; +use op_alloy_rpc_types::SuperchainDAError; +use thiserror::Error; + +/// Custom error type for the Supervisor core logic. +#[derive(Debug, Error)] +pub enum SupervisorError { + /// Indicates that a feature or method is not yet implemented. + #[error("functionality not implemented")] + Unimplemented, + + /// No chains are configured for supervision. 
+ #[error("empty dependency set")] + EmptyDependencySet, + + /// Unsupported chain ID. + #[error("unsupported chain ID")] + UnsupportedChainId, + + /// Data availability errors. + /// + /// Spec . + #[error(transparent)] + SpecError(#[from] SpecError), + + /// Indicates that error occurred while interacting with the storage layer. + #[error(transparent)] + StorageError(#[from] StorageError), + + /// Indicates that managed node not found for the chain. + #[error("managed node not found for chain: {0}")] + ManagedNodeMissing(u64), + + /// Indicates the error occurred while interacting with the managed node. + #[error(transparent)] + ManagedNodeError(#[from] ManagedNodeError), + + /// Indicates the error occurred while parsing the `access_list` + #[error(transparent)] + AccessListError(#[from] AccessListError), + + /// Indicates the error occurred while serializing or deserializing JSON. + #[error(transparent)] + SerdeJson(#[from] serde_json::Error), + + /// Indicates the L1 block does not match the expected L1 block. + #[error("L1 block number mismatch. expected: {expected}, but got {got}")] + L1BlockMismatch { + /// Expected L1 block. + expected: u64, + /// Received L1 block. + got: u64, + }, + + /// Indicates that the chain ID could not be parsed from the access list. 
+ #[error("failed to parse chain id from access list")] + ChainIdParseError(), +} + +impl PartialEq for SupervisorError { + fn eq(&self, other: &Self) -> bool { + use SupervisorError::{ + AccessListError, EmptyDependencySet, L1BlockMismatch, ManagedNodeError, + ManagedNodeMissing, SerdeJson, SpecError, StorageError, Unimplemented, + }; + match (self, other) { + (Unimplemented, Unimplemented) | (EmptyDependencySet, EmptyDependencySet) => true, + (SpecError(a), SpecError(b)) => a == b, + (StorageError(a), StorageError(b)) => a == b, + (ManagedNodeMissing(a), ManagedNodeMissing(b)) => a == b, + (ManagedNodeError(a), ManagedNodeError(b)) => a == b, + (AccessListError(a), AccessListError(b)) => a == b, + (SerdeJson(a), SerdeJson(b)) => a.to_string() == b.to_string(), + (L1BlockMismatch { expected: a, got: b }, L1BlockMismatch { expected: c, got: d }) => { + a == c && b == d + } + _ => false, + } + } +} + +impl Eq for SupervisorError {} + +/// Extending the [`SuperchainDAError`] to include errors not in the spec. +#[derive(Error, Debug, PartialEq, Eq, derive_more::TryFrom)] +#[repr(i32)] +#[try_from(repr)] +pub enum SpecError { + /// [`SuperchainDAError`] from the spec. + #[error(transparent)] + SuperchainDAError(#[from] SuperchainDAError), + + /// Error not in spec. + #[error("error not in spec")] + ErrorNotInSpec, +} + +impl SpecError { + /// Maps the proper error code from `SuperchainDAError`. + /// Introduced a new error code for errors not in the spec. 
+ pub const fn code(&self) -> i32 { + match self { + Self::SuperchainDAError(e) => *e as i32, + Self::ErrorNotInSpec => -321300, + } + } +} + +impl From for ErrorObjectOwned { + fn from(err: SpecError) -> Self { + ErrorObjectOwned::owned(err.code(), err.to_string(), None::<()>) + } +} + +impl From for ErrorObjectOwned { + fn from(err: SupervisorError) -> Self { + match err { + // todo: handle these errors more gracefully + SupervisorError::Unimplemented | + SupervisorError::EmptyDependencySet | + SupervisorError::UnsupportedChainId | + SupervisorError::L1BlockMismatch { .. } | + SupervisorError::ManagedNodeMissing(_) | + SupervisorError::ManagedNodeError(_) | + SupervisorError::StorageError(_) | + SupervisorError::AccessListError(_) | + SupervisorError::ChainIdParseError() | + SupervisorError::SerdeJson(_) => ErrorObjectOwned::from(ErrorCode::InternalError), + SupervisorError::SpecError(err) => err.into(), + } + } +} + +impl From for SpecError { + fn from(err: StorageError) -> Self { + match err { + StorageError::Database(_) => Self::from(SuperchainDAError::DataCorruption), + StorageError::FutureData => Self::from(SuperchainDAError::FutureData), + StorageError::EntryNotFound(_) => Self::from(SuperchainDAError::MissedData), + StorageError::ConflictError => Self::from(SuperchainDAError::ConflictingData), + StorageError::BlockOutOfOrder => Self::from(SuperchainDAError::OutOfOrder), + _ => Self::ErrorNotInSpec, + } + } +} + +#[cfg(test)] +mod test { + use kona_supervisor_storage::EntryNotFoundError; + + use super::*; + + #[test] + fn test_storage_error_conversion() { + let test_err = SpecError::from(StorageError::DatabaseNotInitialised); + let expected_err = SpecError::ErrorNotInSpec; + + assert_eq!(test_err, expected_err); + } + + #[test] + fn test_unmapped_storage_error_conversion() { + let spec_err = ErrorObjectOwned::from(SpecError::ErrorNotInSpec); + let expected_err = SpecError::ErrorNotInSpec; + + assert_eq!(spec_err, expected_err.into()); + + let spec_err = 
ErrorObjectOwned::from(SpecError::from(StorageError::LockPoisoned)); + let expected_err = SpecError::ErrorNotInSpec; + + assert_eq!(spec_err, expected_err.into()); + + let spec_err = ErrorObjectOwned::from(SpecError::from(StorageError::FutureData)); + let expected_err = SpecError::SuperchainDAError(SuperchainDAError::FutureData); + + assert_eq!(spec_err, expected_err.into()); + + let spec_err = ErrorObjectOwned::from(SpecError::from(StorageError::EntryNotFound( + EntryNotFoundError::DerivedBlockNotFound(12), + ))); + let expected_err = SpecError::SuperchainDAError(SuperchainDAError::MissedData); + + assert_eq!(spec_err, expected_err.into()); + } + + #[test] + fn test_supervisor_error_conversion() { + // This will happen implicitly in server rpc response calls. + let supervisor_err = ErrorObjectOwned::from(SupervisorError::SpecError(SpecError::from( + StorageError::LockPoisoned, + ))); + let expected_err = SpecError::ErrorNotInSpec; + + assert_eq!(supervisor_err, expected_err.into()); + + let supervisor_err = ErrorObjectOwned::from(SupervisorError::SpecError(SpecError::from( + StorageError::FutureData, + ))); + let expected_err = SpecError::SuperchainDAError(SuperchainDAError::FutureData); + + assert_eq!(supervisor_err, expected_err.into()); + } +} diff --git a/kona/crates/supervisor/core/src/event/chain.rs b/rust/kona/crates/supervisor/core/src/event/chain.rs similarity index 98% rename from kona/crates/supervisor/core/src/event/chain.rs rename to rust/kona/crates/supervisor/core/src/event/chain.rs index 69711ae20b4..36f8b2593ae 100644 --- a/kona/crates/supervisor/core/src/event/chain.rs +++ b/rust/kona/crates/supervisor/core/src/event/chain.rs @@ -20,7 +20,8 @@ pub enum ChainEvent { derived_ref_pair: DerivedRefPair, }, - /// A derivation origin update event, indicating that the origin for derived blocks has changed. + /// A derivation origin update event, indicating that the origin for derived blocks has + /// changed. 
DerivationOriginUpdate { /// The [`BlockInfo`] of the block that is the new derivation origin. origin: BlockInfo, diff --git a/kona/crates/supervisor/core/src/event/mod.rs b/rust/kona/crates/supervisor/core/src/event/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/event/mod.rs rename to rust/kona/crates/supervisor/core/src/event/mod.rs diff --git a/kona/crates/supervisor/core/src/l1_watcher/mod.rs b/rust/kona/crates/supervisor/core/src/l1_watcher/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/l1_watcher/mod.rs rename to rust/kona/crates/supervisor/core/src/l1_watcher/mod.rs diff --git a/kona/crates/supervisor/core/src/l1_watcher/watcher.rs b/rust/kona/crates/supervisor/core/src/l1_watcher/watcher.rs similarity index 98% rename from kona/crates/supervisor/core/src/l1_watcher/watcher.rs rename to rust/kona/crates/supervisor/core/src/l1_watcher/watcher.rs index 00476c970ae..b2d16ceb658 100644 --- a/kona/crates/supervisor/core/src/l1_watcher/watcher.rs +++ b/rust/kona/crates/supervisor/core/src/l1_watcher/watcher.rs @@ -329,7 +329,7 @@ mod tests { #[tokio::test] async fn test_handle_new_finalized_block_updates_and_broadcasts() { let (tx, mut rx) = mpsc::channel(1); - let event_txs = [(1, tx)].into_iter().collect(); + let event_txs = std::iter::once((1, tx)).collect(); let mut mock_storage = Mockfinalized_l1_storage::new(); mock_storage.expect_update_finalized_l1().returning(|_block| Ok(())); @@ -376,7 +376,7 @@ mod tests { #[tokio::test] async fn test_handle_new_finalized_block_storage_error() { let (tx, mut rx) = mpsc::channel(1); - let event_txs = [(1, tx)].into_iter().collect(); + let event_txs = std::iter::once((1, tx)).collect(); let mut mock_storage = Mockfinalized_l1_storage::new(); mock_storage @@ -415,7 +415,7 @@ mod tests { #[tokio::test] async fn test_handle_new_latest_block_updates() { let (tx, mut rx) = mpsc::channel(1); - let event_txs = [(1, tx)].into_iter().collect(); + let event_txs = 
std::iter::once((1, tx)).collect(); let watcher = L1Watcher { rpc_client: mock_rpc_client(), @@ -448,7 +448,7 @@ mod tests { #[tokio::test] async fn test_trigger_reorg_handler() { let (tx, mut rx) = mpsc::channel(1); - let event_txs = [(1, tx)].into_iter().collect(); + let event_txs = std::iter::once((1, tx)).collect(); let watcher = L1Watcher { rpc_client: mock_rpc_client(), diff --git a/kona/crates/supervisor/core/src/lib.rs b/rust/kona/crates/supervisor/core/src/lib.rs similarity index 100% rename from kona/crates/supervisor/core/src/lib.rs rename to rust/kona/crates/supervisor/core/src/lib.rs diff --git a/kona/crates/supervisor/core/src/logindexer/indexer.rs b/rust/kona/crates/supervisor/core/src/logindexer/indexer.rs similarity index 100% rename from kona/crates/supervisor/core/src/logindexer/indexer.rs rename to rust/kona/crates/supervisor/core/src/logindexer/indexer.rs diff --git a/kona/crates/supervisor/core/src/logindexer/mod.rs b/rust/kona/crates/supervisor/core/src/logindexer/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/logindexer/mod.rs rename to rust/kona/crates/supervisor/core/src/logindexer/mod.rs diff --git a/kona/crates/supervisor/core/src/logindexer/util.rs b/rust/kona/crates/supervisor/core/src/logindexer/util.rs similarity index 100% rename from kona/crates/supervisor/core/src/logindexer/util.rs rename to rust/kona/crates/supervisor/core/src/logindexer/util.rs diff --git a/kona/crates/supervisor/core/src/reorg/error.rs b/rust/kona/crates/supervisor/core/src/reorg/error.rs similarity index 100% rename from kona/crates/supervisor/core/src/reorg/error.rs rename to rust/kona/crates/supervisor/core/src/reorg/error.rs diff --git a/kona/crates/supervisor/core/src/reorg/handler.rs b/rust/kona/crates/supervisor/core/src/reorg/handler.rs similarity index 100% rename from kona/crates/supervisor/core/src/reorg/handler.rs rename to rust/kona/crates/supervisor/core/src/reorg/handler.rs diff --git 
a/kona/crates/supervisor/core/src/reorg/metrics.rs b/rust/kona/crates/supervisor/core/src/reorg/metrics.rs similarity index 100% rename from kona/crates/supervisor/core/src/reorg/metrics.rs rename to rust/kona/crates/supervisor/core/src/reorg/metrics.rs diff --git a/kona/crates/supervisor/core/src/reorg/mod.rs b/rust/kona/crates/supervisor/core/src/reorg/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/reorg/mod.rs rename to rust/kona/crates/supervisor/core/src/reorg/mod.rs diff --git a/rust/kona/crates/supervisor/core/src/reorg/task.rs b/rust/kona/crates/supervisor/core/src/reorg/task.rs new file mode 100644 index 00000000000..4069bbad2d4 --- /dev/null +++ b/rust/kona/crates/supervisor/core/src/reorg/task.rs @@ -0,0 +1,1250 @@ +use super::metrics::Metrics; +use crate::ReorgHandlerError; +use alloy_eips::BlockNumberOrTag; +use alloy_primitives::{B256, ChainId}; +use alloy_rpc_client::RpcClient; +use alloy_rpc_types_eth::Block; +use derive_more::Constructor; +use kona_interop::DerivedRefPair; +use kona_protocol::BlockInfo; +use kona_supervisor_storage::{DbReader, StorageError, StorageRewinder}; +use std::sync::Arc; +use tracing::{debug, info, trace, warn}; + +/// Handles reorg for a single chain +#[derive(Debug, Constructor)] +pub(crate) struct ReorgTask { + chain_id: ChainId, + db: Arc, + rpc_client: RpcClient, +} + +#[derive(Debug)] +struct RewoundState { + source: BlockInfo, + derived: Option, +} + +impl ReorgTask +where + DB: DbReader + StorageRewinder + Send + Sync + 'static, +{ + /// Processes reorg for a single chain. If the chain is consistent with the L1 chain, + /// does nothing. + pub(crate) async fn process_chain_reorg(&self) -> Result<(), ReorgHandlerError> { + trace!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Processing reorg for chain..." 
+ ); + + let latest_state = self.db.latest_derivation_state()?; + + // Find last valid source block for this chain + let rewound_state = match self.find_rewind_target(latest_state).await { + Ok(Some(rewind_target_source)) => { + Some(self.rewind_to_target_source(rewind_target_source).await?) + } + Ok(None) => { + // No reorg needed, latest source block is still canonical + return Ok(()); + } + Err(ReorgHandlerError::RewindTargetPreInterop) => { + self.rewind_to_activation_block().await? + } + Err(err) => { + return Err(err); + } + }; + + // record metrics + if let Some(rewound_state) = rewound_state { + let l1_depth = latest_state.source.number - rewound_state.source.number; + let l2_depth = rewound_state + .derived + .map_or(0, |derived| latest_state.derived.number - derived.number); + Metrics::record_block_depth(self.chain_id, l1_depth, l2_depth); + } + info!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Processed reorged successfully" + ); + Ok(()) + } + + async fn rewind_to_target_source( + &self, + rewind_target_source: BlockInfo, + ) -> Result { + info!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + rewind_target_source = rewind_target_source.number, + "Reorg detected - rewinding to target source block..." + ); + + // Call the rewinder to handle the DB rewinding + let derived_block_rewound = + self.db.rewind_to_source(&rewind_target_source.id()).inspect_err(|err| { + warn!( + target: "supervisor::reorg_handler::db", + chain_id = %self.chain_id, + %err, + "Failed to rewind DB to derived block" + ); + })?; + + Ok(RewoundState { source: rewind_target_source, derived: derived_block_rewound }) + } + + async fn rewind_to_activation_block(&self) -> Result, ReorgHandlerError> { + info!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Reorg detected - rewinding to activation block..." 
+ ); + + // If the rewind target is pre-interop, we need to rewind to the activation block + match self.db.get_activation_block() { + Ok(activation_block) => { + let activation_source_block = self.db.derived_to_source(activation_block.id())?; + self.db.rewind(&activation_block.id()).inspect_err(|err| { + warn!( + target: "supervisor::reorg_handler::db", + chain_id = %self.chain_id, + %err, + "Failed to rewind DB to activation block" + ); + })?; + Ok(Some(RewoundState { + source: activation_source_block, + derived: Some(activation_block), + })) + } + Err(StorageError::DatabaseNotInitialised) => { + debug!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "No activation block found, no rewind required" + ); + Ok(None) + } + Err(err) => Err(ReorgHandlerError::StorageError(err)), + } + } + + /// Finds the rewind target for a chain during a reorg + /// + /// Returns `None` if no rewind is needed, or the target block to rewind to. + /// Returns `ReorgHandlerError::RewindTargetPreInterop` if the rewind target is before the + /// interop activation block. + async fn find_rewind_target( + &self, + latest_state: DerivedRefPair, + ) -> Result, ReorgHandlerError> { + trace!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Finding rewind target..." + ); + + // Check if the latest source block is still canonical + if self.is_block_canonical(latest_state.source.number, latest_state.source.hash).await? 
{ + debug!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + block_number = latest_state.source.number, + "Latest source block is still canonical, no reorg needed" + ); + return Ok(None); + } + + let common_ancestor = self.find_common_ancestor().await?; + let mut prev_source = latest_state.source; + let mut current_source = self.db.get_source_block(prev_source.number - 1)?; + + while current_source.number > common_ancestor.number { + if current_source.number % 5 == 0 { + trace!( + target: "supervisor::reorg_handler", + current_block=current_source.number, + common_ancestor=common_ancestor.number, + "Finding rewind target..." + ) + } + + // If the current source block is canonical, we found the rewind target + if self.is_block_canonical(current_source.number, current_source.hash).await? { + info!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + block_number = current_source.number, + "Found canonical block as rewind target" + ); + break; + } + + // Otherwise, walk back to the previous source block + prev_source = current_source; + current_source = self.db.get_source_block(current_source.number - 1)?; + } + + // return the previous source block as the rewind target since rewinding is inclusive + Ok(Some(prev_source)) + } + + async fn find_common_ancestor(&self) -> Result { + trace!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Finding common ancestor." + ); + + match self.db.get_safety_head_ref(kona_interop::SafetyLevel::Finalized) { + Ok(finalized_block) => { + let common_ancestor = self.db.derived_to_source(finalized_block.id())?; + return Ok(common_ancestor); + } + Err(StorageError::FutureData) => { /* fall through to activation block */ } + Err(err) => { + return Err(ReorgHandlerError::StorageError(err)); + } + } + + debug!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "No finalized block found, checking activation block." 
+ ); + + match self.db.get_activation_block() { + Ok(activation_block) => { + let activation_source_block = self.db.derived_to_source(activation_block.id())?; + if self + .is_block_canonical( + activation_source_block.number, + activation_source_block.hash, + ) + .await? + { + Ok(activation_source_block) + } else { + debug!( + target: "supervisor::reorg_handler", + chain_id = %self.chain_id, + "Activation block is not canonical, no common ancestor found" + ); + Err(ReorgHandlerError::RewindTargetPreInterop) + } + } + Err(StorageError::DatabaseNotInitialised) => { + Err(ReorgHandlerError::RewindTargetPreInterop) + } + Err(err) => Err(ReorgHandlerError::StorageError(err)), + } + } + + /// Checks if a block is canonical on L1 + async fn is_block_canonical( + &self, + block_number: u64, + expected_hash: B256, + ) -> Result { + let canonical_l1 = self + .rpc_client + .request::<_, Block>( + "eth_getBlockByNumber", + (BlockNumberOrTag::Number(block_number), false), + ) + .await + .map_err(|err| { + warn!( + target: "supervisor::reorg_handler", + block_number, + %err, + "Failed to fetch L1 block from RPC" + ); + ReorgHandlerError::RPCError(err.to_string()) + })?; + Ok(canonical_l1.hash() == expected_hash) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_eips::BlockNumHash; + use alloy_rpc_types_eth::Header; + use alloy_transport::mock::*; + use kona_interop::{DerivedRefPair, SafetyLevel}; + use kona_protocol::BlockInfo; + use kona_supervisor_storage::{ + DerivationStorageReader, HeadRefStorageReader, LogStorageReader, StorageError, + }; + use kona_supervisor_types::{Log, SuperHead}; + use mockall::{mock, predicate}; + + mock!( + #[derive(Debug)] + pub Db {} + + impl LogStorageReader for Db { + fn get_block(&self, block_number: u64) -> Result; + fn get_latest_block(&self) -> Result; + fn get_log(&self, block_number: u64,log_index: u32) -> Result; + fn get_logs(&self, block_number: u64) -> Result, StorageError>; + } + + impl DerivationStorageReader for Db { + 
fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result; + fn latest_derived_block_at_source(&self, source_block_id: BlockNumHash) -> Result; + fn latest_derivation_state(&self) -> Result; + fn get_source_block(&self, source_block_number: u64) -> Result; + fn get_activation_block(&self) -> Result; + } + + impl HeadRefStorageReader for Db { + fn get_safety_head_ref(&self, safety_level: SafetyLevel) -> Result; + fn get_super_head(&self) -> Result; + } + + impl StorageRewinder for Db { + fn rewind(&self, to: &BlockNumHash) -> Result<(), StorageError>; + fn rewind_log_storage(&self, to: &BlockNumHash) -> Result<(), StorageError>; + fn rewind_to_source(&self, to: &BlockNumHash) -> Result, StorageError>; + } + ); + + mock! ( + pub chain_db {} + ); + + #[tokio::test] + async fn test_process_chain_reorg_no_reorg_needed() { + let mut mock_db = MockDb::new(); + + let latest_source = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let latest_state = DerivedRefPair { + source: latest_source, + derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), + }; + + // Mock the latest derivation state + mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); + + // Mock the RPC to return the same block (no reorg) + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + let canonical_block: Block = Block { + header: Header { + hash: latest_source.hash, + inner: alloy_consensus::Header { + number: latest_source.number, + parent_hash: latest_source.parent_hash, + timestamp: latest_source.timestamp, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + asserter.push_success(&canonical_block); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + + let result = reorg_task.process_chain_reorg().await; + + assert!(result.is_ok()); + } + + #[tokio::test] 
+ async fn test_process_chain_reorg_with_rewind() { + let mut mock_db = MockDb::new(); + + let latest_source = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let latest_state = DerivedRefPair { + source: latest_source, + derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), + }; + + let canonical_source = + BlockInfo::new(B256::from([5u8; 32]), 95, B256::from([6u8; 32]), 12344); + + let rewind_target_source = + BlockInfo::new(B256::from([10u8; 32]), 96, B256::from([11u8; 32]), 12340); + + let rewind_target_derived = + BlockInfo::new(B256::from([12u8; 32]), 45, B256::from([13u8; 32]), 12341); + + let finalized_block = + BlockInfo::new(B256::from([20u8; 32]), 40, B256::from([21u8; 32]), 12330); + + // Mock the latest derivation state + mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); + + // Mock finding common ancestor + mock_db.expect_get_safety_head_ref().times(1).returning(move |_| Ok(finalized_block)); + + mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(canonical_source)); + + mock_db.expect_get_source_block().times(5).returning( + move |block_number| match block_number { + 99 => Ok(BlockInfo::new(B256::from([16u8; 32]), 99, B256::from([17u8; 32]), 12344)), + 98 => Ok(BlockInfo::new(B256::from([17u8; 32]), 98, B256::from([18u8; 32]), 12343)), + 97 => Ok(BlockInfo::new(B256::from([18u8; 32]), 97, B256::from([19u8; 32]), 12342)), + 96 => Ok(rewind_target_source), + 95 => Ok(canonical_source), + _ => Err(StorageError::ConflictError), + }, + ); + + // Mock the RPC to show reorg happened + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + // First call shows different hash (reorg detected) + let different_block: Block = Block { + header: Header { + hash: B256::from([99u8; 32]), // Different hash + inner: alloy_consensus::Header { + number: 
latest_source.number, + parent_hash: latest_source.parent_hash, + timestamp: latest_source.timestamp, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + asserter.push_success(&different_block); + asserter.push_success(&different_block); + asserter.push_success(&different_block); + asserter.push_success(&different_block); + asserter.push_success(&different_block); + + // Second call for checking if rewind target is canonical + let canonical_block: Block = Block { + header: Header { + hash: canonical_source.hash, + inner: alloy_consensus::Header { + number: canonical_source.number, + parent_hash: canonical_source.parent_hash, + timestamp: canonical_source.timestamp, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + asserter.push_success(&canonical_block); + + // Mock rewind operations + mock_db + .expect_rewind_to_source() + .times(1) + .returning(move |_| Ok(Some(rewind_target_derived))); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + + let result = reorg_task.process_chain_reorg().await; + + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_process_chain_reorg_rewind_pre_interop() { + let mut mock_db = MockDb::new(); + + let latest_source = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let latest_state = DerivedRefPair { + source: latest_source, + derived: BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346), + }; + + let activation_block = + BlockInfo::new(B256::from([10u8; 32]), 1, B256::from([11u8; 32]), 12000); + + let activation_source = + BlockInfo::new(B256::from([12u8; 32]), 10, B256::from([13u8; 32]), 11999); + + // Mock the latest derivation state + mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); + + // Mock finding common ancestor fails with pre-interop + mock_db.expect_get_safety_head_ref().times(1).returning(|_| Err(StorageError::FutureData)); + + 
mock_db + .expect_get_activation_block() + .times(2) // Once in find_common_ancestor, once in rewind_to_activation_block + .returning(move || Ok(activation_block)); + + mock_db + .expect_derived_to_source() + .times(2) // Once in find_common_ancestor, once in rewind_to_activation_block + .returning(move |_| Ok(activation_source)); + + // Mock the RPC calls + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + // First call shows different hash (reorg detected) + let different_block: Block = Block { + header: Header { + hash: B256::from([99u8; 32]), + inner: alloy_consensus::Header { + number: latest_source.number, + parent_hash: latest_source.parent_hash, + timestamp: latest_source.timestamp, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + asserter.push_success(&different_block); + + // Activation block is not canonical + let non_canonical_activation: Block = Block { + header: Header { + hash: B256::from([99u8; 32]), // Different from expected + inner: alloy_consensus::Header { + number: activation_source.number, + parent_hash: activation_source.parent_hash, + timestamp: activation_source.timestamp, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + asserter.push_success(&non_canonical_activation); + + // Mock rewind to activation block + mock_db.expect_rewind().times(1).returning(|_| Ok(())); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + + let result = reorg_task.process_chain_reorg().await; + + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_process_chain_reorg_storage_error() { + let mut mock_db = MockDb::new(); + + // DB fails to get latest derivation state + mock_db + .expect_latest_derivation_state() + .times(1) + .returning(|| Err(StorageError::LockPoisoned)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + 
RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.process_chain_reorg().await; + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + ReorgHandlerError::StorageError(StorageError::LockPoisoned) + )); + } + + #[tokio::test] + async fn test_find_rewind_target_without_reorg() { + let mut mock_db = MockDb::new(); + let latest_source: Block = Block { + header: Header { + hash: B256::from([1u8; 32]), + inner: alloy_consensus::Header { + number: 42, + parent_hash: B256::ZERO, + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let latest_state = DerivedRefPair { + source: BlockInfo::new( + latest_source.header.hash, + latest_source.header.number, + latest_source.header.parent_hash, + latest_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([5u8; 32]), 200, B256::ZERO, 1100), + }; + + // Mock the latest derivation state and expect this to be called once + mock_db.expect_latest_derivation_state().times(1).returning(move || Ok(latest_state)); + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + // Mock RPC response + asserter.push_success(&latest_source); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + let rewind_target = reorg_task.process_chain_reorg().await; + + // Should succeed since the latest source block is still canonical + assert!(rewind_target.is_ok()); + } + + #[tokio::test] + async fn test_find_rewind_target_with_reorg() { + let mut mock_db = MockDb::new(); + let latest_source: Block = Block { + header: Header { + hash: B256::from([1u8; 32]), + inner: alloy_consensus::Header { + number: 41, + parent_hash: B256::from([2u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let latest_state = DerivedRefPair { + source: BlockInfo::new( + 
latest_source.header.hash, + latest_source.header.number, + latest_source.header.parent_hash, + latest_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), + }; + + let finalized_source: Block = Block { + header: Header { + hash: B256::from([2u8; 32]), + inner: alloy_consensus::Header { + number: 38, + parent_hash: B256::from([1u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let finalized_state = DerivedRefPair { + source: BlockInfo::new( + finalized_source.header.hash, + finalized_source.header.number, + finalized_source.header.parent_hash, + finalized_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), + }; + + let reorg_source: Block = Block { + header: Header { + hash: B256::from([14u8; 32]), + inner: alloy_consensus::Header { + number: 40, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let reorg_source_info = BlockInfo::new( + reorg_source.header.hash, + reorg_source.header.number, + reorg_source.header.parent_hash, + reorg_source.header.timestamp, + ); + + let mut source_39: Block = reorg_source.clone(); + source_39.header.inner.number = 39; + let source_39_info = BlockInfo::new( + source_39.header.hash, + source_39.header.number, + source_39.header.parent_hash, + source_39.header.timestamp, + ); + + let incorrect_source: Block = Block { + header: Header { + hash: B256::from([15u8; 32]), + inner: alloy_consensus::Header { + number: 5000, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + mock_db.expect_latest_derivation_state().returning(move || Ok(latest_state)); + mock_db + .expect_get_safety_head_ref() + .times(1) + .returning(move |_| Ok(finalized_state.derived)); + 
mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(finalized_state.source)); + + mock_db.expect_get_source_block().times(3).returning( + move |block_number| match block_number { + 41 => Ok(latest_state.source), + 40 => Ok(reorg_source_info), + 39 => Ok(source_39_info), + _ => Ok(finalized_state.source), + }, + ); + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + // First return the reorged block + asserter.push_success(&reorg_source); + + // Then returning some random incorrect blocks 3 times till it reaches the finalized block + asserter.push_success(&incorrect_source); + asserter.push_success(&incorrect_source); + asserter.push_success(&incorrect_source); + + // Finally returning the correct block + asserter.push_success(&finalized_source); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + let rewind_target = reorg_task.find_rewind_target(latest_state).await; + + // Should succeed since the latest source block is still canonical + assert!(rewind_target.is_ok()); + assert_eq!(rewind_target.unwrap(), Some(source_39_info)); + } + + #[tokio::test] + async fn test_find_rewind_target_with_finalized_future_activation_canonical() { + let mut mock_db = MockDb::new(); + let latest_source: Block = Block { + header: Header { + hash: B256::from([1u8; 32]), + inner: alloy_consensus::Header { + number: 41, + parent_hash: B256::from([2u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let latest_state = DerivedRefPair { + source: BlockInfo::new( + latest_source.header.hash, + latest_source.header.number, + latest_source.header.parent_hash, + latest_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), + }; + + let activation_source: Block = Block { + header: Header { + hash: B256::from([2u8; 32]), + inner: alloy_consensus::Header 
{ + number: 38, + parent_hash: B256::from([1u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let activation_state = DerivedRefPair { + source: BlockInfo::new( + activation_source.header.hash, + activation_source.header.number, + activation_source.header.parent_hash, + activation_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), + }; + + let reorg_source: Block = Block { + header: Header { + hash: B256::from([14u8; 32]), + inner: alloy_consensus::Header { + number: 40, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let reorg_source_info = BlockInfo::new( + reorg_source.header.hash, + reorg_source.header.number, + reorg_source.header.parent_hash, + reorg_source.header.timestamp, + ); + + let mut source_39: Block = reorg_source.clone(); + source_39.header.inner.number = 39; + let source_39_info = BlockInfo::new( + source_39.header.hash, + source_39.header.number, + source_39.header.parent_hash, + source_39.header.timestamp, + ); + + let incorrect_source: Block = Block { + header: Header { + hash: B256::from([15u8; 32]), + inner: alloy_consensus::Header { + number: 5000, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + mock_db + .expect_get_safety_head_ref() + .times(1) + .returning(move |_| Err(StorageError::FutureData)); + mock_db + .expect_get_activation_block() + .times(1) + .returning(move || Ok(activation_state.derived)); + mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_state.source)); + + mock_db.expect_get_source_block().times(3).returning( + move |block_number| match block_number { + 41 => Ok(latest_state.source), + 40 => Ok(reorg_source_info), + 39 => Ok(source_39_info), + _ => Ok(activation_state.source), + }, + 
); + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + // First return the reorged block + asserter.push_success(&reorg_source); + + // Return the activation block source to make sure it is canonical + // Used in `find_common_ancestor` + asserter.push_success(&activation_source); + + // Then returning some random incorrect blocks 3 times till it reaches the finalized block + asserter.push_success(&incorrect_source); + asserter.push_success(&incorrect_source); + asserter.push_success(&incorrect_source); + + // Finally returning the correct block + asserter.push_success(&activation_source); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + let rewind_target = reorg_task.find_rewind_target(latest_state).await; + + // Should succeed since the latest source block is still canonical + assert!(rewind_target.is_ok()); + assert_eq!(rewind_target.unwrap(), Some(source_39_info)); + } + + #[tokio::test] + async fn test_find_rewind_target_with_finalized_future_activation_not_canonical() { + let mut mock_db = MockDb::new(); + let latest_source: Block = Block { + header: Header { + hash: B256::from([1u8; 32]), + inner: alloy_consensus::Header { + number: 41, + parent_hash: B256::from([2u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let latest_state = DerivedRefPair { + source: BlockInfo::new( + latest_source.header.hash, + latest_source.header.number, + latest_source.header.parent_hash, + latest_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([10u8; 32]), 200, B256::ZERO, 1100), + }; + + let activation_source: Block = Block { + header: Header { + hash: B256::from([2u8; 32]), + inner: alloy_consensus::Header { + number: 38, + parent_hash: B256::from([1u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let 
activation_state = DerivedRefPair { + source: BlockInfo::new( + activation_source.header.hash, + activation_source.header.number, + activation_source.header.parent_hash, + activation_source.header.timestamp, + ), + derived: BlockInfo::new(B256::from([20u8; 32]), 200, B256::ZERO, 1100), + }; + + let reorg_source: Block = Block { + header: Header { + hash: B256::from([14u8; 32]), + inner: alloy_consensus::Header { + number: 40, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let incorrect_source: Block = Block { + header: Header { + hash: B256::from([15u8; 32]), + inner: alloy_consensus::Header { + number: 5000, + parent_hash: B256::from([13u8; 32]), + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + mock_db + .expect_get_safety_head_ref() + .times(1) + .returning(move |_| Err(StorageError::FutureData)); + mock_db + .expect_get_activation_block() + .times(1) + .returning(move || Ok(activation_state.derived)); + mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_state.source)); + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + + // First return the reorged block + asserter.push_success(&reorg_source); + + // Return the incorrect source to make sure activation block is not canonical + // Used in `find_common_ancestor` + asserter.push_success(&incorrect_source); + + let reorg_task = ReorgTask::new(1, Arc::new(mock_db), rpc_client); + let rewind_target = reorg_task.find_rewind_target(latest_state).await; + + assert!(matches!(rewind_target, Err(ReorgHandlerError::RewindTargetPreInterop))); + } + + #[tokio::test] + async fn test_is_block_canonical() { + let canonical_hash = B256::from([1u8; 32]); + let non_canonical_hash = B256::from([2u8; 32]); + + let canonical_block: Block = Block { + header: 
Header { + hash: canonical_hash, + inner: alloy_consensus::Header { + number: 100, + parent_hash: B256::ZERO, + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let non_canonical_block: Block = Block { + header: Header { + hash: non_canonical_hash, + inner: alloy_consensus::Header { + number: 100, + parent_hash: B256::ZERO, + timestamp: 12345, + ..Default::default() + }, + ..Default::default() + }, + ..Default::default() + }; + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let rpc_client = RpcClient::new(transport, false); + asserter.push_success(&canonical_block); + asserter.push_success(&non_canonical_block); + + let reorg_task = ReorgTask::new(1, Arc::new(MockDb::new()), rpc_client); + + let result = reorg_task.is_block_canonical(100, canonical_hash).await; + assert!(result.is_ok()); + + // Should return false + let result = reorg_task.is_block_canonical(100, canonical_hash).await; + assert!(result.is_ok()); + assert!(!result.unwrap()); + } + + #[tokio::test] + async fn test_rewind_to_activation_block_success() { + let mut mock_db = MockDb::new(); + + let activation_block = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let activation_source = + BlockInfo::new(B256::from([3u8; 32]), 200, B256::from([4u8; 32]), 12346); + + // Expect get_activation_block to be called + mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); + + // Expect derived_to_source to be called + mock_db + .expect_derived_to_source() + .times(1) + .with(mockall::predicate::eq(activation_block.id())) + .returning(move |_| Ok(activation_source)); + + // Expect rewind to be called + mock_db + .expect_rewind() + .times(1) + .with(mockall::predicate::eq(activation_block.id())) + .returning(|_| Ok(())); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); 
+ + let result = reorg_task.rewind_to_activation_block().await; + + assert!(result.is_ok()); + let pair = result.unwrap().unwrap(); + assert_eq!(pair.source, activation_source); + assert_eq!(pair.derived.unwrap(), activation_block); + } + + #[tokio::test] + async fn test_rewind_to_activation_block_database_not_initialized() { + let mut mock_db = MockDb::new(); + + // Expect get_activation_block to return DatabaseNotInitialised + mock_db + .expect_get_activation_block() + .times(1) + .returning(|| Err(StorageError::DatabaseNotInitialised)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_activation_block().await; + + // Should succeed with None (no-op case) + assert!(result.is_ok()); + assert!(result.unwrap().is_none()); + } + + #[tokio::test] + async fn test_rewind_to_activation_block_storage_error() { + let mut mock_db = MockDb::new(); + + // Expect get_activation_block to return a different storage error + mock_db + .expect_get_activation_block() + .times(1) + .returning(|| Err(StorageError::LockPoisoned)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_activation_block().await; + + // Should return storage error + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + ReorgHandlerError::StorageError(StorageError::LockPoisoned) + )); + } + + #[tokio::test] + async fn test_rewind_to_activation_block_derived_to_source_fails() { + let mut mock_db = MockDb::new(); + + let activation_block = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + // Expect get_activation_block to succeed + mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); + + // Expect derived_to_source to fail + mock_db.expect_derived_to_source().times(1).returning(|_| 
Err(StorageError::LockPoisoned)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_activation_block().await; + + // Should return storage error + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + ReorgHandlerError::StorageError(StorageError::LockPoisoned) + )); + } + + #[tokio::test] + async fn test_rewind_to_activation_block_rewind_fails() { + let mut mock_db = MockDb::new(); + + let activation_block = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let activation_source = + BlockInfo::new(B256::from([3u8; 32]), 200, B256::from([4u8; 32]), 12346); + + // Expect get_activation_block to succeed + mock_db.expect_get_activation_block().times(1).returning(move || Ok(activation_block)); + + // Expect derived_to_source to succeed + mock_db.expect_derived_to_source().times(1).returning(move |_| Ok(activation_source)); + + // Expect rewind to fail + mock_db.expect_rewind().times(1).returning(|_| Err(StorageError::LockPoisoned)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_activation_block().await; + + // Should return storage error + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + ReorgHandlerError::StorageError(StorageError::LockPoisoned) + )); + } + + #[tokio::test] + async fn test_rewind_to_target_source_success() { + let mut mock_db = MockDb::new(); + + let rewind_target_source = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + let rewind_target_derived = + BlockInfo::new(B256::from([3u8; 32]), 50, B256::from([4u8; 32]), 12346); + + // Expect rewind to be called + mock_db + .expect_rewind_to_source() + .times(1) + .with(predicate::eq(rewind_target_source.id())) + .returning(move |_| Ok(Some(rewind_target_derived))); + + let 
reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_target_source(rewind_target_source).await; + + assert!(result.is_ok()); + let pair = result.unwrap(); + assert_eq!(pair.source, rewind_target_source); + assert_eq!(pair.derived.unwrap(), rewind_target_derived); + } + + #[tokio::test] + async fn test_rewind_to_target_source_rewind_fails() { + let mut mock_db = MockDb::new(); + + let rewind_target_source = + BlockInfo::new(B256::from([1u8; 32]), 100, B256::from([2u8; 32]), 12345); + + // Expect rewind to fail + mock_db.expect_rewind_to_source().times(1).returning(|_| Err(StorageError::LockPoisoned)); + + let reorg_task = ReorgTask::new( + 1, + Arc::new(mock_db), + RpcClient::new(MockTransport::new(Asserter::new()), false), + ); + + let result = reorg_task.rewind_to_target_source(rewind_target_source).await; + + assert!(result.is_err()); + assert!(matches!( + result.unwrap_err(), + ReorgHandlerError::StorageError(StorageError::LockPoisoned) + )); + } +} diff --git a/kona/crates/supervisor/core/src/rpc/admin.rs b/rust/kona/crates/supervisor/core/src/rpc/admin.rs similarity index 100% rename from kona/crates/supervisor/core/src/rpc/admin.rs rename to rust/kona/crates/supervisor/core/src/rpc/admin.rs diff --git a/kona/crates/supervisor/core/src/rpc/metrics.rs b/rust/kona/crates/supervisor/core/src/rpc/metrics.rs similarity index 100% rename from kona/crates/supervisor/core/src/rpc/metrics.rs rename to rust/kona/crates/supervisor/core/src/rpc/metrics.rs diff --git a/kona/crates/supervisor/core/src/rpc/mod.rs b/rust/kona/crates/supervisor/core/src/rpc/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/rpc/mod.rs rename to rust/kona/crates/supervisor/core/src/rpc/mod.rs diff --git a/rust/kona/crates/supervisor/core/src/rpc/server.rs b/rust/kona/crates/supervisor/core/src/rpc/server.rs new file mode 100644 index 00000000000..fddcd0e6ba2 --- 
/dev/null +++ b/rust/kona/crates/supervisor/core/src/rpc/server.rs @@ -0,0 +1,514 @@ +//! Server-side implementation of the Supervisor RPC API. + +use super::Metrics; +use crate::{SpecError, SupervisorError, SupervisorService}; +use alloy_eips::eip1898::BlockNumHash; +use alloy_primitives::{B256, ChainId, map::HashMap}; +use async_trait::async_trait; +use jsonrpsee::{core::RpcResult, types::ErrorObject}; +use kona_interop::{DependencySet, DerivedIdPair, ExecutingDescriptor, SafetyLevel}; +use kona_protocol::BlockInfo; +use kona_supervisor_rpc::{ + SuperRootOutputRpc, SupervisorApiServer, SupervisorChainSyncStatus, SupervisorSyncStatus, +}; +use kona_supervisor_types::{HexStringU64, SuperHead}; +use std::sync::Arc; +use tracing::{trace, warn}; + +/// The server-side implementation struct for the [`SupervisorApiServer`]. +/// It holds a reference to the core Supervisor logic. +#[derive(Debug)] +pub struct SupervisorRpc { + /// Reference to the core Supervisor logic. + /// Using Arc allows sharing the Supervisor instance if needed, + supervisor: Arc, +} + +impl SupervisorRpc { + /// Creates a new [`SupervisorRpc`] instance. 
+ pub fn new(supervisor: Arc) -> Self { + Metrics::init(); + trace!(target: "supervisor::rpc", "Creating new SupervisorRpc handler"); + Self { supervisor } + } +} + +#[async_trait] +impl SupervisorApiServer for SupervisorRpc +where + T: SupervisorService + 'static, +{ + async fn cross_derived_to_source( + &self, + chain_id_hex: HexStringU64, + derived: BlockNumHash, + ) -> RpcResult { + let chain_id = ChainId::from(chain_id_hex); + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_CROSS_DERIVED_TO_SOURCE, + async { + trace!( + target: "supervisor::rpc", + %chain_id, + ?derived, + "Received cross_derived_to_source request" + ); + + let source_block = + self.supervisor.derived_to_source_block(chain_id, derived).map_err(|err| { + warn!( + target: "supervisor::rpc", + %chain_id, + ?derived, + %err, + "Failed to get source block for derived block" + ); + ErrorObject::from(err) + })?; + + Ok(source_block) + } + .await + ) + } + + async fn local_unsafe(&self, chain_id_hex: HexStringU64) -> RpcResult { + let chain_id = ChainId::from(chain_id_hex); + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_LOCAL_UNSAFE, + async { + trace!(target: "supervisor::rpc", + %chain_id, + "Received local_unsafe request" + ); + + Ok(self.supervisor.local_unsafe(chain_id)?.id()) + } + .await + ) + } + + async fn local_safe(&self, chain_id_hex: HexStringU64) -> RpcResult { + let chain_id = ChainId::from(chain_id_hex); + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_LOCAL_SAFE, + async { + trace!(target: "supervisor::rpc", + %chain_id, + "Received local_safe request" + ); + + let derived = self.supervisor.local_safe(chain_id)?.id(); + let source = self.supervisor.derived_to_source_block(chain_id, derived)?.id(); + + Ok(DerivedIdPair { source, derived }) + } + .await + ) + } + + async fn dependency_set_v1(&self) -> RpcResult { + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_DEPENDENCY_SET, + async { + trace!(target: "supervisor::rpc", + "Received the 
dependency set" + ); + + Ok(self.supervisor.dependency_set().to_owned()) + } + .await + ) + } + + async fn cross_safe(&self, chain_id_hex: HexStringU64) -> RpcResult { + let chain_id = ChainId::from(chain_id_hex); + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_CROSS_SAFE, + async { + trace!(target: "supervisor::rpc", + %chain_id, + "Received cross_safe request" + ); + + let derived = self.supervisor.cross_safe(chain_id)?.id(); + let source = self.supervisor.derived_to_source_block(chain_id, derived)?.id(); + + Ok(DerivedIdPair { source, derived }) + } + .await + ) + } + + async fn finalized(&self, chain_id_hex: HexStringU64) -> RpcResult { + let chain_id = ChainId::from(chain_id_hex); + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_FINALIZED, + async { + trace!(target: "supervisor::rpc", + %chain_id, + "Received finalized request" + ); + + Ok(self.supervisor.finalized(chain_id)?.id()) + } + .await + ) + } + + async fn finalized_l1(&self) -> RpcResult { + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_FINALIZED_L1, + async { + trace!(target: "supervisor::rpc", "Received finalized_l1 request"); + Ok(self.supervisor.finalized_l1()?) 
+ } + .await + ) + } + + async fn super_root_at_timestamp( + &self, + timestamp_hex: HexStringU64, + ) -> RpcResult { + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_SUPER_ROOT_AT_TIMESTAMP, + async { + let timestamp = u64::from(timestamp_hex); + trace!(target: "supervisor::rpc", + %timestamp, + "Received super_root_at_timestamp request" + ); + + self.supervisor.super_root_at_timestamp(timestamp) + .await + .map_err(|err| { + warn!(target: "supervisor::rpc", %err, "Error from core supervisor super_root_at_timestamp"); + ErrorObject::from(err) + }) + }.await + ) + } + + async fn check_access_list( + &self, + inbox_entries: Vec, + min_safety: SafetyLevel, + executing_descriptor: ExecutingDescriptor, + ) -> RpcResult<()> { + // TODO:: refactor, maybe build proc macro to record metrics + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_CHECK_ACCESS_LIST, + async { + trace!(target: "supervisor::rpc", + num_inbox_entries = inbox_entries.len(), + ?min_safety, + ?executing_descriptor, + "Received check_access_list request", + ); + self.supervisor + .check_access_list(inbox_entries, min_safety, executing_descriptor) + .map_err(|err| { + warn!(target: "supervisor::rpc", %err, "Error from core supervisor check_access_list"); + ErrorObject::from(err) + }) + }.await + ) + } + + async fn sync_status(&self) -> RpcResult { + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_SYNC_STATUS, + async { + trace!(target: "supervisor::rpc", "Received sync_status request"); + + let mut chains = self + .supervisor + .chain_ids() + .map(|id| (id, Default::default())) + .collect::>(); + + if chains.is_empty() { + // return error if no chains configured + // + // + // + // todo: add to spec + Err(SupervisorError::EmptyDependencySet)?; + } + + let mut min_synced_l1 = BlockInfo { number: u64::MAX, ..Default::default() }; + let mut cross_safe_timestamp = u64::MAX; + let mut finalized_timestamp = u64::MAX; + let mut uninitialized_chain_db_count = 0; + + for (id, 
status) in &mut chains { + let head = match self.supervisor.super_head(*id) { + Ok(head) => head, + Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec)) => { + uninitialized_chain_db_count += 1; + continue; + } + Err(err) => return Err(ErrorObject::from(err)), + }; + + // uses lowest safe and finalized timestamps, as well as l1 block, of all l2s + // + // + // + // todo: add to spec + let SuperHead { l1_source, cross_safe, finalized, .. } = &head; + + let default_block = BlockInfo::default(); + let l1_source = l1_source.as_ref().unwrap_or(&default_block); + let cross_safe = cross_safe.as_ref().unwrap_or(&default_block); + let finalized = finalized.as_ref().unwrap_or(&default_block); + + if l1_source.number < min_synced_l1.number { + min_synced_l1 = *l1_source; + } + if cross_safe.timestamp < cross_safe_timestamp { + cross_safe_timestamp = cross_safe.timestamp; + } + if finalized.timestamp < finalized_timestamp { + finalized_timestamp = finalized.timestamp; + } + + *status = head.into(); + } + + if uninitialized_chain_db_count == chains.len() { + warn!(target: "supervisor::rpc", "No chain db initialized"); + return Err(ErrorObject::from(SupervisorError::SpecError( + SpecError::ErrorNotInSpec, + ))); + } + + Ok(SupervisorSyncStatus { + min_synced_l1, + cross_safe_timestamp, + finalized_timestamp, + chains, + }) + } + .await + ) + } + + async fn all_safe_derived_at( + &self, + derived_from: BlockNumHash, + ) -> RpcResult> { + crate::observe_rpc_call!( + Metrics::SUPERVISOR_RPC_METHOD_ALL_SAFE_DERIVED_AT, + async { + trace!(target: "supervisor::rpc", + ?derived_from, + "Received all_safe_derived_at request" + ); + + let mut chains = self + .supervisor + .chain_ids() + .map(|id| (id, Default::default())) + .collect::>(); + + for (id, block) in &mut chains { + *block = self.supervisor.latest_block_from(derived_from, *id)?.id(); + } + + Ok(chains) + } + .await + ) + } +} + +impl Clone for SupervisorRpc { + fn clone(&self) -> Self { + Self { supervisor: 
self.supervisor.clone() } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::ChainId; + use kona_protocol::BlockInfo; + use kona_supervisor_storage::{EntryNotFoundError, StorageError}; + use mockall::*; + use std::sync::Arc; + + mock!( + #[derive(Debug)] + pub SupervisorService {} + + #[async_trait] + impl SupervisorService for SupervisorService { + fn chain_ids(&self) -> impl Iterator; + fn dependency_set(&self) -> &DependencySet; + fn super_head(&self, chain: ChainId) -> Result; + fn latest_block_from(&self, l1_block: BlockNumHash, chain: ChainId) -> Result; + fn derived_to_source_block(&self, chain: ChainId, derived: BlockNumHash) -> Result; + fn local_unsafe(&self, chain: ChainId) -> Result; + fn local_safe(&self, chain: ChainId) -> Result; + fn cross_safe(&self, chain: ChainId) -> Result; + fn finalized(&self, chain: ChainId) -> Result; + fn finalized_l1(&self) -> Result; + fn check_access_list(&self, inbox_entries: Vec, min_safety: SafetyLevel, executing_descriptor: ExecutingDescriptor) -> Result<(), SupervisorError>; + async fn super_root_at_timestamp(&self, timestamp: u64) -> Result; + } + ); + + #[tokio::test] + async fn test_sync_status_empty_chains() { + let mut mock_service = MockSupervisorService::new(); + mock_service.expect_chain_ids().returning(|| Box::new(vec![].into_iter())); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await; + + assert!(result.is_err()); + assert_eq!(result.unwrap_err(), ErrorObject::from(SupervisorError::EmptyDependencySet)); + } + + #[tokio::test] + async fn test_sync_status_single_chain() { + let chain_id = ChainId::from(1u64); + + let block_info = BlockInfo { number: 42, ..Default::default() }; + let super_head = SuperHead { + l1_source: Some(block_info), + cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), + finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), + ..Default::default() + }; + + let mut mock_service = 
MockSupervisorService::new(); + mock_service.expect_chain_ids().returning(move || Box::new(vec![chain_id].into_iter())); + mock_service.expect_super_head().returning(move |_| Ok(super_head)); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await.unwrap(); + + assert_eq!(result.min_synced_l1.number, 42); + assert_eq!(result.cross_safe_timestamp, 100); + assert_eq!(result.finalized_timestamp, 50); + assert_eq!(result.chains.len(), 1); + } + + #[tokio::test] + async fn test_sync_status_missing_super_head() { + let chain_id_1 = ChainId::from(1u64); + let chain_id_2 = ChainId::from(2u64); + + // Only chain_id_1 has a SuperHead, chain_id_2 is missing + let block_info = BlockInfo { number: 42, ..Default::default() }; + let super_head = SuperHead { + l1_source: Some(block_info), + cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), + finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), + ..Default::default() + }; + + let mut mock_service = MockSupervisorService::new(); + mock_service + .expect_chain_ids() + .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); + mock_service.expect_super_head().returning(move |chain_id| { + if chain_id == chain_id_1 { + Ok(super_head) + } else { + Err(SupervisorError::StorageError(StorageError::EntryNotFound( + EntryNotFoundError::DerivedBlockNotFound(1), + ))) + } + }); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await; + + assert!(result.is_err()); + } + + #[tokio::test] + async fn test_sync_status_uninitialized_chain_db() { + let chain_id_1 = ChainId::from(1u64); + let chain_id_2 = ChainId::from(2u64); + + // Case 1: No chain db is initialized + let mut mock_service = MockSupervisorService::new(); + mock_service + .expect_chain_ids() + .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); + mock_service + .expect_super_head() + .times(2) + .returning(move |_| 
Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec))); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await; + assert!(result.is_err()); + assert_eq!( + result.unwrap_err(), + ErrorObject::from(SupervisorError::SpecError(SpecError::ErrorNotInSpec,)) + ); + + // Case 2: Only one chain db is initialized + let block_info = BlockInfo { number: 42, ..Default::default() }; + let super_head = SuperHead { + l1_source: Some(block_info), + cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), + finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), + ..Default::default() + }; + + let mut mock_service = MockSupervisorService::new(); + mock_service + .expect_chain_ids() + .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); + mock_service.expect_super_head().times(2).returning(move |chain_id| { + if chain_id == chain_id_1 { + Ok(super_head) + } else { + Err(SupervisorError::SpecError(SpecError::ErrorNotInSpec)) + } + }); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await; + assert!(result.is_ok()); + + // Case 3: Both chain dbs are initialized + let block_info_1 = BlockInfo { number: 42, ..Default::default() }; + let super_head_1 = SuperHead { + l1_source: Some(block_info_1), + cross_safe: Some(BlockInfo { timestamp: 100, ..Default::default() }), + finalized: Some(BlockInfo { timestamp: 50, ..Default::default() }), + ..Default::default() + }; + let block_info_2 = BlockInfo { number: 43, ..Default::default() }; + let super_head_2 = SuperHead { + l1_source: Some(block_info_2), + cross_safe: Some(BlockInfo { timestamp: 110, ..Default::default() }), + finalized: Some(BlockInfo { timestamp: 60, ..Default::default() }), + ..Default::default() + }; + let mut mock_service = MockSupervisorService::new(); + mock_service + .expect_chain_ids() + .returning(move || Box::new(vec![chain_id_1, chain_id_2].into_iter())); + 
mock_service.expect_super_head().times(2).returning(move |chain_id| { + if chain_id == chain_id_1 { Ok(super_head_1) } else { Ok(super_head_2) } + }); + + let rpc = SupervisorRpc::new(Arc::new(mock_service)); + let result = rpc.sync_status().await; + assert!(result.is_ok()); + let status = result.unwrap(); + assert_eq!(status.min_synced_l1.number, 42); + assert_eq!(status.cross_safe_timestamp, 100); + assert_eq!(status.finalized_timestamp, 50); + assert_eq!(status.chains.len(), 2); + } +} diff --git a/kona/crates/supervisor/core/src/safety_checker/cross.rs b/rust/kona/crates/supervisor/core/src/safety_checker/cross.rs similarity index 99% rename from kona/crates/supervisor/core/src/safety_checker/cross.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/cross.rs index 7fcca42a2f1..b15300b0c5b 100644 --- a/kona/crates/supervisor/core/src/safety_checker/cross.rs +++ b/rust/kona/crates/supervisor/core/src/safety_checker/cross.rs @@ -673,7 +673,6 @@ mod tests { hash: b256(333), }), }]), - ("3", 20) => Ok(vec![]), // No further dependency — traversal ends here _ => Ok(vec![]), } }); diff --git a/kona/crates/supervisor/core/src/safety_checker/error.rs b/rust/kona/crates/supervisor/core/src/safety_checker/error.rs similarity index 100% rename from kona/crates/supervisor/core/src/safety_checker/error.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/error.rs diff --git a/kona/crates/supervisor/core/src/safety_checker/mod.rs b/rust/kona/crates/supervisor/core/src/safety_checker/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/safety_checker/mod.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/mod.rs diff --git a/kona/crates/supervisor/core/src/safety_checker/promoter.rs b/rust/kona/crates/supervisor/core/src/safety_checker/promoter.rs similarity index 89% rename from kona/crates/supervisor/core/src/safety_checker/promoter.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/promoter.rs index 
ca8ecdb0330..3172bd738db 100644 --- a/kona/crates/supervisor/core/src/safety_checker/promoter.rs +++ b/rust/kona/crates/supervisor/core/src/safety_checker/promoter.rs @@ -4,7 +4,7 @@ use kona_protocol::BlockInfo; use kona_supervisor_storage::CrossChainSafetyProvider; use op_alloy_consensus::interop::SafetyLevel; -/// CrossUnsafePromoter implements [`SafetyPromoter`] for [`SafetyLevel::CrossUnsafe`] +/// `CrossUnsafePromoter` implements [`SafetyPromoter`] for [`SafetyLevel::CrossUnsafe`] #[derive(Debug)] pub struct CrossUnsafePromoter; @@ -28,7 +28,7 @@ impl SafetyPromoter for CrossUnsafePromoter { } } -/// CrossSafePromoter implements [`SafetyPromoter`] for [`SafetyLevel::CrossSafe`] +/// `CrossSafePromoter` implements [`SafetyPromoter`] for [`SafetyLevel::CrossSafe`] #[derive(Debug)] pub struct CrossSafePromoter; diff --git a/kona/crates/supervisor/core/src/safety_checker/task.rs b/rust/kona/crates/supervisor/core/src/safety_checker/task.rs similarity index 100% rename from kona/crates/supervisor/core/src/safety_checker/task.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/task.rs diff --git a/kona/crates/supervisor/core/src/safety_checker/traits.rs b/rust/kona/crates/supervisor/core/src/safety_checker/traits.rs similarity index 100% rename from kona/crates/supervisor/core/src/safety_checker/traits.rs rename to rust/kona/crates/supervisor/core/src/safety_checker/traits.rs diff --git a/kona/crates/supervisor/core/src/state/mod.rs b/rust/kona/crates/supervisor/core/src/state/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/state/mod.rs rename to rust/kona/crates/supervisor/core/src/state/mod.rs diff --git a/kona/crates/supervisor/core/src/supervisor.rs b/rust/kona/crates/supervisor/core/src/supervisor.rs similarity index 100% rename from kona/crates/supervisor/core/src/supervisor.rs rename to rust/kona/crates/supervisor/core/src/supervisor.rs diff --git a/rust/kona/crates/supervisor/core/src/syncnode/client.rs 
b/rust/kona/crates/supervisor/core/src/syncnode/client.rs new file mode 100644 index 00000000000..46b00225f8e --- /dev/null +++ b/rust/kona/crates/supervisor/core/src/syncnode/client.rs @@ -0,0 +1,406 @@ +use super::{AuthenticationError, ClientError, metrics::Metrics}; +use alloy_primitives::{B256, ChainId}; +use alloy_rpc_types_engine::{Claims, JwtSecret}; +use alloy_rpc_types_eth::BlockNumHash; +use async_trait::async_trait; +use jsonrpsee::{ + core::client::Subscription, + ws_client::{HeaderMap, HeaderValue, WsClient, WsClientBuilder}, +}; +use kona_supervisor_metrics::observe_metrics_for_result_async; +use kona_supervisor_rpc::{BlockInfo, ManagedModeApiClient, jsonrpsee::SubscriptionTopic}; +use kona_supervisor_types::{BlockSeal, OutputV0, Receipts, SubscriptionEvent}; +use std::{ + fmt::Debug, + sync::{Arc, OnceLock}, +}; +use tokio::sync::Mutex; +use tracing::{error, info}; + +/// Trait for a managed node client that provides various methods to interact with the node. +#[async_trait] +pub trait ManagedNodeClient: Send + Sync + Debug { + /// Returns the [`ChainId`] of the managed node. + async fn chain_id(&self) -> Result; + + /// Subscribes to [`SubscriptionEvent`] from the managed node. + async fn subscribe_events(&self) -> Result, ClientError>; + + /// Fetches [`Receipts`] for a given block hash. + async fn fetch_receipts(&self, block_hash: B256) -> Result; + + /// Fetches the [`OutputV0`] at a specific timestamp. + async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result; + + /// Fetches the pending [`OutputV0`] at a specific timestamp. + async fn pending_output_v0_at_timestamp(&self, timestamp: u64) + -> Result; + + /// Fetches the L2 [`BlockInfo`] by timestamp. + async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result; + + /// Fetches the [`BlockInfo`] by block number. + async fn block_ref_by_number(&self, block_number: u64) -> Result; + + /// Resets the managed node to the pre-interop state. 
+ async fn reset_pre_interop(&self) -> Result<(), ClientError>; + + /// Resets the node state with the provided block IDs. + async fn reset( + &self, + unsafe_id: BlockNumHash, + cross_unsafe_id: BlockNumHash, + local_safe_id: BlockNumHash, + cross_safe_id: BlockNumHash, + finalised_id: BlockNumHash, + ) -> Result<(), ClientError>; + + /// Invalidates a block in the managed node. + async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; + + /// Provides L1 [`BlockInfo`] to the managed node. + async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError>; + + /// Updates the finalized block ID in the managed node. + async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; + + /// Updates the cross-unsafe block ID in the managed node. + async fn update_cross_unsafe( + &self, + cross_unsafe_block_id: BlockNumHash, + ) -> Result<(), ClientError>; + + /// Updates the cross-safe block ID in the managed node. + async fn update_cross_safe( + &self, + source_block_id: BlockNumHash, + derived_block_id: BlockNumHash, + ) -> Result<(), ClientError>; + + /// Resets the ws-client to None when server disconnects + async fn reset_ws_client(&self); +} + +/// [`ClientConfig`] sets the configuration for the managed node client. +#[derive(Debug, Clone)] +pub struct ClientConfig { + /// The URL + port of the managed node + pub url: String, + /// jwt secret for the managed node interop rpc + pub jwt_secret: JwtSecret, +} + +/// Client for interacting with a managed node. +#[derive(Debug)] +pub struct Client { + config: ClientConfig, + /// Chain ID of the managed node + chain_id: OnceLock, + /// The attached web socket client + ws_client: Mutex>>, +} + +impl Client { + /// Creates a new [`Client`] with the given configuration. 
+ pub fn new(config: ClientConfig) -> Self { + Metrics::init(config.url.as_ref()); + Self { config, chain_id: OnceLock::new(), ws_client: Mutex::new(None) } + } + + /// Creates authentication headers using JWT secret. + fn create_auth_headers(&self) -> Result { + // Create JWT claims with current time + let claims = Claims::with_current_timestamp(); + let token = self.config.jwt_secret.encode(&claims).map_err(|err| { + error!(target: "supervisor::managed_node", %err, "Failed to encode JWT claims"); + AuthenticationError::InvalidJwt + })?; + + let mut headers = HeaderMap::new(); + let auth_header = format!("Bearer {token}"); + + headers.insert( + "Authorization", + HeaderValue::from_str(&auth_header).map_err(|err| { + error!(target: "supervisor::managed_node", %err, "Invalid authorization header"); + AuthenticationError::InvalidHeader + })?, + ); + + Ok(headers) + } + + /// Returns a reference to the `WebSocket` client, creating it if it doesn't exist. + // todo: support http client as well + pub async fn get_ws_client(&self) -> Result, ClientError> { + let mut ws_client_guard = self.ws_client.lock().await; + if ws_client_guard.is_none() { + let headers = self.create_auth_headers().inspect_err(|err| { + error!(target: "supervisor::managed_node", %err, "Failed to create auth headers"); + })?; + + info!(target: "supervisor::managed_node", ws_url = self.config.url, "Creating a new web socket client"); + let client = + WsClientBuilder::default().set_headers(headers).build(&self.config.url).await?; + + *ws_client_guard = Some(Arc::new(client)); + } + Ok(ws_client_guard.clone().unwrap()) + } +} + +#[async_trait] +impl ManagedNodeClient for Client { + async fn reset_ws_client(&self) { + let mut ws_client_guard = self.ws_client.lock().await; + if ws_client_guard.is_some() { + *ws_client_guard = None; + }; + } + + async fn chain_id(&self) -> Result { + if let Some(chain_id) = self.chain_id.get() { + return Ok(*chain_id); + } + + let client = self.get_ws_client().await?; + 
let chain_id_str = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_CHAIN_ID, + async { + client.chain_id().await + }, + "node" => self.config.url.clone() + ) + .inspect_err(|err| { + error!(target: "supervisor::managed_node", %err, "Failed to get chain ID"); + })?; + + let chain_id = chain_id_str.parse::().inspect_err(|err| { + error!(target: "supervisor::managed_node", %err, "Failed to parse chain ID"); + })?; + + let _ = self.chain_id.set(chain_id); + Ok(chain_id) + } + + async fn subscribe_events(&self) -> Result, ClientError> { + let client = self.get_ws_client().await?; // This returns ManagedNodeError, handled by your function + let subscription = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_SUBSCRIBE_EVENTS, + async { + ManagedModeApiClient::subscribe_events(client.as_ref(), SubscriptionTopic::Events).await + }, + "node" => self.config.url.clone() + )?; + + Ok(subscription) + } + + async fn fetch_receipts(&self, block_hash: B256) -> Result { + let client = self.get_ws_client().await?; // This returns ManagedNodeError, handled by your function + let receipts = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_FETCH_RECEIPTS, + async { + ManagedModeApiClient::fetch_receipts(client.as_ref(), block_hash).await + }, + "node" => self.config.url.clone() + )?; + + Ok(receipts) + } + + async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result { + let client = self.get_ws_client().await?; + let output_v0 = observe_metrics_for_result_async!( + 
Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_OUTPUT_V0_AT_TIMESTAMP, + async { + ManagedModeApiClient::output_v0_at_timestamp(client.as_ref(), timestamp).await + }, + "node" => self.config.url.clone() + )?; + + Ok(output_v0) + } + + async fn pending_output_v0_at_timestamp( + &self, + timestamp: u64, + ) -> Result { + let client = self.get_ws_client().await?; + let output_v0 = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_PENDING_OUTPUT_V0_AT_TIMESTAMP, + async { + ManagedModeApiClient::pending_output_v0_at_timestamp(client.as_ref(), timestamp).await + }, + "node" => self.config.url.clone() + )?; + + Ok(output_v0) + } + + async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result { + let client = self.get_ws_client().await?; + let block_info = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_L2_BLOCK_REF_BY_TIMESTAMP, + async { + ManagedModeApiClient::l2_block_ref_by_timestamp(client.as_ref(), timestamp).await + }, + "node" => self.config.url.clone() + )?; + + Ok(block_info) + } + + async fn block_ref_by_number(&self, block_number: u64) -> Result { + let client = self.get_ws_client().await?; + let block_info = observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_BLOCK_REF_BY_NUMBER, + async { + ManagedModeApiClient::l2_block_ref_by_number(client.as_ref(), block_number).await + }, + "node" => self.config.url.clone() + )?; + + Ok(block_info) + } + + 
async fn reset_pre_interop(&self) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_RESET_PRE_INTEROP, + async { + ManagedModeApiClient::reset_pre_interop(client.as_ref()).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn reset( + &self, + unsafe_id: BlockNumHash, + cross_unsafe_id: BlockNumHash, + local_safe_id: BlockNumHash, + cross_safe_id: BlockNumHash, + finalised_id: BlockNumHash, + ) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_RESET, + async { + ManagedModeApiClient::reset(client.as_ref(), unsafe_id, cross_unsafe_id, local_safe_id, cross_safe_id, finalised_id).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_INVALIDATE_BLOCK, + async { + ManagedModeApiClient::invalidate_block(client.as_ref(), seal).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + 
Metrics::RPC_METHOD_PROVIDE_L1, + async { + ManagedModeApiClient::provide_l1(client.as_ref(), block_info).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_UPDATE_FINALIZED, + async { + ManagedModeApiClient::update_finalized(client.as_ref(), finalized_block_id).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn update_cross_unsafe( + &self, + cross_unsafe_block_id: BlockNumHash, + ) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_UPDATE_CROSS_UNSAFE, + async { + ManagedModeApiClient::update_cross_unsafe(client.as_ref(), cross_unsafe_block_id).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } + + async fn update_cross_safe( + &self, + source_block_id: BlockNumHash, + derived_block_id: BlockNumHash, + ) -> Result<(), ClientError> { + let client = self.get_ws_client().await?; + observe_metrics_for_result_async!( + Metrics::MANAGED_NODE_RPC_REQUESTS_SUCCESS_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUESTS_ERROR_TOTAL, + Metrics::MANAGED_NODE_RPC_REQUEST_DURATION_SECONDS, + Metrics::RPC_METHOD_UPDATE_CROSS_SAFE, + async { + ManagedModeApiClient::update_cross_safe(client.as_ref(), derived_block_id, source_block_id).await + }, + "node" => self.config.url.clone() + )?; + Ok(()) + } +} diff --git a/kona/crates/supervisor/core/src/syncnode/command.rs b/rust/kona/crates/supervisor/core/src/syncnode/command.rs similarity index 100% 
rename from kona/crates/supervisor/core/src/syncnode/command.rs rename to rust/kona/crates/supervisor/core/src/syncnode/command.rs diff --git a/rust/kona/crates/supervisor/core/src/syncnode/error.rs b/rust/kona/crates/supervisor/core/src/syncnode/error.rs new file mode 100644 index 00000000000..859938e01cb --- /dev/null +++ b/rust/kona/crates/supervisor/core/src/syncnode/error.rs @@ -0,0 +1,67 @@ +use kona_supervisor_storage::StorageError; +use thiserror::Error; + +/// Represents various errors that can occur during node management. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum ManagedNodeError { + /// Represents an error that occurred while starting the managed node. + #[error(transparent)] + ClientError(#[from] ClientError), + + /// Represents an error that occurred while fetching data from the storage. + #[error(transparent)] + StorageError(#[from] StorageError), + + /// Unable to successfully fetch block. + #[error("failed to get block by number, number: {0}")] + GetBlockByNumberFailed(u64), + + /// Represents an error that occurred while sending an event to the channel. + #[error("failed to send event to channel: {0}")] + ChannelSendFailed(String), + + /// Represents an error that occurred while resetting the managed node. + #[error("failed to reset the managed node")] + ResetFailed, +} + +/// Error establishing authenticated connection to managed node. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum AuthenticationError { + /// Missing valid JWT secret for authentication header. + #[error("jwt secret not found or invalid")] + InvalidJwt, + /// Invalid header format. + #[error("invalid authorization header")] + InvalidHeader, +} + +/// Represents errors that can occur while interacting with the managed node client. +#[derive(Debug, Error)] +pub enum ClientError { + /// Represents an error that occurred while starting the managed node. 
+ #[error(transparent)] + Client(#[from] jsonrpsee::core::ClientError), + + /// Represents an error that occurred while authenticating to the managed node. + #[error("failed to authenticate: {0}")] + Authentication(#[from] AuthenticationError), + + /// Represents an error that occurred while parsing a chain ID from a string. + #[error(transparent)] + ChainIdParseError(#[from] std::num::ParseIntError), +} + +impl PartialEq for ClientError { + fn eq(&self, other: &Self) -> bool { + use ClientError::{Authentication, ChainIdParseError, Client}; + match (self, other) { + (Client(a), Client(b)) => a.to_string() == b.to_string(), + (Authentication(a), Authentication(b)) => a == b, + (ChainIdParseError(a), ChainIdParseError(b)) => a == b, + _ => false, + } + } +} + +impl Eq for ClientError {} diff --git a/kona/crates/supervisor/core/src/syncnode/metrics.rs b/rust/kona/crates/supervisor/core/src/syncnode/metrics.rs similarity index 100% rename from kona/crates/supervisor/core/src/syncnode/metrics.rs rename to rust/kona/crates/supervisor/core/src/syncnode/metrics.rs diff --git a/kona/crates/supervisor/core/src/syncnode/mod.rs b/rust/kona/crates/supervisor/core/src/syncnode/mod.rs similarity index 100% rename from kona/crates/supervisor/core/src/syncnode/mod.rs rename to rust/kona/crates/supervisor/core/src/syncnode/mod.rs diff --git a/rust/kona/crates/supervisor/core/src/syncnode/node.rs b/rust/kona/crates/supervisor/core/src/syncnode/node.rs new file mode 100644 index 00000000000..610737d3a30 --- /dev/null +++ b/rust/kona/crates/supervisor/core/src/syncnode/node.rs @@ -0,0 +1,943 @@ +//! [`ManagedNode`] implementation for handling events from the managed node. 
+ +use super::{ + BlockProvider, ManagedNodeClient, ManagedNodeController, ManagedNodeDataProvider, + ManagedNodeError, SubscriptionHandler, resetter::Resetter, +}; +use crate::event::ChainEvent; +use alloy_eips::BlockNumberOrTag; +use alloy_network::Ethereum; +use alloy_primitives::{B256, ChainId}; +use alloy_provider::{Provider, RootProvider}; +use alloy_rpc_types_eth::BlockNumHash; +use async_trait::async_trait; +use kona_interop::{BlockReplacement, DerivedRefPair}; +use kona_protocol::BlockInfo; +use kona_supervisor_storage::{DerivationStorageReader, HeadRefStorageReader, LogStorageReader}; +use kona_supervisor_types::{BlockSeal, OutputV0, Receipts}; +use std::sync::Arc; +use tokio::sync::{Mutex, mpsc}; +use tracing::{debug, error, trace, warn}; + +/// [`ManagedNode`] processes events dispatched from the managed node. +/// +/// It implements `SubscriptionHandler`, forwards resulting `ChainEvent`s to the chain +/// processor, and delegates control operations to the underlying client/resetter. +/// The `WebSocket` subscription lifecycle (subscription creation, reconnection/restart) +/// is managed by the supervisor actor and the client, not by this type. +#[derive(Debug)] +pub struct ManagedNode { + /// The attached web socket client + client: Arc, + /// Shared L1 provider for fetching receipts + l1_provider: RootProvider, + /// Resetter for handling node resets + resetter: Arc>, + /// Channel for sending events to the chain processor + chain_event_sender: mpsc::Sender, + + /// Cached chain ID + chain_id: Mutex>, +} + +impl ManagedNode +where + DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, + C: ManagedNodeClient + Send + Sync + 'static, +{ + /// Creates a new [`ManagedNode`] with the specified client. 
+ pub fn new( + client: Arc, + db_provider: Arc, + l1_provider: RootProvider, + chain_event_sender: mpsc::Sender, + ) -> Self { + let resetter = Arc::new(Resetter::new(client.clone(), l1_provider.clone(), db_provider)); + + Self { client, resetter, l1_provider, chain_event_sender, chain_id: Mutex::new(None) } + } + + /// Returns the [`ChainId`] of the [`ManagedNode`]. + /// If the chain ID is already cached, it returns that. + /// If not, it fetches the chain ID from the managed node. + pub async fn chain_id(&self) -> Result { + // we are caching the chain ID here to avoid multiple calls to the client + // there is a possibility that chain ID might be being cached in the client already + // but we are caching it here to make sure it caches in the `ManagedNode` context + let mut cache = self.chain_id.lock().await; + if let Some(chain_id) = *cache { + Ok(chain_id) + } else { + let chain_id = self.client.chain_id().await?; + *cache = Some(chain_id); + Ok(chain_id) + } + } +} + +#[async_trait] +impl SubscriptionHandler for ManagedNode +where + DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, + C: ManagedNodeClient + Send + Sync + 'static, +{ + async fn handle_exhaust_l1( + &self, + derived_ref_pair: &DerivedRefPair, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!( + target: "supervisor::managed_node", + %chain_id, + %derived_ref_pair, + "Handling L1 exhaust event" + ); + + let next_block_number = derived_ref_pair.source.number + 1; + let next_block = self + .l1_provider + .get_block_by_number(BlockNumberOrTag::Number(next_block_number)) + .await + .map_err(|err| { + error!(target: "supervisor::managed_node", %chain_id, %err, "Failed to fetch next L1 block"); + ManagedNodeError::GetBlockByNumberFailed(next_block_number) + })?; + + let block = match next_block { + Some(block) => block, + None => { + // If the block is None, it means the block is either empty or unavailable. 
+ // ignore this case + return Ok(()); + } + }; + + let new_source = BlockInfo { + hash: block.header.hash, + number: block.header.number, + parent_hash: block.header.parent_hash, + timestamp: block.header.timestamp, + }; + + if new_source.parent_hash != derived_ref_pair.source.hash { + // this could happen due to a reorg. + // this case should be handled by the reorg manager + debug!( + target: "supervisor::managed_node", + %chain_id, + %new_source, + current_source = %derived_ref_pair.source, + "Parent hash mismatch. Possible reorg detected" + ); + } + + self.client.provide_l1(new_source).await.inspect_err(|err| { + error!( + target: "supervisor::managed_node", + %chain_id, + %new_source, + %err, + "Failed to provide L1 block" + ); + })?; + Ok(()) + } + + async fn handle_reset(&self, reset_id: &str) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, reset_id, "Handling reset event"); + + self.resetter.reset().await?; + Ok(()) + } + + async fn handle_unsafe_block(&self, unsafe_block: &BlockInfo) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, %unsafe_block, "Unsafe block event received"); + + self.chain_event_sender.send(ChainEvent::UnsafeBlock { block: *unsafe_block }).await.map_err(|err| { + warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send unsafe block event"); + ManagedNodeError::ChannelSendFailed(err.to_string()) + })?; + Ok(()) + } + + async fn handle_derivation_update( + &self, + derived_ref_pair: &DerivedRefPair, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, "Derivation update event received"); + + self.chain_event_sender.send(ChainEvent::DerivedBlock { derived_ref_pair: *derived_ref_pair }).await.map_err(|err| { + warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to 
send derivation update event"); + ManagedNodeError::ChannelSendFailed(err.to_string()) + })?; + Ok(()) + } + + async fn handle_replace_block( + &self, + replacement: &BlockReplacement, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, %replacement, "Block replacement received"); + + self.chain_event_sender.send(ChainEvent::BlockReplaced { replacement: *replacement }).await.map_err(|err| { + warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send block replacement event"); + ManagedNodeError::ChannelSendFailed(err.to_string()) + })?; + Ok(()) + } + + async fn handle_derivation_origin_update( + &self, + origin: &BlockInfo, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, %origin, "Derivation origin update received"); + + self.chain_event_sender.send(ChainEvent::DerivationOriginUpdate { origin: *origin }).await.map_err(|err| { + warn!(target: "supervisor::managed_node", %chain_id, %err, "Failed to send derivation origin update event"); + ManagedNodeError::ChannelSendFailed(err.to_string()) + })?; + Ok(()) + } +} + +/// Implements [`BlockProvider`] for [`ManagedNode`] by delegating to the underlying `WebSocket` +/// client. 
+#[async_trait] +impl BlockProvider for ManagedNode +where + DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, + C: ManagedNodeClient + Send + Sync + 'static, +{ + async fn block_by_number(&self, block_number: u64) -> Result { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, block_number, "Fetching block by number"); + + let block = self.client.block_ref_by_number(block_number).await?; + Ok(block) + } + async fn fetch_receipts(&self, block_hash: B256) -> Result { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, %block_hash, "Fetching receipts for block"); + + let receipt = self.client.fetch_receipts(block_hash).await?; + Ok(receipt) + } +} + +#[async_trait] +impl ManagedNodeDataProvider for ManagedNode +where + DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, + C: ManagedNodeClient + Send + Sync + 'static, +{ + async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, timestamp, "Fetching output v0 at timestamp"); + + let outputv0 = self.client.output_v0_at_timestamp(timestamp).await?; + Ok(outputv0) + } + + async fn pending_output_v0_at_timestamp( + &self, + timestamp: u64, + ) -> Result { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, timestamp, "Fetching pending output v0 at timestamp"); + + let outputv0 = self.client.pending_output_v0_at_timestamp(timestamp).await?; + Ok(outputv0) + } + + async fn l2_block_ref_by_timestamp( + &self, + timestamp: u64, + ) -> Result { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, timestamp, "Fetching L2 block ref by timestamp"); + + let block = self.client.l2_block_ref_by_timestamp(timestamp).await?; + Ok(block) + } +} + +#[async_trait] +impl 
ManagedNodeController for ManagedNode +where + DB: LogStorageReader + DerivationStorageReader + HeadRefStorageReader + Send + Sync + 'static, + C: ManagedNodeClient + Send + Sync + 'static, +{ + async fn update_finalized( + &self, + finalized_block_id: BlockNumHash, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!( + target: "supervisor::managed_node", + %chain_id, + finalized_block_number = finalized_block_id.number, + "Updating finalized block" + ); + + self.client.update_finalized(finalized_block_id).await?; + Ok(()) + } + + async fn update_cross_unsafe( + &self, + cross_unsafe_block_id: BlockNumHash, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!( + target: "supervisor::managed_node", + %chain_id, + cross_unsafe_block_number = cross_unsafe_block_id.number, + "Updating cross unsafe block", + ); + + self.client.update_cross_unsafe(cross_unsafe_block_id).await?; + Ok(()) + } + + async fn update_cross_safe( + &self, + source_block_id: BlockNumHash, + derived_block_id: BlockNumHash, + ) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!( + target: "supervisor::managed_node", + %chain_id, + source_block_number = source_block_id.number, + derived_block_number = derived_block_id.number, + "Updating cross safe block" + ); + self.client.update_cross_safe(source_block_id, derived_block_id).await?; + Ok(()) + } + + async fn reset(&self) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!(target: "supervisor::managed_node", %chain_id, "Resetting managed node state"); + + self.resetter.reset().await?; + Ok(()) + } + + async fn invalidate_block(&self, block_seal: BlockSeal) -> Result<(), ManagedNodeError> { + let chain_id = self.chain_id().await?; + trace!( + target: "supervisor::managed_node", + %chain_id, + block_number = block_seal.number, + "Invalidating block" + ); + + self.client.invalidate_block(block_seal).await?; + Ok(()) + 
} +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::syncnode::ClientError; + use alloy_primitives::{B256, ChainId, hex::FromHex}; + use alloy_provider::RootProvider; + use alloy_rpc_client::RpcClient; + use alloy_transport::mock::*; + use jsonrpsee::core::client::Subscription; + use kona_interop::{BlockReplacement, DerivedRefPair, SafetyLevel}; + use kona_protocol::BlockInfo; + use kona_supervisor_storage::{ + DerivationStorageReader, HeadRefStorageReader, LogStorageReader, StorageError, + }; + use kona_supervisor_types::{BlockSeal, Log, OutputV0, Receipts, SubscriptionEvent, SuperHead}; + use mockall::{mock, predicate::*}; + use std::sync::Arc; + use tokio::sync::mpsc; + + mock! { + #[derive(Debug)] + pub Client {} + + #[async_trait] + impl ManagedNodeClient for Client { + async fn chain_id(&self) -> Result; + async fn subscribe_events(&self) -> Result, ClientError>; + async fn fetch_receipts(&self, block_hash: B256) -> Result; + async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result; + async fn pending_output_v0_at_timestamp(&self, timestamp: u64) -> Result; + async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result; + async fn block_ref_by_number(&self, block_number: u64) -> Result; + async fn reset_pre_interop(&self) -> Result<(), ClientError>; + async fn reset(&self, unsafe_id: BlockNumHash, cross_unsafe_id: BlockNumHash, local_safe_id: BlockNumHash, cross_safe_id: BlockNumHash, finalised_id: BlockNumHash) -> Result<(), ClientError>; + async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; + async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError>; + async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn update_cross_unsafe(&self, cross_unsafe_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn update_cross_safe(&self, source_block_id: BlockNumHash, derived_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn 
reset_ws_client(&self); + } + } + + mock! { + #[derive(Debug)] + pub Db {} + + impl LogStorageReader for Db { + fn get_block(&self, block_number: u64) -> Result; + fn get_latest_block(&self) -> Result; + fn get_log(&self, block_number: u64, log_index: u32) -> Result; + fn get_logs(&self, block_number: u64) -> Result, StorageError>; + } + + impl DerivationStorageReader for Db { + fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result; + fn latest_derived_block_at_source(&self, _source_block_id: BlockNumHash) -> Result; + fn latest_derivation_state(&self) -> Result; + fn get_source_block(&self, source_block_number: u64) -> Result; + fn get_activation_block(&self) -> Result; + } + + impl HeadRefStorageReader for Db { + fn get_safety_head_ref(&self, level: SafetyLevel) -> Result; + fn get_super_head(&self) -> Result; + } + } + + #[tokio::test] + async fn test_chain_id_caching() { + let mut client = MockClient::new(); + + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + // First call fetches from client + let id1 = node.chain_id().await.unwrap(); + assert_eq!(id1, ChainId::from(42u64)); + // Second call uses cache + let id2 = node.chain_id().await.unwrap(); + assert_eq!(id2, ChainId::from(42u64)); + } + + #[tokio::test] + async fn test_handle_unsafe_block_sends_event() { + let unsafe_block = + BlockInfo { hash: B256::ZERO, number: 1, parent_hash: B256::ZERO, timestamp: 123 }; + + let mut client = MockClient::new(); + + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = 
Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, mut rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let result = node.handle_unsafe_block(&unsafe_block).await; + assert!(result.is_ok()); + + let event = rx.recv().await.unwrap(); + match event { + ChainEvent::UnsafeBlock { block } => assert_eq!(block.number, 1), + _ => panic!("Wrong event"), + } + } + + #[tokio::test] + async fn test_handle_derivation_update_sends_event() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, mut rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let derived_ref_pair = DerivedRefPair { + source: BlockInfo::new(B256::from([0u8; 32]), 0, B256::ZERO, 0), + derived: BlockInfo::new(B256::from([1u8; 32]), 1, B256::ZERO, 0), + }; + + let result = node.handle_derivation_update(&derived_ref_pair).await; + assert!(result.is_ok()); + + let event = rx.recv().await.unwrap(); + match event { + ChainEvent::DerivedBlock { derived_ref_pair: pair } => { + assert_eq!(pair, derived_ref_pair); + } + _ => panic!("Wrong event"), + } + } + + #[tokio::test] + async fn test_handle_replace_block_sends_event() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, mut rx) = mpsc::channel(10); + let node = 
ManagedNode::new(client.clone(), db, l1_provider, tx); + + let replacement = BlockReplacement { + replacement: BlockInfo::new(B256::from([1u8; 32]), 1, B256::ZERO, 0), + invalidated: B256::from([2u8; 32]), + }; + + let result = node.handle_replace_block(&replacement).await; + assert!(result.is_ok()); + + let event = rx.recv().await.unwrap(); + match event { + ChainEvent::BlockReplaced { replacement: rep } => assert_eq!(rep, replacement), + _ => panic!("Wrong event"), + } + } + + #[tokio::test] + async fn test_handle_derivation_origin_update_sends_event() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, mut rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let origin = + BlockInfo { hash: B256::ZERO, number: 10, parent_hash: B256::ZERO, timestamp: 12345 }; + + let result = node.handle_derivation_origin_update(&origin).await; + assert!(result.is_ok()); + + let event = rx.recv().await.unwrap(); + match event { + ChainEvent::DerivationOriginUpdate { origin: block } => assert_eq!(block.number, 10), + _ => panic!("Wrong event"), + } + } + + #[tokio::test] + async fn test_handle_exhaust_l1_calls_provide_l1_on_success() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client.expect_provide_l1().times(1).returning(|_| Ok(())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + + let derived_ref_pair = DerivedRefPair { + source: BlockInfo { + hash: B256::from_hex( + "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", + ) + .unwrap(), + number: 5, + parent_hash: B256::from([14u8; 32]), + timestamp: 300, + }, + 
derived: BlockInfo { + hash: B256::from([11u8; 32]), + number: 40, + parent_hash: B256::from([12u8; 32]), + timestamp: 301, + }, + }; + + let next_block = r#"{ + "number": "6", + "hash": "0xd5f1812548be429cbdc6376b29611fc49e06f1359758c4ceaaa3b393e2239f9c", + "mixHash": "0x24900fb3da77674a861c428429dce0762707ecb6052325bbd9b3c64e74b5af9d", + "parentHash": "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", + "nonce": "0x378da40ff335b070", + "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "logsBloom": "0x00000000000000100000004080000000000500000000000000020000100000000800001000000004000001000000000000000800040010000020100000000400000010000000000000000040000000000000040000000000000000000000000000000400002400000000000000000000000000000004000004000000000000840000000800000080010004000000001000000800000000000000000000000000000000000800000000000040000000020000000000000000000800000400000000000000000000000600000400000000002000000000000000000000004000000000000000100000000000000000000000000000000000040000900010000000", + "transactionsRoot":"0x4d0c8e91e16bdff538c03211c5c73632ed054d00a7e210c0eb25146c20048126", + "stateRoot": "0x91309efa7e42c1f137f31fe9edbe88ae087e6620d0d59031324da3e2f4f93233", + "receiptsRoot": "0x68461ab700003503a305083630a8fb8d14927238f0bc8b6b3d246c0c64f21f4a", + "miner":"0xb42b6c4a95406c78ff892d270ad20b22642e102d", + "difficulty": "0x66e619a", + "totalDifficulty": "0x1e875d746ae", + "extraData": "0xd583010502846765746885676f312e37856c696e7578", + "size": "0x334", + "gasLimit": "0x47e7c4", + "gasUsed": "0x37993", + "timestamp": "0x5835c54d", + "uncles": [], + "transactions": [ + "0xa0807e117a8dd124ab949f460f08c36c72b710188f01609595223b325e58e0fc", + "0xeae6d797af50cb62a596ec3939114d63967c374fa57de9bc0f4e2b576ed6639d" + ], + "baseFeePerGas": "0x7", + "withdrawalsRoot": "0x7a4ecf19774d15cf9c15adf0dd8e8a250c128b26c9e2ab2a08d6c9c8ffbd104f", + "withdrawals": [], + "blobGasUsed": "0x0", + "excessBlobGas": "0x0", 
+ "parentBeaconBlockRoot": "0x95c4dbd5b19f6fe3cbc3183be85ff4e85ebe75c5b4fc911f1c91e5b7a554a685" + }"#; + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + + asserter.push(MockResponse::Success(serde_json::from_str(next_block).unwrap())); + + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let result = node.handle_exhaust_l1(&derived_ref_pair).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_handle_exhaust_l1_calls_provide_l1_on_parent_hash_mismatch() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client.expect_provide_l1().times(1).returning(|_| Ok(())); // Should be called + + let client = Arc::new(client); + let db = MockDb::new(); + + let derived_ref_pair = DerivedRefPair { + source: BlockInfo { + hash: B256::from([1u8; 32]), // This will NOT match parent_hash below + number: 5, + parent_hash: B256::from([14u8; 32]), + timestamp: 300, + }, + derived: BlockInfo { + hash: B256::from([11u8; 32]), + number: 40, + parent_hash: B256::from([12u8; 32]), + timestamp: 301, + }, + }; + + // Block with mismatched parent_hash + let next_block = r#"{ + "number": "10", + "hash": "0xd5f1812548be429cbdc6376b29611fc49e06f1359758c4ceaaa3b393e2239f9c", + "mixHash": "0x24900fb3da77674a861c428429dce0762707ecb6052325bbd9b3c64e74b5af9d", + "parentHash": "0x1f68ac259155e2f38211ddad0f0a15394d55417b185a93923e2abe71bb7a4d6d", + "nonce": "0x378da40ff335b070", + "sha3Uncles": "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + "logsBloom": 
"0x00000000000000100000004080000000000500000000000000020000100000000800001000000004000001000000000000000800040010000020100000000400000010000000000000000040000000000000040000000000000000000000000000000400002400000000000000000000000000000004000004000000000000840000000800000080010004000000001000000800000000000000000000000000000000000800000000000040000000020000000000000000000800000400000000000000000000000600000400000000002000000000000000000000004000000000000000100000000000000000000000000000000000040000900010000000", + "transactionsRoot":"0x4d0c8e91e16bdff538c03211c5c73632ed054d00a7e210c0eb25146c20048126", + "stateRoot": "0x91309efa7e42c1f137f31fe9edbe88ae087e6620d0d59031324da3e2f4f93233", + "receiptsRoot": "0x68461ab700003503a305083630a8fb8d14927238f0bc8b6b3d246c0c64f21f4a", + "miner":"0xb42b6c4a95406c78ff892d270ad20b22642e102d", + "difficulty": "0x66e619a", + "totalDifficulty": "0x1e875d746ae", + "extraData": "0xd583010502846765746885676f312e37856c696e7578", + "size": "0x334", + "gasLimit": "0x47e7c4", + "gasUsed": "0x37993", + "timestamp": "0x5835c54d", + "uncles": [], + "transactions": [ + "0xa0807e117a8dd124ab949f460f08c36c72b710188f01609595223b325e58e0fc", + "0xeae6d797af50cb62a596ec3939114d63967c374fa57de9bc0f4e2b576ed6639d" + ], + "baseFeePerGas": "0x7", + "withdrawalsRoot": "0x7a4ecf19774d15cf9c15adf0dd8e8a250c128b26c9e2ab2a08d6c9c8ffbd104f", + "withdrawals": [], + "blobGasUsed": "0x0", + "excessBlobGas": "0x0", + "parentBeaconBlockRoot": "0x95c4dbd5b19f6fe3cbc3183be85ff4e85ebe75c5b4fc911f1c91e5b7a554a685" + }"#; + + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + + asserter.push(MockResponse::Success(serde_json::from_str(next_block).unwrap())); + + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), Arc::new(db), l1_provider, tx); + + let result = node.handle_exhaust_l1(&derived_ref_pair).await; + 
assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_handle_reset_calls_resetter() { + let mut client = MockClient::new(); + client.expect_chain_id().times(2).returning(|| Ok(ChainId::from(42u64))); + client.expect_reset_pre_interop().times(1).returning(|| Ok(())); + + let mut db = MockDb::new(); + db.expect_latest_derivation_state() + .times(1) + .returning(|| Err(StorageError::DatabaseNotInitialised)); + + let client = Arc::new(client); + let db = Arc::new(db); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + // Just check that it completes without error + let result = node.handle_reset("reset_id").await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_block_by_number_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client.expect_block_ref_by_number().with(eq(10)).times(1).returning(|_| { + Ok(BlockInfo { + hash: B256::from([1u8; 32]), + number: 10, + parent_hash: B256::from([2u8; 32]), + timestamp: 12345, + }) + }); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let block = node.block_by_number(10).await.unwrap(); + assert_eq!(block.number, 10); + assert_eq!(block.hash, B256::from([1u8; 32])); + } + + #[tokio::test] + async fn test_fetch_receipts_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_fetch_receipts() + 
.withf(|hash| *hash == B256::from([1u8; 32])) + .times(1) + .returning(|_| Ok(Receipts::default())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let receipts = node.fetch_receipts(B256::from([1u8; 32])).await.unwrap(); + assert!(receipts.is_empty()); + } + + #[tokio::test] + async fn test_output_v0_at_timestamp_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_output_v0_at_timestamp() + .with(eq(12345)) + .times(1) + .returning(|_| Ok(OutputV0::default())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let output = node.output_v0_at_timestamp(12345).await.unwrap(); + assert_eq!(output, OutputV0::default()); + } + + #[tokio::test] + async fn test_pending_output_v0_at_timestamp_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_pending_output_v0_at_timestamp() + .with(eq(54321)) + .times(1) + .returning(|_| Ok(OutputV0::default())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + 
+ let output = node.pending_output_v0_at_timestamp(54321).await.unwrap(); + assert_eq!(output, OutputV0::default()); + } + + #[tokio::test] + async fn test_l2_block_ref_by_timestamp_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client.expect_l2_block_ref_by_timestamp().with(eq(11111)).times(1).returning(|_| { + Ok(BlockInfo { + hash: B256::from([9u8; 32]), + number: 99, + parent_hash: B256::from([8u8; 32]), + timestamp: 11111, + }) + }); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let block = node.l2_block_ref_by_timestamp(11111).await.unwrap(); + assert_eq!(block.number, 99); + assert_eq!(block.timestamp, 11111); + } + + #[tokio::test] + async fn test_update_finalized_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_update_finalized() + .withf(|block_id| block_id.number == 100) + .times(1) + .returning(|_| Ok(())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let block_id = BlockNumHash { number: 100, hash: B256::from([1u8; 32]) }; + let result = node.update_finalized(block_id).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_update_cross_unsafe_delegates_to_client() { + let mut client = MockClient::new(); + 
client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_update_cross_unsafe() + .withf(|block_id| block_id.number == 200) + .times(1) + .returning(|_| Ok(())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let block_id = BlockNumHash { number: 200, hash: B256::from([2u8; 32]) }; + let result = node.update_cross_unsafe(block_id).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_update_cross_safe_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_update_cross_safe() + .withf(|source, derived| source.number == 300 && derived.number == 301) + .times(1) + .returning(|_, _| Ok(())); + + let client = Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let source_block_id = BlockNumHash { number: 300, hash: B256::from([3u8; 32]) }; + let derived_block_id = BlockNumHash { number: 301, hash: B256::from([4u8; 32]) }; + let result = node.update_cross_safe(source_block_id, derived_block_id).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_invalidate_block_delegates_to_client() { + let mut client = MockClient::new(); + client.expect_chain_id().times(1).returning(|| Ok(ChainId::from(42u64))); + client + .expect_invalidate_block() + .withf(|seal| seal.number == 400) + .times(1) + .returning(|_| Ok(())); + + let client = 
Arc::new(client); + let db = Arc::new(MockDb::new()); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let block_seal = BlockSeal { number: 400, hash: B256::from([5u8; 32]), timestamp: 0 }; + let result = node.invalidate_block(block_seal).await; + assert!(result.is_ok()); + } + + #[tokio::test] + async fn test_reset_calls_resetter() { + let mut client = MockClient::new(); + client.expect_chain_id().times(2).returning(|| Ok(ChainId::from(42u64))); + client.expect_reset_pre_interop().times(1).returning(|| Ok(())); + + let mut db = MockDb::new(); + db.expect_latest_derivation_state() + .times(1) + .returning(|| Err(StorageError::DatabaseNotInitialised)); + + let client = Arc::new(client); + let db = Arc::new(db); + let asserter = Asserter::new(); + let transport = MockTransport::new(asserter.clone()); + let l1_provider = RootProvider::::new(RpcClient::new(transport, false)); + let (tx, _rx) = mpsc::channel(10); + let node = ManagedNode::new(client.clone(), db, l1_provider, tx); + + let result = node.reset().await; + assert!(result.is_ok()); + } +} diff --git a/kona/crates/supervisor/core/src/syncnode/resetter.rs b/rust/kona/crates/supervisor/core/src/syncnode/resetter.rs similarity index 100% rename from kona/crates/supervisor/core/src/syncnode/resetter.rs rename to rust/kona/crates/supervisor/core/src/syncnode/resetter.rs diff --git a/kona/crates/supervisor/core/src/syncnode/traits.rs b/rust/kona/crates/supervisor/core/src/syncnode/traits.rs similarity index 100% rename from kona/crates/supervisor/core/src/syncnode/traits.rs rename to rust/kona/crates/supervisor/core/src/syncnode/traits.rs diff --git a/kona/crates/supervisor/metrics/Cargo.toml b/rust/kona/crates/supervisor/metrics/Cargo.toml similarity index 100% rename from 
kona/crates/supervisor/metrics/Cargo.toml rename to rust/kona/crates/supervisor/metrics/Cargo.toml diff --git a/kona/crates/supervisor/metrics/src/lib.rs b/rust/kona/crates/supervisor/metrics/src/lib.rs similarity index 100% rename from kona/crates/supervisor/metrics/src/lib.rs rename to rust/kona/crates/supervisor/metrics/src/lib.rs diff --git a/kona/crates/supervisor/metrics/src/macros.rs b/rust/kona/crates/supervisor/metrics/src/macros.rs similarity index 100% rename from kona/crates/supervisor/metrics/src/macros.rs rename to rust/kona/crates/supervisor/metrics/src/macros.rs diff --git a/kona/crates/supervisor/metrics/src/reporter.rs b/rust/kona/crates/supervisor/metrics/src/reporter.rs similarity index 100% rename from kona/crates/supervisor/metrics/src/reporter.rs rename to rust/kona/crates/supervisor/metrics/src/reporter.rs diff --git a/kona/crates/supervisor/rpc/Cargo.toml b/rust/kona/crates/supervisor/rpc/Cargo.toml similarity index 100% rename from kona/crates/supervisor/rpc/Cargo.toml rename to rust/kona/crates/supervisor/rpc/Cargo.toml diff --git a/kona/crates/supervisor/rpc/README.md b/rust/kona/crates/supervisor/rpc/README.md similarity index 100% rename from kona/crates/supervisor/rpc/README.md rename to rust/kona/crates/supervisor/rpc/README.md diff --git a/kona/crates/supervisor/rpc/src/config.rs b/rust/kona/crates/supervisor/rpc/src/config.rs similarity index 100% rename from kona/crates/supervisor/rpc/src/config.rs rename to rust/kona/crates/supervisor/rpc/src/config.rs diff --git a/kona/crates/supervisor/rpc/src/jsonrpsee.rs b/rust/kona/crates/supervisor/rpc/src/jsonrpsee.rs similarity index 97% rename from kona/crates/supervisor/rpc/src/jsonrpsee.rs rename to rust/kona/crates/supervisor/rpc/src/jsonrpsee.rs index 494c6888d1a..c21c80c1ff7 100644 --- a/kona/crates/supervisor/rpc/src/jsonrpsee.rs +++ b/rust/kona/crates/supervisor/rpc/src/jsonrpsee.rs @@ -217,11 +217,12 @@ pub trait ManagedModeApi { #[method(name = "chainID")] async fn 
chain_id(&self) -> RpcResult; - /// Get the state_root, message_parser_storage_root, and block_hash at a given timestamp + /// Get the `state_root`, `message_parser_storage_root`, and `block_hash` at a given timestamp #[method(name = "outputV0AtTimestamp")] async fn output_v0_at_timestamp(&self, timestamp: u64) -> RpcResult; - /// Get the pending state_root, message_parser_storage_root, and block_hash at a given timestamp + /// Get the pending `state_root`, `message_parser_storage_root`, and `block_hash` at a given + /// timestamp #[method(name = "pendingOutputV0AtTimestamp")] async fn pending_output_v0_at_timestamp(&self, timestamp: u64) -> RpcResult; diff --git a/kona/crates/supervisor/rpc/src/lib.rs b/rust/kona/crates/supervisor/rpc/src/lib.rs similarity index 100% rename from kona/crates/supervisor/rpc/src/lib.rs rename to rust/kona/crates/supervisor/rpc/src/lib.rs diff --git a/kona/crates/supervisor/rpc/src/reqwest.rs b/rust/kona/crates/supervisor/rpc/src/reqwest.rs similarity index 100% rename from kona/crates/supervisor/rpc/src/reqwest.rs rename to rust/kona/crates/supervisor/rpc/src/reqwest.rs diff --git a/kona/crates/supervisor/rpc/src/response.rs b/rust/kona/crates/supervisor/rpc/src/response.rs similarity index 100% rename from kona/crates/supervisor/rpc/src/response.rs rename to rust/kona/crates/supervisor/rpc/src/response.rs diff --git a/kona/crates/supervisor/rpc/src/server.rs b/rust/kona/crates/supervisor/rpc/src/server.rs similarity index 100% rename from kona/crates/supervisor/rpc/src/server.rs rename to rust/kona/crates/supervisor/rpc/src/server.rs diff --git a/kona/crates/supervisor/service/Cargo.toml b/rust/kona/crates/supervisor/service/Cargo.toml similarity index 100% rename from kona/crates/supervisor/service/Cargo.toml rename to rust/kona/crates/supervisor/service/Cargo.toml diff --git a/kona/crates/supervisor/service/src/actors/metric.rs b/rust/kona/crates/supervisor/service/src/actors/metric.rs similarity index 100% rename from 
kona/crates/supervisor/service/src/actors/metric.rs rename to rust/kona/crates/supervisor/service/src/actors/metric.rs diff --git a/kona/crates/supervisor/service/src/actors/mod.rs b/rust/kona/crates/supervisor/service/src/actors/mod.rs similarity index 100% rename from kona/crates/supervisor/service/src/actors/mod.rs rename to rust/kona/crates/supervisor/service/src/actors/mod.rs diff --git a/rust/kona/crates/supervisor/service/src/actors/node.rs b/rust/kona/crates/supervisor/service/src/actors/node.rs new file mode 100644 index 00000000000..c16a14a83a2 --- /dev/null +++ b/rust/kona/crates/supervisor/service/src/actors/node.rs @@ -0,0 +1,361 @@ +use anyhow::Error; +use async_trait::async_trait; +use derive_more::Constructor; +use kona_interop::ManagedEvent; +use kona_supervisor_core::syncnode::{ + ManagedNodeClient, ManagedNodeCommand, ManagedNodeController, SubscriptionHandler, +}; +use std::sync::Arc; +use thiserror::Error; +use tokio::sync::mpsc; +use tokio_util::sync::CancellationToken; +use tracing::{error, info, warn}; + +use crate::{SupervisorActor, actors::utils::spawn_task_with_retry}; + +/// Actor for managing a node in the supervisor environment. 
+#[derive(Debug, Constructor)] +pub struct ManagedNodeActor { + client: Arc, + node: Arc, + command_rx: mpsc::Receiver, + cancel_token: CancellationToken, +} + +#[async_trait] +impl SupervisorActor for ManagedNodeActor +where + C: ManagedNodeClient + 'static, + N: ManagedNodeController + SubscriptionHandler + 'static, +{ + type InboundEvent = ManagedNodeCommand; + type Error = SupervisorRpcActorError; + + async fn start(mut self) -> Result<(), Self::Error> { + // Task 1: Subscription handling + let node = self.node.clone(); + let client = self.client.clone(); + let cancel_token = self.cancel_token.clone(); + + spawn_task_with_retry( + move || { + let handler = node.clone(); + let client = client.clone(); + + async move { run_subscription_task(client, handler).await } + }, + cancel_token, + usize::MAX, + ); + + // Task 2: Command handling + let node = self.node.clone(); + let cancel_token = self.cancel_token.clone(); + run_command_task(node, self.command_rx, cancel_token).await?; + Ok(()) + } +} + +async fn run_command_task( + node: Arc, + mut command_rx: mpsc::Receiver, + cancel_token: CancellationToken, +) -> Result<(), SupervisorRpcActorError> +where + N: ManagedNodeController + SubscriptionHandler + 'static, +{ + info!(target: "supervisor::syncnode_actor", "Starting command task for managed node"); + loop { + tokio::select! 
{ + _ = cancel_token.cancelled() => { + info!(target: "supervisor::syncnode", "Cancellation requested, shutting down command task"); + return Ok(()); + } + maybe_cmd = command_rx.recv() => { + match maybe_cmd { + Some(cmd) => { + match cmd { + ManagedNodeCommand::UpdateFinalized { block_id } => { + let result = node.update_finalized(block_id).await; + if let Err(err) = result { + warn!( + target: "supervisor::syncnode", + %err, + "Failed to update finalized block" + ); + } + } + ManagedNodeCommand::UpdateCrossUnsafe { block_id } => { + let result = node.update_cross_unsafe(block_id).await; + if let Err(err) = result { + warn!( + target: "supervisor::syncnode", + %err, + "Failed to update cross unsafe block" + ); + } + } + ManagedNodeCommand::UpdateCrossSafe { source_block_id, derived_block_id } => { + let result = node.update_cross_safe(source_block_id, derived_block_id).await; + if let Err(err) = result { + warn!( + target: "supervisor::syncnode", + %err, + "Failed to update cross safe block" + ); + } + } + ManagedNodeCommand::Reset {} => { + let result = node.reset().await; + if let Err(err) = result { + warn!( + target: "supervisor::syncnode", + %err, + "Failed to reset managed node" + ); + } + } + ManagedNodeCommand::InvalidateBlock { seal } => { + let result = node.invalidate_block(seal).await; + if let Err(err) = result { + warn!( + target: "supervisor::syncnode", + %err, + "Failed to invalidate block" + ); + } + } + } + } + None => { + info!(target: "supervisor::syncnode", "Command channel closed, shutting down command task"); + return Err(SupervisorRpcActorError::CommandReceiverClosed); + } + } + } + } + } +} + +async fn run_subscription_task( + client: Arc, + handler: Arc, +) -> Result<(), Error> { + info!(target: "supervisor::syncnode", "Starting subscription task for managed node"); + + let mut subscription = client.subscribe_events().await.inspect_err(|err| { + error!( + target: "supervisor::syncnode", + %err, + "Failed to subscribe to node events" + ); 
+ })?; + + loop { + tokio::select! { + incoming_event = subscription.next() => { + match incoming_event { + Some(Ok(subscription_event)) => { + if let Some(event) = subscription_event.data { + handle_subscription_event(&handler, event).await; + } + } + Some(Err(err)) => { + error!( + target: "supervisor::managed_event_task", + %err, + "Error in event deserialization" + ); + return Err(err.into()); + } + None => { + warn!(target: "supervisor::managed_event_task", "Subscription closed by server"); + client.reset_ws_client().await; + break; + } + } + } + } + } + Ok(()) +} + +async fn handle_subscription_event(handler: &Arc, event: ManagedEvent) { + if let Some(reset_id) = &event.reset && + let Err(err) = handler.handle_reset(reset_id).await + { + warn!( + target: "supervisor::syncnode", + %err, + %reset_id, + "Failed to handle reset event" + ); + } + + if let Some(unsafe_block) = &event.unsafe_block && + let Err(err) = handler.handle_unsafe_block(unsafe_block).await + { + warn!( + target: "supervisor::syncnode", + %err, + %unsafe_block, + "Failed to handle unsafe block event" + ); + } + + if let Some(derived_ref_pair) = &event.derivation_update && + event.derivation_origin_update.is_none() && + let Err(err) = handler.handle_derivation_update(derived_ref_pair).await + { + warn!( + target: "supervisor::syncnode", + %err, + %derived_ref_pair, + "Failed to handle derivation update event" + ); + } + + if let Some(origin) = &event.derivation_origin_update && + let Err(err) = handler.handle_derivation_origin_update(origin).await + { + warn!( + target: "supervisor::syncnode", + %err, + %origin, + "Failed to handle derivation origin update event" + ); + } + + if let Some(derived_ref_pair) = &event.exhaust_l1 && + let Err(err) = handler.handle_exhaust_l1(derived_ref_pair).await + { + warn!( + target: "supervisor::syncnode", + %err, + %derived_ref_pair, + "Failed to handle L1 exhaust event" + ); + } + + if let Some(replacement) = &event.replace_block && + let Err(err) = 
handler.handle_replace_block(replacement).await + { + warn!( + target: "supervisor::syncnode", + %err, + %replacement, + "Failed to handle block replacement event" + ); + } +} + +#[derive(Debug, Error)] +pub enum SupervisorRpcActorError { + /// Error indicating that command receiver is closed. + #[error("managed node command receiver closed")] + CommandReceiverClosed, +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_eips::BlockNumHash; + use alloy_primitives::{B256, ChainId}; + use jsonrpsee::core::client::Subscription; + use kona_interop::{BlockReplacement, DerivedRefPair}; + use kona_protocol::BlockInfo; + use kona_supervisor_core::syncnode::{ + ClientError, ManagedNodeClient, ManagedNodeCommand, ManagedNodeController, + ManagedNodeError, SubscriptionHandler, + }; + use kona_supervisor_types::{BlockSeal, OutputV0, Receipts, SubscriptionEvent}; + use mockall::{mock, predicate::*}; + use std::sync::Arc; + use tokio::sync::mpsc; + use tokio_util::sync::CancellationToken; + + // Mock the ManagedNodeController trait + mock! 
{ + #[derive(Debug)] + pub Node {} + + #[async_trait::async_trait] + impl ManagedNodeController for Node { + async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; + async fn update_cross_unsafe(&self, cross_unsafe_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; + async fn update_cross_safe(&self,source_block_id: BlockNumHash,derived_block_id: BlockNumHash) -> Result<(), ManagedNodeError>; + async fn reset(&self) -> Result<(), ManagedNodeError>; + async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ManagedNodeError>; + } + + #[async_trait::async_trait] + impl SubscriptionHandler for Node { + async fn handle_exhaust_l1(&self, derived_ref_pair: &DerivedRefPair) -> Result<(), ManagedNodeError>; + async fn handle_reset(&self, reset_id: &str) -> Result<(), ManagedNodeError>; + async fn handle_unsafe_block(&self, block: &BlockInfo) -> Result<(), ManagedNodeError>; + async fn handle_derivation_update(&self, derived_ref_pair: &DerivedRefPair) -> Result<(), ManagedNodeError>; + async fn handle_replace_block(&self, replacement: &BlockReplacement) -> Result<(), ManagedNodeError>; + async fn handle_derivation_origin_update(&self, origin: &BlockInfo) -> Result<(), ManagedNodeError>; + } + } + + mock! 
{ + #[derive(Debug)] + pub NodeClient {} + + #[async_trait::async_trait] + impl ManagedNodeClient for NodeClient { + async fn chain_id(&self) -> Result; + async fn subscribe_events(&self) -> Result, ClientError>; + async fn fetch_receipts(&self, block_hash: B256) -> Result; + async fn output_v0_at_timestamp(&self, timestamp: u64) -> Result; + async fn pending_output_v0_at_timestamp(&self, timestamp: u64)-> Result; + async fn l2_block_ref_by_timestamp(&self, timestamp: u64) -> Result; + async fn block_ref_by_number(&self, block_number: u64) -> Result; + async fn reset_pre_interop(&self) -> Result<(), ClientError>; + async fn reset( + &self, + unsafe_id: BlockNumHash, + cross_unsafe_id: BlockNumHash, + local_safe_id: BlockNumHash, + cross_safe_id: BlockNumHash, + finalised_id: BlockNumHash, + ) -> Result<(), ClientError>; + async fn invalidate_block(&self, seal: BlockSeal) -> Result<(), ClientError>; + async fn provide_l1(&self, block_info: BlockInfo) -> Result<(), ClientError>; + async fn update_finalized(&self, finalized_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn update_cross_unsafe(&self,cross_unsafe_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn update_cross_safe(&self,source_block_id: BlockNumHash,derived_block_id: BlockNumHash) -> Result<(), ClientError>; + async fn reset_ws_client(&self); + } + } + + #[tokio::test] + async fn test_run_command_task_update_finalized_and_reset() { + let mut mock_node = MockNode::new(); + mock_node.expect_update_finalized().times(1).returning(|_| Ok(())); + mock_node.expect_reset().times(1).returning(|| Ok(())); + + let node = Arc::new(mock_node); + let (tx, rx) = mpsc::channel(10); + let cancel_token = CancellationToken::new(); + + // Spawn the command task + let handle = tokio::spawn(super::run_command_task(node.clone(), rx, cancel_token.clone())); + + // Send commands + tx.send(ManagedNodeCommand::UpdateFinalized { + block_id: BlockNumHash::new(1, B256::random()), + }) + .await + .unwrap(); + 
tx.send(ManagedNodeCommand::Reset {}).await.unwrap(); + + // Drop the sender to close the channel and end the task + drop(tx); + + // Wait for the task to finish + let result = handle.await.unwrap(); + assert!(matches!(result, Err(SupervisorRpcActorError::CommandReceiverClosed))); + } +} diff --git a/kona/crates/supervisor/service/src/actors/processor.rs b/rust/kona/crates/supervisor/service/src/actors/processor.rs similarity index 100% rename from kona/crates/supervisor/service/src/actors/processor.rs rename to rust/kona/crates/supervisor/service/src/actors/processor.rs diff --git a/kona/crates/supervisor/service/src/actors/rpc.rs b/rust/kona/crates/supervisor/service/src/actors/rpc.rs similarity index 100% rename from kona/crates/supervisor/service/src/actors/rpc.rs rename to rust/kona/crates/supervisor/service/src/actors/rpc.rs diff --git a/kona/crates/supervisor/service/src/actors/traits.rs b/rust/kona/crates/supervisor/service/src/actors/traits.rs similarity index 100% rename from kona/crates/supervisor/service/src/actors/traits.rs rename to rust/kona/crates/supervisor/service/src/actors/traits.rs diff --git a/kona/crates/supervisor/service/src/actors/utils.rs b/rust/kona/crates/supervisor/service/src/actors/utils.rs similarity index 100% rename from kona/crates/supervisor/service/src/actors/utils.rs rename to rust/kona/crates/supervisor/service/src/actors/utils.rs diff --git a/kona/crates/supervisor/service/src/lib.rs b/rust/kona/crates/supervisor/service/src/lib.rs similarity index 100% rename from kona/crates/supervisor/service/src/lib.rs rename to rust/kona/crates/supervisor/service/src/lib.rs diff --git a/kona/crates/supervisor/service/src/service.rs b/rust/kona/crates/supervisor/service/src/service.rs similarity index 100% rename from kona/crates/supervisor/service/src/service.rs rename to rust/kona/crates/supervisor/service/src/service.rs diff --git a/rust/kona/crates/supervisor/storage/Cargo.toml b/rust/kona/crates/supervisor/storage/Cargo.toml new 
file mode 100644 index 00000000000..6239df045e5 --- /dev/null +++ b/rust/kona/crates/supervisor/storage/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "kona-supervisor-storage" +version = "0.1.0" + +edition.workspace = true +license.workspace = true +rust-version.workspace = true +authors.workspace = true +homepage.workspace = true +repository.workspace = true +keywords.workspace = true +categories.workspace = true +exclude.workspace = true + +[dependencies] +# Workspace +kona-protocol.workspace = true +kona-interop.workspace = true +kona-supervisor-types.workspace = true +kona-supervisor-metrics.workspace = true + +# Alloy +alloy-primitives = { workspace = true, features = ["map", "rlp", "serde", "rand"] } +alloy-eips = { workspace = true } + +# Op-Alloy +op-alloy-consensus.workspace = true + +# Misc +serde = { workspace = true, features = ["derive"] } +derive_more.workspace = true +bytes.workspace = true +modular-bitfield.workspace = true +thiserror.workspace = true +tracing.workspace = true +eyre.workspace = true +metrics.workspace = true + +#reth +reth-db-api = { workspace = true } +reth-db = { workspace = true, features = ["mdbx"] } +reth-codecs = { workspace = true } +reth-primitives-traits = { workspace = true } + +# HTTP client and TLS for remote signer +tokio = { workspace = true, features = ["full"] } + +[dev-dependencies] +tempfile = { workspace = true } +tokio.workspace = true +kona-cli.workspace = true + +[lints] +workspace = true diff --git a/kona/crates/supervisor/storage/src/chaindb.rs b/rust/kona/crates/supervisor/storage/src/chaindb.rs similarity index 99% rename from kona/crates/supervisor/storage/src/chaindb.rs rename to rust/kona/crates/supervisor/storage/src/chaindb.rs index 376f0fd6ac1..9eeca7416dc 100644 --- a/kona/crates/supervisor/storage/src/chaindb.rs +++ b/rust/kona/crates/supervisor/storage/src/chaindb.rs @@ -528,9 +528,9 @@ impl MetricsReporter for ChainDb { .env .view(|tx| { for table in 
crate::models::Tables::ALL.iter().map(crate::models::Tables::name) { - let table_db = tx.inner.open_db(Some(table))?; + let table_db = tx.inner().open_db(Some(table))?; - let stats = tx.inner.db_stat(&table_db)?; + let stats = tx.inner().db_stat(table_db.dbi())?; let page_size = stats.page_size() as usize; let leaf_pages = stats.leaf_pages(); diff --git a/kona/crates/supervisor/storage/src/chaindb_factory.rs b/rust/kona/crates/supervisor/storage/src/chaindb_factory.rs similarity index 94% rename from kona/crates/supervisor/storage/src/chaindb_factory.rs rename to rust/kona/crates/supervisor/storage/src/chaindb_factory.rs index db46f024848..d3ef793f9dd 100644 --- a/kona/crates/supervisor/storage/src/chaindb_factory.rs +++ b/rust/kona/crates/supervisor/storage/src/chaindb_factory.rs @@ -140,7 +140,7 @@ impl FinalizedL1Storage for ChainDbFactory { error!(target: "supervisor::storage", %err, "Failed to acquire read lock on finalized_l1"); StorageError::LockPoisoned })?; - guard.as_ref().cloned().ok_or(StorageError::FutureData) + guard.as_ref().copied().ok_or(StorageError::FutureData) } ) } @@ -158,15 +158,15 @@ impl FinalizedL1Storage for ChainDbFactory { })?; // Check if the new block number is greater than the current finalized block - if let Some(ref current) = *guard { - if block.number <= current.number { - error!(target: "supervisor::storage", - current_block_number = current.number, - new_block_number = block.number, - "New finalized block number is not greater than current finalized block number", - ); - return Err(StorageError::BlockOutOfOrder); - } + if let Some(ref current) = *guard + && block.number <= current.number + { + error!(target: "supervisor::storage", + current_block_number = current.number, + new_block_number = block.number, + "New finalized block number is not greater than current finalized block number", + ); + return Err(StorageError::BlockOutOfOrder); } *guard = Some(block); Ok(()) diff --git a/rust/kona/crates/supervisor/storage/src/error.rs 
b/rust/kona/crates/supervisor/storage/src/error.rs new file mode 100644 index 00000000000..909067117df --- /dev/null +++ b/rust/kona/crates/supervisor/storage/src/error.rs @@ -0,0 +1,99 @@ +use alloy_eips::BlockNumHash; +use reth_db::DatabaseError; +use thiserror::Error; + +/// Errors that may occur while interacting with supervisor log storage. +/// +/// This enum is used across all implementations of the Storage traits. +#[derive(Debug, Error)] +pub enum StorageError { + /// Represents a database error that occurred while interacting with storage. + #[error(transparent)] + Database(#[from] DatabaseError), + + /// Represents an error that occurred while initializing the database. + #[error(transparent)] + DatabaseInit(#[from] eyre::Report), + + /// Represents an error that occurred while writing to the database. + #[error("lock poisoned")] + LockPoisoned, + + /// The expected entry was not found in the database. + #[error(transparent)] + EntryNotFound(#[from] EntryNotFoundError), + + /// Represents an error that occurred while getting data that is not yet available. + #[error("data not yet available")] + FutureData, + + /// Represents an error that occurred when database is not initialized. + #[error("database not initialized")] + DatabaseNotInitialised, + + /// Represents a conflict occurred while attempting to write to the database. + #[error("conflicting data")] + ConflictError, + + /// Represents an error that occurred while writing to log database. + #[error("latest stored block is not parent of the incoming block")] + BlockOutOfOrder, + + /// Represents an error that occurred when there is inconsistency in log storage + #[error("reorg required due to inconsistent storage state")] + ReorgRequired, + + /// Represents an error that occurred when attempting to rewind log storage beyond the local + /// safe head. + #[error("rewinding log storage beyond local safe head. 
to: {to}, local_safe: {local_safe}")] + RewindBeyondLocalSafeHead { + /// The target block number to rewind to. + to: u64, + /// The local safe head block number. + local_safe: u64, + }, +} + +impl PartialEq for StorageError { + fn eq(&self, other: &Self) -> bool { + use StorageError::{ + ConflictError, Database, DatabaseInit, DatabaseNotInitialised, EntryNotFound, + }; + match (self, other) { + (Database(a), Database(b)) => format!("{a}") == format!("{b}"), + (DatabaseInit(a), DatabaseInit(b)) => format!("{a}") == format!("{b}"), + (EntryNotFound(a), EntryNotFound(b)) => a == b, + (DatabaseNotInitialised, DatabaseNotInitialised) | (ConflictError, ConflictError) => { + true + } + _ => false, + } + } +} + +impl Eq for StorageError {} + +/// Entry not found error. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum EntryNotFoundError { + /// No derived blocks found for given source block. + #[error("no derived blocks for source block, number: {}, hash: {}", .0.number, .0.hash)] + MissingDerivedBlocks(BlockNumHash), + + /// Expected source block not found. + #[error("source block not found, number: {0}")] + SourceBlockNotFound(u64), + + /// Expected derived block not found. + #[error("derived block not found, number: {0}")] + DerivedBlockNotFound(u64), + + /// Expected log not found. + #[error("log not found at block {block_number} index {log_index}")] + LogNotFound { + /// Block number. + block_number: u64, + /// Log index within the block. + log_index: u32, + }, +} diff --git a/rust/kona/crates/supervisor/storage/src/lib.rs b/rust/kona/crates/supervisor/storage/src/lib.rs new file mode 100644 index 00000000000..24e1a0c489a --- /dev/null +++ b/rust/kona/crates/supervisor/storage/src/lib.rs @@ -0,0 +1,44 @@ +//! Persistent storage for the Supervisor. +//! +//! This crate provides structured, append-only storage for the Supervisor, +//! exposing high-level APIs to write and query logs, block metadata, and +//! other execution states. +//! +//! 
The storage system is built on top of `reth-db`, using MDBX, +//! and defines schemas for supervisor-specific data like: +//! - L2 log entries +//! - Block ancestry metadata +//! - Source and Derived Blocks +//! - Chain heads for safety levels: **SAFE**, **UNSAFE**, and **CROSS-SAFE** +//! +//! +//! ## Capabilities +//! +//! - Append logs emitted by L2 execution +//! - Look up logs by block number and index +//! - Rewind logs during reorgs +//! - Track sealed blocks and ancestry metadata + +pub mod models; +pub use models::SourceBlockTraversal; + +mod error; +pub use error::{EntryNotFoundError, StorageError}; + +mod providers; + +mod chaindb; +pub use chaindb::ChainDb; + +mod metrics; +pub(crate) use metrics::Metrics; + +mod chaindb_factory; +pub use chaindb_factory::ChainDbFactory; + +mod traits; +pub use traits::{ + CrossChainSafetyProvider, DbReader, DerivationStorage, DerivationStorageReader, + DerivationStorageWriter, FinalizedL1Storage, HeadRefStorage, HeadRefStorageReader, + HeadRefStorageWriter, LogStorage, LogStorageReader, LogStorageWriter, StorageRewinder, +}; diff --git a/rust/kona/crates/supervisor/storage/src/metrics.rs b/rust/kona/crates/supervisor/storage/src/metrics.rs new file mode 100644 index 00000000000..56988aa5fab --- /dev/null +++ b/rust/kona/crates/supervisor/storage/src/metrics.rs @@ -0,0 +1,118 @@ +use alloy_primitives::ChainId; + +/// Container for `ChainDb` metrics. 
+#[derive(Debug, Clone)] +pub(crate) struct Metrics; + +// todo: implement this using the reth metrics for tables +impl Metrics { + pub(crate) const STORAGE_REQUESTS_SUCCESS_TOTAL: &'static str = + "kona_supervisor_storage_success_total"; + pub(crate) const STORAGE_REQUESTS_ERROR_TOTAL: &'static str = + "kona_supervisor_storage_error_total"; + pub(crate) const STORAGE_REQUEST_DURATION_SECONDS: &'static str = + "kona_supervisor_storage_duration_seconds"; + + pub(crate) const STORAGE_METHOD_DERIVED_TO_SOURCE: &'static str = "derived_to_source"; + pub(crate) const STORAGE_METHOD_LATEST_DERIVED_BLOCK_AT_SOURCE: &'static str = + "latest_derived_block_at_source"; + pub(crate) const STORAGE_METHOD_LATEST_DERIVATION_STATE: &'static str = + "latest_derivation_state"; + pub(crate) const STORAGE_METHOD_GET_SOURCE_BLOCK: &'static str = "get_source_block"; + pub(crate) const STORAGE_METHOD_GET_ACTIVATION_BLOCK: &'static str = "get_activation_block"; + pub(crate) const STORAGE_METHOD_INITIALISE_DERIVATION_STORAGE: &'static str = + "initialise_derivation_storage"; + pub(crate) const STORAGE_METHOD_SAVE_DERIVED_BLOCK: &'static str = "save_derived_block"; + pub(crate) const STORAGE_METHOD_SAVE_SOURCE_BLOCK: &'static str = "save_source_block"; + pub(crate) const STORAGE_METHOD_GET_LATEST_BLOCK: &'static str = "get_latest_block"; + pub(crate) const STORAGE_METHOD_GET_BLOCK: &'static str = "get_block"; + pub(crate) const STORAGE_METHOD_GET_LOG: &'static str = "get_log"; + pub(crate) const STORAGE_METHOD_GET_LOGS: &'static str = "get_logs"; + pub(crate) const STORAGE_METHOD_INITIALISE_LOG_STORAGE: &'static str = "initialise_log_storage"; + pub(crate) const STORAGE_METHOD_STORE_BLOCK_LOGS: &'static str = "store_block_logs"; + pub(crate) const STORAGE_METHOD_GET_SAFETY_HEAD_REF: &'static str = "get_safety_head_ref"; + pub(crate) const STORAGE_METHOD_GET_SUPER_HEAD: &'static str = "get_super_head"; + pub(crate) const STORAGE_METHOD_UPDATE_FINALIZED_USING_SOURCE: &'static str = + 
"update_finalized_using_source"; + pub(crate) const STORAGE_METHOD_UPDATE_CURRENT_CROSS_UNSAFE: &'static str = + "update_current_cross_unsafe"; + pub(crate) const STORAGE_METHOD_UPDATE_CURRENT_CROSS_SAFE: &'static str = + "update_current_cross_safe"; + pub(crate) const STORAGE_METHOD_UPDATE_FINALIZED_L1: &'static str = "update_finalized_l1"; + pub(crate) const STORAGE_METHOD_GET_FINALIZED_L1: &'static str = "get_finalized_l1"; + pub(crate) const STORAGE_METHOD_REWIND_LOG_STORAGE: &'static str = "rewind_log_storage"; + pub(crate) const STORAGE_METHOD_REWIND: &'static str = "rewind"; + pub(crate) const STORAGE_METHOD_REWIND_TO_SOURCE: &'static str = "rewind_to_source"; + + pub(crate) fn init(chain_id: ChainId) { + Self::describe(); + Self::zero(chain_id); + } + + fn describe() { + metrics::describe_counter!( + Self::STORAGE_REQUESTS_SUCCESS_TOTAL, + metrics::Unit::Count, + "Total number of successful Kona Supervisor Storage requests" + ); + metrics::describe_counter!( + Self::STORAGE_REQUESTS_ERROR_TOTAL, + metrics::Unit::Count, + "Total number of failed Kona Supervisor Storage requests" + ); + metrics::describe_histogram!( + Self::STORAGE_REQUEST_DURATION_SECONDS, + metrics::Unit::Seconds, + "Duration of Kona Supervisor Storage requests" + ); + } + + fn zero_storage_methods(chain_id: ChainId, method_name: &'static str) { + metrics::counter!( + Self::STORAGE_REQUESTS_SUCCESS_TOTAL, + "method" => method_name, + "chain_id" => chain_id.to_string() + ) + .increment(0); + + metrics::counter!( + Self::STORAGE_REQUESTS_ERROR_TOTAL, + "method" => method_name, + "chain_id" => chain_id.to_string() + ) + .increment(0); + + metrics::histogram!( + Self::STORAGE_REQUEST_DURATION_SECONDS, + "method" => method_name, + "chain_id" => chain_id.to_string() + ) + .record(0.0); + } + + fn zero(chain_id: ChainId) { + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_DERIVED_TO_SOURCE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_LATEST_DERIVED_BLOCK_AT_SOURCE); + 
Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_LATEST_DERIVATION_STATE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SOURCE_BLOCK); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_INITIALISE_DERIVATION_STORAGE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_SAVE_DERIVED_BLOCK); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_SAVE_SOURCE_BLOCK); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LATEST_BLOCK); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_BLOCK); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LOG); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_LOGS); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_INITIALISE_LOG_STORAGE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_STORE_BLOCK_LOGS); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SAFETY_HEAD_REF); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_SUPER_HEAD); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_FINALIZED_USING_SOURCE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_CURRENT_CROSS_UNSAFE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_CURRENT_CROSS_SAFE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_UPDATE_FINALIZED_L1); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_GET_FINALIZED_L1); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND_LOG_STORAGE); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND); + Self::zero_storage_methods(chain_id, Self::STORAGE_METHOD_REWIND_TO_SOURCE); + } +} diff --git a/kona/crates/supervisor/storage/src/models/block.rs b/rust/kona/crates/supervisor/storage/src/models/block.rs similarity index 100% rename from kona/crates/supervisor/storage/src/models/block.rs rename to rust/kona/crates/supervisor/storage/src/models/block.rs diff --git 
a/kona/crates/supervisor/storage/src/models/common.rs b/rust/kona/crates/supervisor/storage/src/models/common.rs similarity index 100% rename from kona/crates/supervisor/storage/src/models/common.rs rename to rust/kona/crates/supervisor/storage/src/models/common.rs diff --git a/kona/crates/supervisor/storage/src/models/derivation.rs b/rust/kona/crates/supervisor/storage/src/models/derivation.rs similarity index 100% rename from kona/crates/supervisor/storage/src/models/derivation.rs rename to rust/kona/crates/supervisor/storage/src/models/derivation.rs diff --git a/kona/crates/supervisor/storage/src/models/head_ref.rs b/rust/kona/crates/supervisor/storage/src/models/head_ref.rs similarity index 100% rename from kona/crates/supervisor/storage/src/models/head_ref.rs rename to rust/kona/crates/supervisor/storage/src/models/head_ref.rs diff --git a/kona/crates/supervisor/storage/src/models/log.rs b/rust/kona/crates/supervisor/storage/src/models/log.rs similarity index 97% rename from kona/crates/supervisor/storage/src/models/log.rs rename to rust/kona/crates/supervisor/storage/src/models/log.rs index 8e43a312f1d..0ab2787de56 100644 --- a/kona/crates/supervisor/storage/src/models/log.rs +++ b/rust/kona/crates/supervisor/storage/src/models/log.rs @@ -11,6 +11,7 @@ use alloy_primitives::B256; use bytes::{Buf, BufMut}; use kona_supervisor_types::{ExecutingMessage, Log}; use reth_codecs::Compact; +use reth_primitives_traits::ValueWithSubKey; use serde::{Deserialize, Serialize}; /// Metadata associated with a single emitted log. 
@@ -66,18 +67,24 @@ impl Compact for LogEntry { let hash = B256::from_slice(&buf[..32]); buf.advance(32); - let executing_message = if has_msg { + let executing_message = has_msg.then(|| { let (msg, rest) = ExecutingMessageEntry::from_compact(buf, buf.len()); buf = rest; - Some(msg) - } else { - None - }; + msg + }); (Self { index, hash, executing_message }, buf) } } +impl ValueWithSubKey for LogEntry { + type SubKey = u32; + + fn get_subkey(&self) -> Self::SubKey { + self.index + } +} + /// Conversion from [`Log`] to [`LogEntry`] used for internal storage. /// /// Maps fields 1:1, converting `executing_message` using `Into`. diff --git a/kona/crates/supervisor/storage/src/models/mod.rs b/rust/kona/crates/supervisor/storage/src/models/mod.rs similarity index 100% rename from kona/crates/supervisor/storage/src/models/mod.rs rename to rust/kona/crates/supervisor/storage/src/models/mod.rs diff --git a/kona/crates/supervisor/storage/src/providers/derivation_provider.rs b/rust/kona/crates/supervisor/storage/src/providers/derivation_provider.rs similarity index 98% rename from kona/crates/supervisor/storage/src/providers/derivation_provider.rs rename to rust/kona/crates/supervisor/storage/src/providers/derivation_provider.rs index 5f342c8ed68..af0099c35f6 100644 --- a/kona/crates/supervisor/storage/src/providers/derivation_provider.rs +++ b/rust/kona/crates/supervisor/storage/src/providers/derivation_provider.rs @@ -322,16 +322,15 @@ where if incoming_pair == stored_pair.into() { return Ok(()); - } else { - warn!( - target: "supervisor::storage", - chain_id = %self.chain_id, - %latest_derivation_state, - incoming_derived_block_pair = %incoming_pair, - "Incoming derived block is not consistent with the latest stored derived block" - ); - return Err(StorageError::ConflictError); } + warn!( + target: "supervisor::storage", + chain_id = %self.chain_id, + %latest_derivation_state, + incoming_derived_block_pair = %incoming_pair, + "Incoming derived block is not consistent with 
the latest stored derived block" + ); + return Err(StorageError::ConflictError); } // Latest source block must be same as the incoming source block @@ -457,16 +456,15 @@ where if source_block == incoming_source { return Ok(()); - } else { - error!( - target: "supervisor::storage", - chain_id = %self.chain_id, - latest_source_block = %latest_source_block, - incoming_source = %incoming_source, - "Incoming source block is not consistent with the latest source block" - ); - return Err(StorageError::ConflictError); } + error!( + target: "supervisor::storage", + chain_id = %self.chain_id, + latest_source_block = %latest_source_block, + incoming_source = %incoming_source, + "Incoming source block is not consistent with the latest source block" + ); + return Err(StorageError::ConflictError); } if !latest_source_block.is_parent_of(&incoming_source) { diff --git a/kona/crates/supervisor/storage/src/providers/head_ref_provider.rs b/rust/kona/crates/supervisor/storage/src/providers/head_ref_provider.rs similarity index 95% rename from kona/crates/supervisor/storage/src/providers/head_ref_provider.rs rename to rust/kona/crates/supervisor/storage/src/providers/head_ref_provider.rs index fe71f13b61f..ea048e58056 100644 --- a/kona/crates/supervisor/storage/src/providers/head_ref_provider.rs +++ b/rust/kona/crates/supervisor/storage/src/providers/head_ref_provider.rs @@ -51,18 +51,18 @@ where ) -> Result<(), StorageError> { // Ensure the block_info.number is greater than the stored head reference // If the head reference is not set, this check will be skipped. 
- if let Ok(current_head_ref) = self.get_safety_head_ref(safety_level) { - if current_head_ref.number > incoming_head_ref.number { - warn!( - target: "supervisor::storage", - chain_id = %self.chain_id, - %current_head_ref, - %incoming_head_ref, - %safety_level, - "Attempting to update head reference with a block that has a lower number than the current head reference", - ); - return Ok(()); - } + if let Ok(current_head_ref) = self.get_safety_head_ref(safety_level) && + current_head_ref.number > incoming_head_ref.number + { + warn!( + target: "supervisor::storage", + chain_id = %self.chain_id, + %current_head_ref, + %incoming_head_ref, + %safety_level, + "Attempting to update head reference with a block that has a lower number than the current head reference", + ); + return Ok(()); } self.tx diff --git a/kona/crates/supervisor/storage/src/providers/log_provider.rs b/rust/kona/crates/supervisor/storage/src/providers/log_provider.rs similarity index 98% rename from kona/crates/supervisor/storage/src/providers/log_provider.rs rename to rust/kona/crates/supervisor/storage/src/providers/log_provider.rs index 3254cfb722a..0fb5d442685 100644 --- a/kona/crates/supervisor/storage/src/providers/log_provider.rs +++ b/rust/kona/crates/supervisor/storage/src/providers/log_provider.rs @@ -1,7 +1,7 @@ //! Reth's MDBX-backed abstraction of [`LogProvider`] for superchain state. //! //! This module provides the [`LogProvider`] struct, which uses the -//! [`reth-db`] abstraction of reth to store execution logs +//! `reth-db` abstraction of reth to store execution logs //! and block metadata required by the Optimism supervisor. //! //! 
It supports: @@ -452,17 +452,13 @@ mod tests { Log { index: log_index, hash: B256::from([log_index as u8; 32]), - executing_message: if with_msg { - Some(ExecutingMessage { - chain_id: 10, - block_number: 999, - log_index: 7, - hash: B256::from([0x44; 32]), - timestamp: 88888, - }) - } else { - None - }, + executing_message: with_msg.then_some(ExecutingMessage { + chain_id: 10, + block_number: 999, + log_index: 7, + hash: B256::from([0x44; 32]), + timestamp: 88888, + }), } } diff --git a/kona/crates/supervisor/storage/src/providers/mod.rs b/rust/kona/crates/supervisor/storage/src/providers/mod.rs similarity index 100% rename from kona/crates/supervisor/storage/src/providers/mod.rs rename to rust/kona/crates/supervisor/storage/src/providers/mod.rs diff --git a/rust/kona/crates/supervisor/storage/src/traits.rs b/rust/kona/crates/supervisor/storage/src/traits.rs new file mode 100644 index 00000000000..5880b913c44 --- /dev/null +++ b/rust/kona/crates/supervisor/storage/src/traits.rs @@ -0,0 +1,475 @@ +use crate::StorageError; +use alloy_eips::eip1898::BlockNumHash; +use alloy_primitives::ChainId; +use kona_interop::DerivedRefPair; +use kona_protocol::BlockInfo; +use kona_supervisor_types::{Log, SuperHead}; +use op_alloy_consensus::interop::SafetyLevel; +use std::fmt::Debug; + +/// Provides an interface for supervisor storage to manage source and derived blocks. +/// +/// Defines methods to retrieve derived block information, +/// enabling the supervisor to track the derivation progress. +/// +/// Implementations are expected to provide persistent and thread-safe access to block data. +pub trait DerivationStorageReader: Debug { + /// Gets the source [`BlockInfo`] for a given derived block [`BlockNumHash`]. + /// + /// NOTE: [`LocalUnsafe`] block is not pushed to L1 yet, hence it cannot be part of derivation + /// storage. + /// + /// # Arguments + /// * `derived_block_id` - The identifier (number and hash) of the derived (L2) block. 
+ /// + /// # Returns + /// * `Ok(BlockInfo)` containing the source block information if it exists. + /// * `Err(StorageError)` if there is an issue retrieving the source block. + /// + /// [`LocalUnsafe`]: SafetyLevel::LocalUnsafe + fn derived_to_source(&self, derived_block_id: BlockNumHash) -> Result; + + /// Gets the latest derived [`BlockInfo`] associated with the given source block + /// [`BlockNumHash`]. + /// + /// # Arguments + /// * `source_block_id` - The identifier (number and hash) of the L1 source block. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the latest derived block information if it exists. + /// * `Err(StorageError)` if there is an issue retrieving the derived block. + fn latest_derived_block_at_source( + &self, + source_block_id: BlockNumHash, + ) -> Result; + + /// Gets the latest derivation state [`DerivedRefPair`] from the storage, which includes the + /// latest source block and the latest derived block. + /// + /// # Returns + /// + /// * `Ok(DerivedRefPair)` containing the latest derived block pair if it exists. + /// * `Err(StorageError)` if there is an issue retrieving the pair. + fn latest_derivation_state(&self) -> Result; + + /// Gets the source block for the given source block number. + /// + /// # Arguments + /// * `source_block_number` - The number of the source block to retrieve. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the source block information if it exists. + /// * `Err(StorageError)` if there is an issue retrieving the source block. + fn get_source_block(&self, source_block_number: u64) -> Result; + + /// Gets the interop activation [`BlockInfo`]. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the activation block information if it exists. + /// * `Err(StorageError)` if there is an issue retrieving the activation block. + fn get_activation_block(&self) -> Result; +} + +/// Provides an interface for supervisor storage to write source and derived blocks. 
+/// +/// Defines methods to persist derived block information, +/// enabling the supervisor to track the derivation progress. +/// +/// Implementations are expected to provide persistent and thread-safe access to block data. +pub trait DerivationStorageWriter: Debug { + /// Initializes the derivation storage with a given [`DerivedRefPair`]. + /// This method is typically called once to set up the storage with the initial pair. + /// + /// # Arguments + /// * `incoming_pair` - The derived block pair to initialize the storage with. + /// + /// # Returns + /// * `Ok(())` if the storage was successfully initialized. + /// * `Err(StorageError)` if there is an issue initializing the storage. + fn initialise_derivation_storage( + &self, + incoming_pair: DerivedRefPair, + ) -> Result<(), StorageError>; + + /// Saves a [`DerivedRefPair`] to the storage. + /// + /// This method is **append-only**: it does not overwrite existing pairs. + /// - If a pair with the same block number already exists and is identical to the incoming pair, + /// the request is silently ignored (idempotent). + /// - If a pair with the same block number exists but differs from the incoming pair, an error + /// is returned to indicate a data inconsistency. + /// - If the pair is new and consistent, it is appended to the storage. + /// + /// Ensures that the latest stored pair is the parent of the incoming pair before saving. + /// + /// # Arguments + /// * `incoming_pair` - The derived block pair to save. + /// + /// # Returns + /// * `Ok(())` if the pair was successfully saved. + /// * `Err(StorageError)` if there is an issue saving the pair. + fn save_derived_block(&self, incoming_pair: DerivedRefPair) -> Result<(), StorageError>; + + /// Saves the latest incoming source [`BlockInfo`] to the storage. + /// + /// This method is **append-only**: it does not overwrite existing source blocks. 
+ /// - If a source block with the same number already exists and is identical to the incoming + /// block, the request is silently ignored (idempotent). + /// - If a source block with the same number exists but differs from the incoming block, an + /// error is returned to indicate a data inconsistency. + /// - If the block is new and consistent, it is appended to the storage. + /// + /// Ensures that the latest stored source block is the parent of the incoming block before + /// saving. + /// + /// # Arguments + /// * `source` - The source block to save. + /// + /// # Returns + /// * `Ok(())` if the source block was successfully saved. + /// * `Err(StorageError)` if there is an issue saving the source block. + fn save_source_block(&self, source: BlockInfo) -> Result<(), StorageError>; +} + +/// Combines both reading and writing capabilities for derivation storage. +/// +/// Any type that implements both [`DerivationStorageReader`] and [`DerivationStorageWriter`] +/// automatically implements this trait. +pub trait DerivationStorage: DerivationStorageReader + DerivationStorageWriter {} + +impl DerivationStorage for T {} + +/// Provides an interface for retrieving logs associated with blocks. +/// +/// This trait defines methods to retrieve the latest block, +/// find a block by a specific log, and retrieve logs for a given block number. +/// +/// Implementations are expected to provide persistent and thread-safe access to block logs. +pub trait LogStorageReader: Debug { + /// Retrieves the latest [`BlockInfo`] from the storage. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the latest block information. + /// * `Err(StorageError)` if there is an issue retrieving the latest block. + fn get_latest_block(&self) -> Result; + + /// Retrieves the [`BlockInfo`] from the storage for a given block number + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the block information. + /// * `Err(StorageError)` if there is an issue retrieving the block. 
+ fn get_block(&self, block_number: u64) -> Result; + + /// Finds a [`Log`] by `block_number` and `log_index` + /// + /// # Arguments + /// * `block_number` - The block number to search for the log. + /// * `log_index` - The index of the log within the block. + /// + /// # Returns + /// * `Ok(Log)` containing the [`Log`] object. + /// * `Err(StorageError)` if there is an issue retrieving the log or if the log is not found. + fn get_log(&self, block_number: u64, log_index: u32) -> Result; + + /// Retrieves all [`Log`]s associated with a specific block number. + /// + /// # Arguments + /// * `block_number` - The block number for which to retrieve logs. + /// + /// # Returns + /// * `Ok(Vec)` containing the logs associated with the block number. + /// * `Err(StorageError)` if there is an issue retrieving the logs or if no logs are found. + fn get_logs(&self, block_number: u64) -> Result, StorageError>; +} + +/// Provides an interface for storing blocks and logs associated with blocks. +/// +/// Implementations are expected to provide persistent and thread-safe access to block logs. +pub trait LogStorageWriter: Send + Sync + Debug { + /// Initializes the log storage with a given [`BlockInfo`]. + /// This method is typically called once to set up the storage with the initial block. + /// + /// # Arguments + /// * `block` - The [`BlockInfo`] to initialize the storage with. + /// + /// # Returns + /// * `Ok(())` if the storage was successfully initialized. + /// * `Err(StorageError)` if there is an issue initializing the storage. + fn initialise_log_storage(&self, block: BlockInfo) -> Result<(), StorageError>; + + /// Stores [`BlockInfo`] and [`Log`]s in the storage. + /// This method is append-only and does not overwrite existing logs. + /// Ensures that the latest stored block is the parent of the incoming block before saving. + /// + /// # Arguments + /// * `block` - [`BlockInfo`] to associate with the logs. 
+ /// * `logs` - The [`Log`] events associated with the block. + /// + /// # Returns + /// * `Ok(())` if the logs were successfully stored. + /// * `Err(StorageError)` if there is an issue storing the logs. + fn store_block_logs(&self, block: &BlockInfo, logs: Vec) -> Result<(), StorageError>; +} + +/// Combines both reading and writing capabilities for log storage. +/// +/// Any type that implements both [`LogStorageReader`] and [`LogStorageWriter`] +/// automatically implements this trait. +pub trait LogStorage: LogStorageReader + LogStorageWriter {} + +impl LogStorage for T {} + +/// Provides an interface for retrieving head references. +/// +/// This trait defines methods to manage safety head references for different safety levels. +/// Each safety level maintains a reference to a block. +/// +/// Implementations are expected to provide persistent and thread-safe access to safety head +/// references. +pub trait HeadRefStorageReader: Debug { + /// Retrieves the current [`BlockInfo`] for a given [`SafetyLevel`]. + /// + /// # Arguments + /// * `safety_level` - The safety level for which to retrieve the head reference. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the current safety head reference. + /// * `Err(StorageError)` if there is an issue retrieving the reference. + fn get_safety_head_ref(&self, safety_level: SafetyLevel) -> Result; + + /// Retrieves the super head reference from the storage. + /// + /// # Returns + /// * `Ok(SuperHead)` containing the super head reference. + /// * `Err(StorageError)` if there is an issue retrieving the super head reference. + fn get_super_head(&self) -> Result; +} + +/// Provides an interface for storing head references. +/// +/// This trait defines methods to manage safety head references for different safety levels. +/// Each safety level maintains a reference to a block. +/// +/// Implementations are expected to provide persistent and thread-safe access to safety head +/// references. 
+pub trait HeadRefStorageWriter: Debug { + /// Updates the finalized head reference using a finalized source(l1) block. + /// + /// # Arguments + /// * `source_block` - The [`BlockInfo`] of the source block to use for the update. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the updated finalized derived(l2) block information. + /// * `Err(StorageError)` if there is an issue updating the finalized head reference. + fn update_finalized_using_source( + &self, + finalized_source_block: BlockInfo, + ) -> Result; + + /// Updates the current [`CrossUnsafe`](SafetyLevel::CrossUnsafe) head reference in storage. + /// + /// Ensures the provided block still exists in log storage and was not removed due to a re-org. + /// If the stored block's hash does not match the provided block, the update is aborted. + /// # Arguments + /// * `block` - The [`BlockInfo`] to set as the head reference + /// + /// # Returns + /// * `Ok(())` if the reference was successfully updated. + /// * `Err(StorageError)` if there is an issue updating the reference. + fn update_current_cross_unsafe(&self, block: &BlockInfo) -> Result<(), StorageError>; + + /// Updates the current [`CrossSafe`](SafetyLevel::CrossSafe) head reference in storage and + /// returns the corresponding derived pair. + /// + /// Ensures the provided block still exists in derivation storage and was not removed due to a + /// re-org. # Arguments + /// * `block` - The [`BlockInfo`] to set as the head reference + /// + /// # Returns + /// * `Ok(DerivedRefPair)` if the reference was successfully updated. + /// * `Err(StorageError)` if there is an issue updating the reference. + fn update_current_cross_safe(&self, block: &BlockInfo) -> Result; +} + +/// Combines both reading and writing capabilities for safety head ref storage. +/// +/// Any type that implements both [`HeadRefStorageReader`] and [`HeadRefStorageWriter`] +/// automatically implements this trait. 
+pub trait HeadRefStorage: HeadRefStorageReader + HeadRefStorageWriter {} + +impl HeadRefStorage for T {} + +/// Provides an interface for managing the finalized L1 block reference in the storage. +/// +/// This trait defines methods to update and retrieve the finalized L1 block reference. +pub trait FinalizedL1Storage { + /// Updates the finalized L1 block reference in the storage. + /// + /// # Arguments + /// * `block` - The new [`BlockInfo`] to set as the finalized L1 block reference. + /// + /// # Returns + /// * `Ok(())` if the reference was successfully updated. + /// * `Err(StorageError)` if there is an issue updating the reference. + fn update_finalized_l1(&self, block: BlockInfo) -> Result<(), StorageError>; + + /// Retrieves the finalized L1 block reference from the storage. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the finalized L1 block reference. + /// * `Err(StorageError)` if there is an issue retrieving the reference. + fn get_finalized_l1(&self) -> Result; +} + +/// Provides an interface for retrieving block and safety information across multiple chains. +/// +/// This trait defines methods required by the cross-chain safety checker to access +/// block metadata, logs, and safe head references for various chains. +pub trait CrossChainSafetyProvider { + /// Retrieves the [`BlockInfo`] for a given block number on the specified chain. + /// + /// # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `block_number` - The number of the block to retrieve. + /// + /// # Returns + /// * `Ok(BlockInfo)` containing the block metadata if available. + /// * `Err(StorageError)` if there is an issue fetching the block. + fn get_block(&self, chain_id: ChainId, block_number: u64) -> Result; + + /// Retrieves a [`Log`] by `block_number` and `log_index` + /// + /// # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `block_number` - The block number to search for the log. 
+ /// * `log_index` - The index of the log within the block. + /// + /// # Returns + /// * `Ok(Log)` containing the [`Log`] object. + /// * `Err(StorageError)` if there is an issue retrieving the log or if the log is not found. + fn get_log( + &self, + chain_id: ChainId, + block_number: u64, + log_index: u32, + ) -> Result; + + /// Retrieves all logs associated with the specified block on the given chain. + /// + /// # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `block_number` - The number of the block whose logs should be retrieved. + /// + /// # Returns + /// * `Ok(Vec)` containing all logs for the block. + /// * `Err(StorageError)` if there is an issue fetching the logs. + fn get_block_logs( + &self, + chain_id: ChainId, + block_number: u64, + ) -> Result, StorageError>; + + /// Retrieves the latest known safe head reference for a given chain at the specified safety + /// level. + /// + /// # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `level` - The desired [`SafetyLevel`] (e.g., `CrossSafe`, `LocalSafe`). + /// + /// # Returns + /// * `Ok(BlockInfo)` representing the safe head block at the requested safety level. + /// * `Err(StorageError)` if the safe head cannot be retrieved. + fn get_safety_head_ref( + &self, + chain_id: ChainId, + level: SafetyLevel, + ) -> Result; + + /// Updates the current [`CrossUnsafe`](SafetyLevel::CrossUnsafe) head reference in storage. + /// + /// Ensures the provided block still exists in log storage and was not removed due to a re-org. + /// If the stored block's hash does not match the provided block, the update is aborted. + /// # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `block` - The [`BlockInfo`] to set as the head reference + /// + /// # Returns + /// * `Ok(())` if the reference was successfully updated. + /// * `Err(StorageError)` if there is an issue updating the reference. 
+ fn update_current_cross_unsafe( + &self, + chain_id: ChainId, + block: &BlockInfo, + ) -> Result<(), StorageError>; + + /// Updates the current [`CrossSafe`](SafetyLevel::CrossSafe) head reference in storage and + /// returns the corresponding derived pair. + /// + /// Ensures the provided block still exists in derivation storage and was not removed due to a + /// re-org. # Arguments + /// * `chain_id` - The [`ChainId`] of the target chain. + /// * `block` - The [`BlockInfo`] to set as the head reference + /// + /// # Returns + /// * `Ok(DerivedRefPair)` if the reference was successfully updated. + /// * `Err(StorageError)` if there is an issue updating the reference. + fn update_current_cross_safe( + &self, + chain_id: ChainId, + block: &BlockInfo, + ) -> Result; +} + +/// Trait for rewinding supervisor-related state in the database. +/// +/// This trait provides an interface to revert persisted log data, derivation records, +/// and safety head references from the latest block back to a specified block number (inclusive). +/// It is typically used during chain reorganizations or when invalid blocks are detected and need +/// to be rolled back. +pub trait StorageRewinder { + /// Rewinds the log storage from the latest block down to the specified block (inclusive). + /// This method ensures that log storage is never rewound to(since it's inclusive) and beyond + /// the local safe head. If the target block is beyond the local safe head, an error is + /// returned. Use [`StorageRewinder::rewind`] to rewind to and beyond the local safe head. + /// + /// # Arguments + /// * `to` - The block id to rewind to. + /// + /// # Errors + /// Returns a [`StorageError`] if any database operation fails during the rewind. + fn rewind_log_storage(&self, to: &BlockNumHash) -> Result<(), StorageError>; + + /// Rewinds all supervisor-managed state (log storage, derivation, and safety head refs) + /// from the latest block back to the given block (inclusive). 
+ /// + /// This method performs a coordinated rewind across all components, ensuring consistency + /// of supervisor state after chain reorganizations or rollback of invalid blocks. + /// + /// # Arguments + /// * `to` - The target block id to rewind to. Rewind is performed from the latest block down to + /// this block. + /// + /// # Errors + /// Returns a [`StorageError`] if any part of the rewind process fails. + fn rewind(&self, to: &BlockNumHash) -> Result<(), StorageError>; + + /// Rewinds the storage to a specific source block (inclusive), ensuring that all derived blocks + /// and logs associated with that source blocks are also reverted. + /// + /// # Arguments + /// * `to` - The source block [`BlockNumHash`] to rewind to. + /// + /// # Returns + /// * [`BlockInfo`] of the derived block that was rewound to, or `None` if no derived blocks + /// were found. + /// * `Err(StorageError)` if there is an issue during the rewind operation. + fn rewind_to_source(&self, to: &BlockNumHash) -> Result, StorageError>; +} + +/// Combines the reader traits for the database. +/// +/// Any type that implements [`DerivationStorageReader`], [`HeadRefStorageReader`], and +/// [`LogStorageReader`] automatically implements this trait. 
+pub trait DbReader: DerivationStorageReader + HeadRefStorageReader + LogStorageReader {} + +impl DbReader for T {} diff --git a/kona/crates/supervisor/types/Cargo.toml b/rust/kona/crates/supervisor/types/Cargo.toml similarity index 100% rename from kona/crates/supervisor/types/Cargo.toml rename to rust/kona/crates/supervisor/types/Cargo.toml diff --git a/kona/crates/supervisor/types/README.md b/rust/kona/crates/supervisor/types/README.md similarity index 100% rename from kona/crates/supervisor/types/README.md rename to rust/kona/crates/supervisor/types/README.md diff --git a/rust/kona/crates/supervisor/types/src/access_list.rs b/rust/kona/crates/supervisor/types/src/access_list.rs new file mode 100644 index 00000000000..9928f5fa9b4 --- /dev/null +++ b/rust/kona/crates/supervisor/types/src/access_list.rs @@ -0,0 +1,396 @@ +use alloy_primitives::{B256, keccak256}; +use thiserror::Error; + +/// A structured representation of a parsed `CrossL2Inbox` message access entry. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Access { + /// Full 256-bit chain ID (combined from lookup + extension) + pub chain_id: [u8; 32], + /// Block number in the source chain + pub block_number: u64, + /// Timestamp of the message's block + pub timestamp: u64, + /// Log index of the message within the block + pub log_index: u32, + /// Provided checksum entry (prefix 0x03) + pub checksum: B256, +} + +impl Access { + /// Constructs a new [`Access`] from a `LookupEntry`, optional `ChainIdExtensionEntry`, + /// and a `ChecksumEntry`. Used internally by the parser. 
+ fn from_entries( + lookup: LookupEntry, + chain_id_ext: Option, + checksum: ChecksumEntry, + ) -> Self { + let mut chain_id = [0u8; 32]; + + if let Some(ext) = chain_id_ext { + chain_id[0..24].copy_from_slice(&ext.upper_bytes); + } + + chain_id[24..32].copy_from_slice(&lookup.chain_id_low); + + Self { + chain_id, + block_number: lookup.block_number, + timestamp: lookup.timestamp, + log_index: lookup.log_index, + checksum: checksum.raw, + } + } + + /// Recomputes the checksum for this access entry. + /// + /// This follows the spec: + /// - `idPacked = 12 zero bytes ++ block_number ++ timestamp ++ log_index` + /// - `idLogHash = keccak256(log_hash ++ idPacked)` + /// - `bareChecksum = keccak256(idLogHash ++ chain_id)` + /// - Prepend 0x03 to `bareChecksum[1..]` + /// + /// Returns the full 32-byte checksum with prefix 0x03. + /// + /// Reference: [Checksum Calculation](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/predeploys.md#type-3-checksum) + pub fn recompute_checksum(&self, log_hash: &B256) -> B256 { + // Step 1: idPacked = [0u8; 12] ++ block_number ++ timestamp ++ log_index + let mut id_packed = [0u8; 12 + 8 + 8 + 4]; // 32 bytes + id_packed[12..20].copy_from_slice(&self.block_number.to_be_bytes()); + id_packed[20..28].copy_from_slice(&self.timestamp.to_be_bytes()); + id_packed[28..32].copy_from_slice(&self.log_index.to_be_bytes()); + + // Step 2: keccak256(log_hash ++ id_packed) + let id_log_hash = keccak256([log_hash.as_slice(), &id_packed].concat()); + + // Step 3: keccak256(id_log_hash ++ chain_id) + let bare_checksum = keccak256([id_log_hash.as_slice(), &self.chain_id].concat()); + + // Step 4: Prepend type byte 0x03 (overwrite first byte) + let mut checksum = bare_checksum; + checksum.0[0] = 0x03; + + checksum + } + + /// Verify the checksums after recalculation + pub fn verify_checksum(&self, log_hash: &B256) -> Result<(), AccessListError> { + if self.recompute_checksum(log_hash) != self.checksum { + return 
Err(AccessListError::MalformedEntry); + } + Ok(()) + } +} + +/// Represents a single entry in the access list. +#[derive(Debug, Clone)] +enum AccessListEntry { + Lookup(LookupEntry), + ChainIdExtension(ChainIdExtensionEntry), + Checksum(ChecksumEntry), +} + +/// Parsed lookup identity entry (type 0x01). +#[derive(Debug, Clone)] +struct LookupEntry { + pub chain_id_low: [u8; 8], + pub block_number: u64, + pub timestamp: u64, + pub log_index: u32, +} + +/// Parsed Chain ID extension entry (type 0x02). +#[derive(Debug, Clone)] +struct ChainIdExtensionEntry { + pub upper_bytes: [u8; 24], +} + +/// Parsed checksum entry (type 0x03). +#[derive(Debug, Clone)] +struct ChecksumEntry { + pub raw: B256, +} + +/// Error returned when access list parsing fails. +#[derive(Debug, Error, PartialEq, Eq)] +pub enum AccessListError { + /// Input ended before a complete message group was parsed. + #[error("unexpected end of access list")] + UnexpectedEnd, + + /// Unexpected entry type found. + #[error("expected type {expected:#x}, got {found:#x}")] + UnexpectedType { + /// The type we expected (e.g. 0x01, 0x02, or 0x03) + expected: u8, + /// The actual type byte we found + found: u8, + }, + + /// Malformed entry sequence or invalid prefix structure. + #[error("malformed entry")] + MalformedEntry, + + /// Message expired. + #[error("message expired")] + MessageExpired, + + /// Timestamp invariant violated. + #[error("executing timestamp is earlier than initiating timestamp")] + InvalidTimestampInvariant, +} + +// Access list entry type byte constants +const PREFIX_LOOKUP: u8 = 0x01; +const PREFIX_CHAIN_ID_EXTENSION: u8 = 0x02; +const PREFIX_CHECKSUM: u8 = 0x03; + +/// Parses a vector of raw `B256` access list entries into structured [`Access`] objects. 
+/// +/// Each `Access` group must follow the pattern: +/// - One `Lookup` entry (prefix `0x01`) +/// - Optionally one `ChainIdExtension` entry (prefix `0x02`) +/// - One `Checksum` entry (prefix `0x03`) +/// +/// Entries are consumed in order. If any group is malformed, this function returns a +/// [`AccessListError`]. +/// +/// # Arguments +/// +/// * `entries` - A `Vec` representing the raw access list entries. +/// +/// # Returns +/// +/// A vector of fully parsed [`Access`] items if all entries are valid. +/// +/// # Errors +/// +/// Returns [`AccessListError`] if entries are out-of-order, malformed, or incomplete. +pub fn parse_access_list(entries: Vec) -> Result, AccessListError> { + let mut list = Vec::with_capacity(entries.len() / 2); + let mut lookup_entry: Option = None; + let mut chain_id_ext: Option = None; + + for entry in entries { + let parsed = parse_entry(&entry)?; + + match parsed { + AccessListEntry::Lookup(lookup) => { + if lookup_entry.is_some() { + return Err(AccessListError::MalformedEntry); + } + lookup_entry = Some(lookup); + } + + AccessListEntry::ChainIdExtension(ext) => { + if lookup_entry.is_none() || chain_id_ext.is_some() { + return Err(AccessListError::MalformedEntry); + } + chain_id_ext = Some(ext); + } + + AccessListEntry::Checksum(checksum) => { + let lookup = lookup_entry.take().ok_or(AccessListError::MalformedEntry)?; + let access = Access::from_entries(lookup, chain_id_ext.take(), checksum); + list.push(access); + } + } + } + + if lookup_entry.is_some() { + return Err(AccessListError::UnexpectedEnd); + } + + Ok(list) +} + +/// Parses a single 32-byte access list entry into a typed [`AccessListEntry`]. 
+/// +/// This function performs a prefix-based decoding of the input hash: +/// +/// ### Entry Type Encoding +/// +/// | Prefix Byte | Type | Description | +/// |-------------|------------------------|-------------------------------------------------------------------| +/// | `0x01` | `LookupEntry` | Contains chain ID (low bits), block number, timestamp, log index. | +/// | `0x02` | `ChainIdExtensionEntry`| Contains upper 24 bytes of a 256-bit chain ID. | +/// | `0x03` | `ChecksumEntry` | Contains the checksum hash used for message validation. | +/// +/// ### Spec References +/// +/// - [Optimism Access List Format](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/predeploys.md#access-list) +/// - Entry format and layout based on `CrossL2Inbox` access-list encoding. +fn parse_entry(entry: &B256) -> Result { + match entry[0] { + PREFIX_LOOKUP => { + if entry[1..4] != [0; 3] { + return Err(AccessListError::MalformedEntry); + } + Ok(AccessListEntry::Lookup(LookupEntry { + chain_id_low: entry[4..12].try_into().unwrap(), + block_number: u64::from_be_bytes(entry[12..20].try_into().unwrap()), + timestamp: u64::from_be_bytes(entry[20..28].try_into().unwrap()), + log_index: u32::from_be_bytes(entry[28..32].try_into().unwrap()), + })) + } + + PREFIX_CHAIN_ID_EXTENSION => { + if entry[1..8] != [0; 7] { + return Err(AccessListError::MalformedEntry); + } + Ok(AccessListEntry::ChainIdExtension(ChainIdExtensionEntry { + upper_bytes: entry[8..32].try_into().unwrap(), + })) + } + + PREFIX_CHECKSUM => Ok(AccessListEntry::Checksum(ChecksumEntry { raw: *entry })), + + other => Err(AccessListError::UnexpectedType { expected: PREFIX_LOOKUP, found: other }), + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::{B256, U256, b256}; + + fn make_lookup_entry( + block_number: u64, + timestamp: u64, + log_index: u32, + chain_id_low: [u8; 8], + ) -> B256 { + let mut buf = [0u8; 32]; + buf[0] = PREFIX_LOOKUP; + // 3 zero padding + 
buf[4..12].copy_from_slice(&chain_id_low); + buf[12..20].copy_from_slice(&block_number.to_be_bytes()); + buf[20..28].copy_from_slice(×tamp.to_be_bytes()); + buf[28..32].copy_from_slice(&log_index.to_be_bytes()); + B256::from(buf) + } + + fn make_chain_id_ext(upper: [u8; 24]) -> B256 { + let mut buf = [0u8; 32]; + buf[0] = PREFIX_CHAIN_ID_EXTENSION; + // 7 zero padding + buf[8..32].copy_from_slice(&upper); + B256::from(buf) + } + + fn make_checksum(access: &Access, log_hash: &B256) -> B256 { + access.recompute_checksum(log_hash) + } + + #[test] + fn test_parse_valid_access_list_with_chain_id_ext() { + let block_number = 1234; + let timestamp = 9999; + let log_index = 5; + let chain_id_low = [1u8; 8]; + let upper_bytes = [2u8; 24]; + let log_hash = keccak256([0u8; 32]); + + let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); + let chain_ext = make_chain_id_ext(upper_bytes); + + let access = Access::from_entries( + LookupEntry { chain_id_low, block_number, timestamp, log_index }, + Some(ChainIdExtensionEntry { upper_bytes }), + ChecksumEntry { + raw: B256::default(), // will override later + }, + ); + + let checksum = make_checksum(&access, &log_hash); + + let access = Access::from_entries( + LookupEntry { chain_id_low, block_number, timestamp, log_index }, + Some(ChainIdExtensionEntry { upper_bytes }), + ChecksumEntry { raw: checksum }, + ); + + let list = vec![lookup, chain_ext, checksum]; + let parsed = parse_access_list(list).unwrap(); + assert_eq!(parsed.len(), 1); + assert_eq!(parsed[0], access); + assert!(parsed[0].verify_checksum(&log_hash).is_ok()); + } + + #[test] + fn test_parse_access_list_without_chain_id_ext() { + let block_number = 1; + let timestamp = 2; + let log_index = 3; + let chain_id_low = [0xaa; 8]; + let log_hash = keccak256([1u8; 32]); + + let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); + let access = Access::from_entries( + LookupEntry { chain_id_low, block_number, timestamp, 
log_index }, + None, + ChecksumEntry { raw: B256::default() }, + ); + let checksum = make_checksum(&access, &log_hash); + let access = Access::from_entries( + LookupEntry { chain_id_low, block_number, timestamp, log_index }, + None, + ChecksumEntry { raw: checksum }, + ); + + let list = vec![lookup, checksum]; + let parsed = parse_access_list(list).unwrap(); + assert_eq!(parsed.len(), 1); + assert_eq!(parsed[0], access); + assert!(parsed[0].verify_checksum(&log_hash).is_ok()); + } + + #[test] + fn test_recompute_checksum_against_known_value() { + // Input data + let access = Access { + chain_id: U256::from(3).to_be_bytes(), + block_number: 2587, + timestamp: 4660, + log_index: 66, + checksum: B256::default(), // not used in this test + }; + + let log_hash = b256!("0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"); + + // Expected checksum computed previously using spec logic + let expected = b256!("0x03ca886771056d8ea647bb809b888ba14986f57daaf28954d40408321717716a"); + + let computed = access.recompute_checksum(&log_hash); + assert_eq!(computed, expected, "Checksum does not match expected value"); + } + + #[test] + fn test_checksum_mismatch() { + let block_number = 1; + let timestamp = 2; + let log_index = 3; + let chain_id_low = [0xaa; 8]; + let log_hash = keccak256([1u8; 32]); + + let lookup = make_lookup_entry(block_number, timestamp, log_index, chain_id_low); + let fake_checksum = + b256!("0x03ca886771056d8ea647bb809b888ba14986f57daaf28954d40408321717716a"); + let list = vec![lookup, fake_checksum]; + + let parsed = parse_access_list(list).unwrap(); + let err = parsed[0].verify_checksum(&log_hash); + assert_eq!(err, Err(AccessListError::MalformedEntry)); + } + + #[test] + fn test_invalid_entry_order_should_fail() { + let mut raw = [0u8; 32]; + raw[0] = PREFIX_CHECKSUM; + let checksum = B256::from(raw); + + let lookup = make_lookup_entry(0, 0, 0, [0u8; 8]); + let entries = vec![checksum, lookup]; + + assert!(matches!(parse_access_list(entries), 
Err(AccessListError::MalformedEntry))); + } +} diff --git a/kona/crates/supervisor/types/src/head.rs b/rust/kona/crates/supervisor/types/src/head.rs similarity index 100% rename from kona/crates/supervisor/types/src/head.rs rename to rust/kona/crates/supervisor/types/src/head.rs diff --git a/kona/crates/supervisor/types/src/hex_string_u64.rs b/rust/kona/crates/supervisor/types/src/hex_string_u64.rs similarity index 100% rename from kona/crates/supervisor/types/src/hex_string_u64.rs rename to rust/kona/crates/supervisor/types/src/hex_string_u64.rs diff --git a/kona/crates/supervisor/types/src/lib.rs b/rust/kona/crates/supervisor/types/src/lib.rs similarity index 100% rename from kona/crates/supervisor/types/src/lib.rs rename to rust/kona/crates/supervisor/types/src/lib.rs diff --git a/kona/crates/supervisor/types/src/log.rs b/rust/kona/crates/supervisor/types/src/log.rs similarity index 100% rename from kona/crates/supervisor/types/src/log.rs rename to rust/kona/crates/supervisor/types/src/log.rs diff --git a/kona/crates/supervisor/types/src/message.rs b/rust/kona/crates/supervisor/types/src/message.rs similarity index 100% rename from kona/crates/supervisor/types/src/message.rs rename to rust/kona/crates/supervisor/types/src/message.rs diff --git a/kona/crates/supervisor/types/src/receipt.rs b/rust/kona/crates/supervisor/types/src/receipt.rs similarity index 100% rename from kona/crates/supervisor/types/src/receipt.rs rename to rust/kona/crates/supervisor/types/src/receipt.rs diff --git a/kona/crates/supervisor/types/src/types.rs b/rust/kona/crates/supervisor/types/src/types.rs similarity index 100% rename from kona/crates/supervisor/types/src/types.rs rename to rust/kona/crates/supervisor/types/src/types.rs diff --git a/rust/kona/crates/utilities/cli/Cargo.toml b/rust/kona/crates/utilities/cli/Cargo.toml new file mode 100644 index 00000000000..2b44a4dc2a6 --- /dev/null +++ b/rust/kona/crates/utilities/cli/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "kona-cli" 
+version = "0.3.2" +description = "Shared CLI utilities for Kona crates" +edition.workspace = true +license.workspace = true +authors.workspace = true +repository.workspace = true +homepage.workspace = true + +[lints] +workspace = true + +[dependencies] +# Workspace +kona-genesis.workspace = true +kona-registry.workspace = true + +# Alloy +alloy-chains.workspace = true + +# General +tracing.workspace = true +serde = { workspace = true, features = ["derive"]} +clap = { workspace = true, features = ["derive", "env"] } +tracing-subscriber = { workspace = true, features = ["fmt", "env-filter", "json", "tracing-log", "ansi"] } +tracing-appender.workspace = true +metrics-exporter-prometheus = { workspace = true, features = ["http-listener"] } +metrics-process.workspace = true +thiserror.workspace = true + +# `secrets` feature +libp2p = { workspace = true, features = ["secp256k1"], optional = true } +alloy-primitives.workspace = true + +[dev-dependencies] +rstest.workspace = true + +[target.'cfg(unix)'.dependencies] +libc.workspace = true + +[features] +default = [] +secrets = [ "dep:libp2p" ] diff --git a/kona/crates/utilities/cli/README.md b/rust/kona/crates/utilities/cli/README.md similarity index 100% rename from kona/crates/utilities/cli/README.md rename to rust/kona/crates/utilities/cli/README.md diff --git a/kona/crates/utilities/cli/src/backtrace.rs b/rust/kona/crates/utilities/cli/src/backtrace.rs similarity index 81% rename from kona/crates/utilities/cli/src/backtrace.rs rename to rust/kona/crates/utilities/cli/src/backtrace.rs index 7b88a57ab26..8da48f7a0b1 100644 --- a/kona/crates/utilities/cli/src/backtrace.rs +++ b/rust/kona/crates/utilities/cli/src/backtrace.rs @@ -1,6 +1,6 @@ //! Helper to set the backtrace env var. -/// Sets the RUST_BACKTRACE environment variable to 1 if it is not already set. +/// Sets the `RUST_BACKTRACE` environment variable to 1 if it is not already set. 
pub fn enable() { // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided. if std::env::var_os("RUST_BACKTRACE").is_none() { diff --git a/kona/crates/utilities/cli/src/clap.rs b/rust/kona/crates/utilities/cli/src/clap.rs similarity index 100% rename from kona/crates/utilities/cli/src/clap.rs rename to rust/kona/crates/utilities/cli/src/clap.rs diff --git a/kona/crates/utilities/cli/src/error.rs b/rust/kona/crates/utilities/cli/src/error.rs similarity index 100% rename from kona/crates/utilities/cli/src/error.rs rename to rust/kona/crates/utilities/cli/src/error.rs diff --git a/kona/crates/utilities/cli/src/flags/globals.rs b/rust/kona/crates/utilities/cli/src/flags/globals.rs similarity index 100% rename from kona/crates/utilities/cli/src/flags/globals.rs rename to rust/kona/crates/utilities/cli/src/flags/globals.rs diff --git a/kona/crates/utilities/cli/src/flags/log.rs b/rust/kona/crates/utilities/cli/src/flags/log.rs similarity index 100% rename from kona/crates/utilities/cli/src/flags/log.rs rename to rust/kona/crates/utilities/cli/src/flags/log.rs diff --git a/rust/kona/crates/utilities/cli/src/flags/metrics.rs b/rust/kona/crates/utilities/cli/src/flags/metrics.rs new file mode 100644 index 00000000000..b900c19bf8e --- /dev/null +++ b/rust/kona/crates/utilities/cli/src/flags/metrics.rs @@ -0,0 +1,97 @@ +//! Utility module to house implementation and declaration of `MetricsArgs` since it's being used in +//! multiple places, it's just being referenced from this module. + +use crate::{CliResult, init_prometheus_server}; +use clap::Parser; +use std::net::IpAddr; + +/// Configuration for Prometheus metrics. +#[derive(Debug, Clone, Parser)] +#[command(next_help_heading = "Metrics")] +pub struct MetricsArgs { + /// Controls whether Prometheus metrics are enabled. Disabled by default. 
+ #[arg( + long = "metrics.enabled", + global = true, + default_value_t = false, + env = "KONA_METRICS_ENABLED" + )] + pub enabled: bool, + + /// The port to serve Prometheus metrics on. + #[arg(long = "metrics.port", global = true, default_value = "9090", env = "KONA_METRICS_PORT")] + pub port: u16, + + /// The IP address to use for Prometheus metrics. + #[arg( + long = "metrics.addr", + global = true, + default_value = "0.0.0.0", + env = "KONA_METRICS_ADDR" + )] + pub addr: IpAddr, +} + +impl Default for MetricsArgs { + fn default() -> Self { + Self::parse_from::<[_; 0], &str>([]) + } +} + +impl MetricsArgs { + /// Initialize the tracing stack and Prometheus metrics recorder. + /// + /// This function should be called at the beginning of the program. + pub fn init_metrics(&self) -> CliResult<()> { + if self.enabled { + init_prometheus_server(self.addr, self.port)?; + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use clap::Parser; + use std::net::{IpAddr, Ipv4Addr}; + + /// Helper struct to parse `MetricsArgs` within a test CLI structure. + #[derive(Parser, Debug)] + struct TestCli { + #[command(flatten)] + metrics: MetricsArgs, + } + + #[test] + fn test_default_metrics_args() { + let cli = TestCli::parse_from(["test_app"]); + assert!(!cli.metrics.enabled, "Default for metrics.enabled should be false."); + assert_eq!(cli.metrics.port, 9090, "Default for metrics.port should be 9090."); + assert_eq!( + cli.metrics.addr, + IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), + "Default for metrics.addr should be 0.0.0.0." 
+ ); + } + + #[test] + fn test_metrics_args_from_cli() { + let cli = TestCli::parse_from([ + "test_app", + "--metrics.enabled", + "--metrics.port", + "9999", + "--metrics.addr", + "127.0.0.1", + ]); + assert!(cli.metrics.enabled, "metrics.enabled should be true."); + assert_eq!(cli.metrics.port, 9999, "metrics.port should be parsed from CLI."); + assert_eq!( + cli.metrics.addr, + IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), + "metrics.addr should be parsed from CLI." + ); + } +} diff --git a/kona/crates/utilities/cli/src/flags/mod.rs b/rust/kona/crates/utilities/cli/src/flags/mod.rs similarity index 100% rename from kona/crates/utilities/cli/src/flags/mod.rs rename to rust/kona/crates/utilities/cli/src/flags/mod.rs diff --git a/kona/crates/utilities/cli/src/flags/overrides.rs b/rust/kona/crates/utilities/cli/src/flags/overrides.rs similarity index 100% rename from kona/crates/utilities/cli/src/flags/overrides.rs rename to rust/kona/crates/utilities/cli/src/flags/overrides.rs diff --git a/rust/kona/crates/utilities/cli/src/lib.rs b/rust/kona/crates/utilities/cli/src/lib.rs new file mode 100644 index 00000000000..caaa300dd55 --- /dev/null +++ b/rust/kona/crates/utilities/cli/src/lib.rs @@ -0,0 +1,33 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] + +mod error; +pub use error::{CliError, CliResult}; + +mod flags; +pub use flags::{GlobalArgs, LogArgs, MetricsArgs, OverrideArgs}; + +mod logs; +pub use logs::{FileLogConfig, LogConfig, LogRotation, StdoutLogConfig}; + +mod clap; +pub use clap::cli_styles; + +#[cfg(feature = "secrets")] +mod secrets; +#[cfg(feature = "secrets")] +pub use secrets::{KeypairError, ParseKeyError, SecretKeyLoader}; + +pub mod backtrace; + +mod tracing; +pub use 
tracing::{LogFormat, init_test_tracing}; + +mod prometheus; +pub use prometheus::init_prometheus_server; + +pub mod sigsegv_handler; diff --git a/kona/crates/utilities/cli/src/logs.rs b/rust/kona/crates/utilities/cli/src/logs.rs similarity index 100% rename from kona/crates/utilities/cli/src/logs.rs rename to rust/kona/crates/utilities/cli/src/logs.rs diff --git a/kona/crates/utilities/cli/src/prometheus.rs b/rust/kona/crates/utilities/cli/src/prometheus.rs similarity index 100% rename from kona/crates/utilities/cli/src/prometheus.rs rename to rust/kona/crates/utilities/cli/src/prometheus.rs diff --git a/kona/crates/utilities/cli/src/secrets.rs b/rust/kona/crates/utilities/cli/src/secrets.rs similarity index 100% rename from kona/crates/utilities/cli/src/secrets.rs rename to rust/kona/crates/utilities/cli/src/secrets.rs diff --git a/kona/crates/utilities/cli/src/sigsegv_handler.rs b/rust/kona/crates/utilities/cli/src/sigsegv_handler.rs similarity index 95% rename from kona/crates/utilities/cli/src/sigsegv_handler.rs rename to rust/kona/crates/utilities/cli/src/sigsegv_handler.rs index 374749e1616..dcd2e51f663 100644 --- a/kona/crates/utilities/cli/src/sigsegv_handler.rs +++ b/rust/kona/crates/utilities/cli/src/sigsegv_handler.rs @@ -121,13 +121,13 @@ pub fn install() { let mut alt_stack: libc::stack_t = mem::zeroed(); alt_stack.ss_sp = alloc(Layout::from_size_align(alt_stack_size, 1).unwrap()).cast(); alt_stack.ss_size = alt_stack_size; - libc::sigaltstack(&alt_stack, ptr::null_mut()); + libc::sigaltstack(&raw const alt_stack, ptr::null_mut()); let mut sa: libc::sigaction = mem::zeroed(); - sa.sa_sigaction = print_stack_trace as libc::sighandler_t; + sa.sa_sigaction = print_stack_trace as *const () as libc::sighandler_t; sa.sa_flags = libc::SA_NODEFER | libc::SA_RESETHAND | libc::SA_ONSTACK; - libc::sigemptyset(&mut sa.sa_mask); - libc::sigaction(libc::SIGSEGV, &sa, ptr::null_mut()); + libc::sigemptyset(&raw mut sa.sa_mask); + libc::sigaction(libc::SIGSEGV, &raw 
const sa, ptr::null_mut()); } } diff --git a/rust/kona/crates/utilities/cli/src/tracing.rs b/rust/kona/crates/utilities/cli/src/tracing.rs new file mode 100644 index 00000000000..f6372761583 --- /dev/null +++ b/rust/kona/crates/utilities/cli/src/tracing.rs @@ -0,0 +1,152 @@ +//! [`tracing_subscriber`] utilities. + +use tracing_subscriber::{ + Layer, + fmt::{ + format::{FormatEvent, FormatFields, Writer}, + time::{FormatTime, SystemTime}, + }, + prelude::__tracing_subscriber_SubscriberExt, + registry::LookupSpan, + util::{SubscriberInitExt, TryInitError}, +}; + +use serde::{Deserialize, Serialize}; +use std::fmt; +use tracing_subscriber::EnvFilter; + +use crate::{LogConfig, LogRotation}; + +/// The format of the logs. +#[derive( + Default, Debug, Clone, Copy, PartialEq, Eq, Hash, clap::ValueEnum, Serialize, Deserialize, +)] +#[serde(rename_all = "lowercase")] +#[clap(rename_all = "lowercase")] +pub enum LogFormat { + /// Full format (default). + #[default] + Full, + /// JSON format. + Json, + /// Pretty format. + Pretty, + /// Compact format. + Compact, + /// Logfmt format. + Logfmt, +} + +/// Custom logfmt formatter for tracing events. 
+struct LogfmtFormatter; + +impl FormatEvent for LogfmtFormatter +where + S: tracing::Subscriber + for<'a> LookupSpan<'a>, + N: for<'a> FormatFields<'a> + 'static, +{ + fn format_event( + &self, + ctx: &tracing_subscriber::fmt::FmtContext<'_, S, N>, + mut writer: Writer<'_>, + event: &tracing::Event<'_>, + ) -> fmt::Result { + let meta = event.metadata(); + + // Write timestamp + let time_format = SystemTime; + write!(writer, "time=\"")?; + time_format.format_time(&mut writer)?; + write!(writer, "\" ")?; + + // Write level + write!(writer, "level={} ", meta.level())?; + + // Write target + write!(writer, "target={} ", meta.target())?; + + // Write the message and fields + ctx.field_format().format_fields(writer.by_ref(), event)?; + + writeln!(writer) + } +} + +impl LogConfig { + /// Initializes the tracing subscriber + /// + /// # Arguments + /// * `verbosity_level` - The verbosity level (0-5). If `0`, no logs are printed. + /// * `env_filter` - Optional environment filter for the subscriber. + /// + /// # Returns + /// * `Result<()>` - Ok if successful, Err otherwise. 
+ pub fn init_tracing_subscriber( + &self, + env_filter: Option, + ) -> Result<(), TryInitError> { + let file_layer = self.file_logs.as_ref().map(|file_logs| { + let directory_path = file_logs.directory_path.clone(); + + let appender = match file_logs.rotation { + LogRotation::Minutely => { + tracing_appender::rolling::minutely(directory_path, "kona.log") + } + LogRotation::Hourly => { + tracing_appender::rolling::hourly(directory_path, "kona.log") + } + LogRotation::Daily => tracing_appender::rolling::daily(directory_path, "kona.log"), + LogRotation::Never => tracing_appender::rolling::never(directory_path, "kona.log"), + }; + + match file_logs.format { + LogFormat::Full => tracing_subscriber::fmt::layer().with_writer(appender).boxed(), + LogFormat::Json => { + tracing_subscriber::fmt::layer().json().with_writer(appender).boxed() + } + LogFormat::Pretty => { + tracing_subscriber::fmt::layer().pretty().with_writer(appender).boxed() + } + LogFormat::Compact => { + tracing_subscriber::fmt::layer().compact().with_writer(appender).boxed() + } + LogFormat::Logfmt => tracing_subscriber::fmt::layer() + .event_format(LogfmtFormatter) + .with_writer(appender) + .boxed(), + } + }); + + let stdout_layer = self.stdout_logs.as_ref().map(|stdout_logs| match stdout_logs.format { + LogFormat::Full => tracing_subscriber::fmt::layer().boxed(), + LogFormat::Json => tracing_subscriber::fmt::layer().json().boxed(), + LogFormat::Pretty => tracing_subscriber::fmt::layer().pretty().boxed(), + LogFormat::Compact => tracing_subscriber::fmt::layer().compact().boxed(), + LogFormat::Logfmt => { + tracing_subscriber::fmt::layer().event_format(LogfmtFormatter).boxed() + } + }); + + let env_filter = env_filter + .unwrap_or_else(EnvFilter::from_default_env) + .add_directive(self.global_level.into()); + + tracing_subscriber::registry() + .with(env_filter) + .with(file_layer) + .with(stdout_layer) + .try_init()?; + + Ok(()) + } +} + +/// This provides function for init tracing in testing +/// +/// # 
Functions +/// - `init_test_tracing`: A helper function for initializing tracing in test environments. +/// - `init_tracing_subscriber`: Initializes the tracing subscriber with a specified verbosity level +/// and optional environment filter. +pub fn init_test_tracing() { + let _ = LogConfig::default().init_tracing_subscriber(None::); +} diff --git a/kona/crates/utilities/macros/Cargo.toml b/rust/kona/crates/utilities/macros/Cargo.toml similarity index 100% rename from kona/crates/utilities/macros/Cargo.toml rename to rust/kona/crates/utilities/macros/Cargo.toml diff --git a/rust/kona/crates/utilities/macros/README.md b/rust/kona/crates/utilities/macros/README.md new file mode 100644 index 00000000000..de8b0b0d161 --- /dev/null +++ b/rust/kona/crates/utilities/macros/README.md @@ -0,0 +1,3 @@ +# `kona-macros` + +Utility helper macros for kona crates. diff --git a/rust/kona/crates/utilities/macros/src/lib.rs b/rust/kona/crates/utilities/macros/src/lib.rs new file mode 100644 index 00000000000..a049fa0849f --- /dev/null +++ b/rust/kona/crates/utilities/macros/src/lib.rs @@ -0,0 +1,11 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![no_std] + +mod metrics; diff --git a/kona/crates/utilities/macros/src/metrics.rs b/rust/kona/crates/utilities/macros/src/metrics.rs similarity index 100% rename from kona/crates/utilities/macros/src/metrics.rs rename to rust/kona/crates/utilities/macros/src/metrics.rs diff --git a/kona/crates/utilities/serde/Cargo.toml b/rust/kona/crates/utilities/serde/Cargo.toml similarity index 100% rename from 
kona/crates/utilities/serde/Cargo.toml rename to rust/kona/crates/utilities/serde/Cargo.toml diff --git a/rust/kona/crates/utilities/serde/README.md b/rust/kona/crates/utilities/serde/README.md new file mode 100644 index 00000000000..ae21ec73e94 --- /dev/null +++ b/rust/kona/crates/utilities/serde/README.md @@ -0,0 +1,59 @@ +## `kona-serde` + +Serde related helpers for kona. + +### Graceful Serialization + +This crate extends the serialization and deserialization +functionality provided by [`alloy-serde`][alloy-serde] to +deserialize raw number quantity values. + +This issue arose in `u128` toml deserialization where +deserialization of a raw number fails. +[This rust playground][invalid] demonstrates how toml fails to +deserialize a native `u128` internal value. + +With `kona-serde`, tagging the inner `u128` field with `#[serde(with = "kona_serde::quantity")]`, +allows the `u128` or any other type within the following constraints to be deserialized by toml properly. + +These are the supported native types: +- `bool` +- `u8` +- `u16` +- `u32` +- `u64` +- `u128` + +Below demonstrates the use of the `#[serde(with = "kona_serde::quantity")]` attribute. + +```rust +use serde::{Serialize, Deserialize}; + +/// My wrapper type. +#[derive(Debug, Serialize, Deserialize)] +pub struct MyStruct { + /// The inner `u128` value. + #[serde(with = "kona_serde::quantity")] + pub inner: u128, +} + +// Correctly deserializes a raw value. +let raw_toml = r#"inner = 120"#; +let b: MyStruct = toml::from_str(raw_toml).expect("failed to deserialize toml"); +println!("{}", b.inner); + +// Notice that a string value is also deserialized correctly. +let raw_toml = r#"inner = "120""#; +let b: MyStruct = toml::from_str(raw_toml).expect("failed to deserialize toml"); +println!("{}", b.inner); +``` + +### Provenance + +This code is heavily based on the [`alloy-serde`][alloy-serde] crate. 
+ + + + +[invalid]: https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=d3c674d02a90c574e3f543144621418d +[alloy-serde]: https://crates.io/crates/alloy-serde diff --git a/rust/kona/crates/utilities/serde/src/lib.rs b/rust/kona/crates/utilities/serde/src/lib.rs new file mode 100644 index 00000000000..136c6eb837f --- /dev/null +++ b/rust/kona/crates/utilities/serde/src/lib.rs @@ -0,0 +1,12 @@ +#![doc = include_str!("../README.md")] +#![doc( + html_logo_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/square.png", + html_favicon_url = "https://raw.githubusercontent.com/ethereum-optimism/optimism/develop/rust/kona/assets/favicon.ico", + issue_tracker_base_url = "https://github.com/ethereum-optimism/optimism/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![no_std] + +extern crate alloc; + +pub mod quantity; diff --git a/kona/crates/utilities/serde/src/quantity.rs b/rust/kona/crates/utilities/serde/src/quantity.rs similarity index 100% rename from kona/crates/utilities/serde/src/quantity.rs rename to rust/kona/crates/utilities/serde/src/quantity.rs diff --git a/rust/kona/docker/README.md b/rust/kona/docker/README.md new file mode 100644 index 00000000000..ca93ba07b46 --- /dev/null +++ b/rust/kona/docker/README.md @@ -0,0 +1,92 @@ +# `docker` + +This directory contains all of the repositories' dockerfiles as well as the [bake file](https://docs.docker.com/build/bake/) +used to define this repository's docker build configuration. In addition, the [recipes](./recipes) directory contains +example deployment strategies + grafana dashboards for applications such as [`kona-node`](../bin/node). 
+ +## Install Dependencies + +* `docker`: https://www.docker.com/get-started/ +* `docker-buildx`: https://github.com/docker/buildx?tab=readme-ov-file#installing + +## Building Locally + +To build any image in the bake file locally, use `docker buildx bake`: + +```sh +# The target is one of the available bake targets within the `docker-bake.hcl`. +# A list can be viewed by running `docker buildx bake --list-targets` +export TARGET="" + +(cd "$(git rev-parse --show-toplevel)" && docker buildx bake \ + --progress plain \ + -f docker/docker-bake.hcl \ + $TARGET) +``` + +### Build Options + +Relevant build options (variables) for each target can be viewed by running `docker buildx bake --list-variables` or +manually inspecting the targets in the `docker-bake.hcl`. + +#### Troubleshooting + +If you receive an error like the following: + +``` +ERROR: Multi-platform build is not supported for the docker driver. +Switch to a different driver, or turn on the containerd image store, and try again. +Learn more at https://docs.docker.com/go/build-multi-platform/ +``` + +Create and activate a new builder and retry the bake command. 
+ +```sh +docker buildx create --name kona-builder --use +``` + +## Nightly Builds + +Nightly Docker images are automatically built and published every day at 2 AM UTC for: +- `kona-node` +- `kona-host` +- `kona-supervisor` + +### Using Nightly Images + +```sh +# Pull the latest nightly build (multi-platform: linux/amd64, linux/arm64) +docker pull ghcr.io/op-rs/kona/kona-node:nightly +docker pull ghcr.io/op-rs/kona/kona-host:nightly +docker pull ghcr.io/op-rs/kona/kona-supervisor:nightly + +# Pull a specific date's nightly build +docker pull ghcr.io/op-rs/kona/kona-node:nightly-2024-12-10 +``` + +### Manual Trigger + +To manually trigger a nightly build: +```sh +gh workflow run "Build and Publish Nightly Docker Images" +``` + +## Cutting a Release (for maintainers / forks) + +To cut a release of the docker image for any of the targets, cut a new annotated tag for the target like so: + +```sh +# Example formats: +# - `kona-host/v0.1.0-beta.8` +# - `cannon-builder/v1.2.0` +TAG="/" +git tag -a $TAG -m "" && git push origin tag $TAG +``` + +To run the workflow manually, navigate over to the ["Build and Publish Docker Image"](https://github.com/ethereum-optimism/optimism/actions/workflows/docker.yaml) +action. From there, run a `workflow_dispatch` trigger, select the tag you just pushed, and then finally select the image to release. 
+ +Or, if you prefer to use the `gh` CLI, you can run: +```sh +gh workflow run "Build and Publish Docker Image" --ref -f image_to_release= +``` diff --git a/kona/docker/apps/README.md b/rust/kona/docker/apps/README.md similarity index 100% rename from kona/docker/apps/README.md rename to rust/kona/docker/apps/README.md diff --git a/kona/docker/apps/entrypoint.sh b/rust/kona/docker/apps/entrypoint.sh similarity index 100% rename from kona/docker/apps/entrypoint.sh rename to rust/kona/docker/apps/entrypoint.sh diff --git a/rust/kona/docker/apps/justfile b/rust/kona/docker/apps/justfile new file mode 100644 index 00000000000..eff94e1bfaa --- /dev/null +++ b/rust/kona/docker/apps/justfile @@ -0,0 +1,48 @@ +DOCKER_JUSTFILE := source_directory() + +_docker_arch: + #!/bin/bash + if [[ -z "$PLATFORMS" ]]; then + echo $(docker system info --format '{{"{{"}}.OSType{{"}}"}}/{{"{{"}}.Architecture{{"}}"}}') + else + echo "$PLATFORMS" + fi + +# Builds an application image from the local repository. +build-local bin_name image_tag='kona:local' load_flag='': + #!/bin/bash + export BIN_TARGET="{{bin_name}}" + export DEFAULT_TAG="{{image_tag}}" + export PLATFORMS="$(just _docker_arch)" + export REPO_LOCATION="local" + + LOAD_FLAG="" + if [[ "{{load_flag}}" == "load" ]]; then + LOAD_FLAG="--load" + fi + + (cd {{DOCKER_JUSTFILE}}/../../../ && docker buildx bake \ + --progress plain \ + -f kona/docker/docker-bake.hcl \ + $LOAD_FLAG \ + generic) + +# Builds an application image from a remote revision. 
+build-remote bin_name git_tag='' image_tag='kona:local': + #!/bin/bash + export BIN_TARGET="{{bin_name}}" + export DEFAULT_TAG="{{image_tag}}" + export PLATFORMS="$(just _docker_arch)" + export REPO_LOCATION="remote" + + # If no git tag is provided, use `main` + if [[ -z "{{git_tag}}" ]]; then + export GIT_REF_NAME="main" + else + export GIT_REF_NAME="{{git_tag}}" + fi + + (cd {{DOCKER_JUSTFILE}}/../../../ && docker buildx bake \ + --progress plain \ + -f kona/docker/docker-bake.hcl \ + generic) diff --git a/kona/docker/apps/kona_app_generic.dockerfile b/rust/kona/docker/apps/kona_app_generic.dockerfile similarity index 89% rename from kona/docker/apps/kona_app_generic.dockerfile rename to rust/kona/docker/apps/kona_app_generic.dockerfile index 919c1b20cf1..1bc5b6289f7 100644 --- a/kona/docker/apps/kona_app_generic.dockerfile +++ b/rust/kona/docker/apps/kona_app_generic.dockerfile @@ -32,8 +32,8 @@ RUN cargo binstall cargo-chef -y ################################ FROM dep-setup-stage AS app-local-setup-stage -# Copy in the local repository -COPY . /kona +# Copy in the local workspace repository +COPY . /workspace ################################ # Remote Repo Setup Stage # @@ -45,9 +45,10 @@ ARG TAG ARG REPOSITORY # Clone kona at the specified tag -RUN git clone https://github.com/${REPOSITORY} && \ - cd kona && \ - git checkout "${TAG}" +RUN git clone https://github.com/${REPOSITORY} repo && \ + cd repo && \ + git checkout "${TAG}" && \ + mv rust /workspace ################################ # App Build Stage # @@ -64,7 +65,7 @@ WORKDIR /app FROM build-entrypoint AS planner # Triggers a cache invalidation if `app-setup` is modified. -COPY --from=app-setup kona . +COPY --from=app-setup /workspace . 
RUN cargo chef prepare --recipe-path recipe.json FROM build-entrypoint AS builder @@ -75,7 +76,7 @@ COPY --from=planner /app/recipe.json recipe.json RUN RUSTFLAGS="-C target-cpu=generic" cargo chef cook --bin "${BIN_TARGET}" --profile "${BUILD_PROFILE}" --recipe-path recipe.json # Build application. This step will systematically trigger a cache invalidation if the source code changes. -COPY --from=app-setup kona . +COPY --from=app-setup /workspace . # Build the application binary on the selected tag. Since we build the external dependencies in the previous step, # this step will reuse the target directory from the previous step. RUN RUSTFLAGS="-C target-cpu=generic" cargo build --bin "${BIN_TARGET}" --profile "${BUILD_PROFILE}" @@ -105,10 +106,10 @@ RUN groupadd --gid ${GID} app \ app # Copy in the binary from the build image. -COPY --from=builder "app/target/${BUILD_PROFILE}/${BIN_TARGET}" "/usr/local/bin/${BIN_TARGET}" +COPY --from=builder "/app/target/${BUILD_PROFILE}/${BIN_TARGET}" "/usr/local/bin/${BIN_TARGET}" # Copy in the entrypoint script. 
-COPY ./docker/apps/entrypoint.sh /entrypoint.sh +COPY ./kona/docker/apps/entrypoint.sh /entrypoint.sh # Ensure the entrypoint and binary are executable and readable by the non-root user RUN chmod 0555 "/usr/local/bin/${BIN_TARGET}" \ diff --git a/kona/docker/asterisc/asterisc.dockerfile b/rust/kona/docker/asterisc/asterisc.dockerfile similarity index 100% rename from kona/docker/asterisc/asterisc.dockerfile rename to rust/kona/docker/asterisc/asterisc.dockerfile diff --git a/kona/docker/cannon/cannon.dockerfile b/rust/kona/docker/cannon/cannon.dockerfile similarity index 100% rename from kona/docker/cannon/cannon.dockerfile rename to rust/kona/docker/cannon/cannon.dockerfile diff --git a/kona/docker/cannon/mips64-unknown-none.json b/rust/kona/docker/cannon/mips64-unknown-none.json similarity index 100% rename from kona/docker/cannon/mips64-unknown-none.json rename to rust/kona/docker/cannon/mips64-unknown-none.json diff --git a/kona/docker/docker-bake.hcl b/rust/kona/docker/docker-bake.hcl similarity index 88% rename from kona/docker/docker-bake.hcl rename to rust/kona/docker/docker-bake.hcl index b6cac2517e1..90b5cde12a9 100644 --- a/kona/docker/docker-bake.hcl +++ b/rust/kona/docker/docker-bake.hcl @@ -7,7 +7,7 @@ variable "REGISTRY" { } variable "REPOSITORY" { - default = "op-rs/kona" + default = "ethereum-optimism/kona" } // The tag to use for the built image. @@ -63,7 +63,7 @@ variable "BUILD_PROFILE" { target "generic" { inherits = ["docker-metadata-action"] context = "." - dockerfile = "docker/apps/kona_app_generic.dockerfile" + dockerfile = "kona/docker/apps/kona_app_generic.dockerfile" args = { REPO_LOCATION = "${REPO_LOCATION}" REPOSITORY = "${REPOSITORY}" @@ -78,12 +78,9 @@ target "generic" { // Proof Images // //////////////////////////////////////////////////////////////// -// The tag of `cannon` to use in the `kona-cannon-prestate` target. -// -// You can override this if you'd like to use a different tag to generate the prestate. 
-// https://github.com/ethereum-optimism/optimism/releases -variable "CANNON_TAG" { - default = "cannon/v1.5.0-alpha.1" +// The path to the monorepo root, used to build cannon from local source. +variable "MONOREPO_CONTEXT" { + default = ".." } // The `kona-client` binary to use in the `kona-cannon-prestate` target. @@ -128,13 +125,13 @@ target "cannon-builder" { target "kona-cannon-prestate" { inherits = ["docker-metadata-action"] context = "." - dockerfile = "docker/fpvm-prestates/cannon-repro.dockerfile" + dockerfile = "kona/docker/fpvm-prestates/cannon-repro.dockerfile" contexts = { custom_configs = "${CUSTOM_CONFIGS_CONTEXT}" + monorepo = "${MONOREPO_CONTEXT}" } args = { CLIENT_BIN = "${CLIENT_BIN}" - CANNON_TAG = "${CANNON_TAG}" KONA_CUSTOM_CONFIGS = "${KONA_CUSTOM_CONFIGS}" } # Only build on linux/amd64 for a single source of reproducibility. diff --git a/rust/kona/docker/fpvm-prestates/README.md b/rust/kona/docker/fpvm-prestates/README.md new file mode 100644 index 00000000000..3725488944c --- /dev/null +++ b/rust/kona/docker/fpvm-prestates/README.md @@ -0,0 +1,28 @@ +# `fpvm-prestates` + +Images for creating reproducible `kona-client` prestate builds for supported fault proof virtual machines. + +Cannon is built from the local monorepo source. 
+ +## Usage + +### `kona-client` + `cannon` prestate artifacts + +```sh +# Produce the prestate artifacts for `kona-client` running on `cannon` (built from local monorepo source) +just cannon +``` + +### `kona-client` + `cannon` prestate artifacts with custom output directory + +```sh +just cannon +``` + +### `kona-client` + `cannon` prestate artifacts for custom chains + +To create a reproducible kona-client prestate build that supports custom or devnet chain configurations that are not in the superchain-registry: + +```sh +just cannon +``` diff --git a/kona/docker/fpvm-prestates/cannon-repro.dockerfile b/rust/kona/docker/fpvm-prestates/cannon-repro.dockerfile similarity index 88% rename from kona/docker/fpvm-prestates/cannon-repro.dockerfile rename to rust/kona/docker/fpvm-prestates/cannon-repro.dockerfile index fc7e8a482e1..5f4d90596e2 100644 --- a/kona/docker/fpvm-prestates/cannon-repro.dockerfile +++ b/rust/kona/docker/fpvm-prestates/cannon-repro.dockerfile @@ -1,15 +1,14 @@ ################################################################ -# Build Cannon @ `CANNON_TAG` # +# Build Cannon from local monorepo # ################################################################ FROM ubuntu:22.04 AS cannon-build SHELL ["/bin/bash", "-c"] ARG TARGETARCH -ARG CANNON_TAG # Install deps -RUN apt-get update && apt-get install -y --no-install-recommends git curl ca-certificates make +RUN apt-get update && apt-get install -y --no-install-recommends curl ca-certificates make ENV GO_VERSION=1.23.8 @@ -19,10 +18,14 @@ RUN curl -sL https://go.dev/dl/go$GO_VERSION.linux-$TARGETARCH.tar.gz -o go$GO_V ENV GOPATH=/go ENV PATH=/usr/local/go/bin:$GOPATH/bin:$PATH -# Clone and build Cannon @ `CANNON_TAG` -RUN git clone https://github.com/ethereum-optimism/optimism && \ - cd optimism/cannon && \ - git checkout $CANNON_TAG && \ +# Copy monorepo source needed for the cannon build +COPY --from=monorepo go.mod go.sum /optimism/ +COPY --from=monorepo cannon/ /optimism/cannon/ +COPY 
--from=monorepo op-service/ /optimism/op-service/ +COPY --from=monorepo op-preimage/ /optimism/op-preimage/ + +# Build cannon from local source +RUN cd /optimism/cannon && \ make && \ cp bin/cannon /cannon-bin diff --git a/rust/kona/docker/fpvm-prestates/justfile b/rust/kona/docker/fpvm-prestates/justfile new file mode 100644 index 00000000000..a54235e580b --- /dev/null +++ b/rust/kona/docker/fpvm-prestates/justfile @@ -0,0 +1,59 @@ +set positional-arguments +alias cannon := build-client-prestate-cannon-artifacts + +# default recipe to display help information +default: + @just --list + +# Build the `kona-client` prestate artifacts from local source (cannon). +build-client-prestate-cannon-artifacts \ + kona_client_variant \ + out='./prestate-artifacts-cannon' \ + custom_config_dir='': + #!/bin/bash + OUTPUT_DIR={{out}} + + # Docker bake env + export CLIENT_BIN="{{kona_client_variant}}" + export DEFAULT_TAG="kona-cannon-prestate:local" + + # Navigate to rust workspace root + cd ../../.. + + # Set monorepo context (parent of rust workspace root) + export MONOREPO_CONTEXT="$(cd .. && pwd)" + + if [[ -n "{{custom_config_dir}}" ]]; then + export KONA_CUSTOM_CONFIGS="true" + export CUSTOM_CONFIGS_CONTEXT="{{custom_config_dir}}" + if [ ! -d "{{custom_config_dir}}" ]; then + echo "Invalid custom config directory: {{custom_config_dir}}" + exit 1 + fi + echo "Using custom config directory: {{custom_config_dir}}" + else + # set to an empty directory to satisfy the docker build context requirement + TEMP_DIR=$(mktemp -d) + trap "rm -rf $TEMP_DIR" EXIT + export CUSTOM_CONFIGS_CONTEXT="$TEMP_DIR" + fi + + # Create the output directory + mkdir -p $OUTPUT_DIR + + echo "Building kona-client (variant: {{kona_client_variant}}) prestate artifacts for the cannon target (local monorepo cannon)." 
+ + # Build the --allow flag conditionally (requires Docker Buildx v0.15.0+) + ALLOW_FLAG="" + BUILDX_VERSION=$(docker buildx version 2>/dev/null | grep -oE '[0-9]+\.[0-9]+\.[0-9]+' | head -1 || echo "0.0.0") + MAJOR=$(echo "$BUILDX_VERSION" | cut -d. -f1) + MINOR=$(echo "$BUILDX_VERSION" | cut -d. -f2) + if [[ "$MAJOR" -gt 0 ]] || [[ "$MAJOR" -eq 0 && "$MINOR" -ge 15 ]]; then + ALLOW_FLAG="--allow fs=${CUSTOM_CONFIGS_CONTEXT} --allow fs=${MONOREPO_CONTEXT}" + fi + + docker buildx bake \ + --set "*.output=$OUTPUT_DIR" \ + $ALLOW_FLAG \ + -f kona/docker/docker-bake.hcl \ + kona-cannon-prestate diff --git a/kona/docker/recipes/kona-node-dev/.gitignore b/rust/kona/docker/recipes/kona-node-dev/.gitignore similarity index 100% rename from kona/docker/recipes/kona-node-dev/.gitignore rename to rust/kona/docker/recipes/kona-node-dev/.gitignore diff --git a/rust/kona/docker/recipes/kona-node-dev/README.md b/rust/kona/docker/recipes/kona-node-dev/README.md new file mode 100644 index 00000000000..6f41447b112 --- /dev/null +++ b/rust/kona/docker/recipes/kona-node-dev/README.md @@ -0,0 +1,83 @@ +# Requirements + +This is intended to run on x86-64 architecture. + +## Purpose + +This recipe, `kona-node-dev`, is different from the `kona-node` recipe in that +it builds a local container image of `kona-node` instead of pulling a nightly +image of `main`. This is useful, because it allows developers to checkout a +development branch and see how it behaves on a network. + +## Set up + +Assuming you are on Ubuntu and your user is member of the group `docker`, first time run + + git clone 'https://github.com/ethereum-optimism/optimism.git' + cd kona/docker/recipes/kona-node-dev/ + just init + +If the last step fails due to missing packages, you can run `just setup-ubuntu` +and then run `just init` again. This will install the required packages for +Ubuntu. `just init` will also set up a virtual network, and finally spin up +`kona-node`, `op-reth`, `prometheus` and `grafana`. 
+ +## Normal usage + +For future invocation it suffices to spin the system up and down with: + + just up + just down + +You can also run `just upd` if you want to detach from the docker logs. +If you want to update the `kona` submodule, you can run `just update`. + +A typical workflow after init could look like this: + + # remove existing images causing them to be rebuild + just rmi + # pull latest commits + just update + # checkout dev branch + just checkout + # build images and start containers + just upd + # visit Grafana + just stop + +For more info on the commands please refer to `justfile`. + +## Environment + +This setup uses `publicnode.com` as default L1, and the environment is configured in `publicnode.env`. +To use different RPC servers or ports, you can copy the file and make modifications. Then run: + + just up myenv.env + just down myenv.env + +or change the default in the `justfile. + +## Services and observability + +The following services are provided: + + http://localhost:3000 + +Default credentials are `admin:admin` and you should change that if you plan to +use this instance over longer time. + +## Storage + +The data is stored in current directory `./datadirs`, but you can modify the +`volume` mapping in `docker-compose.yml` to use a different volume. + +## Caveats + +The port numbers are fixed, so it would not be possible to run more than one +instance on a machine at the same time. Please bear this in mind when running +an instance for longer time. You can check if ports are in use with `docker +ps`. + +## Bugs and development + +Everything is orchestrated from `justfile`. Feel free to edit and submit PRs. 
diff --git a/kona/docker/recipes/kona-node-dev/compose.yaml b/rust/kona/docker/recipes/kona-node-dev/compose.yaml similarity index 100% rename from kona/docker/recipes/kona-node-dev/compose.yaml rename to rust/kona/docker/recipes/kona-node-dev/compose.yaml diff --git a/kona/docker/recipes/kona-node-dev/default.env b/rust/kona/docker/recipes/kona-node-dev/default.env similarity index 100% rename from kona/docker/recipes/kona-node-dev/default.env rename to rust/kona/docker/recipes/kona-node-dev/default.env diff --git a/kona/docker/recipes/kona-node-dev/generate-jwt.sh b/rust/kona/docker/recipes/kona-node-dev/generate-jwt.sh similarity index 100% rename from kona/docker/recipes/kona-node-dev/generate-jwt.sh rename to rust/kona/docker/recipes/kona-node-dev/generate-jwt.sh diff --git a/kona/docker/recipes/kona-node-dev/grafana/dashboards/dashboard.yml b/rust/kona/docker/recipes/kona-node-dev/grafana/dashboards/dashboard.yml similarity index 100% rename from kona/docker/recipes/kona-node-dev/grafana/dashboards/dashboard.yml rename to rust/kona/docker/recipes/kona-node-dev/grafana/dashboards/dashboard.yml diff --git a/kona/docker/recipes/kona-node-dev/grafana/dashboards/overview.json b/rust/kona/docker/recipes/kona-node-dev/grafana/dashboards/overview.json similarity index 100% rename from kona/docker/recipes/kona-node-dev/grafana/dashboards/overview.json rename to rust/kona/docker/recipes/kona-node-dev/grafana/dashboards/overview.json diff --git a/kona/docker/recipes/kona-node-dev/grafana/datasources/prometheus.yml b/rust/kona/docker/recipes/kona-node-dev/grafana/datasources/prometheus.yml similarity index 100% rename from kona/docker/recipes/kona-node-dev/grafana/datasources/prometheus.yml rename to rust/kona/docker/recipes/kona-node-dev/grafana/datasources/prometheus.yml diff --git a/kona/docker/recipes/kona-node-dev/justfile b/rust/kona/docker/recipes/kona-node-dev/justfile similarity index 100% rename from kona/docker/recipes/kona-node-dev/justfile rename to 
rust/kona/docker/recipes/kona-node-dev/justfile diff --git a/kona/docker/recipes/kona-node-dev/kona-node/bootstores/sepolia.json b/rust/kona/docker/recipes/kona-node-dev/kona-node/bootstores/sepolia.json similarity index 100% rename from kona/docker/recipes/kona-node-dev/kona-node/bootstores/sepolia.json rename to rust/kona/docker/recipes/kona-node-dev/kona-node/bootstores/sepolia.json diff --git a/kona/docker/recipes/kona-node-dev/kona-node/kona-node.dockerfile b/rust/kona/docker/recipes/kona-node-dev/kona-node/kona-node.dockerfile similarity index 100% rename from kona/docker/recipes/kona-node-dev/kona-node/kona-node.dockerfile rename to rust/kona/docker/recipes/kona-node-dev/kona-node/kona-node.dockerfile diff --git a/kona/docker/recipes/kona-node-dev/op-reth/op-reth.dockerfile b/rust/kona/docker/recipes/kona-node-dev/op-reth/op-reth.dockerfile similarity index 100% rename from kona/docker/recipes/kona-node-dev/op-reth/op-reth.dockerfile rename to rust/kona/docker/recipes/kona-node-dev/op-reth/op-reth.dockerfile diff --git a/kona/docker/recipes/kona-node-dev/prometheus/prometheus.yml b/rust/kona/docker/recipes/kona-node-dev/prometheus/prometheus.yml similarity index 100% rename from kona/docker/recipes/kona-node-dev/prometheus/prometheus.yml rename to rust/kona/docker/recipes/kona-node-dev/prometheus/prometheus.yml diff --git a/kona/docker/recipes/kona-node-dev/publicnode.env b/rust/kona/docker/recipes/kona-node-dev/publicnode.env similarity index 100% rename from kona/docker/recipes/kona-node-dev/publicnode.env rename to rust/kona/docker/recipes/kona-node-dev/publicnode.env diff --git a/kona/docker/recipes/kona-node/.gitignore b/rust/kona/docker/recipes/kona-node/.gitignore similarity index 100% rename from kona/docker/recipes/kona-node/.gitignore rename to rust/kona/docker/recipes/kona-node/.gitignore diff --git a/rust/kona/docker/recipes/kona-node/README.md b/rust/kona/docker/recipes/kona-node/README.md new file mode 100644 index 00000000000..b3c9042ddcd --- 
/dev/null +++ b/rust/kona/docker/recipes/kona-node/README.md @@ -0,0 +1,88 @@ +# `kona-node` recipe + +> [!WARNING] +> +> `kona-node` is in active development, and this recipe is subject to frequent change (and may not work!) For the time +> being, it is intended to be used for development purposes. Please [file an issue][new-issue] if you have any problems +> during development. + +This directory contains a simple `docker-compose` setup for `kona-node` and `op-reth`, including example Grafana +dashboards and a default Prometheus configuration. + +By default, this recipe is configured to sync the [`OP Sepolia`][op-sepolia] L2. + +## Usage + +### Running + +An L1 Execution Client RPC and L1 Beacon API endpoint must be configured in your environment. The `L1_PROVIDER_RPC` and +`L1_BEACON_API` environment variables can be set in [`cfg.env`](./cfg.env). + +Once these two environment variables are set, the environment can be spun up and shut down as follows: + +```sh +# Start `kona-node`, `op-reth`, and `grafana` + `prometheus` +just up + +# Shutdown the docker compose environment +just down + +# Restart the docker compose environment +just restart +``` + +### Grafana + +The grafana instance can be accessed at `http://localhost:3000` in your browser. The username and password, by default, +are both `admin`. + +#### Adding a new visualization + +The `kona-node` dashboard is provisioned within the grafana instance by default. A new visualization can be added to the +dashboard by navigating to the `Kona Node` dashboard, and then clicking `Add` > `Visualization` in the top right. + +Once your visualization has been added, click `Share` > `Export` (tab), and toggle "Export for sharing externally" on. +Then, copy the JSON, and replace the contents of [`overview.json`](./grafana/dashboards/overview.json) +before making a PR. 
+ +## Default Ports + +| Port | Service | +|---------|-----------------------------| +| `9223` | `kona-node` discovery | +| `9002` | `kona-node` metrics | +| `5060` | `kona-node` RPC | +| `30303` | `op-reth` discovery | +| `9001` | `op-reth` metrics | +| `8545` | `op-reth` RPC | +| `8551` | `op-reth` engine | +| `9090` | `prometheus` metrics server | +| `3000` | `grafana` dashboard UI | + +## Configuration + +### Adjusting host ports + +Host ports for both `op-reth` and `kona-node` can be configured in [`cfg.env`](./cfg.env). + +### Syncing a different OP Stack chain + +To adjust the chain that the node is syncing, you must modify the `docker-compose.yml` file to specify the desired +network parameters. Specifically: +1. Ensure `L1_PROVIDER_RPC` and `L1_BEACON_API` are set to L1 clients that represent the settlement layer of the L2. +1. `op-reth` + - `--chain` must specify the desired chain. + - `--rollup.sequencer-http` must specify the sequencer endpoint. +1. `kona-node` + - `--chain` must specify the chain ID of the desired chain. + +### Adjusting log filters + +Log filters can be adjusted by setting the `RUST_LOG` environment variable. This environment variable will be forwarded +to the `kona-node` container's entrypoint. 
+ +Example: `export RUST_LOG=engine_builder=trace,runtime=debug` + +[op-sepolia]: https://sepolia-optimism.etherscan.io +[op-reth]: https://github.com/paradigmxyz/reth +[new-issue]: https://github.com/ethereum-optimism/optimism/issues/new diff --git a/kona/docker/recipes/kona-node/cfg.env b/rust/kona/docker/recipes/kona-node/cfg.env similarity index 98% rename from kona/docker/recipes/kona-node/cfg.env rename to rust/kona/docker/recipes/kona-node/cfg.env index 18e11a84432..8ebf0b37c0d 100644 --- a/kona/docker/recipes/kona-node/cfg.env +++ b/rust/kona/docker/recipes/kona-node/cfg.env @@ -53,4 +53,4 @@ PROMETHEUS_PORT= # docker cluster name # ####################### # (default: kona-node) -CLUSTER_NAME= \ No newline at end of file +CLUSTER_NAME= diff --git a/kona/docker/recipes/kona-node/docker-compose.yaml b/rust/kona/docker/recipes/kona-node/docker-compose.yaml similarity index 100% rename from kona/docker/recipes/kona-node/docker-compose.yaml rename to rust/kona/docker/recipes/kona-node/docker-compose.yaml diff --git a/kona/docker/recipes/kona-node/generate-jwt.sh b/rust/kona/docker/recipes/kona-node/generate-jwt.sh similarity index 100% rename from kona/docker/recipes/kona-node/generate-jwt.sh rename to rust/kona/docker/recipes/kona-node/generate-jwt.sh diff --git a/kona/docker/recipes/kona-node/grafana/dashboards/dashboard.yml b/rust/kona/docker/recipes/kona-node/grafana/dashboards/dashboard.yml similarity index 100% rename from kona/docker/recipes/kona-node/grafana/dashboards/dashboard.yml rename to rust/kona/docker/recipes/kona-node/grafana/dashboards/dashboard.yml diff --git a/kona/docker/recipes/kona-node/grafana/dashboards/overview.json b/rust/kona/docker/recipes/kona-node/grafana/dashboards/overview.json similarity index 100% rename from kona/docker/recipes/kona-node/grafana/dashboards/overview.json rename to rust/kona/docker/recipes/kona-node/grafana/dashboards/overview.json diff --git a/kona/docker/recipes/kona-node/grafana/datasources/prometheus.yml 
b/rust/kona/docker/recipes/kona-node/grafana/datasources/prometheus.yml similarity index 100% rename from kona/docker/recipes/kona-node/grafana/datasources/prometheus.yml rename to rust/kona/docker/recipes/kona-node/grafana/datasources/prometheus.yml diff --git a/kona/docker/recipes/kona-node/justfile b/rust/kona/docker/recipes/kona-node/justfile similarity index 100% rename from kona/docker/recipes/kona-node/justfile rename to rust/kona/docker/recipes/kona-node/justfile diff --git a/kona/docker/recipes/kona-node/prometheus/prometheus.yml b/rust/kona/docker/recipes/kona-node/prometheus/prometheus.yml similarity index 100% rename from kona/docker/recipes/kona-node/prometheus/prometheus.yml rename to rust/kona/docker/recipes/kona-node/prometheus/prometheus.yml diff --git a/kona/docker/recipes/kona-supervisor/grafana/dashboards/dashboard.yml b/rust/kona/docker/recipes/kona-supervisor/grafana/dashboards/dashboard.yml similarity index 100% rename from kona/docker/recipes/kona-supervisor/grafana/dashboards/dashboard.yml rename to rust/kona/docker/recipes/kona-supervisor/grafana/dashboards/dashboard.yml diff --git a/kona/docker/recipes/kona-supervisor/grafana/dashboards/kona-supervisor.json b/rust/kona/docker/recipes/kona-supervisor/grafana/dashboards/kona-supervisor.json similarity index 100% rename from kona/docker/recipes/kona-supervisor/grafana/dashboards/kona-supervisor.json rename to rust/kona/docker/recipes/kona-supervisor/grafana/dashboards/kona-supervisor.json diff --git a/rust/kona/examples/README.md b/rust/kona/examples/README.md new file mode 100644 index 00000000000..69983405bf4 --- /dev/null +++ b/rust/kona/examples/README.md @@ -0,0 +1,17 @@ +## Examples + +These examples demonstrate how to work with kona crates. +Some examples are isolated services broken out from OP Stack components. + +To run an example, use the command `cargo run -p `. + +If you have an idea for a new example, [open an issue][issue]. 
+Otherwise if you already have an example you'd like to add, open a PR! + +#### Discovery + + + + + +[issue]: https://github.com/ethereum-optimism/optimism/issues/new diff --git a/kona/examples/discovery/Cargo.toml b/rust/kona/examples/discovery/Cargo.toml similarity index 100% rename from kona/examples/discovery/Cargo.toml rename to rust/kona/examples/discovery/Cargo.toml diff --git a/kona/examples/discovery/src/main.rs b/rust/kona/examples/discovery/src/main.rs similarity index 100% rename from kona/examples/discovery/src/main.rs rename to rust/kona/examples/discovery/src/main.rs diff --git a/kona/examples/execution-fixture/Cargo.toml b/rust/kona/examples/execution-fixture/Cargo.toml similarity index 100% rename from kona/examples/execution-fixture/Cargo.toml rename to rust/kona/examples/execution-fixture/Cargo.toml diff --git a/kona/examples/execution-fixture/src/main.rs b/rust/kona/examples/execution-fixture/src/main.rs similarity index 100% rename from kona/examples/execution-fixture/src/main.rs rename to rust/kona/examples/execution-fixture/src/main.rs diff --git a/kona/examples/gossip/Cargo.toml b/rust/kona/examples/gossip/Cargo.toml similarity index 100% rename from kona/examples/gossip/Cargo.toml rename to rust/kona/examples/gossip/Cargo.toml diff --git a/kona/examples/gossip/src/main.rs b/rust/kona/examples/gossip/src/main.rs similarity index 100% rename from kona/examples/gossip/src/main.rs rename to rust/kona/examples/gossip/src/main.rs diff --git a/rust/kona/justfile b/rust/kona/justfile new file mode 100644 index 00000000000..b9110a3f7cf --- /dev/null +++ b/rust/kona/justfile @@ -0,0 +1,154 @@ + +# E2e integration tests for kona. 
+import "./tests/justfile" +# Builds docker images for kona +import "./docker/apps/justfile" + +KONA_ROOT := source_directory() + +set positional-arguments +alias t := tests +alias la := lint-all +alias l := lint-native +alias lint := lint-native +alias f := fmt-native-fix +alias b := build-native +alias h := hack + +# default recipe to display help information +default: + @just --list + +# Build the rollup node in a single command. +build-node: + cargo build --release --bin kona-node + +# Build the supervisor +build-supervisor: + cargo build --release --bin kona-supervisor + +# Run all tests (excluding online tests) +tests: test test-docs + +# Test for the native target with all features. By default, excludes online tests. +test *args="-E '!test(test_online)'": + cargo nextest run --release --workspace --all-features {{args}} + just test-custom-embeds + +# Run all online tests +test-online: + just test "-E 'test(test_online)'" + +# Test custom embedded chain configuration functionality +test-custom-embeds: + cargo test --release --package kona-registry custom_chain_is_loaded_when_enabled \ + --config 'env.KONA_CUSTOM_CONFIGS="true"' \ + --config "env.KONA_CUSTOM_CONFIGS_DIR=\"{{justfile_directory()}}/crates/protocol/registry/tests/fixtures/custom\"" \ + --config 'env.KONA_CUSTOM_CONFIGS_TEST="true"' + +# Runs the tests with llvm-cov +llvm-cov-tests: + #!/usr/bin/env bash + # collect coverage of `just test` and `just test-custom-embeds` + cargo llvm-cov nextest --no-report --locked --workspace \ + --all-features \ + --exclude kona-node --exclude kona-p2p --exclude kona-sources \ + --ignore-run-fail -E '!test(test_online)' + + cargo llvm-cov nextest --no-report --locked \ + --all-features \ + --ignore-run-fail \ + --package kona-registry \ + -E 'test(custom_chain_is_loaded_when_enabled)' \ + --config 'env.KONA_CUSTOM_CONFIGS="true"' \ + --config "env.KONA_CUSTOM_CONFIGS_DIR=\"{{justfile_directory()}}/crates/protocol/registry/tests/fixtures/custom\"" \ + --config 
'env.KONA_CUSTOM_CONFIGS_TEST="true"' + + cargo llvm-cov report --lcov --output-path lcov.info + +# Runs benchmarks +benches: + cargo bench --no-run --workspace --features test-utils --exclude example-gossip --exclude example-discovery + +# Lint the workspace for all available targets +lint-all: lint-native lint-cannon lint-asterisc lint-docs lint-typos + +# Check spelling with typos (`cargo install typos-cli`) +lint-typos: + typos + +# Runs `cargo hack check` against the workspace +hack: + cargo hack check --feature-powerset --no-dev-deps + +# Fixes the formatting of the workspace +fmt-native-fix: + cargo +nightly fmt --all + +# Check the formatting of the workspace +fmt-native-check: + cargo +nightly fmt --all -- --check + +# Lint the workspace +lint-native: fmt-native-check lint-docs + cargo clippy --workspace --all-features --all-targets -- -D warnings + +# Lint the workspace (mips arch). Currently, only the `kona-std-fpvm` crate is linted for the `cannon` target, as it is the only crate with architecture-specific code. +lint-cannon: + docker run \ + --rm \ + -e RUSTUP_TOOLCHAIN=nightly \ + -v {{KONA_ROOT}}/../:/workdir \ + -w="/workdir" \ + ghcr.io/op-rs/kona/cannon-builder:0.3.0 cargo clippy -p kona-std-fpvm --all-features -Zbuild-std=core,alloc -- -D warnings + +# Lint the workspace (risc-v arch). Currently, only the `kona-std-fpvm` crate is linted for the `asterisc` target, as it is the only crate with architecture-specific code. 
+lint-asterisc: + docker run \ + --rm \ + -e RUSTUP_TOOLCHAIN=nightly \ + -v {{KONA_ROOT}}/../:/workdir \ + -w="/workdir" \ + ghcr.io/op-rs/kona/asterisc-builder:0.3.0 cargo clippy -p kona-std-fpvm --all-features -Zbuild-std=core,alloc -- -D warnings + +# Lint the Rust documentation +lint-docs: + RUSTDOCFLAGS="-D warnings" cargo doc --workspace --no-deps --document-private-items + +# Test the Rust documentation +test-docs: + cargo test --doc --workspace --locked + +# Build for the native target +build-native *args='': + #!/usr/bin/env bash + cargo build --workspace $@ + +# Build `kona-client` for the `cannon` target. +build-cannon-client: + docker run \ + --rm \ + -v {{KONA_ROOT}}/../:/workdir \ + -w="/workdir" \ + ghcr.io/op-rs/kona/cannon-builder:0.3.0 cargo build -Zbuild-std=core,alloc -p kona-client --bin kona-client --profile release-client-lto + +# Build `kona-client` for the `asterisc` target. +build-asterisc-client: + docker run \ + --rm \ + -v {{KONA_ROOT}}/../:/workdir \ + -w="/workdir" \ + ghcr.io/op-rs/kona/asterisc-builder:0.3.0 cargo build -Zbuild-std=core,alloc -p kona-client --bin kona-client --profile release-client-lto + +# Check for unused dependencies in the crate graph. 
+check-udeps: + cargo +nightly udeps --release --workspace --all-features --all-targets + + +# Updates the `superchain-registry` git submodule source +source-registry: + @just --justfile ./crates/protocol/registry/justfile source + +# Generate file bindings for super-registry +bind-registry: + @just --justfile ./crates/protocol/registry/justfile bind diff --git a/kona/lychee.toml b/rust/kona/lychee.toml similarity index 90% rename from kona/lychee.toml rename to rust/kona/lychee.toml index 388c842084c..b55113e7b14 100644 --- a/kona/lychee.toml +++ b/rust/kona/lychee.toml @@ -12,7 +12,7 @@ accept = [200, 403] exclude = [ 'foo.bar', 'localhost', - '^https://github\.com/op-rs/kona/pull/', + '^https://github\.com/ethereum-optimism/optimism/pull/', '^https://www\.intel\.com/content/www/us/en/developer/tools/trust-domain-extensions/documentation\.html', 'https://sepolia-optimism\.etherscan\.io' ] diff --git a/kona/release.toml b/rust/kona/release.toml similarity index 100% rename from kona/release.toml rename to rust/kona/release.toml diff --git a/kona/tests/.gitignore b/rust/kona/tests/.gitignore similarity index 100% rename from kona/tests/.gitignore rename to rust/kona/tests/.gitignore diff --git a/kona/tests/README.md b/rust/kona/tests/README.md similarity index 100% rename from kona/tests/README.md rename to rust/kona/tests/README.md diff --git a/kona/tests/justfile b/rust/kona/tests/justfile similarity index 100% rename from kona/tests/justfile rename to rust/kona/tests/justfile diff --git a/kona/tests/node/common/conductor_test.go b/rust/kona/tests/node/common/conductor_test.go similarity index 98% rename from kona/tests/node/common/conductor_test.go rename to rust/kona/tests/node/common/conductor_test.go index a96a12a8325..a414d5946a1 100644 --- a/kona/tests/node/common/conductor_test.go +++ b/rust/kona/tests/node/common/conductor_test.go @@ -7,12 +7,12 @@ import ( "testing" "time" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" 
"github.com/ethereum-optimism/optimism/op-conductor/consensus" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-devstack/stack" "github.com/ethereum-optimism/optimism/op-service/testlog" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/ethereum/go-ethereum/log" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/node/common/engine_test.go b/rust/kona/tests/node/common/engine_test.go similarity index 93% rename from kona/tests/node/common/engine_test.go rename to rust/kona/tests/node/common/engine_test.go index 0820072f1aa..e0fb99eed37 100644 --- a/kona/tests/node/common/engine_test.go +++ b/rust/kona/tests/node/common/engine_test.go @@ -4,10 +4,10 @@ import ( "sync" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/stretchr/testify/require" ) diff --git a/rust/kona/tests/node/common/init_test.go b/rust/kona/tests/node/common/init_test.go new file mode 100644 index 00000000000..1a15b2416d0 --- /dev/null +++ b/rust/kona/tests/node/common/init_test.go @@ -0,0 +1,17 @@ +package node + +import ( + "fmt" + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + config := node_utils.ParseL2NodeConfigFromEnv() + + fmt.Printf("Running e2e tests with Config: %d\n", config) + presets.DoMain(m, node_utils.WithMixedOpKona(config)) +} diff --git a/kona/tests/node/common/p2p_test.go 
b/rust/kona/tests/node/common/p2p_test.go similarity index 98% rename from kona/tests/node/common/p2p_test.go rename to rust/kona/tests/node/common/p2p_test.go index 288836432fb..1d461efdca1 100644 --- a/kona/tests/node/common/p2p_test.go +++ b/rust/kona/tests/node/common/p2p_test.go @@ -4,11 +4,11 @@ import ( "fmt" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/apis" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/libp2p/go-libp2p/core/peer" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/node/common/rpc_test.go b/rust/kona/tests/node/common/rpc_test.go similarity index 98% rename from kona/tests/node/common/rpc_test.go rename to rust/kona/tests/node/common/rpc_test.go index 5d36a88e0fd..34720e92c8f 100644 --- a/kona/tests/node/common/rpc_test.go +++ b/rust/kona/tests/node/common/rpc_test.go @@ -4,11 +4,11 @@ import ( "sync" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-node/rollup" "github.com/ethereum-optimism/optimism/op-service/apis" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/node/common/sync_test.go b/rust/kona/tests/node/common/sync_test.go similarity index 95% rename from kona/tests/node/common/sync_test.go rename to rust/kona/tests/node/common/sync_test.go index de3aa0975d2..5db91b82a45 100644 --- a/kona/tests/node/common/sync_test.go +++ b/rust/kona/tests/node/common/sync_test.go @@ -3,10 +3,10 
@@ package node import ( "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) // Check that all the nodes in the network are synced to the local safe block and can catch up to the sequencer node. diff --git a/kona/tests/node/common/sync_ws_test.go b/rust/kona/tests/node/common/sync_ws_test.go similarity index 99% rename from kona/tests/node/common/sync_ws_test.go rename to rust/kona/tests/node/common/sync_ws_test.go index 74c74e76430..a0099a8a80b 100644 --- a/kona/tests/node/common/sync_ws_test.go +++ b/rust/kona/tests/node/common/sync_ws_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/node/common/tx_inclusion_test.go b/rust/kona/tests/node/common/tx_inclusion_test.go similarity index 95% rename from kona/tests/node/common/tx_inclusion_test.go rename to rust/kona/tests/node/common/tx_inclusion_test.go index adbc057dd5f..089d2de7ae2 100644 --- a/kona/tests/node/common/tx_inclusion_test.go +++ b/rust/kona/tests/node/common/tx_inclusion_test.go @@ -3,10 +3,10 @@ package node import ( "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" 
"github.com/ethereum-optimism/optimism/op-service/eth" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) func TestL2TransactionInclusion(gt *testing.T) { diff --git a/kona/tests/node/long-running/README.md b/rust/kona/tests/node/long-running/README.md similarity index 100% rename from kona/tests/node/long-running/README.md rename to rust/kona/tests/node/long-running/README.md diff --git a/rust/kona/tests/node/long-running/init_test.go b/rust/kona/tests/node/long-running/init_test.go new file mode 100644 index 00000000000..610f6bbd8f2 --- /dev/null +++ b/rust/kona/tests/node/long-running/init_test.go @@ -0,0 +1,32 @@ +package node + +import ( + "flag" + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" +) + +var ( + num_threads = flag.Int("num-threads", 10, "number of threads to use for the test") + percentageNewAccounts = flag.Int("percentage-new-accounts", 20, "percentage of new accounts to produce transactions for") + fundAmount = flag.Int("fund-amount", 10, "eth amount to fund each new account with") + initNumAccounts = flag.Int("init-num-accounts", 10, "initial number of accounts to fund") +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + flag.Parse() + + presets.DoMain(m, node_utils.WithMixedOpKona(node_utils.L2NodeConfig{ + OpSequencerNodesWithGeth: 0, + OpSequencerNodesWithReth: 0, + KonaSequencerNodesWithGeth: 1, + KonaSequencerNodesWithReth: 0, + OpNodesWithGeth: 1, + OpNodesWithReth: 1, + KonaNodesWithGeth: 1, + KonaNodesWithReth: 1, + })) +} diff --git a/kona/tests/node/long-running/tx_producer_test.go b/rust/kona/tests/node/long-running/tx_producer_test.go similarity index 98% rename from kona/tests/node/long-running/tx_producer_test.go rename to rust/kona/tests/node/long-running/tx_producer_test.go index 7a1b1f9932d..3b1417c80e5 100644 --- 
a/kona/tests/node/long-running/tx_producer_test.go +++ b/rust/kona/tests/node/long-running/tx_producer_test.go @@ -6,11 +6,11 @@ import ( "sync/atomic" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/txplan" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) // Define a global atomic counter for the number of transactions produced. diff --git a/rust/kona/tests/node/reorgs/init_test.go b/rust/kona/tests/node/reorgs/init_test.go new file mode 100644 index 00000000000..ceb5850b8f2 --- /dev/null +++ b/rust/kona/tests/node/reorgs/init_test.go @@ -0,0 +1,18 @@ +package reorgs + +import ( + "fmt" + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + l2Config := node_utils.ParseL2NodeConfigFromEnv() + + fmt.Printf("Running e2e reorg tests with Config: %d\n", l2Config) + + presets.DoMain(m, node_utils.WithMixedWithTestSequencer(l2Config)) +} diff --git a/kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go b/rust/kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go similarity index 98% rename from kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go rename to rust/kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go index b40ac4584ff..366ed16504a 100644 --- a/kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go +++ b/rust/kona/tests/node/reorgs/l2_reorg_after_l1_reorgs_test.go @@ -4,7 +4,6 @@ import ( "testing" "time" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" 
"github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-devstack/stack" @@ -13,6 +12,7 @@ import ( "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" "github.com/ethereum-optimism/optimism/op-test-sequencer/sequencer/seqtypes" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/ethereum/go-ethereum/common" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/node/reorgs/l2_reorg_test.go b/rust/kona/tests/node/reorgs/l2_reorg_test.go similarity index 98% rename from kona/tests/node/reorgs/l2_reorg_test.go rename to rust/kona/tests/node/reorgs/l2_reorg_test.go index dcba40270dd..5e0a25e5b75 100644 --- a/kona/tests/node/reorgs/l2_reorg_test.go +++ b/rust/kona/tests/node/reorgs/l2_reorg_test.go @@ -4,13 +4,13 @@ import ( "fmt" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/txplan" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" "github.com/ethereum-optimism/optimism/op-test-sequencer/sequencer/seqtypes" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" "github.com/stretchr/testify/require" ) @@ -60,7 +60,6 @@ func TestL2Reorg(gt *testing.T) { } reorgFun := func() error { - // Stop the batcher out.L2Batcher.Stop() diff --git a/kona/tests/node/restart/conn_drop_test.go b/rust/kona/tests/node/restart/conn_drop_test.go similarity index 98% rename from kona/tests/node/restart/conn_drop_test.go rename to rust/kona/tests/node/restart/conn_drop_test.go index afd813cac36..41f25ff7b93 100644 --- a/kona/tests/node/restart/conn_drop_test.go +++ b/rust/kona/tests/node/restart/conn_drop_test.go @@ -4,10 +4,10 @@ 
import ( "fmt" "testing" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) // Ensure that kona-nodes reconnect to the sequencer and sync properly when the connection is dropped. diff --git a/rust/kona/tests/node/restart/init_test.go b/rust/kona/tests/node/restart/init_test.go new file mode 100644 index 00000000000..2d9b0e7cae2 --- /dev/null +++ b/rust/kona/tests/node/restart/init_test.go @@ -0,0 +1,21 @@ +package node_restart + +import ( + "fmt" + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + // Currently, the restart tests only support kona nodes. The op node based configs are not supported (because of req-resp sync incompatibility). 
+ config := node_utils.L2NodeConfig{ + KonaSequencerNodesWithGeth: 1, + KonaNodesWithGeth: 1, + } + + fmt.Printf("Running restart e2e tests with Config: %d\n", config) + presets.DoMain(m, node_utils.WithMixedOpKona(config)) +} diff --git a/kona/tests/node/restart/restart_test.go b/rust/kona/tests/node/restart/restart_test.go similarity index 97% rename from kona/tests/node/restart/restart_test.go rename to rust/kona/tests/node/restart/restart_test.go index 80d5670e093..f57af1ab4d3 100644 --- a/kona/tests/node/restart/restart_test.go +++ b/rust/kona/tests/node/restart/restart_test.go @@ -6,12 +6,12 @@ import ( "testing" "time" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-service/retry" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) // Ensure that kona-nodes reconnect to the sequencer and sync properly when the connection is dropped. 
diff --git a/kona/tests/node/restart/sequencer_restart_test.go b/rust/kona/tests/node/restart/sequencer_restart_test.go similarity index 97% rename from kona/tests/node/restart/sequencer_restart_test.go rename to rust/kona/tests/node/restart/sequencer_restart_test.go index eada3d04f1f..9a02bde6ec9 100644 --- a/kona/tests/node/restart/sequencer_restart_test.go +++ b/rust/kona/tests/node/restart/sequencer_restart_test.go @@ -5,11 +5,11 @@ import ( "testing" "time" - node_utils "github.com/ethereum-optimism/optimism/kona/tests/node/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/dsl" "github.com/ethereum-optimism/optimism/op-service/retry" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + node_utils "github.com/ethereum-optimism/optimism/rust/kona/tests/node/utils" ) func TestSequencerRestart(gt *testing.T) { diff --git a/kona/tests/node/utils/mixed_preset.go b/rust/kona/tests/node/utils/mixed_preset.go similarity index 100% rename from kona/tests/node/utils/mixed_preset.go rename to rust/kona/tests/node/utils/mixed_preset.go diff --git a/kona/tests/node/utils/mixed_preset_with_conductor.go b/rust/kona/tests/node/utils/mixed_preset_with_conductor.go similarity index 100% rename from kona/tests/node/utils/mixed_preset_with_conductor.go rename to rust/kona/tests/node/utils/mixed_preset_with_conductor.go diff --git a/kona/tests/node/utils/mod.go b/rust/kona/tests/node/utils/mod.go similarity index 100% rename from kona/tests/node/utils/mod.go rename to rust/kona/tests/node/utils/mod.go diff --git a/kona/tests/node/utils/test_sequencer_preset.go b/rust/kona/tests/node/utils/test_sequencer_preset.go similarity index 100% rename from kona/tests/node/utils/test_sequencer_preset.go rename to rust/kona/tests/node/utils/test_sequencer_preset.go diff --git a/kona/tests/node/utils/ws.go b/rust/kona/tests/node/utils/ws.go similarity index 100% rename from kona/tests/node/utils/ws.go rename 
to rust/kona/tests/node/utils/ws.go diff --git a/kona/tests/supervisor/l1reorg/init_test.go b/rust/kona/tests/supervisor/l1reorg/init_test.go similarity index 100% rename from kona/tests/supervisor/l1reorg/init_test.go rename to rust/kona/tests/supervisor/l1reorg/init_test.go diff --git a/kona/tests/supervisor/l1reorg/reorg_test.go b/rust/kona/tests/supervisor/l1reorg/reorg_test.go similarity index 98% rename from kona/tests/supervisor/l1reorg/reorg_test.go rename to rust/kona/tests/supervisor/l1reorg/reorg_test.go index 5321753b52b..57d09359b5f 100644 --- a/kona/tests/supervisor/l1reorg/reorg_test.go +++ b/rust/kona/tests/supervisor/l1reorg/reorg_test.go @@ -4,11 +4,11 @@ import ( "testing" "time" - "github.com/ethereum-optimism/optimism/kona/tests/supervisor/utils" "github.com/ethereum-optimism/optimism/op-devstack/devtest" "github.com/ethereum-optimism/optimism/op-devstack/presets" "github.com/ethereum-optimism/optimism/op-service/eth" "github.com/ethereum-optimism/optimism/op-supervisor/supervisor/types" + "github.com/ethereum-optimism/optimism/rust/kona/tests/supervisor/utils" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) diff --git a/kona/tests/supervisor/l2reorg/init_exec_msg_test.go b/rust/kona/tests/supervisor/l2reorg/init_exec_msg_test.go similarity index 100% rename from kona/tests/supervisor/l2reorg/init_exec_msg_test.go rename to rust/kona/tests/supervisor/l2reorg/init_exec_msg_test.go diff --git a/kona/tests/supervisor/l2reorg/init_test.go b/rust/kona/tests/supervisor/l2reorg/init_test.go similarity index 100% rename from kona/tests/supervisor/l2reorg/init_test.go rename to rust/kona/tests/supervisor/l2reorg/init_test.go diff --git a/kona/tests/supervisor/l2reorg/invalid_exec_msgs_test.go b/rust/kona/tests/supervisor/l2reorg/invalid_exec_msgs_test.go similarity index 100% rename from kona/tests/supervisor/l2reorg/invalid_exec_msgs_test.go rename to rust/kona/tests/supervisor/l2reorg/invalid_exec_msgs_test.go diff --git 
a/kona/tests/supervisor/l2reorg/unsafe_head_test.go b/rust/kona/tests/supervisor/l2reorg/unsafe_head_test.go similarity index 100% rename from kona/tests/supervisor/l2reorg/unsafe_head_test.go rename to rust/kona/tests/supervisor/l2reorg/unsafe_head_test.go diff --git a/rust/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go b/rust/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go new file mode 100644 index 00000000000..c2ebd6ccd59 --- /dev/null +++ b/rust/kona/tests/supervisor/l2reorgAfterL1reorg/init_test.go @@ -0,0 +1,14 @@ +package sysgo + +import ( + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + spresets "github.com/ethereum-optimism/optimism/rust/kona/tests/supervisor/presets" +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + // Other setups may be added here, hydrated from the same orchestrator + presets.DoMain(m, spresets.WithSimpleInteropMinimal()) +} diff --git a/kona/tests/supervisor/l2reorgAfterL1reorg/reorg_test.go b/rust/kona/tests/supervisor/l2reorgAfterL1reorg/reorg_test.go similarity index 100% rename from kona/tests/supervisor/l2reorgAfterL1reorg/reorg_test.go rename to rust/kona/tests/supervisor/l2reorgAfterL1reorg/reorg_test.go diff --git a/kona/tests/supervisor/message/init_test.go b/rust/kona/tests/supervisor/message/init_test.go similarity index 100% rename from kona/tests/supervisor/message/init_test.go rename to rust/kona/tests/supervisor/message/init_test.go diff --git a/kona/tests/supervisor/message/interop_contract_test.go b/rust/kona/tests/supervisor/message/interop_contract_test.go similarity index 100% rename from kona/tests/supervisor/message/interop_contract_test.go rename to rust/kona/tests/supervisor/message/interop_contract_test.go diff --git a/kona/tests/supervisor/message/interop_happy_tx_test.go b/rust/kona/tests/supervisor/message/interop_happy_tx_test.go similarity index 100% rename from 
kona/tests/supervisor/message/interop_happy_tx_test.go rename to rust/kona/tests/supervisor/message/interop_happy_tx_test.go diff --git a/kona/tests/supervisor/message/interop_msg_test.go b/rust/kona/tests/supervisor/message/interop_msg_test.go similarity index 100% rename from kona/tests/supervisor/message/interop_msg_test.go rename to rust/kona/tests/supervisor/message/interop_msg_test.go diff --git a/rust/kona/tests/supervisor/pre_interop/init_test.go b/rust/kona/tests/supervisor/pre_interop/init_test.go new file mode 100644 index 00000000000..c173e8ce9e7 --- /dev/null +++ b/rust/kona/tests/supervisor/pre_interop/init_test.go @@ -0,0 +1,20 @@ +package preinterop + +// todo: add tests +import ( + "testing" + + "github.com/ethereum-optimism/optimism/op-devstack/presets" + spresets "github.com/ethereum-optimism/optimism/rust/kona/tests/supervisor/presets" +) + +// TestMain creates the test-setups against the shared backend +func TestMain(m *testing.M) { + // sleep to ensure the backend is ready + + presets.DoMain(m, + spresets.WithSimpleInteropMinimal(), + presets.WithSuggestedInteropActivationOffset(30), + presets.WithInteropNotAtGenesis()) + +} diff --git a/kona/tests/supervisor/pre_interop/post_test.go b/rust/kona/tests/supervisor/pre_interop/post_test.go similarity index 100% rename from kona/tests/supervisor/pre_interop/post_test.go rename to rust/kona/tests/supervisor/pre_interop/post_test.go diff --git a/kona/tests/supervisor/pre_interop/pre_test.go b/rust/kona/tests/supervisor/pre_interop/pre_test.go similarity index 100% rename from kona/tests/supervisor/pre_interop/pre_test.go rename to rust/kona/tests/supervisor/pre_interop/pre_test.go diff --git a/kona/tests/supervisor/presets/interop_minimal.go b/rust/kona/tests/supervisor/presets/interop_minimal.go similarity index 100% rename from kona/tests/supervisor/presets/interop_minimal.go rename to rust/kona/tests/supervisor/presets/interop_minimal.go diff --git a/kona/tests/supervisor/rpc/init_test.go 
b/rust/kona/tests/supervisor/rpc/init_test.go similarity index 100% rename from kona/tests/supervisor/rpc/init_test.go rename to rust/kona/tests/supervisor/rpc/init_test.go diff --git a/kona/tests/supervisor/rpc/rpc_test.go b/rust/kona/tests/supervisor/rpc/rpc_test.go similarity index 100% rename from kona/tests/supervisor/rpc/rpc_test.go rename to rust/kona/tests/supervisor/rpc/rpc_test.go diff --git a/kona/tests/supervisor/sync/init_test.go b/rust/kona/tests/supervisor/sync/init_test.go similarity index 100% rename from kona/tests/supervisor/sync/init_test.go rename to rust/kona/tests/supervisor/sync/init_test.go diff --git a/kona/tests/supervisor/sync/resync_test.go b/rust/kona/tests/supervisor/sync/resync_test.go similarity index 100% rename from kona/tests/supervisor/sync/resync_test.go rename to rust/kona/tests/supervisor/sync/resync_test.go diff --git a/kona/tests/supervisor/sync/sync_test.go b/rust/kona/tests/supervisor/sync/sync_test.go similarity index 100% rename from kona/tests/supervisor/sync/sync_test.go rename to rust/kona/tests/supervisor/sync/sync_test.go diff --git a/kona/tests/supervisor/utils/builder.go b/rust/kona/tests/supervisor/utils/builder.go similarity index 100% rename from kona/tests/supervisor/utils/builder.go rename to rust/kona/tests/supervisor/utils/builder.go diff --git a/kona/tests/supervisor/utils/pos.go b/rust/kona/tests/supervisor/utils/pos.go similarity index 100% rename from kona/tests/supervisor/utils/pos.go rename to rust/kona/tests/supervisor/utils/pos.go diff --git a/kona/tests/supervisor/utils/reorg.go b/rust/kona/tests/supervisor/utils/reorg.go similarity index 100% rename from kona/tests/supervisor/utils/reorg.go rename to rust/kona/tests/supervisor/utils/reorg.go diff --git a/rust/kona/version.json b/rust/kona/version.json new file mode 100644 index 00000000000..8d8e7807a20 --- /dev/null +++ b/rust/kona/version.json @@ -0,0 +1,5 @@ +{ + "version": "1.2.7", + "prestateHash": 
"0x0323914d3050e80c3d09da528be54794fde60cd26849cd3410dde0da7cd7d4fa", + "interopPrestateHash": "0x03f03018773fae0603f7c110ef1defa8d19b601b32ee530f9951987baec435b0" +} \ No newline at end of file diff --git a/op-alloy/.config/nextest.toml b/rust/op-alloy/.config/nextest.toml similarity index 100% rename from op-alloy/.config/nextest.toml rename to rust/op-alloy/.config/nextest.toml diff --git a/op-alloy/.config/zepter.yaml b/rust/op-alloy/.config/zepter.yaml similarity index 100% rename from op-alloy/.config/zepter.yaml rename to rust/op-alloy/.config/zepter.yaml diff --git a/op-alloy/.gitignore b/rust/op-alloy/.gitignore similarity index 100% rename from op-alloy/.gitignore rename to rust/op-alloy/.gitignore diff --git a/op-alloy/CHANGELOG.md b/rust/op-alloy/CHANGELOG.md similarity index 100% rename from op-alloy/CHANGELOG.md rename to rust/op-alloy/CHANGELOG.md diff --git a/op-alloy/CONTRIBUTING.md b/rust/op-alloy/CONTRIBUTING.md similarity index 100% rename from op-alloy/CONTRIBUTING.md rename to rust/op-alloy/CONTRIBUTING.md diff --git a/op-alloy/FUNDING.json b/rust/op-alloy/FUNDING.json similarity index 100% rename from op-alloy/FUNDING.json rename to rust/op-alloy/FUNDING.json diff --git a/rust/op-alloy/Justfile b/rust/op-alloy/Justfile new file mode 100644 index 00000000000..b0625fbf4db --- /dev/null +++ b/rust/op-alloy/Justfile @@ -0,0 +1,68 @@ +set positional-arguments +alias t := tests +alias l := lint +alias f := fmtf +alias b := build +alias h := hack +alias c := check + +# default recipe to display help information +default: + @just --list + +# Run all tests +tests: test test-docs + +# Test for the native target with optional flags. 
+test *args='': + cargo nextest run --workspace {{args}} + +# Test the Rust documentation +test-docs: + cargo test --doc --all + +# Lint the workspace for all available targets +lint: lint-native lint-docs + +# Lint the workspace +lint-native: fmt-check lint-docs clippy + +# Checks the workspace with clippy +clippy: + cargo +stable clippy --workspace --all-features --all-targets -- -D warnings + +# Fix clippy warnings across the workspace +clippy-fix: + cargo +stable clippy --workspace --all-features --all-targets --fix --allow-staged --allow-dirty -- -D warnings + +# Check the formatting of the workspace +fmt-check: + cargo +nightly fmt --all -- --check + +# Lint the Rust documentation +lint-docs: + RUSTDOCFLAGS="-D warnings" cargo doc --all --no-deps --document-private-items + +# Fixes the formatting of the workspace +fmtf: + cargo +nightly fmt --all + +# Build for the native target +build *args='': + cargo build --workspace $@ + +# Checks the workspace with a cfg-check +check: + cargo +nightly check -Zcheck-cfg --workspace + +# Runs `cargo hack check` against the workspace +hack: + cargo hack check --feature-powerset --no-dev-deps --exclude op-alloy --workspace + +# Updates the git submodule source +source: + git submodule update --remote + +# Generate file bindings for super-registry +bind: + @just --justfile ./crates/registry/Justfile bind diff --git a/op-alloy/LICENSE-APACHE b/rust/op-alloy/LICENSE-APACHE similarity index 100% rename from op-alloy/LICENSE-APACHE rename to rust/op-alloy/LICENSE-APACHE diff --git a/op-alloy/LICENSE-MIT b/rust/op-alloy/LICENSE-MIT similarity index 100% rename from op-alloy/LICENSE-MIT rename to rust/op-alloy/LICENSE-MIT diff --git a/rust/op-alloy/README.md b/rust/op-alloy/README.md new file mode 100644 index 00000000000..0f7934af120 --- /dev/null +++ b/rust/op-alloy/README.md @@ -0,0 +1,97 @@ +# op-alloy + +Built on [Alloy][alloy], op-alloy aggregates the OP stack's unique primitives from [Maili][maili], +to the subset of L1 
types used by Optimistic rollups. + + +## Usage + +The following crates are provided by `op-alloy`: + +| Crate Name | Description / Purpose | Version | +|-------------|-----------------------------------------|---------| +| [op-alloy-consensus](https://crates.io/crates/op-alloy-consensus) | Handles consensus-related logic | [![version](https://img.shields.io/crates/v/op-alloy-consensus)](https://crates.io/crates/op-alloy-consensus) | +| [op-alloy-network](https://crates.io/crates/op-alloy-network) | Manages networking functionality | [![version](https://img.shields.io/crates/v/op-alloy-network)](https://crates.io/crates/op-alloy-network) | +| [op-alloy-rpc-jsonrpsee](https://crates.io/crates/op-alloy-rpc-jsonrpsee) | RPC implementation using `jsonrpsee` | [![version](https://img.shields.io/crates/v/op-alloy-rpc-jsonrpsee)](https://crates.io/crates/op-alloy-rpc-jsonrpsee) | +| [op-alloy-rpc-types-engine](https://crates.io/crates/op-alloy-rpc-types-engine) | Type definitions specific to RPC engine | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types-engine)](https://crates.io/crates/op-alloy-rpc-types-engine) | +| [op-alloy-rpc-types](https://crates.io/crates/op-alloy-rpc-types) | Shared types used across RPC components | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types)](https://crates.io/crates/op-alloy-rpc-types) | + + + +## Development Status + +`op-alloy` is currently in active development, and is not yet ready for use in production. + + +## Supported Rust Versions (MSRV) + +The current MSRV (minimum supported rust version) is 1.86. + +Unlike Alloy, op-alloy may use the latest stable release, +to benefit from the latest features. + +The MSRV is not increased automatically, and will be updated +only as part of a patch (pre-1.0) or minor (post-1.0) release. + + +## Contributing + +op-alloy is built by open source contributors like you, thank you for improving the project! 
+ +A [contributing guide][contributing] is available that sets guidelines for contributing. + +Pull requests will not be merged unless CI passes, so please ensure that your contribution follows the +linting rules and passes clippy. + + +## `no_std` + +op-alloy is intended to be `no_std` compatible, initially for use in [kona][kona]. + +The following crates support `no_std`. +Notice, provider crates do not support `no_std` compatibility. + + +| Crate Name | Description / Purpose | Version | +|----------------------------------------------------------|-----------------------------------------|---------| +| [`op-alloy-consensus`] | Handles consensus-related logic | [![version](https://img.shields.io/crates/v/op-alloy-consensus)](https://crates.io/crates/op-alloy-consensus) | +| [`op-alloy-rpc-types`] | Shared types used across RPC components | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types)](https://crates.io/crates/op-alloy-rpc-types) | +| [`op-alloy-rpc-types-engine`] | RPC types specific to the engine API | [![version](https://img.shields.io/crates/v/op-alloy-rpc-types-engine)](https://crates.io/crates/op-alloy-rpc-types-engine) | + + +If you would like to add no_std support to a crate, +please make sure to update [scripts/check_no_std.sh][check-no-std]. + + +## Credits + +op-alloy is inspired by the work of several teams and projects, most notably [the Alloy project][alloy]. + +This would not be possible without the hard work from open source contributors. Thank you. + + +## License + +Licensed under either of Apache License, Version +2.0 or MIT license at your option. + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in these crates by you, as defined in the Apache-2.0 license, +shall be dual licensed as above, without any additional terms or conditions. 
+ + + + +[check-no-std]: ./scripts/check_no_std.sh + +[maili]: https://github.com/op-rs/maili +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[alloy]: https://github.com/alloy-rs/alloy +[contributing]: https://alloy-rs.github.io/op-alloy + +[`op-alloy-consensus`]: https://crates.io/crates/op-alloy-consensus +[`op-alloy-network`]: https://crates.io/crates/op-alloy-network +[`op-alloy-rpc-jsonrpsee`]: https://crates.io/crates/op-alloy-rpc-jsonrpsee +[`op-alloy-rpc-types-engine`]: https://crates.io/crates/op-alloy-rpc-types-engine +[`op-alloy-rpc-types`]: https://crates.io/crates/op-alloy-rpc-types + diff --git a/op-alloy/SECURITY.md b/rust/op-alloy/SECURITY.md similarity index 100% rename from op-alloy/SECURITY.md rename to rust/op-alloy/SECURITY.md diff --git a/op-alloy/SNAPPY-LICENSE b/rust/op-alloy/SNAPPY-LICENSE similarity index 100% rename from op-alloy/SNAPPY-LICENSE rename to rust/op-alloy/SNAPPY-LICENSE diff --git a/rust/op-alloy/crates/consensus/Cargo.toml b/rust/op-alloy/crates/consensus/Cargo.toml new file mode 100644 index 00000000000..4fa43515e54 --- /dev/null +++ b/rust/op-alloy/crates/consensus/Cargo.toml @@ -0,0 +1,88 @@ +[package] +name = "op-alloy-consensus" +description = "Optimism alloy consensus types" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +authors = ["Alloy Contributors"] +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Alloy +alloy-rlp.workspace = true +alloy-eips.workspace = true +alloy-consensus.workspace = true +alloy-primitives = { workspace = true, features = ["rlp"] } + +# compat +alloy-network = { workspace = true, optional = true } +alloy-rpc-types-eth = { workspace = true, optional = true } + +# misc +thiserror.workspace = true +derive_more = { workspace = true, features = ["display"] 
} + +# arbitrary +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# serde +serde_with = { workspace = true, optional = true } +alloy-serde = { workspace = true, optional = true } +serde = { workspace = true, features = ["derive"], optional = true } + +[dev-dependencies] +rand.workspace = true +bincode = { workspace = true } +serde_json.workspace = true +alloy-signer.workspace = true +arbitrary = { workspace = true, features = ["derive"] } +alloy-primitives = { workspace = true, features = ["rand", "arbitrary"] } + +[features] +default = ["std"] +std = [ + "alloy-eips/std", + "alloy-consensus/std", + "derive_more/std", + "alloy-primitives/std", + "alloy-rlp/std", + "alloy-rpc-types-eth?/std", + "alloy-serde?/std", + "serde?/std", + "serde_with?/std", + "thiserror/std" +] +alloy-compat = ["serde", "dep:alloy-network", "dep:alloy-rpc-types-eth"] +k256 = ["alloy-primitives/k256", "alloy-consensus/k256"] +kzg = ["alloy-eips/kzg", "alloy-consensus/kzg", "std"] +arbitrary = [ + "std", + "dep:arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-primitives/rand", + "alloy-primitives/arbitrary", + "alloy-rpc-types-eth?/arbitrary", + "alloy-serde?/arbitrary" +] +serde = [ + "dep:serde", + "dep:alloy-serde", + "alloy-primitives/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-rpc-types-eth?/serde" +] +serde-bincode-compat = [ + "serde_with", + "alloy-consensus/serde-bincode-compat", + "alloy-eips/serde-bincode-compat", + "alloy-rpc-types-eth?/serde-bincode-compat" +] diff --git a/rust/op-alloy/crates/consensus/README.md b/rust/op-alloy/crates/consensus/README.md new file mode 100644 index 00000000000..8c5cda53e90 --- /dev/null +++ b/rust/op-alloy/crates/consensus/README.md @@ -0,0 +1,18 @@ +## `op-alloy-consensus` + +Optimism consensus interface. + +This crate contains constants, types, and functions for implementing Optimism EL consensus and communication. 
This +includes an extended `OpTxEnvelope` type with [deposit transactions][deposit], and receipts containing OP Stack +specific fields (`deposit_nonce` + `deposit_receipt_version`). + +In general a type belongs in this crate if it exists in the `alloy-consensus` crate, but was modified from the base Ethereum protocol in the OP Stack. +For consensus types that are not modified by the OP Stack, the `alloy-consensus` types should be used instead. + +[deposit]: https://specs.optimism.io/protocol/deposits.html + +### Provenance + +Much of this code was ported from [reth-primitives] as part of ongoing alloy migrations. + +[reth-primitives]: https://github.com/paradigmxyz/reth/tree/main/crates/primitives diff --git a/op-alloy/crates/consensus/src/alloy_compat.rs b/rust/op-alloy/crates/consensus/src/alloy_compat.rs similarity index 100% rename from op-alloy/crates/consensus/src/alloy_compat.rs rename to rust/op-alloy/crates/consensus/src/alloy_compat.rs diff --git a/op-alloy/crates/consensus/src/block.rs b/rust/op-alloy/crates/consensus/src/block.rs similarity index 100% rename from op-alloy/crates/consensus/src/block.rs rename to rust/op-alloy/crates/consensus/src/block.rs diff --git a/op-alloy/crates/consensus/src/eip1559.rs b/rust/op-alloy/crates/consensus/src/eip1559.rs similarity index 100% rename from op-alloy/crates/consensus/src/eip1559.rs rename to rust/op-alloy/crates/consensus/src/eip1559.rs diff --git a/rust/op-alloy/crates/consensus/src/interop.rs b/rust/op-alloy/crates/consensus/src/interop.rs new file mode 100644 index 00000000000..21d4ed95582 --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/interop.rs @@ -0,0 +1,95 @@ +//! Commonly used types for interop. + +use alloc::string::{String, ToString}; +use alloy_primitives::{Address, address}; +use core::str::FromStr; +use derive_more::Display; + +/// The address of the L2 cross chain inbox predeploy proxy. 
+pub const CROSS_L2_INBOX_ADDRESS: Address = address!("0x4200000000000000000000000000000000000022"); + +/// The safety level of a message. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Display)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "kebab-case"))] +pub enum SafetyLevel { + /// The message is finalized. + Finalized, + /// The message is safe across chains. + #[cfg_attr(feature = "serde", serde(rename = "safe"))] + CrossSafe, + /// The message is safe locally. + LocalSafe, + /// The message is unsafe across chains. + CrossUnsafe, + /// The message is unsafe locally. + #[cfg_attr(feature = "serde", serde(rename = "unsafe"))] + LocalUnsafe, + /// The message is invalid. + Invalid, +} + +impl FromStr for SafetyLevel { + type Err = SafetyLevelParseError; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "finalized" => Ok(Self::Finalized), + "safe" => Ok(Self::CrossSafe), + "local-safe" | "localsafe" => Ok(Self::LocalSafe), + "cross-unsafe" | "crossunsafe" => Ok(Self::CrossUnsafe), + "unsafe" => Ok(Self::LocalUnsafe), + "invalid" => Ok(Self::Invalid), + _ => Err(SafetyLevelParseError(s.to_string())), + } + } +} + +/// Error when parsing `SafetyLevel` from string. 
+#[derive(thiserror::Error, Debug)] +#[error("Invalid SafetyLevel, error: {0}")] +pub struct SafetyLevelParseError(pub String); + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + #[cfg(feature = "serde")] + fn test_safety_level_serde() { + let level = SafetyLevel::Finalized; + let json = serde_json::to_string(&level).unwrap(); + assert_eq!(json, r#""finalized""#); + + let level: SafetyLevel = serde_json::from_str(&json).unwrap(); + assert_eq!(level, SafetyLevel::Finalized); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_safety_level_fails() { + let json = r#""failed""#; + let level: Result = serde_json::from_str(json); + assert!(level.is_err()); + } + + #[test] + fn test_safety_level_from_str_valid() { + assert_eq!(SafetyLevel::from_str("finalized").unwrap(), SafetyLevel::Finalized); + assert_eq!(SafetyLevel::from_str("safe").unwrap(), SafetyLevel::CrossSafe); + assert_eq!(SafetyLevel::from_str("local-safe").unwrap(), SafetyLevel::LocalSafe); + assert_eq!(SafetyLevel::from_str("localsafe").unwrap(), SafetyLevel::LocalSafe); + assert_eq!(SafetyLevel::from_str("cross-unsafe").unwrap(), SafetyLevel::CrossUnsafe); + assert_eq!(SafetyLevel::from_str("crossunsafe").unwrap(), SafetyLevel::CrossUnsafe); + assert_eq!(SafetyLevel::from_str("unsafe").unwrap(), SafetyLevel::LocalUnsafe); + assert_eq!(SafetyLevel::from_str("invalid").unwrap(), SafetyLevel::Invalid); + } + + #[test] + fn test_safety_level_from_str_invalid() { + assert!(SafetyLevel::from_str("unknown").is_err()); + assert!(SafetyLevel::from_str("123").is_err()); + assert!(SafetyLevel::from_str("").is_err()); + assert!(SafetyLevel::from_str("safe ").is_err()); + } +} diff --git a/op-alloy/crates/consensus/src/lib.rs b/rust/op-alloy/crates/consensus/src/lib.rs similarity index 100% rename from op-alloy/crates/consensus/src/lib.rs rename to rust/op-alloy/crates/consensus/src/lib.rs diff --git a/op-alloy/crates/consensus/src/predeploys.rs b/rust/op-alloy/crates/consensus/src/predeploys.rs 
similarity index 100% rename from op-alloy/crates/consensus/src/predeploys.rs rename to rust/op-alloy/crates/consensus/src/predeploys.rs diff --git a/rust/op-alloy/crates/consensus/src/receipts/deposit.rs b/rust/op-alloy/crates/consensus/src/receipts/deposit.rs new file mode 100644 index 00000000000..66ea786904d --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/receipts/deposit.rs @@ -0,0 +1,534 @@ +//! Transaction receipt types for Optimism. + +use super::OpTxReceipt; +use crate::transaction::OpDepositInfo; +use alloy_consensus::{ + Eip658Value, Receipt, ReceiptWithBloom, RlpDecodableReceipt, RlpEncodableReceipt, TxReceipt, +}; +use alloy_primitives::{Bloom, Log}; +use alloy_rlp::{Buf, BufMut, Decodable, Encodable, Header}; + +/// [`OpDepositReceipt`] with calculated bloom filter, modified for the OP Stack. +/// +/// This convenience type allows us to lazily calculate the bloom filter for a +/// receipt, similar to [`Sealed`]. +/// +/// [`Sealed`]: alloy_consensus::Sealed +pub type OpDepositReceiptWithBloom = ReceiptWithBloom>; + +/// Receipt containing result of transaction execution. +#[derive(Clone, Debug, PartialEq, Eq, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct OpDepositReceipt { + /// The inner receipt type. + #[cfg_attr(feature = "serde", serde(flatten))] + pub inner: Receipt, + /// Deposit nonce for Optimism deposit transactions + #[cfg_attr( + feature = "serde", + serde( + default, + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + ) + )] + pub deposit_nonce: Option, + /// Deposit receipt version for Optimism deposit transactions + /// + /// The deposit receipt version was introduced in Canyon to indicate an update to how + /// receipt hashes should be computed when set. The state transition process + /// ensures this is only set for post-Canyon deposit transactions. 
+ #[cfg_attr( + feature = "serde", + serde( + default, + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + ) + )] + pub deposit_receipt_version: Option, +} + +impl OpDepositReceipt { + /// Calculates [`Log`]'s bloom filter. this is slow operation and [`OpDepositReceiptWithBloom`] + /// can be used to cache this value. + pub fn bloom_slow(&self) -> Bloom { + self.inner.logs.iter().collect() + } + + /// Calculates the bloom filter for the receipt and returns the [`OpDepositReceiptWithBloom`] + /// container type. + pub fn with_bloom(self) -> OpDepositReceiptWithBloom { + self.into() + } +} + +impl OpDepositReceipt { + /// Maps the inner receipt value of this receipt. + /// + /// This is mainly useful for mapping the receipt log type to the rpc variant. + pub fn map_inner(self, f: F) -> OpDepositReceipt + where + F: FnOnce(Receipt) -> Receipt, + { + OpDepositReceipt { + inner: f(self.inner), + deposit_nonce: self.deposit_nonce, + deposit_receipt_version: self.deposit_receipt_version, + } + } + + /// Attaches the given bloom to the receipt returning [`ReceiptWithBloom`]. + pub const fn with_bloom_unchecked(self, bloom: Bloom) -> ReceiptWithBloom { + ReceiptWithBloom::new(self, bloom) + } + + /// Consumes the type and returns the inner [`Receipt`]. + pub fn into_inner(self) -> Receipt { + self.inner + } + + /// Returns the deposit info for this receipt. + pub const fn deposit_info(&self) -> OpDepositInfo { + OpDepositInfo { + deposit_nonce: self.deposit_nonce, + deposit_receipt_version: self.deposit_receipt_version, + } + } + + /// Converts the receipt's log type by applying a function to each log. + /// + /// Returns the receipt with the new log type + pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpDepositReceipt { + self.map_inner(|r| r.map_logs(f)) + } +} + +impl OpDepositReceipt { + /// Returns length of RLP-encoded receipt fields with the given [`Bloom`] without an RLP header. 
+ pub fn rlp_encoded_fields_length_with_bloom(&self, bloom: &Bloom) -> usize { + self.inner.rlp_encoded_fields_length_with_bloom(bloom) + + self.deposit_nonce.map_or(0, |nonce| nonce.length()) + + self.deposit_receipt_version.map_or(0, |version| version.length()) + } + + /// RLP-encodes receipt fields with the given [`Bloom`] without an RLP header. + pub fn rlp_encode_fields_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { + self.inner.rlp_encode_fields_with_bloom(bloom, out); + + if let Some(nonce) = self.deposit_nonce { + nonce.encode(out); + } + if let Some(version) = self.deposit_receipt_version { + version.encode(out); + } + } + + /// Returns RLP header for this receipt encoding with the given [`Bloom`]. + pub fn rlp_header_with_bloom(&self, bloom: &Bloom) -> Header { + Header { list: true, payload_length: self.rlp_encoded_fields_length_with_bloom(bloom) } + } +} + +impl OpDepositReceipt { + /// RLP-decodes receipt's field with a [`Bloom`]. + /// + /// Does not expect an RLP header. 
+ pub fn rlp_decode_fields_with_bloom( + buf: &mut &[u8], + ) -> alloy_rlp::Result> { + let ReceiptWithBloom { receipt: inner, logs_bloom } = + Receipt::rlp_decode_fields_with_bloom(buf)?; + + let deposit_nonce = (!buf.is_empty()).then(|| Decodable::decode(buf)).transpose()?; + let deposit_receipt_version = + (!buf.is_empty()).then(|| Decodable::decode(buf)).transpose()?; + + Ok(ReceiptWithBloom { + logs_bloom, + receipt: Self { inner, deposit_nonce, deposit_receipt_version }, + }) + } +} + +impl AsRef> for OpDepositReceipt { + fn as_ref(&self) -> &Receipt { + &self.inner + } +} + +impl From> for Receipt { + fn from(value: OpDepositReceipt) -> Self { + value.into_inner() + } +} + +impl TxReceipt for OpDepositReceipt +where + T: AsRef + Clone + core::fmt::Debug + PartialEq + Eq + Send + Sync, +{ + type Log = T; + + fn status_or_post_state(&self) -> Eip658Value { + self.inner.status_or_post_state() + } + + fn status(&self) -> bool { + self.inner.status() + } + + fn bloom(&self) -> Bloom { + self.inner.bloom_slow() + } + + fn cumulative_gas_used(&self) -> u64 { + self.inner.cumulative_gas_used() + } + + fn logs(&self) -> &[Self::Log] { + self.inner.logs() + } +} + +impl RlpEncodableReceipt for OpDepositReceipt { + fn rlp_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { + self.rlp_header_with_bloom(bloom).length_with_payload() + } + + fn rlp_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { + self.rlp_header_with_bloom(bloom).encode(out); + self.rlp_encode_fields_with_bloom(bloom, out); + } +} + +impl RlpDecodableReceipt for OpDepositReceipt { + fn rlp_decode_with_bloom(buf: &mut &[u8]) -> alloy_rlp::Result> { + let header = Header::decode(buf)?; + if !header.list { + return Err(alloy_rlp::Error::UnexpectedString); + } + + if buf.len() < header.payload_length { + return Err(alloy_rlp::Error::InputTooShort); + } + + // Note: we pass a separate buffer to `rlp_decode_fields_with_bloom` to allow it decode + // optional fields based on the remaining 
length. + let mut fields_buf = &buf[..header.payload_length]; + let this = Self::rlp_decode_fields_with_bloom(&mut fields_buf)?; + + if !fields_buf.is_empty() { + return Err(alloy_rlp::Error::UnexpectedLength); + } + + buf.advance(header.payload_length); + + Ok(this) + } +} + +impl OpTxReceipt for OpDepositReceipt { + fn deposit_nonce(&self) -> Option { + self.deposit_nonce + } + + fn deposit_receipt_version(&self) -> Option { + self.deposit_receipt_version + } +} + +impl From> for OpDepositReceipt { + fn from(value: ReceiptWithBloom) -> Self { + value.receipt + } +} + +#[cfg(feature = "arbitrary")] +impl<'a, T> arbitrary::Arbitrary<'a> for OpDepositReceipt +where + T: arbitrary::Arbitrary<'a>, +{ + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + use alloc::vec::Vec; + let deposit_nonce = Option::::arbitrary(u)?; + let deposit_receipt_version = + deposit_nonce.is_some().then(|| u64::arbitrary(u)).transpose()?; + Ok(Self { + inner: Receipt { + status: Eip658Value::arbitrary(u)?, + cumulative_gas_used: u64::arbitrary(u)?, + logs: Vec::::arbitrary(u)?, + }, + deposit_nonce, + deposit_receipt_version, + }) + } +} + +/// Bincode-compatible [`OpDepositReceipt`] serde implementation. +#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] +pub(crate) mod serde_bincode_compat { + use alloc::borrow::Cow; + use alloy_consensus::Receipt; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible [`super::OpDepositReceipt`] serde implementation. 
+ /// + /// Intended to use with the [`serde_with::serde_as`] macro in the following way: + /// ```rust + /// use op_alloy_consensus::{OpDepositReceipt, serde_bincode_compat}; + /// use serde::{Deserialize, Serialize, de::DeserializeOwned}; + /// use serde_with::serde_as; + /// + /// #[serde_as] + /// #[derive(Serialize, Deserialize)] + /// struct Data { + /// #[serde_as(as = "serde_bincode_compat::OpDepositReceipt<'_, T>")] + /// receipt: OpDepositReceipt, + /// } + /// ``` + #[derive(Debug, Serialize, Deserialize)] + pub struct OpDepositReceipt<'a, T: Clone> { + logs: Cow<'a, [T]>, + status: bool, + cumulative_gas_used: u64, + deposit_nonce: Option, + deposit_receipt_version: Option, + } + + impl<'a, T: Clone> From<&'a super::OpDepositReceipt> for OpDepositReceipt<'a, T> { + fn from(value: &'a super::OpDepositReceipt) -> Self { + Self { + logs: Cow::Borrowed(&value.inner.logs), + // OP has no post state root variant + status: value.inner.status.coerce_status(), + cumulative_gas_used: value.inner.cumulative_gas_used, + deposit_nonce: value.deposit_nonce, + deposit_receipt_version: value.deposit_receipt_version, + } + } + } + + impl<'a, T: Clone> From> for super::OpDepositReceipt { + fn from(value: OpDepositReceipt<'a, T>) -> Self { + Self { + inner: Receipt { + status: value.status.into(), + cumulative_gas_used: value.cumulative_gas_used, + logs: value.logs.into_owned(), + }, + deposit_nonce: value.deposit_nonce, + deposit_receipt_version: value.deposit_receipt_version, + } + } + } + + impl SerializeAs> for OpDepositReceipt<'_, T> { + fn serialize_as( + source: &super::OpDepositReceipt, + serializer: S, + ) -> Result + where + S: Serializer, + { + OpDepositReceipt::<'_, T>::from(source).serialize(serializer) + } + } + + impl<'de, T: Deserialize<'de> + Clone> DeserializeAs<'de, super::OpDepositReceipt> + for OpDepositReceipt<'de, T> + { + fn deserialize_as(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + OpDepositReceipt::<'_, 
T>::deserialize(deserializer).map(Into::into) + } + } + + #[cfg(test)] + mod tests { + use super::super::{OpDepositReceipt, serde_bincode_compat}; + use alloy_primitives::Log; + use arbitrary::Arbitrary; + use rand::Rng; + use serde::{Deserialize, Serialize, de::DeserializeOwned}; + use serde_with::serde_as; + + #[test] + fn test_tx_deposit_bincode_roundtrip() { + #[serde_as] + #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] + struct Data { + #[serde_as(as = "serde_bincode_compat::OpDepositReceipt<'_,T>")] + transaction: OpDepositReceipt, + } + + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + let mut data = Data { + transaction: OpDepositReceipt::arbitrary(&mut arbitrary::Unstructured::new(&bytes)) + .unwrap(), + }; + // ensure we don't have an invalid poststate variant + data.transaction.inner.status = data.transaction.inner.status.coerce_status().into(); + + let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); + let (decoded, _) = bincode::serde::decode_from_slice::, _>( + &encoded, + bincode::config::legacy(), + ) + .unwrap(); + assert_eq!(decoded, data); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::Receipt; + use alloy_primitives::{Bytes, Log, LogData, address, b256, bytes, hex}; + use alloy_rlp::{Decodable, Encodable}; + + #[cfg(not(feature = "std"))] + use alloc::{vec, vec::Vec}; + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn decode_legacy_receipt() { + let data = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + // EIP658Receipt + let expected = OpDepositReceiptWithBloom { + receipt: OpDepositReceipt { + inner: Receipt { + status: false.into(), + cumulative_gas_used: 0x1, + logs: vec![Log { + address: address!("0000000000000000000000000000000000000011"), + data: LogData::new_unchecked( + vec![ + b256!( + "000000000000000000000000000000000000000000000000000000000000dead" + ), + b256!( + "000000000000000000000000000000000000000000000000000000000000beef" + ), + ], + bytes!("0100ff"), + ), + }], + }, + deposit_nonce: None, + deposit_receipt_version: None, + }, + logs_bloom: [0; 256].into(), + }; + + let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + } + + #[test] + fn gigantic_receipt() { + let receipt = OpDepositReceipt { + inner: Receipt { + cumulative_gas_used: 16747627, + status: true.into(), + logs: vec![ + Log { + address: address!("4bf56695415f725e43c3e04354b604bcfb6dfb6e"), + data: LogData::new_unchecked( + vec![b256!( + "c69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" + )], + Bytes::from(vec![1; 0xffffff]), + ), + }, + Log { + address: address!("faca325c86bf9c2d5b413cd7b90b209be92229c2"), + data: LogData::new_unchecked( + vec![b256!( + 
"8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" + )], + Bytes::from(vec![1; 0xffffff]), + ), + }, + ], + }, + deposit_nonce: None, + deposit_receipt_version: None, + } + .with_bloom(); + + let mut data = vec![]; + + receipt.encode(&mut data); + let decoded = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); + + // receipt.clone().to_compact(&mut data); + // let (decoded, _) = Receipt::from_compact(&data[..], data.len()); + assert_eq!(decoded, receipt); + } + + #[test] + fn regolith_receipt_roundtrip() { + let data = hex!( + "f9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" + ); + + // Deposit Receipt (post-regolith) + let expected = OpDepositReceiptWithBloom { + receipt: OpDepositReceipt { + inner: Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: None, + }, + logs_bloom: [0; 256].into(), + }; + + let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::new(); + receipt.encode(&mut buf); + assert_eq!(buf, &data[..]); + } + + #[test] + fn post_canyon_receipt_roundtrip() { + let data = hex!( + 
"f9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" + ); + + // Deposit Receipt (post-regolith) + let expected = OpDepositReceiptWithBloom { + receipt: OpDepositReceipt { + inner: Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: Some(1), + }, + logs_bloom: [0; 256].into(), + }; + + let receipt = OpDepositReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::new(); + expected.encode(&mut buf); + assert_eq!(buf, &data[..]); + } +} diff --git a/rust/op-alloy/crates/consensus/src/receipts/envelope.rs b/rust/op-alloy/crates/consensus/src/receipts/envelope.rs new file mode 100644 index 00000000000..d0efcea1583 --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/receipts/envelope.rs @@ -0,0 +1,430 @@ +//! Receipt envelope types for Optimism. + +use crate::{OpDepositReceipt, OpDepositReceiptWithBloom, OpTxType}; +use alloc::vec::Vec; +use alloy_consensus::{Eip658Value, Receipt, ReceiptWithBloom, TxReceipt}; +use alloy_eips::{ + Typed2718, + eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718, IsTyped2718}, +}; +use alloy_primitives::{Bloom, Log, logs_bloom}; +use alloy_rlp::{BufMut, Decodable, Encodable, length_of_length}; + +/// Receipt envelope, as defined in [EIP-2718], modified for OP Stack chains. 
+/// +/// This enum distinguishes between tagged and untagged legacy receipts, as the +/// in-protocol merkle tree may commit to EITHER 0-prefixed or raw. Therefore +/// we must ensure that encoding returns the precise byte-array that was +/// decoded, preserving the presence or absence of the `TransactionType` flag. +/// +/// Transaction receipt payloads are specified in their respective EIPs. +/// +/// [EIP-2718]: https://eips.ethereum.org/EIPS/eip-2718 +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(tag = "type"))] +pub enum OpReceiptEnvelope { + /// Receipt envelope with no type flag. + #[cfg_attr(feature = "serde", serde(rename = "0x0", alias = "0x00"))] + Legacy(ReceiptWithBloom>), + /// Receipt envelope with type flag 1, containing a [EIP-2930] receipt. + /// + /// [EIP-2930]: https://eips.ethereum.org/EIPS/eip-2930 + #[cfg_attr(feature = "serde", serde(rename = "0x1", alias = "0x01"))] + Eip2930(ReceiptWithBloom>), + /// Receipt envelope with type flag 2, containing a [EIP-1559] receipt. + /// + /// [EIP-1559]: https://eips.ethereum.org/EIPS/eip-1559 + #[cfg_attr(feature = "serde", serde(rename = "0x2", alias = "0x02"))] + Eip1559(ReceiptWithBloom>), + /// Receipt envelope with type flag 4, containing a [EIP-7702] receipt. + /// + /// [EIP-7702]: https://eips.ethereum.org/EIPS/eip-7702 + #[cfg_attr(feature = "serde", serde(rename = "0x4", alias = "0x04"))] + Eip7702(ReceiptWithBloom>), + /// Receipt envelope with type flag 126, containing a [deposit] receipt. + /// + /// [deposit]: https://specs.optimism.io/protocol/deposits.html + #[cfg_attr(feature = "serde", serde(rename = "0x7e", alias = "0x7E"))] + Deposit(ReceiptWithBloom>), +} + +impl OpReceiptEnvelope { + /// Creates a new [`OpReceiptEnvelope`] from the given parts. 
+ pub fn from_parts<'a>( + status: bool, + cumulative_gas_used: u64, + logs: impl IntoIterator, + tx_type: OpTxType, + deposit_nonce: Option, + deposit_receipt_version: Option, + ) -> Self { + let logs = logs.into_iter().cloned().collect::>(); + let logs_bloom = logs_bloom(&logs); + let inner_receipt = + Receipt { status: Eip658Value::Eip658(status), cumulative_gas_used, logs }; + match tx_type { + OpTxType::Legacy => { + Self::Legacy(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) + } + OpTxType::Eip2930 => { + Self::Eip2930(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) + } + OpTxType::Eip1559 => { + Self::Eip1559(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) + } + OpTxType::Eip7702 => { + Self::Eip7702(ReceiptWithBloom { receipt: inner_receipt, logs_bloom }) + } + OpTxType::Deposit => { + let inner = OpDepositReceiptWithBloom { + receipt: OpDepositReceipt { + inner: inner_receipt, + deposit_nonce, + deposit_receipt_version, + }, + logs_bloom, + }; + Self::Deposit(inner) + } + } + } +} + +impl OpReceiptEnvelope { + /// Return the [`OpTxType`] of the inner receipt. + pub const fn tx_type(&self) -> OpTxType { + match self { + Self::Legacy(_) => OpTxType::Legacy, + Self::Eip2930(_) => OpTxType::Eip2930, + Self::Eip1559(_) => OpTxType::Eip1559, + Self::Eip7702(_) => OpTxType::Eip7702, + Self::Deposit(_) => OpTxType::Deposit, + } + } + + /// Return true if the transaction was successful. + pub const fn is_success(&self) -> bool { + self.status() + } + + /// Returns the success status of the receipt's transaction. + pub const fn status(&self) -> bool { + self.as_receipt().unwrap().status.coerce_status() + } + + /// Returns the cumulative gas used at this receipt. + pub const fn cumulative_gas_used(&self) -> u64 { + self.as_receipt().unwrap().cumulative_gas_used + } + + /// Converts the receipt's log type by applying a function to each log. + /// + /// Returns the receipt with the new log type. 
+ pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpReceiptEnvelope { + match self { + Self::Legacy(r) => OpReceiptEnvelope::Legacy(r.map_logs(f)), + Self::Eip2930(r) => OpReceiptEnvelope::Eip2930(r.map_logs(f)), + Self::Eip1559(r) => OpReceiptEnvelope::Eip1559(r.map_logs(f)), + Self::Eip7702(r) => OpReceiptEnvelope::Eip7702(r.map_logs(f)), + Self::Deposit(r) => OpReceiptEnvelope::Deposit(r.map_receipt(|r| r.map_logs(f))), + } + } + + /// Return the receipt logs. + pub fn logs(&self) -> &[T] { + &self.as_receipt().unwrap().logs + } + + /// Consumes the type and returns the logs. + pub fn into_logs(self) -> Vec { + self.into_receipt().logs + } + + /// Return the receipt's bloom. + pub const fn logs_bloom(&self) -> &Bloom { + match self { + Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => { + &t.logs_bloom + } + Self::Deposit(t) => &t.logs_bloom, + } + } + + /// Return the receipt's `deposit_nonce` if it is a deposit receipt. + pub fn deposit_nonce(&self) -> Option { + self.as_deposit_receipt().and_then(|r| r.deposit_nonce) + } + + /// Return the receipt's deposit version if it is a deposit receipt. + pub fn deposit_receipt_version(&self) -> Option { + self.as_deposit_receipt().and_then(|r| r.deposit_receipt_version) + } + + /// Returns the deposit receipt if it is a deposit receipt. + pub const fn as_deposit_receipt_with_bloom(&self) -> Option<&OpDepositReceiptWithBloom> { + match self { + Self::Deposit(t) => Some(t), + _ => None, + } + } + + /// Returns the deposit receipt if it is a deposit receipt. + pub const fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt> { + match self { + Self::Deposit(t) => Some(&t.receipt), + _ => None, + } + } + + /// Consumes the type and returns the underlying [`Receipt`]. + pub fn into_receipt(self) -> Receipt { + match self { + Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => t.receipt, + Self::Deposit(t) => t.receipt.into_inner(), + } + } + + /// Return the inner receipt. 
Currently this is infallible, however, future + /// receipt types may be added. + pub const fn as_receipt(&self) -> Option<&Receipt> { + match self { + Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => { + Some(&t.receipt) + } + Self::Deposit(t) => Some(&t.receipt.inner), + } + } +} + +impl OpReceiptEnvelope { + /// Get the length of the inner receipt in the 2718 encoding. + pub fn inner_length(&self) -> usize { + match self { + Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => t.length(), + Self::Deposit(t) => t.length(), + } + } + + /// Calculate the length of the rlp payload of the network encoded receipt. + pub fn rlp_payload_length(&self) -> usize { + let length = self.inner_length(); + match self { + Self::Legacy(_) => length, + _ => length + 1, + } + } +} + +impl TxReceipt for OpReceiptEnvelope +where + T: Clone + core::fmt::Debug + PartialEq + Eq + Send + Sync, +{ + type Log = T; + + fn status_or_post_state(&self) -> Eip658Value { + self.as_receipt().unwrap().status + } + + fn status(&self) -> bool { + self.as_receipt().unwrap().status.coerce_status() + } + + /// Return the receipt's bloom. + fn bloom(&self) -> Bloom { + *self.logs_bloom() + } + + fn bloom_cheap(&self) -> Option { + Some(self.bloom()) + } + + /// Returns the cumulative gas used at this receipt. + fn cumulative_gas_used(&self) -> u64 { + self.as_receipt().unwrap().cumulative_gas_used + } + + /// Return the receipt logs. 
+ fn logs(&self) -> &[T] { + &self.as_receipt().unwrap().logs + } +} + +impl Encodable for OpReceiptEnvelope { + fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { + self.network_encode(out) + } + + fn length(&self) -> usize { + let mut payload_length = self.rlp_payload_length(); + if !self.is_legacy() { + payload_length += length_of_length(payload_length); + } + payload_length + } +} + +impl Decodable for OpReceiptEnvelope { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + Self::network_decode(buf) + .map_or_else(|_| Err(alloy_rlp::Error::Custom("Unexpected type")), Ok) + } +} + +impl Typed2718 for OpReceiptEnvelope { + fn ty(&self) -> u8 { + let ty = match self { + Self::Legacy(_) => OpTxType::Legacy, + Self::Eip2930(_) => OpTxType::Eip2930, + Self::Eip1559(_) => OpTxType::Eip1559, + Self::Eip7702(_) => OpTxType::Eip7702, + Self::Deposit(_) => OpTxType::Deposit, + }; + ty as u8 + } +} + +impl IsTyped2718 for OpReceiptEnvelope { + fn is_type(type_id: u8) -> bool { + ::is_type(type_id) + } +} + +impl Encodable2718 for OpReceiptEnvelope { + fn encode_2718_len(&self) -> usize { + self.inner_length() + !self.is_legacy() as usize + } + + fn encode_2718(&self, out: &mut dyn BufMut) { + match self.type_flag() { + None => {} + Some(ty) => out.put_u8(ty), + } + match self { + Self::Deposit(t) => t.encode(out), + Self::Legacy(t) | Self::Eip2930(t) | Self::Eip1559(t) | Self::Eip7702(t) => { + t.encode(out) + } + } + } +} + +impl Decodable2718 for OpReceiptEnvelope { + fn typed_decode(ty: u8, buf: &mut &[u8]) -> Eip2718Result { + match ty.try_into().map_err(|_| Eip2718Error::UnexpectedType(ty))? 
{ + OpTxType::Legacy => { + Err(alloy_rlp::Error::Custom("type-0 eip2718 transactions are not supported") + .into()) + } + OpTxType::Eip1559 => Ok(Self::Eip1559(Decodable::decode(buf)?)), + OpTxType::Eip7702 => Ok(Self::Eip7702(Decodable::decode(buf)?)), + OpTxType::Eip2930 => Ok(Self::Eip2930(Decodable::decode(buf)?)), + OpTxType::Deposit => Ok(Self::Deposit(Decodable::decode(buf)?)), + } + } + + fn fallback_decode(buf: &mut &[u8]) -> Eip2718Result { + Ok(Self::Legacy(Decodable::decode(buf)?)) + } +} + +impl From for OpReceiptEnvelope +where + T: Into>>, +{ + fn from(value: T) -> Self { + Self::Deposit(value.into()) + } +} + +impl From> for Receipt { + fn from(receipt: OpReceiptEnvelope) -> Self { + receipt.into_receipt() + } +} + +#[cfg(all(test, feature = "arbitrary"))] +impl<'a, T> arbitrary::Arbitrary<'a> for OpReceiptEnvelope +where + T: arbitrary::Arbitrary<'a>, +{ + fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { + match u.int_in_range(0..=4)? { + 0 => Ok(Self::Legacy(ReceiptWithBloom::arbitrary(u)?)), + 1 => Ok(Self::Eip2930(ReceiptWithBloom::arbitrary(u)?)), + 2 => Ok(Self::Eip1559(ReceiptWithBloom::arbitrary(u)?)), + _ => Ok(Self::Deposit(OpDepositReceiptWithBloom::arbitrary(u)?)), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::{Receipt, ReceiptWithBloom}; + use alloy_eips::eip2718::Encodable2718; + use alloy_primitives::{Log, LogData, address, b256, bytes, hex}; + use alloy_rlp::Encodable; + + #[cfg(not(feature = "std"))] + use alloc::vec; + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn encode_legacy_receipt() { + let expected = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + let mut data = vec![]; + let receipt = OpReceiptEnvelope::Legacy(ReceiptWithBloom { + receipt: Receipt { + status: false.into(), + cumulative_gas_used: 0x1, + logs: vec![Log { + address: address!("0000000000000000000000000000000000000011"), + data: LogData::new_unchecked( + vec![ + b256!( + "000000000000000000000000000000000000000000000000000000000000dead" + ), + b256!( + "000000000000000000000000000000000000000000000000000000000000beef" + ), + ], + bytes!("0100ff"), + ), + }], + }, + logs_bloom: [0; 256].into(), + }); + + receipt.network_encode(&mut data); + + // check that the rlp length equals the length of the expected rlp + assert_eq!(receipt.length(), expected.len()); + assert_eq!(data, expected); + } + + #[test] + fn legacy_receipt_from_parts() { + let receipt = + OpReceiptEnvelope::from_parts(true, 100, vec![], OpTxType::Legacy, None, None); + assert!(receipt.status()); + assert_eq!(receipt.cumulative_gas_used(), 100); + assert_eq!(receipt.logs().len(), 0); + assert_eq!(receipt.tx_type(), OpTxType::Legacy); + } + + #[test] + fn deposit_receipt_from_parts() { + let receipt = + OpReceiptEnvelope::from_parts(true, 100, vec![], OpTxType::Deposit, Some(1), Some(2)); + assert!(receipt.status()); + assert_eq!(receipt.cumulative_gas_used(), 
100); + assert_eq!(receipt.logs().len(), 0); + assert_eq!(receipt.tx_type(), OpTxType::Deposit); + assert_eq!(receipt.deposit_nonce(), Some(1)); + assert_eq!(receipt.deposit_receipt_version(), Some(2)); + } +} diff --git a/op-alloy/crates/consensus/src/receipts/mod.rs b/rust/op-alloy/crates/consensus/src/receipts/mod.rs similarity index 100% rename from op-alloy/crates/consensus/src/receipts/mod.rs rename to rust/op-alloy/crates/consensus/src/receipts/mod.rs diff --git a/rust/op-alloy/crates/consensus/src/receipts/receipt.rs b/rust/op-alloy/crates/consensus/src/receipts/receipt.rs new file mode 100644 index 00000000000..21b2b2ad632 --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/receipts/receipt.rs @@ -0,0 +1,769 @@ +//! Optimism receipt type for execution and storage. + +use core::fmt::Debug; + +use super::{OpDepositReceipt, OpTxReceipt}; +use crate::{OpReceiptEnvelope, OpTxType}; +use alloc::vec::Vec; +use alloy_consensus::{ + Eip658Value, Eip2718DecodableReceipt, Eip2718EncodableReceipt, Receipt, ReceiptWithBloom, + RlpDecodableReceipt, RlpEncodableReceipt, TxReceipt, Typed2718, +}; +use alloy_eips::eip2718::{Eip2718Error, Eip2718Result, IsTyped2718}; +use alloy_primitives::{Bloom, Log}; +use alloy_rlp::{Buf, BufMut, Decodable, Encodable, Header}; + +/// Typed Optimism transaction receipt. +/// +/// Receipt containing result of transaction execution. 
+#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", serde(tag = "type"))] +pub enum OpReceipt { + /// Legacy receipt + #[cfg_attr(feature = "serde", serde(rename = "0x0", alias = "0x00"))] + Legacy(Receipt), + /// EIP-2930 receipt + #[cfg_attr(feature = "serde", serde(rename = "0x1", alias = "0x01"))] + Eip2930(Receipt), + /// EIP-1559 receipt + #[cfg_attr(feature = "serde", serde(rename = "0x2", alias = "0x02"))] + Eip1559(Receipt), + /// EIP-7702 receipt + #[cfg_attr(feature = "serde", serde(rename = "0x4", alias = "0x04"))] + Eip7702(Receipt), + /// Deposit receipt + #[cfg_attr(feature = "serde", serde(rename = "0x7e", alias = "0x7E"))] + Deposit(OpDepositReceipt), +} + +impl OpReceipt { + /// Returns [`OpTxType`] of the receipt. + pub const fn tx_type(&self) -> OpTxType { + match self { + Self::Legacy(_) => OpTxType::Legacy, + Self::Eip2930(_) => OpTxType::Eip2930, + Self::Eip1559(_) => OpTxType::Eip1559, + Self::Eip7702(_) => OpTxType::Eip7702, + Self::Deposit(_) => OpTxType::Deposit, + } + } + + /// Returns inner [`Receipt`]. + pub const fn as_receipt(&self) -> &Receipt { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt, + Self::Deposit(receipt) => &receipt.inner, + } + } + + /// Returns a mutable reference to the inner [`Receipt`]. + pub const fn as_receipt_mut(&mut self) -> &mut Receipt { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt, + Self::Deposit(receipt) => &mut receipt.inner, + } + } + + /// Consumes this and returns the inner [`Receipt`]. 
+ pub fn into_receipt(self) -> Receipt { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt, + Self::Deposit(receipt) => receipt.inner, + } + } + + /// Converts the receipt's log type by applying a function to each log. + /// + /// Returns the receipt with the new log type + pub fn map_logs(self, f: impl FnMut(T) -> U) -> OpReceipt { + match self { + Self::Legacy(receipt) => OpReceipt::Legacy(receipt.map_logs(f)), + Self::Eip2930(receipt) => OpReceipt::Eip2930(receipt.map_logs(f)), + Self::Eip1559(receipt) => OpReceipt::Eip1559(receipt.map_logs(f)), + Self::Eip7702(receipt) => OpReceipt::Eip7702(receipt.map_logs(f)), + Self::Deposit(receipt) => OpReceipt::Deposit(receipt.map_logs(f)), + } + } + + /// Returns length of RLP-encoded receipt fields with the given [`Bloom`] without an RLP header. + pub fn rlp_encoded_fields_length(&self, bloom: &Bloom) -> usize + where + T: Encodable, + { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt.rlp_encoded_fields_length_with_bloom(bloom), + Self::Deposit(receipt) => receipt.rlp_encoded_fields_length_with_bloom(bloom), + } + } + + /// RLP-encodes receipt fields with the given [`Bloom`] without an RLP header. + pub fn rlp_encode_fields(&self, bloom: &Bloom, out: &mut dyn BufMut) + where + T: Encodable, + { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt.rlp_encode_fields_with_bloom(bloom, out), + Self::Deposit(receipt) => receipt.rlp_encode_fields_with_bloom(bloom, out), + } + } + + /// Returns RLP header for inner encoding. + pub fn rlp_header_inner(&self, bloom: &Bloom) -> Header + where + T: Encodable, + { + Header { list: true, payload_length: self.rlp_encoded_fields_length(bloom) } + } + + /// Returns RLP header for inner encoding without bloom. 
+ pub fn rlp_header_without_bloom(&self) -> Header + where + T: Encodable, + { + Header { list: true, payload_length: self.rlp_encoded_fields_length_without_bloom() } + } + + /// RLP-decodes the receipt from the provided buffer. This does not expect a type byte or + /// network header. + pub fn rlp_decode_inner( + buf: &mut &[u8], + tx_type: OpTxType, + ) -> alloy_rlp::Result> + where + T: Decodable, + { + match tx_type { + OpTxType::Legacy => { + let ReceiptWithBloom { receipt, logs_bloom } = + RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; + Ok(ReceiptWithBloom { receipt: Self::Legacy(receipt), logs_bloom }) + } + OpTxType::Eip2930 => { + let ReceiptWithBloom { receipt, logs_bloom } = + RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; + Ok(ReceiptWithBloom { receipt: Self::Eip2930(receipt), logs_bloom }) + } + OpTxType::Eip1559 => { + let ReceiptWithBloom { receipt, logs_bloom } = + RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; + Ok(ReceiptWithBloom { receipt: Self::Eip1559(receipt), logs_bloom }) + } + OpTxType::Eip7702 => { + let ReceiptWithBloom { receipt, logs_bloom } = + RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; + Ok(ReceiptWithBloom { receipt: Self::Eip7702(receipt), logs_bloom }) + } + OpTxType::Deposit => { + let ReceiptWithBloom { receipt, logs_bloom } = + RlpDecodableReceipt::rlp_decode_with_bloom(buf)?; + Ok(ReceiptWithBloom { receipt: Self::Deposit(receipt), logs_bloom }) + } + } + } + + /// RLP-encodes receipt fields without an RLP header. 
+ pub fn rlp_encode_fields_without_bloom(&self, out: &mut dyn BufMut) + where + T: Encodable, + { + self.tx_type().encode(out); + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => { + receipt.status.encode(out); + receipt.cumulative_gas_used.encode(out); + receipt.logs.encode(out); + } + Self::Deposit(receipt) => { + receipt.inner.status.encode(out); + receipt.inner.cumulative_gas_used.encode(out); + receipt.inner.logs.encode(out); + if let Some(nonce) = receipt.deposit_nonce { + nonce.encode(out); + } + if let Some(version) = receipt.deposit_receipt_version { + version.encode(out); + } + } + } + } + + /// Returns length of RLP-encoded receipt fields without an RLP header. + pub fn rlp_encoded_fields_length_without_bloom(&self) -> usize + where + T: Encodable, + { + self.tx_type().length() + + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => { + receipt.status.length() + + receipt.cumulative_gas_used.length() + + receipt.logs.length() + } + Self::Deposit(receipt) => { + receipt.inner.status.length() + + receipt.inner.cumulative_gas_used.length() + + receipt.inner.logs.length() + + receipt.deposit_nonce.map_or(0, |nonce| nonce.length()) + + receipt.deposit_receipt_version.map_or(0, |version| version.length()) + } + } + } + + /// RLP-decodes the receipt from the provided buffer without bloom. 
+ pub fn rlp_decode_fields_without_bloom(buf: &mut &[u8]) -> alloy_rlp::Result + where + T: Decodable, + { + let tx_type = OpTxType::decode(buf)?; + let status = Decodable::decode(buf)?; + let cumulative_gas_used = Decodable::decode(buf)?; + let logs = Decodable::decode(buf)?; + + let mut deposit_nonce = None; + let mut deposit_receipt_version = None; + + // For deposit receipts, try to decode nonce and version if they exist + if tx_type == OpTxType::Deposit && !buf.is_empty() { + deposit_nonce = Some(Decodable::decode(buf)?); + if !buf.is_empty() { + deposit_receipt_version = Some(Decodable::decode(buf)?); + } + } + + match tx_type { + OpTxType::Legacy => Ok(Self::Legacy(Receipt { status, cumulative_gas_used, logs })), + OpTxType::Eip2930 => Ok(Self::Eip2930(Receipt { status, cumulative_gas_used, logs })), + OpTxType::Eip1559 => Ok(Self::Eip1559(Receipt { status, cumulative_gas_used, logs })), + OpTxType::Eip7702 => Ok(Self::Eip7702(Receipt { status, cumulative_gas_used, logs })), + OpTxType::Deposit => Ok(Self::Deposit(OpDepositReceipt { + inner: Receipt { status, cumulative_gas_used, logs }, + deposit_nonce, + deposit_receipt_version, + })), + } + } +} + +impl Eip2718EncodableReceipt for OpReceipt { + fn eip2718_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { + !self.tx_type().is_legacy() as usize + self.rlp_header_inner(bloom).length_with_payload() + } + + fn eip2718_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { + if !self.tx_type().is_legacy() { + out.put_u8(self.tx_type() as u8); + } + self.rlp_header_inner(bloom).encode(out); + self.rlp_encode_fields(bloom, out); + } +} + +impl Eip2718DecodableReceipt for OpReceipt { + fn typed_decode_with_bloom(ty: u8, buf: &mut &[u8]) -> Eip2718Result> { + let tx_type = OpTxType::try_from(ty).map_err(|_| Eip2718Error::UnexpectedType(ty))?; + Ok(Self::rlp_decode_inner(buf, tx_type)?) 
+ } + + fn fallback_decode_with_bloom(buf: &mut &[u8]) -> Eip2718Result> { + Ok(Self::rlp_decode_inner(buf, OpTxType::Legacy)?) + } +} + +impl RlpEncodableReceipt for OpReceipt { + fn rlp_encoded_length_with_bloom(&self, bloom: &Bloom) -> usize { + let mut len = self.eip2718_encoded_length_with_bloom(bloom); + if !self.tx_type().is_legacy() { + len += Header { + list: false, + payload_length: self.eip2718_encoded_length_with_bloom(bloom), + } + .length(); + } + + len + } + + fn rlp_encode_with_bloom(&self, bloom: &Bloom, out: &mut dyn BufMut) { + if !self.tx_type().is_legacy() { + Header { list: false, payload_length: self.eip2718_encoded_length_with_bloom(bloom) } + .encode(out); + } + self.eip2718_encode_with_bloom(bloom, out); + } +} + +impl RlpDecodableReceipt for OpReceipt { + fn rlp_decode_with_bloom(buf: &mut &[u8]) -> alloy_rlp::Result> { + let header_buf = &mut &**buf; + let header = Header::decode(header_buf)?; + + // Legacy receipt, reuse initial buffer without advancing + if header.list { + return Self::rlp_decode_inner(buf, OpTxType::Legacy); + } + + // Otherwise, advance the buffer and try decoding type flag followed by receipt + *buf = *header_buf; + + let remaining = buf.len(); + let tx_type = OpTxType::decode(buf)?; + let this = Self::rlp_decode_inner(buf, tx_type)?; + + if buf.len() + header.payload_length != remaining { + return Err(alloy_rlp::Error::UnexpectedLength); + } + + Ok(this) + } +} + +impl Encodable for OpReceipt { + fn encode(&self, out: &mut dyn BufMut) { + self.rlp_header_without_bloom().encode(out); + self.rlp_encode_fields_without_bloom(out); + } + + fn length(&self) -> usize { + self.rlp_header_without_bloom().length_with_payload() + } +} + +impl Decodable for OpReceipt { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let header = Header::decode(buf)?; + if !header.list { + return Err(alloy_rlp::Error::UnexpectedString); + } + + if buf.len() < header.payload_length { + return Err(alloy_rlp::Error::InputTooShort); + } + let 
mut fields_buf = &buf[..header.payload_length]; + let this = Self::rlp_decode_fields_without_bloom(&mut fields_buf)?; + + if !fields_buf.is_empty() { + return Err(alloy_rlp::Error::UnexpectedLength); + } + + buf.advance(header.payload_length); + + Ok(this) + } +} + +impl> TxReceipt for OpReceipt { + type Log = T; + + fn status_or_post_state(&self) -> Eip658Value { + self.as_receipt().status_or_post_state() + } + + fn status(&self) -> bool { + self.as_receipt().status() + } + + fn bloom(&self) -> Bloom { + self.as_receipt().bloom() + } + + fn cumulative_gas_used(&self) -> u64 { + self.as_receipt().cumulative_gas_used() + } + + fn logs(&self) -> &[Self::Log] { + self.as_receipt().logs() + } + + fn into_logs(self) -> Vec { + match self { + Self::Legacy(receipt) | + Self::Eip2930(receipt) | + Self::Eip1559(receipt) | + Self::Eip7702(receipt) => receipt.logs, + Self::Deposit(receipt) => receipt.inner.logs, + } + } +} + +impl Typed2718 for OpReceipt { + fn ty(&self) -> u8 { + self.tx_type().into() + } +} + +impl IsTyped2718 for OpReceipt { + fn is_type(type_id: u8) -> bool { + ::is_type(type_id) + } +} + +impl> OpTxReceipt for OpReceipt { + fn deposit_nonce(&self) -> Option { + match self { + Self::Deposit(receipt) => receipt.deposit_nonce, + _ => None, + } + } + + fn deposit_receipt_version(&self) -> Option { + match self { + Self::Deposit(receipt) => receipt.deposit_receipt_version, + _ => None, + } + } +} + +impl From for OpReceipt { + fn from(envelope: super::OpReceiptEnvelope) -> Self { + match envelope { + super::OpReceiptEnvelope::Legacy(receipt) => Self::Legacy(receipt.receipt), + super::OpReceiptEnvelope::Eip2930(receipt) => Self::Eip2930(receipt.receipt), + super::OpReceiptEnvelope::Eip1559(receipt) => Self::Eip1559(receipt.receipt), + super::OpReceiptEnvelope::Eip7702(receipt) => Self::Eip7702(receipt.receipt), + super::OpReceiptEnvelope::Deposit(receipt) => Self::Deposit(OpDepositReceipt { + deposit_nonce: receipt.receipt.deposit_nonce, + 
deposit_receipt_version: receipt.receipt.deposit_receipt_version, + inner: receipt.receipt.inner, + }), + } + } +} + +impl From>> for OpReceiptEnvelope { + fn from(value: ReceiptWithBloom>) -> Self { + let (receipt, logs_bloom) = value.into_components(); + match receipt { + OpReceipt::Legacy(receipt) => Self::Legacy(ReceiptWithBloom { receipt, logs_bloom }), + OpReceipt::Eip2930(receipt) => Self::Eip2930(ReceiptWithBloom { receipt, logs_bloom }), + OpReceipt::Eip1559(receipt) => Self::Eip1559(ReceiptWithBloom { receipt, logs_bloom }), + OpReceipt::Eip7702(receipt) => Self::Eip7702(ReceiptWithBloom { receipt, logs_bloom }), + OpReceipt::Deposit(receipt) => Self::Deposit(ReceiptWithBloom { receipt, logs_bloom }), + } + } +} + +/// Bincode-compatible serde implementations for opreceipt type. +#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] +pub(crate) mod serde_bincode_compat { + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible [`super::OpReceipt`] serde implementation. 
+ /// + /// Intended to use with the [`serde_with::serde_as`] macro in the following way: + /// ```rust + /// use op_alloy_consensus::{OpReceipt, serde_bincode_compat}; + /// use serde::{Deserialize, Serialize, de::DeserializeOwned}; + /// use serde_with::serde_as; + /// + /// #[serde_as] + /// #[derive(Serialize, Deserialize)] + /// struct Data { + /// #[serde_as(as = "serde_bincode_compat::OpReceipt<'_>")] + /// receipt: OpReceipt, + /// } + /// ``` + #[derive(Debug, Serialize, Deserialize)] + pub enum OpReceipt<'a> { + /// Legacy receipt + Legacy(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-2930 receipt + Eip2930(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-1559 receipt + Eip1559(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-7702 receipt + Eip7702(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// Deposit receipt + Deposit(crate::serde_bincode_compat::OpDepositReceipt<'a, alloy_primitives::Log>), + } + + impl<'a> From<&'a super::OpReceipt> for OpReceipt<'a> { + fn from(value: &'a super::OpReceipt) -> Self { + match value { + super::OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), + super::OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), + super::OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), + super::OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), + super::OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), + } + } + } + + impl<'a> From> for super::OpReceipt { + fn from(value: OpReceipt<'a>) -> Self { + match value { + OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), + OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), + OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), + OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), + OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), + } + } + } + + impl 
SerializeAs for OpReceipt<'_> { + fn serialize_as(source: &super::OpReceipt, serializer: S) -> Result + where + S: Serializer, + { + OpReceipt::<'_>::from(source).serialize(serializer) + } + } + + impl<'de> DeserializeAs<'de, super::OpReceipt> for OpReceipt<'de> { + fn deserialize_as(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + OpReceipt::<'_>::deserialize(deserializer).map(Into::into) + } + } + + #[cfg(test)] + mod tests { + use crate::OpReceipt; + use arbitrary::Arbitrary; + use rand::Rng; + use serde::{Deserialize, Serialize}; + use serde_with::serde_as; + + #[test] + fn test_tx_bincode_roundtrip() { + #[serde_as] + #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] + struct Data { + #[serde_as(as = "super::OpReceipt<'_>")] + receipt: OpReceipt, + } + + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + let mut data = Data { + receipt: OpReceipt::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), + }; + let success = data.receipt.as_receipt_mut().status.coerce_status(); + // // ensure we don't have an invalid poststate variant + data.receipt.as_receipt_mut().status = success.into(); + + let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); + let (decoded, _) = + bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) + .unwrap(); + assert_eq!(decoded, data); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + use alloy_eips::Encodable2718; + use alloy_primitives::{Bytes, address, b256, bytes, hex_literal::hex}; + use alloy_rlp::Encodable; + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn encode_legacy_receipt() { + let expected = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + let mut data = Vec::with_capacity(expected.length()); + let receipt = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt { + status: Eip658Value::Eip658(false), + cumulative_gas_used: 0x1, + logs: vec![Log::new_unchecked( + address!("0x0000000000000000000000000000000000000011"), + vec![ + b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), + b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), + ], + bytes!("0100ff"), + )], + }), + logs_bloom: [0; 256].into(), + }; + + receipt.encode(&mut data); + + // check that the rlp length equals the length of the expected rlp + assert_eq!(receipt.length(), expected.len()); + assert_eq!(data, expected); + } + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn decode_legacy_receipt() { + let data = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + // EIP658Receipt + let expected = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt { + status: Eip658Value::Eip658(false), + cumulative_gas_used: 0x1, + logs: vec![Log::new_unchecked( + address!("0x0000000000000000000000000000000000000011"), + vec![ + b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), + b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), + ], + bytes!("0100ff"), + )], + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + } + + #[test] + fn decode_deposit_receipt_regolith_roundtrip() { + let data = hex!( + "b901107ef9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" + ); + + // Deposit Receipt 
(post-regolith) + let expected: ReceiptWithBloom = ReceiptWithBloom { + receipt: OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 46913, + logs: vec![], + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: None, + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::with_capacity(data.len()); + receipt.encode(&mut buf); + assert_eq!(buf, &data[..]); + } + + #[test] + fn decode_deposit_receipt_canyon_roundtrip() { + let data = hex!( + "b901117ef9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" + ); + + // Deposit Receipt (post-canyon) + let expected: ReceiptWithBloom = ReceiptWithBloom { + receipt: OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 46913, + logs: vec![], + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: Some(1), + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::with_capacity(data.len()); + expected.encode(&mut buf); + assert_eq!(buf, &data[..]); + } + + #[test] + fn gigantic_receipt() { + let receipt = OpReceipt::Legacy(Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 16747627, + logs: vec![ + Log::new_unchecked( + 
address!("0x4bf56695415f725e43c3e04354b604bcfb6dfb6e"), + vec![b256!( + "0xc69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" + )], + Bytes::from(vec![1; 0xffffff]), + ), + Log::new_unchecked( + address!("0xfaca325c86bf9c2d5b413cd7b90b209be92229c2"), + vec![b256!( + "0x8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" + )], + Bytes::from(vec![1; 0xffffff]), + ), + ], + }); + + let _bloom = receipt.bloom(); + let mut encoded = vec![]; + receipt.encode(&mut encoded); + + let decoded = OpReceipt::decode(&mut &encoded[..]).unwrap(); + assert_eq!(decoded, receipt); + } + + #[test] + fn test_encode_2718_length() { + let receipt: ReceiptWithBloom = ReceiptWithBloom { + receipt: OpReceipt::Eip1559(Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 21000, + logs: vec![], + }), + logs_bloom: Bloom::default(), + }; + + let encoded = receipt.encoded_2718(); + assert_eq!( + encoded.len(), + receipt.encode_2718_len(), + "Encoded length should match the actual encoded data length" + ); + + // Test for legacy receipt as well + let legacy_receipt: ReceiptWithBloom = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 21000, + logs: vec![], + }), + logs_bloom: Bloom::default(), + }; + + let legacy_encoded = legacy_receipt.encoded_2718(); + assert_eq!( + legacy_encoded.len(), + legacy_receipt.encode_2718_len(), + "Encoded length for legacy receipt should match the actual encoded data length" + ); + } +} diff --git a/op-alloy/crates/consensus/src/source.rs b/rust/op-alloy/crates/consensus/src/source.rs similarity index 96% rename from op-alloy/crates/consensus/src/source.rs rename to rust/op-alloy/crates/consensus/src/source.rs index 61607adaad8..f454dd2403f 100644 --- a/op-alloy/crates/consensus/src/source.rs +++ b/rust/op-alloy/crates/consensus/src/source.rs @@ -52,7 +52,7 @@ pub struct UserDepositSource { } impl UserDepositSource { - /// Creates a new 
[UserDepositSource]. + /// Creates a new [`UserDepositSource`]. pub const fn new(l1_block_hash: B256, log_index: u64) -> Self { Self { l1_block_hash, log_index } } @@ -81,7 +81,7 @@ pub struct L1InfoDepositSource { } impl L1InfoDepositSource { - /// Creates a new [L1InfoDepositSource]. + /// Creates a new [`L1InfoDepositSource`]. pub const fn new(l1_block_hash: B256, seq_number: u64) -> Self { Self { l1_block_hash, seq_number } } @@ -115,7 +115,7 @@ pub struct UpgradeDepositSource { } impl UpgradeDepositSource { - /// Creates a new [UpgradeDepositSource]. + /// Creates a new [`UpgradeDepositSource`]. pub const fn new(intent: String) -> Self { Self { intent } } @@ -147,7 +147,7 @@ pub struct InteropBlockReplacementDepositSource { } impl InteropBlockReplacementDepositSource { - /// Creates a new [InteropBlockReplacementDepositSource]. + /// Creates a new [`InteropBlockReplacementDepositSource`]. pub const fn new(output_root: B256) -> Self { Self { output_root } } diff --git a/rust/op-alloy/crates/consensus/src/transaction/deposit.rs b/rust/op-alloy/crates/consensus/src/transaction/deposit.rs new file mode 100644 index 00000000000..ba1eec8d51d --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/transaction/deposit.rs @@ -0,0 +1,733 @@ +//! Deposit Transaction type. + +use super::OpTxType; +use alloc::vec::Vec; +use alloy_consensus::{Sealable, Transaction, Typed2718}; +use alloy_eips::{ + eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718, IsTyped2718}, + eip2930::AccessList, +}; +use alloy_primitives::{Address, B256, Bytes, ChainId, Signature, TxHash, TxKind, U256, keccak256}; +use alloy_rlp::{BufMut, Decodable, Encodable, Header}; +use core::mem; + +/// Deposit transactions, also known as deposits are initiated on L1, and executed on L2. 
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct TxDeposit { + /// Hash that uniquely identifies the source of the deposit. + pub source_hash: B256, + /// The address of the sender account. + pub from: Address, + /// The address of the recipient account, or the null (zero-length) address if the deposited + /// transaction is a contract creation. + #[cfg_attr(feature = "serde", serde(default, skip_serializing_if = "TxKind::is_create"))] + pub to: TxKind, + /// The ETH value to mint on L2. + #[cfg_attr(feature = "serde", serde(default, with = "alloy_serde::quantity"))] + pub mint: u128, + /// The ETH value to send to the recipient account. + pub value: U256, + /// The gas limit for the L2 transaction. + #[cfg_attr(feature = "serde", serde(with = "alloy_serde::quantity", rename = "gas"))] + pub gas_limit: u64, + /// Field indicating if this transaction is exempt from the L2 gas limit. + #[cfg_attr( + feature = "serde", + serde( + default, + with = "alloy_serde::quantity", + rename = "isSystemTx", + skip_serializing_if = "core::ops::Not::not" + ) + )] + pub is_system_transaction: bool, + /// Input has two uses depending if transaction is Create or Call (if `to` field is None or + /// Some). + pub input: Bytes, +} + +impl TxDeposit { + /// Decodes the inner [`TxDeposit`] fields from RLP bytes. 
+ /// + /// NOTE: This assumes a RLP header has already been decoded, and _just_ decodes the following + /// RLP fields in the following order: + /// + /// - `source_hash` + /// - `from` + /// - `to` + /// - `mint` + /// - `value` + /// - `gas_limit` + /// - `is_system_transaction` + /// - `input` + pub fn rlp_decode_fields(buf: &mut &[u8]) -> alloy_rlp::Result { + Ok(Self { + source_hash: Decodable::decode(buf)?, + from: Decodable::decode(buf)?, + to: Decodable::decode(buf)?, + mint: Decodable::decode(buf)?, + value: Decodable::decode(buf)?, + gas_limit: Decodable::decode(buf)?, + is_system_transaction: Decodable::decode(buf)?, + input: Decodable::decode(buf)?, + }) + } + + /// Decodes the transaction from RLP bytes. + pub fn rlp_decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let header = Header::decode(buf)?; + if !header.list { + return Err(alloy_rlp::Error::UnexpectedString); + } + let remaining = buf.len(); + + if header.payload_length > remaining { + return Err(alloy_rlp::Error::InputTooShort); + } + + let this = Self::rlp_decode_fields(buf)?; + + if buf.len() + header.payload_length != remaining { + return Err(alloy_rlp::Error::UnexpectedLength); + } + + Ok(this) + } + + /// Outputs the length of the transaction's fields, without a RLP header or length of the + /// eip155 fields. + pub(crate) fn rlp_encoded_fields_length(&self) -> usize { + self.source_hash.length() + + self.from.length() + + self.to.length() + + self.mint.length() + + self.value.length() + + self.gas_limit.length() + + self.is_system_transaction.length() + + self.input.0.length() + } + + /// Encodes only the transaction's fields into the desired buffer, without a RLP header. 
+ /// + pub(crate) fn rlp_encode_fields(&self, out: &mut dyn alloy_rlp::BufMut) { + self.source_hash.encode(out); + self.from.encode(out); + self.to.encode(out); + self.mint.encode(out); + self.value.encode(out); + self.gas_limit.encode(out); + self.is_system_transaction.encode(out); + self.input.encode(out); + } + + /// Calculates a heuristic for the in-memory size of the [`TxDeposit`] transaction. + #[inline] + pub fn size(&self) -> usize { + mem::size_of::() + // source_hash + mem::size_of::
() + // from + self.to.size() + // to + mem::size_of::() + // mint + mem::size_of::() + // value + mem::size_of::() + // gas_limit + mem::size_of::() + // is_system_transaction + self.input.len() // input + } + + /// Get the transaction type + pub(crate) const fn tx_type(&self) -> OpTxType { + OpTxType::Deposit + } + + /// Create an rlp header for the transaction. + fn rlp_header(&self) -> Header { + Header { list: true, payload_length: self.rlp_encoded_fields_length() } + } + + /// RLP encodes the transaction. + pub fn rlp_encode(&self, out: &mut dyn BufMut) { + self.rlp_header().encode(out); + self.rlp_encode_fields(out); + } + + /// Get the length of the transaction when RLP encoded. + pub fn rlp_encoded_length(&self) -> usize { + self.rlp_header().length_with_payload() + } + + /// Get the length of the transaction when EIP-2718 encoded. This is the + /// 1 byte type flag + the length of the RLP encoded transaction. + pub fn eip2718_encoded_length(&self) -> usize { + self.rlp_encoded_length() + 1 + } + + fn network_header(&self) -> Header { + Header { list: false, payload_length: self.eip2718_encoded_length() } + } + + /// Get the length of the transaction when network encoded. This is the + /// EIP-2718 encoded length with an outer RLP header. + pub fn network_encoded_length(&self) -> usize { + self.network_header().length_with_payload() + } + + /// Network encode the transaction with the given signature. + pub fn network_encode(&self, out: &mut dyn BufMut) { + self.network_header().encode(out); + self.encode_2718(out); + } + + /// Calculate the transaction hash. + pub fn tx_hash(&self) -> TxHash { + let mut buf = Vec::with_capacity(self.eip2718_encoded_length()); + self.encode_2718(&mut buf); + keccak256(&buf) + } + + /// Returns the signature for the optimism deposit transactions, which don't include a + /// signature. 
+ pub const fn signature() -> Signature { + Signature::new(U256::ZERO, U256::ZERO, false) + } +} + +impl Typed2718 for TxDeposit { + fn ty(&self) -> u8 { + OpTxType::Deposit as u8 + } +} + +impl IsTyped2718 for TxDeposit { + fn is_type(ty: u8) -> bool { + OpTxType::Deposit as u8 == ty + } +} + +impl Transaction for TxDeposit { + fn chain_id(&self) -> Option { + None + } + + fn nonce(&self) -> u64 { + 0u64 + } + + fn gas_limit(&self) -> u64 { + self.gas_limit + } + + fn gas_price(&self) -> Option { + None + } + + fn max_fee_per_gas(&self) -> u128 { + 0 + } + + fn max_priority_fee_per_gas(&self) -> Option { + None + } + + fn max_fee_per_blob_gas(&self) -> Option { + None + } + + fn priority_fee_or_price(&self) -> u128 { + 0 + } + + fn effective_gas_price(&self, _: Option) -> u128 { + 0 + } + + fn is_dynamic_fee(&self) -> bool { + false + } + + fn kind(&self) -> TxKind { + self.to + } + + fn is_create(&self) -> bool { + self.to.is_create() + } + + fn value(&self) -> U256 { + self.value + } + + fn input(&self) -> &Bytes { + &self.input + } + + fn access_list(&self) -> Option<&AccessList> { + None + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + None + } + + fn authorization_list(&self) -> Option<&[alloy_eips::eip7702::SignedAuthorization]> { + None + } +} + +impl Encodable2718 for TxDeposit { + fn type_flag(&self) -> Option { + Some(OpTxType::Deposit as u8) + } + + fn encode_2718_len(&self) -> usize { + self.eip2718_encoded_length() + } + + fn encode_2718(&self, out: &mut dyn alloy_rlp::BufMut) { + out.put_u8(self.tx_type() as u8); + self.rlp_encode(out); + } +} + +impl Decodable2718 for TxDeposit { + fn typed_decode(ty: u8, data: &mut &[u8]) -> Eip2718Result { + let ty: OpTxType = ty.try_into().map_err(|_| Eip2718Error::UnexpectedType(ty))?; + if ty != OpTxType::Deposit as u8 { + return Err(Eip2718Error::UnexpectedType(ty as u8)); + } + let tx = Self::decode(data)?; + Ok(tx) + } + + fn fallback_decode(data: &mut &[u8]) -> Eip2718Result { + let tx = 
Self::decode(data)?; + Ok(tx) + } +} + +impl Encodable for TxDeposit { + fn encode(&self, out: &mut dyn BufMut) { + Header { list: true, payload_length: self.rlp_encoded_fields_length() }.encode(out); + self.rlp_encode_fields(out); + } + + fn length(&self) -> usize { + let payload_length = self.rlp_encoded_fields_length(); + Header { list: true, payload_length }.length() + payload_length + } +} + +impl Decodable for TxDeposit { + fn decode(data: &mut &[u8]) -> alloy_rlp::Result { + Self::rlp_decode(data) + } +} + +impl Sealable for TxDeposit { + fn hash_slow(&self) -> B256 { + self.tx_hash() + } +} + +#[cfg(feature = "alloy-compat")] +impl From for alloy_rpc_types_eth::TransactionRequest { + fn from(tx: TxDeposit) -> Self { + let TxDeposit { + source_hash: _, + from, + to, + mint: _, + value, + gas_limit, + is_system_transaction: _, + input, + } = tx; + + Self { + from: Some(from), + to: Some(to), + value: Some(value), + gas: Some(gas_limit), + input: input.into(), + ..Default::default() + } + } +} + +/// A trait representing a deposit transaction with specific attributes. +pub trait DepositTransaction: Transaction { + /// Returns the hash that uniquely identifies the source of the deposit. + /// + /// # Returns + /// An `Option` containing the source hash if available. + fn source_hash(&self) -> Option; + + /// Returns the optional mint value of the deposit transaction. + /// + /// # Returns + /// An `u128` representing the ETH value to mint on L2, if any. + fn mint(&self) -> u128; + + /// Indicates whether the transaction is exempt from the L2 gas limit. + /// + /// # Returns + /// A `bool` indicating if the transaction is a system transaction. 
+ fn is_system_transaction(&self) -> bool; +} + +impl DepositTransaction for TxDeposit { + #[inline] + fn source_hash(&self) -> Option { + Some(self.source_hash) + } + + #[inline] + fn mint(&self) -> u128 { + self.mint + } + + #[inline] + fn is_system_transaction(&self) -> bool { + self.is_system_transaction + } +} + +/// Deposit transactions don't have a signature, however, we include an empty signature in the +/// response for better compatibility. +/// +/// This function can be used as `serialize_with` serde attribute for the [`TxDeposit`] and will +/// flatten [`TxDeposit::signature`] into response. +#[cfg(feature = "serde")] +pub fn serde_deposit_tx_rpc( + value: &T, + serializer: S, +) -> Result { + use serde::Serialize; + + #[derive(Serialize)] + struct SerdeHelper<'a, T> { + #[serde(flatten)] + value: &'a T, + #[serde(flatten)] + signature: Signature, + } + + SerdeHelper { value, signature: TxDeposit::signature() }.serialize(serializer) +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::hex; + use alloy_rlp::BytesMut; + + #[test] + fn test_deposit_transaction_trait() { + let tx = TxDeposit { + source_hash: B256::with_last_byte(42), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::from(1000), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + assert_eq!(tx.source_hash(), Some(B256::with_last_byte(42))); + assert_eq!(tx.mint(), 100); + assert!(tx.is_system_transaction()); + } + + #[test] + fn test_deposit_transaction_without_mint() { + let tx = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 0, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: false, + input: Bytes::default(), + }; + + assert_eq!(tx.source_hash(), Some(B256::default())); + assert_eq!(tx.mint(), 0); + assert!(!tx.is_system_transaction()); + } + + #[test] + fn test_deposit_transaction_to_contract() { + let contract_address = 
Address::with_last_byte(0xFF); + let tx = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::Call(contract_address), + mint: 200, + value: U256::from(500), + gas_limit: 100000, + is_system_transaction: false, + input: Bytes::from_static(&[1, 2, 3]), + }; + + assert_eq!(tx.source_hash(), Some(B256::default())); + assert_eq!(tx.mint(), 200); + assert!(!tx.is_system_transaction()); + assert_eq!(tx.kind(), TxKind::Call(contract_address)); + } + + #[test] + fn test_rlp_roundtrip() { + let bytes = Bytes::from_static(&hex!( + "7ef9015aa044bae9d41b8380d781187b426c6fe43df5fb2fb57bd4466ef6a701e1f01e015694deaddeaddeaddeaddeaddeaddeaddeaddead000194420000000000000000000000000000000000001580808408f0d18001b90104015d8eb900000000000000000000000000000000000000000000000000000000008057650000000000000000000000000000000000000000000000000000000063d96d10000000000000000000000000000000000000000000000000000000000009f35273d89754a1e0387b89520d989d3be9c37c1f32495a88faf1ea05c61121ab0d1900000000000000000000000000000000000000000000000000000000000000010000000000000000000000002d679b567db6187c0c8323fa982cfb88b74dbcc7000000000000000000000000000000000000000000000000000000000000083400000000000000000000000000000000000000000000000000000000000f4240" + )); + let tx_a = TxDeposit::decode(&mut bytes[1..].as_ref()).unwrap(); + let mut buf_a = BytesMut::default(); + tx_a.encode(&mut buf_a); + assert_eq!(&buf_a[..], &bytes[1..]); + } + + #[test] + fn test_encode_decode_fields() { + let original = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + let mut buffer = BytesMut::new(); + original.rlp_encode_fields(&mut buffer); + let decoded = TxDeposit::rlp_decode_fields(&mut &buffer[..]).expect("Failed to decode"); + + assert_eq!(original, decoded); + } + + #[test] + fn test_encode_with_and_without_header() { + 
let tx_deposit = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + let mut buffer_with_header = BytesMut::new(); + tx_deposit.encode(&mut buffer_with_header); + + let mut buffer_without_header = BytesMut::new(); + tx_deposit.rlp_encode_fields(&mut buffer_without_header); + + assert!(buffer_with_header.len() > buffer_without_header.len()); + } + + #[test] + fn test_payload_length() { + let tx_deposit = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + assert!(tx_deposit.size() > tx_deposit.rlp_encoded_fields_length()); + } + + #[test] + fn test_encode_inner_with_and_without_header() { + let tx_deposit = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + let mut buffer_with_header = BytesMut::new(); + tx_deposit.network_encode(&mut buffer_with_header); + + let mut buffer_without_header = BytesMut::new(); + tx_deposit.encode_2718(&mut buffer_without_header); + + assert!(buffer_with_header.len() > buffer_without_header.len()); + } + + #[test] + fn test_payload_length_header() { + let tx_deposit = TxDeposit { + source_hash: B256::default(), + from: Address::default(), + to: TxKind::default(), + mint: 100, + value: U256::default(), + gas_limit: 50000, + is_system_transaction: true, + input: Bytes::default(), + }; + + let total_len = tx_deposit.network_encoded_length(); + let len_without_header = tx_deposit.eip2718_encoded_length(); + + assert!(total_len > len_without_header); + } + #[test] + fn test_deposit_tx_roundtrip() { + let raw_txs = [ + 
"7ef8f8a0871ec5fb6afe7e5ae950bbb4cfd7d7cb277b413e67da806d50834a814b14c9f494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c12000000000000000400000000681c941f0000000001566261000000000000000000000000000000000000000000000000000000005f629c020000000000000000000000000000000000000000000000000000000000000001937badfbcce566e0ba932a3f7659644aa0c6ef019541d3134a1d8cb9f84d45c70000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9", + ]; + + for raw_tx_hex in raw_txs { + let raw_tx = hex::decode(raw_tx_hex).unwrap(); + + let tx = TxDeposit::decode_2718(&mut raw_tx.as_ref()).unwrap(); + let mut encoded = BytesMut::new(); + tx.encode_2718(&mut encoded); + assert_eq!(&encoded[..], &raw_tx[..], "Encoded bytes don't match original"); + + let tx_from_fields = TxDeposit::rlp_decode(&mut &raw_tx[1..]).unwrap(); + let mut encoded_fields = BytesMut::new(); + tx_from_fields.rlp_encode(&mut encoded_fields); + assert_eq!( + &encoded_fields[..], + &raw_tx[1..], + "RLP encoded fields don't match original" + ); + } + } +} + +/// Bincode-compatible [`TxDeposit`] serde implementation. +#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] +pub(super) mod serde_bincode_compat { + use alloc::borrow::Cow; + use alloy_primitives::{Address, B256, Bytes, TxKind, U256}; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible [`super::TxDeposit`] serde implementation. 
+ /// + /// Intended to use with the [`serde_with::serde_as`] macro in the following way: + /// ```rust + /// use op_alloy_consensus::{TxDeposit, serde_bincode_compat}; + /// use serde::{Deserialize, Serialize}; + /// use serde_with::serde_as; + /// + /// #[serde_as] + /// #[derive(Serialize, Deserialize)] + /// struct Data { + /// #[serde_as(as = "serde_bincode_compat::TxDeposit")] + /// transaction: TxDeposit, + /// } + /// ``` + #[derive(Debug, Serialize, Deserialize)] + pub struct TxDeposit<'a> { + source_hash: B256, + from: Address, + #[serde(default)] + to: TxKind, + #[serde(default)] + mint: u128, + value: U256, + gas_limit: u64, + is_system_transaction: bool, + input: Cow<'a, Bytes>, + } + + impl<'a> From<&'a super::TxDeposit> for TxDeposit<'a> { + fn from(value: &'a super::TxDeposit) -> Self { + Self { + source_hash: value.source_hash, + from: value.from, + to: value.to, + mint: value.mint, + value: value.value, + gas_limit: value.gas_limit, + is_system_transaction: value.is_system_transaction, + input: Cow::Borrowed(&value.input), + } + } + } + + impl<'a> From> for super::TxDeposit { + fn from(value: TxDeposit<'a>) -> Self { + Self { + source_hash: value.source_hash, + from: value.from, + to: value.to, + mint: value.mint, + value: value.value, + gas_limit: value.gas_limit, + is_system_transaction: value.is_system_transaction, + input: value.input.into_owned(), + } + } + } + + impl SerializeAs for TxDeposit<'_> { + fn serialize_as(source: &super::TxDeposit, serializer: S) -> Result + where + S: Serializer, + { + TxDeposit::from(source).serialize(serializer) + } + } + + impl<'de> DeserializeAs<'de, super::TxDeposit> for TxDeposit<'de> { + fn deserialize_as(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + TxDeposit::deserialize(deserializer).map(Into::into) + } + } + + #[cfg(test)] + mod tests { + use arbitrary::Arbitrary; + use rand::Rng; + use serde::{Deserialize, Serialize}; + use serde_with::serde_as; + + use super::super::{TxDeposit, 
serde_bincode_compat}; + + #[test] + fn test_tx_deposit_bincode_roundtrip() { + #[serde_as] + #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] + struct Data { + #[serde_as(as = "serde_bincode_compat::TxDeposit")] + transaction: TxDeposit, + } + + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + let data = Data { + transaction: TxDeposit::arbitrary(&mut arbitrary::Unstructured::new(&bytes)) + .unwrap(), + }; + + let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); + let (decoded, _) = + bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) + .unwrap(); + assert_eq!(decoded, data); + } + } +} diff --git a/rust/op-alloy/crates/consensus/src/transaction/envelope.rs b/rust/op-alloy/crates/consensus/src/transaction/envelope.rs new file mode 100644 index 00000000000..afe73b2474a --- /dev/null +++ b/rust/op-alloy/crates/consensus/src/transaction/envelope.rs @@ -0,0 +1,786 @@ +use crate::{ + OpPooledTransaction, TxDeposit, + transaction::{OpDepositInfo, OpTransactionInfo}, +}; +use alloy_consensus::{ + EthereumTxEnvelope, Extended, Sealable, Sealed, SignableTransaction, Signed, + TransactionEnvelope, TxEip1559, TxEip2930, TxEip7702, TxEnvelope, TxLegacy, + error::ValueError, + transaction::{TransactionInfo, TxHashRef}, +}; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::{B256, Bytes, Signature, TxHash}; + +/// The Ethereum [EIP-2718] Transaction Envelope, modified for OP Stack chains. +/// +/// # Note: +/// +/// This enum distinguishes between tagged and untagged legacy transactions, as +/// the in-protocol merkle tree may commit to EITHER 0-prefixed or raw. +/// Therefore we must ensure that encoding returns the precise byte-array that +/// was decoded, preserving the presence or absence of the `TransactionType` +/// flag. 
+/// +/// [EIP-2718]: https://eips.ethereum.org/EIPS/eip-2718 +#[derive(Debug, Clone, TransactionEnvelope)] +#[envelope(tx_type_name = OpTxType, typed = OpTypedTransaction, serde_cfg(feature = "serde"))] +pub enum OpTxEnvelope { + /// An untagged [`TxLegacy`]. + #[envelope(ty = 0)] + Legacy(Signed), + /// A [`TxEip2930`] tagged with type 1. + #[envelope(ty = 1)] + Eip2930(Signed), + /// A [`TxEip1559`] tagged with type 2. + #[envelope(ty = 2)] + Eip1559(Signed), + /// A [`TxEip7702`] tagged with type 4. + #[envelope(ty = 4)] + Eip7702(Signed), + /// A [`TxDeposit`] tagged with type 0x7E. + #[envelope(ty = 126)] + #[serde(serialize_with = "crate::serde_deposit_tx_rpc")] + Deposit(Sealed), +} + +/// Represents an Optimism transaction envelope. +/// +/// Compared to Ethereum it can tell whether the transaction is a deposit. +pub trait OpTransaction { + /// Returns `true` if the transaction is a deposit. + fn is_deposit(&self) -> bool; + + /// Returns `Some` if the transaction is a deposit. 
+ fn as_deposit(&self) -> Option<&Sealed>; +} + +impl OpTransaction for OpTxEnvelope { + fn is_deposit(&self) -> bool { + self.is_deposit() + } + + fn as_deposit(&self) -> Option<&Sealed> { + self.as_deposit() + } +} + +impl OpTransaction for Extended +where + B: OpTransaction, + T: OpTransaction, +{ + fn is_deposit(&self) -> bool { + match self { + Self::BuiltIn(b) => b.is_deposit(), + Self::Other(t) => t.is_deposit(), + } + } + + fn as_deposit(&self) -> Option<&Sealed> { + match self { + Self::BuiltIn(b) => b.as_deposit(), + Self::Other(t) => t.as_deposit(), + } + } +} + +impl AsRef for OpTxEnvelope { + fn as_ref(&self) -> &Self { + self + } +} + +impl From> for OpTxEnvelope { + fn from(v: Signed) -> Self { + Self::Legacy(v) + } +} + +impl From> for OpTxEnvelope { + fn from(v: Signed) -> Self { + Self::Eip2930(v) + } +} + +impl From> for OpTxEnvelope { + fn from(v: Signed) -> Self { + Self::Eip1559(v) + } +} + +impl From> for OpTxEnvelope { + fn from(v: Signed) -> Self { + Self::Eip7702(v) + } +} + +impl From for OpTxEnvelope { + fn from(v: TxDeposit) -> Self { + v.seal_slow().into() + } +} + +impl From> for OpTxEnvelope { + fn from(value: Signed) -> Self { + let (tx, sig, hash) = value.into_parts(); + match tx { + OpTypedTransaction::Legacy(tx_legacy) => { + let tx = Signed::new_unchecked(tx_legacy, sig, hash); + Self::Legacy(tx) + } + OpTypedTransaction::Eip2930(tx_eip2930) => { + let tx = Signed::new_unchecked(tx_eip2930, sig, hash); + Self::Eip2930(tx) + } + OpTypedTransaction::Eip1559(tx_eip1559) => { + let tx = Signed::new_unchecked(tx_eip1559, sig, hash); + Self::Eip1559(tx) + } + OpTypedTransaction::Eip7702(tx_eip7702) => { + let tx = Signed::new_unchecked(tx_eip7702, sig, hash); + Self::Eip7702(tx) + } + OpTypedTransaction::Deposit(tx) => Self::Deposit(Sealed::new_unchecked(tx, hash)), + } + } +} + +impl From<(OpTypedTransaction, Signature)> for OpTxEnvelope { + fn from(value: (OpTypedTransaction, Signature)) -> Self { + Self::new_unhashed(value.0, 
value.1) + } +} + +impl From> for OpTxEnvelope { + fn from(v: Sealed) -> Self { + Self::Deposit(v) + } +} + +impl From for Extended { + fn from(value: OpTxEnvelope) -> Self { + Self::BuiltIn(value) + } +} + +impl TryFrom> for OpTxEnvelope { + type Error = EthereumTxEnvelope; + + fn try_from(value: EthereumTxEnvelope) -> Result { + Self::try_from_eth_envelope(value) + } +} + +impl TryFrom for TxEnvelope { + type Error = ValueError; + + fn try_from(value: OpTxEnvelope) -> Result { + value.try_into_eth_envelope() + } +} + +#[cfg(feature = "alloy-compat")] +impl From for alloy_rpc_types_eth::TransactionRequest { + fn from(value: OpTxEnvelope) -> Self { + match value { + OpTxEnvelope::Eip2930(tx) => tx.into_parts().0.into(), + OpTxEnvelope::Eip1559(tx) => tx.into_parts().0.into(), + OpTxEnvelope::Eip7702(tx) => tx.into_parts().0.into(), + OpTxEnvelope::Deposit(tx) => tx.into_inner().into(), + OpTxEnvelope::Legacy(tx) => tx.into_parts().0.into(), + } + } +} + +impl OpTxEnvelope { + /// Creates a new enveloped transaction from the given transaction, signature and hash. + /// + /// Caution: This assumes the given hash is the correct transaction hash. + pub fn new_unchecked( + transaction: OpTypedTransaction, + signature: Signature, + hash: B256, + ) -> Self { + Signed::new_unchecked(transaction, signature, hash).into() + } + + /// Creates a new signed transaction from the given typed transaction and signature without the + /// hash. + /// + /// Note: this only calculates the hash on the first [`OpTxEnvelope::hash`] call. + pub fn new_unhashed(transaction: OpTypedTransaction, signature: Signature) -> Self { + transaction.into_signed(signature).into() + } + + /// Returns true if the transaction is a legacy transaction. + #[inline] + pub const fn is_legacy(&self) -> bool { + matches!(self, Self::Legacy(_)) + } + + /// Returns true if the transaction is an EIP-2930 transaction. 
+ #[inline] + pub const fn is_eip2930(&self) -> bool { + matches!(self, Self::Eip2930(_)) + } + + /// Returns true if the transaction is an EIP-1559 transaction. + #[inline] + pub const fn is_eip1559(&self) -> bool { + matches!(self, Self::Eip1559(_)) + } + + /// Returns true if the transaction is a system transaction. + #[inline] + pub const fn is_system_transaction(&self) -> bool { + match self { + Self::Deposit(tx) => tx.inner().is_system_transaction, + _ => false, + } + } + + /// Attempts to convert the envelope into the pooled variant. + /// + /// Returns an error if the envelope's variant is incompatible with the pooled format: + /// [`TxDeposit`]. + pub fn try_into_pooled(self) -> Result> { + match self { + Self::Legacy(tx) => Ok(tx.into()), + Self::Eip2930(tx) => Ok(tx.into()), + Self::Eip1559(tx) => Ok(tx.into()), + Self::Eip7702(tx) => Ok(tx.into()), + Self::Deposit(tx) => { + Err(ValueError::new(tx.into(), "Deposit transactions cannot be pooled")) + } + } + } + + /// Attempts to convert the envelope into the ethereum pooled variant. + /// + /// Returns an error if the envelope's variant is incompatible with the pooled format: + /// [`TxDeposit`]. + pub fn try_into_eth_pooled( + self, + ) -> Result> { + self.try_into_pooled().map(Into::into) + } + + /// Attempts to convert the optimism variant into an ethereum [`TxEnvelope`]. + /// + /// Returns the envelope as error if it is a variant unsupported on ethereum: [`TxDeposit`] + pub fn try_into_eth_envelope(self) -> Result> { + match self { + Self::Legacy(tx) => Ok(tx.into()), + Self::Eip2930(tx) => Ok(tx.into()), + Self::Eip1559(tx) => Ok(tx.into()), + Self::Eip7702(tx) => Ok(tx.into()), + tx @ Self::Deposit(_) => Err(ValueError::new( + tx, + "Deposit transactions cannot be converted to ethereum transaction", + )), + } + } + + /// Helper that creates [`OpTransactionInfo`] by adding [`OpDepositInfo`] obtained from the + /// given closure if this transaction is a deposit and return the [`OpTransactionInfo`]. 
+ pub fn try_to_tx_info( + &self, + tx_info: TransactionInfo, + f: F, + ) -> Result + where + F: FnOnce(TxHash) -> Result, E>, + { + let deposit_meta = + if self.is_deposit() { f(self.tx_hash())? } else { None }.unwrap_or_default(); + + Ok(OpTransactionInfo::new(tx_info, deposit_meta)) + } + + /// Attempts to convert an ethereum [`TxEnvelope`] into the optimism variant. + /// + /// Returns the given envelope as error if [`OpTxEnvelope`] doesn't support the variant + /// (EIP-4844) + pub fn try_from_eth_envelope( + tx: EthereumTxEnvelope, + ) -> Result> { + match tx { + EthereumTxEnvelope::Legacy(tx) => Ok(tx.into()), + EthereumTxEnvelope::Eip2930(tx) => Ok(tx.into()), + EthereumTxEnvelope::Eip1559(tx) => Ok(tx.into()), + tx @ EthereumTxEnvelope::::Eip4844(_) => Err(tx), + EthereumTxEnvelope::Eip7702(tx) => Ok(tx.into()), + } + } + + /// Returns mutable access to the input bytes. + /// + /// Caution: modifying this will cause side-effects on the hash. + #[doc(hidden)] + pub const fn input_mut(&mut self) -> &mut Bytes { + match self { + Self::Eip1559(tx) => &mut tx.tx_mut().input, + Self::Eip2930(tx) => &mut tx.tx_mut().input, + Self::Legacy(tx) => &mut tx.tx_mut().input, + Self::Eip7702(tx) => &mut tx.tx_mut().input, + Self::Deposit(tx) => &mut tx.inner_mut().input, + } + } + + /// Attempts to convert an ethereum [`TxEnvelope`] into the optimism variant. 
+ /// + /// Returns the given envelope as error if [`OpTxEnvelope`] doesn't support the variant + /// (EIP-4844) + #[cfg(feature = "alloy-compat")] + pub fn try_from_any_envelope( + tx: alloy_network::AnyTxEnvelope, + ) -> Result { + match tx.try_into_envelope() { + Ok(eth) => { + Self::try_from_eth_envelope(eth).map_err(alloy_network::AnyTxEnvelope::Ethereum) + } + Err(err) => match err.into_value() { + alloy_network::AnyTxEnvelope::Unknown(unknown) => { + let Ok(deposit) = unknown.inner.clone().try_into() else { + return Err(alloy_network::AnyTxEnvelope::Unknown(unknown)); + }; + Ok(Self::Deposit(Sealed::new_unchecked(deposit, unknown.hash))) + } + unsupported => Err(unsupported), + }, + } + } + + /// Returns true if the transaction is a deposit transaction. + #[inline] + pub const fn is_deposit(&self) -> bool { + matches!(self, Self::Deposit(_)) + } + + /// Returns the [`TxLegacy`] variant if the transaction is a legacy transaction. + pub const fn as_legacy(&self) -> Option<&Signed> { + match self { + Self::Legacy(tx) => Some(tx), + _ => None, + } + } + + /// Returns the [`TxEip2930`] variant if the transaction is an EIP-2930 transaction. + pub const fn as_eip2930(&self) -> Option<&Signed> { + match self { + Self::Eip2930(tx) => Some(tx), + _ => None, + } + } + + /// Returns the [`TxEip1559`] variant if the transaction is an EIP-1559 transaction. + pub const fn as_eip1559(&self) -> Option<&Signed> { + match self { + Self::Eip1559(tx) => Some(tx), + _ => None, + } + } + + /// Returns the [`TxEip1559`] variant if the transaction is an EIP-1559 transaction. + pub const fn as_deposit(&self) -> Option<&Sealed> { + match self { + Self::Deposit(tx) => Some(tx), + _ => None, + } + } + + /// Return the reference to signature. + /// + /// Returns `None` if this is a deposit variant. 
+ pub const fn signature(&self) -> Option<&Signature> { + match self { + Self::Legacy(tx) => Some(tx.signature()), + Self::Eip2930(tx) => Some(tx.signature()), + Self::Eip1559(tx) => Some(tx.signature()), + Self::Eip7702(tx) => Some(tx.signature()), + Self::Deposit(_) => None, + } + } + + /// Return the [`OpTxType`] of the inner txn. + pub const fn tx_type(&self) -> OpTxType { + match self { + Self::Legacy(_) => OpTxType::Legacy, + Self::Eip2930(_) => OpTxType::Eip2930, + Self::Eip1559(_) => OpTxType::Eip1559, + Self::Eip7702(_) => OpTxType::Eip7702, + Self::Deposit(_) => OpTxType::Deposit, + } + } + + /// Returns the inner transaction hash. + pub fn hash(&self) -> &B256 { + match self { + Self::Legacy(tx) => tx.hash(), + Self::Eip1559(tx) => tx.hash(), + Self::Eip2930(tx) => tx.hash(), + Self::Eip7702(tx) => tx.hash(), + Self::Deposit(tx) => tx.hash_ref(), + } + } + + /// Returns the inner transaction hash. + pub fn tx_hash(&self) -> B256 { + *self.hash() + } + + /// Return the length of the inner txn, including type byte length + pub fn eip2718_encoded_length(&self) -> usize { + match self { + Self::Legacy(t) => t.eip2718_encoded_length(), + Self::Eip2930(t) => t.eip2718_encoded_length(), + Self::Eip1559(t) => t.eip2718_encoded_length(), + Self::Eip7702(t) => t.eip2718_encoded_length(), + Self::Deposit(t) => t.eip2718_encoded_length(), + } + } +} + +impl TxHashRef for OpTxEnvelope { + fn tx_hash(&self) -> &B256 { + Self::hash(self) + } +} + +#[cfg(feature = "k256")] +impl alloy_consensus::transaction::SignerRecoverable for OpTxEnvelope { + fn recover_signer( + &self, + ) -> Result { + let signature_hash = match self { + Self::Legacy(tx) => tx.signature_hash(), + Self::Eip2930(tx) => tx.signature_hash(), + Self::Eip1559(tx) => tx.signature_hash(), + Self::Eip7702(tx) => tx.signature_hash(), + // Optimism's Deposit transaction does not have a signature. Directly return the + // `from` address. 
+ Self::Deposit(tx) => return Ok(tx.from), + }; + let signature = match self { + Self::Legacy(tx) => tx.signature(), + Self::Eip2930(tx) => tx.signature(), + Self::Eip1559(tx) => tx.signature(), + Self::Eip7702(tx) => tx.signature(), + Self::Deposit(_) => unreachable!("Deposit transactions should not be handled here"), + }; + alloy_consensus::crypto::secp256k1::recover_signer(signature, signature_hash) + } + + fn recover_signer_unchecked( + &self, + ) -> Result { + let signature_hash = match self { + Self::Legacy(tx) => tx.signature_hash(), + Self::Eip2930(tx) => tx.signature_hash(), + Self::Eip1559(tx) => tx.signature_hash(), + Self::Eip7702(tx) => tx.signature_hash(), + // Optimism's Deposit transaction does not have a signature. Directly return the + // `from` address. + Self::Deposit(tx) => return Ok(tx.from), + }; + let signature = match self { + Self::Legacy(tx) => tx.signature(), + Self::Eip2930(tx) => tx.signature(), + Self::Eip1559(tx) => tx.signature(), + Self::Eip7702(tx) => tx.signature(), + Self::Deposit(_) => unreachable!("Deposit transactions should not be handled here"), + }; + alloy_consensus::crypto::secp256k1::recover_signer_unchecked(signature, signature_hash) + } + + fn recover_unchecked_with_buf( + &self, + buf: &mut alloc::vec::Vec, + ) -> Result { + match self { + Self::Legacy(tx) => { + alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) + } + Self::Eip2930(tx) => { + alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) + } + Self::Eip1559(tx) => { + alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) + } + Self::Eip7702(tx) => { + alloy_consensus::transaction::SignerRecoverable::recover_unchecked_with_buf(tx, buf) + } + Self::Deposit(tx) => Ok(tx.from), + } + } +} + +/// Bincode-compatible serde implementation for `OpTxEnvelope`. 
+#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] +pub mod serde_bincode_compat { + use crate::serde_bincode_compat::TxDeposit; + use alloy_consensus::{ + Sealed, Signed, + transaction::serde_bincode_compat::{TxEip1559, TxEip2930, TxEip7702, TxLegacy}, + }; + use alloy_primitives::{B256, Signature}; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible representation of an `OpTxEnvelope`. + #[derive(Debug, Serialize, Deserialize)] + pub enum OpTxEnvelope<'a> { + /// Legacy variant. + Legacy { + /// Transaction signature. + signature: Signature, + /// Borrowed legacy transaction data. + transaction: TxLegacy<'a>, + }, + /// EIP-2930 variant. + Eip2930 { + /// Transaction signature. + signature: Signature, + /// Borrowed EIP-2930 transaction data. + transaction: TxEip2930<'a>, + }, + /// EIP-1559 variant. + Eip1559 { + /// Transaction signature. + signature: Signature, + /// Borrowed EIP-1559 transaction data. + transaction: TxEip1559<'a>, + }, + /// EIP-7702 variant. + Eip7702 { + /// Transaction signature. + signature: Signature, + /// Borrowed EIP-7702 transaction data. + transaction: TxEip7702<'a>, + }, + /// Deposit variant. + Deposit { + /// Precomputed hash. + hash: B256, + /// Borrowed deposit transaction data. 
+ transaction: TxDeposit<'a>, + }, + } + + impl<'a> From<&'a super::OpTxEnvelope> for OpTxEnvelope<'a> { + fn from(value: &'a super::OpTxEnvelope) -> Self { + match value { + super::OpTxEnvelope::Legacy(signed_legacy) => Self::Legacy { + signature: *signed_legacy.signature(), + transaction: signed_legacy.tx().into(), + }, + super::OpTxEnvelope::Eip2930(signed_2930) => Self::Eip2930 { + signature: *signed_2930.signature(), + transaction: signed_2930.tx().into(), + }, + super::OpTxEnvelope::Eip1559(signed_1559) => Self::Eip1559 { + signature: *signed_1559.signature(), + transaction: signed_1559.tx().into(), + }, + super::OpTxEnvelope::Eip7702(signed_7702) => Self::Eip7702 { + signature: *signed_7702.signature(), + transaction: signed_7702.tx().into(), + }, + super::OpTxEnvelope::Deposit(sealed_deposit) => Self::Deposit { + hash: sealed_deposit.seal(), + transaction: sealed_deposit.inner().into(), + }, + } + } + } + + impl<'a> From> for super::OpTxEnvelope { + fn from(value: OpTxEnvelope<'a>) -> Self { + match value { + OpTxEnvelope::Legacy { signature, transaction } => { + Self::Legacy(Signed::new_unhashed(transaction.into(), signature)) + } + OpTxEnvelope::Eip2930 { signature, transaction } => { + Self::Eip2930(Signed::new_unhashed(transaction.into(), signature)) + } + OpTxEnvelope::Eip1559 { signature, transaction } => { + Self::Eip1559(Signed::new_unhashed(transaction.into(), signature)) + } + OpTxEnvelope::Eip7702 { signature, transaction } => { + Self::Eip7702(Signed::new_unhashed(transaction.into(), signature)) + } + OpTxEnvelope::Deposit { hash, transaction } => { + Self::Deposit(Sealed::new_unchecked(transaction.into(), hash)) + } + } + } + } + + impl SerializeAs for OpTxEnvelope<'_> { + fn serialize_as(source: &super::OpTxEnvelope, serializer: S) -> Result + where + S: Serializer, + { + let borrowed = OpTxEnvelope::from(source); + borrowed.serialize(serializer) + } + } + + impl<'de> DeserializeAs<'de, super::OpTxEnvelope> for OpTxEnvelope<'de> { + fn 
deserialize_as(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let borrowed = OpTxEnvelope::deserialize(deserializer)?; + Ok(borrowed.into()) + } + } + + #[cfg(test)] + mod tests { + use super::*; + use arbitrary::Arbitrary; + use rand::Rng; + use serde::{Deserialize, Serialize}; + use serde_with::serde_as; + + /// Tests a bincode round-trip for `OpTxEnvelope` using an arbitrary instance. + #[test] + fn test_op_tx_envelope_bincode_roundtrip_arbitrary() { + #[serde_as] + #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] + struct Data { + // Use the bincode-compatible representation defined in this module. + #[serde_as(as = "OpTxEnvelope<'_>")] + envelope: super::super::OpTxEnvelope, + } + + let mut bytes = [0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + let data = Data { + envelope: super::super::OpTxEnvelope::arbitrary(&mut arbitrary::Unstructured::new( + &bytes, + )) + .unwrap(), + }; + + let encoded = bincode::serde::encode_to_vec(&data, bincode::config::legacy()).unwrap(); + let (decoded, _) = + bincode::serde::decode_from_slice::(&encoded, bincode::config::legacy()) + .unwrap(); + assert_eq!(decoded, data); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::vec; + use alloy_consensus::{SignableTransaction, Transaction}; + use alloy_primitives::{Address, B256, Bytes, Signature, TxKind, U256, hex}; + + #[test] + fn test_tx_gas_limit() { + let tx = TxDeposit { gas_limit: 1, ..Default::default() }; + let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); + assert_eq!(tx_envelope.gas_limit(), 1); + } + + #[test] + fn test_deposit() { + let tx = TxDeposit { is_system_transaction: true, ..Default::default() }; + let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); + assert!(tx_envelope.is_deposit()); + + let tx = TxEip1559::default(); + let sig = Signature::test_signature(); + let tx_envelope = OpTxEnvelope::Eip1559(tx.into_signed(sig)); + assert!(!tx_envelope.is_system_transaction()); + } + + #[test] + fn 
test_system_transaction() { + let mut tx = TxDeposit { is_system_transaction: true, ..Default::default() }; + let tx_envelope = OpTxEnvelope::Deposit(tx.clone().seal_slow()); + assert!(tx_envelope.is_system_transaction()); + + tx.is_system_transaction = false; + let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); + assert!(!tx_envelope.is_system_transaction()); + } + + #[test] + fn test_encode_decode_deposit() { + let tx = TxDeposit { + source_hash: B256::left_padding_from(&[0xde, 0xad]), + from: Address::left_padding_from(&[0xbe, 0xef]), + mint: 1, + gas_limit: 2, + to: TxKind::Call(Address::left_padding_from(&[3])), + value: U256::from(4_u64), + input: Bytes::from(vec![5]), + is_system_transaction: false, + }; + let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); + let encoded = tx_envelope.encoded_2718(); + let decoded = OpTxEnvelope::decode_2718(&mut encoded.as_ref()).unwrap(); + assert_eq!(encoded.len(), tx_envelope.encode_2718_len()); + assert_eq!(decoded, tx_envelope); + } + + #[test] + #[cfg(feature = "serde")] + fn test_serde_roundtrip_deposit() { + let tx = TxDeposit { + gas_limit: u64::MAX, + to: TxKind::Call(Address::random()), + value: U256::MAX, + input: Bytes::new(), + source_hash: U256::MAX.into(), + from: Address::random(), + mint: u128::MAX, + is_system_transaction: false, + }; + let tx_envelope = OpTxEnvelope::Deposit(tx.seal_slow()); + + let serialized = serde_json::to_string(&tx_envelope).unwrap(); + let deserialized: OpTxEnvelope = serde_json::from_str(&serialized).unwrap(); + + assert_eq!(tx_envelope, deserialized); + } + + #[test] + fn eip2718_deposit_decode() { + // + let b = hex!( + 
"7ef8f8a0417d134467f4737fcdf2475f0ecdd2a0ed6d87ecffc888ba9f60ee7e3b8ac26a94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c1200000000000000040000000066c352bb000000000139c4f500000000000000000000000000000000000000000000000000000000c0cff1460000000000000000000000000000000000000000000000000000000000000001d4c88f4065ac9671e8b1329b90773e89b5ddff9cf8675b2b5e9c1b28320609930000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9" + ); + + let tx = OpTxEnvelope::decode_2718(&mut b[..].as_ref()).unwrap(); + let deposit = tx.as_deposit().unwrap(); + assert_eq!(deposit.mint, 0); + } + + #[test] + fn eip1559_decode() { + let tx = TxEip1559 { + chain_id: 1u64, + nonce: 2, + max_fee_per_gas: 3, + max_priority_fee_per_gas: 4, + gas_limit: 5, + to: Address::left_padding_from(&[6]).into(), + value: U256::from(7_u64), + input: vec![8].into(), + access_list: Default::default(), + }; + let sig = Signature::test_signature(); + let tx_signed = tx.into_signed(sig); + let envelope: OpTxEnvelope = tx_signed.into(); + let encoded = envelope.encoded_2718(); + let mut slice = encoded.as_slice(); + let decoded = OpTxEnvelope::decode_2718(&mut slice).unwrap(); + assert!(matches!(decoded, OpTxEnvelope::Eip1559(_))); + } +} diff --git a/op-alloy/crates/consensus/src/transaction/meta.rs b/rust/op-alloy/crates/consensus/src/transaction/meta.rs similarity index 100% rename from op-alloy/crates/consensus/src/transaction/meta.rs rename to rust/op-alloy/crates/consensus/src/transaction/meta.rs diff --git a/op-alloy/crates/consensus/src/transaction/mod.rs b/rust/op-alloy/crates/consensus/src/transaction/mod.rs similarity index 100% rename from op-alloy/crates/consensus/src/transaction/mod.rs rename to rust/op-alloy/crates/consensus/src/transaction/mod.rs diff --git a/op-alloy/crates/consensus/src/transaction/pooled.rs b/rust/op-alloy/crates/consensus/src/transaction/pooled.rs similarity index 100% rename from 
op-alloy/crates/consensus/src/transaction/pooled.rs rename to rust/op-alloy/crates/consensus/src/transaction/pooled.rs diff --git a/op-alloy/crates/consensus/src/transaction/tx_type.rs b/rust/op-alloy/crates/consensus/src/transaction/tx_type.rs similarity index 100% rename from op-alloy/crates/consensus/src/transaction/tx_type.rs rename to rust/op-alloy/crates/consensus/src/transaction/tx_type.rs diff --git a/op-alloy/crates/consensus/src/transaction/typed.rs b/rust/op-alloy/crates/consensus/src/transaction/typed.rs similarity index 100% rename from op-alloy/crates/consensus/src/transaction/typed.rs rename to rust/op-alloy/crates/consensus/src/transaction/typed.rs diff --git a/rust/op-alloy/crates/network/Cargo.toml b/rust/op-alloy/crates/network/Cargo.toml new file mode 100644 index 00000000000..f9e1882b979 --- /dev/null +++ b/rust/op-alloy/crates/network/Cargo.toml @@ -0,0 +1,44 @@ +[package] +name = "op-alloy-network" +description = "Optimism blockchain RPC behavior abstraction" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +authors = ["Alloy Contributors"] +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Workspace +op-alloy-consensus = { workspace = true, features = ["alloy-compat"] } +op-alloy-rpc-types.workspace = true + +# Alloy +alloy-consensus.workspace = true +alloy-network.workspace = true +alloy-primitives.workspace = true +alloy-provider.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-signer.workspace = true + +[features] +std = [ + "op-alloy-consensus/std", + "op-alloy-rpc-types/std", + "alloy-consensus/std", + "alloy-primitives/std", + "alloy-rpc-types-eth/std" +] +serde = [ + "op-alloy-consensus/serde", + "op-alloy-rpc-types/serde", + "alloy-consensus/serde", + "alloy-primitives/serde", + "alloy-rpc-types-eth/serde" +] diff 
--git a/rust/op-alloy/crates/network/README.md b/rust/op-alloy/crates/network/README.md new file mode 100644 index 00000000000..85613f8a529 --- /dev/null +++ b/rust/op-alloy/crates/network/README.md @@ -0,0 +1,8 @@ +## `op-alloy-network` + +Optimism blockchain RPC behavior abstraction. + +This crate contains a simple abstraction of the RPC behavior of an +Op-stack blockchain. It is intended to be used by the Alloy client to +provide a consistent interface to the rest of the library, regardless of +changes the underlying blockchain makes to the RPC interface. diff --git a/op-alloy/crates/network/src/lib.rs b/rust/op-alloy/crates/network/src/lib.rs similarity index 100% rename from op-alloy/crates/network/src/lib.rs rename to rust/op-alloy/crates/network/src/lib.rs diff --git a/rust/op-alloy/crates/op-alloy/Cargo.toml b/rust/op-alloy/crates/op-alloy/Cargo.toml new file mode 100644 index 00000000000..01f792166be --- /dev/null +++ b/rust/op-alloy/crates/op-alloy/Cargo.toml @@ -0,0 +1,74 @@ +[package] +name = "op-alloy" +description = "Connect applications to the OP Stack" +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +authors = ["Alloy Contributors"] +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[package.metadata.docs.rs] +all-features = true +rustdoc-args = ["--cfg", "docsrs"] + +[lints] +workspace = true + +[dependencies] +# Workspace +op-alloy-consensus = { workspace = true, optional = true } +op-alloy-provider = { workspace = true, optional = true } +op-alloy-network = { workspace = true, optional = true } +op-alloy-rpc-jsonrpsee = { workspace = true, optional = true } +op-alloy-rpc-types-engine = { workspace = true, optional = true } +op-alloy-rpc-types = { workspace = true, optional = true } + +[features] +default = ["std", "k256", "serde"] + +std = [ + "op-alloy-consensus?/std", + 
"op-alloy-rpc-types?/std", + "op-alloy-rpc-types-engine?/std", + "op-alloy-network?/std", + "op-alloy-provider?/std" +] + +full = [ + "consensus", + "network", + "rpc-types", + "rpc-types-engine", + "rpc-jsonrpsee", +] + +k256 = [ + "op-alloy-consensus?/k256", +] + +arbitrary = [ + "op-alloy-consensus?/arbitrary", + "op-alloy-rpc-types?/arbitrary", + "op-alloy-rpc-types-engine?/arbitrary", +] + +serde = [ + "op-alloy-consensus?/serde", + "op-alloy-rpc-types-engine?/serde", + "op-alloy-network?/serde", + "op-alloy-provider?/serde", + "op-alloy-rpc-types?/serde" +] + +# `no_std` support +consensus = ["dep:op-alloy-consensus"] +rpc-types = ["dep:op-alloy-rpc-types"] +rpc-types-engine = ["dep:op-alloy-rpc-types-engine"] + +# std features +network = ["dep:op-alloy-network"] +rpc-jsonrpsee = ["dep:op-alloy-rpc-jsonrpsee"] +provider = ["dep:op-alloy-provider"] diff --git a/rust/op-alloy/crates/op-alloy/README.md b/rust/op-alloy/crates/op-alloy/README.md new file mode 100644 index 00000000000..775f6d256f4 --- /dev/null +++ b/rust/op-alloy/crates/op-alloy/README.md @@ -0,0 +1,85 @@ +## `op-alloy` + +Built on [Alloy][alloy], `op-alloy` connects applications to the OP Stack. + + +### Usage + +To use `op-alloy`, add the crate as a dependency to a `Cargo.toml`. + +```toml +op-alloy = "0.6" +``` + +### Development Status + +`op-alloy` is currently in active development, and is not yet ready for use in production. + + +### Supported Rust Versions (MSRV) + +The current MSRV (minimum supported rust version) is 1.86. + +Unlike Alloy, op-alloy may use the latest stable release, +to benefit from the latest features. + +The MSRV is not increased automatically, and will be updated +only as part of a patch (pre-1.0) or minor (post-1.0) release. + + +### Contributing + +op-alloy is built by open source contributors like you, thank you for improving the project! + +A [contributing guide][contributing] is available that sets guidelines for contributing. 
+ +Pull requests will not be merged unless CI passes, so please ensure that your contribution follows the +linting rules and passes clippy. + + +### `no_std` + +op-alloy is intended to be `no_std` compatible, initially for use in [kona][kona]. + +The following crates support `no_std`. +Notice, provider crates do not support `no_std` compatibility. + +- [`op-alloy-consensus`][op-alloy-consensus] +- [`op-alloy-rpc-types-engine`][op-alloy-rpc-types-engine] +- [`op-alloy-rpc-types`][op-alloy-rpc-types] + +If you would like to add `no_std` support to a crate, +please make sure to update [scripts/check_no_std.sh][check-no-std]. + + +### Credits + +op-alloy is inspired by the work of several teams and projects, most notably [the Alloy project][alloy]. + +This would not be possible without the hard work from open source contributors. Thank you. + + +### License + +Licensed under either of Apache License, Version +2.0 or MIT license at your option. + +Unless you explicitly state otherwise, any contribution intentionally submitted +for inclusion in these crates by you, as defined in the Apache-2.0 license, +shall be dual licensed as above, without any additional terms or conditions. 
+ + + + +[check-no-std]: https://github.com/ethereum-optimism/optimism/blob/develop/rust/op-alloy/scripts/check_no_std.sh + +[maili]: https://github.com/op-rs/maili +[kona]: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +[alloy]: https://github.com/alloy-rs/alloy +[contributing]: https://alloy-rs.github.io/op-alloy + +[op-alloy-consensus]: https://crates.io/crates/op-alloy-consensus +[op-alloy-network]: https://crates.io/crates/op-alloy-network +[op-alloy-rpc-jsonrpsee]: https://crates.io/crates/op-alloy-rpc-jsonrpsee +[op-alloy-rpc-types-engine]: https://crates.io/crates/op-alloy-rpc-types-engine +[op-alloy-rpc-types]: https://crates.io/crates/op-alloy-rpc-types diff --git a/op-alloy/crates/op-alloy/src/lib.rs b/rust/op-alloy/crates/op-alloy/src/lib.rs similarity index 100% rename from op-alloy/crates/op-alloy/src/lib.rs rename to rust/op-alloy/crates/op-alloy/src/lib.rs diff --git a/rust/op-alloy/crates/provider/Cargo.toml b/rust/op-alloy/crates/provider/Cargo.toml new file mode 100644 index 00000000000..6eea54cf581 --- /dev/null +++ b/rust/op-alloy/crates/provider/Cargo.toml @@ -0,0 +1,41 @@ +[package] +name = "op-alloy-provider" +description = "Interface with an OP Stack blockchain" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +authors = ["Alloy Contributors"] +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Workspace +op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } + +# Alloy +alloy-network.workspace = true +alloy-provider.workspace = true +alloy-transport.workspace = true +alloy-primitives = { workspace = true, features = ["rlp", "serde"] } +alloy-rpc-types-engine = { workspace = true, features = ["serde"] } + +# misc +async-trait.workspace = true + +[features] +std = [ + 
"op-alloy-rpc-types-engine/std", + "alloy-primitives/std", + "alloy-rpc-types-engine/std" +] +serde = [ + "op-alloy-rpc-types-engine/serde", + "alloy-primitives/serde", + "alloy-rpc-types-engine/serde" +] diff --git a/rust/op-alloy/crates/provider/README.md b/rust/op-alloy/crates/provider/README.md new file mode 100644 index 00000000000..f3ec93a6671 --- /dev/null +++ b/rust/op-alloy/crates/provider/README.md @@ -0,0 +1,5 @@ +## `op-alloy-provider` + +Optimism providers to interface with the engine API, adopted from L1, and [OP-unique engine API][op-api]. + +[op-api]: https://github.com/op-rs/maili/blob/main/crates/provider/README.md diff --git a/rust/op-alloy/crates/provider/src/ext/engine.rs b/rust/op-alloy/crates/provider/src/ext/engine.rs new file mode 100644 index 00000000000..4cfcd106932 --- /dev/null +++ b/rust/op-alloy/crates/provider/src/ext/engine.rs @@ -0,0 +1,318 @@ +use alloy_network::Network; +use alloy_primitives::{B256, BlockHash, Bytes}; +use alloy_provider::Provider; +use alloy_rpc_types_engine::{ + ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadInputV2, + ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, +}; +use alloy_transport::{Transport, TransportResult}; +use op_alloy_rpc_types_engine::{ + OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, + OpPayloadAttributes, ProtocolVersion, +}; + +/// Extension trait that gives access to Optimism engine API RPC methods. +/// +/// Note: +/// > The provider should use a JWT authentication layer. +/// +/// This follows the Optimism specs that can be found at: +/// +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +pub trait OpEngineApi { + /// Sends the given payload to the execution layer client, as specified for the Shanghai fork. + /// + /// See also + /// + /// No modifications needed for OP compatibility. 
+ async fn new_payload_v2( + &self, + payload: ExecutionPayloadInputV2, + ) -> TransportResult; + + /// Sends the given payload to the execution layer client, as specified for the Cancun fork. + /// + /// See also + /// + /// OP modifications: + /// - expected versioned hashes MUST be an empty array: therefore the `versioned_hashes` + /// parameter is removed. + /// - parent beacon block root MUST be the parent beacon block root from the L1 origin block of + /// the L2 block. + async fn new_payload_v3( + &self, + payload: ExecutionPayloadV3, + parent_beacon_block_root: B256, + ) -> TransportResult; + + /// Sends the given payload to the execution layer client, as specified for the Prague fork. + /// + /// See also + /// + /// OP modifications: TODO + async fn new_payload_v4( + &self, + payload: OpExecutionPayloadV4, + parent_beacon_block_root: B256, + ) -> TransportResult; + + /// Updates the execution layer client with the given fork choice, as specified for the Shanghai + /// fork. + /// + /// Caution: This should not accept the `parentBeaconBlockRoot` field in the payload attributes. + /// + /// See also + /// + /// OP modifications: + /// - The `payload_attributes` parameter is extended with the [`OpPayloadAttributes`] type + /// as described in + async fn fork_choice_updated_v2( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult; + + /// Updates the execution layer client with the given fork choice, as specified for the Cancun + /// fork. 
+ /// + /// See also + /// + /// OP modifications: + /// - Must be called with an Ecotone payload + /// - Attributes must contain the parent beacon block root field + /// - The `payload_attributes` parameter is extended with the [`OpPayloadAttributes`] type + /// as described in + async fn fork_choice_updated_v3( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult; + + /// Retrieves an execution payload from a previously started build process, as specified for the + /// Shanghai fork. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// No modifications needed for OP compatibility. + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> TransportResult; + + /// Retrieves an execution payload from a previously started build process, as specified for the + /// Cancun fork. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// OP modifications: + /// - the response type is extended to [`OpExecutionPayloadEnvelopeV3`]. + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> TransportResult; + + /// Returns the most recent version of the payload that is available in the corresponding + /// payload build process at the time of receiving this call. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// OP modifications: + /// - the response type is extended to [`OpExecutionPayloadEnvelopeV4`]. + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> TransportResult; + + /// Returns the execution payload bodies by the given hash. 
+ /// + /// See also + async fn get_payload_bodies_by_hash_v1( + &self, + block_hashes: Vec, + ) -> TransportResult; + + /// Returns the execution payload bodies by the range starting at `start`, containing `count` + /// blocks. + /// + /// WARNING: This method is associated with the `BeaconBlocksByRange` message in the consensus + /// layer p2p specification, meaning the input should be treated as untrusted or potentially + /// adversarial. + /// + /// Implementers should take care when acting on the input to this method, specifically + /// ensuring that the range is limited properly, and that the range boundaries are computed + /// correctly and without panics. + /// + /// See also + async fn get_payload_bodies_by_range_v1( + &self, + start: u64, + count: u64, + ) -> TransportResult; + + /// Returns the execution client version information. + /// + /// Note: + /// > The `client_version` parameter identifies the consensus client. + /// + /// See also + async fn get_client_version_v1( + &self, + client_version: ClientVersionV1, + ) -> TransportResult>; + + /// Optional extension to the Engine API. + /// + /// Signals superchain information to the Engine: V1 signals which protocol version is + /// recommended and required. + /// + /// See : + async fn signal_superchain_v1( + &self, + recommended: ProtocolVersion, + required: ProtocolVersion, + ) -> TransportResult; + + /// Returns the list of Engine API methods supported by the execution layer client software. 
+ /// + /// See also + async fn exchange_capabilities( + &self, + capabilities: Vec, + ) -> TransportResult>; +} + +#[cfg_attr(target_arch = "wasm32", async_trait::async_trait(?Send))] +#[cfg_attr(not(target_arch = "wasm32"), async_trait::async_trait)] +impl OpEngineApi for P +where + N: Network, + T: Transport + Clone, + P: Provider, +{ + async fn new_payload_v2( + &self, + payload: ExecutionPayloadInputV2, + ) -> TransportResult { + self.client().request("engine_newPayloadV2", (payload,)).await + } + + async fn new_payload_v3( + &self, + payload: ExecutionPayloadV3, + parent_beacon_block_root: B256, + ) -> TransportResult { + // Note: The `versioned_hashes` parameter is always an empty array for OP chains. + let versioned_hashes: Vec = vec![]; + + self.client() + .request("engine_newPayloadV3", (payload, versioned_hashes, parent_beacon_block_root)) + .await + } + + async fn new_payload_v4( + &self, + payload: OpExecutionPayloadV4, + parent_beacon_block_root: B256, + ) -> TransportResult { + // Note: The `versioned_hashes`, `execution_requests` parameters are always an empty array + // for OP chains. 
+ let versioned_hashes: Vec = vec![]; + let execution_requests: Vec = vec![]; + + self.client() + .request( + "engine_newPayloadV4", + (payload, versioned_hashes, parent_beacon_block_root, execution_requests), + ) + .await + } + + async fn fork_choice_updated_v2( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult { + self.client() + .request("engine_forkchoiceUpdatedV2", (fork_choice_state, payload_attributes)) + .await + } + + async fn fork_choice_updated_v3( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> TransportResult { + self.client() + .request("engine_forkchoiceUpdatedV3", (fork_choice_state, payload_attributes)) + .await + } + + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> TransportResult { + self.client().request("engine_getPayloadV2", (payload_id,)).await + } + + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> TransportResult { + self.client().request("engine_getPayloadV3", (payload_id,)).await + } + + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> TransportResult { + self.client().request("engine_getPayloadV4", (payload_id,)).await + } + + async fn get_payload_bodies_by_hash_v1( + &self, + block_hashes: Vec, + ) -> TransportResult { + self.client().request("engine_getPayloadBodiesByHashV1", (block_hashes,)).await + } + + async fn get_payload_bodies_by_range_v1( + &self, + start: u64, + count: u64, + ) -> TransportResult { + self.client().request("engine_getPayloadBodiesByRangeV1", (start, count)).await + } + + async fn get_client_version_v1( + &self, + client_version: ClientVersionV1, + ) -> TransportResult> { + self.client().request("engine_getClientVersionV1", (client_version,)).await + } + + async fn signal_superchain_v1( + &self, + recommended: ProtocolVersion, + required: ProtocolVersion, + ) -> TransportResult { + let signal = op_alloy_rpc_types_engine::SuperchainSignal { recommended, required }; + 
self.client().request("engine_signalSuperchainV1", (signal,)).await + } + + async fn exchange_capabilities( + &self, + capabilities: Vec, + ) -> TransportResult> { + self.client().request("engine_exchangeCapabilities", (capabilities,)).await + } +} diff --git a/op-alloy/crates/provider/src/ext/mod.rs b/rust/op-alloy/crates/provider/src/ext/mod.rs similarity index 100% rename from op-alloy/crates/provider/src/ext/mod.rs rename to rust/op-alloy/crates/provider/src/ext/mod.rs diff --git a/op-alloy/crates/provider/src/lib.rs b/rust/op-alloy/crates/provider/src/lib.rs similarity index 100% rename from op-alloy/crates/provider/src/lib.rs rename to rust/op-alloy/crates/provider/src/lib.rs diff --git a/rust/op-alloy/crates/rpc-jsonrpsee/Cargo.toml b/rust/op-alloy/crates/rpc-jsonrpsee/Cargo.toml new file mode 100644 index 00000000000..1e0eec62d0e --- /dev/null +++ b/rust/op-alloy/crates/rpc-jsonrpsee/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "op-alloy-rpc-jsonrpsee" +description = "Optimism RPC Client" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +authors = ["Alloy Contributors"] +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Alloy +alloy-primitives = { workspace = true, features = ["serde"] } + +# rpc +jsonrpsee.workspace = true + +[features] +client = [ + "jsonrpsee/client", + "jsonrpsee/async-client", +] diff --git a/rust/op-alloy/crates/rpc-jsonrpsee/README.md b/rust/op-alloy/crates/rpc-jsonrpsee/README.md new file mode 100644 index 00000000000..5ce648ff00f --- /dev/null +++ b/rust/op-alloy/crates/rpc-jsonrpsee/README.md @@ -0,0 +1,3 @@ +## `op-alloy-rpc-jsonrpsee` + +Low-level Optimism JSON-RPC server and client implementations. 
diff --git a/op-alloy/crates/rpc-jsonrpsee/src/lib.rs b/rust/op-alloy/crates/rpc-jsonrpsee/src/lib.rs similarity index 100% rename from op-alloy/crates/rpc-jsonrpsee/src/lib.rs rename to rust/op-alloy/crates/rpc-jsonrpsee/src/lib.rs diff --git a/op-alloy/crates/rpc-jsonrpsee/src/traits.rs b/rust/op-alloy/crates/rpc-jsonrpsee/src/traits.rs similarity index 100% rename from op-alloy/crates/rpc-jsonrpsee/src/traits.rs rename to rust/op-alloy/crates/rpc-jsonrpsee/src/traits.rs diff --git a/rust/op-alloy/crates/rpc-types-engine/Cargo.toml b/rust/op-alloy/crates/rpc-types-engine/Cargo.toml new file mode 100644 index 00000000000..2b7a8d0b3ac --- /dev/null +++ b/rust/op-alloy/crates/rpc-types-engine/Cargo.toml @@ -0,0 +1,91 @@ +[package] +name = "op-alloy-rpc-types-engine" +description = "Optimism RPC types for the `engine` namespace" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +authors = ["Alloy Contributors"] +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Workspace +op-alloy-consensus.workspace = true + +# Alloy +alloy-primitives.workspace = true +alloy-eips.workspace = true +alloy-rpc-types-engine.workspace = true +alloy-rlp.workspace = true +alloy-consensus.workspace = true + +# Encoding +snap = { workspace = true, optional = true } +ethereum_ssz = { workspace = true, optional = true } +ethereum_ssz_derive = { workspace = true, optional = true } + +# serde +serde = { workspace = true, optional = true } +alloy-serde = { workspace = true, optional = true } + +# misc +thiserror.workspace = true +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# hashing +sha2.workspace = true + +derive_more = { workspace = true, features = ["as_ref", "deref_mut"] } + +[dev-dependencies] +arbtest.workspace = true +serde_json.workspace = true 
+arbitrary = { workspace = true, features = ["derive"] } +alloy-primitives = { workspace = true, features = ["arbitrary", "getrandom"] } + +[features] +default = ["std", "serde"] +std = [ + "dep:snap", + "dep:ethereum_ssz", + "dep:ethereum_ssz_derive", + "alloy-rpc-types-engine/ssz", + "alloy-primitives/std", + "alloy-rpc-types-engine/std", + "op-alloy-consensus/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-rlp/std", + "alloy-serde?/std", + "derive_more/std", + "serde?/std", + "sha2/std", + "thiserror/std" +] +serde = [ + "dep:serde", + "dep:alloy-serde", + "alloy-rpc-types-engine/serde", + "op-alloy-consensus/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde" +] +k256 = ["op-alloy-consensus/k256"] +arbitrary = [ + "std", + "dep:arbitrary", + "alloy-primitives/arbitrary", + "alloy-primitives/rand", + "op-alloy-consensus/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-rpc-types-engine/arbitrary", + "alloy-serde?/arbitrary" +] diff --git a/rust/op-alloy/crates/rpc-types-engine/README.md b/rust/op-alloy/crates/rpc-types-engine/README.md new file mode 100644 index 00000000000..3797d14c592 --- /dev/null +++ b/rust/op-alloy/crates/rpc-types-engine/README.md @@ -0,0 +1,3 @@ +## `op-alloy-rpc-types-engine` + +Optimism RPC types for the `engine` namespace. diff --git a/op-alloy/crates/rpc-types-engine/src/attributes.rs b/rust/op-alloy/crates/rpc-types-engine/src/attributes.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/attributes.rs rename to rust/op-alloy/crates/rpc-types-engine/src/attributes.rs diff --git a/rust/op-alloy/crates/rpc-types-engine/src/envelope.rs b/rust/op-alloy/crates/rpc-types-engine/src/envelope.rs new file mode 100644 index 00000000000..b1abc900fda --- /dev/null +++ b/rust/op-alloy/crates/rpc-types-engine/src/envelope.rs @@ -0,0 +1,940 @@ +//! Optimism execution payload envelope in network format and related types. +//! +//! 
This module uses the `snappy` compression algorithm to decompress the payload. +//! The license for snappy can be found in the `SNAPPY-LICENSE` at the root of the repository. + +use crate::{ + OpExecutionPayload, OpExecutionPayloadSidecar, OpExecutionPayloadV4, OpFlashblockError, + OpFlashblockPayload, +}; +use alloc::vec::Vec; +use alloy_consensus::{Block, BlockHeader, Sealable, Transaction}; +use alloy_eips::{Encodable2718, eip4895::Withdrawal, eip7685::Requests}; +use alloy_primitives::{B256, Signature, keccak256}; +use alloy_rpc_types_engine::{ + CancunPayloadFields, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV2, + ExecutionPayloadV3, PraguePayloadFields, +}; + +/// A thin wrapper around [`OpExecutionPayload`] that includes the parent beacon block root. +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct OpExecutionPayloadEnvelope { + /// The parent beacon block root, if any. + pub parent_beacon_block_root: Option, + /// The execution payload. + pub execution_payload: OpExecutionPayload, +} + +impl OpExecutionPayloadEnvelope { + /// Returns the payload hash over the ssz encoded payload envelope data. + /// + /// + #[cfg(feature = "std")] + pub fn payload_hash(&self) -> crate::PayloadHash { + use ssz::Encode; + let ssz_bytes = self.as_ssz_bytes(); + crate::PayloadHash::from(ssz_bytes.as_slice()) + } +} + +#[cfg(feature = "std")] +impl ssz::Encode for OpExecutionPayloadEnvelope { + fn is_ssz_fixed_len() -> bool { + false + } + + fn ssz_append(&self, buf: &mut Vec) { + // Write parent beacon block root only if the payload is not a v1 or v2 payload. 
+ // + if !matches!(self.execution_payload, OpExecutionPayload::V1(_) | OpExecutionPayload::V2(_)) + { + buf.extend_from_slice(self.parent_beacon_block_root.unwrap_or_default().as_slice()); + } + + // Write payload + self.execution_payload.ssz_append(buf); + } + + fn ssz_bytes_len(&self) -> usize { + let mut len = 0; + len += B256::ssz_fixed_len(); // parent_beacon_block_root is always 32 bytes + len += self.execution_payload.ssz_bytes_len(); + len + } +} + +#[cfg(feature = "std")] +impl ssz::Decode for OpExecutionPayloadEnvelope { + fn is_ssz_fixed_len() -> bool { + false + } + + fn from_ssz_bytes(bytes: &[u8]) -> Result { + if bytes.len() < B256::ssz_fixed_len() { + return Err(ssz::DecodeError::InvalidByteLength { + len: bytes.len(), + expected: B256::ssz_fixed_len(), + }); + } + + // Decode parent_beacon_block_root + let parent_beacon_block_root = { + let root_bytes = &bytes[..B256::ssz_fixed_len()]; + if root_bytes.iter().all(|&b| b == 0) { + None + } else { + Some(B256::from_slice(root_bytes)) + } + }; + + // Decode payload + let execution_payload = + OpExecutionPayload::from_ssz_bytes(&bytes[B256::ssz_fixed_len()..])?; + + Ok(Self { parent_beacon_block_root, execution_payload }) + } +} + +impl From for OpExecutionPayloadEnvelope { + fn from(envelope: OpNetworkPayloadEnvelope) -> Self { + Self { + execution_payload: envelope.payload, + parent_beacon_block_root: envelope.parent_beacon_block_root, + } + } +} + +/// Struct aggregating [`OpExecutionPayload`] and [`OpExecutionPayloadSidecar`] and encapsulating +/// complete payload supplied for execution. +#[derive(Debug, Clone)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct OpExecutionData { + /// Execution payload. + pub payload: OpExecutionPayload, + /// Additional fork-specific fields. + pub sidecar: OpExecutionPayloadSidecar, +} + +impl OpExecutionData { + /// Creates new instance of [`OpExecutionData`]. 
+ pub const fn new(payload: OpExecutionPayload, sidecar: OpExecutionPayloadSidecar) -> Self { + Self { payload, sidecar } + } + + /// Conversion from [`alloy_consensus::Block`]. Also returns the [`OpExecutionPayloadSidecar`] + /// extracted from the block. + /// + /// See also [`from_block_unchecked`](OpExecutionPayload::from_block_slow). + /// + /// Note: This re-calculates the block hash. + pub fn from_block_slow(block: &Block) -> Self + where + T: Encodable2718 + Transaction, + H: BlockHeader + Sealable, + { + let (payload, sidecar) = OpExecutionPayload::from_block_slow(block); + + Self::new(payload, sidecar) + } + + /// Conversion from [`alloy_consensus::Block`]. Also returns the [`OpExecutionPayloadSidecar`] + /// extracted from the block. + /// + /// See also [`OpExecutionPayload::from_block_unchecked`]. + pub fn from_block_unchecked(block_hash: B256, block: &Block) -> Self + where + T: Encodable2718 + Transaction, + H: BlockHeader, + { + let (payload, sidecar) = OpExecutionPayload::from_block_unchecked(block_hash, block); + + Self::new(payload, sidecar) + } + + /// Conversion from a vec of [`OpFlashblockPayload`]. Also returns the + /// [`OpExecutionPayloadSidecar`] extracted from the payloads. + /// + /// # Validation + /// + /// This method performs the following validations: + /// - At least one flashblock must be present + /// - Indices must be sequential starting from 0 + /// - First flashblock (index 0) must have a base payload + /// - Only the first flashblock may have a base payload + /// + /// # Errors + /// + /// Returns an error if any validation fails. 
+ pub fn from_flashblocks( + flashblocks: &[OpFlashblockPayload], + ) -> Result { + // Validate we have at least one flashblock + if flashblocks.is_empty() { + return Err(OpFlashblockError::MissingPayload); + } + + // Validate indices are sequential starting from 0 + for (i, fb) in flashblocks.iter().enumerate() { + if fb.index as usize != i { + return Err(OpFlashblockError::InvalidIndex); + } + } + + // Validate first flashblock has base and extract it + let first = flashblocks.first().unwrap(); // Safe: checked empty above + if first.base.is_none() { + return Err(OpFlashblockError::MissingBasePayload); + } + + // Validate no other flashblocks have base (only first should have it) + for fb in flashblocks.iter().skip(1) { + if fb.base.is_some() { + return Err(OpFlashblockError::UnexpectedBasePayload); + } + } + + Ok(Self::from_flashblocks_unchecked(flashblocks)) + } + + /// Conversion from a vec of [`OpFlashblockPayload`] without validation. + /// + /// This is a faster alternative to [`Self::from_flashblocks`] that skips all validation + /// checks. Use this method only when you are certain the input data is valid. + /// + /// # Safety Requirements + /// + /// The caller must ensure: + /// - At least one flashblock is present + /// - Indices are sequential starting from 0 + /// - First flashblock (index 0) has a base payload + /// - Only the first flashblock has a base payload + /// + /// # Panics + /// + /// Panics if any of the safety requirements are violated. 
+ pub fn from_flashblocks_unchecked(flashblocks: &[OpFlashblockPayload]) -> Self { + // Extract base from first flashblock + // SAFETY: Caller guarantees at least one flashblock exists with base payload + let first = flashblocks.first().expect("flashblocks must not be empty"); + let base = first.base.as_ref().expect("first flashblock must have base payload"); + + // Get the final state from the last flashblock + // SAFETY: Caller guarantees at least one flashblock exists + let diff = &flashblocks.last().expect("flashblocks must not be empty").diff; + + // Collect all transactions and withdrawals from all flashblocks + let (transactions, withdrawals) = + flashblocks.iter().fold((Vec::new(), Vec::new()), |(mut txs, mut withdrawals), p| { + txs.extend(p.diff.transactions.iter().cloned()); + withdrawals.extend(p.diff.withdrawals.iter().copied()); + (txs, withdrawals) + }); + + let v3 = ExecutionPayloadV3 { + blob_gas_used: diff.blob_gas_used.unwrap_or(0), + excess_blob_gas: 0, + payload_inner: ExecutionPayloadV2 { + withdrawals, + payload_inner: ExecutionPayloadV1 { + parent_hash: base.parent_hash, + fee_recipient: base.fee_recipient, + state_root: diff.state_root, + receipts_root: diff.receipts_root, + logs_bloom: diff.logs_bloom, + prev_randao: base.prev_randao, + block_number: base.block_number, + gas_limit: base.gas_limit, + gas_used: diff.gas_used, + timestamp: base.timestamp, + extra_data: base.extra_data.clone(), + base_fee_per_gas: base.base_fee_per_gas, + block_hash: diff.block_hash, + transactions, + }, + }, + }; + + // Before Isthmus hardfork, withdrawals_root was not included. + // A zero withdrawals_root indicates a pre-Isthmus flashblock. 
+ if diff.withdrawals_root == B256::ZERO { + return Self::v3(v3, Vec::new(), base.parent_beacon_block_root); + } + + let v4 = + OpExecutionPayloadV4 { withdrawals_root: diff.withdrawals_root, payload_inner: v3 }; + + Self::v4(v4, Vec::new(), base.parent_beacon_block_root, Default::default()) + } + + /// Creates a new instance from args to engine API method `newPayloadV2`. + /// + /// Spec: + pub fn v2(payload: ExecutionPayloadInputV2) -> Self { + Self::new(OpExecutionPayload::v2(payload), OpExecutionPayloadSidecar::default()) + } + + /// Creates a new instance from args to engine API method `newPayloadV3`. + /// + /// Spec: + pub fn v3( + payload: ExecutionPayloadV3, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + ) -> Self { + Self::new( + OpExecutionPayload::v3(payload), + OpExecutionPayloadSidecar::v3(CancunPayloadFields::new( + parent_beacon_block_root, + versioned_hashes, + )), + ) + } + + /// Creates a new instance from args to engine API method `newPayloadV4`. + /// + /// Spec: + pub fn v4( + payload: OpExecutionPayloadV4, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + execution_requests: Requests, + ) -> Self { + Self::new( + OpExecutionPayload::v4(payload), + OpExecutionPayloadSidecar::v4( + CancunPayloadFields::new(parent_beacon_block_root, versioned_hashes), + PraguePayloadFields::new(execution_requests), + ), + ) + } + + /// Returns the parent beacon block root, if any. + pub fn parent_beacon_block_root(&self) -> Option { + self.sidecar.parent_beacon_block_root() + } + + /// Return the withdrawals for the payload or attributes. 
+ pub const fn withdrawals(&self) -> Option<&Vec> { + match &self.payload { + OpExecutionPayload::V1(_) => None, + OpExecutionPayload::V2(execution_payload_v2) => Some(&execution_payload_v2.withdrawals), + OpExecutionPayload::V3(execution_payload_v3) => { + Some(execution_payload_v3.withdrawals()) + } + OpExecutionPayload::V4(op_execution_payload_v4) => { + Some(op_execution_payload_v4.payload_inner.withdrawals()) + } + } + } + + /// Returns the parent hash of the block. + pub const fn parent_hash(&self) -> B256 { + self.payload.parent_hash() + } + + /// Returns the hash of the block. + pub const fn block_hash(&self) -> B256 { + self.payload.block_hash() + } + + /// Returns the number of the block. + pub const fn block_number(&self) -> u64 { + self.payload.block_number() + } +} + +/// Optimism execution payload envelope in network format. +/// +/// This struct is used to represent payloads that are sent over the Optimism +/// CL p2p network in a snappy-compressed format. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct OpNetworkPayloadEnvelope { + /// The execution payload. + pub payload: OpExecutionPayload, + /// A signature for the payload. + pub signature: Signature, + /// The hash of the payload. + pub payload_hash: PayloadHash, + /// The parent beacon block root. + pub parent_beacon_block_root: Option, +} + +impl OpNetworkPayloadEnvelope { + /// Decode a payload envelope from a snappy-compressed byte array. + /// The payload version decoded is `ExecutionPayloadV1` from SSZ bytes. 
+ #[cfg(feature = "std")] + pub fn decode_v1(data: &[u8]) -> Result { + use ssz::Decode; + let mut decoder = snap::raw::Decoder::new(); + let decompressed = decoder.decompress_vec(data)?; + + if decompressed.len() < 66 { + return Err(PayloadEnvelopeError::InvalidLength); + } + + let sig_data = &decompressed[..65]; + let block_data = &decompressed[65..]; + + let signature = Signature::try_from(sig_data)?; + let hash = PayloadHash::from(block_data); + + let payload = OpExecutionPayload::V1( + alloy_rpc_types_engine::ExecutionPayloadV1::from_ssz_bytes(block_data)?, + ); + + Ok(Self { payload, signature, payload_hash: hash, parent_beacon_block_root: None }) + } + + /// Encodes a payload envelope as a snappy-compressed byte array. + #[cfg(feature = "std")] + pub fn encode_v1(&self) -> Result, PayloadEnvelopeEncodeError> { + use ssz::Encode; + let execution_payload_v1 = match &self.payload { + OpExecutionPayload::V1(execution_payload_v1) => execution_payload_v1, + _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), + }; + + let mut data = Vec::new(); + let mut sig = self.signature.as_bytes(); + sig[64] = self.signature.v() as u8; + data.extend_from_slice(&sig[..]); + let block_data = execution_payload_v1.as_ssz_bytes(); + data.extend_from_slice(block_data.as_slice()); + + Ok(snap::raw::Encoder::new().compress_vec(&data)?) + } + + /// Decode a payload envelope from a snappy-compressed byte array. + /// The payload version decoded is `ExecutionPayloadV2` from SSZ bytes. 
+ #[cfg(feature = "std")] + pub fn decode_v2(data: &[u8]) -> Result { + use ssz::Decode; + let mut decoder = snap::raw::Decoder::new(); + let decompressed = decoder.decompress_vec(data)?; + + if decompressed.len() < 66 { + return Err(PayloadEnvelopeError::InvalidLength); + } + + let sig_data = &decompressed[..65]; + let block_data = &decompressed[65..]; + + let signature = Signature::try_from(sig_data)?; + let hash = PayloadHash::from(block_data); + + let payload = OpExecutionPayload::V2( + alloy_rpc_types_engine::ExecutionPayloadV2::from_ssz_bytes(block_data)?, + ); + + Ok(Self { payload, signature, payload_hash: hash, parent_beacon_block_root: None }) + } + + /// Encodes a payload envelope as a snappy-compressed byte array. + #[cfg(feature = "std")] + pub fn encode_v2(&self) -> Result, PayloadEnvelopeEncodeError> { + use ssz::Encode; + let execution_payload_v2 = match &self.payload { + OpExecutionPayload::V2(execution_payload_v2) => execution_payload_v2, + _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), + }; + + let mut data = Vec::new(); + let mut sig = self.signature.as_bytes(); + sig[64] = self.signature.v() as u8; + data.extend_from_slice(&sig[..]); + let block_data = execution_payload_v2.as_ssz_bytes(); + data.extend_from_slice(block_data.as_slice()); + + Ok(snap::raw::Encoder::new().compress_vec(&data)?) + } + + /// Decode a payload envelope from a snappy-compressed byte array. + /// The payload version decoded is `ExecutionPayloadV3` from SSZ bytes. 
+ #[cfg(feature = "std")] + pub fn decode_v3(data: &[u8]) -> Result { + use ssz::Decode; + let mut decoder = snap::raw::Decoder::new(); + let decompressed = decoder.decompress_vec(data)?; + + if decompressed.len() < 98 { + return Err(PayloadEnvelopeError::InvalidLength); + } + + let sig_data = &decompressed[..65]; + let parent_beacon_block_root = &decompressed[65..97]; + let block_data = &decompressed[97..]; + + let signature = Signature::try_from(sig_data)?; + let parent_beacon_block_root = B256::from_slice(parent_beacon_block_root); + let hash = PayloadHash::from( + [parent_beacon_block_root.as_slice(), block_data].concat().as_slice(), + ); + + let payload = OpExecutionPayload::V3( + alloy_rpc_types_engine::ExecutionPayloadV3::from_ssz_bytes(block_data)?, + ); + + Ok(Self { + payload, + signature, + payload_hash: hash, + parent_beacon_block_root: Some(parent_beacon_block_root), + }) + } + + /// Encodes a payload envelope as a snappy-compressed byte array. + #[cfg(feature = "std")] + pub fn encode_v3(&self) -> Result, PayloadEnvelopeEncodeError> { + use ssz::Encode; + let execution_payload_v3 = match &self.payload { + OpExecutionPayload::V3(execution_payload_v3) => execution_payload_v3, + _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), + }; + + let mut data = Vec::new(); + let mut sig = self.signature.as_bytes(); + sig[64] = self.signature.v() as u8; + data.extend_from_slice(&sig[..]); + data.extend_from_slice(self.parent_beacon_block_root.as_ref().unwrap().as_slice()); + let block_data = execution_payload_v3.as_ssz_bytes(); + data.extend_from_slice(block_data.as_slice()); + + Ok(snap::raw::Encoder::new().compress_vec(&data)?) + } + + /// Decode a payload envelope from a snappy-compressed byte array. + /// The payload version decoded is `ExecutionPayloadV4` from SSZ bytes. 
+ #[cfg(feature = "std")] + pub fn decode_v4(data: &[u8]) -> Result { + use ssz::Decode; + let mut decoder = snap::raw::Decoder::new(); + let decompressed = decoder.decompress_vec(data)?; + + if decompressed.len() < 98 { + return Err(PayloadEnvelopeError::InvalidLength); + } + + let sig_data = &decompressed[..65]; + let parent_beacon_block_root = &decompressed[65..97]; + let block_data = &decompressed[97..]; + + let signature = Signature::try_from(sig_data)?; + let parent_beacon_block_root = B256::from_slice(parent_beacon_block_root); + let hash = PayloadHash::from( + [parent_beacon_block_root.as_slice(), block_data].concat().as_slice(), + ); + + let payload = OpExecutionPayload::V4(OpExecutionPayloadV4::from_ssz_bytes(block_data)?); + + Ok(Self { + payload, + signature, + payload_hash: hash, + parent_beacon_block_root: Some(parent_beacon_block_root), + }) + } + + /// Encodes a payload envelope as a snappy-compressed byte array. + #[cfg(feature = "std")] + pub fn encode_v4(&self) -> Result, PayloadEnvelopeEncodeError> { + use ssz::Encode; + let execution_payload_v4 = match &self.payload { + OpExecutionPayload::V4(execution_payload_v4) => execution_payload_v4, + _ => return Err(PayloadEnvelopeEncodeError::WrongVersion), + }; + + let mut data = Vec::new(); + let mut sig = self.signature.as_bytes(); + sig[64] = self.signature.v() as u8; + data.extend_from_slice(&sig[..]); + data.extend_from_slice(self.parent_beacon_block_root.as_ref().unwrap().as_slice()); + let block_data = execution_payload_v4.as_ssz_bytes(); + data.extend_from_slice(block_data.as_slice()); + + Ok(snap::raw::Encoder::new().compress_vec(&data)?) + } +} + +/// Errors that can occur when encoding a payload envelope. +#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] +pub enum PayloadEnvelopeEncodeError { + /// Wrong versions of the payload. + #[error("Wrong version of the payload")] + WrongVersion, + /// An error occurred during snap encoding. 
+ #[error(transparent)] + #[cfg(feature = "std")] + SnapEncoding(#[from] snap::Error), +} + +/// Errors that can occur when decoding a payload envelope. +#[derive(Debug, Clone, PartialEq, Eq, thiserror::Error)] +pub enum PayloadEnvelopeError { + /// The snappy encoding is broken. + #[error("Broken snappy encoding")] + BrokenSnappyEncoding, + /// The signature is invalid. + #[error("Invalid signature")] + InvalidSignature, + /// The SSZ encoding is broken. + #[error("Broken SSZ encoding")] + BrokenSszEncoding, + /// The payload envelope is of invalid length. + #[error("Invalid length")] + InvalidLength, +} + +impl From for PayloadEnvelopeError { + fn from(_: alloy_primitives::SignatureError) -> Self { + Self::InvalidSignature + } +} + +#[cfg(feature = "std")] +impl From for PayloadEnvelopeError { + fn from(_: snap::Error) -> Self { + Self::BrokenSnappyEncoding + } +} + +#[cfg(feature = "std")] +impl From for PayloadEnvelopeError { + fn from(_: ssz::DecodeError) -> Self { + Self::BrokenSszEncoding + } +} + +/// Represents the Keccak256 hash of the block +#[derive(Debug, Clone, Copy, Default, PartialEq, Eq)] +#[cfg_attr(feature = "arbitrary", derive(arbitrary::Arbitrary))] +pub struct PayloadHash(pub B256); + +impl From<&[u8]> for PayloadHash { + /// Returns the Keccak256 hash of a sequence of bytes + fn from(value: &[u8]) -> Self { + Self(keccak256(value)) + } +} + +impl PayloadHash { + /// The expected message that should be signed by the unsafe block signer. 
+ pub fn signature_message(&self, chain_id: u64) -> B256 { + let domain = B256::ZERO.as_slice(); + let chain_id = B256::left_padding_from(&chain_id.to_be_bytes()[..]); + let payload_hash = self.0.as_slice(); + keccak256([domain, chain_id.as_slice(), payload_hash].concat()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::b256; + + #[test] + #[cfg(feature = "std")] + fn test_roundtrip_encode_rpc_execution_payload_envelope() { + use alloy_primitives::hex; + use ssz::{Decode, Encode}; + let data = hex!( + "00000000000000000000000000000000000000000000000000000000000001230000000000000000000000000000000000000000000000000000000000000123000000000000000000000000000000000000045600000000000000000000000000000000000000000000000000000000000007890000000000000000000000000000000000000000000000000000000000000abc0d0e0f000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111de000000000000004d01000000000000bc010000000000002b02000000000000300200000903000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000088832020000380200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001236666040000009999" + ); + + let payload = OpExecutionPayloadEnvelope::from_ssz_bytes(&data).unwrap(); + let serialized = payload.as_ssz_bytes(); + assert_eq!(data, &serialized[..]); + } + + #[test] + #[cfg(feature = "serde")] + fn 
test_serde_roundtrip_op_execution_payload_envelope() { + let envelope_str = r#"{ + "executionPayload": {"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0","withdrawalsRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119"}, + "parentBeaconBlockRoot": "0x9999999999999999999999999999999999999999999999999999999999999999" + }"#; + + let envelope: OpExecutionPayloadEnvelope = serde_json::from_str(envelope_str).unwrap(); + let expected = b256!("9999999999999999999999999999999999999999999999999999999999999999"); + assert_eq!(envelope.parent_beacon_block_root.unwrap(), expected); + let _ = serde_json::to_string(&envelope).unwrap(); + } + + #[test] + fn test_signature_message() { + let inner = b256!("9999999999999999999999999999999999999999999999999999999999999999"); + let hash 
= PayloadHash::from(inner.as_slice()); + let chain_id = 10; + let expected = b256!("44a0e2b1aba1aae1771eddae1dcd2ad18a8cdac8891517153f03253e49d3f206"); + assert_eq!(hash.signature_message(chain_id), expected); + } + + #[test] + fn test_inner_payload_hash() { + arbtest::arbtest(|u| { + let inner = B256::from(u.arbitrary::<[u8; 32]>()?); + let hash = PayloadHash::from(inner.as_slice()); + assert_eq!(hash.0, keccak256(inner.as_slice())); + Ok(()) + }); + } + + #[test] + #[cfg(feature = "std")] + fn test_roundtrip_encode_envelope_v1() { + use alloy_primitives::hex; + let data = hex::decode("0xbd04f043128457c6ccf35128497167442bcc0f8cce78cda8b366e6a12e526d938d1e4c1046acffffbfc542a7e212bb7d80d3a4b2f84f7b196d935398a24eb84c519789b401000000fe0300fe0300fe0300fe0300fe0300fe0300a203000c4a8fd56621ad04fc0101067601008ce60be0005b220117c32c0f3b394b346c2aa42cfa8157cd41f891aa0bec485a62fc010000").unwrap(); + let payload_envelop = OpNetworkPayloadEnvelope::decode_v1(&data).unwrap(); + assert_eq!(1725271882, payload_envelop.payload.timestamp()); + let encoded = payload_envelop.encode_v1().unwrap(); + assert_eq!(data, encoded); + } + + #[test] + #[cfg(feature = "std")] + fn test_roundtrip_encode_envelope_v2() { + use alloy_primitives::hex; + let data = hex::decode("0xc104f0433805080eb36c0b130a7cc1dc74c3f721af4e249aa6f61bb89d1557143e971bb738a3f3b98df7c457e74048e9d2d7e5cd82bb45e3760467e2270e9db86d1271a700000000fe0300fe0300fe0300fe0300fe0300fe0300a203000c6b89d46525ad000205067201009cda69cb5b9b73fc4eb2458b37d37f04ff507fe6c9cd2ab704a05ea9dae3cd61760002000000020000").unwrap(); + let payload_envelop = OpNetworkPayloadEnvelope::decode_v2(&data).unwrap(); + assert_eq!(1708427627, payload_envelop.payload.timestamp()); + let encoded = payload_envelop.encode_v2().unwrap(); + assert_eq!(data, encoded); + } + + #[test] + #[cfg(feature = "std")] + fn test_roundtrip_encode_envelope_v3() { + use alloy_primitives::hex; + let data = 
hex::decode("0xf104f0434442b9eb38b259f5b23826e6b623e829d2fb878dac70187a1aecf42a3f9bedfd29793d1fcb5822324be0d3e12340a95855553a65d64b83e5579dffb31470df5d010000006a03000412346a1d00fe0100fe0100fe0100fe0100fe0100fe01004201000cc588d465219504100201067601007cfece77b89685f60e3663b6e0faf2de0734674eb91339700c4858c773a8ff921e014401043e0100").unwrap(); + let payload_envelop = OpNetworkPayloadEnvelope::decode_v3(&data).unwrap(); + assert_eq!(1708427461, payload_envelop.payload.timestamp()); + let encoded = payload_envelop.encode_v3().unwrap(); + assert_eq!(data, encoded); + } + + #[test] + #[cfg(feature = "std")] + fn test_roundtrip_encode_envelope_v4() { + use alloy_primitives::hex; + let data = hex::decode("0x9105f043cee25401b6853202950d1d8a082f31a80c4fef5782c049a731f5d104b1b9b9aa7618605b420438ae98b44c8aaaebd482854473c2ae57c079286bb634bece5210000000006a03000412346a1d00fe0100fe0100fe0100fe0100fe0100fe01004201000c5766d26721950430020106f6010001440104b60100049876").unwrap(); + let payload_envelop = OpNetworkPayloadEnvelope::decode_v4(&data).unwrap(); + assert_eq!(1741842007, payload_envelop.payload.timestamp()); + let encoded = payload_envelop.encode_v4().unwrap(); + assert_eq!(data, encoded); + } + + // Helper function to create a test flashblock + #[cfg(test)] + fn create_test_flashblock(index: u64, with_base: bool) -> OpFlashblockPayload { + use crate::flashblock::{ + OpFlashblockPayloadBase, OpFlashblockPayloadDelta, OpFlashblockPayloadMetadata, + }; + use alloc::collections::BTreeMap; + use alloy_primitives::{Address, Bloom, Bytes, U256}; + use alloy_rpc_types_engine::PayloadId; + + let base = with_base.then(|| OpFlashblockPayloadBase { + parent_beacon_block_root: B256::ZERO, + parent_hash: B256::ZERO, + fee_recipient: Address::ZERO, + prev_randao: B256::ZERO, + block_number: 100, + gas_limit: 30_000_000, + timestamp: 1234567890, + extra_data: Bytes::default(), + base_fee_per_gas: U256::from(1000000000u64), + }); + + let diff = OpFlashblockPayloadDelta { + state_root: 
B256::ZERO, + receipts_root: B256::ZERO, + logs_bloom: Bloom::ZERO, + gas_used: 21000, + block_hash: B256::ZERO, + transactions: Vec::new(), + withdrawals: Vec::new(), + withdrawals_root: B256::from([1u8; 32]), // Non-zero for Isthmus + blob_gas_used: Some(0), + }; + + let metadata = OpFlashblockPayloadMetadata { + block_number: 100, + new_account_balances: BTreeMap::new(), + receipts: BTreeMap::new(), + }; + + OpFlashblockPayload { payload_id: PayloadId::new([1u8; 8]), index, base, diff, metadata } + } + + #[test] + fn test_from_flashblocks_empty_vec() { + let result = OpExecutionData::from_flashblocks(&[]); + assert!(matches!(result, Err(OpFlashblockError::MissingPayload))); + } + + #[test] + fn test_from_flashblocks_non_sequential_indices() { + let fb1 = create_test_flashblock(0, true); + let fb2 = create_test_flashblock(2, false); // Skip index 1 + + let result = OpExecutionData::from_flashblocks(&[fb1, fb2]); + assert!(matches!(result, Err(OpFlashblockError::InvalidIndex))); + } + + #[test] + fn test_from_flashblocks_missing_base_in_first() { + let fb1 = create_test_flashblock(0, false); // First should have base + + let result = OpExecutionData::from_flashblocks(&[fb1]); + assert!(matches!(result, Err(OpFlashblockError::MissingBasePayload))); + } + + #[test] + fn test_from_flashblocks_unexpected_base_in_second() { + let fb1 = create_test_flashblock(0, true); + let fb2 = create_test_flashblock(1, true); // Should not have base + + let result = OpExecutionData::from_flashblocks(&[fb1, fb2]); + assert!(matches!(result, Err(OpFlashblockError::UnexpectedBasePayload))); + } + + #[test] + fn test_from_flashblocks_single_valid_flashblock() { + let fb1 = create_test_flashblock(0, true); + + let result = OpExecutionData::from_flashblocks(&[fb1]); + assert!(result.is_ok(), "Single valid flashblock should succeed"); + } + + #[test] + fn test_from_flashblocks_multiple_valid_flashblocks() { + let fb1 = create_test_flashblock(0, true); + let fb2 = create_test_flashblock(1, 
false); + let fb3 = create_test_flashblock(2, false); + + let result = OpExecutionData::from_flashblocks(&[fb1, fb2, fb3]); + assert!(result.is_ok(), "Multiple valid flashblocks should succeed"); + } + + #[test] + fn test_from_flashblocks_wrong_first_index() { + let fb1 = create_test_flashblock(1, true); // Should be index 0 + let result = OpExecutionData::from_flashblocks(&[fb1]); + assert!(matches!(result, Err(OpFlashblockError::InvalidIndex))); + } + + // Real-world test case from Unichain Sepolia + // + #[test] + #[cfg(feature = "serde")] + fn test_from_flashblocks_unichain_sepolia_block() { + use alloy_primitives::{address, b256}; + + let raw_sequence = r#"[{"payload_id":"0x03c446f063e3735a","index":0,"base":{"parent_beacon_block_root":"0xf6d335a6b2b4fd8fb539cd51a49769df4d53c31a90c54dd270e54542638ff101","parent_hash":"0x06ff95a9cd23b0328da74a984aa986b2e01d377dab1825f1029e39ece6c4a3ea","fee_recipient":"0x4200000000000000000000000000000000000011","prev_randao":"0x8beee738d20a9d77c5f27e9cb799ebe5b536f0985efad5f7d77ebff47f092c4a","block_number":"0x21e3b52","gas_limit":"0x3938700","timestamp":"0x690be89e","extra_data":"0x00000000320000000c","base_fee_per_gas":"0x33"},"diff":{"state_root":"0xb29a9bcae8cf3ae6d68985fcd70db80b3818cd629c9d5da0bb116451739b2078","receipts_root":"0x91d8ad10740ccfc1bd848fba0e02668d95769c08eeea30f10698692ba86c6159","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x10994","block_hash":"0xa66f8562a861f906a2438d7d6ba79495640d98d9c6922b9605c54b57f97a345c","trans
actions":["0x7ef90104a035dd2ec802504a143048c7830f8f570e0d6cf5147217af869939c6b4ba710a3694deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8b0098999be000007d0000dbba0000000000000000800000000690be848000000000092042e000000000000000000000000000000000000000000000000000000000000000900000000000000000000000000000000000000000000000000000000000000010ffd7e2fb2c36e5f27c015872ce733a7b4f3fc0f4ee668d7469c557c48f8250f0000000000000000000000004ab3387810ef500bfe05a49dc53a44c222cbab3e000000000000000000000000","0x02f87e8205158401c8ea9180338255789400000000000000000000000000000000000000008096426c6f636b204e756d6265723a203335353335363938c080a091f83058c881d9ad71c179ce680326501702eb68150d20b2bf7786e388f954a2a0180185d83e503f11bf3c265c1f9296ed8d3d7c04031cd8bb30509ad188ce7bbc"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":1,"base":null,"diff":{"state_root":"0xfb1794f74d405b345672c57a5053c6105cc55c8e63f96fb0db5b0260df42413a","receipts_root":"0x1eaaaeb9d43bead7d32b90f1b320589174c63d2fa8f5fd366f841a205b1eb2e0","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x18f7d","block_hash":"0x67b0521ebfcb03d6ce2b6e1bad9c9c66795365f63ad8dc51e1e8f582a5ab7821","transactions":["0x02f86c8205158401c8ea92803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c001a0d73ce313aafea312e0b7244767e45f8b0
5d50305e0f4e4c3c564ddc751666815a02ee015ce2363311823c0b2e96bfb0e8090fd53c6cdd99be8cf343af123036dfc"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":2,"base":null,"diff":{"state_root":"0x90dd105c4a2a0dd9ffe994204bfa3e2b4f70f7ea760d5cb9a4263f26a89f91b4","receipts_root":"0x0fff0488aa3732c34018b938839ab2f0caa96018221e4ffaeca011fb06ba288f","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x21566","block_hash":"0x720feb7457110a565b479fafbaa89cc984f5d673846a27d44bbb8cf5200b32fe","transactions":["0x02f86c8205158401c8ea93803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c001a0f8cd94080642e116bc772f36a02d002505227aa542e1c13e5129ab40b8b037fba00608318d3895388e39b218bcb275380cebc566e68f26d3d434e32b8b58366cdf"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":3,"base":null,"diff":{"state_root":"0x71f8c60fdfdd84cffda3b0b6af7c8ff92195918f4fc2abae750a7306521ac0dc","receipts_root":"0xa62d1d98f56ffb1464a2beb185484253df68208004306e155c0bd1519137afe6","logs_bloom":"0x0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000100000000000000000000000000000000000000000000000
0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x29b4f","block_hash":"0x670844e30f7325d4f290ea375e01f7e819afca317fc7db9723e6867a184984fa","transactions":["0x02f86c8205158401c8ea94803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c080a04368492ec1d087703aaf6f5fefe4427b3bf382e5cd07133f638bb6701f15fe61a05e28757fbdc7e744118be36d5a1548eb7c009eefcb5dc5c5040e09c2fc6de9d8"],"withdrawals":[],"withdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c446f063e3735a","index":4,"base":null,"diff":{"state_root":"0x5615e4342d231c352438f0ba6a8f0f641459f67961961764b781a909969b28ad","receipts_root":"0x588e1d47b0618d7e935b20c3945cba3b7b8c00141904f79ceed20312ea502e63","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000001000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000004040000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0x32138","block_hash":"0xc463a3120c35268f610d969f5608b479332ef10953af77c7a6be806195831196","transactions":["0x02f86c8205158401c8ea95803382880994f878f0340bf132c28f3211e8b46c569edf81749580843fd553e8c080a0802ba6d4f37e3b8de96095bd0b216144f276171d16dc62a004f1a89009af5deea00f0c6250cfd1a062a1bc2bc353a5c227a980cac0f233b7be8932f2192342ec4f"],"withdrawals":[],"wit
hdrawals_root":"0x62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2"},"metadata":{"block_number":35535698,"new_account_balances":{},"receipts":{}}}]"#; + + let flashblocks: Vec = serde_json::from_str(raw_sequence).unwrap(); + let execution_data = OpExecutionData::from_flashblocks(&flashblocks).unwrap(); + + // Validate against expected final block state + assert_eq!( + execution_data.payload.parent_hash(), + b256!("06ff95a9cd23b0328da74a984aa986b2e01d377dab1825f1029e39ece6c4a3ea") + ); + assert_eq!( + execution_data.payload.block_hash(), + b256!("c463a3120c35268f610d969f5608b479332ef10953af77c7a6be806195831196") + ); + assert_eq!(execution_data.payload.block_number(), 0x21E3B52); + assert_eq!(execution_data.payload.timestamp(), 0x690be89e); + assert_eq!( + execution_data.payload.fee_recipient(), + address!("4200000000000000000000000000000000000011") + ); + assert_eq!(execution_data.payload.gas_limit(), 0x3938700); + assert_eq!(execution_data.payload.as_v1().gas_used, 0x32138); + assert_eq!( + execution_data.payload.as_v1().state_root, + b256!("5615e4342d231c352438f0ba6a8f0f641459f67961961764b781a909969b28ad") + ); + assert_eq!( + execution_data.payload.as_v1().receipts_root, + b256!("588e1d47b0618d7e935b20c3945cba3b7b8c00141904f79ceed20312ea502e63") + ); + assert_eq!(execution_data.payload.transactions().len(), 6); + assert_eq!( + execution_data.payload.as_v4().unwrap().withdrawals_root, + b256!("62ed62e0391b081bf172f287fbbe75e87d8a6c22f1d3b1f1aef4788c134633d2") + ); + + // Verify parent beacon block root + assert_eq!( + execution_data.parent_beacon_block_root(), + Some(b256!("f6d335a6b2b4fd8fb539cd51a49769df4d53c31a90c54dd270e54542638ff101")) + ); + } + + // Real-world test case from Base Sepolia + // Block #33439826 with 11 flashblocks (indices 0-10) + #[test] + #[cfg(feature = "serde")] + fn test_from_flashblocks_base_sepolia_block() { + use alloy_primitives::{address, b256}; + + let raw_sequence = 
r#"[{"payload_id":"0x03c33cc62b81edb6","index":0,"base":{"parent_beacon_block_root":"0xf058b1e43890ed5f838bd07e77db06d075d894343d1b31f6099a345b0d8f7d1b","parent_hash":"0x6ffd2714d5af6c412c57db3f664a5a127516573bbd987fd242d06f71ea662741","fee_recipient":"0x4200000000000000000000000000000000000011","prev_randao":"0x9985c1f8ec25b468cbf2b727a8371b4554b7e7adb059c08abf7a7d51d86ceee5","block_number":"0x1fe4052","gas_limit":"0x3938700","timestamp":"0x690fdf84","extra_data":"0x000000003200000004","base_fee_per_gas":"0x34"},"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x1b2fa5e4cbbc1f8c01a7c7204571ebe339dbdfadc666451d8e70d5c10c99830f","logs_bloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","gas_used":"0xb41c","block_hash":"0x87c6775cc427caf4c0ffe0d4b6d76627536f38d77d23f105f9f104ef3e5541c7","transactions":["0x7ef90104a01c055ffd19ea027da4a8aae0a2734c6bf17c3f487d4cc22931d7dbe261409cda94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8b0098999be0000044d000a118b000000000000000400000000690fde3c00000000009252e3000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000014f1595c3798e3082aa093e433bd5cbd102a11f9619d20e6e821c1a30fb56b12b000000000000000000000000fc56e7272eebbba5bc6c544e159483c4a38f8ba3000000000000000000000000"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block
_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":1,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xe38b2090ddfa6ee25b15a8ebcdd7ecc0f1ee9128ec98cb24f47909e29e11832e","logs_bloom":"0x00000000000000000000000020000000040080000000000000020005000000004000000040040000000080000000000000000000000000000002000000000000008000000000000000000000000000014000000000800000000000000000000000000000000000040100000000000000000000000100000000000380008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000400801080000000000005000000400000000000000000000000110000000000000000000000000100200021004400010000000010000000400000008002000004080000000000000","gas_used":"0x9d2f2","block_hash":"0x4548d5014de4883cec380838f1b225996fa3c08c176f2f63d98d8c23169fab44","transactions":["0x02f89283014a348202ea830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000001236efcbcbb340000c001a0742ff606597cda39751dd369e66e9978946ce8f4eb578a8d73314535a2df4388a06a6f83c3606c32e1677f62408b8ec69b09a82f499395b26eaefea567deb83843","0x02f9101583014a34830597bd830f4240830f42aa8306aecc9442826e92e6418877459f0920cb058e462ac6a0a480b90fa4dbaa1e6400000000000000000000000000a739e4479c97289801654ec1a52a67077613c000000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000691d0e7f4f6ae70adc2708ec4857d3d5ca54a11710c9ac11989b1cb3d3d8d3298a78f6a50000000000000000000000000000000000000000000000000000000000000f200000000000000000000000000000000000000000000000000000000000000e44b653f0c300000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000033bea0000000000000000000000000000000000000000000000000000000000000038000000000000
0000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000002200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000004747970650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000026f6b00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000086f6b2e746f6b656e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003657468000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000086f6b2e74785f69640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046626173653a3078343865643835396232636630633962366261633864373134653162363436313264313232346436643a38343533323a33333433393832323a3333393131333600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000a2000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000
0000000000000000000000000000000000120000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000002e000000000000000000000000000000000000000000000000000000000000003e000000000000000000000000000000000000000000000000000000000000004c000000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000000000000000006a000000000000000000000000000000000000000000000000000000000000007c000000000000000000000000000000000000000000000000000000000000008c00000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000004747970650000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000087769746864726177000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001977697468647261772e73656e6465722e636861696e5f7569640000000000000000000000000000000000000000000000000000000000000000000000000000046261736500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000001777697468647261772e73656e6465722e61646472657373000000000000000000000000000000000000000000000000000000000000000000000000000000002a307834386564383539623263663063396236626163386437313465316236343631326431323234643664000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000e77697468647261772e746f6b656e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000036574680000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000f77697468647261772e616d6f756e740000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001431303030303030303030303030303030303030300000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000002f77697468647261772e63726f73735f636861696e5f6164647265737365732e302e757365722e636861696e5f756964000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000077365706f6c6961000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000002d77697468647261772e63726f73735f636861696e5f6164647265737365732e302e757365722e6164647265737300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002a3078343865643835396232636630633962366261633864373134653162363436313264313232346436640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
00002000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000a0000000000000000000000000000000000000000000000000000000000000003977697468647261772e63726f73735f636861696e5f6164647265737365732e302e6c696d69742e6c6573735f7468616e5f6f725f657175616c0000000000000000000000000000000000000000000000000000000000000000000000000000143130303030303030303030303030303030303030000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000000000000000e77697468647261772e74785f69640000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000046626173653a3078343865643835396232636630633962366261633864373134653162363436313264313232346436643a38343533323a33333433393832323a333339313133360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000041ffb578b6e9ab1699e4d9cd0078d9f28e7f0ef2136a11596aa7b6d7fe7f896dd353b7b786bf155c924f35d5099f0df90650e74a5858b75673835d24ac6dc8f1e41b00000000000000000000000000000000000000000000000000000000000000c080a09c4f42d262ed1f1bee31461fd10d8d8fbac6e340d9bc2b8035df5faa30f88d4da06d832693c1e28d4f647a6ff08f5d037d08ad2599964a9f3600396efdaec07e4a"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":2,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xda7caba0b5682eda3aed5f47132da84aa2c2757499c23d609aa73dd3a449be1d","logs_bloom":"0x0000000000000000000000002000000004008000000000000002000500000000400000004004000000008000000000000000
0000000000000002000000000040008000000000000000000000000000014000000800800000000004000000000000000000000400040100000000000000000002800100000000000b80008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000440801080200000000005000000400000000000000000000000110000000000008000000000000100200021004400010000000110000000400000008002000004080010000000000","gas_used":"0xd6a91","block_hash":"0x17e106bfeebb2ff0123cf2e1f555e0441ed308773224513dc4ac6257d943e52c","transactions":["0x02f89283014a3482015f830f4275830f427583045dc694a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000000c249fdd327780000c001a098b7dd6d4454a8d31170b5b2d1461bc8a74eed745eddc982232b2c1483cba322a07d3acfe989366b2729aa728ebca7009c15dc908954a9fb5459b75cff1bfd103f"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":3,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xda7caba0b5682eda3aed5f47132da84aa2c2757499c23d609aa73dd3a449be1d","logs_bloom":"0x00000000000000000000000020000000040080000000000000020005000000004000000040040000000080000000000000000000000000000002000000000040008000000000000000000000000000014000000800800000000004000000000000000000000400040100000000000000000002800100000000000b80008a02000000100000400200000100800000000000000000000004001000200000000000000000000800020000000000400000000000000000008000440801080200000000005000000400000000000000000000000110000000000008000000000000100200021004400010000000110000000400000008002000004080010000000000","gas_used":"0xd6a91","block_hash":"0x17e106bfeebb2ff0123cf2e1f555e0441ed308773224513dc4ac6257d943e52c","transactions":[],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"m
etadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":4,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xaff50907a173fc423a499319437afffb8abc2071ce36b6f040dc487579a5d4c3","logs_bloom":"0x0002800000000000002000012000040004008000000010000012000500000000480000004004000000918000000000000000000000000000000200821000806000800010000000000000000800000001c000000800800000000004202000000800000000000400040100020100000000000002800100000000000b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000000001500a000400000000000000000000100110000020000008000000000000100a00221004400010000000110100000400100008002100004280010000000000","gas_used":"0x1498a3","block_hash":"0x4764a20ee262986e45d29251db593320bd4bf6de1133de553b6363a5691e7644","transactions":["0x02f89283014a348203af830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000002017a67f731740000c001a04ce59ff67dc25a76f3027441513f916b809f55b29d5de4fecd4aa0136a3a1a4fa02c1b32b3a1600f6bb2365130797238162cbc797843169a4cfb1ebb41465877c7","0x02f8d483014a348309087a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000022e40d0a0c0bb77b570445fb59d39bcf14790b660000000000000000000000000000000000000000000000004a61b425a5ee98000000000000000000000000000000000000000000000000000006431e74449860c001a002c2402941acdc25bcaae67c62d58f1a942b32723827f77972c74b159b2c174ea04772118ec71bc7fbe0c9f1c9ef90f58927126480ca769d73704365bfbac65db3","0x02f8d483014a348308c06b830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000005643a7772017c8544d3841894c1f7c264cd05ffe0000000000000000000000000000000000000000000000000b035a61b2e8be000000000000000000000000000000000000000000000000000006431e7446c578c001a0ac31a5ad06a38
97a0c1a909770badf8cec728abd2daf4d125a551778fa597124a013b1de6f741139d957f299bf22de0a91c1d8a4f2ade6743ddcec89bcc9e8b07d","0x02f8d483014a34830922b1830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000576831e77af4b5425b39efb23528441b79ee71e20000000000000000000000000000000000000000000000002bed26c4505ca4000000000000000000000000000000000000000000000000000006431e7446f712c080a0c105ef2c930e95694d112028a642399e5a56ce6416f9b8df9ad27baa26244483a064f6e5881fa728b7afaa2e2ddd62c3182789cb247f90b6276c14f8bfc1b4f2cf"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":5,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x6d12b13dcae85ef97ec3756b317ac9d33752bcd231a9323046ecd5a65e8ca8a2","logs_bloom":"0x0002800000000000002000012000040004008000000010000012000500000000480000004004000000918000000000000000000000000100000200821000806000800010000000000000000800000001c000000800800000000004202000000800000000000400040100020100000000000002800100000000004b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000000001500a000400000000000000000000100110004020000008000000000000100a00221004400010000000110100000400100008002100004280010000000000","gas_used":"0x153998","block_hash":"0x810679ccd05f90093eb0e88549d52ad196214f3a4a555cf0b06201f30aa61a2d","transactions":["0x02f8d483014a34830966ae830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000046195a8573f2610bba630bb0bd5c21c064594f3a0000000000000000000000000000000000000000000000002c94bc176f7cb4000000000000000000000000000000000000000000000000000006431e743d37eac080a053f1881c67ad8fa9838d83943afe83b6498dae96a13a019704f25e0df515dbdba05eef8e08269eaafd63ba7e
14e13d73e03ec5e7fad5bcdbaaabc124da41e8e32c"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":6,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x1b76e086c31a8a08d1c4a93b868b00238faabd4d52d9e75e55a4abf3a75e65d8","logs_bloom":"0x0002800000000000002000012000048004008000000010000012000500000000480000004004000000918000000000000000000000000100000200821000806000800010000000000000000800000001c008000800800000000004202000000800000000000400040100020100000000000002800100000000004b90008a02000000100000480200000100800010080400000000000004001000224080000000000000008c0002040080000840000000000000000100c000c4080108020000010001500a000400000000000001000000100110004021000008000000000000100a00221004400010000000110100000400100008002100004280010000000001","gas_used":"0x189dc4","block_hash":"0xfdf2cbb452a36c9c4033d1c0bc2b3dd9cee7ba91d0ca5488aa3d9a23b127b79f","transactions":["0x02f89383014a348304e447830f4240830f42a8830226b494cd997aef0b9a1d8c02a16204ccce354844edeeff80a4f7a308060000000000000000000000000000000000000000000000000000000000016636c001a07dc2c0285cd2c53657c87826a698de9ae5bb38e2580657fe1772fc08ab53a9f2a05a183dac1ed51f6aac2eff4add4510fd76d71f9dce59a3536fc00bfbb2ac750c","0x02f8d483014a3483096a27830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000fde9b0be445930f929705125fe24049093e628e4000000000000000000000000000000000000000000000001517fd24c7f6670000000000000000000000000000000000000000000000000000006431e74408803c080a036f0e0df96ee863041cc41fad376f2f88364225ff6c10c2e492da014d71ab530a03cca82dd065d09a150f75103ea2e1f2867210c604fd82592ec49fae02cadc20a","0x02f8d483014a348309a03d830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000088c7e4701045571734e2147bad80e3d8
c56500d300000000000000000000000000000000000000000000000023e284d65ede20000000000000000000000000000000000000000000000000000006431e7441d02ac080a03ee196fff4a614411f9d41431f0b174141ae6f62246df4e54117205bb19c4f64a022f123e006139ae334de3bf7b62c06b72045ba7dc0a508d137bcd056d950da33"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":7,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x065878c1c4d88295544c04fec2e74c9dd8b5d656e196a1b7b09ce8cadbb8f979","logs_bloom":"0x0002800000000010002000052000048004008000000010000012000500000000490000004004000000918000010000000000008000000100040200821000806800800010000000000000000800000001c008000800800000000004202000000804000020000400040100020100000000020002800100000000004b90008a02000000180000480200000100800010080400000000000004011000224080000000000000008c0002040080000840000200000000000100c000c4080108020000012001500a000400000000000001000000100110004021000008000000000000100a00221104400010000000110100000400100008002100004280010000000001","gas_used":"0x1bc281","block_hash":"0xcc9c18ed55c91e97f32353e253c69766cd0d2e0acb0e7f92098d01e1d7761ce3","transactions":["0x02f8d483014a3483091e1f830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000b501c0a0f800e68d980f5253650d0cf3a69d16c00000000000000000000000000000000000000000000000000b87d57d89ffe7800000000000000000000000000000000000000000000000000006431e7442365fc001a0b276c68f59bcfb78fe7905a720e9418130d5c87d60da4b6d55faf07e1b1724aba03425daae2e51a061a26bedcd89cf6ead44146ac97f831371ec36a0192728d204","0x02f8d483014a3483094dde830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000000097cc7164250c464fea5f9f91d1abec7718814a0000000000000000000000000000000000000000000000004c40d37c20f44000000
0000000000000000000000000000000000000000000000006431e744372abc001a01f3e58f3baa5e472c08097dafe1e756163c61e7200dc90751f167e796d542f20a02c10596de8b29462c0953a023a8b6c06f74fe77ea66a24598df920d542edab3b","0x02f8d483014a3483094ece830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000083fe74125ec8ffaeee4b2371d7ea17f6ad6f9ba2000000000000000000000000000000000000000000000000f9e4840a6e4938000000000000000000000000000000000000000000000000000006431e744362dec001a066724129c4de96e835cd1377b55541b4582bf4ebcd7c2a3faa4231ade86b14d8a03736bce9203cc0c92878fcc28ee8710961eaddde92bf6a2158c602b4d1bbdbd7","0x02f8d483014a348303750a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000414d9179c5d2207a6e0efeb0319b6c556265974600000000000000000000000000000000000000000000000033979a45ffefac000000000000000000000000000000000000000000000000000006431e74442677c001a0682d2489ba1d9666324060a006f0abe06830cecdeed4398169dc9fbf7199eb59a02e971034255d087d02b25f45a7962b31360bbed70e3aa30e69ee8f64dd6afdb4","0x02f8d483014a348308acf2830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000097c152d0fa30c49603e0e3e013e36c4e29bf7fea0000000000000000000000000000000000000000000000001d58bdca2addf5000000000000000000000000000000000000000000000000000006431e744447a9c001a030e423ab3697fe4ccc5ce92232d7a642a8295f489f2e52b3c3ba2f110c828e7ca057fd4d3d0e700734568b0be067deda7927188f6a67f9600bae3d6c75d201fe57"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":8,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0x7bf525f832aecc6bf7f7b7e329779640bb4477cb47bf1bde512934c5ed45519b","logs_bloom":"0x000380000000021000200005200004800400800000001000001200050000000049000000400
4000000918000010000000000008000000100040200821000806800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000000000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000000100a00221104400010010000110100000400100008002140004280010000000001","gas_used":"0x213d0b","block_hash":"0x5f9c957cde671b50c5661b328b7f3f8a0e56e194a954d8d7cc4274eb1e014a1e","transactions":["0x02f89283014a34820392830f4275830f427583045dd594a449bc031fa0b815ca14fafd0c5edb75ccd9c80f80a4de0e9a3e000000000000000000000000000000000000000000000002017a67f731740000c001a05c4f86d9218cfab447e6ead7abb27444f7e8d3a185a1fbfb6860a36513c89d93a01d4b9b74f049bfc10feeabcb101a18a14e774e89de35ac246e6452c05e94bc98","0x02f8d483014a348309087a830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b781777000000000000000000000000022e40d0a0c0bb77b570445fb59d39bcf14790b660000000000000000000000000000000000000000000000004a61b425a5ee98000000000000000000000000000000000000000000000000000006431e74449860c001a002c2402941acdc25bcaae67c62d58f1a942b32723827f77972c74b159b2c174ea04772118ec71bc7fbe0c9f1c9ef90f58927126480ca769d73704365bfbac65db3","0x02f8d483014a348308c06b830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b78177700000000000000000000000005643a7772017c8544d3841894c1f7c264cd05ffe0000000000000000000000000000000000000000000000000b035a61b2e8be000000000000000000000000000000000000000000000000000006431e7446c578c001a0ac31a5ad06a3897a0c1a909770badf8cec728abd2daf4d125a551778fa597124a013b1de6f741139d957f299bf22de0a91c1d8a4f2ade6743ddcec89bcc9e8b07d","0x02f8d483014a34830922b1830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000576831e77af4b5425b39efb23528441b79ee71e20000000000000000000000000000000000000000000000002bed26c4505ca40000000000000000000000000000000000000
00000000000000006431e7446f712c080a0c105ef2c930e95694d112028a642399e5a56ce6416f9b8df9ad27baa26244483a064f6e5881fa728b7afaa2e2ddd62c3182789cb247f90b6276c14f8bfc1b4f2cf"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":9,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xeb419bf069b8bf9738adcb7fad118724a1d4d6a83821bc532983a2949aa0910d","logs_bloom":"0x000380000000021000200005200004800400800000001000001a000500001000490000004004000000918000010000000000008000000100040200821000806800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000000000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000000100a00221104400010010004110100000400100008002140004280010000000001","gas_used":"0x21de0c","block_hash":"0xb802c08c65bdefdd507fe07634ea29eeaad1859b33ffac2c426dc7b620d22b19","transactions":["0x02f8d483014a3483095beb830f4240830f42a883030d4094d89f830d7795c10613e4d4769c24c05bf60932c680b864b7817770000000000000000000000000f73c129529caa024337c39e467c720cfc45874220000000000000000000000000000000000000000000000000de4f04092790e800000000000000000000000000000000000000000000000000006431e74489081c080a0a100818c4c3ec3b0bced80f81f09fc878b23274266b45e2043956562b6714dcfa023dbcbc4df92ed5817fcc9bcd238a038aad806c69585dc8cf582e6012d012d28"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}},{"payload_id":"0x03c33cc62b81edb6","index":10,"diff":{"state_root":"0x0000000000000000000000000000000000000000000000000000000000000000","receipts_root":"0xaa2
80e93aa4a7d3f616ad391404411abbeebe8bc8fb1ed9b3ef4d0a42bf64ccd","logs_bloom":"0x000380000000021000200005200004800400800000001000001a000500001000490000204004000000918000010000000000008000000100040200821020886800800010000000000000000800000001c00c000800802000000004202000000804000020000400040100020108000000020002800100000000044b98008a02200000180000480200000100800010080400000000002004011000224080000000020000108c0002040080000844000200000040000100c000c4080108020000012001500a000400000000000001000000104110004021000108000000000200100a10221104400010010004110100000400100008002140004280010000000001","gas_used":"0x49f43c","block_hash":"0x2b440a266840a96993d85d45d1de1e81f7a859aaac4654dcd5a990ffa2ef947b","transactions":["0x02f90fb583014a34831d4797830f4240830f42a88327fdba94ebaff6d578733e4603b99cbdbb221482f29a78e180b90f4484779f44000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000140000000000000000000000000000000000000000000000000000000000000280000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003c00000000000000000000000000000000000000000000000000000000000000460000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000000000000000064000000000000000000000000000000000000000000000000000000000000006e00000000000000000000000000000000000000000000000000000000000000780000000000000000000000000000000000000000000000000000000000000082000000000000000000000000000000000000000000000000000000000000008c000000000000000000000000000000000000000000000000000000000000009600000000000000000000000000000000000000000000000000000000000000a000000000000000000000000000000000000000000000000000000000000000aa00000000000000000000000000000000000000000000000000000000000000b400000000000000000000000000000000000000000000000000000000000000be000000000000000000000000000
00000000000000000000000000000000000c800000000000000000000000000000000000000000000000000000000000000d200000000000000000000000000000000000000000000000000000000000000dc00000000000000000000000000000000000000000000000000000000000000e600000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce75532be4cf5bacb01e018950b5be900eafa59f2431fed6b869799529ab39fe0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce76343a51197104ee22e37cf9c48a9eb5c99031a25196c2f1264deb5d4d3ff80000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce770a821c08f4e200bf42a148754153d78e977260a213094b521b5625618ec70000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce78bb3bcd3592df48dcd3a6383c8f61d8434b6058f61a587dfb0c37134294420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce79b35d157e36939c03df12e39599530f615a90e624610d8d023eaf2f8329030000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7a6370bb580180c882bf7214d1f701529ea455f8567b
2be79496c9437a2ce30000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7bf53208371925c87cacbb0bbfbf330fc8a02818e1d73c56760a9fded7f8c80000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7cac670fbf544ec6d7360aacecd6e3fb35ea8a6ebef6161c9563a6d16a4a200000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7d91406552fdfe569345c8561328604a63912a36d21cafa1efed0275ce6b190000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7e6e0b5ccd73c9cea553a19e7ab6e533bc253f552e6b9145dd5470d2612f8d0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce7fc72e52aaff88c842a2092b7ce047cf47a8f56da1035142a41b6a59b856420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce80fcaa166cc2fd1353b40f3071a491cd7ca2746c8943caaa6c024c8df0131f00000000000000000000000000000000000000000000000000000000000000
00008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce812aabb780f12ed0c0c5dc6932220d8c5f730c54ee63384fbfe1e7fa90a5090000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce82c1dea3b99a38cf0743f31402eba0d22c4da43e715d37533da9bc5f8ca4ae0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce835d696b1a6f5089cf9bc4c2c529e181678fa2f2feb745223e7520d885a2260000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce8400f527b7b931ddfe77007be944f58173dfc1c5928eb433ae71e96f61a8420000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce85b6dcd2b462f2d1c72e4b46ea316f9183fb9ea40866724b7eef10211a83390000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce860986c742f73c595e7cf75d5014bdccde828c0fa3891f8a7e77cbaf974e7d0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000
000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce879d5711ffb11c2d9fe9737837f55726ba0609c21d62e2783cc38db59edafa0000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000000000000000000000000000000000000000000000000000000000053ce882976c03e7cf30e96a5a578eff196e4062258f3d859abdf161bcb5fd18356000000000000000000000000000000000000000000000000000000000000000000800000000000000000000000000000000000000000000000000000000000000000c080a0c7ccb6ec845a35639b2905d243be7a6cf2ee1412331d348a4bf65f53ae89cde8a06ecc40e8297c75e86332c2924b96c6bf2334a6d1b1ef803e27c9de692906b138","0x02f8b183014a3481ad830ecd10830ecdaf82b6a994af33add7918f685b2a82c1077bd8c07d220ffa0480b844095ea7b3000000000000000000000000a449bc031fa0b815ca14fafd0c5edb75ccd9c80f00000000000000000000000000000000000000000000000c6a036eb4bc740000c001a0d1877e98821074c02cf20dc84d31d70fbc00027d404fe99f3e887a33082bb6cda016f8a55aea1573b3834180e43d90eb6c4b1ffb321d2a0be8b3aa71eeaed5104a"],"withdrawals":[],"withdrawals_root":"0x77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44"},"metadata":{"block_number":33439826,"new_account_balances":{},"receipts":{}}}]"#; + + let flashblocks: Vec = serde_json::from_str(raw_sequence).unwrap(); + let execution_data = OpExecutionData::from_flashblocks(&flashblocks).unwrap(); + + // Validate against expected final block state from base payload (index 0) + assert_eq!( + execution_data.payload.parent_hash(), + b256!("6ffd2714d5af6c412c57db3f664a5a127516573bbd987fd242d06f71ea662741") + ); + assert_eq!(execution_data.payload.block_number(), 0x1fe4052); + assert_eq!(execution_data.payload.timestamp(), 0x690fdf84); + assert_eq!( + execution_data.payload.fee_recipient(), + address!("4200000000000000000000000000000000000011") + ); + assert_eq!(execution_data.payload.gas_limit(), 0x3938700); + 
assert_eq!(execution_data.payload.as_v1().gas_used, 0x49f43c); + + // Base skipped state root calculation thus state root is expected to be zeros. + // And subsequently the last flashblocks' block hash is not the final block's block hash. + // Real block hash: 0x0c3c3ff081d8a5ea1239bfb8a0593f641154a06b783fa142809880e011cd6a3f + assert_eq!( + execution_data.payload.as_v1().state_root, + b256!("0000000000000000000000000000000000000000000000000000000000000000") + ); + assert_eq!( + execution_data.payload.block_hash(), + // last flashblock block hash + b256!("2b440a266840a96993d85d45d1de1e81f7a859aaac4654dcd5a990ffa2ef947b") + ); + + // Verify receipts root from last flashblock (index 10) + assert_eq!( + execution_data.payload.as_v1().receipts_root, + b256!("aa280e93aa4a7d3f616ad391404411abbeebe8bc8fb1ed9b3ef4d0a42bf64ccd") + ); + + // Verify total transaction count across all 11 flashblocks + // Index 0: 1, Index 1: 2, Index 2: 1, Index 3: 0, Index 4: 4, Index 5: 1 + // Index 6: 3, Index 7: 5, Index 8: 4, Index 9: 1, Index 10: 2 + // Total: 24 transactions + assert_eq!(execution_data.payload.transactions().len(), 24); + + // Verify withdrawals root from last flashblock + assert_eq!( + execution_data.payload.as_v4().unwrap().withdrawals_root, + b256!("77b0fb1616a212bd7cf33d7c28651f19bf6093b2c5f1967e674ec861aeaf9d44") + ); + + // Verify parent beacon block root from base payload + assert_eq!( + execution_data.parent_beacon_block_root(), + Some(b256!("f058b1e43890ed5f838bd07e77db06d075d894343d1b31f6099a345b0d8f7d1b")) + ); + } +} diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/base.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/base.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/flashblock/base.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/base.rs diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/delta.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/delta.rs similarity index 100% 
rename from op-alloy/crates/rpc-types-engine/src/flashblock/delta.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/delta.rs diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/error.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/error.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/flashblock/error.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/error.rs diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/metadata.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/metadata.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/flashblock/metadata.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/metadata.rs diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/mod.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/mod.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/flashblock/mod.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/mod.rs diff --git a/op-alloy/crates/rpc-types-engine/src/flashblock/payload.rs b/rust/op-alloy/crates/rpc-types-engine/src/flashblock/payload.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/flashblock/payload.rs rename to rust/op-alloy/crates/rpc-types-engine/src/flashblock/payload.rs diff --git a/op-alloy/crates/rpc-types-engine/src/lib.rs b/rust/op-alloy/crates/rpc-types-engine/src/lib.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/lib.rs rename to rust/op-alloy/crates/rpc-types-engine/src/lib.rs diff --git a/op-alloy/crates/rpc-types-engine/src/payload/error.rs b/rust/op-alloy/crates/rpc-types-engine/src/payload/error.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/payload/error.rs rename to rust/op-alloy/crates/rpc-types-engine/src/payload/error.rs diff --git a/rust/op-alloy/crates/rpc-types-engine/src/payload/mod.rs b/rust/op-alloy/crates/rpc-types-engine/src/payload/mod.rs 
new file mode 100644 index 00000000000..613251ef6b4 --- /dev/null +++ b/rust/op-alloy/crates/rpc-types-engine/src/payload/mod.rs @@ -0,0 +1,818 @@ +//! Versioned Optimism execution payloads + +pub mod error; +pub mod v3; +pub mod v4; + +use crate::{OpExecutionPayloadSidecar, OpExecutionPayloadV4}; +use alloc::vec::Vec; +use alloy_consensus::{Block, BlockHeader, HeaderInfo, Transaction}; +use alloy_eips::{Decodable2718, Encodable2718, Typed2718, eip7685::EMPTY_REQUESTS_HASH}; +use alloy_primitives::{Address, B256, Bytes, Sealable, U256}; +use alloy_rpc_types_engine::{ + ExecutionPayload, ExecutionPayloadInputV2, ExecutionPayloadV1, ExecutionPayloadV2, + ExecutionPayloadV3, PayloadError, +}; +use error::OpPayloadError; + +/// An execution payload, which can be either [`ExecutionPayloadV2`], [`ExecutionPayloadV3`], or +/// [`OpExecutionPayloadV4`]. +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize))] +#[cfg_attr(feature = "std", derive(ssz_derive::Encode, ssz_derive::Decode))] +#[cfg_attr(feature = "std", ssz(enum_behaviour = "transparent"))] +#[cfg_attr(feature = "serde", serde(untagged))] +pub enum OpExecutionPayload { + /// V1 payload + V1(ExecutionPayloadV1), + /// V2 payload + V2(ExecutionPayloadV2), + /// V3 payload + V3(ExecutionPayloadV3), + /// V4 payload + V4(OpExecutionPayloadV4), +} + +#[cfg(feature = "serde")] +impl<'de> serde::Deserialize<'de> for OpExecutionPayload { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct ExecutionPayloadVisitor; + + impl<'de> serde::de::Visitor<'de> for ExecutionPayloadVisitor { + type Value = OpExecutionPayload; + + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + formatter.write_str("a valid OpExecutionPayload object") + } + + fn visit_map(self, mut map: A) -> Result + where + A: serde::de::MapAccess<'de>, + { + use alloc::string::String; + use alloy_primitives::{U64, map::HashMap}; + use 
alloy_rpc_types_engine::ExecutionPayloadV1; + + enum Fields { + ParentHash, + FeeRecipient, + StateRoot, + ReceiptsRoot, + LogsBloom, + PrevRandao, + BlockNumber, + GasLimit, + GasUsed, + Timestamp, + ExtraData, + BaseFeePerGas, + BlockHash, + Transactions, + Withdrawals, + BlobGasUsed, + ExcessBlobGas, + WithdrawalsRoot, + Unknown(alloc::string::String), + } + + impl<'de> serde::Deserialize<'de> for Fields { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct FieldVisitor; + + impl serde::de::Visitor<'_> for FieldVisitor { + type Value = Fields; + + fn expecting( + &self, + formatter: &mut core::fmt::Formatter<'_>, + ) -> core::fmt::Result { + formatter.write_str("a known field") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + Ok(match value { + "parentHash" => Fields::ParentHash, + "feeRecipient" => Fields::FeeRecipient, + "stateRoot" => Fields::StateRoot, + "receiptsRoot" => Fields::ReceiptsRoot, + "logsBloom" => Fields::LogsBloom, + "prevRandao" => Fields::PrevRandao, + "blockNumber" => Fields::BlockNumber, + "gasLimit" => Fields::GasLimit, + "gasUsed" => Fields::GasUsed, + "timestamp" => Fields::Timestamp, + "extraData" => Fields::ExtraData, + "baseFeePerGas" => Fields::BaseFeePerGas, + "blockHash" => Fields::BlockHash, + "transactions" => Fields::Transactions, + "withdrawals" => Fields::Withdrawals, + "blobGasUsed" => Fields::BlobGasUsed, + "excessBlobGas" => Fields::ExcessBlobGas, + "withdrawalsRoot" => Fields::WithdrawalsRoot, + _ => Fields::Unknown(value.into()), + }) + } + } + + deserializer.deserialize_str(FieldVisitor) + } + } + + let mut parent_hash = None; + let mut fee_recipient = None; + let mut state_root = None; + let mut receipts_root = None; + let mut logs_bloom = None; + let mut prev_randao = None; + let mut block_number = None; + let mut gas_limit = None; + let mut gas_used = None; + let mut timestamp = None; + let mut extra_data = None; + let mut 
base_fee_per_gas = None; + let mut block_hash = None; + let mut transactions = None; + let mut withdrawals = None; + let mut blob_gas_used = None; + let mut excess_blob_gas = None; + let mut withdrawals_root = None; + + #[allow(clippy::collection_is_never_read)] + let mut extra_fields = HashMap::new(); + + while let Some(key) = map.next_key()? { + match key { + Fields::ParentHash => parent_hash = Some(map.next_value()?), + Fields::FeeRecipient => fee_recipient = Some(map.next_value()?), + Fields::StateRoot => state_root = Some(map.next_value()?), + Fields::ReceiptsRoot => receipts_root = Some(map.next_value()?), + Fields::LogsBloom => logs_bloom = Some(map.next_value()?), + Fields::PrevRandao => prev_randao = Some(map.next_value()?), + Fields::BlockNumber => { + let raw = map.next_value::()?; + block_number = Some(raw.to()); + } + Fields::GasLimit => { + let raw = map.next_value::()?; + gas_limit = Some(raw.to()); + } + Fields::GasUsed => { + let raw = map.next_value::()?; + gas_used = Some(raw.to()); + } + Fields::Timestamp => { + let raw = map.next_value::()?; + timestamp = Some(raw.to()); + } + Fields::ExtraData => extra_data = Some(map.next_value()?), + Fields::BaseFeePerGas => base_fee_per_gas = Some(map.next_value()?), + Fields::BlockHash => block_hash = Some(map.next_value()?), + Fields::Transactions => transactions = Some(map.next_value()?), + Fields::Withdrawals => withdrawals = Some(map.next_value()?), + Fields::BlobGasUsed => { + let raw = map.next_value::()?; + blob_gas_used = Some(raw.to()); + } + Fields::ExcessBlobGas => { + let raw = map.next_value::()?; + excess_blob_gas = Some(raw.to()); + } + Fields::WithdrawalsRoot => withdrawals_root = Some(map.next_value()?), + Fields::Unknown(field) => { + let raw = map.next_value::()?; + extra_fields.insert(field, raw); + } + } + } + + let v1 = ExecutionPayloadV1 { + parent_hash: parent_hash + .ok_or_else(|| serde::de::Error::missing_field("parentHash"))?, + fee_recipient: fee_recipient + .ok_or_else(|| 
serde::de::Error::missing_field("feeRecipient"))?, + state_root: state_root + .ok_or_else(|| serde::de::Error::missing_field("stateRoot"))?, + receipts_root: receipts_root + .ok_or_else(|| serde::de::Error::missing_field("receiptsRoot"))?, + logs_bloom: logs_bloom + .ok_or_else(|| serde::de::Error::missing_field("logsBloom"))?, + prev_randao: prev_randao + .ok_or_else(|| serde::de::Error::missing_field("prevRandao"))?, + block_number: block_number + .ok_or_else(|| serde::de::Error::missing_field("blockNumber"))?, + gas_limit: gas_limit + .ok_or_else(|| serde::de::Error::missing_field("gasLimit"))?, + gas_used: gas_used.ok_or_else(|| serde::de::Error::missing_field("gasUsed"))?, + timestamp: timestamp + .ok_or_else(|| serde::de::Error::missing_field("timestamp"))?, + extra_data: extra_data + .ok_or_else(|| serde::de::Error::missing_field("extraData"))?, + base_fee_per_gas: base_fee_per_gas + .ok_or_else(|| serde::de::Error::missing_field("baseFeePerGas"))?, + block_hash: block_hash + .ok_or_else(|| serde::de::Error::missing_field("blockHash"))?, + transactions: transactions + .ok_or_else(|| serde::de::Error::missing_field("transactions"))?, + }; + + // Ensure `withdrawals` is present before proceeding + let withdrawals = + withdrawals.ok_or_else(|| serde::de::Error::missing_field("withdrawals"))?; + + // Construct base V2 payload + let payload_v2 = ExecutionPayloadV2 { payload_inner: v1, withdrawals }; + + // Ensure `blob_gas_used` and `excess_blob_gas` are either both present or both + // absent + match (blob_gas_used, excess_blob_gas) { + // If both are present, create V3 + (Some(blob_gas_used), Some(excess_blob_gas)) => { + let payload_v3 = ExecutionPayloadV3 { + payload_inner: payload_v2, + blob_gas_used, + excess_blob_gas, + }; + + // If `withdrawals_root` is present, wrap into V4; otherwise, return V3 + if let Some(withdrawals_root) = withdrawals_root { + Ok(OpExecutionPayload::V4(OpExecutionPayloadV4 { + payload_inner: payload_v3, + withdrawals_root, + })) + 
} else { + Ok(OpExecutionPayload::V3(payload_v3)) + } + } + // If one is missing, reject as invalid + (Some(_), None) | (None, Some(_)) => { + Err(serde::de::Error::custom("invalid enum variant")) + } + // If neither are present, return V2 + (None, None) => Ok(OpExecutionPayload::V2(payload_v2)), + } + } + } + + const FIELDS: &[&str] = &[ + "parentHash", + "feeRecipient", + "stateRoot", + "receiptsRoot", + "logsBloom", + "prevRandao", + "blockNumber", + "gasLimit", + "gasUsed", + "timestamp", + "extraData", + "baseFeePerGas", + "blockHash", + "transactions", + "withdrawals", + "blobGasUsed", + "excessBlobGas", + "withdrawalsRoot", + ]; + + deserializer.deserialize_struct("OpExecutionPayload", FIELDS, ExecutionPayloadVisitor) + } +} + +impl OpExecutionPayload { + /// Conversion from [`alloy_consensus::Block`]. Also returns the + /// [`OpExecutionPayloadSidecar`] extracted from the block. + /// + /// See also [`from_block_unchecked`](OpExecutionPayload::from_block_unchecked). + /// + /// Note: This re-calculates the block hash. + pub fn from_block_slow(block: &Block) -> (Self, OpExecutionPayloadSidecar) + where + T: Encodable2718 + Transaction, + H: BlockHeader + Sealable, + { + Self::from_block_unchecked(block.hash_slow(), block) + } + + /// Conversion from [`alloy_consensus::Block`]. Also returns the + /// [`OpExecutionPayloadSidecar`] extracted from the block. + /// + /// See also [`ExecutionPayload::from_block_unchecked`]. + /// See also [`OpExecutionPayloadSidecar::from_block`]. 
+ pub fn from_block_unchecked( + block_hash: B256, + block: &Block, + ) -> (Self, OpExecutionPayloadSidecar) + where + T: Encodable2718 + Transaction, + H: BlockHeader, + { + let sidecar = OpExecutionPayloadSidecar::from_block(block); + + let execution_payload = match block.withdrawals_root() { + Some(withdrawals_root) if sidecar.isthmus().is_some() => { + // block with (empty) request hashes: V4 + Self::V4(OpExecutionPayloadV4::from_v3_with_withdrawals_root( + ExecutionPayloadV3::from_block_unchecked(block_hash, block), + withdrawals_root, + )) + } + Some(_) if block.header.parent_beacon_block_root().is_some() => { + // block with parent beacon block root: at least V3 + Self::V3(ExecutionPayloadV3::from_block_unchecked(block_hash, block)) + } + Some(_) => { + // block with withdrawals root: at least V2 + Self::V2(ExecutionPayloadV2::from_block_unchecked(block_hash, block)) + } + None => { + // otherwise V1 + Self::V1(ExecutionPayloadV1::from_block_unchecked(block_hash, block)) + } + }; + + (execution_payload, sidecar) + } + + /// Creates a new instance from `newPayloadV2` payload, i.e. [`V1`](Self::V1) or + /// [`V2`](Self::V2) variant. + /// + /// Spec: + pub fn v2(payload: ExecutionPayloadInputV2) -> Self { + match payload.into_payload() { + ExecutionPayload::V1(payload) => Self::V1(payload), + ExecutionPayload::V2(payload) => Self::V2(payload), + _ => unreachable!(), + } + } + + /// Creates a new instance from `newPayloadV3` payload, i.e. [`V3`](Self::V3) variant. + /// + /// Spec: + pub const fn v3(payload: ExecutionPayloadV3) -> Self { + Self::V3(payload) + } + + /// Creates a new instance from `newPayloadV4` payload, i.e. [`V4`](Self::V4) variant. + /// + /// Spec: + pub const fn v4(payload: OpExecutionPayloadV4) -> Self { + Self::V4(payload) + } + + /// Returns a reference to the V1 payload. 
+ pub const fn as_v1(&self) -> &ExecutionPayloadV1 { + match self { + Self::V1(payload) => payload, + Self::V2(payload) => &payload.payload_inner, + Self::V3(payload) => &payload.payload_inner.payload_inner, + Self::V4(payload) => &payload.payload_inner.payload_inner.payload_inner, + } + } + + /// Returns a mutable reference to the V1 payload. + pub const fn as_v1_mut(&mut self) -> &mut ExecutionPayloadV1 { + match self { + Self::V1(payload) => payload, + Self::V2(payload) => &mut payload.payload_inner, + Self::V3(payload) => &mut payload.payload_inner.payload_inner, + Self::V4(payload) => &mut payload.payload_inner.payload_inner.payload_inner, + } + } + + /// Returns a reference to the V2 payload, if any. + pub const fn as_v2(&self) -> Option<&ExecutionPayloadV2> { + match self { + Self::V1(_) => None, + Self::V2(payload) => Some(payload), + Self::V3(payload) => Some(&payload.payload_inner), + Self::V4(payload) => Some(&payload.payload_inner.payload_inner), + } + } + + /// Returns a mutable reference to the V2 payload, if any. + pub const fn as_v2_mut(&mut self) -> Option<&mut ExecutionPayloadV2> { + match self { + Self::V1(_) => None, + Self::V2(payload) => Some(payload), + Self::V3(payload) => Some(&mut payload.payload_inner), + Self::V4(payload) => Some(&mut payload.payload_inner.payload_inner), + } + } + + /// Returns a reference to the V3 payload, if any. + pub const fn as_v3(&self) -> Option<&ExecutionPayloadV3> { + match self { + Self::V1(_) | Self::V2(_) => None, + Self::V3(payload) => Some(payload), + Self::V4(payload) => Some(&payload.payload_inner), + } + } + + /// Returns a mutable reference to the V3 payload, if any. + pub const fn as_v3_mut(&mut self) -> Option<&mut ExecutionPayloadV3> { + match self { + Self::V1(_) | Self::V2(_) => None, + Self::V3(payload) => Some(payload), + Self::V4(payload) => Some(&mut payload.payload_inner), + } + } + + /// Returns a reference to the V4 payload, if any. 
+ pub const fn as_v4(&self) -> Option<&OpExecutionPayloadV4> { + match self { + Self::V1(_) | Self::V2(_) | Self::V3(_) => None, + Self::V4(payload) => Some(payload), + } + } + + /// Returns a mutable reference to the V4 payload, if any. + pub const fn as_v4_mut(&mut self) -> Option<&mut OpExecutionPayloadV4> { + match self { + Self::V1(_) | Self::V2(_) | Self::V3(_) => None, + Self::V4(payload) => Some(payload), + } + } + + /// Returns the transactions for the payload. + pub const fn transactions(&self) -> &Vec { + &self.as_v1().transactions + } + + /// Returns a mutable reference to the transactions for the payload. + pub const fn transactions_mut(&mut self) -> &mut Vec { + &mut self.as_v1_mut().transactions + } + + /// Returns the parent hash for the payload. + pub const fn parent_hash(&self) -> B256 { + self.as_v1().parent_hash + } + + /// Returns the block hash for the payload. + pub const fn block_hash(&self) -> B256 { + self.as_v1().block_hash + } + + /// Returns the block number for this payload. + pub const fn block_number(&self) -> u64 { + self.as_v1().block_number + } + + /// Returns the timestamp for this payload. + pub const fn timestamp(&self) -> u64 { + self.as_v1().timestamp + } + + /// Returns the fee recipient for this payload. + pub const fn fee_recipient(&self) -> Address { + self.as_v1().fee_recipient + } + + /// Returns the gas limit for this payload. + pub const fn gas_limit(&self) -> u64 { + self.as_v1().gas_limit + } + + /// Returns the saturated base fee per gas for this payload. + pub fn saturated_base_fee_per_gas(&self) -> u64 { + self.as_v1().base_fee_per_gas.saturating_to() + } + + /// Returns the excess blob gas for this payload. + pub fn excess_blob_gas(&self) -> Option { + self.as_v3().map(|payload| payload.excess_blob_gas) + } + + /// Returns the blob gas used for this payload. + pub fn blob_gas_used(&self) -> Option { + self.as_v3().map(|payload| payload.blob_gas_used) + } + + /// Returns the prev randao for this payload. 
+ pub const fn prev_randao(&self) -> B256 { + self.as_v1().prev_randao + } + + /// Extracts essential information into one container type. + pub fn header_info(&self) -> HeaderInfo { + HeaderInfo { + number: self.block_number(), + beneficiary: self.fee_recipient(), + timestamp: self.timestamp(), + gas_limit: self.gas_limit(), + base_fee_per_gas: Some(self.saturated_base_fee_per_gas()), + excess_blob_gas: self.excess_blob_gas(), + blob_gas_used: self.blob_gas_used(), + difficulty: U256::ZERO, + mix_hash: Some(self.prev_randao()), + } + } + + /// Converts [`OpExecutionPayload`] to [`Block`] with raw transactions. + /// + /// Caution: This does not set fields that are not part of the payload and only part of the + /// [`OpExecutionPayloadSidecar`]: + /// - `parent_beacon_block_root` + /// + /// See also: [`OpExecutionPayload::into_block_with_sidecar_raw`] + pub fn into_block_raw(self) -> Result, PayloadError> { + match self { + Self::V1(payload) => payload.into_block_raw(), + Self::V2(payload) => payload.into_block_raw(), + Self::V3(payload) => payload.into_block_raw(), + Self::V4(payload) => payload.into_block_raw(), + } + } + + /// Creates a new unsealed block from the given payload and payload sidecar with raw + /// transactions. + /// + /// This sets the `parent_beacon_block_root` and `requests_hash` if present in the sidecar. + /// Also validates that L1 withdrawals are empty. 
+ /// + /// See also: [`OpExecutionPayload::try_into_block_with_sidecar`] + pub fn into_block_with_sidecar_raw( + self, + sidecar: &OpExecutionPayloadSidecar, + ) -> Result, OpPayloadError> { + if let Some(payload) = self.as_v2() && + !payload.withdrawals.is_empty() + { + return Err(OpPayloadError::NonEmptyL1Withdrawals); + } + + let mut block = self.into_block_raw()?; + + if let Some(blobs_hashes) = sidecar.versioned_hashes() && + !blobs_hashes.is_empty() + { + return Err(OpPayloadError::NonEmptyBlobVersionedHashes); + } + if let Some(reqs_hash) = sidecar.requests_hash() { + if reqs_hash != EMPTY_REQUESTS_HASH { + return Err(OpPayloadError::NonEmptyELRequests); + } + block.header.requests_hash = Some(EMPTY_REQUESTS_HASH) + } + block.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); + + Ok(block) + } + + #[allow(rustdoc::broken_intra_doc_links)] + /// Converts [`OpExecutionPayload`] to [`Block`]. + /// + /// Checks that payload doesn't contain: + /// - blob transactions + /// - L1 withdrawals + /// + /// Caution: This does not set fields that are not part of the payload and only part of the + /// [`OpExecutionPayloadSidecar`]: + /// - `parent_beacon_block_root` + /// + /// See also: [`OpExecutionPayload::try_into_block_with_sidecar`] + pub fn try_into_block(self) -> Result, OpPayloadError> { + self.try_into_block_with(|tx| { + T::decode_2718_exact(tx.as_ref()) + .map_err(alloy_rlp::Error::from) + .map_err(PayloadError::from) + }) + } + + #[allow(rustdoc::broken_intra_doc_links)] + /// Converts [`OpExecutionPayload`] to [`Block`] with a custom transaction mapper. 
+ /// + /// Checks that payload doesn't contain: + /// - blob transactions + /// - L1 withdrawals + /// + /// Caution: This does not set fields that are not part of the payload and only part of the + /// [`OpExecutionPayloadSidecar`]: + /// - `parent_beacon_block_root` + /// + /// See also: [`OpExecutionPayload::try_into_block_with_sidecar_with`] + pub fn try_into_block_with(self, f: F) -> Result, OpPayloadError> + where + T: Typed2718, + F: FnMut(alloy_primitives::Bytes) -> Result, + E: Into, + { + if let Some(payload) = self.as_v2() && + !payload.withdrawals.is_empty() + { + return Err(OpPayloadError::NonEmptyL1Withdrawals); + } + let block = match self { + Self::V1(payload) => return Ok(payload.try_into_block_with(f)?), + Self::V2(payload) => return Ok(payload.try_into_block_with(f)?), + Self::V3(payload) => payload.try_into_block_with(f)?, + Self::V4(payload) => payload.try_into_block_with(f)?, + }; + if block.body.has_eip4844_transactions() { + return Err(OpPayloadError::BlobTransaction); + } + + Ok(block) + } + + /// Tries to create a new unsealed block from the given payload and payload sidecar. + /// + /// Additional to checks performed in [`OpExecutionPayload::try_into_block`], which is called + /// under the hood, also checks that sidecar doesn't contain: + /// - blob versioned hashes + /// - execution layer requests + /// + /// See also docs for + /// [`ExecutionPayload::try_into_block_with_sidecar`](alloy_rpc_types_engine::ExecutionPayload::try_into_block_with_sidecar). + pub fn try_into_block_with_sidecar( + self, + sidecar: &OpExecutionPayloadSidecar, + ) -> Result, OpPayloadError> { + self.try_into_block_with_sidecar_with(sidecar, |tx| { + T::decode_2718_exact(tx.as_ref()) + .map_err(alloy_rlp::Error::from) + .map_err(PayloadError::from) + }) + } + + /// Tries to create a new unsealed block from the given payload and payload sidecar with a + /// custom transaction mapper. 
+ /// + /// Additional to checks performed in [`OpExecutionPayload::try_into_block_with`], which is + /// called under the hood, also checks that sidecar doesn't contain: + /// - blob versioned hashes + /// - execution layer requests + /// + /// See also docs for + /// [`ExecutionPayload::try_into_block_with_sidecar_with`](alloy_rpc_types_engine::ExecutionPayload::try_into_block_with_sidecar_with). + pub fn try_into_block_with_sidecar_with( + self, + sidecar: &OpExecutionPayloadSidecar, + f: F, + ) -> Result, OpPayloadError> + where + T: Typed2718, + F: FnMut(alloy_primitives::Bytes) -> Result, + E: Into, + { + let mut base_payload = self.try_into_block_with(f)?; + if let Some(blobs_hashes) = sidecar.versioned_hashes() && + !blobs_hashes.is_empty() + { + return Err(OpPayloadError::NonEmptyBlobVersionedHashes); + } + if let Some(reqs_hash) = sidecar.requests_hash() { + if reqs_hash != EMPTY_REQUESTS_HASH { + return Err(OpPayloadError::NonEmptyELRequests); + } + base_payload.header.requests_hash = Some(EMPTY_REQUESTS_HASH) + } + base_payload.header.parent_beacon_block_root = sidecar.parent_beacon_block_root(); + + Ok(base_payload) + } + + /// Returns an iterator over the decoded transactions in this payload. + /// + /// This iterator will decode transactions on the fly. + pub fn decoded_transactions( + &self, + ) -> impl Iterator> + '_ { + self.transactions().iter().map(|tx_bytes| T::decode_2718_exact(tx_bytes.as_ref())) + } + + /// Returns iterator over decoded transactions with their original encoded bytes. + /// + /// This iterator will decode transactions on the fly and return them with their bytes. + pub fn decoded_transactions_with_encoded( + &self, + ) -> impl Iterator>> + '_ + { + self.transactions().iter().map(|tx_bytes| { + T::decode_2718_exact(tx_bytes.as_ref()) + .map(|tx| alloy_eips::eip2718::WithEncoded::new(tx_bytes.clone(), tx)) + }) + } + + /// Returns an iterator over the recovered transactions in this payload. 
+ /// + /// This iterator will decode and recover signer addresses for transactions on the fly. + pub fn recovered_transactions( + &self, + ) -> impl Iterator< + Item = Result< + alloy_consensus::transaction::Recovered, + alloy_consensus::crypto::RecoveryError, + >, + > + '_ + where + T: Decodable2718 + alloy_consensus::transaction::SignerRecoverable, + { + self.decoded_transactions::().map(|res| { + res.map_err(alloy_consensus::crypto::RecoveryError::from_source) + .and_then(|tx| tx.try_into_recovered()) + }) + } + + /// Returns an iterator over the recovered transactions in this payload with their + /// original encoded bytes. + /// + /// This iterator will decode and recover signer addresses for transactions on the fly + /// and return them with their bytes. + pub fn recovered_transactions_with_encoded( + &self, + ) -> impl Iterator< + Item = Result< + alloy_eips::eip2718::WithEncoded>, + alloy_consensus::crypto::RecoveryError, + >, + > + '_ + where + T: Decodable2718 + alloy_consensus::transaction::SignerRecoverable, + { + self.transactions().iter().map(|tx_bytes| { + T::decode_2718_exact(tx_bytes.as_ref()) + .map_err(alloy_consensus::crypto::RecoveryError::from_source) + .and_then(|tx| { + tx.try_into_recovered().map(|recovered| { + alloy_eips::eip2718::WithEncoded::new(tx_bytes.clone(), recovered) + }) + }) + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + #[cfg(feature = "serde")] + fn serde_payload_input_enum_v4() { + let response_v4 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0","withdrawalsRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119"}"#; + + let payload: OpExecutionPayload = serde_json::from_str(response_v4).unwrap(); + assert!(payload.as_v4().is_some()); + assert_eq!(serde_json::to_string(&payload).unwrap(), response_v4); + + let payload_v4: OpExecutionPayloadV4 = serde_json::from_str(response_v4).unwrap(); + assert_eq!(payload.as_v4().unwrap(), &payload_v4); + } + + #[test] + #[cfg(feature = "serde")] + fn serde_payload_input_enum_v3() { + let response_v3 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[],"blobGasUsed":"0x0","excessBlobGas":"0x0"}"#; + + let payload: OpExecutionPayload = serde_json::from_str(response_v3).unwrap(); + assert!(payload.as_v3().is_some()); + assert_eq!(serde_json::to_string(&payload).unwrap(), response_v3); + + let payload_v3: ExecutionPayloadV3 = serde_json::from_str(response_v3).unwrap(); + assert_eq!(payload.as_v3().unwrap(), &payload_v3); + } + + #[test] + #[cfg(feature = "serde")] + fn serde_payload_input_enum_v2() { + let response_v2 = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[]}"#; + + let payload: OpExecutionPayload = serde_json::from_str(response_v2).unwrap(); + assert!(payload.as_v3().is_none()); + assert_eq!(serde_json::to_string(&payload).unwrap(), response_v2); + + let payload_v2: ExecutionPayloadV2 = serde_json::from_str(response_v2).unwrap(); + assert_eq!(payload.as_v2(), Some(&payload_v2)); + } + + #[test] + #[cfg(feature = "serde")] + fn serde_payload_input_enum_faulty_v2() { + // incomplete V3 payload should be rejected even if it has all V2 fields + let response_faulty = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"withdrawals":[], "blobGasUsed": "0x0"}"#; + + let payload: Result = + serde_json::from_str(response_faulty); + assert!(payload.is_err()); + } + + #[test] + #[cfg(feature = "serde")] + fn serde_payload_input_enum_faulty_v1() { + // incomplete V3 payload should be rejected even if it has all V1 fields + let response_faulty = 
r#"{"parentHash":"0xe927a1448525fb5d32cb50ee1408461a945ba6c39bd5cf5621407d500ecc8de9","feeRecipient":"0x0000000000000000000000000000000000000000","stateRoot":"0x10f8a0830000e8edef6d00cc727ff833f064b1950afd591ae41357f97e543119","receiptsRoot":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","prevRandao":"0xe0d8b4521a7da1582a713244ffb6a86aa1726932087386e2dc7973f43fc6cb24","blockNumber":"0x1","gasLimit":"0x2ffbd2","gasUsed":"0x0","timestamp":"0x1235","extraData":"0xd883010d00846765746888676f312e32312e30856c696e7578","baseFeePerGas":"0x342770c0","blockHash":"0x44d0fa5f2f73a938ebb96a2a21679eb8dea3e7b7dd8fd9f35aa756dda8bf0a8a","transactions":[],"blobGasUsed": "0x0"}"#; + + let payload: Result = + serde_json::from_str(response_faulty); + assert!(payload.is_err()); + } +} diff --git a/op-alloy/crates/rpc-types-engine/src/payload/v3.rs b/rust/op-alloy/crates/rpc-types-engine/src/payload/v3.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/payload/v3.rs rename to rust/op-alloy/crates/rpc-types-engine/src/payload/v3.rs diff --git a/op-alloy/crates/rpc-types-engine/src/payload/v4.rs b/rust/op-alloy/crates/rpc-types-engine/src/payload/v4.rs similarity index 96% rename from op-alloy/crates/rpc-types-engine/src/payload/v4.rs rename to rust/op-alloy/crates/rpc-types-engine/src/payload/v4.rs index b0eb947612c..24e4059f9b8 100644 --- a/op-alloy/crates/rpc-types-engine/src/payload/v4.rs +++ 
b/rust/op-alloy/crates/rpc-types-engine/src/payload/v4.rs @@ -143,12 +143,12 @@ impl ssz::Encode for OpExecutionPayloadV4 { } fn ssz_append(&self, buf: &mut Vec) { - let offset = ::ssz_fixed_len() * 6 - + ::ssz_fixed_len() - + ::ssz_fixed_len() - + ::ssz_fixed_len() * 6 - + ::ssz_fixed_len() - + ssz::BYTES_PER_LENGTH_OFFSET * 3; + let offset = ::ssz_fixed_len() * 6 + + ::ssz_fixed_len() + + ::ssz_fixed_len() + + ::ssz_fixed_len() * 6 + + ::ssz_fixed_len() + + ssz::BYTES_PER_LENGTH_OFFSET * 3; let mut encoder = ssz::SszEncoder::container(buf, offset); @@ -175,8 +175,8 @@ impl ssz::Encode for OpExecutionPayloadV4 { } fn ssz_bytes_len(&self) -> usize { - ::ssz_bytes_len(&self.payload_inner) - + ::ssz_fixed_len() + ::ssz_bytes_len(&self.payload_inner) + + ::ssz_fixed_len() } } diff --git a/op-alloy/crates/rpc-types-engine/src/sidecar.rs b/rust/op-alloy/crates/rpc-types-engine/src/sidecar.rs similarity index 100% rename from op-alloy/crates/rpc-types-engine/src/sidecar.rs rename to rust/op-alloy/crates/rpc-types-engine/src/sidecar.rs diff --git a/op-alloy/crates/rpc-types-engine/src/superchain.rs b/rust/op-alloy/crates/rpc-types-engine/src/superchain.rs similarity index 98% rename from op-alloy/crates/rpc-types-engine/src/superchain.rs rename to rust/op-alloy/crates/rpc-types-engine/src/superchain.rs index 04635a3a90f..39b35054af2 100644 --- a/op-alloy/crates/rpc-types-engine/src/superchain.rs +++ b/rust/op-alloy/crates/rpc-types-engine/src/superchain.rs @@ -57,7 +57,7 @@ impl core::fmt::Display for ProtocolVersion { } } -/// An error that can occur when encoding or decoding a ProtocolVersion. +/// An error that can occur when encoding or decoding a `ProtocolVersion`. #[derive(Copy, Clone, thiserror::Error, Debug, Display, From)] pub enum ProtocolVersionError { /// An unsupported version was encountered. 
@@ -114,14 +114,14 @@ impl ProtocolVersion { } } - /// Returns the inner value of the ProtocolVersion enum + /// Returns the inner value of the `ProtocolVersion` enum pub const fn inner(&self) -> ProtocolVersionFormatV0 { match self { Self::V0(value) => *value, } } - /// Returns the inner value of the ProtocolVersion enum if it is V0, otherwise None + /// Returns the inner value of the `ProtocolVersion` enum if it is V0, otherwise None pub const fn as_v0(&self) -> Option { match self { Self::V0(value) => Some(*value), @@ -163,7 +163,7 @@ impl ProtocolVersion { } } - /// Returns a human-readable string representation of the ProtocolVersion + /// Returns a human-readable string representation of the `ProtocolVersion` pub fn display(&self) -> String { match self { Self::V0(value) => format!("{value}"), diff --git a/rust/op-alloy/crates/rpc-types/Cargo.toml b/rust/op-alloy/crates/rpc-types/Cargo.toml new file mode 100644 index 00000000000..58537af2a04 --- /dev/null +++ b/rust/op-alloy/crates/rpc-types/Cargo.toml @@ -0,0 +1,85 @@ +[package] +name = "op-alloy-rpc-types" +description = "Optimism RPC types" + +version = "0.23.1" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://github.com/ethereum-optimism/optimism" +authors = ["Alloy Contributors"] +repository = "https://github.com/ethereum-optimism/optimism" +exclude = ["benches/", "tests/"] + +[lints] +workspace = true + +[dependencies] +# Workspace +op-alloy-consensus = { workspace = true, features = ["serde"] } + +# Alloy +alloy-serde.workspace = true +alloy-consensus.workspace = true +alloy-network-primitives.workspace = true +alloy-eips = { workspace = true, features = ["serde"] } +alloy-rpc-types-eth = { workspace = true, features = ["serde"] } +alloy-primitives = { workspace = true, features = ["map", "rlp", "serde"] } + +# Serde +serde_json.workspace = true +serde = { workspace = true, features = ["derive"] } + +# RPC +jsonrpsee = { workspace = true, optional 
= true } + +# arbitrary +arbitrary = { workspace = true, features = ["derive"], optional = true } + +# misc +derive_more = { workspace = true, features = ["as_ref", "deref_mut", "try_from"] } +thiserror.workspace = true + +[dev-dependencies] +rand.workspace = true +arbitrary = { workspace = true, features = ["derive"] } +alloy-consensus = { workspace = true, features = ["arbitrary"] } +alloy-primitives = { workspace = true, features = ["arbitrary"] } +alloy-rpc-types-eth = { workspace = true, features = ["arbitrary"] } +similar-asserts.workspace = true + +[features] +default = ["std"] +std = [ + "alloy-network-primitives/std", + "alloy-eips/std", + "alloy-primitives/std", + "alloy-rpc-types-eth/std", + "op-alloy-consensus/std", + "alloy-consensus/std", + "alloy-serde/std", + "derive_more/std", + "serde/std", + "serde_json/std", + "thiserror/std" +] +arbitrary = [ + "std", + "dep:arbitrary", + "alloy-primitives/arbitrary", + "alloy-rpc-types-eth/arbitrary", + "op-alloy-consensus/arbitrary", + "alloy-consensus/arbitrary", + "alloy-eips/arbitrary", + "alloy-serde/arbitrary" +] +k256 = ["alloy-rpc-types-eth/k256", "op-alloy-consensus/k256"] +serde = [ + "op-alloy-consensus/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-network-primitives/serde", + "alloy-primitives/serde", + "alloy-rpc-types-eth/serde" +] +jsonrpsee = ["dep:jsonrpsee"] diff --git a/rust/op-alloy/crates/rpc-types/README.md b/rust/op-alloy/crates/rpc-types/README.md new file mode 100644 index 00000000000..4bee757ed1d --- /dev/null +++ b/rust/op-alloy/crates/rpc-types/README.md @@ -0,0 +1,3 @@ +## `op-alloy-rpc-types` + +Optimism RPC-related types. 
diff --git a/op-alloy/crates/rpc-types/src/error.rs b/rust/op-alloy/crates/rpc-types/src/error.rs similarity index 100% rename from op-alloy/crates/rpc-types/src/error.rs rename to rust/op-alloy/crates/rpc-types/src/error.rs diff --git a/op-alloy/crates/rpc-types/src/genesis.rs b/rust/op-alloy/crates/rpc-types/src/genesis.rs similarity index 100% rename from op-alloy/crates/rpc-types/src/genesis.rs rename to rust/op-alloy/crates/rpc-types/src/genesis.rs diff --git a/op-alloy/crates/rpc-types/src/lib.rs b/rust/op-alloy/crates/rpc-types/src/lib.rs similarity index 100% rename from op-alloy/crates/rpc-types/src/lib.rs rename to rust/op-alloy/crates/rpc-types/src/lib.rs diff --git a/rust/op-alloy/crates/rpc-types/src/receipt.rs b/rust/op-alloy/crates/rpc-types/src/receipt.rs new file mode 100644 index 00000000000..5d7be127446 --- /dev/null +++ b/rust/op-alloy/crates/rpc-types/src/receipt.rs @@ -0,0 +1,335 @@ +//! Receipt types for RPC + +use alloy_consensus::{Receipt, ReceiptWithBloom, TxReceipt}; +use alloy_rpc_types_eth::Log; +use alloy_serde::OtherFields; +use op_alloy_consensus::{ + OpDepositReceipt, OpDepositReceiptWithBloom, OpReceipt, OpReceiptEnvelope, +}; +use serde::{Deserialize, Serialize}; + +/// OP Transaction Receipt type +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +#[doc(alias = "OpTxReceipt")] +pub struct OpTransactionReceipt { + /// Regular eth transaction receipt including deposit receipts + #[serde(flatten)] + pub inner: alloy_rpc_types_eth::TransactionReceipt>>, + /// L1 block info of the transaction. 
+ #[serde(flatten)] + pub l1_block_info: L1BlockInfo, +} + +impl alloy_network_primitives::ReceiptResponse for OpTransactionReceipt { + fn contract_address(&self) -> Option { + self.inner.contract_address + } + + fn status(&self) -> bool { + self.inner.inner.status() + } + + fn block_hash(&self) -> Option { + self.inner.block_hash + } + + fn block_number(&self) -> Option { + self.inner.block_number + } + + fn transaction_hash(&self) -> alloy_primitives::TxHash { + self.inner.transaction_hash + } + + fn transaction_index(&self) -> Option { + self.inner.transaction_index() + } + + fn gas_used(&self) -> u64 { + self.inner.gas_used() + } + + fn effective_gas_price(&self) -> u128 { + self.inner.effective_gas_price() + } + + fn blob_gas_used(&self) -> Option { + self.inner.blob_gas_used() + } + + fn blob_gas_price(&self) -> Option { + self.inner.blob_gas_price() + } + + fn from(&self) -> alloy_primitives::Address { + self.inner.from() + } + + fn to(&self) -> Option { + self.inner.to() + } + + fn cumulative_gas_used(&self) -> u64 { + self.inner.cumulative_gas_used() + } + + fn state_root(&self) -> Option { + self.inner.state_root() + } +} + +/// Additional fields for Optimism transaction receipts: +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +#[doc(alias = "OptimismTxReceiptFields")] +pub struct OpTransactionReceiptFields { + /// L1 block info. + #[serde(flatten)] + pub l1_block_info: L1BlockInfo, + /* --------------------------------------- Regolith --------------------------------------- */ + /// Deposit nonce for deposit transactions. + /// + /// Always null prior to the Regolith hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub deposit_nonce: Option, + /* ---------------------------------------- Canyon ---------------------------------------- */ + /// Deposit receipt version for deposit transactions. 
+ /// + /// Always null prior to the Canyon hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub deposit_receipt_version: Option, +} + +/// Serialize/Deserialize l1FeeScalar to/from string +mod l1_fee_scalar_serde { + use serde::{Deserialize, de}; + + pub(super) fn serialize(value: &Option, s: S) -> Result + where + S: serde::Serializer, + { + use alloc::string::ToString; + if let Some(v) = value { + return s.serialize_str(&v.to_string()); + } + s.serialize_none() + } + + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: serde::Deserializer<'de>, + { + use alloc::string::String; + let s: Option = Option::deserialize(deserializer)?; + if let Some(s) = s { + return Ok(Some(s.parse::().map_err(de::Error::custom)?)); + } + + Ok(None) + } +} + +impl From for OtherFields { + fn from(value: OpTransactionReceiptFields) -> Self { + serde_json::to_value(value).unwrap().try_into().unwrap() + } +} + +/// L1 block info extracted from input of first transaction in every block. +/// +/// The subset of [`OpTransactionReceiptFields`], that encompasses L1 block +/// info: +/// +#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct L1BlockInfo { + /// L1 base fee is the minimum price per unit of gas. + /// + /// Present from pre-bedrock as de facto L1 price per unit of gas. L1 base fee after Bedrock. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_gas_price: Option, + /// L1 gas used. + /// + /// Present from pre-bedrock, deprecated as of Fjord. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_gas_used: Option, + /// L1 fee for the transaction. + /// + /// Present from pre-bedrock. 
+ #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_fee: Option, + /// L1 fee scalar for the transaction + /// + /// Present from pre-bedrock to Ecotone. Null after Ecotone. + #[serde(default, skip_serializing_if = "Option::is_none", with = "l1_fee_scalar_serde")] + pub l1_fee_scalar: Option, + /* ---------------------------------------- Ecotone ---------------------------------------- */ + /// L1 base fee scalar. Applied to base fee to compute weighted gas price multiplier. + /// + /// Always null prior to the Ecotone hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_base_fee_scalar: Option, + /// L1 blob base fee. + /// + /// Always null prior to the Ecotone hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_blob_base_fee: Option, + /// L1 blob base fee scalar. Applied to blob base fee to compute weighted gas price multiplier. + /// + /// Always null prior to the Ecotone hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub l1_blob_base_fee_scalar: Option, + /* ---------------------------------------- Isthmus ---------------------------------------- */ + /// Operator fee scalar. + /// + /// Always null prior to the Isthmus hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub operator_fee_scalar: Option, + /// Operator fee constant. + /// + /// Always null prior to the Isthmus hardfork. + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub operator_fee_constant: Option, + /* ---------------------------------------- Jovian ---------------------------------------- */ + /// DA footprint gas scalar. Used to set the DA footprint block limit on the L2. + /// + /// Always null prior to the Jovian hardfork. 
+ #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub da_footprint_gas_scalar: Option, +} + +impl Eq for L1BlockInfo {} + +impl From for OpReceiptEnvelope { + fn from(value: OpTransactionReceipt) -> Self { + let inner_envelope = value.inner.inner.into(); + + /// Helper function to convert the inner logs within a [`ReceiptWithBloom`] from RPC to + /// consensus types. + #[inline(always)] + fn convert_standard_receipt( + receipt: ReceiptWithBloom>, + ) -> ReceiptWithBloom> { + let ReceiptWithBloom { logs_bloom, receipt } = receipt; + + let consensus_logs = receipt.logs.into_iter().map(|log| log.inner).collect(); + ReceiptWithBloom { + receipt: Receipt { + status: receipt.status, + cumulative_gas_used: receipt.cumulative_gas_used, + logs: consensus_logs, + }, + logs_bloom, + } + } + + match inner_envelope { + OpReceiptEnvelope::Legacy(receipt) => Self::Legacy(convert_standard_receipt(receipt)), + OpReceiptEnvelope::Eip2930(receipt) => Self::Eip2930(convert_standard_receipt(receipt)), + OpReceiptEnvelope::Eip1559(receipt) => Self::Eip1559(convert_standard_receipt(receipt)), + OpReceiptEnvelope::Eip7702(receipt) => Self::Eip7702(convert_standard_receipt(receipt)), + OpReceiptEnvelope::Deposit(OpDepositReceiptWithBloom { logs_bloom, receipt }) => { + let consensus_logs = receipt.inner.logs.into_iter().map(|log| log.inner).collect(); + let consensus_receipt = OpDepositReceiptWithBloom { + receipt: OpDepositReceipt { + inner: Receipt { + status: receipt.inner.status, + cumulative_gas_used: receipt.inner.cumulative_gas_used, + logs: consensus_logs, + }, + deposit_nonce: receipt.deposit_nonce, + deposit_receipt_version: receipt.deposit_receipt_version, + }, + logs_bloom, + }; + Self::Deposit(consensus_receipt) + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::string::ToString; + use serde_json::{Value, json}; + + // + #[test] + fn parse_rpc_receipt() { + let s = r#"{ + "blockHash": 
"0x9e6a0fb7e22159d943d760608cc36a0fb596d1ab3c997146f5b7c55c8c718c67", + "blockNumber": "0x6cfef89", + "contractAddress": null, + "cumulativeGasUsed": "0xfa0d", + "depositNonce": "0x8a2d11", + "effectiveGasPrice": "0x0", + "from": "0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001", + "gasUsed": "0xfa0d", + "logs": [], + "logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", + "status": "0x1", + "to": "0x4200000000000000000000000000000000000015", + "transactionHash": "0xb7c74afdeb7c89fb9de2c312f49b38cb7a850ba36e064734c5223a477e83fdc9", + "transactionIndex": "0x0", + "type": "0x7e", + "l1GasPrice": "0x3ef12787", + "l1GasUsed": "0x1177", + "l1Fee": "0x5bf1ab43d", + "l1BaseFeeScalar": "0x1", + "l1BlobBaseFee": "0x600ab8f05e64", + "l1BlobBaseFeeScalar": "0x1", + "operatorFeeScalar": "0x1", + "operatorFeeConstant": "0x1", + "daFootprintGasScalar": "0x1" + }"#; + + let receipt: OpTransactionReceipt = serde_json::from_str(s).unwrap(); + let value = serde_json::to_value(&receipt).unwrap(); + let expected_value = serde_json::from_str::(s).unwrap(); + assert_eq!(value, expected_value); + } + + #[test] + fn serialize_empty_optimism_transaction_receipt_fields_struct() { + let op_fields = OpTransactionReceiptFields::default(); + + let json = serde_json::to_value(op_fields).unwrap(); + assert_eq!(json, json!({})); + } + + #[test] + fn serialize_l1_fee_scalar() { + let op_fields = OpTransactionReceiptFields { + l1_block_info: L1BlockInfo { l1_fee_scalar: Some(0.678), ..Default::default() }, + ..Default::default() + }; + 
+ let json = serde_json::to_value(op_fields).unwrap(); + + assert_eq!(json["l1FeeScalar"], serde_json::Value::String("0.678".to_string())); + } + + #[test] + fn deserialize_l1_fee_scalar() { + let json = json!({ + "l1FeeScalar": "0.678" + }); + + let op_fields: OpTransactionReceiptFields = serde_json::from_value(json).unwrap(); + assert_eq!(op_fields.l1_block_info.l1_fee_scalar, Some(0.678f64)); + + let json = json!({ + "l1FeeScalar": Value::Null + }); + + let op_fields: OpTransactionReceiptFields = serde_json::from_value(json).unwrap(); + assert_eq!(op_fields.l1_block_info.l1_fee_scalar, None); + + let json = json!({}); + + let op_fields: OpTransactionReceiptFields = serde_json::from_value(json).unwrap(); + assert_eq!(op_fields.l1_block_info.l1_fee_scalar, None); + } +} diff --git a/rust/op-alloy/crates/rpc-types/src/transaction.rs b/rust/op-alloy/crates/rpc-types/src/transaction.rs new file mode 100644 index 00000000000..ae936895d56 --- /dev/null +++ b/rust/op-alloy/crates/rpc-types/src/transaction.rs @@ -0,0 +1,359 @@ +//! Optimism specific types related to transactions. 
+ +use alloy_consensus::{Transaction as TransactionTrait, Typed2718, transaction::Recovered}; +use alloy_eips::{Encodable2718, eip2930::AccessList, eip7702::SignedAuthorization}; +use alloy_primitives::{Address, B256, BlockHash, Bytes, ChainId, TxKind, U256}; +use alloy_serde::OtherFields; +use op_alloy_consensus::{OpTransaction, OpTxEnvelope, transaction::OpTransactionInfo}; +use serde::{Deserialize, Serialize}; + +mod request; +pub use request::OpTransactionRequest; + +/// OP Transaction type +#[derive( + Clone, Debug, PartialEq, Eq, Serialize, Deserialize, derive_more::Deref, derive_more::DerefMut, +)] +#[cfg_attr(all(any(test, feature = "arbitrary"), feature = "k256"), derive(arbitrary::Arbitrary))] +#[serde( + try_from = "tx_serde::TransactionSerdeHelper", + into = "tx_serde::TransactionSerdeHelper", + bound = "T: TransactionTrait + OpTransaction + Clone + serde::Serialize + serde::de::DeserializeOwned" +)] +pub struct Transaction { + /// Ethereum Transaction Types + #[deref] + #[deref_mut] + pub inner: alloy_rpc_types_eth::Transaction, + + /// Nonce for deposit transactions. Only present in RPC responses. + pub deposit_nonce: Option, + + /// Deposit receipt version for deposit transactions post-canyon + pub deposit_receipt_version: Option, +} + +impl Transaction { + /// Converts a consensus `tx` with an additional context `tx_info` into an RPC [`Transaction`]. 
+ pub fn from_transaction(tx: Recovered, tx_info: OpTransactionInfo) -> Self { + let base_fee = tx_info.inner.base_fee; + let effective_gas_price = if tx.is_deposit() { + // For deposits, we must always set the `gasPrice` field to 0 in rpc + // deposit tx don't have a gas price field, but serde of `Transaction` will take care of + // it + 0 + } else { + base_fee + .map(|base_fee| { + tx.effective_tip_per_gas(base_fee).unwrap_or_default() + base_fee as u128 + }) + .unwrap_or_else(|| tx.max_fee_per_gas()) + }; + + Self { + inner: alloy_rpc_types_eth::Transaction { + inner: tx, + block_hash: tx_info.inner.block_hash, + block_number: tx_info.inner.block_number, + transaction_index: tx_info.inner.index, + effective_gas_price: Some(effective_gas_price), + }, + deposit_nonce: tx_info.deposit_meta.deposit_nonce, + deposit_receipt_version: tx_info.deposit_meta.deposit_receipt_version, + } + } +} + +impl Typed2718 for Transaction { + fn ty(&self) -> u8 { + self.inner.ty() + } +} + +impl TransactionTrait for Transaction { + fn chain_id(&self) -> Option { + self.inner.chain_id() + } + + fn nonce(&self) -> u64 { + self.inner.nonce() + } + + fn gas_limit(&self) -> u64 { + self.inner.gas_limit() + } + + fn gas_price(&self) -> Option { + self.inner.gas_price() + } + + fn max_fee_per_gas(&self) -> u128 { + self.inner.max_fee_per_gas() + } + + fn max_priority_fee_per_gas(&self) -> Option { + self.inner.max_priority_fee_per_gas() + } + + fn max_fee_per_blob_gas(&self) -> Option { + self.inner.max_fee_per_blob_gas() + } + + fn priority_fee_or_price(&self) -> u128 { + self.inner.priority_fee_or_price() + } + + fn effective_gas_price(&self, base_fee: Option) -> u128 { + self.inner.effective_gas_price(base_fee) + } + + fn is_dynamic_fee(&self) -> bool { + self.inner.is_dynamic_fee() + } + + fn kind(&self) -> TxKind { + self.inner.kind() + } + + fn is_create(&self) -> bool { + self.inner.is_create() + } + + fn to(&self) -> Option
{ + self.inner.to() + } + + fn value(&self) -> U256 { + self.inner.value() + } + + fn input(&self) -> &Bytes { + self.inner.input() + } + + fn access_list(&self) -> Option<&AccessList> { + self.inner.access_list() + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + self.inner.blob_versioned_hashes() + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + self.inner.authorization_list() + } +} + +impl alloy_network_primitives::TransactionResponse + for Transaction +{ + fn tx_hash(&self) -> alloy_primitives::TxHash { + self.inner.tx_hash() + } + + fn block_hash(&self) -> Option { + self.inner.block_hash() + } + + fn block_number(&self) -> Option { + self.inner.block_number() + } + + fn transaction_index(&self) -> Option { + self.inner.transaction_index() + } + + fn from(&self) -> Address { + self.inner.from() + } +} + +/// Optimism specific transaction fields +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Serialize, Deserialize)] +#[doc(alias = "OptimismTxFields")] +#[serde(rename_all = "camelCase")] +pub struct OpTransactionFields { + /// The ETH value to mint on L2 + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub mint: Option, + /// Hash that uniquely identifies the source of the deposit. + #[serde(default, skip_serializing_if = "Option::is_none")] + pub source_hash: Option, + /// Field indicating whether the transaction is a system transaction, and therefore + /// exempt from the L2 gas limit. 
+ #[serde(default, skip_serializing_if = "Option::is_none")] + pub is_system_tx: Option, + /// Deposit receipt version for deposit transactions post-canyon + #[serde(default, skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + pub deposit_receipt_version: Option, +} + +impl From for OtherFields { + fn from(value: OpTransactionFields) -> Self { + serde_json::to_value(value).unwrap().try_into().unwrap() + } +} + +impl AsRef for Transaction { + fn as_ref(&self) -> &T { + self.inner.as_ref() + } +} + +mod tx_serde { + //! Helper module for serializing and deserializing OP [`Transaction`]. + //! + //! This is needed because we might need to deserialize the `from` field into both + //! [`alloy_consensus::transaction::Recovered::signer`] which resides in + //! [`alloy_rpc_types_eth::Transaction::inner`] and [`op_alloy_consensus::TxDeposit::from`]. + //! + //! Additionally, we need similar logic for the `gasPrice` field + use super::*; + use alloy_consensus::transaction::Recovered; + use op_alloy_consensus::OpTransaction; + use serde::de::Error; + + /// Helper struct which will be flattened into the transaction and will only contain `from` + /// field if inner [`OpTxEnvelope`] did not consume it. + #[derive(Serialize, Deserialize)] + struct OptionalFields { + #[serde(default, skip_serializing_if = "Option::is_none")] + from: Option
, + #[serde( + default, + rename = "gasPrice", + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + )] + effective_gas_price: Option, + #[serde( + default, + rename = "nonce", + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + )] + deposit_nonce: Option, + } + + #[derive(Serialize, Deserialize)] + #[serde(rename_all = "camelCase")] + pub(crate) struct TransactionSerdeHelper { + #[serde(flatten)] + inner: T, + #[serde(default)] + block_hash: Option, + #[serde(default, with = "alloy_serde::quantity::opt")] + block_number: Option, + #[serde(default, with = "alloy_serde::quantity::opt")] + transaction_index: Option, + #[serde( + default, + skip_serializing_if = "Option::is_none", + with = "alloy_serde::quantity::opt" + )] + deposit_receipt_version: Option, + + #[serde(flatten)] + other: OptionalFields, + } + + impl From> for TransactionSerdeHelper { + fn from(value: Transaction) -> Self { + let Transaction { + inner: + alloy_rpc_types_eth::Transaction { + inner, + block_hash, + block_number, + transaction_index, + effective_gas_price, + }, + deposit_receipt_version, + deposit_nonce, + } = value; + + // if inner transaction is a deposit, then don't serialize `from` directly + let from = if inner.as_deposit().is_some() { None } else { Some(inner.signer()) }; + + // if inner transaction has its own `gasPrice` don't serialize it in this struct. 
+ let effective_gas_price = effective_gas_price.filter(|_| inner.gas_price().is_none()); + + Self { + inner: inner.into_inner(), + block_hash, + block_number, + transaction_index, + deposit_receipt_version, + other: OptionalFields { from, effective_gas_price, deposit_nonce }, + } + } + } + + impl TryFrom> for Transaction { + type Error = serde_json::Error; + + fn try_from(value: TransactionSerdeHelper) -> Result { + let TransactionSerdeHelper { + inner, + block_hash, + block_number, + transaction_index, + deposit_receipt_version, + other, + } = value; + + // Try to get `from` field from inner envelope or from `MaybeFrom`, otherwise return + // error + let from = if let Some(from) = other.from { + from + } else { + inner + .as_deposit() + .map(|v| v.from) + .ok_or_else(|| serde_json::Error::custom("missing `from` field"))? + }; + + // Only serialize deposit_nonce if inner transaction is deposit to avoid duplicated keys + let deposit_nonce = other.deposit_nonce.filter(|_| inner.is_deposit()); + + let effective_gas_price = other.effective_gas_price.or_else(|| inner.gas_price()); + + Ok(Self { + inner: alloy_rpc_types_eth::Transaction { + inner: Recovered::new_unchecked(inner, from), + block_hash, + block_number, + transaction_index, + effective_gas_price, + }, + deposit_receipt_version, + deposit_nonce, + }) + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn can_deserialize_deposit() { + // cast rpc eth_getTransactionByHash + // 0xbc9329afac05556497441e2b3ee4c5d4da7ca0b2a4c212c212d0739e94a24df9 --rpc-url optimism + let rpc_tx = 
r#"{"blockHash":"0x9d86bb313ebeedf4f9f82bf8a19b426be656a365648a7c089b618771311db9f9","blockNumber":"0x798ad0b","hash":"0xbc9329afac05556497441e2b3ee4c5d4da7ca0b2a4c212c212d0739e94a24df9","transactionIndex":"0x0","type":"0x7e","nonce":"0x152ea95","input":"0x440a5e200000146b000f79c50000000000000003000000006725333f000000000141e287000000000000000000000000000000000000000000000000000000012439ee7e0000000000000000000000000000000000000000000000000000000063f363e973e96e7145ff001c81b9562cba7b6104eeb12a2bc4ab9f07c27d45cd81a986620000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985","mint":"0x0","sourceHash":"0x04e9a69416471ead93b02f0c279ab11ca0b635db5c1726a56faf22623bafde52","r":"0x0","s":"0x0","v":"0x0","yParity":"0x0","gas":"0xf4240","from":"0xdeaddeaddeaddeaddeaddeaddeaddeaddead0001","to":"0x4200000000000000000000000000000000000015","depositReceiptVersion":"0x1","value":"0x0","gasPrice":"0x0"}"#; + + let tx = serde_json::from_str::(rpc_tx).unwrap(); + + let OpTxEnvelope::Deposit(inner) = tx.as_ref() else { + panic!("Expected deposit transaction"); + }; + assert_eq!(tx.inner.inner.signer(), inner.from); + assert_eq!(tx.deposit_nonce, Some(22211221)); + assert_eq!(tx.inner.effective_gas_price, Some(0)); + + let deserialized = serde_json::to_value(&tx).unwrap(); + let expected = serde_json::from_str::(rpc_tx).unwrap(); + similar_asserts::assert_eq!(deserialized, expected); + } +} diff --git a/op-alloy/crates/rpc-types/src/transaction/request.rs b/rust/op-alloy/crates/rpc-types/src/transaction/request.rs similarity index 100% rename from op-alloy/crates/rpc-types/src/transaction/request.rs rename to rust/op-alloy/crates/rpc-types/src/transaction/request.rs diff --git a/op-alloy/release.toml b/rust/op-alloy/release.toml similarity index 100% rename from op-alloy/release.toml rename to rust/op-alloy/release.toml diff --git a/op-alloy/scripts/changelog.sh b/rust/op-alloy/scripts/changelog.sh similarity index 100% rename from op-alloy/scripts/changelog.sh rename to 
rust/op-alloy/scripts/changelog.sh diff --git a/op-alloy/scripts/check_no_std.sh b/rust/op-alloy/scripts/check_no_std.sh similarity index 100% rename from op-alloy/scripts/check_no_std.sh rename to rust/op-alloy/scripts/check_no_std.sh diff --git a/op-reth/.config/nextest.toml b/rust/op-reth/.config/nextest.toml similarity index 100% rename from op-reth/.config/nextest.toml rename to rust/op-reth/.config/nextest.toml diff --git a/op-reth/.config/zepter.yaml b/rust/op-reth/.config/zepter.yaml similarity index 100% rename from op-reth/.config/zepter.yaml rename to rust/op-reth/.config/zepter.yaml diff --git a/op-reth/Cross.toml b/rust/op-reth/Cross.toml similarity index 100% rename from op-reth/Cross.toml rename to rust/op-reth/Cross.toml diff --git a/op-reth/DockerfileOp b/rust/op-reth/DockerfileOp similarity index 89% rename from op-reth/DockerfileOp rename to rust/op-reth/DockerfileOp index ba6e6627fda..d09aa670524 100644 --- a/op-reth/DockerfileOp +++ b/rust/op-reth/DockerfileOp @@ -23,10 +23,10 @@ ENV RUSTFLAGS="$RUSTFLAGS" ARG FEATURES="" ENV FEATURES=$FEATURES -RUN cargo chef cook --profile $BUILD_PROFILE --features "$FEATURES" --recipe-path recipe.json --manifest-path /app/crates/optimism/bin/Cargo.toml +RUN cargo chef cook --profile $BUILD_PROFILE --features "$FEATURES" --recipe-path recipe.json --manifest-path /app/op-reth/bin/Cargo.toml COPY . . 
-RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --bin op-reth --manifest-path /app/crates/optimism/bin/Cargo.toml +RUN cargo build --profile $BUILD_PROFILE --features "$FEATURES" --bin op-reth --manifest-path /app/op-reth/bin/Cargo.toml RUN ls -la /app/target/$BUILD_PROFILE/op-reth RUN cp /app/target/$BUILD_PROFILE/op-reth /app/op-reth diff --git a/op-reth/DockerfileOp.cross b/rust/op-reth/DockerfileOp.cross similarity index 100% rename from op-reth/DockerfileOp.cross rename to rust/op-reth/DockerfileOp.cross diff --git a/op-reth/LICENSE-APACHE b/rust/op-reth/LICENSE-APACHE similarity index 100% rename from op-reth/LICENSE-APACHE rename to rust/op-reth/LICENSE-APACHE diff --git a/op-reth/LICENSE-MIT b/rust/op-reth/LICENSE-MIT similarity index 100% rename from op-reth/LICENSE-MIT rename to rust/op-reth/LICENSE-MIT diff --git a/op-reth/Makefile b/rust/op-reth/Makefile similarity index 99% rename from op-reth/Makefile rename to rust/op-reth/Makefile index 670005866f3..10bb631d625 100644 --- a/op-reth/Makefile +++ b/rust/op-reth/Makefile @@ -22,7 +22,7 @@ PROFILE ?= release CARGO_INSTALL_EXTRA_FLAGS ?= # The docker image name -DOCKER_IMAGE_NAME ?= ghcr.io/ethereum-optimism/op-reth +DOCKER_IMAGE_NAME ?= ghcr.io/paradigmxyz/op-reth ##@ Help diff --git a/rust/op-reth/README.md b/rust/op-reth/README.md new file mode 100644 index 00000000000..3539a7e2a06 --- /dev/null +++ b/rust/op-reth/README.md @@ -0,0 +1,140 @@ +# reth + +**Modular, contributor-friendly and blazing-fast implementation of the Ethereum protocol** + +![](./assets/reth-prod.png) + +**[Install](https://paradigmxyz.github.io/reth/installation/installation.html)** +| [User Docs](https://reth.rs) +| [Developer Docs](./docs) +| [Crate Docs](https://reth.rs/docs) + +## What is Reth? 
+ +Reth (short for Rust Ethereum, [pronunciation](https://x.com/kelvinfichter/status/1597653609411268608)) is a new Ethereum full node implementation that is focused on being user-friendly, highly modular, as well as being fast and efficient. Reth is an Execution Layer (EL) and is compatible with all Ethereum Consensus Layer (CL) implementations that support the [Engine API](https://github.com/ethereum/execution-apis/tree/a0d03086564ab1838b462befbc083f873dcf0c0f/src/engine). It is originally built and driven forward by [Paradigm](https://paradigm.xyz/), and is licensed under the Apache and MIT licenses. + +## Goals + +As a full Ethereum node, Reth allows users to connect to the Ethereum network and interact with the Ethereum blockchain. This includes sending and receiving transactions/logs/traces, as well as accessing and interacting with smart contracts. Building a successful Ethereum node requires creating a high-quality implementation that is both secure and efficient, as well as being easy to use on consumer hardware. It also requires building a strong community of contributors who can help support and improve the software. + +More concretely, our goals are: + +1. **Modularity**: Every component of Reth is built to be used as a library: well-tested, heavily documented and benchmarked. We envision that developers will import the node's crates, mix and match, and innovate on top of them. Examples of such usage include but are not limited to spinning up standalone P2P networks, talking directly to a node's database, or "unbundling" the node into the components you need. To achieve that, we are licensing Reth under the Apache/MIT permissive license. You can learn more about the project's components [here](./docs/repo/layout.md). +2. **Performance**: Reth aims to be fast, so we use Rust and the [Erigon staged-sync](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) node architecture. 
We also use our Ethereum libraries (including [Alloy](https://github.com/alloy-rs/alloy/) and [revm](https://github.com/bluealloy/revm/)) which we've battle-tested and optimized via [Foundry](https://github.com/foundry-rs/foundry/). +3. **Free for anyone to use any way they want**: Reth is free open source software, built for the community, by the community. By licensing the software under the Apache/MIT license, we want developers to use it without being bound by business licenses, or having to think about the implications of GPL-like licenses. +4. **Client Diversity**: The Ethereum protocol becomes more antifragile when no node implementation dominates. This ensures that if there's a software bug, the network does not finalize a bad block. By building a new client, we hope to contribute to Ethereum's antifragility. +5. **Support as many EVM chains as possible**: We aspire that Reth can full-sync not only Ethereum, but also other chains like Optimism, Polygon, BNB Smart Chain, and more. If you're working on any of these projects, please reach out. +6. **Configurability**: We want to solve for node operators that care about fast historical queries, but also for hobbyists who cannot operate on large hardware. We also want to support teams and individuals who want both sync from genesis and via "fast sync". We envision that Reth will be configurable enough and provide configurable "profiles" for the tradeoffs that each team faces. + +## Status + +Reth is production ready, and suitable for usage in mission-critical environments such as staking or high-uptime services. We also actively recommend professional node operators to switch to Reth in production for performance and cost reasons in use cases where high performance with great margins is required such as RPC, MEV, Indexing, Simulations, and P2P activities. + +More historical context below: + +- We released 1.0 "production-ready" stable Reth in June 2024. 
+ - Reth completed an audit with [Sigma Prime](https://sigmaprime.io/), the developers of [Lighthouse](https://github.com/sigp/lighthouse), the Rust Consensus Layer implementation. Find it [here](./audit/sigma_prime_audit_v2.pdf). + - Revm (the EVM used in Reth) underwent an audit with [Guido Vranken](https://x.com/guidovranken) (#1 [Ethereum Bug Bounty](https://ethereum.org/en/bug-bounty)). We will publish the results soon. +- We released multiple iterative beta versions, up to [beta.9](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.9) on Monday June 3, 2024, the last beta release. +- We released [beta](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) on Monday March 4, 2024, our first breaking change to the database model, providing faster query speed, smaller database footprint, and allowing "history" to be mounted on separate drives. +- We shipped iterative improvements until the last alpha release on February 28, 2024, [0.1.0-alpha.21](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.21). +- We [initially announced](https://www.paradigm.xyz/2023/06/reth-alpha) [0.1.0-alpha.1](https://github.com/paradigmxyz/reth/releases/tag/v0.1.0-alpha.1) on June 20, 2023. + +### Database compatibility + +We do not have any breaking database changes since beta.1, and we do not plan any in the near future. + +Reth [v0.2.0-beta.1](https://github.com/paradigmxyz/reth/releases/tag/v0.2.0-beta.1) includes +a [set of breaking database changes](https://github.com/paradigmxyz/reth/pull/5191) that makes it impossible to use database files produced by earlier versions. + +If you had a database produced by alpha versions of Reth, you need to drop it with `reth db drop` +(using the same arguments such as `--config` or `--datadir` that you passed to `reth node`), and resync using the same `reth node` command you've used before. + +## For Users + +See the [Reth documentation](https://reth.rs/) for instructions on how to install and run Reth. 
+ +## For Developers + +### Using reth as a library + +You can use individual crates of reth in your project. + +The crate docs can be found [here](https://reth.rs/docs/). + +For a general overview of the crates, see [Project Layout](./docs/repo/layout.md). + +### Contributing + +If you want to contribute, or follow along with contributor discussion, you can use our [main telegram](https://t.me/paradigm_reth) to chat with us about the development of Reth! + +- Our contributor guidelines can be found in [`CONTRIBUTING.md`](./CONTRIBUTING.md). +- See our [contributor docs](./docs) for more information on the project. A good starting point is [Project Layout](./docs/repo/layout.md). + +### Building and testing + + + +The Minimum Supported Rust Version (MSRV) of this project is [1.88.0](https://blog.rust-lang.org/2025/06/26/Rust-1.88.0/). + +See the docs for detailed instructions on how to [build from source](https://reth.rs/installation/source/). + +To fully test Reth, you will need to have [Geth installed](https://geth.ethereum.org/docs/getting-started/installing-geth), but it is possible to run a subset of tests without Geth. + +First, clone the repository: + +```sh +git clone https://github.com/paradigmxyz/reth +cd reth +``` + +Next, run the tests: + +```sh +cargo nextest run --workspace + +# Run the Ethereum Foundation tests +make ef-tests +``` + +We highly recommend using [`cargo nextest`](https://nexte.st/) to speed up testing. +Using `cargo test` to run tests may work fine, but this is not tested and does not support more advanced features like retries for spurious failures. + +> **Note** +> +> Some tests use random number generators to generate test data. If you want to use a deterministic seed, you can set the `SEED` environment variable. + +## Getting Help + +If you have any questions, first see if the answer to your question can be found in the [docs][book]. 
+ +If the answer is not there: + +- Join the [Telegram][tg-url] to get help, or +- Open a [discussion](https://github.com/paradigmxyz/reth/discussions/new) with your question, or +- Open an issue with [the bug](https://github.com/paradigmxyz/reth/issues/new?assignees=&labels=C-bug%2CS-needs-triage&projects=&template=bug.yml) + +## Security + +See [`SECURITY.md`](./SECURITY.md). + +## Acknowledgements + +Reth is a new implementation of the Ethereum protocol. In the process of developing the node we investigated the design decisions other nodes have made to understand what is done well, what is not, and where we can improve the status quo. + +None of this would have been possible without them, so big shoutout to the teams below: + +- [Geth](https://github.com/ethereum/go-ethereum/): We would like to express our heartfelt gratitude to the go-ethereum team for their outstanding contributions to Ethereum over the years. Their tireless efforts and dedication have helped to shape the Ethereum ecosystem and make it the vibrant and innovative community it is today. Thank you for your hard work and commitment to the project. +- [Erigon](https://github.com/ledgerwatch/erigon) (fka Turbo-Geth): Erigon pioneered the ["Staged Sync" architecture](https://erigon.substack.com/p/erigon-stage-sync-and-control-flows) that Reth is using, as well as [introduced MDBX](https://github.com/ledgerwatch/erigon/wiki/Choice-of-storage-engine) as the database of choice. We thank Erigon for pushing the state of the art research on the performance limits of Ethereum nodes. +- [Akula](https://github.com/akula-bft/akula/): Reth uses forks of the Apache versions of Akula's [MDBX Bindings](https://github.com/paradigmxyz/reth/pull/132), [FastRLP](https://github.com/paradigmxyz/reth/pull/63) and [ECIES](https://github.com/paradigmxyz/reth/pull/80). 
Given that these packages were already released under the Apache License, and they implement standardized solutions, we decided not to reimplement them to iterate faster. We thank the Akula team for their contributions to the Rust Ethereum ecosystem and for publishing these packages. + +## Warning + +The `NippyJar` and `Compact` encoding formats and their implementations are designed for storing and retrieving data internally. They are not hardened to safely read potentially malicious data. + +[book]: https://reth.rs/ +[tg-url]: https://t.me/paradigm_reth diff --git a/rust/op-reth/bin/Cargo.toml b/rust/op-reth/bin/Cargo.toml new file mode 100644 index 00000000000..0af1181b491 --- /dev/null +++ b/rust/op-reth/bin/Cargo.toml @@ -0,0 +1,64 @@ +[package] +name = "op-reth" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +exclude = [".github/"] + +[dependencies] +reth-cli-util.workspace = true +reth-optimism-cli.workspace = true +reth-optimism-rpc.workspace = true +reth-optimism-node.workspace = true +reth-optimism-chainspec.workspace = true +reth-optimism-consensus.workspace = true +reth-optimism-evm.workspace = true +reth-optimism-payload-builder.workspace = true +reth-optimism-primitives.workspace = true +reth-optimism-forks.workspace = true + +clap = { workspace = true, features = ["derive", "env"] } +tracing.workspace = true + +[lints] +workspace = true + +[features] +default = ["jemalloc", "otlp", "reth-optimism-evm/portable", "js-tracer", "keccak-cache-global", "asm-keccak"] + +otlp = ["reth-optimism-cli/otlp"] + +js-tracer = [ + "reth-optimism-node/js-tracer", +] + +jemalloc = ["reth-cli-util/jemalloc", "reth-optimism-cli/jemalloc"] +jemalloc-prof = ["jemalloc", "reth-cli-util/jemalloc-prof", "reth-optimism-cli/jemalloc-prof"] +jemalloc-symbols = ["jemalloc-prof", "reth-optimism-cli/jemalloc-symbols"] 
+tracy-allocator = ["reth-cli-util/tracy-allocator", "tracy"] +tracy = ["reth-optimism-cli/tracy"] + +asm-keccak = ["reth-optimism-cli/asm-keccak", "reth-optimism-node/asm-keccak"] +keccak-cache-global = [ + "reth-optimism-cli/keccak-cache-global", + "reth-optimism-node/keccak-cache-global", +] +dev = [ + "reth-optimism-cli/dev", + "reth-optimism-primitives/arbitrary", +] + +min-error-logs = ["tracing/release_max_level_error"] +min-warn-logs = ["tracing/release_max_level_warn"] +min-info-logs = ["tracing/release_max_level_info"] +min-debug-logs = ["tracing/release_max_level_debug"] +min-trace-logs = ["tracing/release_max_level_trace"] + +edge = ["reth-optimism-cli/edge"] + +[[bin]] +name = "op-reth" +path = "src/main.rs" diff --git a/op-reth/bin/src/lib.rs b/rust/op-reth/bin/src/lib.rs similarity index 100% rename from op-reth/bin/src/lib.rs rename to rust/op-reth/bin/src/lib.rs diff --git a/rust/op-reth/bin/src/main.rs b/rust/op-reth/bin/src/main.rs new file mode 100644 index 00000000000..bfd63af539e --- /dev/null +++ b/rust/op-reth/bin/src/main.rs @@ -0,0 +1,36 @@ +#![allow(missing_docs, rustdoc::missing_crate_level_docs)] + +use clap::Parser; +use reth_optimism_cli::{Cli, chainspec::OpChainSpecParser}; +use reth_optimism_node::{OpNode, args::RollupArgs}; +use tracing::info; + +#[global_allocator] +static ALLOC: reth_cli_util::allocator::Allocator = reth_cli_util::allocator::new_allocator(); + +#[cfg(all(feature = "jemalloc-prof", unix))] +#[unsafe(export_name = "_rjem_malloc_conf")] +static MALLOC_CONF: &[u8] = b"prof:true,prof_active:true,lg_prof_sample:19\0"; + +fn main() { + reth_cli_util::sigsegv_handler::install(); + + // Enable backtraces unless a RUST_BACKTRACE value has already been explicitly provided. 
+ if std::env::var_os("RUST_BACKTRACE").is_none() { + unsafe { + std::env::set_var("RUST_BACKTRACE", "1"); + } + } + + if let Err(err) = + Cli::::parse().run(async move |builder, rollup_args| { + info!(target: "reth::cli", "Launching node"); + let handle = + builder.node(OpNode::new(rollup_args)).launch_with_debug_capabilities().await?; + handle.node_exit_future.await + }) + { + eprintln!("Error: {err:?}"); + std::process::exit(1); + } +} diff --git a/rust/op-reth/crates/chainspec/Cargo.toml b/rust/op-reth/crates/chainspec/Cargo.toml new file mode 100644 index 00000000000..0922f102a93 --- /dev/null +++ b/rust/op-reth/crates/chainspec/Cargo.toml @@ -0,0 +1,90 @@ +[package] +name = "reth-optimism-chainspec" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "EVM chain spec implementation for optimism." + +[lints] +workspace = true + +[dependencies] +# reth +reth-chainspec.workspace = true +reth-ethereum-forks.workspace = true +reth-primitives-traits.workspace = true +reth-network-peers.workspace = true + +# op-reth +reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true + +# ethereum +alloy-chains.workspace = true +alloy-genesis.workspace = true +alloy-primitives.workspace = true +alloy-consensus.workspace = true +alloy-eips.workspace = true +alloy-hardforks.workspace = true + +# op +op-alloy-rpc-types.workspace = true + +serde = { workspace = true, optional = true } +serde_json.workspace = true + +# io +tar-no-std = { workspace = true, optional = true } +miniz_oxide = { workspace = true, features = ["with-alloc"], optional = true } + +# misc +derive_more.workspace = true +paste = { workspace = true, optional = true } +thiserror = { workspace = true, optional = true } +op-alloy-consensus.workspace = true + +[dev-dependencies] +reth-chainspec = { workspace = true, features = 
["test-utils"] } +alloy-op-hardforks.workspace = true + +[features] +default = ["std"] +superchain-configs = ["miniz_oxide", "paste", "tar-no-std", "thiserror", "thiserror", "dep:serde"] +std = [ + "alloy-chains/std", + "alloy-genesis/std", + "alloy-primitives/std", + "alloy-eips/std", + "op-alloy-rpc-types/std", + "reth-chainspec/std", + "reth-ethereum-forks/std", + "reth-primitives-traits/std", + "reth-optimism-forks/std", + "reth-optimism-primitives/std", + "alloy-consensus/std", + "derive_more/std", + "reth-network-peers/std", + "serde_json/std", + "serde?/std", + "miniz_oxide?/std", + "thiserror?/std", + "op-alloy-consensus/std", +] +serde = [ + "alloy-chains/serde", + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-hardforks/serde", + "alloy-primitives/serde", + "miniz_oxide?/serde", + "op-alloy-rpc-types/serde", + "reth-ethereum-forks/serde", + "reth-optimism-forks/serde", + "reth-optimism-primitives/serde", + "reth-primitives-traits/serde", + "op-alloy-consensus/serde", + "alloy-op-hardforks/serde", +] diff --git a/op-reth/crates/chainspec/res/fetch_superchain_config.sh b/rust/op-reth/crates/chainspec/res/fetch_superchain_config.sh similarity index 100% rename from op-reth/crates/chainspec/res/fetch_superchain_config.sh rename to rust/op-reth/crates/chainspec/res/fetch_superchain_config.sh diff --git a/op-reth/crates/chainspec/res/genesis/base.json b/rust/op-reth/crates/chainspec/res/genesis/base.json similarity index 100% rename from op-reth/crates/chainspec/res/genesis/base.json rename to rust/op-reth/crates/chainspec/res/genesis/base.json diff --git a/op-reth/crates/chainspec/res/genesis/dev.json b/rust/op-reth/crates/chainspec/res/genesis/dev.json similarity index 100% rename from op-reth/crates/chainspec/res/genesis/dev.json rename to rust/op-reth/crates/chainspec/res/genesis/dev.json diff --git a/op-reth/crates/chainspec/res/genesis/optimism.json b/rust/op-reth/crates/chainspec/res/genesis/optimism.json similarity index 100% rename from 
op-reth/crates/chainspec/res/genesis/optimism.json rename to rust/op-reth/crates/chainspec/res/genesis/optimism.json diff --git a/op-reth/crates/chainspec/res/genesis/sepolia_base.json b/rust/op-reth/crates/chainspec/res/genesis/sepolia_base.json similarity index 100% rename from op-reth/crates/chainspec/res/genesis/sepolia_base.json rename to rust/op-reth/crates/chainspec/res/genesis/sepolia_base.json diff --git a/op-reth/crates/chainspec/res/genesis/sepolia_op.json b/rust/op-reth/crates/chainspec/res/genesis/sepolia_op.json similarity index 100% rename from op-reth/crates/chainspec/res/genesis/sepolia_op.json rename to rust/op-reth/crates/chainspec/res/genesis/sepolia_op.json diff --git a/op-reth/crates/chainspec/res/superchain-configs.tar b/rust/op-reth/crates/chainspec/res/superchain-configs.tar similarity index 100% rename from op-reth/crates/chainspec/res/superchain-configs.tar rename to rust/op-reth/crates/chainspec/res/superchain-configs.tar diff --git a/op-reth/crates/chainspec/res/superchain_registry_commit b/rust/op-reth/crates/chainspec/res/superchain_registry_commit similarity index 100% rename from op-reth/crates/chainspec/res/superchain_registry_commit rename to rust/op-reth/crates/chainspec/res/superchain_registry_commit diff --git a/op-reth/crates/chainspec/src/base.rs b/rust/op-reth/crates/chainspec/src/base.rs similarity index 89% rename from op-reth/crates/chainspec/src/base.rs rename to rust/op-reth/crates/chainspec/src/base.rs index c93d1c4b2ab..7505e9f590c 100644 --- a/op-reth/crates/chainspec/src/base.rs +++ b/rust/op-reth/crates/chainspec/src/base.rs @@ -3,13 +3,13 @@ use alloc::{sync::Arc, vec}; use alloy_chains::Chain; -use alloy_primitives::{b256, U256}; +use alloy_primitives::{U256, b256}; use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_ethereum_forks::{EthereumHardfork, Hardfork}; -use reth_optimism_forks::{OpHardfork, BASE_MAINNET_HARDFORKS}; +use reth_optimism_forks::{BASE_MAINNET_HARDFORKS, OpHardfork}; 
use reth_primitives_traits::SealedHeader; -use crate::{make_op_genesis_header, LazyLock, OpChainSpec}; +use crate::{LazyLock, OpChainSpec, make_op_genesis_header}; /// The Base mainnet spec pub static BASE_MAINNET: LazyLock> = LazyLock::new(|| { diff --git a/rust/op-reth/crates/chainspec/src/base_sepolia.rs b/rust/op-reth/crates/chainspec/src/base_sepolia.rs new file mode 100644 index 00000000000..62984e67514 --- /dev/null +++ b/rust/op-reth/crates/chainspec/src/base_sepolia.rs @@ -0,0 +1,41 @@ +//! Chain specification for the Base Sepolia testnet network. + +use alloc::{sync::Arc, vec}; + +use alloy_chains::Chain; +use alloy_primitives::{U256, b256}; +use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec, Hardfork}; +use reth_ethereum_forks::EthereumHardfork; +use reth_optimism_forks::{BASE_SEPOLIA_HARDFORKS, OpHardfork}; +use reth_primitives_traits::SealedHeader; + +use crate::{LazyLock, OpChainSpec, make_op_genesis_header}; + +/// The Base Sepolia spec +pub static BASE_SEPOLIA: LazyLock> = LazyLock::new(|| { + let genesis = serde_json::from_str(include_str!("../res/genesis/sepolia_base.json")) + .expect("Can't deserialize Base Sepolia genesis json"); + let hardforks = BASE_SEPOLIA_HARDFORKS.clone(); + OpChainSpec { + inner: ChainSpec { + chain: Chain::base_sepolia(), + genesis_header: SealedHeader::new( + make_op_genesis_header(&genesis, &hardforks), + b256!("0x0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4"), + ), + genesis, + paris_block_and_final_difficulty: Some((0, U256::from(0))), + hardforks, + base_fee_params: BaseFeeParamsKind::Variable( + vec![ + (EthereumHardfork::London.boxed(), BaseFeeParams::base_sepolia()), + (OpHardfork::Canyon.boxed(), BaseFeeParams::base_sepolia_canyon()), + ] + .into(), + ), + prune_delete_limit: 10000, + ..Default::default() + }, + } + .into() +}); diff --git a/op-reth/crates/chainspec/src/basefee.rs b/rust/op-reth/crates/chainspec/src/basefee.rs similarity index 97% rename from 
op-reth/crates/chainspec/src/basefee.rs rename to rust/op-reth/crates/chainspec/src/basefee.rs index 3c0dcdfd88d..83ffca8ec03 100644 --- a/op-reth/crates/chainspec/src/basefee.rs +++ b/rust/op-reth/crates/chainspec/src/basefee.rs @@ -4,7 +4,7 @@ use core::cmp::max; use alloy_consensus::BlockHeader; use alloy_eips::calc_next_block_base_fee; -use op_alloy_consensus::{decode_holocene_extra_data, decode_jovian_extra_data, EIP1559ParamError}; +use op_alloy_consensus::{EIP1559ParamError, decode_holocene_extra_data, decode_jovian_extra_data}; use reth_chainspec::{BaseFeeParams, EthChainSpec}; use reth_optimism_forks::OpHardforks; @@ -82,7 +82,7 @@ mod tests { use reth_chainspec::{ChainSpec, ForkCondition, Hardfork}; use reth_optimism_forks::OpHardfork; - use crate::{OpChainSpec, BASE_SEPOLIA}; + use crate::{BASE_SEPOLIA, OpChainSpec}; use super::*; diff --git a/op-reth/crates/chainspec/src/constants.rs b/rust/op-reth/crates/chainspec/src/constants.rs similarity index 100% rename from op-reth/crates/chainspec/src/constants.rs rename to rust/op-reth/crates/chainspec/src/constants.rs diff --git a/op-reth/crates/chainspec/src/dev.rs b/rust/op-reth/crates/chainspec/src/dev.rs similarity index 95% rename from op-reth/crates/chainspec/src/dev.rs rename to rust/op-reth/crates/chainspec/src/dev.rs index ac8eaad24a8..5abf93aa5d2 100644 --- a/op-reth/crates/chainspec/src/dev.rs +++ b/rust/op-reth/crates/chainspec/src/dev.rs @@ -8,7 +8,7 @@ use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec}; use reth_optimism_forks::DEV_HARDFORKS; use reth_primitives_traits::SealedHeader; -use crate::{make_op_genesis_header, LazyLock, OpChainSpec}; +use crate::{LazyLock, OpChainSpec, make_op_genesis_header}; /// OP dev testnet specification /// diff --git a/rust/op-reth/crates/chainspec/src/lib.rs b/rust/op-reth/crates/chainspec/src/lib.rs new file mode 100644 index 00000000000..cfa40ff5732 --- /dev/null +++ b/rust/op-reth/crates/chainspec/src/lib.rs @@ -0,0 +1,1344 @@ +//! 
OP-Reth chain specs. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(not(feature = "std"), no_std)] + +// About the provided chain specs from `res/superchain-configs.tar`: +// The provided `OpChainSpec` structs are built from config files read from +// `superchain-configs.tar`. This `superchain-configs.tar` file contains the chain configs and +// genesis files for all chains. It is created by the `fetch_superchain_config.sh` script in +// the `res` directory. Where all configs are where initial loaded from +// . See the script for more details. +// +// The file is a tar archive containing the following files: +// - `genesis//.json.zz`: The genesis file compressed with deflate. It +// contains the initial accounts, etc. +// - `configs//.json`: The chain metadata file containing the chain id, +// hard forks, etc. +// +// For example, for `UNICHAIN_MAINNET`, the `genesis/mainnet/unichain.json.zz` and +// `configs/mainnet/base.json` is loaded and combined into the `OpChainSpec` struct. +// See `read_superchain_genesis` in `configs.rs` for more details. +// +// To update the chain specs, run the `fetch_superchain_config.sh` script in the `res` directory. +// This will fetch the latest chain configs from the superchain registry and create a new +// `superchain-configs.tar` file. See the script for more details. 
+ +extern crate alloc; + +mod base; +mod base_sepolia; +mod basefee; + +pub mod constants; +mod dev; +mod op; +mod op_sepolia; + +#[cfg(feature = "superchain-configs")] +mod superchain; +#[cfg(feature = "superchain-configs")] +pub use superchain::*; + +pub use base::BASE_MAINNET; +pub use base_sepolia::BASE_SEPOLIA; +pub use basefee::*; +pub use dev::OP_DEV; +pub use op::OP_MAINNET; +pub use op_sepolia::OP_SEPOLIA; + +/// Re-export for convenience +pub use reth_optimism_forks::*; + +use alloc::{boxed::Box, vec, vec::Vec}; +use alloy_chains::Chain; +use alloy_consensus::{BlockHeader, Header, proofs::storage_root_unhashed}; +use alloy_eips::eip7840::BlobParams; +use alloy_genesis::Genesis; +use alloy_hardforks::Hardfork; +use alloy_primitives::{B256, U256}; +use derive_more::{Constructor, Deref, From, Into}; +use reth_chainspec::{ + BaseFeeParams, BaseFeeParamsKind, ChainSpec, ChainSpecBuilder, DepositContract, + DisplayHardforks, EthChainSpec, EthereumHardforks, ForkFilter, ForkId, Hardforks, Head, +}; +use reth_ethereum_forks::{ChainHardforks, EthereumHardfork, ForkCondition}; +use reth_network_peers::NodeRecord; +use reth_optimism_primitives::L2_TO_L1_MESSAGE_PASSER_ADDRESS; +use reth_primitives_traits::{SealedHeader, sync::LazyLock}; + +/// Chain spec builder for a OP stack chain. +#[derive(Debug, Default, From)] +pub struct OpChainSpecBuilder { + /// [`ChainSpecBuilder`] + inner: ChainSpecBuilder, +} + +impl OpChainSpecBuilder { + /// Construct a new builder from the base mainnet chain spec. + pub fn base_mainnet() -> Self { + let mut inner = ChainSpecBuilder::default() + .chain(BASE_MAINNET.chain) + .genesis(BASE_MAINNET.genesis.clone()); + let forks = BASE_MAINNET.hardforks.clone(); + inner = inner.with_forks(forks); + + Self { inner } + } + + /// Construct a new builder from the optimism mainnet chain spec. 
+ pub fn optimism_mainnet() -> Self { + let mut inner = + ChainSpecBuilder::default().chain(OP_MAINNET.chain).genesis(OP_MAINNET.genesis.clone()); + let forks = OP_MAINNET.hardforks.clone(); + inner = inner.with_forks(forks); + + Self { inner } + } +} + +impl OpChainSpecBuilder { + /// Set the chain ID + pub fn chain(mut self, chain: Chain) -> Self { + self.inner = self.inner.chain(chain); + self + } + + /// Set the genesis block. + pub fn genesis(mut self, genesis: Genesis) -> Self { + self.inner = self.inner.genesis(genesis); + self + } + + /// Add the given fork with the given activation condition to the spec. + pub fn with_fork(mut self, fork: H, condition: ForkCondition) -> Self { + self.inner = self.inner.with_fork(fork, condition); + self + } + + /// Add the given forks with the given activation condition to the spec. + pub fn with_forks(mut self, forks: ChainHardforks) -> Self { + self.inner = self.inner.with_forks(forks); + self + } + + /// Remove the given fork from the spec. + pub fn without_fork(mut self, fork: OpHardfork) -> Self { + self.inner = self.inner.without_fork(fork); + self + } + + /// Enable Bedrock at genesis + pub fn bedrock_activated(mut self) -> Self { + self.inner = self.inner.paris_activated(); + self.inner = self.inner.with_fork(OpHardfork::Bedrock, ForkCondition::Block(0)); + self + } + + /// Enable Regolith at genesis + pub fn regolith_activated(mut self) -> Self { + self = self.bedrock_activated(); + self.inner = self.inner.with_fork(OpHardfork::Regolith, ForkCondition::Timestamp(0)); + self + } + + /// Enable Canyon at genesis + pub fn canyon_activated(mut self) -> Self { + self = self.regolith_activated(); + // Canyon also activates changes from L1's Shanghai hardfork + self.inner = self.inner.with_fork(EthereumHardfork::Shanghai, ForkCondition::Timestamp(0)); + self.inner = self.inner.with_fork(OpHardfork::Canyon, ForkCondition::Timestamp(0)); + self + } + + /// Enable Ecotone at genesis + pub fn ecotone_activated(mut self) -> 
Self { + self = self.canyon_activated(); + self.inner = self.inner.with_fork(EthereumHardfork::Cancun, ForkCondition::Timestamp(0)); + self.inner = self.inner.with_fork(OpHardfork::Ecotone, ForkCondition::Timestamp(0)); + self + } + + /// Enable Fjord at genesis + pub fn fjord_activated(mut self) -> Self { + self = self.ecotone_activated(); + self.inner = self.inner.with_fork(OpHardfork::Fjord, ForkCondition::Timestamp(0)); + self + } + + /// Enable Granite at genesis + pub fn granite_activated(mut self) -> Self { + self = self.fjord_activated(); + self.inner = self.inner.with_fork(OpHardfork::Granite, ForkCondition::Timestamp(0)); + self + } + + /// Enable Holocene at genesis + pub fn holocene_activated(mut self) -> Self { + self = self.granite_activated(); + self.inner = self.inner.with_fork(OpHardfork::Holocene, ForkCondition::Timestamp(0)); + self + } + + /// Enable Isthmus at genesis + pub fn isthmus_activated(mut self) -> Self { + self = self.holocene_activated(); + self.inner = self.inner.with_fork(OpHardfork::Isthmus, ForkCondition::Timestamp(0)); + self + } + + /// Enable Jovian at genesis + pub fn jovian_activated(mut self) -> Self { + self = self.isthmus_activated(); + self.inner = self.inner.with_fork(OpHardfork::Jovian, ForkCondition::Timestamp(0)); + self + } + + /// Enable Interop at genesis + pub fn interop_activated(mut self) -> Self { + self = self.jovian_activated(); + self.inner = self.inner.with_fork(OpHardfork::Interop, ForkCondition::Timestamp(0)); + self + } + + /// Build the resulting [`OpChainSpec`]. + /// + /// # Panics + /// + /// This function panics if the chain ID and genesis is not set ([`Self::chain`] and + /// [`Self::genesis`]) + pub fn build(self) -> OpChainSpec { + let mut inner = self.inner.build(); + inner.genesis_header = + SealedHeader::seal_slow(make_op_genesis_header(&inner.genesis, &inner.hardforks)); + + OpChainSpec { inner } + } +} + +/// OP stack chain spec type. 
+#[derive(Debug, Clone, Deref, Into, Constructor, PartialEq, Eq)] +pub struct OpChainSpec { + /// [`ChainSpec`]. + pub inner: ChainSpec, +} + +impl OpChainSpec { + /// Converts the given [`Genesis`] into a [`OpChainSpec`]. + pub fn from_genesis(genesis: Genesis) -> Self { + genesis.into() + } +} + +impl EthChainSpec for OpChainSpec { + type Header = Header; + + fn chain(&self) -> Chain { + self.inner.chain() + } + + fn base_fee_params_at_timestamp(&self, timestamp: u64) -> BaseFeeParams { + self.inner.base_fee_params_at_timestamp(timestamp) + } + + fn blob_params_at_timestamp(&self, timestamp: u64) -> Option { + self.inner.blob_params_at_timestamp(timestamp) + } + + fn deposit_contract(&self) -> Option<&DepositContract> { + self.inner.deposit_contract() + } + + fn genesis_hash(&self) -> B256 { + self.inner.genesis_hash() + } + + fn prune_delete_limit(&self) -> usize { + self.inner.prune_delete_limit() + } + + fn display_hardforks(&self) -> Box { + // filter only op hardforks + let op_forks = self.inner.hardforks.forks_iter().filter(|(fork, _)| { + !EthereumHardfork::VARIANTS.iter().any(|h| h.name() == (*fork).name()) + }); + + Box::new(DisplayHardforks::new(op_forks)) + } + + fn genesis_header(&self) -> &Self::Header { + self.inner.genesis_header() + } + + fn genesis(&self) -> &Genesis { + self.inner.genesis() + } + + fn bootnodes(&self) -> Option> { + self.inner.bootnodes() + } + + fn is_optimism(&self) -> bool { + true + } + + fn final_paris_total_difficulty(&self) -> Option { + self.inner.final_paris_total_difficulty() + } + + fn next_block_base_fee(&self, parent: &Header, target_timestamp: u64) -> Option { + if self.is_jovian_active_at_timestamp(parent.timestamp()) { + compute_jovian_base_fee(self, parent, target_timestamp).ok() + } else if self.is_holocene_active_at_timestamp(parent.timestamp()) { + decode_holocene_base_fee(self, parent, target_timestamp).ok() + } else { + self.inner.next_block_base_fee(parent, target_timestamp) + } + } +} + +impl Hardforks 
for OpChainSpec { + fn fork(&self, fork: H) -> ForkCondition { + self.inner.fork(fork) + } + + fn forks_iter(&self) -> impl Iterator { + self.inner.forks_iter() + } + + fn fork_id(&self, head: &Head) -> ForkId { + self.inner.fork_id(head) + } + + fn latest_fork_id(&self) -> ForkId { + self.inner.latest_fork_id() + } + + fn fork_filter(&self, head: Head) -> ForkFilter { + self.inner.fork_filter(head) + } +} + +impl EthereumHardforks for OpChainSpec { + fn ethereum_fork_activation(&self, fork: EthereumHardfork) -> ForkCondition { + self.fork(fork) + } +} + +impl OpHardforks for OpChainSpec { + fn op_fork_activation(&self, fork: OpHardfork) -> ForkCondition { + self.fork(fork) + } +} + +impl From for OpChainSpec { + fn from(genesis: Genesis) -> Self { + use reth_optimism_forks::OpHardfork; + let optimism_genesis_info = OpGenesisInfo::extract_from(&genesis); + let genesis_info = + optimism_genesis_info.optimism_chain_info.genesis_info.unwrap_or_default(); + + // Block-based hardforks + let hardfork_opts = [ + (EthereumHardfork::Frontier.boxed(), Some(0)), + (EthereumHardfork::Homestead.boxed(), genesis.config.homestead_block), + (EthereumHardfork::Tangerine.boxed(), genesis.config.eip150_block), + (EthereumHardfork::SpuriousDragon.boxed(), genesis.config.eip155_block), + (EthereumHardfork::Byzantium.boxed(), genesis.config.byzantium_block), + (EthereumHardfork::Constantinople.boxed(), genesis.config.constantinople_block), + (EthereumHardfork::Petersburg.boxed(), genesis.config.petersburg_block), + (EthereumHardfork::Istanbul.boxed(), genesis.config.istanbul_block), + (EthereumHardfork::MuirGlacier.boxed(), genesis.config.muir_glacier_block), + (EthereumHardfork::Berlin.boxed(), genesis.config.berlin_block), + (EthereumHardfork::London.boxed(), genesis.config.london_block), + (EthereumHardfork::ArrowGlacier.boxed(), genesis.config.arrow_glacier_block), + (EthereumHardfork::GrayGlacier.boxed(), genesis.config.gray_glacier_block), + (OpHardfork::Bedrock.boxed(), 
genesis_info.bedrock_block), + ]; + let mut block_hardforks = hardfork_opts + .into_iter() + .filter_map(|(hardfork, opt)| opt.map(|block| (hardfork, ForkCondition::Block(block)))) + .collect::>(); + + // We set the paris hardfork for OP networks to zero + block_hardforks.push(( + EthereumHardfork::Paris.boxed(), + ForkCondition::TTD { + activation_block_number: 0, + total_difficulty: U256::ZERO, + fork_block: genesis.config.merge_netsplit_block, + }, + )); + + // Time-based hardforks + let time_hardfork_opts = [ + // L1 + // we need to map the L1 hardforks to the activation timestamps of the correspondong op + // hardforks + (EthereumHardfork::Shanghai.boxed(), genesis_info.canyon_time), + (EthereumHardfork::Cancun.boxed(), genesis_info.ecotone_time), + (EthereumHardfork::Prague.boxed(), genesis_info.isthmus_time), + // OP + (OpHardfork::Regolith.boxed(), genesis_info.regolith_time), + (OpHardfork::Canyon.boxed(), genesis_info.canyon_time), + (OpHardfork::Ecotone.boxed(), genesis_info.ecotone_time), + (OpHardfork::Fjord.boxed(), genesis_info.fjord_time), + (OpHardfork::Granite.boxed(), genesis_info.granite_time), + (OpHardfork::Holocene.boxed(), genesis_info.holocene_time), + (OpHardfork::Isthmus.boxed(), genesis_info.isthmus_time), + (OpHardfork::Jovian.boxed(), genesis_info.jovian_time), + (OpHardfork::Interop.boxed(), genesis_info.interop_time), + ]; + + let mut time_hardforks = time_hardfork_opts + .into_iter() + .filter_map(|(hardfork, opt)| { + opt.map(|time| (hardfork, ForkCondition::Timestamp(time))) + }) + .collect::>(); + + block_hardforks.append(&mut time_hardforks); + + // Ordered Hardforks + let mainnet_hardforks = OP_MAINNET_HARDFORKS.clone(); + let mainnet_order = mainnet_hardforks.forks_iter(); + + let mut ordered_hardforks = Vec::with_capacity(block_hardforks.len()); + for (hardfork, _) in mainnet_order { + if let Some(pos) = block_hardforks.iter().position(|(e, _)| **e == *hardfork) { + ordered_hardforks.push(block_hardforks.remove(pos)); + } + } 
+ + // append the remaining unknown hardforks to ensure we don't filter any out + ordered_hardforks.append(&mut block_hardforks); + + let hardforks = ChainHardforks::new(ordered_hardforks); + let genesis_header = SealedHeader::seal_slow(make_op_genesis_header(&genesis, &hardforks)); + + Self { + inner: ChainSpec { + chain: genesis.config.chain_id.into(), + genesis_header, + genesis, + hardforks, + // We assume no OP network merges, and set the paris block and total difficulty to + // zero + paris_block_and_final_difficulty: Some((0, U256::ZERO)), + base_fee_params: optimism_genesis_info.base_fee_params, + ..Default::default() + }, + } + } +} + +impl From for OpChainSpec { + fn from(value: ChainSpec) -> Self { + Self { inner: value } + } +} + +#[derive(Default, Debug)] +struct OpGenesisInfo { + optimism_chain_info: op_alloy_rpc_types::OpChainInfo, + base_fee_params: BaseFeeParamsKind, +} + +impl OpGenesisInfo { + fn extract_from(genesis: &Genesis) -> Self { + let mut info = Self { + optimism_chain_info: op_alloy_rpc_types::OpChainInfo::extract_from( + &genesis.config.extra_fields, + ) + .unwrap_or_default(), + ..Default::default() + }; + if let Some(optimism_base_fee_info) = &info.optimism_chain_info.base_fee_info && + let (Some(elasticity), Some(denominator)) = ( + optimism_base_fee_info.eip1559_elasticity, + optimism_base_fee_info.eip1559_denominator, + ) + { + let base_fee_params = optimism_base_fee_info.eip1559_denominator_canyon.map_or_else( + || BaseFeeParams::new(denominator as u128, elasticity as u128).into(), + |canyon_denominator| { + BaseFeeParamsKind::Variable( + vec![ + ( + EthereumHardfork::London.boxed(), + BaseFeeParams::new(denominator as u128, elasticity as u128), + ), + ( + OpHardfork::Canyon.boxed(), + BaseFeeParams::new(canyon_denominator as u128, elasticity as u128), + ), + ] + .into(), + ) + }, + ); + + info.base_fee_params = base_fee_params; + } + + info + } +} + +/// Helper method building a [`Header`] given [`Genesis`] and 
[`ChainHardforks`]. +pub fn make_op_genesis_header(genesis: &Genesis, hardforks: &ChainHardforks) -> Header { + let mut header = reth_chainspec::make_genesis_header(genesis, hardforks); + + // If Isthmus is active, overwrite the withdrawals root with the storage root of predeploy + // `L2ToL1MessagePasser.sol` + if hardforks.fork(OpHardfork::Isthmus).active_at_timestamp(header.timestamp) && + let Some(predeploy) = genesis.alloc.get(&L2_TO_L1_MESSAGE_PASSER_ADDRESS) && + let Some(storage) = &predeploy.storage + { + header.withdrawals_root = Some(storage_root_unhashed(storage.iter().filter_map( + |(k, v)| { + if v.is_zero() { None } else { Some((*k, (*v).into())) } + }, + ))); + } + + header +} + +#[cfg(test)] +mod tests { + use alloc::string::{String, ToString}; + use alloy_genesis::{ChainConfig, Genesis}; + use alloy_op_hardforks::{ + BASE_MAINNET_JOVIAN_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, + OP_SEPOLIA_JOVIAN_TIMESTAMP, + }; + use alloy_primitives::{b256, hex}; + use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, test_fork_ids}; + use reth_ethereum_forks::{EthereumHardfork, ForkCondition, ForkHash, ForkId, Head}; + use reth_optimism_forks::{OpHardfork, OpHardforks}; + + use crate::*; + + #[test] + fn test_storage_root_consistency() { + use alloy_primitives::{B256, U256}; + use core::str::FromStr; + + let k1 = + B256::from_str("0x0000000000000000000000000000000000000000000000000000000000000001") + .unwrap(); + let v1 = + U256::from_str("0x0000000000000000000000000000000000000000000000000000000000000000") + .unwrap(); + let k2 = + B256::from_str("0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc") + .unwrap(); + let v2 = + U256::from_str("0x000000000000000000000000c0d3c0d3c0d3c0d3c0d3c0d3c0d3c0d3c0d30016") + .unwrap(); + let k3 = + B256::from_str("0xb53127684a568b3173ae13b9f8a6016e243e63b6e8ee1178d6a717850b5d6103") + .unwrap(); + let v3 = + 
U256::from_str("0x0000000000000000000000004200000000000000000000000000000000000018") + .unwrap(); + let origin_root = + B256::from_str("0x5d5ba3a8093ede3901ad7a569edfb7b9aecafa54730ba0bf069147cbcc00e345") + .unwrap(); + let expected_root = + B256::from_str("0x8ed4baae3a927be3dea54996b4d5899f8c01e7594bf50b17dc1e741388ce3d12") + .unwrap(); + + let storage_origin = vec![(k1, v1), (k2, v2), (k3, v3)]; + let storage_fix = vec![(k2, v2), (k3, v3)]; + let root_origin = storage_root_unhashed(storage_origin); + let root_fix = storage_root_unhashed(storage_fix); + assert_ne!(root_origin, root_fix); + assert_eq!(root_origin, origin_root); + assert_eq!(root_fix, expected_root); + } + + #[test] + fn base_mainnet_forkids() { + let mut base_mainnet = OpChainSpecBuilder::base_mainnet().build(); + base_mainnet.inner.genesis_header.set_hash(BASE_MAINNET.genesis_hash()); + test_fork_ids( + &BASE_MAINNET, + &[ + ( + Head { number: 0, ..Default::default() }, + ForkId { hash: ForkHash([0x67, 0xda, 0x02, 0x60]), next: 1704992401 }, + ), + ( + Head { number: 0, timestamp: 1704992400, ..Default::default() }, + ForkId { hash: ForkHash([0x67, 0xda, 0x02, 0x60]), next: 1704992401 }, + ), + ( + Head { number: 0, timestamp: 1704992401, ..Default::default() }, + ForkId { hash: ForkHash([0x3c, 0x28, 0x3c, 0xb3]), next: 1710374401 }, + ), + ( + Head { number: 0, timestamp: 1710374400, ..Default::default() }, + ForkId { hash: ForkHash([0x3c, 0x28, 0x3c, 0xb3]), next: 1710374401 }, + ), + ( + Head { number: 0, timestamp: 1710374401, ..Default::default() }, + ForkId { hash: ForkHash([0x51, 0xcc, 0x98, 0xb3]), next: 1720627201 }, + ), + ( + Head { number: 0, timestamp: 1720627200, ..Default::default() }, + ForkId { hash: ForkHash([0x51, 0xcc, 0x98, 0xb3]), next: 1720627201 }, + ), + ( + Head { number: 0, timestamp: 1720627201, ..Default::default() }, + ForkId { hash: ForkHash([0xe4, 0x01, 0x0e, 0xb9]), next: 1726070401 }, + ), + ( + Head { number: 0, timestamp: 1726070401, ..Default::default() }, + 
ForkId { hash: ForkHash([0xbc, 0x38, 0xf9, 0xca]), next: 1736445601 }, + ), + ( + Head { number: 0, timestamp: 1736445601, ..Default::default() }, + ForkId { hash: ForkHash([0x3a, 0x2a, 0xf1, 0x83]), next: 1746806401 }, + ), + // Isthmus + ( + Head { number: 0, timestamp: 1746806401, ..Default::default() }, + ForkId { + hash: ForkHash([0x86, 0x72, 0x8b, 0x4e]), + next: BASE_MAINNET_JOVIAN_TIMESTAMP, + }, + ), + // Jovian + ( + Head { + number: 0, + timestamp: BASE_MAINNET_JOVIAN_TIMESTAMP, + ..Default::default() + }, + BASE_MAINNET.hardfork_fork_id(OpHardfork::Jovian).unwrap(), + ), + ], + ); + } + + #[test] + fn op_sepolia_forkids() { + test_fork_ids( + &OP_SEPOLIA, + &[ + ( + Head { number: 0, ..Default::default() }, + ForkId { hash: ForkHash([0x67, 0xa4, 0x03, 0x28]), next: 1699981200 }, + ), + ( + Head { number: 0, timestamp: 1699981199, ..Default::default() }, + ForkId { hash: ForkHash([0x67, 0xa4, 0x03, 0x28]), next: 1699981200 }, + ), + ( + Head { number: 0, timestamp: 1699981200, ..Default::default() }, + ForkId { hash: ForkHash([0xa4, 0x8d, 0x6a, 0x00]), next: 1708534800 }, + ), + ( + Head { number: 0, timestamp: 1708534799, ..Default::default() }, + ForkId { hash: ForkHash([0xa4, 0x8d, 0x6a, 0x00]), next: 1708534800 }, + ), + ( + Head { number: 0, timestamp: 1708534800, ..Default::default() }, + ForkId { hash: ForkHash([0xcc, 0x17, 0xc7, 0xeb]), next: 1716998400 }, + ), + ( + Head { number: 0, timestamp: 1716998399, ..Default::default() }, + ForkId { hash: ForkHash([0xcc, 0x17, 0xc7, 0xeb]), next: 1716998400 }, + ), + ( + Head { number: 0, timestamp: 1716998400, ..Default::default() }, + ForkId { hash: ForkHash([0x54, 0x0a, 0x8c, 0x5d]), next: 1723478400 }, + ), + ( + Head { number: 0, timestamp: 1723478399, ..Default::default() }, + ForkId { hash: ForkHash([0x54, 0x0a, 0x8c, 0x5d]), next: 1723478400 }, + ), + ( + Head { number: 0, timestamp: 1723478400, ..Default::default() }, + ForkId { hash: ForkHash([0x75, 0xde, 0xa4, 0x1e]), next: 1732633200 }, + ), 
+ ( + Head { number: 0, timestamp: 1732633200, ..Default::default() }, + ForkId { hash: ForkHash([0x4a, 0x1c, 0x79, 0x2e]), next: 1744905600 }, + ), + // Isthmus + ( + Head { number: 0, timestamp: 1744905600, ..Default::default() }, + ForkId { + hash: ForkHash([0x6c, 0x62, 0x5e, 0xe1]), + next: OP_SEPOLIA_JOVIAN_TIMESTAMP, + }, + ), + // Jovian + ( + Head { + number: 0, + timestamp: OP_SEPOLIA_JOVIAN_TIMESTAMP, + ..Default::default() + }, + OP_SEPOLIA.hardfork_fork_id(OpHardfork::Jovian).unwrap(), + ), + ], + ); + } + + #[test] + fn op_mainnet_forkids() { + let mut op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); + // for OP mainnet we have to do this because the genesis header can't be properly computed + // from the genesis.json file + op_mainnet.inner.genesis_header.set_hash(OP_MAINNET.genesis_hash()); + test_fork_ids( + &op_mainnet, + &[ + ( + Head { number: 0, ..Default::default() }, + ForkId { hash: ForkHash([0xca, 0xf5, 0x17, 0xed]), next: 3950000 }, + ), + // London + ( + Head { number: 105235063, ..Default::default() }, + ForkId { hash: ForkHash([0xe3, 0x39, 0x8d, 0x7c]), next: 1704992401 }, + ), + // Bedrock + ( + Head { number: 105235063, ..Default::default() }, + ForkId { hash: ForkHash([0xe3, 0x39, 0x8d, 0x7c]), next: 1704992401 }, + ), + // Shanghai + ( + Head { number: 105235063, timestamp: 1704992401, ..Default::default() }, + ForkId { hash: ForkHash([0xbd, 0xd4, 0xfd, 0xb2]), next: 1710374401 }, + ), + // OP activation timestamps + // https://specs.optimism.io/protocol/superchain-upgrades.html#activation-timestamps + // Canyon + ( + Head { number: 105235063, timestamp: 1704992401, ..Default::default() }, + ForkId { hash: ForkHash([0xbd, 0xd4, 0xfd, 0xb2]), next: 1710374401 }, + ), + // Ecotone + ( + Head { number: 105235063, timestamp: 1710374401, ..Default::default() }, + ForkId { hash: ForkHash([0x19, 0xda, 0x4c, 0x52]), next: 1720627201 }, + ), + // Fjord + ( + Head { number: 105235063, timestamp: 1720627201, ..Default::default() }, 
+ ForkId { hash: ForkHash([0x49, 0xfb, 0xfe, 0x1e]), next: 1726070401 }, + ), + // Granite + ( + Head { number: 105235063, timestamp: 1726070401, ..Default::default() }, + ForkId { hash: ForkHash([0x44, 0x70, 0x4c, 0xde]), next: 1736445601 }, + ), + // Holocene + ( + Head { number: 105235063, timestamp: 1736445601, ..Default::default() }, + ForkId { hash: ForkHash([0x2b, 0xd9, 0x3d, 0xc8]), next: 1746806401 }, + ), + // Isthmus + ( + Head { number: 105235063, timestamp: 1746806401, ..Default::default() }, + ForkId { + hash: ForkHash([0x37, 0xbe, 0x75, 0x8f]), + next: OP_MAINNET_JOVIAN_TIMESTAMP, + }, + ), + // Jovian + ( + Head { + number: 105235063, + timestamp: OP_MAINNET_JOVIAN_TIMESTAMP, + ..Default::default() + }, + OP_MAINNET.hardfork_fork_id(OpHardfork::Jovian).unwrap(), + ), + ], + ); + } + + #[test] + fn base_sepolia_forkids() { + test_fork_ids( + &BASE_SEPOLIA, + &[ + ( + Head { number: 0, ..Default::default() }, + ForkId { hash: ForkHash([0xb9, 0x59, 0xb9, 0xf7]), next: 1699981200 }, + ), + ( + Head { number: 0, timestamp: 1699981199, ..Default::default() }, + ForkId { hash: ForkHash([0xb9, 0x59, 0xb9, 0xf7]), next: 1699981200 }, + ), + ( + Head { number: 0, timestamp: 1699981200, ..Default::default() }, + ForkId { hash: ForkHash([0x60, 0x7c, 0xd5, 0xa1]), next: 1708534800 }, + ), + ( + Head { number: 0, timestamp: 1708534799, ..Default::default() }, + ForkId { hash: ForkHash([0x60, 0x7c, 0xd5, 0xa1]), next: 1708534800 }, + ), + ( + Head { number: 0, timestamp: 1708534800, ..Default::default() }, + ForkId { hash: ForkHash([0xbe, 0x96, 0x9b, 0x17]), next: 1716998400 }, + ), + ( + Head { number: 0, timestamp: 1716998399, ..Default::default() }, + ForkId { hash: ForkHash([0xbe, 0x96, 0x9b, 0x17]), next: 1716998400 }, + ), + ( + Head { number: 0, timestamp: 1716998400, ..Default::default() }, + ForkId { hash: ForkHash([0x4e, 0x45, 0x7a, 0x49]), next: 1723478400 }, + ), + ( + Head { number: 0, timestamp: 1723478399, ..Default::default() }, + ForkId { hash: 
ForkHash([0x4e, 0x45, 0x7a, 0x49]), next: 1723478400 }, + ), + ( + Head { number: 0, timestamp: 1723478400, ..Default::default() }, + ForkId { hash: ForkHash([0x5e, 0xdf, 0xa3, 0xb6]), next: 1732633200 }, + ), + ( + Head { number: 0, timestamp: 1732633200, ..Default::default() }, + ForkId { hash: ForkHash([0x8b, 0x5e, 0x76, 0x29]), next: 1744905600 }, + ), + // Isthmus + ( + Head { number: 0, timestamp: 1744905600, ..Default::default() }, + ForkId { + hash: ForkHash([0x06, 0x0a, 0x4d, 0x1d]), + next: BASE_SEPOLIA_JOVIAN_TIMESTAMP, + }, + ), + // Jovian + ( + Head { + number: 0, + timestamp: BASE_SEPOLIA_JOVIAN_TIMESTAMP, + ..Default::default() + }, + BASE_SEPOLIA.hardfork_fork_id(OpHardfork::Jovian).unwrap(), + ), + ], + ); + } + + #[test] + fn base_mainnet_genesis() { + let genesis = BASE_MAINNET.genesis_header(); + assert_eq!( + genesis.hash_slow(), + b256!("0xf712aa9241cc24369b143cf6dce85f0902a9731e70d66818a3a5845b296c73dd") + ); + let base_fee = BASE_MAINNET.next_block_base_fee(genesis, genesis.timestamp).unwrap(); + // + assert_eq!(base_fee, 980000000); + } + + #[test] + fn base_sepolia_genesis() { + let genesis = BASE_SEPOLIA.genesis_header(); + assert_eq!( + genesis.hash_slow(), + b256!("0x0dcc9e089e30b90ddfc55be9a37dd15bc551aeee999d2e2b51414c54eaf934e4") + ); + let base_fee = BASE_SEPOLIA.next_block_base_fee(genesis, genesis.timestamp).unwrap(); + // + assert_eq!(base_fee, 980000000); + } + + #[test] + fn op_sepolia_genesis() { + let genesis = OP_SEPOLIA.genesis_header(); + assert_eq!( + genesis.hash_slow(), + b256!("0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d") + ); + let base_fee = OP_SEPOLIA.next_block_base_fee(genesis, genesis.timestamp).unwrap(); + // + assert_eq!(base_fee, 980000000); + } + + #[test] + fn latest_base_mainnet_fork_id() { + assert_eq!( + ForkId { hash: ForkHash(hex!("1cfeafc9")), next: 0 }, + BASE_MAINNET.latest_fork_id() + ) + } + + #[test] + fn latest_base_mainnet_fork_id_with_builder() { + let base_mainnet = 
OpChainSpecBuilder::base_mainnet().build(); + assert_eq!( + ForkId { hash: ForkHash(hex!("1cfeafc9")), next: 0 }, + base_mainnet.latest_fork_id() + ) + } + + #[test] + fn is_bedrock_active() { + let op_mainnet = OpChainSpecBuilder::optimism_mainnet().build(); + assert!(!op_mainnet.is_bedrock_active_at_block(1)) + } + + #[test] + fn parse_optimism_hardforks() { + let geth_genesis = r#" + { + "config": { + "bedrockBlock": 10, + "regolithTime": 20, + "canyonTime": 30, + "ecotoneTime": 40, + "fjordTime": 50, + "graniteTime": 51, + "holoceneTime": 52, + "isthmusTime": 53, + "optimism": { + "eip1559Elasticity": 60, + "eip1559Denominator": 70 + } + } + } + "#; + let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); + + let actual_bedrock_block = genesis.config.extra_fields.get("bedrockBlock"); + assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(10)).as_ref()); + let actual_regolith_timestamp = genesis.config.extra_fields.get("regolithTime"); + assert_eq!(actual_regolith_timestamp, Some(serde_json::Value::from(20)).as_ref()); + let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); + assert_eq!(actual_canyon_timestamp, Some(serde_json::Value::from(30)).as_ref()); + let actual_ecotone_timestamp = genesis.config.extra_fields.get("ecotoneTime"); + assert_eq!(actual_ecotone_timestamp, Some(serde_json::Value::from(40)).as_ref()); + let actual_fjord_timestamp = genesis.config.extra_fields.get("fjordTime"); + assert_eq!(actual_fjord_timestamp, Some(serde_json::Value::from(50)).as_ref()); + let actual_granite_timestamp = genesis.config.extra_fields.get("graniteTime"); + assert_eq!(actual_granite_timestamp, Some(serde_json::Value::from(51)).as_ref()); + let actual_holocene_timestamp = genesis.config.extra_fields.get("holoceneTime"); + assert_eq!(actual_holocene_timestamp, Some(serde_json::Value::from(52)).as_ref()); + let actual_isthmus_timestamp = genesis.config.extra_fields.get("isthmusTime"); + assert_eq!(actual_isthmus_timestamp, 
Some(serde_json::Value::from(53)).as_ref()); + + let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); + assert_eq!( + optimism_object, + &serde_json::json!({ + "eip1559Elasticity": 60, + "eip1559Denominator": 70, + }) + ); + + let chain_spec: OpChainSpec = genesis.into(); + + assert_eq!( + chain_spec.base_fee_params, + BaseFeeParamsKind::Constant(BaseFeeParams::new(70, 60)) + ); + + assert!(!chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 0)); + + assert!(chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 10)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 30)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 40)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 50)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 51)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 52)); + } + + #[test] + fn parse_optimism_hardforks_variable_base_fee_params() { + let geth_genesis = r#" + { + "config": { + "bedrockBlock": 10, + "regolithTime": 20, + "canyonTime": 30, + "ecotoneTime": 40, + "fjordTime": 50, + "graniteTime": 51, + "holoceneTime": 52, + "isthmusTime": 53, + "optimism": { + "eip1559Elasticity": 60, + "eip1559Denominator": 70, + "eip1559DenominatorCanyon": 80 + } + } + } + "#; + let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); + + let actual_bedrock_block = 
genesis.config.extra_fields.get("bedrockBlock"); + assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(10)).as_ref()); + let actual_regolith_timestamp = genesis.config.extra_fields.get("regolithTime"); + assert_eq!(actual_regolith_timestamp, Some(serde_json::Value::from(20)).as_ref()); + let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); + assert_eq!(actual_canyon_timestamp, Some(serde_json::Value::from(30)).as_ref()); + let actual_ecotone_timestamp = genesis.config.extra_fields.get("ecotoneTime"); + assert_eq!(actual_ecotone_timestamp, Some(serde_json::Value::from(40)).as_ref()); + let actual_fjord_timestamp = genesis.config.extra_fields.get("fjordTime"); + assert_eq!(actual_fjord_timestamp, Some(serde_json::Value::from(50)).as_ref()); + let actual_granite_timestamp = genesis.config.extra_fields.get("graniteTime"); + assert_eq!(actual_granite_timestamp, Some(serde_json::Value::from(51)).as_ref()); + let actual_holocene_timestamp = genesis.config.extra_fields.get("holoceneTime"); + assert_eq!(actual_holocene_timestamp, Some(serde_json::Value::from(52)).as_ref()); + let actual_isthmus_timestamp = genesis.config.extra_fields.get("isthmusTime"); + assert_eq!(actual_isthmus_timestamp, Some(serde_json::Value::from(53)).as_ref()); + + let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); + assert_eq!( + optimism_object, + &serde_json::json!({ + "eip1559Elasticity": 60, + "eip1559Denominator": 70, + "eip1559DenominatorCanyon": 80 + }) + ); + + let chain_spec: OpChainSpec = genesis.into(); + + assert_eq!( + chain_spec.base_fee_params, + BaseFeeParamsKind::Variable( + vec![ + (EthereumHardfork::London.boxed(), BaseFeeParams::new(70, 60)), + (OpHardfork::Canyon.boxed(), BaseFeeParams::new(80, 60)), + ] + .into() + ) + ); + + assert!(!chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 0)); + 
assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 0)); + assert!(!chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 0)); + + assert!(chain_spec.is_fork_active_at_block(OpHardfork::Bedrock, 10)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Canyon, 30)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Ecotone, 40)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Fjord, 50)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Granite, 51)); + assert!(chain_spec.is_fork_active_at_timestamp(OpHardfork::Holocene, 52)); + } + + #[test] + fn parse_genesis_optimism_with_variable_base_fee_params() { + use op_alloy_rpc_types::OpBaseFeeInfo; + + let geth_genesis = r#" + { + "config": { + "chainId": 8453, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "muirGlacierBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "arrowGlacierBlock": 0, + "grayGlacierBlock": 0, + "mergeNetsplitBlock": 0, + "bedrockBlock": 0, + "regolithTime": 15, + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true, + "optimism": { + "eip1559Elasticity": 6, + "eip1559Denominator": 50 + } + } + } + "#; + let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); + let chainspec = OpChainSpec::from(genesis.clone()); + + let actual_chain_id = genesis.config.chain_id; + assert_eq!(actual_chain_id, 8453); + + assert_eq!( + chainspec.hardforks.get(EthereumHardfork::Istanbul), + Some(ForkCondition::Block(0)) + ); + + let actual_bedrock_block = 
genesis.config.extra_fields.get("bedrockBlock"); + assert_eq!(actual_bedrock_block, Some(serde_json::Value::from(0)).as_ref()); + let actual_canyon_timestamp = genesis.config.extra_fields.get("canyonTime"); + assert_eq!(actual_canyon_timestamp, None); + + assert!(genesis.config.terminal_total_difficulty_passed); + + let optimism_object = genesis.config.extra_fields.get("optimism").unwrap(); + let optimism_base_fee_info = + serde_json::from_value::(optimism_object.clone()).unwrap(); + + assert_eq!( + optimism_base_fee_info, + OpBaseFeeInfo { + eip1559_elasticity: Some(6), + eip1559_denominator: Some(50), + eip1559_denominator_canyon: None, + } + ); + assert_eq!( + chainspec.base_fee_params, + BaseFeeParamsKind::Constant(BaseFeeParams { + max_change_denominator: 50, + elasticity_multiplier: 6, + }) + ); + + assert!(chainspec.is_fork_active_at_block(OpHardfork::Bedrock, 0)); + + assert!(chainspec.is_fork_active_at_timestamp(OpHardfork::Regolith, 20)); + } + + #[test] + fn test_fork_order_optimism_mainnet() { + use reth_optimism_forks::OpHardfork; + + let genesis = Genesis { + config: ChainConfig { + chain_id: 0, + homestead_block: Some(0), + dao_fork_block: Some(0), + dao_fork_support: false, + eip150_block: Some(0), + eip155_block: Some(0), + eip158_block: Some(0), + byzantium_block: Some(0), + constantinople_block: Some(0), + petersburg_block: Some(0), + istanbul_block: Some(0), + muir_glacier_block: Some(0), + berlin_block: Some(0), + london_block: Some(0), + arrow_glacier_block: Some(0), + gray_glacier_block: Some(0), + merge_netsplit_block: Some(0), + shanghai_time: Some(0), + cancun_time: Some(0), + prague_time: Some(0), + terminal_total_difficulty: Some(U256::ZERO), + extra_fields: [ + (String::from("bedrockBlock"), 0.into()), + (String::from("regolithTime"), 0.into()), + (String::from("canyonTime"), 0.into()), + (String::from("ecotoneTime"), 0.into()), + (String::from("fjordTime"), 0.into()), + (String::from("graniteTime"), 0.into()), + 
(String::from("holoceneTime"), 0.into()), + (String::from("isthmusTime"), 0.into()), + (String::from("jovianTime"), 0.into()), + ] + .into_iter() + .collect(), + ..Default::default() + }, + ..Default::default() + }; + + let chain_spec: OpChainSpec = genesis.into(); + + let hardforks: Vec<_> = chain_spec.hardforks.forks_iter().map(|(h, _)| h).collect(); + let expected_hardforks = vec![ + EthereumHardfork::Frontier.boxed(), + EthereumHardfork::Homestead.boxed(), + EthereumHardfork::Tangerine.boxed(), + EthereumHardfork::SpuriousDragon.boxed(), + EthereumHardfork::Byzantium.boxed(), + EthereumHardfork::Constantinople.boxed(), + EthereumHardfork::Petersburg.boxed(), + EthereumHardfork::Istanbul.boxed(), + EthereumHardfork::MuirGlacier.boxed(), + EthereumHardfork::Berlin.boxed(), + EthereumHardfork::London.boxed(), + EthereumHardfork::ArrowGlacier.boxed(), + EthereumHardfork::GrayGlacier.boxed(), + EthereumHardfork::Paris.boxed(), + OpHardfork::Bedrock.boxed(), + OpHardfork::Regolith.boxed(), + EthereumHardfork::Shanghai.boxed(), + OpHardfork::Canyon.boxed(), + EthereumHardfork::Cancun.boxed(), + OpHardfork::Ecotone.boxed(), + OpHardfork::Fjord.boxed(), + OpHardfork::Granite.boxed(), + OpHardfork::Holocene.boxed(), + EthereumHardfork::Prague.boxed(), + OpHardfork::Isthmus.boxed(), + OpHardfork::Jovian.boxed(), + // OpHardfork::Interop.boxed(), + ]; + + for (expected, actual) in expected_hardforks.iter().zip(hardforks.iter()) { + assert_eq!(&**expected, &**actual); + } + assert_eq!(expected_hardforks.len(), hardforks.len()); + } + + #[test] + fn json_genesis() { + let geth_genesis = r#" +{ + "config": { + "chainId": 1301, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "muirGlacierBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "arrowGlacierBlock": 0, + "grayGlacierBlock": 0, + "mergeNetsplitBlock": 0, + "shanghaiTime": 0, + 
"cancunTime": 0, + "bedrockBlock": 0, + "regolithTime": 0, + "canyonTime": 0, + "ecotoneTime": 0, + "fjordTime": 0, + "graniteTime": 0, + "holoceneTime": 1732633200, + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true, + "optimism": { + "eip1559Elasticity": 6, + "eip1559Denominator": 50, + "eip1559DenominatorCanyon": 250 + } + }, + "nonce": "0x0", + "timestamp": "0x66edad4c", + "extraData": "0x424544524f434b", + "gasLimit": "0x1c9c380", + "difficulty": "0x0", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x4200000000000000000000000000000000000011", + "alloc": {}, + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas": "0x3b9aca00", + "excessBlobGas": "0x0", + "blobGasUsed": "0x0" +} + "#; + + let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); + let chainspec = OpChainSpec::from_genesis(genesis); + assert!(chainspec.is_holocene_active_at_timestamp(1732633200)); + } + + #[test] + fn json_genesis_mapped_l1_timestamps() { + let geth_genesis = r#" +{ + "config": { + "chainId": 1301, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "muirGlacierBlock": 0, + "berlinBlock": 0, + "londonBlock": 0, + "arrowGlacierBlock": 0, + "grayGlacierBlock": 0, + "mergeNetsplitBlock": 0, + "bedrockBlock": 0, + "regolithTime": 0, + "canyonTime": 0, + "ecotoneTime": 1712633200, + "fjordTime": 0, + "graniteTime": 0, + "holoceneTime": 1732633200, + "isthmusTime": 1742633200, + "terminalTotalDifficulty": 0, + "terminalTotalDifficultyPassed": true, + "optimism": { + "eip1559Elasticity": 6, + "eip1559Denominator": 50, + "eip1559DenominatorCanyon": 250 + } + }, + "nonce": "0x0", + "timestamp": "0x66edad4c", + "extraData": "0x424544524f434b", + "gasLimit": "0x1c9c380", + "difficulty": "0x0", + 
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x4200000000000000000000000000000000000011", + "alloc": {}, + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "baseFeePerGas": "0x3b9aca00", + "excessBlobGas": "0x0", + "blobGasUsed": "0x0" +} + "#; + + let genesis: Genesis = serde_json::from_str(geth_genesis).unwrap(); + let chainspec = OpChainSpec::from_genesis(genesis); + assert!(chainspec.is_holocene_active_at_timestamp(1732633200)); + + assert!(chainspec.is_shanghai_active_at_timestamp(0)); + assert!(chainspec.is_canyon_active_at_timestamp(0)); + + assert!(chainspec.is_ecotone_active_at_timestamp(1712633200)); + assert!(chainspec.is_cancun_active_at_timestamp(1712633200)); + + assert!(chainspec.is_prague_active_at_timestamp(1742633200)); + assert!(chainspec.is_isthmus_active_at_timestamp(1742633200)); + } + + #[test] + fn display_hardorks() { + let content = BASE_MAINNET.display_hardforks().to_string(); + for eth_hf in EthereumHardfork::VARIANTS { + assert!(!content.contains(eth_hf.name())); + } + } +} diff --git a/op-reth/crates/chainspec/src/op.rs b/rust/op-reth/crates/chainspec/src/op.rs similarity index 90% rename from op-reth/crates/chainspec/src/op.rs rename to rust/op-reth/crates/chainspec/src/op.rs index d6ca36be264..7b463aecf5c 100644 --- a/op-reth/crates/chainspec/src/op.rs +++ b/rust/op-reth/crates/chainspec/src/op.rs @@ -1,12 +1,12 @@ //! Chain specification for the Optimism Mainnet network. 
-use crate::{make_op_genesis_header, LazyLock, OpChainSpec}; +use crate::{LazyLock, OpChainSpec, make_op_genesis_header}; use alloc::{sync::Arc, vec}; use alloy_chains::Chain; -use alloy_primitives::{b256, U256}; +use alloy_primitives::{U256, b256}; use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec, Hardfork}; use reth_ethereum_forks::EthereumHardfork; -use reth_optimism_forks::{OpHardfork, OP_MAINNET_HARDFORKS}; +use reth_optimism_forks::{OP_MAINNET_HARDFORKS, OpHardfork}; use reth_primitives_traits::SealedHeader; /// The Optimism Mainnet spec diff --git a/rust/op-reth/crates/chainspec/src/op_sepolia.rs b/rust/op-reth/crates/chainspec/src/op_sepolia.rs new file mode 100644 index 00000000000..d792fbe3004 --- /dev/null +++ b/rust/op-reth/crates/chainspec/src/op_sepolia.rs @@ -0,0 +1,39 @@ +//! Chain specification for the Optimism Sepolia testnet network. + +use crate::{LazyLock, OpChainSpec, make_op_genesis_header}; +use alloc::{sync::Arc, vec}; +use alloy_chains::{Chain, NamedChain}; +use alloy_primitives::{U256, b256}; +use reth_chainspec::{BaseFeeParams, BaseFeeParamsKind, ChainSpec, Hardfork}; +use reth_ethereum_forks::EthereumHardfork; +use reth_optimism_forks::{OP_SEPOLIA_HARDFORKS, OpHardfork}; +use reth_primitives_traits::SealedHeader; + +/// The OP Sepolia spec +pub static OP_SEPOLIA: LazyLock> = LazyLock::new(|| { + let genesis = serde_json::from_str(include_str!("../res/genesis/sepolia_op.json")) + .expect("Can't deserialize OP Sepolia genesis json"); + let hardforks = OP_SEPOLIA_HARDFORKS.clone(); + OpChainSpec { + inner: ChainSpec { + chain: Chain::from_named(NamedChain::OptimismSepolia), + genesis_header: SealedHeader::new( + make_op_genesis_header(&genesis, &hardforks), + b256!("0x102de6ffb001480cc9b8b548fd05c34cd4f46ae4aa91759393db90ea0409887d"), + ), + genesis, + paris_block_and_final_difficulty: Some((0, U256::from(0))), + hardforks, + base_fee_params: BaseFeeParamsKind::Variable( + vec![ + (EthereumHardfork::London.boxed(), 
BaseFeeParams::optimism_sepolia()), + (OpHardfork::Canyon.boxed(), BaseFeeParams::optimism_sepolia_canyon()), + ] + .into(), + ), + prune_delete_limit: 10000, + ..Default::default() + }, + } + .into() +}); diff --git a/op-reth/crates/chainspec/src/superchain/chain_metadata.rs b/rust/op-reth/crates/chainspec/src/superchain/chain_metadata.rs similarity index 100% rename from op-reth/crates/chainspec/src/superchain/chain_metadata.rs rename to rust/op-reth/crates/chainspec/src/superchain/chain_metadata.rs diff --git a/op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs b/rust/op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs similarity index 99% rename from op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs rename to rust/op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs index 1274f243a12..b8835954ae9 100644 --- a/op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs +++ b/rust/op-reth/crates/chainspec/src/superchain/chain_spec_macro.rs @@ -16,7 +16,7 @@ macro_rules! create_chain_spec { /// Generates the key string for a given name and environment pair. #[macro_export] macro_rules! 
key_for { - ($name:expr, "mainnet") => { + ($name:expr,"mainnet") => { $name }; ($name:expr, $env:expr) => { diff --git a/op-reth/crates/chainspec/src/superchain/chain_specs.rs b/rust/op-reth/crates/chainspec/src/superchain/chain_specs.rs similarity index 100% rename from op-reth/crates/chainspec/src/superchain/chain_specs.rs rename to rust/op-reth/crates/chainspec/src/superchain/chain_specs.rs diff --git a/op-reth/crates/chainspec/src/superchain/configs.rs b/rust/op-reth/crates/chainspec/src/superchain/configs.rs similarity index 96% rename from op-reth/crates/chainspec/src/superchain/configs.rs rename to rust/op-reth/crates/chainspec/src/superchain/configs.rs index b0ebc0fb84d..f28b1233fd9 100644 --- a/op-reth/crates/chainspec/src/superchain/configs.rs +++ b/rust/op-reth/crates/chainspec/src/superchain/configs.rs @@ -1,4 +1,4 @@ -use crate::superchain::chain_metadata::{to_genesis_chain_config, ChainMetadata}; +use crate::superchain::chain_metadata::{ChainMetadata, to_genesis_chain_config}; use alloc::{ format, string::{String, ToString}, @@ -78,7 +78,7 @@ fn read_file( ) -> Result, SuperchainConfigError> { for entry in archive.entries() { if entry.filename().as_str()? 
== file_path { - return Ok(entry.data().to_vec()) + return Ok(entry.data().to_vec()); } } Err(SuperchainConfigError::FileNotFound(file_path.to_string())) @@ -87,16 +87,16 @@ fn read_file( #[cfg(test)] mod tests { use super::*; - use crate::{generated_chain_value_parser, superchain::Superchain, SUPPORTED_CHAINS}; + use crate::{SUPPORTED_CHAINS, generated_chain_value_parser, superchain::Superchain}; use alloy_chains::NamedChain; use alloy_op_hardforks::{ - OpHardfork, BASE_MAINNET_CANYON_TIMESTAMP, BASE_MAINNET_ECOTONE_TIMESTAMP, + BASE_MAINNET_CANYON_TIMESTAMP, BASE_MAINNET_ECOTONE_TIMESTAMP, BASE_MAINNET_ISTHMUS_TIMESTAMP, BASE_MAINNET_JOVIAN_TIMESTAMP, BASE_SEPOLIA_CANYON_TIMESTAMP, BASE_SEPOLIA_ECOTONE_TIMESTAMP, BASE_SEPOLIA_ISTHMUS_TIMESTAMP, BASE_SEPOLIA_JOVIAN_TIMESTAMP, OP_MAINNET_CANYON_TIMESTAMP, OP_MAINNET_ECOTONE_TIMESTAMP, OP_MAINNET_ISTHMUS_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, OP_SEPOLIA_CANYON_TIMESTAMP, OP_SEPOLIA_ECOTONE_TIMESTAMP, OP_SEPOLIA_ISTHMUS_TIMESTAMP, - OP_SEPOLIA_JOVIAN_TIMESTAMP, + OP_SEPOLIA_JOVIAN_TIMESTAMP, OpHardfork, }; use reth_optimism_primitives::L2_TO_L1_MESSAGE_PASSER_ADDRESS; use tar_no_std::TarArchiveRef; @@ -137,7 +137,7 @@ mod tests { .map(|s| s.to_string()) .collect::>(); if filename.first().unwrap().ne(&"genesis") { - continue + continue; } read_superchain_metadata( &filename.get(2).unwrap().replace(".json.zz", ""), diff --git a/op-reth/crates/chainspec/src/superchain/mod.rs b/rust/op-reth/crates/chainspec/src/superchain/mod.rs similarity index 100% rename from op-reth/crates/chainspec/src/superchain/mod.rs rename to rust/op-reth/crates/chainspec/src/superchain/mod.rs diff --git a/rust/op-reth/crates/cli/Cargo.toml b/rust/op-reth/crates/cli/Cargo.toml new file mode 100644 index 00000000000..7cd9474a9db --- /dev/null +++ b/rust/op-reth/crates/cli/Cargo.toml @@ -0,0 +1,127 @@ +[package] +name = "reth-optimism-cli" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true 
+homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +reth-static-file-types = { workspace = true, features = ["clap"] } +reth-cli.workspace = true +reth-cli-commands.workspace = true +reth-consensus.workspace = true +reth-rpc-server-types.workspace = true +reth-primitives-traits.workspace = true +reth-db = { workspace = true, features = ["mdbx", "op"] } +reth-db-api.workspace = true +reth-db-common.workspace = true +reth-downloaders.workspace = true +reth-provider.workspace = true +reth-prune.workspace = true +reth-stages.workspace = true +reth-static-file.workspace = true +reth-execution-types.workspace = true +reth-node-core.workspace = true +reth-optimism-node.workspace = true +reth-fs-util.workspace = true + +# so jemalloc metrics can be included +reth-node-metrics.workspace = true + +## optimism +reth-optimism-primitives.workspace = true +reth-optimism-chainspec = { workspace = true, features = ["superchain-configs"] } +reth-optimism-consensus.workspace = true + +reth-chainspec.workspace = true +reth-node-events.workspace = true +reth-optimism-evm.workspace = true +reth-cli-runner.workspace = true +reth-node-builder = { workspace = true, features = ["op"] } +reth-tracing.workspace = true + +# eth +alloy-eips.workspace = true +alloy-consensus.workspace = true +alloy-primitives.workspace = true +alloy-rlp.workspace = true + +# misc +futures-util.workspace = true +derive_more.workspace = true +serde.workspace = true +clap = { workspace = true, features = ["derive", "env"] } + +tokio = { workspace = true, features = ["sync", "macros", "time", "rt-multi-thread"] } +tokio-util = { workspace = true, features = ["codec"] } +tracing.workspace = true +eyre.workspace = true + +# reth test-vectors +proptest = { workspace = true, optional = true } +op-alloy-consensus.workspace = true + +[dev-dependencies] +tempfile.workspace = true +reth-stages = { workspace = true, features = 
["test-utils"] } + +[build-dependencies] +reth-optimism-chainspec = { workspace = true, features = ["std", "superchain-configs"] } + +[features] +default = [] + +# Opentelemetry feature to activate tracing and logs export +otlp = ["reth-tracing/otlp", "reth-node-core/otlp"] +otlp-logs = ["reth-tracing/otlp-logs", "reth-node-core/otlp-logs"] + +asm-keccak = [ + "alloy-primitives/asm-keccak", + "reth-node-core/asm-keccak", + "reth-optimism-node/asm-keccak", +] + +keccak-cache-global = [ + "alloy-primitives/keccak-cache-global", + "reth-node-core/keccak-cache-global", + "reth-optimism-node/keccak-cache-global", +] + +# Jemalloc feature for vergen to generate correct env vars +jemalloc = [ + "reth-node-core/jemalloc", + "reth-node-metrics/jemalloc", +] +jemalloc-prof = [ + "jemalloc", + "reth-node-metrics/jemalloc-prof", +] +jemalloc-symbols = [ + "jemalloc-prof", + "reth-node-metrics/jemalloc-symbols", +] + +tracy = ["reth-tracing/tracy", "reth-node-core/tracy"] + +dev = [ + "dep:proptest", + "reth-cli-commands/arbitrary", +] + +serde = [ + "alloy-consensus/serde", + "alloy-eips/serde", + "alloy-primitives/serde", + "op-alloy-consensus/serde", + "reth-execution-types/serde", + "reth-optimism-primitives/serde", + "reth-primitives-traits/serde", + "reth-optimism-chainspec/serde", +] + +edge = ["reth-cli-commands/edge", "reth-node-core/edge"] diff --git a/op-reth/crates/cli/src/app.rs b/rust/op-reth/crates/cli/src/app.rs similarity index 99% rename from op-reth/crates/cli/src/app.rs rename to rust/op-reth/crates/cli/src/app.rs index 2ef171a4731..22df73b0d90 100644 --- a/op-reth/crates/cli/src/app.rs +++ b/rust/op-reth/crates/cli/src/app.rs @@ -1,5 +1,5 @@ use crate::{Cli, Commands}; -use eyre::{eyre, Result}; +use eyre::{Result, eyre}; use reth_cli::chainspec::ChainSpecParser; use reth_cli_commands::launcher::Launcher; use reth_cli_runner::CliRunner; diff --git a/op-reth/crates/cli/src/chainspec.rs b/rust/op-reth/crates/cli/src/chainspec.rs similarity index 88% rename 
from op-reth/crates/cli/src/chainspec.rs rename to rust/op-reth/crates/cli/src/chainspec.rs index 14e7450c9e6..6af7dcbf324 100644 --- a/op-reth/crates/cli/src/chainspec.rs +++ b/rust/op-reth/crates/cli/src/chainspec.rs @@ -1,5 +1,5 @@ -use reth_cli::chainspec::{parse_genesis, ChainSpecParser}; -use reth_optimism_chainspec::{generated_chain_value_parser, OpChainSpec, SUPPORTED_CHAINS}; +use reth_cli::chainspec::{ChainSpecParser, parse_genesis}; +use reth_optimism_chainspec::{OpChainSpec, SUPPORTED_CHAINS, generated_chain_value_parser}; use std::sync::Arc; /// Optimism chain specification parser. diff --git a/op-reth/crates/cli/src/commands/import.rs b/rust/op-reth/crates/cli/src/commands/import.rs similarity index 98% rename from op-reth/crates/cli/src/commands/import.rs rename to rust/op-reth/crates/cli/src/commands/import.rs index 74656511af1..0d7e0cde8cd 100644 --- a/op-reth/crates/cli/src/commands/import.rs +++ b/rust/op-reth/crates/cli/src/commands/import.rs @@ -13,7 +13,7 @@ use reth_node_builder::BlockTy; use reth_node_core::version::version_metadata; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_evm::OpExecutorProvider; -use reth_optimism_primitives::{bedrock::is_dup_tx, OpPrimitives}; +use reth_optimism_primitives::{OpPrimitives, bedrock::is_dup_tx}; use reth_provider::{BlockNumReader, ChainSpecProvider, HeaderProvider, StageCheckpointReader}; use reth_prune::PruneModes; use reth_stages::StageId; @@ -92,7 +92,7 @@ impl> ImportOpCommand { body.transactions.retain(|_| { if is_dup_tx(block_number) { total_filtered_out_dup_txns += 1; - return false + return false; } true }) diff --git a/op-reth/crates/cli/src/commands/import_receipts.rs b/rust/op-reth/crates/cli/src/commands/import_receipts.rs similarity index 97% rename from op-reth/crates/cli/src/commands/import_receipts.rs rename to rust/op-reth/crates/cli/src/commands/import_receipts.rs index 8ab71f66102..d2a66fe90d0 100644 --- a/op-reth/crates/cli/src/commands/import_receipts.rs +++ 
b/rust/op-reth/crates/cli/src/commands/import_receipts.rs @@ -14,12 +14,12 @@ use reth_execution_types::ExecutionOutcome; use reth_node_builder::ReceiptTy; use reth_node_core::version::version_metadata; use reth_optimism_chainspec::OpChainSpec; -use reth_optimism_primitives::{bedrock::is_dup_tx, OpPrimitives, OpReceipt}; +use reth_optimism_primitives::{OpPrimitives, OpReceipt, bedrock::is_dup_tx}; use reth_primitives_traits::NodePrimitives; use reth_provider::{ - providers::ProviderNodeTypes, DBProvider, DatabaseProviderFactory, OriginalValuesKnown, - ProviderFactory, StageCheckpointReader, StageCheckpointWriter, StateWriteConfig, StateWriter, - StaticFileProviderFactory, StatsReader, + DBProvider, DatabaseProviderFactory, OriginalValuesKnown, ProviderFactory, + StageCheckpointReader, StageCheckpointWriter, StateWriteConfig, StateWriter, + StaticFileProviderFactory, StatsReader, providers::ProviderNodeTypes, }; use reth_stages::{StageCheckpoint, StageId}; use reth_static_file_types::StaticFileSegment; @@ -174,7 +174,7 @@ where { if highest_block_receipts == highest_block_transactions { warn!(target: "reth::cli", highest_block_receipts, highest_block_transactions, "Ignoring all other blocks in the file since we have reached the desired height"); - break + break; } // create a new file client from chunk read from file diff --git a/op-reth/crates/cli/src/commands/init_state.rs b/rust/op-reth/crates/cli/src/commands/init_state.rs similarity index 98% rename from op-reth/crates/cli/src/commands/init_state.rs rename to rust/op-reth/crates/cli/src/commands/init_state.rs index 93de398675c..09e2bd23ab7 100644 --- a/op-reth/crates/cli/src/commands/init_state.rs +++ b/rust/op-reth/crates/cli/src/commands/init_state.rs @@ -7,10 +7,10 @@ use reth_cli_commands::common::{AccessRights, CliNodeTypes, Environment}; use reth_db_common::init::init_from_state_dump; use reth_optimism_chainspec::OpChainSpec; use reth_optimism_primitives::{ - bedrock::{BEDROCK_HEADER, 
BEDROCK_HEADER_HASH}, OpPrimitives, + bedrock::{BEDROCK_HEADER, BEDROCK_HEADER_HASH}, }; -use reth_primitives_traits::{header::HeaderMut, SealedHeader}; +use reth_primitives_traits::{SealedHeader, header::HeaderMut}; use reth_provider::{ BlockNumReader, DBProvider, DatabaseProviderFactory, StaticFileProviderFactory, StaticFileWriter, @@ -91,7 +91,7 @@ impl> InitStateCommandOp { } else if last_block_number > 0 && last_block_number < BEDROCK_HEADER.number { return Err(eyre::eyre!( "Data directory should be empty when calling init-state with --without-ovm." - )) + )); } info!(target: "reth::cli", "Initiating state dump"); diff --git a/rust/op-reth/crates/cli/src/commands/mod.rs b/rust/op-reth/crates/cli/src/commands/mod.rs new file mode 100644 index 00000000000..3c08400d5dc --- /dev/null +++ b/rust/op-reth/crates/cli/src/commands/mod.rs @@ -0,0 +1,90 @@ +use crate::chainspec::OpChainSpecParser; +use clap::Subcommand; +use import::ImportOpCommand; +use import_receipts::ImportReceiptsOpCommand; +use reth_chainspec::{EthChainSpec, EthereumHardforks, Hardforks}; +use reth_cli::chainspec::ChainSpecParser; +use reth_cli_commands::{ + config_cmd, db, dump_genesis, init_cmd, + node::{self, NoArgs}, + p2p, prune, re_execute, stage, +}; +use std::{fmt, sync::Arc}; + +pub mod import; +pub mod import_receipts; +pub mod init_state; + +#[cfg(feature = "dev")] +pub mod test_vectors; + +/// Commands to be executed +#[derive(Debug, Subcommand)] +pub enum Commands +{ + /// Start the node + #[command(name = "node")] + Node(Box>), + /// Initialize the database from a genesis file. + #[command(name = "init")] + Init(init_cmd::InitCommand), + /// Initialize the database from a state dump file. + #[command(name = "init-state")] + InitState(init_state::InitStateCommandOp), + /// This syncs RLP encoded OP blocks below Bedrock from a file, without executing. + #[command(name = "import-op")] + ImportOp(ImportOpCommand), + /// This imports RLP encoded receipts from a file. 
+ #[command(name = "import-receipts-op")] + ImportReceiptsOp(ImportReceiptsOpCommand), + /// Dumps genesis block JSON configuration to stdout. + DumpGenesis(dump_genesis::DumpGenesisCommand), + /// Database debugging utilities + #[command(name = "db")] + Db(db::Command), + /// Manipulate individual stages. + #[command(name = "stage")] + Stage(Box>), + /// P2P Debugging utilities + #[command(name = "p2p")] + P2P(Box>), + /// Write config to stdout + #[command(name = "config")] + Config(config_cmd::Command), + /// Prune according to the configuration without any limits + #[command(name = "prune")] + Prune(prune::PruneCommand), + /// Generate Test Vectors + #[cfg(feature = "dev")] + #[command(name = "test-vectors")] + TestVectors(test_vectors::Command), + /// Re-execute blocks in parallel to verify historical sync correctness. + #[command(name = "re-execute")] + ReExecute(re_execute::Command), +} + +impl< + C: ChainSpecParser, + Ext: clap::Args + fmt::Debug, +> Commands +{ + /// Returns the underlying chain being used for commands + pub fn chain_spec(&self) -> Option<&Arc> { + match self { + Self::Node(cmd) => cmd.chain_spec(), + Self::Init(cmd) => cmd.chain_spec(), + Self::InitState(cmd) => cmd.chain_spec(), + Self::DumpGenesis(cmd) => cmd.chain_spec(), + Self::Db(cmd) => cmd.chain_spec(), + Self::Stage(cmd) => cmd.chain_spec(), + Self::P2P(cmd) => cmd.chain_spec(), + Self::Config(_) => None, + Self::Prune(cmd) => cmd.chain_spec(), + Self::ImportOp(cmd) => cmd.chain_spec(), + Self::ImportReceiptsOp(cmd) => cmd.chain_spec(), + #[cfg(feature = "dev")] + Self::TestVectors(_) => None, + Self::ReExecute(cmd) => cmd.chain_spec(), + } + } +} diff --git a/op-reth/crates/cli/src/commands/test_vectors.rs b/rust/op-reth/crates/cli/src/commands/test_vectors.rs similarity index 94% rename from op-reth/crates/cli/src/commands/test_vectors.rs rename to rust/op-reth/crates/cli/src/commands/test_vectors.rs index c018dafedae..165ca0dac39 100644 --- 
a/op-reth/crates/cli/src/commands/test_vectors.rs +++ b/rust/op-reth/crates/cli/src/commands/test_vectors.rs @@ -9,8 +9,8 @@ use reth_cli_commands::{ test_vectors::{ compact, compact::{ - generate_vector, read_vector, GENERATE_VECTORS as ETH_GENERATE_VECTORS, - READ_VECTORS as ETH_READ_VECTORS, + GENERATE_VECTORS as ETH_GENERATE_VECTORS, READ_VECTORS as ETH_READ_VECTORS, + generate_vector, read_vector, }, tables, }, diff --git a/rust/op-reth/crates/cli/src/lib.rs b/rust/op-reth/crates/cli/src/lib.rs new file mode 100644 index 00000000000..a5951f1e5fa --- /dev/null +++ b/rust/op-reth/crates/cli/src/lib.rs @@ -0,0 +1,215 @@ +//! OP-Reth CLI implementation. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] + +/// A configurable App on top of the cli parser. +pub mod app; +/// Optimism chain specification parser. +pub mod chainspec; +/// Optimism CLI commands. +pub mod commands; +/// Module with a codec for reading and encoding receipts in files. +/// +/// Enables decoding and encoding `OpGethReceipt` type. See . +/// +/// Currently configured to use codec [`OpGethReceipt`](receipt_file_codec::OpGethReceipt) based on +/// export of below Bedrock data using . Codec can +/// be replaced with regular encoding of receipts for export. +/// +/// NOTE: receipts can be exported using regular op-geth encoding for `Receipt` type, to fit +/// reth's needs for importing. However, this would require patching the diff in to export the `Receipt` and not `OpGethReceipt` type (originally +/// made for op-erigon's import needs). 
+pub mod receipt_file_codec; + +/// OVM block, same as EVM block at bedrock, except for signature of deposit transaction +/// not having a signature back then. +/// Enables decoding and encoding `Block` types within file contexts. +pub mod ovm_file_codec; + +pub use app::CliApp; +pub use commands::{import::ImportOpCommand, import_receipts::ImportReceiptsOpCommand}; +use reth_optimism_chainspec::OpChainSpec; +use reth_rpc_server_types::{DefaultRpcModuleValidator, RpcModuleValidator}; + +use std::{ffi::OsString, fmt, marker::PhantomData, sync::Arc}; + +use chainspec::OpChainSpecParser; +use clap::Parser; +use commands::Commands; +use futures_util::Future; +use reth_cli::chainspec::ChainSpecParser; +use reth_cli_commands::launcher::FnLauncher; +use reth_cli_runner::CliRunner; +use reth_db::DatabaseEnv; +use reth_node_builder::{NodeBuilder, WithLaunchContext}; +use reth_node_core::{ + args::{LogArgs, TraceArgs}, + version::version_metadata, +}; +use reth_optimism_node::args::RollupArgs; + +// This allows us to manually enable node metrics features, required for proper jemalloc metric +// reporting +use reth_node_metrics as _; + +/// The main op-reth cli interface. +/// +/// This is the entrypoint to the executable. +#[derive(Debug, Parser)] +#[command(author, name = version_metadata().name_client.as_ref(), version = version_metadata().short_version.as_ref(), long_version = version_metadata().long_version.as_ref(), about = "Reth", long_about = None)] +pub struct Cli< + Spec: ChainSpecParser = OpChainSpecParser, + Ext: clap::Args + fmt::Debug = RollupArgs, + Rpc: RpcModuleValidator = DefaultRpcModuleValidator, +> { + /// The command to run + #[command(subcommand)] + pub command: Commands, + + /// The logging configuration for the CLI. + #[command(flatten)] + pub logs: LogArgs, + + /// The metrics configuration for the CLI. 
+ #[command(flatten)] + pub traces: TraceArgs, + + /// Type marker for the RPC module validator + #[arg(skip)] + _phantom: PhantomData, +} + +impl Cli { + /// Parsers only the default CLI arguments + pub fn parse_args() -> Self { + Self::parse() + } + + /// Parsers only the default CLI arguments from the given iterator + pub fn try_parse_args_from(itr: I) -> Result + where + I: IntoIterator, + T: Into + Clone, + { + Self::try_parse_from(itr) + } +} + +impl Cli +where + C: ChainSpecParser, + Ext: clap::Args + fmt::Debug, + Rpc: RpcModuleValidator, +{ + /// Configures the CLI and returns a [`CliApp`] instance. + /// + /// This method is used to prepare the CLI for execution by wrapping it in a + /// [`CliApp`] that can be further configured before running. + pub fn configure(self) -> CliApp { + CliApp::new(self) + } + + /// Execute the configured cli command. + /// + /// This accepts a closure that is used to launch the node via the + /// [`NodeCommand`](reth_cli_commands::node::NodeCommand). + pub fn run(self, launcher: L) -> eyre::Result<()> + where + L: FnOnce(WithLaunchContext, C::ChainSpec>>, Ext) -> Fut, + Fut: Future>, + { + self.with_runner(CliRunner::try_default_runtime()?, launcher) + } + + /// Execute the configured cli command with the provided [`CliRunner`]. 
+ pub fn with_runner(self, runner: CliRunner, launcher: L) -> eyre::Result<()> + where + L: FnOnce(WithLaunchContext, C::ChainSpec>>, Ext) -> Fut, + Fut: Future>, + { + let mut this = self.configure(); + this.set_runner(runner); + this.run(FnLauncher::new::(async move |builder, chain_spec| { + launcher(builder, chain_spec).await + })) + } +} + +#[cfg(test)] +mod test { + use crate::{Cli, chainspec::OpChainSpecParser, commands::Commands}; + use clap::Parser; + use reth_cli_commands::{NodeCommand, node::NoArgs}; + use reth_optimism_chainspec::{BASE_MAINNET, OP_DEV}; + use reth_optimism_node::args::RollupArgs; + + #[test] + fn parse_dev() { + let cmd = NodeCommand::::parse_from(["op-reth", "--dev"]); + let chain = OP_DEV.clone(); + assert_eq!(cmd.chain.chain, chain.chain); + assert_eq!(cmd.chain.genesis_hash(), chain.genesis_hash()); + assert_eq!( + cmd.chain.paris_block_and_final_difficulty, + chain.paris_block_and_final_difficulty + ); + assert_eq!(cmd.chain.hardforks, chain.hardforks); + + assert!(cmd.rpc.http); + assert!(cmd.network.discovery.disable_discovery); + + assert!(cmd.dev.dev); + } + + #[test] + fn parse_node() { + let cmd = Cli::::parse_from([ + "op-reth", + "node", + "--chain", + "base", + "--datadir", + "/mnt/datadirs/base", + "--instance", + "2", + "--http", + "--http.addr", + "0.0.0.0", + "--ws", + "--ws.addr", + "0.0.0.0", + "--http.api", + "admin,debug,eth,net,trace,txpool,web3,rpc,reth,ots", + "--rollup.sequencer-http", + "https://mainnet-sequencer.base.org", + "--rpc-max-tracing-requests", + "1000000", + "--rpc.gascap", + "18446744073709551615", + "--rpc.max-connections", + "429496729", + "--rpc.max-logs-per-response", + "0", + "--rpc.max-subscriptions-per-connection", + "10000", + "--metrics", + "9003", + "--tracing-otlp=http://localhost:4318/v1/traces", + "--log.file.max-size", + "100", + ]); + + match cmd.command { + Commands::Node(command) => { + assert_eq!(command.chain.as_ref(), BASE_MAINNET.as_ref()); + } + _ => panic!("unexpected 
command"), + } + } +} diff --git a/op-reth/crates/cli/src/ovm_file_codec.rs b/rust/op-reth/crates/cli/src/ovm_file_codec.rs similarity index 98% rename from op-reth/crates/cli/src/ovm_file_codec.rs rename to rust/op-reth/crates/cli/src/ovm_file_codec.rs index 83f3e487282..9d4704f0a6a 100644 --- a/op-reth/crates/cli/src/ovm_file_codec.rs +++ b/rust/op-reth/crates/cli/src/ovm_file_codec.rs @@ -1,15 +1,16 @@ use alloy_consensus::{ - transaction::{from_eip155_value, RlpEcdsaDecodableTx, RlpEcdsaEncodableTx}, Header, TxEip1559, TxEip2930, TxEip7702, TxLegacy, + transaction::{RlpEcdsaDecodableTx, RlpEcdsaEncodableTx, from_eip155_value}, }; use alloy_eips::{ + Typed2718, eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718}, eip4895::Withdrawals, - Typed2718, }; use alloy_primitives::{ + B256, Signature, TxHash, U256, bytes::{Buf, BytesMut}, - keccak256, Signature, TxHash, B256, U256, + keccak256, }; use alloy_rlp::{Decodable, Error as RlpError, RlpDecodable}; use derive_more::{AsRef, Deref}; @@ -287,7 +288,7 @@ impl Decodable2718 for OvmTransactionSigned { mod tests { use crate::ovm_file_codec::OvmTransactionSigned; use alloy_consensus::Typed2718; - use alloy_primitives::{address, b256, hex, TxKind, U256}; + use alloy_primitives::{TxKind, U256, address, b256, hex}; use op_alloy_consensus::OpTypedTransaction; const DEPOSIT_FUNCTION_SELECTOR: [u8; 4] = [0xb6, 0xb5, 0x5f, 0x25]; use alloy_rlp::Decodable; diff --git a/op-reth/crates/cli/src/receipt_file_codec.rs b/rust/op-reth/crates/cli/src/receipt_file_codec.rs similarity index 98% rename from op-reth/crates/cli/src/receipt_file_codec.rs rename to rust/op-reth/crates/cli/src/receipt_file_codec.rs index e12af039eac..8cbb89abe09 100644 --- a/op-reth/crates/cli/src/receipt_file_codec.rs +++ b/rust/op-reth/crates/cli/src/receipt_file_codec.rs @@ -2,8 +2,8 @@ use alloy_consensus::Receipt; use alloy_primitives::{ + Address, B256, Bloom, Bytes, Log, bytes::{Buf, BytesMut}, - Address, Bloom, Bytes, Log, B256, }; use 
alloy_rlp::{Decodable, RlpDecodable}; use op_alloy_consensus::{OpDepositReceipt, OpTxType}; @@ -43,7 +43,7 @@ where fn decode(&mut self, src: &mut BytesMut) -> Result, Self::Error> { if src.is_empty() { - return Ok(None) + return Ok(None); } let buf_slice = &mut src.as_ref(); @@ -121,7 +121,7 @@ impl TryFrom for OpReceipt { #[cfg(test)] pub(crate) mod test { use alloy_consensus::{Receipt, TxReceipt}; - use alloy_primitives::{address, b256, hex, LogData}; + use alloy_primitives::{LogData, address, b256, hex}; use super::*; @@ -149,7 +149,9 @@ pub(crate) mod test { "00000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000400000000000100000000000000200000000002000000000000001000000000000000000004000000000000000000000000000040000400000100400000000000000100000000000000000000000000000020000000000000000000000000000000000000000000000001000000000000000000000100000000000000000000000000000000000000000000000000000000000000088000000080000000000010000000000000000000000000000800008000120000000000000000000000000000000002000" )), logs: receipt.receipt.into_logs(), - tx_hash: b256!("0x5e77a04531c7c107af1882d76cbff9486d0a9aa53701c30888509d4f5f2b003a"), contract_address: Address::ZERO, gas_used: 202813, + tx_hash: b256!("0x5e77a04531c7c107af1882d76cbff9486d0a9aa53701c30888509d4f5f2b003a"), + contract_address: Address::ZERO, + gas_used: 202813, block_hash: b256!("0xbee7192e575af30420cae0c7776304ac196077ee72b048970549e4f08e875453"), block_number: receipt.number, transaction_index: 0, diff --git a/rust/op-reth/crates/consensus/Cargo.toml b/rust/op-reth/crates/consensus/Cargo.toml new file mode 100644 index 00000000000..717620cc0a1 --- /dev/null +++ b/rust/op-reth/crates/consensus/Cargo.toml @@ -0,0 +1,76 @@ +[package] +name = "reth-optimism-consensus" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = 
"https://github.com/paradigmxyz/reth" +exclude = [".github/"] + +[lints] +workspace = true + +[dependencies] +# reth +reth-execution-types.workspace = true +reth-chainspec.workspace = true +reth-consensus-common.workspace = true +reth-consensus.workspace = true +reth-primitives-traits.workspace = true +reth-storage-api.workspace = true +reth-storage-errors.workspace = true +reth-trie-common.workspace = true + +# op-reth +reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true + +# ethereum +alloy-eips.workspace = true +alloy-primitives.workspace = true +alloy-consensus.workspace = true +alloy-trie.workspace = true +revm.workspace = true + +# misc +tracing.workspace = true +thiserror.workspace = true +reth-optimism-chainspec.workspace = true + +[dev-dependencies] +reth-provider = { workspace = true, features = ["test-utils"] } +reth-db-common.workspace = true +reth-revm.workspace = true +reth-trie.workspace = true +reth-optimism-node.workspace = true + +alloy-chains.workspace = true + +op-alloy-consensus.workspace = true + +[features] +default = ["std"] +std = [ + "reth-chainspec/std", + "reth-consensus/std", + "reth-consensus-common/std", + "reth-primitives-traits/std", + "reth-optimism-forks/std", + "reth-optimism-chainspec/std", + "reth-optimism-primitives/std", + "reth-storage-api/std", + "reth-storage-errors/std", + "reth-trie-common/std", + "alloy-chains/std", + "alloy-eips/std", + "alloy-primitives/std", + "alloy-consensus/std", + "alloy-trie/std", + "reth-revm/std", + "revm/std", + "tracing/std", + "thiserror/std", + "reth-execution-types/std", + "op-alloy-consensus/std", +] diff --git a/op-reth/crates/consensus/src/error.rs b/rust/op-reth/crates/consensus/src/error.rs similarity index 100% rename from op-reth/crates/consensus/src/error.rs rename to rust/op-reth/crates/consensus/src/error.rs diff --git a/rust/op-reth/crates/consensus/src/lib.rs b/rust/op-reth/crates/consensus/src/lib.rs new file mode 100644 index 
00000000000..7cf199f3099 --- /dev/null +++ b/rust/op-reth/crates/consensus/src/lib.rs @@ -0,0 +1,789 @@ +//! Optimism Consensus implementation. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +extern crate alloc; + +use alloc::{format, sync::Arc}; +use alloy_consensus::{ + BlockHeader as _, EMPTY_OMMER_ROOT_HASH, constants::MAXIMUM_EXTRA_DATA_SIZE, +}; +use alloy_primitives::B64; +use core::fmt::Debug; +use reth_chainspec::EthChainSpec; +use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator, ReceiptRootBloom}; +use reth_consensus_common::validation::{ + validate_against_parent_eip1559_base_fee, validate_against_parent_hash_number, + validate_against_parent_timestamp, validate_cancun_gas, validate_header_base_fee, + validate_header_extra_data, validate_header_gas, +}; +use reth_execution_types::BlockExecutionResult; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::DepositReceipt; +use reth_primitives_traits::{ + Block, BlockBody, BlockHeader, GotExpected, NodePrimitives, RecoveredBlock, SealedBlock, + SealedHeader, +}; + +mod proof; +pub use proof::calculate_receipt_root_no_memo_optimism; + +pub mod validation; +pub use validation::{canyon, isthmus, validate_block_post_execution}; + +pub mod error; +pub use error::OpConsensusError; + +/// Optimism consensus implementation. +/// +/// Provides basic checks as outlined in the execution specs. 
+#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpBeaconConsensus { + /// Configuration + chain_spec: Arc, + /// Maximum allowed extra data size in bytes + max_extra_data_size: usize, +} + +impl OpBeaconConsensus { + /// Create a new instance of [`OpBeaconConsensus`] + pub const fn new(chain_spec: Arc) -> Self { + Self { chain_spec, max_extra_data_size: MAXIMUM_EXTRA_DATA_SIZE } + } + + /// Returns the maximum allowed extra data size. + pub const fn max_extra_data_size(&self) -> usize { + self.max_extra_data_size + } + + /// Sets the maximum allowed extra data size and returns the updated instance. + pub const fn with_max_extra_data_size(mut self, size: usize) -> Self { + self.max_extra_data_size = size; + self + } +} + +impl FullConsensus for OpBeaconConsensus +where + N: NodePrimitives, + ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, +{ + fn validate_block_post_execution( + &self, + block: &RecoveredBlock, + result: &BlockExecutionResult, + receipt_root_bloom: Option, + ) -> Result<(), ConsensusError> { + validate_block_post_execution(block.header(), &self.chain_spec, result, receipt_root_bloom) + } +} + +impl Consensus for OpBeaconConsensus +where + B: Block, + ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, +{ + fn validate_body_against_header( + &self, + body: &B::Body, + header: &SealedHeader, + ) -> Result<(), ConsensusError> { + validation::validate_body_against_header_op(&self.chain_spec, body, header.header()) + } + + fn validate_block_pre_execution(&self, block: &SealedBlock) -> Result<(), ConsensusError> { + // Check ommers hash + let ommers_hash = block.body().calculate_ommers_root(); + if Some(block.ommers_hash()) != ommers_hash { + return Err(ConsensusError::BodyOmmersHashDiff( + GotExpected { + got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH), + expected: block.ommers_hash(), + } + .into(), + )); + } + + // Check transaction root + if let Err(error) = block.ensure_transaction_root_valid() { + return Err(ConsensusError::BodyTransactionRootDiff(error.into())); + } + + // Check empty shanghai-withdrawals + if self.chain_spec.is_canyon_active_at_timestamp(block.timestamp()) { + canyon::ensure_empty_shanghai_withdrawals(block.body()).map_err(|err| { + ConsensusError::Other(format!("failed to verify block {}: {err}", block.number())) + })? + } else { + return Ok(()); + } + + // Blob gas used validation + // In Jovian, the blob gas used computation has changed. We are moving the blob base fee + // validation to post-execution since the DA footprint calculation is stateful. + // Pre-execution we only validate that the blob gas used is present in the header. 
+ if self.chain_spec.is_jovian_active_at_timestamp(block.timestamp()) { + block.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; + } else if self.chain_spec.is_ecotone_active_at_timestamp(block.timestamp()) { + validate_cancun_gas(block)?; + } + + // Check withdrawals root field in header + if self.chain_spec.is_isthmus_active_at_timestamp(block.timestamp()) { + // storage root of withdrawals pre-deploy is verified post-execution + isthmus::ensure_withdrawals_storage_root_is_some(block.header()).map_err(|err| { + ConsensusError::Other(format!("failed to verify block {}: {err}", block.number())) + })? + } else { + // canyon is active, else would have returned already + canyon::ensure_empty_withdrawals_root(block.header())? + } + + Ok(()) + } +} + +impl HeaderValidator for OpBeaconConsensus +where + H: BlockHeader, + ChainSpec: EthChainSpec
+ OpHardforks + Debug + Send + Sync, +{ + fn validate_header(&self, header: &SealedHeader) -> Result<(), ConsensusError> { + let header = header.header(); + // with OP-stack Bedrock activation number determines when TTD (eth Merge) has been reached. + debug_assert!( + self.chain_spec.is_bedrock_active_at_block(header.number()), + "manually import OVM blocks" + ); + + if header.nonce() != Some(B64::ZERO) { + return Err(ConsensusError::TheMergeNonceIsNotZero); + } + + if header.ommers_hash() != EMPTY_OMMER_ROOT_HASH { + return Err(ConsensusError::TheMergeOmmerRootIsNotEmpty); + } + + // Post-merge, the consensus layer is expected to perform checks such that the block + // timestamp is a function of the slot. This is different from pre-merge, where blocks + // are only allowed to be in the future (compared to the system's clock) by a certain + // threshold. + // + // Block validation with respect to the parent should ensure that the block timestamp + // is greater than its parent timestamp. + + // validate header extra data for all networks post merge + validate_header_extra_data(header, self.max_extra_data_size)?; + validate_header_gas(header)?; + validate_header_base_fee(header, &self.chain_spec) + } + + fn validate_header_against_parent( + &self, + header: &SealedHeader, + parent: &SealedHeader, + ) -> Result<(), ConsensusError> { + validate_against_parent_hash_number(header.header(), parent)?; + + if self.chain_spec.is_bedrock_active_at_block(header.number()) { + validate_against_parent_timestamp(header.header(), parent.header())?; + } + + validate_against_parent_eip1559_base_fee( + header.header(), + parent.header(), + &self.chain_spec, + )?; + + // Ensure that the blob gas fields for this block are correctly set. + // In the op-stack, the excess blob gas is always 0 for all blocks after ecotone. + // The blob gas used and the excess blob gas should both be set after ecotone. + // After Jovian, the blob gas used contains the current DA footprint. 
+ if self.chain_spec.is_ecotone_active_at_timestamp(header.timestamp()) { + let blob_gas_used = header.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; + + // Before Jovian and after ecotone, the blob gas used should be 0. + if !self.chain_spec.is_jovian_active_at_timestamp(header.timestamp()) && + blob_gas_used != 0 + { + return Err(ConsensusError::BlobGasUsedDiff(GotExpected { + got: blob_gas_used, + expected: 0, + })); + } + + let excess_blob_gas = + header.excess_blob_gas().ok_or(ConsensusError::ExcessBlobGasMissing)?; + if excess_blob_gas != 0 { + return Err(ConsensusError::ExcessBlobGasDiff { + diff: GotExpected { got: excess_blob_gas, expected: 0 }, + parent_excess_blob_gas: parent.excess_blob_gas().unwrap_or(0), + parent_blob_gas_used: parent.blob_gas_used().unwrap_or(0), + }); + } + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use alloy_consensus::{BlockBody, Eip658Value, Header, Receipt, TxEip7702, TxReceipt}; + use alloy_eips::{eip4895::Withdrawals, eip7685::Requests}; + use alloy_primitives::{Address, Bytes, Log, Signature, U256}; + use op_alloy_consensus::{ + OpTypedTransaction, encode_holocene_extra_data, encode_jovian_extra_data, + }; + use reth_chainspec::BaseFeeParams; + use reth_consensus::{Consensus, ConsensusError, FullConsensus, HeaderValidator}; + use reth_optimism_chainspec::{OP_MAINNET, OpChainSpec, OpChainSpecBuilder}; + use reth_optimism_primitives::{OpPrimitives, OpReceipt, OpTransactionSigned}; + use reth_primitives_traits::{RecoveredBlock, SealedBlock, SealedHeader, proofs}; + use reth_provider::BlockExecutionResult; + + use crate::OpBeaconConsensus; + + fn mock_tx(nonce: u64) -> OpTransactionSigned { + let tx = TxEip7702 { + chain_id: 1u64, + nonce, + max_fee_per_gas: 0x28f000fff, + max_priority_fee_per_gas: 0x28f000fff, + gas_limit: 10, + to: Address::default(), + value: U256::from(3_u64), + input: Bytes::from(vec![1, 2]), + access_list: Default::default(), + authorization_list: 
Default::default(), + }; + + let signature = Signature::new(U256::default(), U256::default(), true); + + OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature) + } + + #[test] + fn test_block_blob_gas_used_validation_isthmus() { + let chain_spec = OpChainSpecBuilder::default() + .isthmus_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let header = Header { + base_fee_per_gas: Some(1337), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + timestamp: u64::MAX, + ..Default::default() + }; + let body = BlockBody { + transactions: vec![transaction], + ommers: vec![], + withdrawals: Some(Withdrawals::default()), + }; + + let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); + + // validate blob, it should pass blob gas used validation + let pre_execution = beacon_consensus.validate_block_pre_execution(&block); + + assert!(pre_execution.is_ok()); + } + + #[test] + fn test_block_blob_gas_used_validation_failure_isthmus() { + let chain_spec = OpChainSpecBuilder::default() + .isthmus_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let header = Header { + base_fee_per_gas: Some(1337), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(10), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + timestamp: u64::MAX, + ..Default::default() + }; + let body = BlockBody { + transactions: vec![transaction], + ommers: vec![], + withdrawals: Some(Withdrawals::default()), + }; + + 
let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); + + // validate blob, it should fail blob gas used validation + let pre_execution = beacon_consensus.validate_block_pre_execution(&block); + + assert!(matches!( + pre_execution.unwrap_err(), + ConsensusError::BlobGasUsedDiff(diff) if diff.got == 10 && diff.expected == 0 + )); + } + + #[test] + fn test_block_blob_gas_used_validation_jovian() { + const BLOB_GAS_USED: u64 = 1000; + const GAS_USED: u64 = 10; + + let chain_spec = OpChainSpecBuilder::default() + .jovian_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: GAS_USED, + logs: vec![], + }); + + let header = Header { + base_fee_per_gas: Some(1337), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(BLOB_GAS_USED), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + timestamp: u64::MAX, + gas_used: GAS_USED, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + ..Default::default() + }; + let body = BlockBody { + transactions: vec![transaction], + ommers: vec![], + withdrawals: Some(Withdrawals::default()), + }; + + let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); + + let result = BlockExecutionResult:: { + blob_gas_used: BLOB_GAS_USED, + receipts: vec![receipt], + requests: Requests::default(), + gas_used: GAS_USED, + }; + + // validate blob, it should pass blob gas used validation + let pre_execution = beacon_consensus.validate_block_pre_execution(&block); + + assert!(pre_execution.is_ok()); + + let block = RecoveredBlock::new_sealed(block, vec![Address::default()]); + + let 
post_execution = as FullConsensus>::validate_block_post_execution( + &beacon_consensus, + &block, + &result, + None, + ); + + // validate blob, it should pass blob gas used validation + assert!(post_execution.is_ok()); + } + + #[test] + fn test_block_blob_gas_used_validation_failure_jovian() { + const BLOB_GAS_USED: u64 = 1000; + const GAS_USED: u64 = 10; + + let chain_spec = OpChainSpecBuilder::default() + .jovian_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: GAS_USED, + logs: vec![], + }); + + let header = Header { + base_fee_per_gas: Some(1337), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(BLOB_GAS_USED), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: GAS_USED, + timestamp: u64::MAX, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + ..Default::default() + }; + let body = BlockBody { + transactions: vec![transaction], + ommers: vec![], + withdrawals: Some(Withdrawals::default()), + }; + + let block = SealedBlock::seal_slow(alloy_consensus::Block { header, body }); + + let result = BlockExecutionResult:: { + blob_gas_used: BLOB_GAS_USED + 1, + receipts: vec![receipt], + requests: Requests::default(), + gas_used: GAS_USED, + }; + + // validate blob, it should pass blob gas used validation + let pre_execution = beacon_consensus.validate_block_pre_execution(&block); + + assert!(pre_execution.is_ok()); + + let block = RecoveredBlock::new_sealed(block, vec![Address::default()]); + + let post_execution = as FullConsensus>::validate_block_post_execution( + &beacon_consensus, + &block, + &result, + None, + ); 
+ + // validate blob, it should fail blob gas used validation post execution. + assert!(matches!( + post_execution.unwrap_err(), + ConsensusError::BlobGasUsedDiff(diff) + if diff.got == BLOB_GAS_USED + 1 && diff.expected == BLOB_GAS_USED + )); + } + + #[test] + fn test_header_min_base_fee_validation() { + const MIN_BASE_FEE: u64 = 1000; + + let chain_spec = OpChainSpecBuilder::default() + .jovian_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: 0, + logs: vec![], + }); + + let parent = Header { + number: 0, + base_fee_per_gas: Some(MIN_BASE_FEE / 10), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(0), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX - 1, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + extra_data: encode_jovian_extra_data( + Default::default(), + BaseFeeParams::optimism(), + MIN_BASE_FEE, + ) + .unwrap(), + ..Default::default() + }; + let parent = SealedHeader::seal_slow(parent); + + let header = Header { + number: 1, + base_fee_per_gas: Some(MIN_BASE_FEE), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(0), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + parent_hash: parent.hash(), + ..Default::default() + }; + let header = 
SealedHeader::seal_slow(header); + + let result = beacon_consensus.validate_header_against_parent(&header, &parent); + + assert!(result.is_ok()); + } + + #[test] + fn test_header_min_base_fee_validation_failure() { + const MIN_BASE_FEE: u64 = 1000; + + let chain_spec = OpChainSpecBuilder::default() + .jovian_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: 0, + logs: vec![], + }); + + let parent = Header { + number: 0, + base_fee_per_gas: Some(MIN_BASE_FEE / 10), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(0), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX - 1, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + extra_data: encode_jovian_extra_data( + Default::default(), + BaseFeeParams::optimism(), + MIN_BASE_FEE, + ) + .unwrap(), + ..Default::default() + }; + let parent = SealedHeader::seal_slow(parent); + + let header = Header { + number: 1, + base_fee_per_gas: Some(MIN_BASE_FEE - 1), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(0), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + parent_hash: parent.hash(), + ..Default::default() + }; + let header = SealedHeader::seal_slow(header); + + let result = beacon_consensus.validate_header_against_parent(&header, 
&parent); + + assert!(matches!( + result.unwrap_err(), + ConsensusError::BaseFeeDiff(diff) + if diff.got == MIN_BASE_FEE - 1 && diff.expected == MIN_BASE_FEE + )); + } + + #[test] + fn test_header_da_footprint_validation() { + const MIN_BASE_FEE: u64 = 100_000; + const DA_FOOTPRINT: u64 = GAS_LIMIT - 1; + const GAS_LIMIT: u64 = 100_000_000; + + let chain_spec = OpChainSpecBuilder::default() + .jovian_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: 0, + logs: vec![], + }); + + let parent = Header { + number: 0, + base_fee_per_gas: Some(MIN_BASE_FEE), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(DA_FOOTPRINT), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX - 1, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + extra_data: encode_jovian_extra_data( + Default::default(), + BaseFeeParams::optimism(), + MIN_BASE_FEE, + ) + .unwrap(), + gas_limit: GAS_LIMIT, + ..Default::default() + }; + let parent = SealedHeader::seal_slow(parent); + + let header = Header { + number: 1, + base_fee_per_gas: Some(MIN_BASE_FEE + MIN_BASE_FEE / 10), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(DA_FOOTPRINT), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + parent_hash: parent.hash(), + 
..Default::default() + }; + let header = SealedHeader::seal_slow(header); + + let result = beacon_consensus.validate_header_against_parent(&header, &parent); + + assert!(result.is_ok()); + } + + #[test] + fn test_header_isthmus_validation() { + const MIN_BASE_FEE: u64 = 100_000; + const DA_FOOTPRINT: u64 = GAS_LIMIT - 1; + const GAS_LIMIT: u64 = 100_000_000; + + let chain_spec = OpChainSpecBuilder::default() + .isthmus_activated() + .genesis(OP_MAINNET.genesis.clone()) + .chain(OP_MAINNET.chain) + .build(); + + // create a tx + let transaction = mock_tx(0); + + let beacon_consensus = OpBeaconConsensus::new(Arc::new(chain_spec)); + + let receipt = OpReceipt::Eip7702(Receipt:: { + status: Eip658Value::success(), + cumulative_gas_used: 0, + logs: vec![], + }); + + let parent = Header { + number: 0, + base_fee_per_gas: Some(MIN_BASE_FEE), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(DA_FOOTPRINT), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX - 1, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + extra_data: encode_holocene_extra_data(Default::default(), BaseFeeParams::optimism()) + .unwrap(), + gas_limit: GAS_LIMIT, + ..Default::default() + }; + let parent = SealedHeader::seal_slow(parent); + + let header = Header { + number: 1, + base_fee_per_gas: Some(MIN_BASE_FEE - 2 * MIN_BASE_FEE / 100), + withdrawals_root: Some(proofs::calculate_withdrawals_root(&[])), + blob_gas_used: Some(DA_FOOTPRINT), + excess_blob_gas: Some(0), + transactions_root: proofs::calculate_transaction_root(std::slice::from_ref( + &transaction, + )), + gas_used: 0, + timestamp: u64::MAX, + receipts_root: proofs::calculate_receipt_root(std::slice::from_ref( + &receipt.with_bloom_ref(), + )), + logs_bloom: receipt.bloom(), + parent_hash: parent.hash(), + 
..Default::default() + }; + let header = SealedHeader::seal_slow(header); + + let result = beacon_consensus.validate_header_against_parent(&header, &parent); + + assert!(matches!( + result.unwrap_err(), + ConsensusError::BlobGasUsedDiff(diff) + if diff.got == DA_FOOTPRINT && diff.expected == 0 + )); + } +} diff --git a/rust/op-reth/crates/consensus/src/proof.rs b/rust/op-reth/crates/consensus/src/proof.rs new file mode 100644 index 00000000000..196463a6948 --- /dev/null +++ b/rust/op-reth/crates/consensus/src/proof.rs @@ -0,0 +1,493 @@ +//! Helper function for Receipt root calculation for Optimism hardforks. + +use alloc::vec::Vec; +use alloy_consensus::ReceiptWithBloom; +use alloy_eips::eip2718::Encodable2718; +use alloy_primitives::B256; +use alloy_trie::root::ordered_trie_root_with_encoder; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::DepositReceipt; + +/// Calculates the receipt root for a header. +pub(crate) fn calculate_receipt_root_optimism( + receipts: &[ReceiptWithBloom<&R>], + chain_spec: impl OpHardforks, + timestamp: u64, +) -> B256 { + // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork, + // the receipt root calculation does not include the deposit nonce in the receipt + // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the + // receipts before calculating the receipt root. This was corrected in the Canyon + // hardfork. 
+ if chain_spec.is_regolith_active_at_timestamp(timestamp) && + !chain_spec.is_canyon_active_at_timestamp(timestamp) + { + let receipts = receipts + .iter() + .map(|receipt| { + let mut receipt = receipt.clone().map_receipt(|r| r.clone()); + if let Some(receipt) = receipt.receipt.as_deposit_receipt_mut() { + receipt.deposit_nonce = None; + } + receipt + }) + .collect::>(); + + return ordered_trie_root_with_encoder(receipts.as_slice(), |r, buf| r.encode_2718(buf)); + } + + ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_2718(buf)) +} + +/// Calculates the receipt root for a header for the reference type of an OP receipt. +/// +/// NOTE: Prefer calculate receipt root optimism if you have log blooms memoized. +pub fn calculate_receipt_root_no_memo_optimism( + receipts: &[R], + chain_spec: impl OpHardforks, + timestamp: u64, +) -> B256 { + // There is a minor bug in op-geth and op-erigon where in the Regolith hardfork, + // the receipt root calculation does not include the deposit nonce in the receipt + // encoding. In the Regolith Hardfork, we must strip the deposit nonce from the + // receipts before calculating the receipt root. This was corrected in the Canyon + // hardfork. 
+ if chain_spec.is_regolith_active_at_timestamp(timestamp) && + !chain_spec.is_canyon_active_at_timestamp(timestamp) + { + let receipts = receipts + .iter() + .map(|r| { + let mut r = (*r).clone(); + if let Some(receipt) = r.as_deposit_receipt_mut() { + receipt.deposit_nonce = None; + } + r + }) + .collect::>(); + + return ordered_trie_root_with_encoder(&receipts, |r, buf| { + r.with_bloom_ref().encode_2718(buf); + }); + } + + ordered_trie_root_with_encoder(receipts, |r, buf| { + r.with_bloom_ref().encode_2718(buf); + }) +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::{Receipt, ReceiptWithBloom, TxReceipt}; + use alloy_primitives::{Address, Bytes, Log, LogData, b256, bloom, hex}; + use op_alloy_consensus::OpDepositReceipt; + use reth_optimism_chainspec::BASE_SEPOLIA; + use reth_optimism_primitives::OpReceipt; + + /// Tests that the receipt root is computed correctly for the regolith block. + /// This was implemented due to a minor bug in op-geth and op-erigon where in + /// the Regolith hardfork, the receipt root calculation does not include the + /// deposit nonce in the receipt encoding. + /// To fix this an op-reth patch was applied to the receipt root calculation + /// to strip the deposit nonce from each receipt before calculating the root. + #[test] + fn check_optimism_receipt_root() { + let cases = [ + // Deposit nonces didn't exist in Bedrock; No need to strip. For the purposes of this + // test, we do have them, so we should get the same root as Canyon. + ( + "bedrock", + 1679079599, + b256!("0xe255fed45eae7ede0556fe4fabc77b0d294d18781a5a581cab09127bc4cd9ffb"), + ), + // Deposit nonces introduced in Regolith. They weren't included in the receipt RLP, + // so we need to strip them - the receipt root will differ. + ( + "regolith", + 1679079600, + b256!("0xe255fed45eae7ede0556fe4fabc77b0d294d18781a5a581cab09127bc4cd9ffb"), + ), + // Receipt root hashing bug fixed in Canyon. 
Back to including the deposit nonce + // in the receipt RLP when computing the receipt root. + ( + "canyon", + 1699981200, + b256!("0x6eefbb5efb95235476654a8bfbf8cb64a4f5f0b0c80b700b0c5964550beee6d7"), + ), + ]; + + for case in cases { + let receipts = [ + // 0xb0d6ee650637911394396d81172bd1c637d568ed1fbddab0daddfca399c58b53 + OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt { + status: true.into(), + cumulative_gas_used: 46913, + logs: vec![], + }, + deposit_nonce: Some(4012991u64), + deposit_receipt_version: None, + }), + // 0x2f433586bae30573c393adfa02bc81d2a1888a3d6c9869f473fb57245166bd9a + OpReceipt::Eip1559(Receipt { + status: true.into(), + cumulative_gas_used: 118083, + logs: vec![ + Log { + address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + b256!( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001" + )), + ), + }, + Log { + address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + b256!( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001" + )), + ), + }, + Log { + address: 
hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0x0eb774bb9698a73583fe07b6972cf2dcc08d1d97581a22861f45feb86b395820" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + b256!( + "0x000000000000000000000000c498902843af527e674846bb7edefa8ad62b8fb9" + ), + ], + Bytes::from_static(&hex!( + "0000000000000000000000000000000000000000000000000000000000000003" + )), + ), + }, + ], + }), + // 0x6c33676e8f6077f46a62eabab70bc6d1b1b18a624b0739086d77093a1ecf8266 + OpReceipt::Eip1559(Receipt { + status: true.into(), + cumulative_gas_used: 189253, + logs: vec![ + Log { + address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + b256!( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000001" + )), + ), + }, + Log { + address: hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xc3d58168c5ae7397731d063d5bbf3d657854427343f4c083240f7aacaa2d0f62" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + b256!( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000001" + )), + ), + }, + Log { + address: 
hex!("ddb6dcce6b794415145eb5caa6cd335aeda9c272").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0x0eb774bb9698a73583fe07b6972cf2dcc08d1d97581a22861f45feb86b395820" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + b256!( + "0x0000000000000000000000009d521a04bee134ff8136d2ec957e5bc8c50394ec" + ), + ], + Bytes::from_static(&hex!( + "0000000000000000000000000000000000000000000000000000000000000003" + )), + ), + }, + ], + }), + // 0x4d3ecbef04ba7ce7f5ab55be0c61978ca97c117d7da448ed9771d4ff0c720a3f + OpReceipt::Eip1559(Receipt { + status: true.into(), + cumulative_gas_used: 346969, + logs: vec![ + Log { + address: hex!("4200000000000000000000000000000000000006").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" + ), + b256!( + "0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8" + ), + b256!( + "0x0000000000000000000000002992607c1614484fe6d865088e5c048f0650afd4" + ), + ], + Bytes::from_static(&hex!( + "0000000000000000000000000000000000000000000000000018de76816d8000" + )), + ), + }, + Log { + address: hex!("cf8e7e6b26f407dee615fc4db18bf829e7aa8c09").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" + ), + b256!( + "0x0000000000000000000000002992607c1614484fe6d865088e5c048f0650afd4" + ), + b256!( + "0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09" + ), + ], + Bytes::from_static(&hex!( + "000000000000000000000000000000000000000000000002d24d8e9ac1aa79e2" + )), + ), + }, + Log { + address: hex!("2992607c1614484fe6d865088e5c048f0650afd4").into(), + data: LogData::new_unchecked( + vec![b256!( + "0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1" + )], + Bytes::from_static(&hex!( + "000000000000000000000000000000000000000000000009bd50642785c15736000000000000000000000000000000000000000000011bb7ac324f724a29bbbf" + )), + 
), + }, + Log { + address: hex!("2992607c1614484fe6d865088e5c048f0650afd4").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822" + ), + b256!( + "0x00000000000000000000000029843613c7211d014f5dd5718cf32bcd314914cb" + ), + b256!( + "0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09" + ), + ], + Bytes::from_static(&hex!( + "0000000000000000000000000000000000000000000000000018de76816d800000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002d24d8e9ac1aa79e2" + )), + ), + }, + Log { + address: hex!("6d0f8d488b669aa9ba2d0f0b7b75a88bf5051cd3").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" + ), + b256!( + "0x0000000000000000000000008dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09" + ), + b256!( + "0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000014bc73062aea8093" + )), + ), + }, + Log { + address: hex!("8dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09").into(), + data: LogData::new_unchecked( + vec![b256!( + "0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1" + )], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000002f122cfadc1ca82a35000000000000000000000000000000000000000000000665879dc0609945d6d1" + )), + ), + }, + Log { + address: hex!("8dbffe4c8bf3caf5deae3a99b50cfcf3648cbc09").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822" + ), + b256!( + "0x00000000000000000000000029843613c7211d014f5dd5718cf32bcd314914cb" + ), + b256!( + "0x000000000000000000000000c3feb4ef4c2a5af77add15c95bd98f6b43640cc8" + ), + ], + Bytes::from_static(&hex!( + 
"0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002d24d8e9ac1aa79e200000000000000000000000000000000000000000000000014bc73062aea80930000000000000000000000000000000000000000000000000000000000000000" + )), + ), + }, + ], + }), + // 0xf738af5eb00ba23dbc1be2dbce41dbc0180f0085b7fb46646e90bf737af90351 + OpReceipt::Eip1559(Receipt { + status: true.into(), + cumulative_gas_used: 623249, + logs: vec![ + Log { + address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" + ), + b256!( + "0x0000000000000000000000000000000000000000000000000000000000000000" + ), + b256!( + "0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e" + ), + b256!( + "0x000000000000000000000000000000000000000000000000000000000011a1d3" + ), + ], + Default::default(), + ), + }, + Log { + address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0x9d89e36eadf856db0ad9ffb5a569e07f95634dddd9501141ecf04820484ad0dc" + ), + b256!( + "0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e" + ), + b256!( + "0x000000000000000000000000000000000000000000000000000000000011a1d3" + ), + ], + Bytes::from_static(&hex!( + "00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d515141646b33736538396b47716577395256567a316b68643548375562476d4d4a485a62566f386a6d346f4a2f30000000000000000000" + )), + ), + }, + Log { + address: hex!("ac6564f3718837caadd42eed742d75c12b90a052").into(), + data: LogData::new_unchecked( + vec![ + b256!( + "0x110d160a1bedeea919a88fbc4b2a9fb61b7e664084391b6ca2740db66fef80fe" + ), + b256!( + "0x00000000000000000000000084d47f6eea8f8d87910448325519d1bb45c2972a" + ), + b256!( + "0x000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e" + 
), + b256!( + "0x000000000000000000000000000000000000000000000000000000000011a1d3" + ), + ], + Bytes::from_static(&hex!( + "0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000a4fa7f3fbf0677f254ebdb1646146864c305b76e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001400000000000000000000000000000000000000000000000000000000000000000000000000000000000000000eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007717500762343034303661353035646234633961386163316433306335633332303265370000000000000000000000000000000000000000000000000000000000000037697066733a2f2f516d515141646b33736538396b47716577395256567a316b68643548375562476d4d4a485a62566f386a6d346f4a2f30000000000000000000" + )), + ), + }, + ], + }), + ]; + let root = calculate_receipt_root_optimism( + &receipts.iter().map(TxReceipt::with_bloom_ref).collect::>(), + BASE_SEPOLIA.as_ref(), + case.1, + ); + assert_eq!(root, case.2); + } + } + + #[test] + fn check_receipt_root_optimism() { + let logs = vec![Log { + address: Address::ZERO, + data: LogData::new_unchecked(vec![], Default::default()), + }]; + let logs_bloom = bloom!( + "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001" + ); + let inner = + OpReceipt::Eip2930(Receipt { 
status: true.into(), cumulative_gas_used: 102068, logs }); + let receipt = ReceiptWithBloom { receipt: &inner, logs_bloom }; + let receipt = vec![receipt]; + let root = calculate_receipt_root_optimism(&receipt, BASE_SEPOLIA.as_ref(), 0); + assert_eq!( + root, + b256!("0xfe70ae4a136d98944951b2123859698d59ad251a381abc9960fa81cae3d0d4a0") + ); + } +} diff --git a/rust/op-reth/crates/consensus/src/validation/canyon.rs b/rust/op-reth/crates/consensus/src/validation/canyon.rs new file mode 100644 index 00000000000..432b121a48f --- /dev/null +++ b/rust/op-reth/crates/consensus/src/validation/canyon.rs @@ -0,0 +1,41 @@ +//! Canyon consensus rule checks. + +use alloy_consensus::BlockHeader; +use alloy_trie::EMPTY_ROOT_HASH; +use reth_consensus::ConsensusError; +use reth_primitives_traits::{BlockBody, GotExpected}; + +use crate::OpConsensusError; + +/// Verifies that withdrawals root in block header (Shanghai) is always [`EMPTY_ROOT_HASH`] in +/// Canyon. +#[inline] +pub fn ensure_empty_withdrawals_root(header: &H) -> Result<(), ConsensusError> { + // Shanghai rule + let header_withdrawals_root = + &header.withdrawals_root().ok_or(ConsensusError::WithdrawalsRootMissing)?; + + // Canyon rules + if *header_withdrawals_root != EMPTY_ROOT_HASH { + return Err(ConsensusError::BodyWithdrawalsRootDiff( + GotExpected { got: *header_withdrawals_root, expected: EMPTY_ROOT_HASH }.into(), + )); + } + + Ok(()) +} + +/// Verifies that withdrawals in block body (Shanghai) is always empty in Canyon. 
+/// +#[inline] +pub fn ensure_empty_shanghai_withdrawals(body: &T) -> Result<(), OpConsensusError> { + // Shanghai rule + let withdrawals = body.withdrawals().ok_or(ConsensusError::BodyWithdrawalsMissing)?; + + // Canyon rule + if !withdrawals.as_ref().is_empty() { + return Err(OpConsensusError::WithdrawalsNonEmpty); + } + + Ok(()) +} diff --git a/rust/op-reth/crates/consensus/src/validation/isthmus.rs b/rust/op-reth/crates/consensus/src/validation/isthmus.rs new file mode 100644 index 00000000000..cf9309b9268 --- /dev/null +++ b/rust/op-reth/crates/consensus/src/validation/isthmus.rs @@ -0,0 +1,195 @@ +//! Block verification w.r.t. consensus rules new in Isthmus hardfork. + +use crate::OpConsensusError; +use alloy_consensus::BlockHeader; +use alloy_primitives::B256; +use alloy_trie::EMPTY_ROOT_HASH; +use reth_optimism_primitives::L2_TO_L1_MESSAGE_PASSER_ADDRESS; +use reth_storage_api::{StorageRootProvider, errors::ProviderResult}; +use reth_trie_common::HashedStorage; +use revm::database::BundleState; +use tracing::warn; + +/// Verifies that `withdrawals_root` (i.e. `l2tol1-msg-passer` storage root since Isthmus) field is +/// set in block header. +pub fn ensure_withdrawals_storage_root_is_some( + header: H, +) -> Result<(), OpConsensusError> { + header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; + + Ok(()) +} + +/// Computes the storage root of predeploy `L2ToL1MessagePasser.sol`. +/// +/// Uses state updates from block execution. See also [`withdrawals_root_prehashed`]. 
+pub fn withdrawals_root( + state_updates: &BundleState, + state: DB, +) -> ProviderResult { + // if l2 withdrawals transactions were executed there will be storage updates for + // `L2ToL1MessagePasser.sol` predeploy + withdrawals_root_prehashed( + state_updates + .state() + .get(&L2_TO_L1_MESSAGE_PASSER_ADDRESS) + .map(|acc| { + HashedStorage::from_plain_storage( + acc.status, + acc.storage.iter().map(|(slot, value)| (slot, &value.present_value)), + ) + }) + .unwrap_or_default(), + state, + ) +} + +/// Computes the storage root of predeploy `L2ToL1MessagePasser.sol`. +/// +/// Uses pre-hashed storage updates of `L2ToL1MessagePasser.sol` predeploy, resulting from +/// execution of L2 withdrawals transactions. If none, takes empty [`HashedStorage::default`]. +pub fn withdrawals_root_prehashed( + hashed_storage_updates: HashedStorage, + state: DB, +) -> ProviderResult { + state.storage_root(L2_TO_L1_MESSAGE_PASSER_ADDRESS, hashed_storage_updates) +} + +/// Verifies block header field `withdrawals_root` against storage root of +/// `L2ToL1MessagePasser.sol` predeploy post block execution. +/// +/// Takes state updates resulting from execution of block. +/// +/// See . 
+pub fn verify_withdrawals_root( + state_updates: &BundleState, + state: DB, + header: H, +) -> Result<(), OpConsensusError> +where + DB: StorageRootProvider, + H: BlockHeader, +{ + let header_storage_root = + header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; + + let storage_root = withdrawals_root(state_updates, state) + .map_err(OpConsensusError::L2WithdrawalsRootCalculationFail)?; + + if storage_root == EMPTY_ROOT_HASH { + // if there was no MessagePasser contract storage, something is wrong + // (it should at least store an implementation address and owner address) + warn!("isthmus: no storage root for L2ToL1MessagePasser contract"); + } + + if header_storage_root != storage_root { + return Err(OpConsensusError::L2WithdrawalsRootMismatch { + header: header_storage_root, + exec_res: storage_root, + }); + } + + Ok(()) +} + +/// Verifies block header field `withdrawals_root` against storage root of +/// `L2ToL1MessagePasser.sol` predeploy post block execution. +/// +/// Takes pre-hashed storage updates of `L2ToL1MessagePasser.sol` predeploy, resulting from +/// execution of block, if any. Otherwise takes empty [`HashedStorage::default`]. +/// +/// See . 
+pub fn verify_withdrawals_root_prehashed( + hashed_storage_updates: HashedStorage, + state: DB, + header: H, +) -> Result<(), OpConsensusError> +where + DB: StorageRootProvider, + H: BlockHeader, +{ + let header_storage_root = + header.withdrawals_root().ok_or(OpConsensusError::L2WithdrawalsRootMissing)?; + + let storage_root = withdrawals_root_prehashed(hashed_storage_updates, state) + .map_err(OpConsensusError::L2WithdrawalsRootCalculationFail)?; + + if header_storage_root != storage_root { + return Err(OpConsensusError::L2WithdrawalsRootMismatch { + header: header_storage_root, + exec_res: storage_root, + }); + } + + Ok(()) +} + +#[cfg(test)] +mod test { + use super::*; + use alloc::sync::Arc; + use alloy_chains::Chain; + use alloy_consensus::Header; + use alloy_primitives::{B256, U256, keccak256}; + use core::str::FromStr; + use reth_db_common::init::init_genesis; + use reth_optimism_chainspec::OpChainSpecBuilder; + use reth_optimism_node::OpNode; + use reth_provider::{ + StateWriter, providers::BlockchainProvider, + test_utils::create_test_provider_factory_with_node_types, + }; + use reth_revm::db::BundleState; + use reth_storage_api::StateProviderFactory; + use reth_trie::{HashedStorage, test_utils::storage_root_prehashed}; + use reth_trie_common::HashedPostState; + + #[test] + fn l2tol1_message_passer_no_withdrawals() { + let hashed_address = keccak256(L2_TO_L1_MESSAGE_PASSER_ADDRESS); + + // create account storage + let init_storage = HashedStorage::from_iter( + false, + [ + "50000000000000000000000000000004253371b55351a08cb3267d4d265530b6", + "512428ed685fff57294d1a9cbb147b18ae5db9cf6ae4b312fa1946ba0561882e", + "51e6784c736ef8548f856909870b38e49ef7a4e3e77e5e945e0d5e6fcaa3037f", + ] + .into_iter() + .map(|str| (B256::from_str(str).unwrap(), U256::from(1))), + ); + let mut state = HashedPostState::default(); + state.storages.insert(hashed_address, init_storage.clone()); + + // init test db + // note: must be empty (default) chain spec to ensure storage is 
empty after init genesis, + // otherwise can't use `storage_root_prehashed` to determine storage root later + let provider_factory = create_test_provider_factory_with_node_types::(Arc::new( + OpChainSpecBuilder::default().chain(Chain::dev()).genesis(Default::default()).build(), + )); + let _ = init_genesis(&provider_factory).unwrap(); + + // write account storage to database + let provider_rw = provider_factory.provider_rw().unwrap(); + provider_rw.write_hashed_state(&state.clone().into_sorted()).unwrap(); + provider_rw.commit().unwrap(); + + // create block header with withdrawals root set to storage root of l2tol1-msg-passer + let header = Header { + withdrawals_root: Some(storage_root_prehashed(init_storage.storage)), + ..Default::default() + }; + + // create state provider factory + let state_provider_factory = BlockchainProvider::new(provider_factory).unwrap(); + + // validate block against existing state by passing empty state updates + verify_withdrawals_root( + &BundleState::default(), + state_provider_factory.latest().expect("load state"), + &header, + ) + .unwrap(); + } +} diff --git a/rust/op-reth/crates/consensus/src/validation/mod.rs b/rust/op-reth/crates/consensus/src/validation/mod.rs new file mode 100644 index 00000000000..10861f5b9c0 --- /dev/null +++ b/rust/op-reth/crates/consensus/src/validation/mod.rs @@ -0,0 +1,588 @@ +//! Verification of blocks w.r.t. Optimism hardforks. 
+ +pub mod canyon; +pub mod isthmus; + +// Re-export the decode_holocene_base_fee function for compatibility +use reth_execution_types::BlockExecutionResult; +pub use reth_optimism_chainspec::decode_holocene_base_fee; + +use crate::proof::calculate_receipt_root_optimism; +use alloc::vec::Vec; +use alloy_consensus::{BlockHeader, EMPTY_OMMER_ROOT_HASH, TxReceipt}; +use alloy_eips::Encodable2718; +use alloy_primitives::{B256, Bloom, Bytes}; +use alloy_trie::EMPTY_ROOT_HASH; +use reth_consensus::ConsensusError; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::DepositReceipt; +use reth_primitives_traits::{BlockBody, GotExpected, receipt::gas_spent_by_transactions}; + +/// Ensures the block response data matches the header. +/// +/// This ensures the body response items match the header's hashes: +/// - ommer hash +/// - transaction root +/// - withdrawals root: the body's withdrawals root must only match the header's before isthmus +pub fn validate_body_against_header_op( + chain_spec: impl OpHardforks, + body: &B, + header: &H, +) -> Result<(), ConsensusError> +where + B: BlockBody, + H: reth_primitives_traits::BlockHeader, +{ + let ommers_hash = body.calculate_ommers_root(); + if Some(header.ommers_hash()) != ommers_hash { + return Err(ConsensusError::BodyOmmersHashDiff( + GotExpected { + got: ommers_hash.unwrap_or(EMPTY_OMMER_ROOT_HASH), + expected: header.ommers_hash(), + } + .into(), + )); + } + + let tx_root = body.calculate_tx_root(); + if header.transactions_root() != tx_root { + return Err(ConsensusError::BodyTransactionRootDiff( + GotExpected { got: tx_root, expected: header.transactions_root() }.into(), + )); + } + + match (header.withdrawals_root(), body.calculate_withdrawals_root()) { + (Some(header_withdrawals_root), Some(withdrawals_root)) => { + // after isthmus, the withdrawals root field is repurposed and no longer mirrors the + // withdrawals root computed from the body + if 
chain_spec.is_isthmus_active_at_timestamp(header.timestamp()) { + // After isthmus we only ensure that the body has empty withdrawals + if withdrawals_root != EMPTY_ROOT_HASH { + return Err(ConsensusError::BodyWithdrawalsRootDiff( + GotExpected { got: withdrawals_root, expected: EMPTY_ROOT_HASH }.into(), + )); + } + } else { + // before isthmus we ensure that the header root matches the body + if withdrawals_root != header_withdrawals_root { + return Err(ConsensusError::BodyWithdrawalsRootDiff( + GotExpected { got: withdrawals_root, expected: header_withdrawals_root } + .into(), + )); + } + } + } + (None, None) => { + // this is ok because we assume the fork is not active in this case + } + _ => return Err(ConsensusError::WithdrawalsRootUnexpected), + } + + Ok(()) +} + +/// Validate a block with regard to execution results: +/// +/// - Compares the receipts root in the block header to the block body +/// - Compares the gas used in the block header to the actual gas usage after execution +/// +/// If `receipt_root_bloom` is provided, the pre-computed receipt root and logs bloom are used +/// instead of computing them from the receipts. +pub fn validate_block_post_execution( + header: impl BlockHeader, + chain_spec: impl OpHardforks, + result: &BlockExecutionResult, + receipt_root_bloom: Option<(B256, Bloom)>, +) -> Result<(), ConsensusError> { + // Validate that the blob gas used is present and correctly computed if Jovian is active. 
+ if chain_spec.is_jovian_active_at_timestamp(header.timestamp()) { + let computed_blob_gas_used = result.blob_gas_used; + let header_blob_gas_used = + header.blob_gas_used().ok_or(ConsensusError::BlobGasUsedMissing)?; + + if computed_blob_gas_used != header_blob_gas_used { + return Err(ConsensusError::BlobGasUsedDiff(GotExpected { + got: computed_blob_gas_used, + expected: header_blob_gas_used, + })); + } + } + + let receipts = &result.receipts; + + // Before Byzantium, receipts contained state root that would mean that expensive + // operation as hashing that is required for state root got calculated in every + // transaction This was replaced with is_success flag. + // See more about EIP here: https://eips.ethereum.org/EIPS/eip-658 + if chain_spec.is_byzantium_active_at_block(header.number()) { + let result = if let Some((receipts_root, logs_bloom)) = receipt_root_bloom { + compare_receipts_root_and_logs_bloom( + receipts_root, + logs_bloom, + header.receipts_root(), + header.logs_bloom(), + ) + } else { + verify_receipts_optimism( + header.receipts_root(), + header.logs_bloom(), + receipts, + chain_spec, + header.timestamp(), + ) + }; + + if let Err(error) = result { + let receipts = receipts + .iter() + .map(|r| Bytes::from(r.with_bloom_ref().encoded_2718())) + .collect::>(); + tracing::debug!(%error, ?receipts, "receipts verification failed"); + return Err(error); + } + } + + // Check if gas used matches the value set in header. + let cumulative_gas_used = + receipts.last().map(|receipt| receipt.cumulative_gas_used()).unwrap_or(0); + if header.gas_used() != cumulative_gas_used { + return Err(ConsensusError::BlockGasUsed { + gas: GotExpected { got: cumulative_gas_used, expected: header.gas_used() }, + gas_spent_by_tx: gas_spent_by_transactions(receipts), + }); + } + + Ok(()) +} + +/// Verify the calculated receipts root against the expected receipts root. 
+fn verify_receipts_optimism( + expected_receipts_root: B256, + expected_logs_bloom: Bloom, + receipts: &[R], + chain_spec: impl OpHardforks, + timestamp: u64, +) -> Result<(), ConsensusError> { + // Calculate receipts root. + let receipts_with_bloom = receipts.iter().map(TxReceipt::with_bloom_ref).collect::>(); + let receipts_root = + calculate_receipt_root_optimism(&receipts_with_bloom, chain_spec, timestamp); + + // Calculate header logs bloom. + let logs_bloom = receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom_ref()); + + compare_receipts_root_and_logs_bloom( + receipts_root, + logs_bloom, + expected_receipts_root, + expected_logs_bloom, + )?; + + Ok(()) +} + +/// Compare the calculated receipts root with the expected receipts root, also compare +/// the calculated logs bloom with the expected logs bloom. +fn compare_receipts_root_and_logs_bloom( + calculated_receipts_root: B256, + calculated_logs_bloom: Bloom, + expected_receipts_root: B256, + expected_logs_bloom: Bloom, +) -> Result<(), ConsensusError> { + if calculated_receipts_root != expected_receipts_root { + return Err(ConsensusError::BodyReceiptRootDiff( + GotExpected { got: calculated_receipts_root, expected: expected_receipts_root }.into(), + )); + } + + if calculated_logs_bloom != expected_logs_bloom { + return Err(ConsensusError::BodyBloomLogDiff( + GotExpected { got: calculated_logs_bloom, expected: expected_logs_bloom }.into(), + )); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_consensus::Header; + use alloy_eips::eip7685::Requests; + use alloy_primitives::{Bytes, U256, b256, hex}; + use op_alloy_consensus::OpTxEnvelope; + use reth_chainspec::{BaseFeeParams, ChainSpec, EthChainSpec, ForkCondition, Hardfork}; + use reth_optimism_chainspec::{BASE_SEPOLIA, OpChainSpec}; + use reth_optimism_forks::{BASE_SEPOLIA_HARDFORKS, OpHardfork}; + use reth_optimism_primitives::OpReceipt; + use std::sync::Arc; + + const HOLOCENE_TIMESTAMP: u64 = 1700000000; + 
const ISTHMUS_TIMESTAMP: u64 = 1750000000; + const JOVIAN_TIMESTAMP: u64 = 1800000000; + const BLOCK_TIME_SECONDS: u64 = 2; + + fn holocene_chainspec() -> Arc { + let mut hardforks = BASE_SEPOLIA_HARDFORKS.clone(); + hardforks + .insert(OpHardfork::Holocene.boxed(), ForkCondition::Timestamp(HOLOCENE_TIMESTAMP)); + Arc::new(OpChainSpec { + inner: ChainSpec { + chain: BASE_SEPOLIA.inner.chain, + genesis: BASE_SEPOLIA.inner.genesis.clone(), + genesis_header: BASE_SEPOLIA.inner.genesis_header.clone(), + paris_block_and_final_difficulty: Some((0, U256::from(0))), + hardforks, + base_fee_params: BASE_SEPOLIA.inner.base_fee_params.clone(), + prune_delete_limit: 10000, + ..Default::default() + }, + }) + } + + fn isthmus_chainspec() -> OpChainSpec { + let mut chainspec = BASE_SEPOLIA.as_ref().clone(); + chainspec + .inner + .hardforks + .insert(OpHardfork::Isthmus.boxed(), ForkCondition::Timestamp(ISTHMUS_TIMESTAMP)); + chainspec + } + + fn jovian_chainspec() -> OpChainSpec { + let mut chainspec = BASE_SEPOLIA.as_ref().clone(); + chainspec + .inner + .hardforks + .insert(OpHardfork::Jovian.boxed(), ForkCondition::Timestamp(JOVIAN_TIMESTAMP)); + chainspec + } + + #[test] + fn test_get_base_fee_pre_holocene() { + let op_chain_spec = BASE_SEPOLIA.clone(); + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + ..Default::default() + }; + let base_fee = + reth_optimism_chainspec::OpChainSpec::next_block_base_fee(&op_chain_spec, &parent, 0); + assert_eq!( + base_fee.unwrap(), + op_chain_spec.next_block_base_fee(&parent, 0).unwrap_or_default() + ); + } + + #[test] + fn test_get_base_fee_holocene_extra_data_not_set() { + let op_chain_spec = holocene_chainspec(); + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + timestamp: HOLOCENE_TIMESTAMP + 3, + extra_data: Bytes::from_static(&[0, 0, 0, 0, 0, 0, 0, 0, 0]), + ..Default::default() + }; + let base_fee = 
reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + HOLOCENE_TIMESTAMP + 5, + ); + assert_eq!( + base_fee.unwrap(), + op_chain_spec.next_block_base_fee(&parent, 0).unwrap_or_default() + ); + } + + #[test] + fn test_get_base_fee_holocene_extra_data_set() { + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + extra_data: Bytes::from_static(&[0, 0, 0, 0, 8, 0, 0, 0, 8]), + timestamp: HOLOCENE_TIMESTAMP + 3, + ..Default::default() + }; + + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &holocene_chainspec(), + &parent, + HOLOCENE_TIMESTAMP + 5, + ); + assert_eq!( + base_fee.unwrap(), + parent + .next_block_base_fee(BaseFeeParams::new(0x00000008, 0x00000008)) + .unwrap_or_default() + ); + } + + // + #[test] + fn test_get_base_fee_holocene_extra_data_set_base_sepolia() { + let parent = Header { + base_fee_per_gas: Some(507), + gas_used: 4847634, + gas_limit: 60000000, + extra_data: hex!("00000000fa0000000a").into(), + timestamp: 1735315544, + ..Default::default() + }; + + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &*BASE_SEPOLIA, + &parent, + 1735315546, + ) + .unwrap(); + assert_eq!(base_fee, 507); + } + + #[test] + fn test_get_base_fee_holocene_extra_data_set_and_min_base_fee_set() { + const MIN_BASE_FEE: u64 = 10; + + let mut extra_data = Vec::new(); + // eip1559 params + extra_data.append(&mut hex!("00000000fa0000000a").to_vec()); + // min base fee + extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); + let extra_data = Bytes::from(extra_data); + + let parent = Header { + base_fee_per_gas: Some(507), + gas_used: 4847634, + gas_limit: 60000000, + extra_data, + timestamp: 1735315544, + ..Default::default() + }; + + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &*BASE_SEPOLIA, + &parent, + 1735315546, + ); + assert_eq!(base_fee, None); + } + + /// The version byte for Jovian is 1. 
+ const JOVIAN_EXTRA_DATA_VERSION_BYTE: u8 = 1; + + #[test] + fn test_get_base_fee_jovian_extra_data_and_min_base_fee_not_set() { + let op_chain_spec = jovian_chainspec(); + + let mut extra_data = Vec::new(); + extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); + // eip1559 params + extra_data.append(&mut [0_u8; 8].to_vec()); + let extra_data = Bytes::from(extra_data); + + let parent = Header { + base_fee_per_gas: Some(1), + gas_used: 15763614, + gas_limit: 144000000, + timestamp: JOVIAN_TIMESTAMP, + extra_data, + ..Default::default() + }; + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, + ); + assert_eq!(base_fee, None); + } + + /// After Jovian, the next block base fee cannot be less than the minimum base fee. + #[test] + fn test_get_base_fee_jovian_default_extra_data_and_min_base_fee() { + const CURR_BASE_FEE: u64 = 1; + const MIN_BASE_FEE: u64 = 10; + + let mut extra_data = Vec::new(); + extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); + // eip1559 params + extra_data.append(&mut [0_u8; 8].to_vec()); + // min base fee + extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); + let extra_data = Bytes::from(extra_data); + + let op_chain_spec = jovian_chainspec(); + let parent = Header { + base_fee_per_gas: Some(CURR_BASE_FEE), + gas_used: 15763614, + gas_limit: 144000000, + timestamp: JOVIAN_TIMESTAMP, + extra_data, + ..Default::default() + }; + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, + ); + assert_eq!(base_fee, Some(MIN_BASE_FEE)); + } + + /// After Jovian, the next block base fee cannot be less than the minimum base fee. 
+ #[test] + fn test_jovian_min_base_fee_cannot_decrease() { + const MIN_BASE_FEE: u64 = 10; + + let mut extra_data = Vec::new(); + extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); + // eip1559 params + extra_data.append(&mut [0_u8; 8].to_vec()); + // min base fee + extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); + let extra_data = Bytes::from(extra_data); + + let op_chain_spec = jovian_chainspec(); + + // If we're currently at the minimum base fee, the next block base fee cannot decrease. + let parent = Header { + base_fee_per_gas: Some(MIN_BASE_FEE), + gas_used: 10, + gas_limit: 144000000, + timestamp: JOVIAN_TIMESTAMP, + extra_data: extra_data.clone(), + ..Default::default() + }; + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, + ); + assert_eq!(base_fee, Some(MIN_BASE_FEE)); + + // The next block can increase the base fee + let parent = Header { + base_fee_per_gas: Some(MIN_BASE_FEE), + gas_used: 144000000, + gas_limit: 144000000, + timestamp: JOVIAN_TIMESTAMP, + extra_data, + ..Default::default() + }; + let base_fee = reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + JOVIAN_TIMESTAMP + 2 * BLOCK_TIME_SECONDS, + ); + assert_eq!(base_fee, Some(MIN_BASE_FEE + 1)); + } + + #[test] + fn test_jovian_base_fee_can_decrease_if_above_min_base_fee() { + const MIN_BASE_FEE: u64 = 10; + + let mut extra_data = Vec::new(); + extra_data.push(JOVIAN_EXTRA_DATA_VERSION_BYTE); + // eip1559 params + extra_data.append(&mut [0_u8; 8].to_vec()); + // min base fee + extra_data.append(&mut MIN_BASE_FEE.to_be_bytes().to_vec()); + let extra_data = Bytes::from(extra_data); + + let op_chain_spec = jovian_chainspec(); + + let parent = Header { + base_fee_per_gas: Some(100 * MIN_BASE_FEE), + gas_used: 10, + gas_limit: 144000000, + timestamp: JOVIAN_TIMESTAMP, + extra_data, + ..Default::default() + }; + let base_fee = 
reth_optimism_chainspec::OpChainSpec::next_block_base_fee( + &op_chain_spec, + &parent, + JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS, + ) + .unwrap(); + assert_eq!( + base_fee, + op_chain_spec + .inner + .next_block_base_fee(&parent, JOVIAN_TIMESTAMP + BLOCK_TIME_SECONDS) + .unwrap() + ); + } + + #[test] + fn body_against_header_isthmus() { + let chainspec = isthmus_chainspec(); + let header = Header { + base_fee_per_gas: Some(507), + gas_used: 4847634, + gas_limit: 60000000, + extra_data: hex!("00000000fa0000000a").into(), + timestamp: 1800000000, + withdrawals_root: Some(b256!( + "0x611e1d75cbb77fa782d79485a8384e853bc92e56883c313a51e3f9feef9a9a71" + )), + ..Default::default() + }; + let mut body = alloy_consensus::BlockBody:: { + transactions: vec![], + ommers: vec![], + withdrawals: Some(Default::default()), + }; + validate_body_against_header_op(&chainspec, &body, &header).unwrap(); + + body.withdrawals.take(); + validate_body_against_header_op(&chainspec, &body, &header).unwrap_err(); + } + + #[test] + fn test_jovian_blob_gas_used_validation() { + const BLOB_GAS_USED: u64 = 1000; + const GAS_USED: u64 = 5000; + + let chainspec = jovian_chainspec(); + let header = Header { + timestamp: JOVIAN_TIMESTAMP, + blob_gas_used: Some(BLOB_GAS_USED), + ..Default::default() + }; + + let result = BlockExecutionResult:: { + blob_gas_used: BLOB_GAS_USED, + receipts: vec![], + requests: Requests::default(), + gas_used: GAS_USED, + }; + validate_block_post_execution(&header, &chainspec, &result, None).unwrap(); + } + + #[test] + fn test_jovian_blob_gas_used_validation_mismatched() { + const BLOB_GAS_USED: u64 = 1000; + const GAS_USED: u64 = 5000; + + let chainspec = jovian_chainspec(); + let header = Header { + timestamp: JOVIAN_TIMESTAMP, + blob_gas_used: Some(BLOB_GAS_USED + 1), + ..Default::default() + }; + + let result = BlockExecutionResult:: { + blob_gas_used: BLOB_GAS_USED, + receipts: vec![], + requests: Requests::default(), + gas_used: GAS_USED, + }; + assert!(matches!( + 
validate_block_post_execution(&header, &chainspec, &result, None).unwrap_err(), + ConsensusError::BlobGasUsedDiff(diff) + if diff.got == BLOB_GAS_USED && diff.expected == BLOB_GAS_USED + 1 + )); + } +} diff --git a/rust/op-reth/crates/evm/Cargo.toml b/rust/op-reth/crates/evm/Cargo.toml new file mode 100644 index 00000000000..cf8e964af2f --- /dev/null +++ b/rust/op-reth/crates/evm/Cargo.toml @@ -0,0 +1,83 @@ +[package] +name = "reth-optimism-evm" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +# Reth +reth-chainspec.workspace = true +reth-evm = { workspace = true, features = ["op"] } +reth-primitives-traits.workspace = true +reth-execution-errors.workspace = true +reth-execution-types.workspace = true +reth-storage-errors.workspace = true + +reth-rpc-eth-api = { workspace = true, optional = true } + +# ethereum +alloy-eips.workspace = true +alloy-evm.workspace = true +alloy-primitives.workspace = true +alloy-op-evm.workspace = true +op-alloy-consensus.workspace = true +op-alloy-rpc-types-engine.workspace = true +alloy-consensus.workspace = true + +# Optimism +reth-optimism-chainspec.workspace = true +reth-optimism-consensus.workspace = true +reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true + +# revm +revm.workspace = true +op-revm.workspace = true + +# misc +thiserror.workspace = true + +[dev-dependencies] +reth-evm = { workspace = true, features = ["test-utils"] } +reth-revm = { workspace = true, features = ["test-utils"] } +alloy-genesis.workspace = true +reth-optimism-primitives = { workspace = true, features = ["arbitrary"] } + +[features] +default = ["std"] +std = [ + "reth-revm/std", + "alloy-consensus/std", + "alloy-eips/std", + "alloy-genesis/std", + "alloy-primitives/std", + "reth-primitives-traits/std", + "revm/std", + 
"reth-optimism-primitives/std", + "reth-optimism-forks/std", + "thiserror/std", + "op-alloy-consensus/std", + "reth-chainspec/std", + "reth-optimism-consensus/std", + "reth-optimism-chainspec/std", + "reth-execution-errors/std", + "reth-execution-types/std", + "alloy-evm/std", + "alloy-op-evm/std", + "op-revm/std", + "reth-evm/std", + "op-alloy-rpc-types-engine/std", + "reth-storage-errors/std", +] +portable = [ + "reth-revm/portable", + "op-revm/portable", + "revm/portable", +] +rpc = ["reth-rpc-eth-api", "reth-optimism-primitives/serde", "reth-optimism-primitives/reth-codec", "alloy-evm/rpc"] diff --git a/rust/op-reth/crates/evm/src/build.rs b/rust/op-reth/crates/evm/src/build.rs new file mode 100644 index 00000000000..336967499d2 --- /dev/null +++ b/rust/op-reth/crates/evm/src/build.rs @@ -0,0 +1,154 @@ +use alloc::sync::Arc; +use alloy_consensus::{ + Block, BlockBody, EMPTY_OMMER_ROOT_HASH, Header, TxReceipt, constants::EMPTY_WITHDRAWALS, + proofs, +}; +use alloy_eips::{eip7685::EMPTY_REQUESTS_HASH, merge::BEACON_NONCE}; +use alloy_evm::block::BlockExecutorFactory; +use alloy_op_evm::OpBlockExecutionCtx; +use alloy_primitives::logs_bloom; +use reth_evm::execute::{BlockAssembler, BlockAssemblerInput}; +use reth_execution_errors::BlockExecutionError; +use reth_execution_types::BlockExecutionResult; +use reth_optimism_consensus::{calculate_receipt_root_no_memo_optimism, isthmus}; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::DepositReceipt; +use reth_primitives_traits::{Receipt, SignedTransaction}; +use revm::context::Block as _; + +/// Block builder for Optimism. +#[derive(Debug)] +pub struct OpBlockAssembler { + chain_spec: Arc, +} + +impl OpBlockAssembler { + /// Creates a new [`OpBlockAssembler`]. + pub const fn new(chain_spec: Arc) -> Self { + Self { chain_spec } + } +} + +impl OpBlockAssembler { + /// Builds a block for `input` without any bounds on header `H`. 
+ pub fn assemble_block< + F: for<'a> BlockExecutorFactory< + ExecutionCtx<'a>: Into, + Transaction: SignedTransaction, + Receipt: Receipt + DepositReceipt, + >, + H, + >( + &self, + input: BlockAssemblerInput<'_, '_, F, H>, + ) -> Result, BlockExecutionError> { + let BlockAssemblerInput { + evm_env, + execution_ctx: ctx, + transactions, + output: BlockExecutionResult { receipts, gas_used, blob_gas_used, requests: _ }, + bundle_state, + state_root, + state_provider, + .. + } = input; + let ctx = ctx.into(); + + let timestamp = evm_env.block_env.timestamp().saturating_to(); + + let transactions_root = proofs::calculate_transaction_root(&transactions); + let receipts_root = + calculate_receipt_root_no_memo_optimism(receipts, &self.chain_spec, timestamp); + let logs_bloom = logs_bloom(receipts.iter().flat_map(|r| r.logs())); + + let mut requests_hash = None; + + let withdrawals_root = if self.chain_spec.is_isthmus_active_at_timestamp(timestamp) { + // always empty requests hash post isthmus + requests_hash = Some(EMPTY_REQUESTS_HASH); + + // withdrawals root field in block header is used for storage root of L2 predeploy + // `l2tol1-message-passer` + Some( + isthmus::withdrawals_root(bundle_state, state_provider) + .map_err(BlockExecutionError::other)?, + ) + } else if self.chain_spec.is_canyon_active_at_timestamp(timestamp) { + Some(EMPTY_WITHDRAWALS) + } else { + None + }; + + let (excess_blob_gas, blob_gas_used) = + if self.chain_spec.is_jovian_active_at_timestamp(timestamp) { + // In jovian, we're using the blob gas used field to store the current da + // footprint's value. 
+ (Some(0), Some(*blob_gas_used)) + } else if self.chain_spec.is_ecotone_active_at_timestamp(timestamp) { + (Some(0), Some(0)) + } else { + (None, None) + }; + + let header = Header { + parent_hash: ctx.parent_hash, + ommers_hash: EMPTY_OMMER_ROOT_HASH, + beneficiary: evm_env.block_env.beneficiary(), + state_root, + transactions_root, + receipts_root, + withdrawals_root, + logs_bloom, + timestamp, + mix_hash: evm_env.block_env.prevrandao().unwrap_or_default(), + nonce: BEACON_NONCE.into(), + base_fee_per_gas: Some(evm_env.block_env.basefee()), + number: evm_env.block_env.number().saturating_to(), + gas_limit: evm_env.block_env.gas_limit(), + difficulty: evm_env.block_env.difficulty(), + gas_used: *gas_used, + extra_data: ctx.extra_data, + parent_beacon_block_root: ctx.parent_beacon_block_root, + blob_gas_used, + excess_blob_gas, + requests_hash, + }; + + Ok(Block::new( + header, + BlockBody { + transactions, + ommers: Default::default(), + withdrawals: self + .chain_spec + .is_canyon_active_at_timestamp(timestamp) + .then(Default::default), + }, + )) + } +} + +impl Clone for OpBlockAssembler { + fn clone(&self) -> Self { + Self { chain_spec: self.chain_spec.clone() } + } +} + +impl BlockAssembler for OpBlockAssembler +where + ChainSpec: OpHardforks, + F: for<'a> BlockExecutorFactory< + ExecutionCtx<'a> = OpBlockExecutionCtx, + Transaction: SignedTransaction, + Receipt: Receipt + DepositReceipt, + >, +{ + type Block = Block; + + fn assemble_block( + &self, + input: BlockAssemblerInput<'_, '_, F>, + ) -> Result { + self.assemble_block(input) + } +} diff --git a/rust/op-reth/crates/evm/src/config.rs b/rust/op-reth/crates/evm/src/config.rs new file mode 100644 index 00000000000..9a1f8500c1f --- /dev/null +++ b/rust/op-reth/crates/evm/src/config.rs @@ -0,0 +1,51 @@ +pub use alloy_op_evm::{ + spec as revm_spec, spec_by_timestamp_after_bedrock as revm_spec_by_timestamp_after_bedrock, +}; +use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; +use 
revm::primitives::{Address, B256, Bytes}; + +/// Context relevant for execution of a next block w.r.t OP. +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpNextBlockEnvAttributes { + /// The timestamp of the next block. + pub timestamp: u64, + /// The suggested fee recipient for the next block. + pub suggested_fee_recipient: Address, + /// The randomness value for the next block. + pub prev_randao: B256, + /// Block gas limit. + pub gas_limit: u64, + /// The parent beacon block root. + pub parent_beacon_block_root: Option, + /// Encoded EIP-1559 parameters to include into block's `extra_data` field. + pub extra_data: Bytes, +} + +#[cfg(feature = "rpc")] +impl reth_rpc_eth_api::helpers::pending_block::BuildPendingEnv + for OpNextBlockEnvAttributes +{ + fn build_pending_env(parent: &crate::SealedHeader) -> Self { + Self { + timestamp: parent.timestamp().saturating_add(12), + suggested_fee_recipient: parent.beneficiary(), + prev_randao: B256::random(), + gas_limit: parent.gas_limit(), + parent_beacon_block_root: parent.parent_beacon_block_root(), + extra_data: parent.extra_data().clone(), + } + } +} + +impl From for OpNextBlockEnvAttributes { + fn from(base: OpFlashblockPayloadBase) -> Self { + Self { + timestamp: base.timestamp, + suggested_fee_recipient: base.fee_recipient, + prev_randao: base.prev_randao, + gas_limit: base.gas_limit, + parent_beacon_block_root: Some(base.parent_beacon_block_root), + extra_data: base.extra_data, + } + } +} diff --git a/op-reth/crates/evm/src/error.rs b/rust/op-reth/crates/evm/src/error.rs similarity index 100% rename from op-reth/crates/evm/src/error.rs rename to rust/op-reth/crates/evm/src/error.rs diff --git a/op-reth/crates/evm/src/execute.rs b/rust/op-reth/crates/evm/src/execute.rs similarity index 98% rename from op-reth/crates/evm/src/execute.rs rename to rust/op-reth/crates/evm/src/execute.rs index ff8a72dc82a..d0e9df5e19b 100644 --- a/op-reth/crates/evm/src/execute.rs +++ b/rust/op-reth/crates/evm/src/execute.rs @@ -8,7 
+8,7 @@ mod tests { use crate::{OpEvmConfig, OpRethReceiptBuilder}; use alloc::sync::Arc; use alloy_consensus::{Block, BlockBody, Header, SignableTransaction, TxEip1559}; - use alloy_primitives::{b256, Address, Signature, StorageKey, StorageValue, U256}; + use alloy_primitives::{Address, Signature, StorageKey, StorageValue, U256, b256}; use op_alloy_consensus::TxDeposit; use op_revm::constants::L1_BLOCK_CONTRACT; use reth_chainspec::MIN_TRANSACTION_GAS; diff --git a/op-reth/crates/evm/src/l1.rs b/rust/op-reth/crates/evm/src/l1.rs similarity index 99% rename from op-reth/crates/evm/src/l1.rs rename to rust/op-reth/crates/evm/src/l1.rs index 2afe6e9d3a2..ea34b9a77ce 100644 --- a/op-reth/crates/evm/src/l1.rs +++ b/rust/op-reth/crates/evm/src/l1.rs @@ -1,8 +1,8 @@ //! Optimism-specific implementation and utilities for the executor -use crate::{error::L1BlockInfoError, revm_spec_by_timestamp_after_bedrock, OpBlockExecutionError}; +use crate::{OpBlockExecutionError, error::L1BlockInfoError, revm_spec_by_timestamp_after_bedrock}; use alloy_consensus::Transaction; -use alloy_primitives::{hex, U16, U256}; +use alloy_primitives::{U16, U256, hex}; use op_revm::L1BlockInfo; use reth_execution_errors::BlockExecutionError; use reth_optimism_forks::OpHardforks; @@ -362,7 +362,7 @@ mod tests { #[test] fn sanity_l1_block() { use alloy_consensus::Header; - use alloy_primitives::{hex_literal::hex, Bytes}; + use alloy_primitives::{Bytes, hex_literal::hex}; let bytes = Bytes::from_static(&hex!( 
"7ef9015aa044bae9d41b8380d781187b426c6fe43df5fb2fb57bd4466ef6a701e1f01e015694deaddeaddeaddeaddeaddeaddeaddeaddead000194420000000000000000000000000000000000001580808408f0d18001b90104015d8eb900000000000000000000000000000000000000000000000000000000008057650000000000000000000000000000000000000000000000000000000063d96d10000000000000000000000000000000000000000000000000000000000009f35273d89754a1e0387b89520d989d3be9c37c1f32495a88faf1ea05c61121ab0d1900000000000000000000000000000000000000000000000000000000000000010000000000000000000000002d679b567db6187c0c8323fa982cfb88b74dbcc7000000000000000000000000000000000000000000000000000000000000083400000000000000000000000000000000000000000000000000000000000f4240" diff --git a/rust/op-reth/crates/evm/src/lib.rs b/rust/op-reth/crates/evm/src/lib.rs new file mode 100644 index 00000000000..54ea9ae7b84 --- /dev/null +++ b/rust/op-reth/crates/evm/src/lib.rs @@ -0,0 +1,902 @@ +//! EVM config for vanilla optimism. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(feature = "std"), no_std)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +extern crate alloc; + +use alloc::sync::Arc; +use alloy_consensus::{BlockHeader, Header}; +use alloy_evm::{EvmFactory, FromRecoveredTx, FromTxWithEncoded}; +use alloy_op_evm::block::{OpTxEnv, receipt_builder::OpReceiptBuilder}; +use core::fmt::Debug; +use op_alloy_consensus::EIP1559ParamError; +use op_revm::{OpSpecId, OpTransaction}; +use reth_chainspec::EthChainSpec; +use reth_evm::{ + ConfigureEvm, EvmEnv, TransactionEnv, eth::NextEvmEnvAttributes, precompiles::PrecompilesMap, +}; +use reth_optimism_chainspec::OpChainSpec; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::{DepositReceipt, OpPrimitives}; +use 
reth_primitives_traits::{NodePrimitives, SealedBlock, SealedHeader, SignedTransaction}; +use revm::context::{BlockEnv, TxEnv}; + +#[allow(unused_imports)] +use { + alloy_eips::Decodable2718, + alloy_primitives::{Bytes, U256}, + op_alloy_rpc_types_engine::OpExecutionData, + reth_evm::{EvmEnvFor, ExecutionCtxFor}, + reth_primitives_traits::{TxTy, WithEncoded}, + reth_storage_errors::any::AnyError, + revm::{ + context::CfgEnv, context_interface::block::BlobExcessGasAndPrice, + primitives::hardfork::SpecId, + }, +}; + +#[cfg(feature = "std")] +use reth_evm::{ConfigureEngineEvm, ExecutableTxIterator}; + +mod config; +pub use config::{OpNextBlockEnvAttributes, revm_spec, revm_spec_by_timestamp_after_bedrock}; +mod execute; +pub use execute::*; +pub mod l1; +pub use l1::*; +mod receipts; +pub use receipts::*; +mod build; +pub use build::OpBlockAssembler; + +mod error; +pub use error::{L1BlockInfoError, OpBlockExecutionError}; + +pub use alloy_op_evm::{OpBlockExecutionCtx, OpBlockExecutorFactory, OpEvm, OpEvmFactory}; + +/// Optimism-related EVM configuration. +#[derive(Debug)] +pub struct OpEvmConfig< + ChainSpec = OpChainSpec, + N: NodePrimitives = OpPrimitives, + R = OpRethReceiptBuilder, + EvmFactory = OpEvmFactory, +> { + /// Inner [`OpBlockExecutorFactory`]. + pub executor_factory: OpBlockExecutorFactory, EvmFactory>, + /// Optimism block assembler. + pub block_assembler: OpBlockAssembler, + #[doc(hidden)] + pub _pd: core::marker::PhantomData, +} + +impl Clone + for OpEvmConfig +{ + fn clone(&self) -> Self { + Self { + executor_factory: self.executor_factory.clone(), + block_assembler: self.block_assembler.clone(), + _pd: self._pd, + } + } +} + +impl OpEvmConfig { + /// Creates a new [`OpEvmConfig`] with the given chain spec for OP chains. + pub fn optimism(chain_spec: Arc) -> Self { + Self::new(chain_spec, OpRethReceiptBuilder::default()) + } +} + +impl OpEvmConfig { + /// Creates a new [`OpEvmConfig`] with the given chain spec. 
+ pub fn new(chain_spec: Arc, receipt_builder: R) -> Self { + Self { + block_assembler: OpBlockAssembler::new(chain_spec.clone()), + executor_factory: OpBlockExecutorFactory::new( + receipt_builder, + chain_spec, + OpEvmFactory::default(), + ), + _pd: core::marker::PhantomData, + } + } +} + +impl OpEvmConfig +where + ChainSpec: OpHardforks, + N: NodePrimitives, +{ + /// Returns the chain spec associated with this configuration. + pub const fn chain_spec(&self) -> &Arc { + self.executor_factory.spec() + } +} + +impl ConfigureEvm for OpEvmConfig +where + ChainSpec: EthChainSpec
+ OpHardforks, + N: NodePrimitives< + Receipt = R::Receipt, + SignedTx = R::Transaction, + BlockHeader = Header, + BlockBody = alloy_consensus::BlockBody, + Block = alloy_consensus::Block, + >, + OpTransaction: FromRecoveredTx + FromTxWithEncoded, + R: OpReceiptBuilder, + EvmF: EvmFactory< + Tx: FromRecoveredTx + + FromTxWithEncoded + + TransactionEnv + + OpTxEnv, + Precompiles = PrecompilesMap, + Spec = OpSpecId, + BlockEnv = BlockEnv, + > + Debug, + Self: Send + Sync + Unpin + Clone + 'static, +{ + type Primitives = N; + type Error = EIP1559ParamError; + type NextBlockEnvCtx = OpNextBlockEnvAttributes; + type BlockExecutorFactory = OpBlockExecutorFactory, EvmF>; + type BlockAssembler = OpBlockAssembler; + + fn block_executor_factory(&self) -> &Self::BlockExecutorFactory { + &self.executor_factory + } + + fn block_assembler(&self) -> &Self::BlockAssembler { + &self.block_assembler + } + + fn evm_env(&self, header: &Header) -> Result, Self::Error> { + Ok(EvmEnv::for_op_block(header, self.chain_spec(), self.chain_spec().chain().id())) + } + + fn next_evm_env( + &self, + parent: &Header, + attributes: &Self::NextBlockEnvCtx, + ) -> Result, Self::Error> { + Ok(EvmEnv::for_op_next_block( + parent, + NextEvmEnvAttributes { + timestamp: attributes.timestamp, + suggested_fee_recipient: attributes.suggested_fee_recipient, + prev_randao: attributes.prev_randao, + gas_limit: attributes.gas_limit, + }, + self.chain_spec().next_block_base_fee(parent, attributes.timestamp).unwrap_or_default(), + self.chain_spec(), + self.chain_spec().chain().id(), + )) + } + + fn context_for_block( + &self, + block: &'_ SealedBlock, + ) -> Result { + Ok(OpBlockExecutionCtx { + parent_hash: block.header().parent_hash(), + parent_beacon_block_root: block.header().parent_beacon_block_root(), + extra_data: block.header().extra_data().clone(), + }) + } + + fn context_for_next_block( + &self, + parent: &SealedHeader, + attributes: Self::NextBlockEnvCtx, + ) -> Result { + Ok(OpBlockExecutionCtx { + 
parent_hash: parent.hash(), + parent_beacon_block_root: attributes.parent_beacon_block_root, + extra_data: attributes.extra_data, + }) + } +} + +#[cfg(feature = "std")] +impl ConfigureEngineEvm for OpEvmConfig +where + ChainSpec: EthChainSpec
+ OpHardforks, + N: NodePrimitives< + Receipt = R::Receipt, + SignedTx = R::Transaction, + BlockHeader = Header, + BlockBody = alloy_consensus::BlockBody, + Block = alloy_consensus::Block, + >, + OpTransaction: FromRecoveredTx + FromTxWithEncoded, + R: OpReceiptBuilder, + Self: Send + Sync + Unpin + Clone + 'static, +{ + fn evm_env_for_payload( + &self, + payload: &OpExecutionData, + ) -> Result, Self::Error> { + let timestamp = payload.payload.timestamp(); + let block_number = payload.payload.block_number(); + + let spec = revm_spec_by_timestamp_after_bedrock(self.chain_spec(), timestamp); + + let cfg_env = CfgEnv::new() + .with_chain_id(self.chain_spec().chain().id()) + .with_spec_and_mainnet_gas_params(spec); + + let blob_excess_gas_and_price = spec + .into_eth_spec() + .is_enabled_in(SpecId::CANCUN) + .then_some(BlobExcessGasAndPrice { excess_blob_gas: 0, blob_gasprice: 1 }); + + let block_env = BlockEnv { + number: U256::from(block_number), + beneficiary: payload.payload.as_v1().fee_recipient, + timestamp: U256::from(timestamp), + difficulty: if spec.into_eth_spec() >= SpecId::MERGE { + U256::ZERO + } else { + payload.payload.as_v1().prev_randao.into() + }, + prevrandao: (spec.into_eth_spec() >= SpecId::MERGE) + .then(|| payload.payload.as_v1().prev_randao), + gas_limit: payload.payload.as_v1().gas_limit, + basefee: payload.payload.as_v1().base_fee_per_gas.to(), + // EIP-4844 excess blob gas of this block, introduced in Cancun + blob_excess_gas_and_price, + }; + + Ok(EvmEnv { cfg_env, block_env }) + } + + fn context_for_payload<'a>( + &self, + payload: &'a OpExecutionData, + ) -> Result, Self::Error> { + Ok(OpBlockExecutionCtx { + parent_hash: payload.parent_hash(), + parent_beacon_block_root: payload.sidecar.parent_beacon_block_root(), + extra_data: payload.payload.as_v1().extra_data.clone(), + }) + } + + fn tx_iterator_for_payload( + &self, + payload: &OpExecutionData, + ) -> Result, Self::Error> { + let transactions = payload.payload.transactions().clone(); 
+ let convert = |encoded: Bytes| { + let tx = TxTy::::decode_2718_exact(encoded.as_ref()) + .map_err(AnyError::new)?; + let signer = tx.try_recover().map_err(AnyError::new)?; + Ok::<_, AnyError>(WithEncoded::new(encoded, tx.with_signer(signer))) + }; + + Ok((transactions, convert)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloc::collections::BTreeMap; + use alloy_consensus::{Header, Receipt}; + use alloy_eips::eip7685::Requests; + use alloy_genesis::Genesis; + use alloy_primitives::{Address, B256, LogData, bytes, map::HashMap}; + use op_revm::OpSpecId; + use reth_chainspec::ChainSpec; + use reth_evm::execute::ProviderError; + use reth_execution_types::{ + AccountRevertInit, BundleStateInit, Chain, ExecutionOutcome, RevertsInit, + }; + use reth_optimism_chainspec::{BASE_MAINNET, OpChainSpec}; + use reth_optimism_primitives::{OpBlock, OpPrimitives, OpReceipt}; + use reth_primitives_traits::{Account, RecoveredBlock}; + use revm::{ + database::{BundleState, CacheDB}, + database_interface::EmptyDBTyped, + inspector::NoOpInspector, + primitives::Log, + state::AccountInfo, + }; + use std::sync::Arc; + + fn test_evm_config() -> OpEvmConfig { + OpEvmConfig::optimism(BASE_MAINNET.clone()) + } + + #[test] + fn test_fill_cfg_and_block_env() { + // Create a default header + let header = Header::default(); + + // Build the ChainSpec for Ethereum mainnet, activating London, Paris, and Shanghai + // hardforks + let chain_spec = ChainSpec::builder() + .chain(0.into()) + .genesis(Genesis::default()) + .london_activated() + .paris_activated() + .shanghai_activated() + .build(); + + // Use the `OpEvmConfig` to create the `cfg_env` and `block_env` based on the ChainSpec, + // Header, and total difficulty + let EvmEnv { cfg_env, .. 
} = + OpEvmConfig::optimism(Arc::new(OpChainSpec { inner: chain_spec.clone() })) + .evm_env(&header) + .unwrap(); + + // Assert that the chain ID in the `cfg_env` is correctly set to the chain ID of the + // ChainSpec + assert_eq!(cfg_env.chain_id, chain_spec.chain().id()); + } + + #[test] + fn test_evm_with_env_default_spec() { + let evm_config = test_evm_config(); + + let db = CacheDB::>::default(); + + let evm_env = EvmEnv::default(); + + let evm = evm_config.evm_with_env(db, evm_env.clone()); + + // Check that the EVM environment + assert_eq!(evm.cfg, evm_env.cfg_env); + } + + #[test] + fn test_evm_with_env_custom_cfg() { + let evm_config = test_evm_config(); + + let db = CacheDB::>::default(); + + // Create a custom configuration environment with a chain ID of 111 + let cfg = + CfgEnv::new().with_chain_id(111).with_spec_and_mainnet_gas_params(OpSpecId::default()); + + let evm_env = EvmEnv { cfg_env: cfg.clone(), ..Default::default() }; + + let evm = evm_config.evm_with_env(db, evm_env); + + // Check that the EVM environment is initialized with the custom environment + assert_eq!(evm.cfg, cfg); + } + + #[test] + fn test_evm_with_env_custom_block_and_tx() { + let evm_config = test_evm_config(); + + let db = CacheDB::>::default(); + + // Create customs block and tx env + let block = BlockEnv { + basefee: 1000, + gas_limit: 10_000_000, + number: U256::from(42), + ..Default::default() + }; + + let evm_env = EvmEnv { block_env: block, ..Default::default() }; + + let evm = evm_config.evm_with_env(db, evm_env.clone()); + + // Verify that the block and transaction environments are set correctly + assert_eq!(evm.block, evm_env.block_env); + } + + #[test] + fn test_evm_with_spec_id() { + let evm_config = test_evm_config(); + + let db = CacheDB::>::default(); + + let evm_env = EvmEnv { + cfg_env: CfgEnv::new().with_spec_and_mainnet_gas_params(OpSpecId::ECOTONE), + ..Default::default() + }; + + let evm = evm_config.evm_with_env(db, evm_env.clone()); + + assert_eq!(evm.cfg, 
evm_env.cfg_env); + } + + #[test] + fn test_evm_with_env_and_default_inspector() { + let evm_config = test_evm_config(); + let db = CacheDB::>::default(); + + let evm_env = EvmEnv { cfg_env: Default::default(), ..Default::default() }; + + let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); + + // Check that the EVM environment is set to default values + assert_eq!(evm.block, evm_env.block_env); + assert_eq!(evm.cfg, evm_env.cfg_env); + } + + #[test] + fn test_evm_with_env_inspector_and_custom_cfg() { + let evm_config = test_evm_config(); + let db = CacheDB::>::default(); + + let cfg = + CfgEnv::new().with_chain_id(111).with_spec_and_mainnet_gas_params(OpSpecId::default()); + let block = BlockEnv::default(); + let evm_env = EvmEnv { block_env: block, cfg_env: cfg.clone() }; + + let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); + + // Check that the EVM environment is set with custom configuration + assert_eq!(evm.cfg, cfg); + assert_eq!(evm.block, evm_env.block_env); + } + + #[test] + fn test_evm_with_env_inspector_and_custom_block_tx() { + let evm_config = test_evm_config(); + let db = CacheDB::>::default(); + + // Create custom block and tx environment + let block = BlockEnv { + basefee: 1000, + gas_limit: 10_000_000, + number: U256::from(42), + ..Default::default() + }; + let evm_env = EvmEnv { block_env: block, ..Default::default() }; + + let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), NoOpInspector {}); + + // Verify that the block and transaction environments are set correctly + assert_eq!(evm.block, evm_env.block_env); + } + + #[test] + fn test_evm_with_env_inspector_and_spec_id() { + let evm_config = test_evm_config(); + let db = CacheDB::>::default(); + + let evm_env = EvmEnv { + cfg_env: CfgEnv::new().with_spec_and_mainnet_gas_params(OpSpecId::ECOTONE), + ..Default::default() + }; + + let evm = evm_config.evm_with_env_and_inspector(db, evm_env.clone(), 
NoOpInspector {}); + + // Check that the spec ID is set properly + assert_eq!(evm.cfg, evm_env.cfg_env); + assert_eq!(evm.block, evm_env.block_env); + } + + #[test] + fn receipts_by_block_hash() { + // Create a default recovered block + let block: RecoveredBlock = Default::default(); + + // Define block hashes for block1 and block2 + let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + + // Clone the default block into block1 and block2 + let mut block1 = block.clone(); + let mut block2 = block; + + // Set the hashes of block1 and block2 + block1.set_block_number(10); + block1.set_hash(block1_hash); + + block2.set_block_number(11); + block2.set_hash(block2_hash); + + // Create a random receipt object, receipt1 + let receipt1 = OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }); + + // Create another random receipt object, receipt2 + let receipt2 = OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 1325345, + logs: vec![], + status: true.into(), + }); + + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![receipt1.clone()], vec![receipt2]]; + + // Create an ExecutionOutcome object with the created bundle, receipts, an empty requests + // vector, and first_block set to 10 + let execution_outcome = ExecutionOutcome:: { + bundle: Default::default(), + receipts, + requests: vec![], + first_block: 10, + }; + + // Create a Chain object with a BTreeMap of blocks mapped to their block numbers, + // including block1_hash and block2_hash, and the execution_outcome + let chain: Chain = + Chain::new([block1, block2], execution_outcome.clone(), BTreeMap::new()); + + // Assert that the proper receipt vector is returned for block1_hash + assert_eq!(chain.receipts_by_block_hash(block1_hash), Some(vec![&receipt1])); + + // Create an ExecutionOutcome object with a single receipt vector containing receipt1 + let execution_outcome1 = ExecutionOutcome { + bundle: 
Default::default(), + receipts: vec![vec![receipt1]], + requests: vec![], + first_block: 10, + }; + + // Assert that the execution outcome at the first block contains only the first receipt + assert_eq!(chain.execution_outcome_at_block(10), Some(execution_outcome1)); + + // Assert that the execution outcome at the tip block contains the whole execution outcome + assert_eq!(chain.execution_outcome_at_block(11), Some(execution_outcome)); + } + + #[test] + fn test_initialization() { + // Create a new BundleState object with initial data + let bundle = BundleState::new( + vec![(Address::new([2; 20]), None, Some(AccountInfo::default()), HashMap::default())], + vec![vec![(Address::new([2; 20]), None, vec![])]], + vec![], + ); + + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }))]]; + + // Create a Requests object with a vector of requests + let requests = vec![Requests::new(vec![bytes!("dead"), bytes!("beef"), bytes!("beebee")])]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: bundle.clone(), + receipts: receipts.clone(), + requests: requests.clone(), + first_block, + }; + + // Assert that creating a new ExecutionOutcome using the constructor matches exec_res + assert_eq!( + ExecutionOutcome::new(bundle, receipts.clone(), first_block, requests.clone()), + exec_res + ); + + // Create a BundleStateInit object and insert initial data + let mut state_init: BundleStateInit = HashMap::default(); + state_init + .insert(Address::new([2; 20]), (None, Some(Account::default()), HashMap::default())); + + // Create a HashMap for account reverts and insert initial data + let mut revert_inner: HashMap = HashMap::default(); + revert_inner.insert(Address::new([2; 20]), (None, 
vec![])); + + // Create a RevertsInit object and insert the revert_inner data + let mut revert_init: RevertsInit = HashMap::default(); + revert_init.insert(123, revert_inner); + + // Assert that creating a new ExecutionOutcome using the new_init method matches + // exec_res + assert_eq!( + ExecutionOutcome::new_init( + state_init, + revert_init, + vec![], + receipts, + first_block, + requests, + ), + exec_res + ); + } + + #[test] + fn test_block_number_to_index() { + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }))]]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests: vec![], + first_block, + }; + + // Test before the first block + assert_eq!(exec_res.block_number_to_index(12), None); + + // Test after the first block but index larger than receipts length + assert_eq!(exec_res.block_number_to_index(133), None); + + // Test after the first block + assert_eq!(exec_res.block_number_to_index(123), Some(0)); + } + + #[test] + fn test_get_logs() { + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + status: true.into(), + })]]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), + receipts, + requests: vec![], + first_block, + }; + + // Get logs for block number 123 + let logs: Vec<&Log> = exec_res.logs(123).unwrap().collect(); + + // Assert that the logs match the expected logs + assert_eq!(logs, 
vec![&Log::::default()]); + } + + #[test] + fn test_receipts_by_block() { + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + status: true.into(), + }))]]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts, // Include the created receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Get receipts for block number 123 and convert the result into a vector + let receipts_by_block: Vec<_> = exec_res.receipts_by_block(123).iter().collect(); + + // Assert that the receipts for block number 123 match the expected receipts + assert_eq!( + receipts_by_block, + vec![&Some(OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + status: true.into(), + }))] + ); + } + + #[test] + fn test_receipts_len() { + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![Some(OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![Log::::default()], + status: true.into(), + }))]]; + + // Create an empty Receipts object + let receipts_empty = vec![]; + + // Define the first block number + let first_block = 123; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts, // Include the created receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Assert that the length of receipts in exec_res is 1 + assert_eq!(exec_res.len(), 1); + + // Assert that exec_res is not empty + 
assert!(!exec_res.is_empty()); + + // Create a ExecutionOutcome object with an empty Receipts object + let exec_res_empty_receipts: ExecutionOutcome = ExecutionOutcome { + bundle: Default::default(), // Default value for bundle + receipts: receipts_empty, // Include the empty receipts + requests: vec![], // Empty vector for requests + first_block, // Set the first block number + }; + + // Assert that the length of receipts in exec_res_empty_receipts is 0 + assert_eq!(exec_res_empty_receipts.len(), 0); + + // Assert that exec_res_empty_receipts is empty + assert!(exec_res_empty_receipts.is_empty()); + } + + #[test] + fn test_revert_to() { + // Create a random receipt object + let receipt = OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }); + + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![vec![Some(receipt.clone())], vec![Some(receipt.clone())]]; + + // Define the first block number + let first_block = 123; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. + let requests = + vec![Requests::new(vec![request.clone()]), Requests::new(vec![request.clone()])]; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let mut exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Assert that the revert_to method returns true when reverting to the initial block number. + assert!(exec_res.revert_to(123)); + + // Assert that the receipts are properly cut after reverting to the initial block number. + assert_eq!(exec_res.receipts, vec![vec![Some(receipt)]]); + + // Assert that the requests are properly cut after reverting to the initial block number. 
+ assert_eq!(exec_res.requests, vec![Requests::new(vec![request])]); + + // Assert that the revert_to method returns false when attempting to revert to a block + // number greater than the initial block number. + assert!(!exec_res.revert_to(133)); + + // Assert that the revert_to method returns false when attempting to revert to a block + // number less than the initial block number. + assert!(!exec_res.revert_to(10)); + } + + #[test] + fn test_extend_execution_outcome() { + // Create a Receipt object with specific attributes. + let receipt = OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }); + + // Create a Receipts object containing the receipt. + let receipts = vec![vec![Some(receipt.clone())]]; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. + let requests = vec![Requests::new(vec![request.clone()])]; + + // Define the initial block number. + let first_block = 123; + + // Create an ExecutionOutcome object. + let mut exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Extend the ExecutionOutcome object by itself. + exec_res.extend(exec_res.clone()); + + // Assert the extended ExecutionOutcome matches the expected outcome. 
+ assert_eq!( + exec_res, + ExecutionOutcome { + bundle: Default::default(), + receipts: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], + first_block: 123, + } + ); + } + + #[test] + fn test_split_at_execution_outcome() { + // Create a random receipt object + let receipt = OpReceipt::Legacy(Receipt:: { + cumulative_gas_used: 46913, + logs: vec![], + status: true.into(), + }); + + // Create a Receipts object with a vector of receipt vectors + let receipts = vec![ + vec![Some(receipt.clone())], + vec![Some(receipt.clone())], + vec![Some(receipt.clone())], + ]; + + // Define the first block number + let first_block = 123; + + // Create a request. + let request = bytes!("deadbeef"); + + // Create a vector of Requests containing the request. + let requests = vec![ + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + Requests::new(vec![request.clone()]), + ]; + + // Create a ExecutionOutcome object with the created bundle, receipts, requests, and + // first_block + let exec_res = + ExecutionOutcome { bundle: Default::default(), receipts, requests, first_block }; + + // Split the ExecutionOutcome at block number 124 + let result = exec_res.clone().split_at(124); + + // Define the expected lower ExecutionOutcome after splitting + let lower_execution_outcome = ExecutionOutcome { + bundle: Default::default(), + receipts: vec![vec![Some(receipt.clone())]], + requests: vec![Requests::new(vec![request.clone()])], + first_block, + }; + + // Define the expected higher ExecutionOutcome after splitting + let higher_execution_outcome = ExecutionOutcome { + bundle: Default::default(), + receipts: vec![vec![Some(receipt.clone())], vec![Some(receipt)]], + requests: vec![Requests::new(vec![request.clone()]), Requests::new(vec![request])], + first_block: 124, + }; + + // Assert that the split result matches the expected lower and higher outcomes + 
assert_eq!(result.0, Some(lower_execution_outcome)); + assert_eq!(result.1, higher_execution_outcome); + + // Assert that splitting at the first block number returns None for the lower outcome + assert_eq!(exec_res.clone().split_at(123), (None, exec_res)); + } +} diff --git a/op-reth/crates/evm/src/receipts.rs b/rust/op-reth/crates/evm/src/receipts.rs similarity index 100% rename from op-reth/crates/evm/src/receipts.rs rename to rust/op-reth/crates/evm/src/receipts.rs diff --git a/rust/op-reth/crates/flashblocks/Cargo.toml b/rust/op-reth/crates/flashblocks/Cargo.toml new file mode 100644 index 00000000000..413bbe0f031 --- /dev/null +++ b/rust/op-reth/crates/flashblocks/Cargo.toml @@ -0,0 +1,60 @@ +[package] +name = "reth-optimism-flashblocks" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +# reth +reth-optimism-primitives = { workspace = true, features = ["serde"] } +reth-chain-state = { workspace = true, features = ["serde"] } +reth-primitives-traits = { workspace = true, features = ["serde"] } +reth-engine-primitives = { workspace = true, features = ["std"] } +reth-execution-types = { workspace = true, features = ["serde"] } +reth-evm.workspace = true +reth-revm.workspace = true +reth-optimism-payload-builder.workspace = true +reth-rpc-eth-types.workspace = true +reth-errors.workspace = true +reth-payload-primitives.workspace = true +reth-storage-api.workspace = true +reth-tasks.workspace = true +reth-metrics.workspace = true + +# alloy +alloy-eips = { workspace = true, features = ["serde"] } +alloy-primitives = { workspace = true, features = ["serde"] } +alloy-rpc-types-engine = { workspace = true, features = ["serde"] } +alloy-consensus.workspace = true + +# op-alloy +op-alloy-rpc-types-engine = { workspace = true, features = ["k256"] } + +# io +tokio.workspace 
= true +tokio-tungstenite = { workspace = true, features = ["rustls-tls-native-roots"] } +serde_json.workspace = true +url.workspace = true +futures-util.workspace = true +brotli = { workspace = true, features = ["std"] } + +# debug +tracing.workspace = true +metrics.workspace = true + +# errors +eyre.workspace = true + +ringbuffer.workspace = true +derive_more.workspace = true + +[dev-dependencies] +test-case.workspace = true +alloy-consensus.workspace = true +op-alloy-consensus.workspace = true diff --git a/op-reth/crates/flashblocks/src/cache.rs b/rust/op-reth/crates/flashblocks/src/cache.rs similarity index 99% rename from op-reth/crates/flashblocks/src/cache.rs rename to rust/op-reth/crates/flashblocks/src/cache.rs index 9aeed3435e3..0ddc2e19adf 100644 --- a/op-reth/crates/flashblocks/src/cache.rs +++ b/rust/op-reth/crates/flashblocks/src/cache.rs @@ -4,9 +4,9 @@ //! and intelligently selects which sequence to build based on the local chain tip. use crate::{ + FlashBlock, FlashBlockCompleteSequence, PendingFlashBlock, sequence::{FlashBlockPendingSequence, SequenceExecutionOutcome}, worker::BuildArgs, - FlashBlock, FlashBlockCompleteSequence, PendingFlashBlock, }; use alloy_eips::eip2718::WithEncoded; use alloy_primitives::B256; @@ -101,7 +101,7 @@ impl SequenceManager { // Bundle completed sequence with its decoded transactions and push to cache // Ring buffer automatically evicts oldest entry when full let txs = std::mem::take(&mut self.pending_transactions); - self.completed_cache.push((completed, txs)); + self.completed_cache.enqueue((completed, txs)); // ensure cache is wiped on new flashblock let _ = self.pending.take_cached_reads(); diff --git a/op-reth/crates/flashblocks/src/consensus.rs b/rust/op-reth/crates/flashblocks/src/consensus.rs similarity index 100% rename from op-reth/crates/flashblocks/src/consensus.rs rename to rust/op-reth/crates/flashblocks/src/consensus.rs diff --git a/rust/op-reth/crates/flashblocks/src/lib.rs 
b/rust/op-reth/crates/flashblocks/src/lib.rs new file mode 100644 index 00000000000..08c7499d585 --- /dev/null +++ b/rust/op-reth/crates/flashblocks/src/lib.rs @@ -0,0 +1,87 @@ +//! A downstream integration of Flashblocks. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +use reth_primitives_traits::NodePrimitives; +use std::sync::Arc; + +// Included to enable serde feature for OpReceipt type used transitively +use reth_optimism_primitives as _; + +mod consensus; +pub use consensus::FlashBlockConsensusClient; + +mod payload; +pub use payload::{FlashBlock, PendingFlashBlock}; + +mod sequence; +pub use sequence::{ + FlashBlockCompleteSequence, FlashBlockPendingSequence, SequenceExecutionOutcome, +}; + +mod service; +pub use service::{FlashBlockBuildInfo, FlashBlockService}; + +mod worker; + +mod cache; + +pub mod validation; + +#[cfg(test)] +mod test_utils; + +mod ws; +pub use ws::{FlashBlockDecoder, WsConnect, WsConnector, WsFlashBlockStream}; + +/// Receiver of the most recent [`PendingFlashBlock`] built out of [`FlashBlock`]s. +/// +/// [`FlashBlock`]: crate::FlashBlock +pub type PendingBlockRx = tokio::sync::watch::Receiver>>; + +/// Receiver of the sequences of [`FlashBlock`]s built. +/// +/// [`FlashBlock`]: crate::FlashBlock +pub type FlashBlockCompleteSequenceRx = + tokio::sync::broadcast::Receiver; + +/// Receiver of received [`FlashBlock`]s from the (websocket) subscription. +/// +/// [`FlashBlock`]: crate::FlashBlock +pub type FlashBlockRx = tokio::sync::broadcast::Receiver>; + +/// Receiver that signals whether a [`FlashBlock`] is currently being built. 
+pub type InProgressFlashBlockRx = tokio::sync::watch::Receiver>; + +/// Container for all flashblocks-related listeners. +/// +/// Groups together the channels for flashblock-related updates. +#[derive(Debug)] +pub struct FlashblocksListeners { + /// Receiver of the most recent executed [`PendingFlashBlock`] built out of [`FlashBlock`]s. + pub pending_block_rx: PendingBlockRx, + /// Subscription channel of the complete sequences of [`FlashBlock`]s built. + pub flashblocks_sequence: tokio::sync::broadcast::Sender, + /// Receiver that signals whether a [`FlashBlock`] is currently being built. + pub in_progress_rx: InProgressFlashBlockRx, + /// Subscription channel for received flashblocks from the (websocket) connection. + pub received_flashblocks: tokio::sync::broadcast::Sender>, +} + +impl FlashblocksListeners { + /// Creates a new [`FlashblocksListeners`] with the given channels. + pub const fn new( + pending_block_rx: PendingBlockRx, + flashblocks_sequence: tokio::sync::broadcast::Sender, + in_progress_rx: InProgressFlashBlockRx, + received_flashblocks: tokio::sync::broadcast::Sender>, + ) -> Self { + Self { pending_block_rx, flashblocks_sequence, in_progress_rx, received_flashblocks } + } +} diff --git a/op-reth/crates/flashblocks/src/payload.rs b/rust/op-reth/crates/flashblocks/src/payload.rs similarity index 100% rename from op-reth/crates/flashblocks/src/payload.rs rename to rust/op-reth/crates/flashblocks/src/payload.rs diff --git a/op-reth/crates/flashblocks/src/sequence.rs b/rust/op-reth/crates/flashblocks/src/sequence.rs similarity index 99% rename from op-reth/crates/flashblocks/src/sequence.rs rename to rust/op-reth/crates/flashblocks/src/sequence.rs index abf9e6d514c..64ac0227290 100644 --- a/op-reth/crates/flashblocks/src/sequence.rs +++ b/rust/op-reth/crates/flashblocks/src/sequence.rs @@ -1,8 +1,8 @@ use crate::{FlashBlock, FlashBlockCompleteSequenceRx}; -use alloy_primitives::{Bytes, B256}; +use alloy_primitives::{B256, Bytes}; use 
alloy_rpc_types_engine::PayloadId; use core::mem; -use eyre::{bail, OptionExt}; +use eyre::{OptionExt, bail}; use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; use reth_revm::cached::CachedReads; use std::{collections::BTreeMap, ops::Deref}; diff --git a/op-reth/crates/flashblocks/src/service.rs b/rust/op-reth/crates/flashblocks/src/service.rs similarity index 98% rename from op-reth/crates/flashblocks/src/service.rs rename to rust/op-reth/crates/flashblocks/src/service.rs index ee0229d7f00..49094129419 100644 --- a/op-reth/crates/flashblocks/src/service.rs +++ b/rust/op-reth/crates/flashblocks/src/service.rs @@ -1,6 +1,6 @@ use crate::{ - cache::SequenceManager, worker::FlashBlockBuilder, FlashBlock, FlashBlockCompleteSequence, - FlashBlockCompleteSequenceRx, InProgressFlashBlockRx, PendingFlashBlock, + FlashBlock, FlashBlockCompleteSequence, FlashBlockCompleteSequenceRx, InProgressFlashBlockRx, + PendingFlashBlock, cache::SequenceManager, worker::FlashBlockBuilder, }; use alloy_primitives::B256; use futures_util::{FutureExt, Stream, StreamExt}; diff --git a/rust/op-reth/crates/flashblocks/src/test_utils.rs b/rust/op-reth/crates/flashblocks/src/test_utils.rs new file mode 100644 index 00000000000..de33b9f6f40 --- /dev/null +++ b/rust/op-reth/crates/flashblocks/src/test_utils.rs @@ -0,0 +1,331 @@ +//! Test utilities for flashblocks. +//! +//! Provides a factory for creating test flashblocks with automatic timestamp management. +//! +//! # Examples +//! +//! ## Simple: Create a flashblock sequence for the same block +//! +//! ```ignore +//! let factory = TestFlashBlockFactory::new(); // Default 2 second block time +//! let fb0 = factory.flashblock_at(0).build(); +//! let fb1 = factory.flashblock_after(&fb0).build(); +//! let fb2 = factory.flashblock_after(&fb1).build(); +//! ``` +//! +//! ## Create flashblocks with transactions +//! +//! ```ignore +//! let factory = TestFlashBlockFactory::new(); +//! let fb0 = factory.flashblock_at(0).build(); +//! 
let txs = vec![Bytes::from_static(&[1, 2, 3])]; +//! let fb1 = factory.flashblock_after(&fb0).transactions(txs).build(); +//! ``` +//! +//! ## Test across multiple blocks (timestamps auto-increment) +//! +//! ```ignore +//! let factory = TestFlashBlockFactory::new(); // Default 2 second blocks +//! +//! // Block 100 at timestamp 1000000 +//! let fb0 = factory.flashblock_at(0).build(); +//! let fb1 = factory.flashblock_after(&fb0).build(); +//! +//! // Block 101 at timestamp 1000002 (auto-incremented by block_time) +//! let fb2 = factory.flashblock_for_next_block(&fb1).build(); +//! let fb3 = factory.flashblock_after(&fb2).build(); +//! ``` +//! +//! ## Full control with builder +//! +//! ```ignore +//! let factory = TestFlashBlockFactory::new(); +//! let fb = factory.builder() +//! .block_number(100) +//! .parent_hash(specific_hash) +//! .state_root(computed_root) +//! .transactions(txs) +//! .build(); +//! ``` + +use crate::FlashBlock; +use alloy_primitives::{Address, B256, Bloom, Bytes, U256}; +use alloy_rpc_types_engine::PayloadId; +use op_alloy_rpc_types_engine::{ + OpFlashblockPayloadBase, OpFlashblockPayloadDelta, OpFlashblockPayloadMetadata, +}; + +/// Factory for creating test flashblocks with automatic timestamp management. +/// +/// Tracks `block_time` to automatically increment timestamps when creating new blocks. +/// Returns builders that can be further customized before calling `build()`. 
+/// +/// # Examples +/// +/// ```ignore +/// let factory = TestFlashBlockFactory::new(); // Default 2 second block time +/// let fb0 = factory.flashblock_at(0).build(); +/// let fb1 = factory.flashblock_after(&fb0).build(); +/// let fb2 = factory.flashblock_for_next_block(&fb1).build(); // timestamp auto-increments +/// ``` +#[derive(Debug)] +pub(crate) struct TestFlashBlockFactory { + /// Block time in seconds (used to auto-increment timestamps) + block_time: u64, + /// Starting timestamp for the first block + base_timestamp: u64, + /// Current block number being tracked + current_block_number: u64, +} + +impl TestFlashBlockFactory { + /// Creates a new factory with a default block time of 2 seconds. + /// + /// Use [`with_block_time`](Self::with_block_time) to customize the block time. + pub(crate) fn new() -> Self { + Self { block_time: 2, base_timestamp: 1_000_000, current_block_number: 100 } + } + + pub(crate) fn with_block_time(mut self, block_time: u64) -> Self { + self.block_time = block_time; + self + } + + /// Creates a builder for a flashblock at the specified index (within the current block). + /// + /// Returns a builder with index set, allowing further customization before building. + /// + /// # Examples + /// + /// ```ignore + /// let factory = TestFlashBlockFactory::new(); + /// let fb0 = factory.flashblock_at(0).build(); // Simple usage + /// let fb1 = factory.flashblock_at(1).state_root(specific_root).build(); // Customize + /// ``` + pub(crate) fn flashblock_at(&self, index: u64) -> TestFlashBlockBuilder { + self.builder().index(index).block_number(self.current_block_number) + } + + /// Creates a builder for a flashblock following the previous one in the same sequence. + /// + /// Automatically increments the index and maintains `block_number` and `payload_id`. + /// Returns a builder allowing further customization. 
+ /// + /// # Examples + /// + /// ```ignore + /// let factory = TestFlashBlockFactory::new(); + /// let fb0 = factory.flashblock_at(0).build(); + /// let fb1 = factory.flashblock_after(&fb0).build(); // Simple + /// let fb2 = factory.flashblock_after(&fb1).transactions(txs).build(); // With txs + /// ``` + pub(crate) fn flashblock_after(&self, previous: &FlashBlock) -> TestFlashBlockBuilder { + let parent_hash = + previous.base.as_ref().map(|b| b.parent_hash).unwrap_or(previous.diff.block_hash); + + self.builder() + .index(previous.index + 1) + .block_number(previous.metadata.block_number) + .payload_id(previous.payload_id) + .parent_hash(parent_hash) + .timestamp(previous.base.as_ref().map(|b| b.timestamp).unwrap_or(self.base_timestamp)) + } + + /// Creates a builder for a flashblock for the next block, starting a new sequence at index 0. + /// + /// Increments block number, uses previous `block_hash` as `parent_hash`, generates new + /// `payload_id`, and automatically increments the timestamp by `block_time`. + /// Returns a builder allowing further customization. 
+ /// + /// # Examples + /// + /// ```ignore + /// let factory = TestFlashBlockFactory::new(); // 2 second blocks + /// let fb0 = factory.flashblock_at(0).build(); // Block 100, timestamp 1000000 + /// let fb1 = factory.flashblock_for_next_block(&fb0).build(); // Block 101, timestamp 1000002 + /// let fb2 = factory.flashblock_for_next_block(&fb1).transactions(txs).build(); // Customize + /// ``` + pub(crate) fn flashblock_for_next_block(&self, previous: &FlashBlock) -> TestFlashBlockBuilder { + let prev_timestamp = + previous.base.as_ref().map(|b| b.timestamp).unwrap_or(self.base_timestamp); + + self.builder() + .index(0) + .block_number(previous.metadata.block_number + 1) + .payload_id(PayloadId::new(B256::random().0[0..8].try_into().unwrap())) + .parent_hash(previous.diff.block_hash) + .timestamp(prev_timestamp + self.block_time) + } + + /// Returns a custom builder for full control over flashblock creation. + /// + /// Use this when the convenience methods don't provide enough control. + /// + /// # Examples + /// + /// ```ignore + /// let factory = TestFlashBlockFactory::new(); + /// let fb = factory.builder() + /// .index(5) + /// .block_number(200) + /// .parent_hash(specific_hash) + /// .state_root(computed_root) + /// .build(); + /// ``` + pub(crate) fn builder(&self) -> TestFlashBlockBuilder { + TestFlashBlockBuilder { + index: 0, + block_number: self.current_block_number, + payload_id: PayloadId::new([1u8; 8]), + parent_hash: B256::random(), + timestamp: self.base_timestamp, + base: None, + block_hash: B256::random(), + state_root: B256::ZERO, + receipts_root: B256::ZERO, + logs_bloom: Bloom::default(), + gas_used: 0, + transactions: vec![], + withdrawals: vec![], + withdrawals_root: B256::ZERO, + blob_gas_used: None, + } + } +} + +/// Custom builder for creating test flashblocks with full control. +/// +/// Created via [`TestFlashBlockFactory::builder()`]. 
+#[derive(Debug)] +pub(crate) struct TestFlashBlockBuilder { + index: u64, + block_number: u64, + payload_id: PayloadId, + parent_hash: B256, + timestamp: u64, + base: Option, + block_hash: B256, + state_root: B256, + receipts_root: B256, + logs_bloom: Bloom, + gas_used: u64, + transactions: Vec, + withdrawals: Vec, + withdrawals_root: B256, + blob_gas_used: Option, +} + +impl TestFlashBlockBuilder { + /// Sets the flashblock index. + pub(crate) fn index(mut self, index: u64) -> Self { + self.index = index; + self + } + + /// Sets the block number. + pub(crate) fn block_number(mut self, block_number: u64) -> Self { + self.block_number = block_number; + self + } + + /// Sets the payload ID. + pub(crate) fn payload_id(mut self, payload_id: PayloadId) -> Self { + self.payload_id = payload_id; + self + } + + /// Sets the parent hash. + pub(crate) fn parent_hash(mut self, parent_hash: B256) -> Self { + self.parent_hash = parent_hash; + self + } + + /// Sets the timestamp. + pub(crate) fn timestamp(mut self, timestamp: u64) -> Self { + self.timestamp = timestamp; + self + } + + /// Sets the base payload. Automatically created for index 0 if not set. + #[allow(dead_code)] + pub(crate) fn base(mut self, base: OpFlashblockPayloadBase) -> Self { + self.base = Some(base); + self + } + + /// Sets the block hash in the diff. + #[allow(dead_code)] + pub(crate) fn block_hash(mut self, block_hash: B256) -> Self { + self.block_hash = block_hash; + self + } + + /// Sets the state root in the diff. + #[allow(dead_code)] + pub(crate) fn state_root(mut self, state_root: B256) -> Self { + self.state_root = state_root; + self + } + + /// Sets the receipts root in the diff. + #[allow(dead_code)] + pub(crate) fn receipts_root(mut self, receipts_root: B256) -> Self { + self.receipts_root = receipts_root; + self + } + + /// Sets the transactions in the diff. 
+ pub(crate) fn transactions(mut self, transactions: Vec) -> Self { + self.transactions = transactions; + self + } + + /// Sets the gas used in the diff. + #[allow(dead_code)] + pub(crate) fn gas_used(mut self, gas_used: u64) -> Self { + self.gas_used = gas_used; + self + } + + /// Builds the flashblock. + /// + /// If index is 0 and no base was explicitly set, creates a default base. + pub(crate) fn build(mut self) -> FlashBlock { + // Auto-create base for index 0 if not set + if self.index == 0 && self.base.is_none() { + self.base = Some(OpFlashblockPayloadBase { + parent_hash: self.parent_hash, + parent_beacon_block_root: B256::random(), + fee_recipient: Address::default(), + prev_randao: B256::random(), + block_number: self.block_number, + gas_limit: 30_000_000, + timestamp: self.timestamp, + extra_data: Default::default(), + base_fee_per_gas: U256::from(1_000_000_000u64), + }); + } + + FlashBlock { + index: self.index, + payload_id: self.payload_id, + base: self.base, + diff: OpFlashblockPayloadDelta { + block_hash: self.block_hash, + state_root: self.state_root, + receipts_root: self.receipts_root, + logs_bloom: self.logs_bloom, + gas_used: self.gas_used, + transactions: self.transactions, + withdrawals: self.withdrawals, + withdrawals_root: self.withdrawals_root, + blob_gas_used: self.blob_gas_used, + }, + metadata: OpFlashblockPayloadMetadata { + block_number: self.block_number, + receipts: Default::default(), + new_account_balances: Default::default(), + }, + } + } +} diff --git a/op-reth/crates/flashblocks/src/validation.rs b/rust/op-reth/crates/flashblocks/src/validation.rs similarity index 100% rename from op-reth/crates/flashblocks/src/validation.rs rename to rust/op-reth/crates/flashblocks/src/validation.rs diff --git a/op-reth/crates/flashblocks/src/worker.rs b/rust/op-reth/crates/flashblocks/src/worker.rs similarity index 96% rename from op-reth/crates/flashblocks/src/worker.rs rename to rust/op-reth/crates/flashblocks/src/worker.rs index 
e1b29c27296..202056ba727 100644 --- a/op-reth/crates/flashblocks/src/worker.rs +++ b/rust/op-reth/crates/flashblocks/src/worker.rs @@ -1,18 +1,18 @@ use crate::PendingFlashBlock; -use alloy_eips::{eip2718::WithEncoded, BlockNumberOrTag}; +use alloy_eips::{BlockNumberOrTag, eip2718::WithEncoded}; use alloy_primitives::B256; use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; use reth_chain_state::{ComputedTrieData, ExecutedBlock}; use reth_errors::RethError; use reth_evm::{ - execute::{BlockBuilder, BlockBuilderOutcome}, ConfigureEvm, + execute::{BlockBuilder, BlockBuilderOutcome}, }; use reth_execution_types::BlockExecutionOutput; use reth_primitives_traits::{BlockTy, HeaderTy, NodePrimitives, ReceiptTy, Recovered}; use reth_revm::{cached::CachedReads, database::StateProviderDatabase, db::State}; use reth_rpc_eth_types::{EthApiError, PendingBlock}; -use reth_storage_api::{noop::NoopProvider, BlockReaderIdExt, StateProviderFactory}; +use reth_storage_api::{BlockReaderIdExt, StateProviderFactory, noop::NoopProvider}; use std::{ sync::Arc, time::{Duration, Instant}, @@ -76,7 +76,7 @@ where if args.base.parent_hash != latest_hash { trace!(target: "flashblocks", flashblock_parent = ?args.base.parent_hash, local_latest=?latest.num_hash(),"Skipping non consecutive flashblock"); // doesn't attach to the latest block - return Ok(None) + return Ok(None); } let state_provider = self.provider.history_by_block_hash(latest.hash())?; diff --git a/op-reth/crates/flashblocks/src/ws/decoding.rs b/rust/op-reth/crates/flashblocks/src/ws/decoding.rs similarity index 100% rename from op-reth/crates/flashblocks/src/ws/decoding.rs rename to rust/op-reth/crates/flashblocks/src/ws/decoding.rs diff --git a/rust/op-reth/crates/flashblocks/src/ws/mod.rs b/rust/op-reth/crates/flashblocks/src/ws/mod.rs new file mode 100644 index 00000000000..4412acff24b --- /dev/null +++ b/rust/op-reth/crates/flashblocks/src/ws/mod.rs @@ -0,0 +1,6 @@ +pub use stream::{WsConnect, WsConnector, 
WsFlashBlockStream}; + +mod decoding; +pub use decoding::FlashBlockDecoder; + +mod stream; diff --git a/rust/op-reth/crates/flashblocks/src/ws/stream.rs b/rust/op-reth/crates/flashblocks/src/ws/stream.rs new file mode 100644 index 00000000000..f33c75c785c --- /dev/null +++ b/rust/op-reth/crates/flashblocks/src/ws/stream.rs @@ -0,0 +1,543 @@ +use crate::{FlashBlock, ws::FlashBlockDecoder}; +use futures_util::{ + FutureExt, Sink, Stream, StreamExt, + stream::{SplitSink, SplitStream}, +}; +use std::{ + fmt::{Debug, Formatter}, + future::Future, + pin::Pin, + task::{Context, Poll, ready}, +}; +use tokio::net::TcpStream; +use tokio_tungstenite::{ + MaybeTlsStream, WebSocketStream, connect_async, + tungstenite::{Bytes, Error, Message, protocol::CloseFrame}, +}; +use tracing::debug; +use url::Url; + +/// An asynchronous stream of [`FlashBlock`] from a websocket connection. +/// +/// The stream attempts to connect to a websocket URL and then decode each received item. +/// +/// If the connection fails, the error is returned and connection retried. The number of retries is +/// unbounded. +pub struct WsFlashBlockStream { + ws_url: Url, + state: State, + connector: Connector, + decoder: Box, + connect: ConnectFuture, + stream: Option, + sink: Option, +} + +impl WsFlashBlockStream { + /// Creates a new websocket stream over `ws_url`. + pub fn new(ws_url: Url) -> Self { + Self { + ws_url, + state: State::default(), + connector: WsConnector, + decoder: Box::new(()), + connect: Box::pin(async move { Err(Error::ConnectionClosed)? }), + stream: None, + sink: None, + } + } + + /// Sets the [`FlashBlock`] decoder for the websocket stream. + pub fn with_decoder(self, decoder: Box) -> Self { + Self { decoder, ..self } + } +} + +impl WsFlashBlockStream { + /// Creates a new websocket stream over `ws_url`. 
+ pub fn with_connector(ws_url: Url, connector: C) -> Self { + Self { + ws_url, + state: State::default(), + decoder: Box::new(()), + connector, + connect: Box::pin(async move { Err(Error::ConnectionClosed)? }), + stream: None, + sink: None, + } + } +} + +impl Stream for WsFlashBlockStream +where + Str: Stream> + Unpin, + S: Sink + Send + Unpin, + C: WsConnect + Clone + Send + 'static + Unpin, +{ + type Item = eyre::Result; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + let this = self.get_mut(); + + 'start: loop { + if this.state == State::Initial { + this.connect(); + } + + if this.state == State::Connect { + match ready!(this.connect.poll_unpin(cx)) { + Ok((sink, stream)) => this.stream(sink, stream), + Err(err) => { + this.state = State::Initial; + + return Poll::Ready(Some(Err(err))); + } + } + } + + while let State::Stream(msg) = &mut this.state { + if msg.is_some() { + let mut sink = Pin::new(this.sink.as_mut().unwrap()); + let _ = ready!(sink.as_mut().poll_ready(cx)); + if let Some(pong) = msg.take() { + let _ = sink.as_mut().start_send(pong); + } + let _ = ready!(sink.as_mut().poll_flush(cx)); + } + + let Some(msg) = ready!( + this.stream + .as_mut() + .expect("Stream state should be unreachable without stream") + .poll_next_unpin(cx) + ) else { + this.state = State::Initial; + + continue 'start; + }; + + match msg { + Ok(Message::Binary(bytes)) => { + return Poll::Ready(Some(this.decoder.decode(bytes))); + } + Ok(Message::Text(bytes)) => { + return Poll::Ready(Some(this.decoder.decode(bytes.into()))); + } + Ok(Message::Ping(bytes)) => this.ping(bytes), + Ok(Message::Close(frame)) => this.close(frame), + Ok(msg) => { + debug!(target: "flashblocks", "Received unexpected message: {:?}", msg) + } + Err(err) => return Poll::Ready(Some(Err(err.into()))), + } + } + } + } +} + +impl WsFlashBlockStream +where + C: WsConnect + Clone + Send + 'static, +{ + fn connect(&mut self) { + let ws_url = self.ws_url.clone(); + let mut connector = 
self.connector.clone(); + + Pin::new(&mut self.connect).set(Box::pin(async move { connector.connect(ws_url).await })); + + self.state = State::Connect; + } + + fn stream(&mut self, sink: S, stream: Stream) { + self.sink.replace(sink); + self.stream.replace(stream); + + self.state = State::Stream(None); + } + + fn ping(&mut self, pong: Bytes) { + if let State::Stream(current) = &mut self.state { + current.replace(Message::Pong(pong)); + } + } + + fn close(&mut self, frame: Option) { + if let State::Stream(current) = &mut self.state { + current.replace(Message::Close(frame)); + } + } +} + +impl Debug for WsFlashBlockStream { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("FlashBlockStream") + .field("ws_url", &self.ws_url) + .field("state", &self.state) + .field("connector", &self.connector) + .field("connect", &"Pin>>") + .field("stream", &self.stream) + .finish() + } +} + +#[derive(Default, Debug, Eq, PartialEq)] +enum State { + #[default] + Initial, + Connect, + Stream(Option), +} + +type Ws = WebSocketStream>; +type WsStream = SplitStream; +type WsSink = SplitSink; +type ConnectFuture = + Pin> + Send + 'static>>; + +/// The `WsConnect` trait allows for connecting to a websocket. +/// +/// Implementors of the `WsConnect` trait are called 'connectors'. +/// +/// Connectors are defined by one method, [`connect()`]. A call to [`connect()`] attempts to +/// establish a secure websocket connection and return an asynchronous stream of [`Message`]s +/// wrapped in a [`Result`]. +/// +/// [`connect()`]: Self::connect +pub trait WsConnect { + /// An associated `Stream` of [`Message`]s wrapped in a [`Result`] that this connection returns. + type Stream; + + /// An associated `Sink` of [`Message`]s that this connection sends. + type Sink; + + /// Asynchronously connects to a websocket hosted on `ws_url`. + /// + /// See the [`WsConnect`] documentation for details. 
+ fn connect( + &mut self, + ws_url: Url, + ) -> impl Future> + Send; +} + +/// Establishes a secure websocket subscription. +/// +/// See the [`WsConnect`] documentation for details. +#[derive(Debug, Clone)] +pub struct WsConnector; + +impl WsConnect for WsConnector { + type Stream = WsStream; + type Sink = WsSink; + + async fn connect(&mut self, ws_url: Url) -> eyre::Result<(WsSink, WsStream)> { + let (stream, _response) = connect_async(ws_url.as_str()).await?; + + Ok(stream.split()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::bytes::Bytes; + use brotli::enc::BrotliEncoderParams; + use std::{future, iter}; + use tokio_tungstenite::tungstenite::{ + Error, + protocol::frame::{Frame, coding::CloseCode}, + }; + + /// A `FakeConnector` creates [`FakeStream`]. + /// + /// It simulates the websocket stream instead of connecting to a real websocket. + #[derive(Clone)] + struct FakeConnector(FakeStream); + + /// A `FakeConnectorWithSink` creates [`FakeStream`] and [`FakeSink`]. + /// + /// It simulates the websocket stream instead of connecting to a real websocket. It also accepts + /// messages into an in-memory buffer. + #[derive(Clone)] + struct FakeConnectorWithSink(FakeStream); + + /// Simulates a websocket stream while using a preprogrammed set of messages instead. 
+ #[derive(Default)] + struct FakeStream(Vec>); + + impl FakeStream { + fn new(mut messages: Vec>) -> Self { + messages.reverse(); + + Self(messages) + } + } + + impl Clone for FakeStream { + fn clone(&self) -> Self { + Self( + self.0 + .iter() + .map(|v| match v { + Ok(msg) => Ok(msg.clone()), + Err(err) => Err(match err { + Error::AttackAttempt => Error::AttackAttempt, + err => unimplemented!("Cannot clone this error: {err}"), + }), + }) + .collect(), + ) + } + } + + impl Stream for FakeStream { + type Item = Result; + + fn poll_next(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll> { + let this = self.get_mut(); + + Poll::Ready(this.0.pop()) + } + } + + #[derive(Clone)] + struct NoopSink; + + impl Sink for NoopSink { + type Error = (); + + fn poll_ready( + self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> Poll> { + unimplemented!() + } + + fn start_send(self: Pin<&mut Self>, _item: T) -> Result<(), Self::Error> { + unimplemented!() + } + + fn poll_flush( + self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> Poll> { + unimplemented!() + } + + fn poll_close( + self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> Poll> { + unimplemented!() + } + } + + /// Receives [`Message`]s and stores them. A call to `start_send` first buffers the message + /// to simulate flushing behavior. 
+ #[derive(Clone, Default)] + struct FakeSink(Option, Vec); + + impl Sink for FakeSink { + type Error = (); + + fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + self.poll_flush(cx) + } + + fn start_send(self: Pin<&mut Self>, item: Message) -> Result<(), Self::Error> { + self.get_mut().0.replace(item); + Ok(()) + } + + fn poll_flush( + self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> Poll> { + let this = self.get_mut(); + if let Some(item) = this.0.take() { + this.1.push(item); + } + Poll::Ready(Ok(())) + } + + fn poll_close( + self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + } + + impl WsConnect for FakeConnector { + type Stream = FakeStream; + type Sink = NoopSink; + + fn connect( + &mut self, + _ws_url: Url, + ) -> impl Future> + Send { + future::ready(Ok((NoopSink, self.0.clone()))) + } + } + + impl>> From for FakeConnector { + fn from(value: T) -> Self { + Self(FakeStream::new(value.into_iter().collect())) + } + } + + impl WsConnect for FakeConnectorWithSink { + type Stream = FakeStream; + type Sink = FakeSink; + + fn connect( + &mut self, + _ws_url: Url, + ) -> impl Future> + Send { + future::ready(Ok((FakeSink::default(), self.0.clone()))) + } + } + + impl>> From for FakeConnectorWithSink { + fn from(value: T) -> Self { + Self(FakeStream::new(value.into_iter().collect())) + } + } + + /// Repeatedly fails to connect with the given error message. 
+ #[derive(Clone)] + struct FailingConnector(String); + + impl WsConnect for FailingConnector { + type Stream = FakeStream; + type Sink = NoopSink; + + fn connect( + &mut self, + _ws_url: Url, + ) -> impl Future> + Send { + future::ready(Err(eyre::eyre!("{}", &self.0))) + } + } + + fn to_json_message, F: Fn(B) -> Message>( + wrapper_f: F, + ) -> impl Fn(&FlashBlock) -> Result + use { + move |block| to_json_message_using(block, &wrapper_f) + } + + fn to_json_binary_message(block: &FlashBlock) -> Result { + to_json_message_using(block, Message::Binary) + } + + fn to_json_message_using, F: Fn(B) -> Message>( + block: &FlashBlock, + wrapper_f: F, + ) -> Result { + Ok(wrapper_f(B::try_from(Bytes::from(serde_json::to_vec(block).unwrap())).unwrap())) + } + + fn to_brotli_message(block: &FlashBlock) -> Result { + let json = serde_json::to_vec(block).unwrap(); + let mut compressed = Vec::new(); + brotli::BrotliCompress( + &mut json.as_slice(), + &mut compressed, + &BrotliEncoderParams::default(), + )?; + + Ok(Message::Binary(Bytes::from(compressed))) + } + + fn flashblock() -> FlashBlock { + Default::default() + } + + #[test_case::test_case(to_json_message(Message::Binary); "json binary")] + #[test_case::test_case(to_json_message(Message::Text); "json UTF-8")] + #[test_case::test_case(to_brotli_message; "brotli")] + #[tokio::test] + async fn test_stream_decodes_messages_successfully( + to_message: impl Fn(&FlashBlock) -> Result, + ) { + let flashblocks = [flashblock()]; + let connector = FakeConnector::from(flashblocks.iter().map(to_message)); + let ws_url = "http://localhost".parse().unwrap(); + let stream = WsFlashBlockStream::with_connector(ws_url, connector); + + let actual_messages: Vec<_> = stream.take(1).map(Result::unwrap).collect().await; + let expected_messages = flashblocks.to_vec(); + + assert_eq!(actual_messages, expected_messages); + } + + #[test_case::test_case(Message::Pong(Bytes::from(b"test".as_slice())); "pong")] + 
#[test_case::test_case(Message::Frame(Frame::pong(b"test".as_slice())); "frame")] + #[tokio::test] + async fn test_stream_ignores_unexpected_message(message: Message) { + let flashblock = flashblock(); + let connector = FakeConnector::from([Ok(message), to_json_binary_message(&flashblock)]); + let ws_url = "http://localhost".parse().unwrap(); + let mut stream = WsFlashBlockStream::with_connector(ws_url, connector); + + let expected_message = flashblock; + let actual_message = + stream.next().await.expect("Binary message should not be ignored").unwrap(); + + assert_eq!(actual_message, expected_message) + } + + #[tokio::test] + async fn test_stream_passes_errors_through() { + let connector = FakeConnector::from([Err(Error::AttackAttempt)]); + let ws_url = "http://localhost".parse().unwrap(); + let stream = WsFlashBlockStream::with_connector(ws_url, connector); + + let actual_messages: Vec<_> = + stream.take(1).map(Result::unwrap_err).map(|e| format!("{e}")).collect().await; + let expected_messages = vec!["Attack attempt detected".to_owned()]; + + assert_eq!(actual_messages, expected_messages); + } + + #[tokio::test] + async fn test_connect_error_causes_retries() { + let tries = 3; + let error_msg = "test".to_owned(); + let connector = FailingConnector(error_msg.clone()); + let ws_url = "http://localhost".parse().unwrap(); + let stream = WsFlashBlockStream::with_connector(ws_url, connector); + + let actual_errors: Vec<_> = + stream.take(tries).map(Result::unwrap_err).map(|e| format!("{e}")).collect().await; + let expected_errors: Vec<_> = iter::repeat_n(error_msg, tries).collect(); + + assert_eq!(actual_errors, expected_errors); + } + + #[test_case::test_case( + Message::Close(Some(CloseFrame { code: CloseCode::Normal, reason: "test".into() })), + Message::Close(Some(CloseFrame { code: CloseCode::Normal, reason: "test".into() })); + "close" + )] + #[test_case::test_case( + Message::Ping(Bytes::from_static(&[1u8, 2, 3])), + Message::Pong(Bytes::from_static(&[1u8, 2, 
3])); + "ping" + )] + #[tokio::test] + async fn test_stream_responds_to_messages(msg: Message, expected_response: Message) { + let flashblock = flashblock(); + let messages = [Ok(msg), to_json_binary_message(&flashblock)]; + let connector = FakeConnectorWithSink::from(messages); + let ws_url = "http://localhost".parse().unwrap(); + let mut stream = WsFlashBlockStream::with_connector(ws_url, connector); + + let _ = stream.next().await; + + let expected_response = vec![expected_response]; + let FakeSink(actual_buffer, actual_response) = stream.sink.unwrap(); + + assert!(actual_buffer.is_none(), "buffer not flushed: {actual_buffer:#?}"); + assert_eq!(actual_response, expected_response); + } +} diff --git a/op-reth/crates/flashblocks/tests/it/main.rs b/rust/op-reth/crates/flashblocks/tests/it/main.rs similarity index 100% rename from op-reth/crates/flashblocks/tests/it/main.rs rename to rust/op-reth/crates/flashblocks/tests/it/main.rs diff --git a/rust/op-reth/crates/flashblocks/tests/it/stream.rs b/rust/op-reth/crates/flashblocks/tests/it/stream.rs new file mode 100644 index 00000000000..a590d8f0e0c --- /dev/null +++ b/rust/op-reth/crates/flashblocks/tests/it/stream.rs @@ -0,0 +1,15 @@ +use futures_util::stream::StreamExt; +use reth_optimism_flashblocks::WsFlashBlockStream; + +#[tokio::test] +async fn test_online_streaming_flashblocks_from_remote_source_is_successful() { + let items = 3; + let ws_url = "wss://sepolia.flashblocks.base.org/ws".parse().unwrap(); + let stream = WsFlashBlockStream::new(ws_url); + + let blocks: Vec<_> = stream.take(items).collect().await; + + for block in blocks { + assert!(block.is_ok()); + } +} diff --git a/rust/op-reth/crates/hardforks/Cargo.toml b/rust/op-reth/crates/hardforks/Cargo.toml new file mode 100644 index 00000000000..4cf0eff3e30 --- /dev/null +++ b/rust/op-reth/crates/hardforks/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "reth-optimism-forks" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true 
+license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "Optimism hardforks used in op-reth" + +[lints] +workspace = true + +[dependencies] +# reth +reth-ethereum-forks.workspace = true + +# ethereum +alloy-op-hardforks.workspace = true +alloy-primitives.workspace = true + +# misc +once_cell.workspace = true + +[features] +default = ["std"] +std = [ + "alloy-primitives/std", + "once_cell/std", + "reth-ethereum-forks/std", +] +serde = [ + "alloy-primitives/serde", + "reth-ethereum-forks/serde", + "alloy-op-hardforks/serde", +] diff --git a/op-reth/crates/hardforks/src/lib.rs b/rust/op-reth/crates/hardforks/src/lib.rs similarity index 100% rename from op-reth/crates/hardforks/src/lib.rs rename to rust/op-reth/crates/hardforks/src/lib.rs diff --git a/rust/op-reth/crates/node/Cargo.toml b/rust/op-reth/crates/node/Cargo.toml new file mode 100644 index 00000000000..b66a4e431ae --- /dev/null +++ b/rust/op-reth/crates/node/Cargo.toml @@ -0,0 +1,141 @@ +[package] +name = "reth-optimism-node" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +# reth +reth-chainspec.workspace = true +## ensure secp256k1 recovery with rayon support is activated +reth-primitives-traits = { workspace = true, features = ["secp256k1", "rayon"] } +reth-payload-builder.workspace = true +reth-consensus.workspace = true +reth-node-api.workspace = true +reth-node-builder.workspace = true +reth-tracing.workspace = true +reth-provider.workspace = true +reth-transaction-pool.workspace = true +reth-network.workspace = true +reth-evm.workspace = true +reth-rpc-server-types.workspace = true +reth-tasks = { workspace = true, optional = true } +reth-trie-common.workspace = true +reth-node-core.workspace = true 
+reth-rpc-engine-api.workspace = true +reth-engine-local = { workspace = true, features = ["op"] } +reth-rpc-api.workspace = true + +# op-reth +reth-optimism-payload-builder.workspace = true +reth-optimism-evm = { workspace = true, features = ["std", "rpc"] } +reth-optimism-rpc.workspace = true +reth-optimism-storage.workspace = true +reth-optimism-txpool.workspace = true +reth-optimism-chainspec.workspace = true +reth-optimism-consensus = { workspace = true, features = ["std"] } +reth-optimism-forks.workspace = true +reth-optimism-primitives = { workspace = true, features = ["serde", "serde-bincode-compat", "reth-codec"] } + +# revm with required optimism features +# Note: this must be kept to ensure all features are properly enabled/forwarded +revm = { workspace = true, features = ["secp256k1", "blst", "c-kzg", "memory_limit"] } +op-revm.workspace = true + +# ethereum +alloy-primitives.workspace = true +op-alloy-consensus.workspace = true +op-alloy-rpc-types-engine.workspace = true +alloy-rpc-types-engine.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-consensus.workspace = true + +# async +tokio.workspace = true + +# misc +clap.workspace = true +serde.workspace = true +eyre.workspace = true +url.workspace = true + +# test-utils dependencies +reth-db-api = { workspace = true, optional = true } +reth-e2e-test-utils = { workspace = true, optional = true } +alloy-genesis = { workspace = true, optional = true } +serde_json = { workspace = true, optional = true } + +[dev-dependencies] +reth-optimism-node = { workspace = true, features = ["test-utils"] } +reth-db = { workspace = true, features = ["op", "test-utils"] } +reth-node-builder = { workspace = true, features = ["test-utils"] } +reth-provider = { workspace = true, features = ["test-utils"] } +reth-tasks.workspace = true +reth-payload-util.workspace = true +reth-revm = { workspace = true, features = ["std"] } +reth-rpc.workspace = true +reth-rpc-eth-types.workspace = true 
+reth-stages-types.workspace = true +reth-trie-db.workspace = true + +alloy-network.workspace = true +alloy-op-hardforks.workspace = true +futures.workspace = true +op-alloy-network.workspace = true + +[features] +default = ["reth-codec"] +asm-keccak = [ + "alloy-primitives/asm-keccak", + "reth-optimism-node/asm-keccak", + "reth-node-core/asm-keccak", + "revm/asm-keccak", +] +keccak-cache-global = [ + "alloy-primitives/keccak-cache-global", + "reth-node-core/keccak-cache-global", + "reth-optimism-node/keccak-cache-global", +] +js-tracer = [ + "reth-node-builder/js-tracer", + "reth-optimism-node/js-tracer", + "reth-rpc/js-tracer", + "reth-rpc-eth-types/js-tracer", +] +test-utils = [ + "reth-codec", + "reth-tasks", + "dep:reth-db-api", + "reth-db-api/op", + "reth-e2e-test-utils", + "alloy-genesis", + "serde_json", + "reth-node-builder/test-utils", + "reth-chainspec/test-utils", + "reth-consensus/test-utils", + "reth-evm/test-utils", + "reth-network/test-utils", + "reth-payload-builder/test-utils", + "reth-revm/test-utils", + "reth-db/test-utils", + "reth-provider/test-utils", + "reth-transaction-pool/test-utils", + "reth-optimism-node/test-utils", + "reth-optimism-primitives/arbitrary", + "reth-primitives-traits/test-utils", + "reth-trie-common/test-utils", + "reth-trie-db/test-utils", + "reth-stages-types/test-utils", + "reth-db-api?/test-utils" +] +reth-codec = ["reth-optimism-primitives/reth-codec"] + +[[test]] +name = "e2e_testsuite" +path = "tests/e2e-testsuite/main.rs" diff --git a/op-reth/crates/node/src/args.rs b/rust/op-reth/crates/node/src/args.rs similarity index 100% rename from op-reth/crates/node/src/args.rs rename to rust/op-reth/crates/node/src/args.rs diff --git a/rust/op-reth/crates/node/src/engine.rs b/rust/op-reth/crates/node/src/engine.rs new file mode 100644 index 00000000000..217cb953e68 --- /dev/null +++ b/rust/op-reth/crates/node/src/engine.rs @@ -0,0 +1,511 @@ +use alloy_consensus::BlockHeader; +use alloy_primitives::B256; +use 
alloy_rpc_types_engine::{ExecutionPayloadEnvelopeV2, ExecutionPayloadV1}; +use op_alloy_rpc_types_engine::{ + OpExecutionData, OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, + OpPayloadAttributes, +}; +use reth_consensus::ConsensusError; +use reth_node_api::{ + BuiltPayload, EngineApiValidator, EngineTypes, NodePrimitives, PayloadValidator, + payload::{ + EngineApiMessageVersion, EngineObjectValidationError, MessageValidationKind, + NewPayloadError, PayloadOrAttributes, PayloadTypes, VersionSpecificValidationError, + validate_parent_beacon_block_root_presence, + }, + validate_version_specific_fields, +}; +use reth_optimism_consensus::isthmus; +use reth_optimism_forks::OpHardforks; +use reth_optimism_payload_builder::{OpExecutionPayloadValidator, OpPayloadTypes}; +use reth_optimism_primitives::{L2_TO_L1_MESSAGE_PASSER_ADDRESS, OpBlock}; +use reth_primitives_traits::{Block, RecoveredBlock, SealedBlock, SignedTransaction}; +use reth_provider::StateProviderFactory; +use reth_trie_common::{HashedPostState, KeyHasher}; +use std::{marker::PhantomData, sync::Arc}; + +/// The types used in the optimism beacon consensus engine. 
+#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] +#[non_exhaustive] +pub struct OpEngineTypes { + _marker: PhantomData, +} + +impl> PayloadTypes for OpEngineTypes { + type ExecutionData = T::ExecutionData; + type BuiltPayload = T::BuiltPayload; + type PayloadAttributes = T::PayloadAttributes; + type PayloadBuilderAttributes = T::PayloadBuilderAttributes; + + fn block_to_payload( + block: SealedBlock< + <::Primitives as NodePrimitives>::Block, + >, + ) -> ::ExecutionData { + OpExecutionData::from_block_unchecked( + block.hash(), + &block.into_block().into_ethereum_block(), + ) + } +} + +impl> EngineTypes for OpEngineTypes +where + T::BuiltPayload: BuiltPayload> + + TryInto + + TryInto + + TryInto + + TryInto, +{ + type ExecutionPayloadEnvelopeV1 = ExecutionPayloadV1; + type ExecutionPayloadEnvelopeV2 = ExecutionPayloadEnvelopeV2; + type ExecutionPayloadEnvelopeV3 = OpExecutionPayloadEnvelopeV3; + type ExecutionPayloadEnvelopeV4 = OpExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV5 = OpExecutionPayloadEnvelopeV4; + type ExecutionPayloadEnvelopeV6 = OpExecutionPayloadEnvelopeV4; +} + +/// Validator for Optimism engine API. +#[derive(Debug)] +pub struct OpEngineValidator { + inner: OpExecutionPayloadValidator, + provider: P, + hashed_addr_l2tol1_msg_passer: B256, + phantom: PhantomData, +} + +impl OpEngineValidator { + /// Instantiates a new validator. 
+ pub fn new(chain_spec: Arc, provider: P) -> Self { + let hashed_addr_l2tol1_msg_passer = KH::hash_key(L2_TO_L1_MESSAGE_PASSER_ADDRESS); + Self { + inner: OpExecutionPayloadValidator::new(chain_spec), + provider, + hashed_addr_l2tol1_msg_passer, + phantom: PhantomData, + } + } +} + +impl Clone for OpEngineValidator +where + P: Clone, + ChainSpec: OpHardforks, +{ + fn clone(&self) -> Self { + Self { + inner: OpExecutionPayloadValidator::new(self.inner.clone()), + provider: self.provider.clone(), + hashed_addr_l2tol1_msg_passer: self.hashed_addr_l2tol1_msg_passer, + phantom: Default::default(), + } + } +} + +impl OpEngineValidator +where + ChainSpec: OpHardforks, +{ + /// Returns the chain spec used by the validator. + #[inline] + pub fn chain_spec(&self) -> &ChainSpec { + self.inner.chain_spec() + } +} + +impl PayloadValidator for OpEngineValidator +where + P: StateProviderFactory + Unpin + 'static, + Tx: SignedTransaction + Unpin + 'static, + ChainSpec: OpHardforks + Send + Sync + 'static, + Types: PayloadTypes, +{ + type Block = alloy_consensus::Block; + + fn validate_block_post_execution_with_hashed_state( + &self, + state_updates: &HashedPostState, + block: &RecoveredBlock, + ) -> Result<(), ConsensusError> { + if self.chain_spec().is_isthmus_active_at_timestamp(block.timestamp()) { + let Ok(state) = self.provider.state_by_block_hash(block.parent_hash()) else { + // FIXME: we don't necessarily have access to the parent block here because the + // parent block isn't necessarily part of the canonical chain yet. Instead this + // function should receive the list of in memory blocks as input + return Ok(()); + }; + let predeploy_storage_updates = state_updates + .storages + .get(&self.hashed_addr_l2tol1_msg_passer) + .cloned() + .unwrap_or_default(); + isthmus::verify_withdrawals_root_prehashed( + predeploy_storage_updates, + state, + block.header(), + ) + .map_err(|err| { + ConsensusError::Other(format!("failed to verify block post-execution: {err}")) + })? 
+ } + + Ok(()) + } + + fn convert_payload_to_block( + &self, + payload: OpExecutionData, + ) -> Result, NewPayloadError> { + self.inner.ensure_well_formed_payload(payload).map_err(NewPayloadError::other) + } +} + +impl EngineApiValidator for OpEngineValidator +where + Types: PayloadTypes< + PayloadAttributes = OpPayloadAttributes, + ExecutionData = OpExecutionData, + BuiltPayload: BuiltPayload>, + >, + P: StateProviderFactory + Unpin + 'static, + Tx: SignedTransaction + Unpin + 'static, + ChainSpec: OpHardforks + Send + Sync + 'static, +{ + fn validate_version_specific_fields( + &self, + version: EngineApiMessageVersion, + payload_or_attrs: PayloadOrAttributes< + '_, + Types::ExecutionData, + ::PayloadAttributes, + >, + ) -> Result<(), EngineObjectValidationError> { + validate_withdrawals_presence( + self.chain_spec(), + version, + payload_or_attrs.message_validation_kind(), + payload_or_attrs.timestamp(), + payload_or_attrs.withdrawals().is_some(), + )?; + validate_parent_beacon_block_root_presence( + self.chain_spec(), + version, + payload_or_attrs.message_validation_kind(), + payload_or_attrs.timestamp(), + payload_or_attrs.parent_beacon_block_root().is_some(), + ) + } + + fn ensure_well_formed_attributes( + &self, + version: EngineApiMessageVersion, + attributes: &::PayloadAttributes, + ) -> Result<(), EngineObjectValidationError> { + validate_version_specific_fields( + self.chain_spec(), + version, + PayloadOrAttributes::::PayloadAttributes( + attributes, + ), + )?; + + if attributes.gas_limit.is_none() { + return Err(EngineObjectValidationError::InvalidParams( + "MissingGasLimitInPayloadAttributes".to_string().into(), + )); + } + + if self + .chain_spec() + .is_holocene_active_at_timestamp(attributes.payload_attributes.timestamp) + { + let (elasticity, denominator) = + attributes.decode_eip_1559_params().ok_or_else(|| { + EngineObjectValidationError::InvalidParams( + "MissingEip1559ParamsInPayloadAttributes".to_string().into(), + ) + })?; + + if elasticity != 
0 && denominator == 0 { + return Err(EngineObjectValidationError::InvalidParams( + "Eip1559ParamsDenominatorZero".to_string().into(), + )); + } else if denominator != 0 && elasticity == 0 { + return Err(EngineObjectValidationError::InvalidParams( + "Eip1559ParamsElasticityZero".to_string().into(), + )); + } + } + + if self.chain_spec().is_jovian_active_at_timestamp(attributes.payload_attributes.timestamp) + { + if attributes.min_base_fee.is_none() { + return Err(EngineObjectValidationError::InvalidParams( + "MissingMinBaseFeeInPayloadAttributes".to_string().into(), + )); + } + } else if attributes.min_base_fee.is_some() { + return Err(EngineObjectValidationError::InvalidParams( + "MinBaseFeeNotAllowedBeforeJovian".to_string().into(), + )); + } + + Ok(()) + } +} + +/// Validates the presence of the `withdrawals` field according to the payload timestamp. +/// +/// After Canyon, withdrawals field must be [Some]. +/// Before Canyon, withdrawals field must be [None]; +/// +/// Canyon activates the Shanghai EIPs, see the Canyon specs for more details: +/// +pub fn validate_withdrawals_presence( + chain_spec: impl OpHardforks, + version: EngineApiMessageVersion, + message_validation_kind: MessageValidationKind, + timestamp: u64, + has_withdrawals: bool, +) -> Result<(), EngineObjectValidationError> { + let is_shanghai = chain_spec.is_canyon_active_at_timestamp(timestamp); + + match version { + EngineApiMessageVersion::V1 => { + if has_withdrawals { + return Err(message_validation_kind + .to_error(VersionSpecificValidationError::WithdrawalsNotSupportedInV1)); + } + if is_shanghai { + return Err(message_validation_kind + .to_error(VersionSpecificValidationError::NoWithdrawalsPostShanghai)); + } + } + EngineApiMessageVersion::V2 | + EngineApiMessageVersion::V3 | + EngineApiMessageVersion::V4 | + EngineApiMessageVersion::V5 => { + if is_shanghai && !has_withdrawals { + return Err(message_validation_kind + .to_error(VersionSpecificValidationError::NoWithdrawalsPostShanghai)); 
+ } + if !is_shanghai && has_withdrawals { + return Err(message_validation_kind + .to_error(VersionSpecificValidationError::HasWithdrawalsPreShanghai)); + } + } + }; + + Ok(()) +} + +#[cfg(test)] +mod test { + use super::*; + + use crate::engine; + use alloy_op_hardforks::BASE_SEPOLIA_JOVIAN_TIMESTAMP; + use alloy_primitives::{Address, B64, B256, b64}; + use alloy_rpc_types_engine::PayloadAttributes; + use reth_optimism_chainspec::BASE_SEPOLIA; + use reth_provider::noop::NoopProvider; + use reth_trie_common::KeccakKeyHasher; + + macro_rules! assert_invalid_params_error { + ($result:expr, $msg:expr) => {{ + let err = $result.expect_err("expected InvalidParams error"); + match err { + EngineObjectValidationError::InvalidParams(inner) => { + assert_eq!(inner.to_string(), $msg); + } + other => panic!("expected InvalidParams, got {other:?}"), + } + }}; + } + + const fn get_attributes( + eip_1559_params: Option, + min_base_fee: Option, + timestamp: u64, + ) -> OpPayloadAttributes { + OpPayloadAttributes { + gas_limit: Some(1000), + eip_1559_params, + min_base_fee, + transactions: None, + no_tx_pool: None, + payload_attributes: PayloadAttributes { + timestamp, + prev_randao: B256::ZERO, + suggested_fee_recipient: Address::ZERO, + withdrawals: Some(vec![]), + parent_beacon_block_root: Some(B256::ZERO), + }, + } + } + + #[test] + fn test_well_formed_attributes_pre_holocene() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(None, None, 1732633199); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert!(result.is_ok()); + } + + #[test] + fn test_well_formed_attributes_holocene_no_eip1559_params() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(None, None, 1732633200); + + let result = as 
EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "MissingEip1559ParamsInPayloadAttributes"); + } + + #[test] + fn test_well_formed_attributes_holocene_eip1559_params_zero_denominator() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(Some(b64!("0000000000000008")), None, 1732633200); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "Eip1559ParamsDenominatorZero"); + } + + #[test] + fn test_well_formed_attributes_holocene_eip1559_params_zero_elasticity() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(Some(b64!("0000000800000000")), None, 1732633200); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "Eip1559ParamsElasticityZero"); + } + + #[test] + fn test_well_formed_attributes_holocene_valid() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(Some(b64!("0000000800000008")), None, 1732633200); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert!(result.is_ok()); + } + + #[test] + fn test_well_formed_attributes_holocene_valid_all_zero() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(Some(b64!("0000000000000000")), None, 1732633200); + + let result = as EngineApiValidator< + OpEngineTypes, + 
>>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert!(result.is_ok()); + } + + #[test] + fn test_well_formed_attributes_jovian_valid() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = + get_attributes(Some(b64!("0000000000000000")), Some(1), BASE_SEPOLIA_JOVIAN_TIMESTAMP); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert!(result.is_ok()); + } + + /// After Jovian (and holocene), eip1559 params must be Some + #[test] + fn test_malformed_attributes_jovian_with_eip_1559_params_none() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(None, Some(1), BASE_SEPOLIA_JOVIAN_TIMESTAMP); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "MissingEip1559ParamsInPayloadAttributes"); + } + + /// Before Jovian, min base fee must be None + #[test] + fn test_malformed_attributes_pre_jovian_with_min_base_fee() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = get_attributes(Some(b64!("0000000000000000")), Some(1), 1732633200); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "MinBaseFeeNotAllowedBeforeJovian"); + } + + /// After Jovian, min base fee must be Some + #[test] + fn test_malformed_attributes_post_jovian_with_min_base_fee_none() { + let validator = OpEngineValidator::new::( + BASE_SEPOLIA.clone(), + NoopProvider::default(), + ); + let attributes = + get_attributes(Some(b64!("0000000000000000")), None, 
BASE_SEPOLIA_JOVIAN_TIMESTAMP); + + let result = as EngineApiValidator< + OpEngineTypes, + >>::ensure_well_formed_attributes( + &validator, EngineApiMessageVersion::V3, &attributes, + ); + assert_invalid_params_error!(result, "MissingMinBaseFeeInPayloadAttributes"); + } +} diff --git a/rust/op-reth/crates/node/src/lib.rs b/rust/op-reth/crates/node/src/lib.rs new file mode 100644 index 00000000000..b0e9cea4c0b --- /dev/null +++ b/rust/op-reth/crates/node/src/lib.rs @@ -0,0 +1,51 @@ +//! Standalone crate for Optimism-specific Reth configuration and builder types. +//! +//! # features +//! - `js-tracer`: Enable the `JavaScript` tracer for the `debug_trace` endpoints + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +// Used for feature activation only. +#[cfg(feature = "test-utils")] +use reth_db_api as _; + +/// CLI argument parsing for the optimism node. +pub mod args; + +/// Exports optimism-specific implementations of the [`EngineTypes`](reth_node_api::EngineTypes) +/// trait. +pub mod engine; +pub use engine::OpEngineTypes; + +pub mod node; +pub use node::*; + +pub mod rpc; +pub use rpc::OpEngineApiBuilder; + +pub mod version; +pub use version::OP_NAME_CLIENT; + +pub use reth_optimism_txpool as txpool; + +/// Helpers for running test node instances. 
+#[cfg(feature = "test-utils")] +pub mod utils; + +pub use reth_optimism_payload_builder::{ + self as payload, OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilder, + OpPayloadBuilderAttributes, OpPayloadPrimitives, OpPayloadTypes, config::OpDAConfig, +}; + +pub use reth_optimism_evm::*; + +pub use reth_optimism_storage::OpStorage; + +use op_revm as _; +use revm as _; diff --git a/rust/op-reth/crates/node/src/node.rs b/rust/op-reth/crates/node/src/node.rs new file mode 100644 index 00000000000..c483ed08aef --- /dev/null +++ b/rust/op-reth/crates/node/src/node.rs @@ -0,0 +1,1304 @@ +//! Optimism Node types config. + +use crate::{ + OpEngineApiBuilder, OpEngineTypes, + args::RollupArgs, + engine::OpEngineValidator, + txpool::{OpTransactionPool, OpTransactionValidator}, +}; +use op_alloy_consensus::{OpPooledTransaction, interop::SafetyLevel}; +use op_alloy_rpc_types_engine::OpExecutionData; +use reth_chainspec::{ChainSpecProvider, EthChainSpec, Hardforks}; +use reth_engine_local::LocalPayloadAttributesBuilder; +use reth_evm::ConfigureEvm; +use reth_network::{ + NetworkConfig, NetworkHandle, NetworkManager, NetworkPrimitives, PeersInfo, + types::BasicNetworkPrimitives, +}; +use reth_node_api::{ + AddOnsContext, BuildNextEnv, EngineTypes, FullNodeComponents, HeaderTy, NodeAddOns, + NodePrimitives, PayloadAttributesBuilder, PayloadTypes, PrimitivesTy, TxTy, +}; +use reth_node_builder::{ + BuilderContext, DebugNode, Node, NodeAdapter, NodeComponentsBuilder, + components::{ + BasicPayloadServiceBuilder, ComponentsBuilder, ConsensusBuilder, ExecutorBuilder, + NetworkBuilder, PayloadBuilderBuilder, PoolBuilder, PoolBuilderConfigOverrides, + TxPoolBuilder, + }, + node::{FullNodeTypes, NodeTypes}, + rpc::{ + BasicEngineValidatorBuilder, EngineApiBuilder, EngineValidatorAddOn, + EngineValidatorBuilder, EthApiBuilder, Identity, PayloadValidatorBuilder, RethRpcAddOns, + RethRpcMiddleware, RethRpcServerHandles, RpcAddOns, RpcContext, RpcHandle, + }, +}; +use 
reth_optimism_chainspec::{OpChainSpec, OpHardfork}; +use reth_optimism_consensus::OpBeaconConsensus; +use reth_optimism_evm::{OpEvmConfig, OpRethReceiptBuilder}; +use reth_optimism_forks::OpHardforks; +use reth_optimism_payload_builder::{ + OpAttributes, OpBuiltPayload, OpPayloadPrimitives, + builder::OpPayloadTransactions, + config::{OpBuilderConfig, OpDAConfig, OpGasLimitConfig}, +}; +use reth_optimism_primitives::{DepositReceipt, OpPrimitives}; +use reth_optimism_rpc::{ + SequencerClient, + eth::{OpEthApiBuilder, ext::OpEthExtApi}, + historical::{HistoricalRpc, HistoricalRpcClient}, + miner::{MinerApiExtServer, OpMinerExtApi}, + witness::{DebugExecutionWitnessApiServer, OpDebugWitnessApi}, +}; +use reth_optimism_storage::OpStorage; +use reth_optimism_txpool::{ + OpPooledTx, + supervisor::{DEFAULT_SUPERVISOR_URL, SupervisorClient}, +}; +use reth_provider::{CanonStateSubscriptions, providers::ProviderFactoryBuilder}; +use reth_rpc_api::{DebugApiServer, L2EthApiExtServer, eth::RpcTypes}; +use reth_rpc_server_types::RethRpcModule; +use reth_tracing::tracing::{debug, info}; +use reth_transaction_pool::{ + EthPoolTransaction, PoolPooledTx, PoolTransaction, TransactionPool, + TransactionValidationTaskExecutor, blobstore::DiskFileBlobStore, +}; +use reth_trie_common::KeccakKeyHasher; +use serde::de::DeserializeOwned; +use std::{marker::PhantomData, sync::Arc}; +use url::Url; + +/// Marker trait for Optimism node types with standard engine, chain spec, and primitives. +pub trait OpNodeTypes: + NodeTypes +{ +} +/// Blanket impl for all node types that conform to the Optimism spec. +impl OpNodeTypes for N where + N: NodeTypes< + Payload = OpEngineTypes, + ChainSpec: OpHardforks + Hardforks, + Primitives = OpPrimitives, + > +{ +} + +/// Helper trait for Optimism node types with full configuration including storage and execution +/// data. 
+pub trait OpFullNodeTypes: + NodeTypes< + ChainSpec: OpHardforks, + Primitives: OpPayloadPrimitives, + Storage = OpStorage, + Payload: EngineTypes, + > +{ +} + +impl OpFullNodeTypes for N where + N: NodeTypes< + ChainSpec: OpHardforks, + Primitives: OpPayloadPrimitives, + Storage = OpStorage, + Payload: EngineTypes, + > +{ +} + +/// Type configuration for a regular Optimism node. +#[derive(Debug, Default, Clone)] +#[non_exhaustive] +pub struct OpNode { + /// Additional Optimism args + pub args: RollupArgs, + /// Data availability configuration for the OP builder. + /// + /// Used to throttle the size of the data availability payloads (configured by the batcher via + /// the `miner_` api). + /// + /// By default no throttling is applied. + pub da_config: OpDAConfig, + /// Gas limit configuration for the OP builder. + /// Used to control the gas limit of the blocks produced by the OP builder.(configured by the + /// batcher via the `miner_` api) + pub gas_limit_config: OpGasLimitConfig, +} + +/// A [`ComponentsBuilder`] with its generic arguments set to a stack of Optimism specific builders. +pub type OpNodeComponentBuilder = ComponentsBuilder< + Node, + OpPoolBuilder, + BasicPayloadServiceBuilder, + OpNetworkBuilder, + OpExecutorBuilder, + OpConsensusBuilder, +>; + +impl OpNode { + /// Creates a new instance of the Optimism node type. + pub fn new(args: RollupArgs) -> Self { + Self { + args, + da_config: OpDAConfig::default(), + gas_limit_config: OpGasLimitConfig::default(), + } + } + + /// Configure the data availability configuration for the OP builder. + pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { + self.da_config = da_config; + self + } + + /// Configure the gas limit configuration for the OP builder. + pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { + self.gas_limit_config = gas_limit_config; + self + } + + /// Returns the components for the given [`RollupArgs`]. 
+ pub fn components(&self) -> OpNodeComponentBuilder + where + Node: FullNodeTypes, + { + let RollupArgs { disable_txpool_gossip, compute_pending_block, discovery_v4, .. } = + self.args; + ComponentsBuilder::default() + .node_types::() + .executor(OpExecutorBuilder::default()) + .pool( + OpPoolBuilder::default() + .with_enable_tx_conditional(self.args.enable_tx_conditional) + .with_supervisor( + self.args.supervisor_http.clone(), + self.args.supervisor_safety_level, + ), + ) + .payload(BasicPayloadServiceBuilder::new( + OpPayloadBuilder::new(compute_pending_block) + .with_da_config(self.da_config.clone()) + .with_gas_limit_config(self.gas_limit_config.clone()), + )) + .network(OpNetworkBuilder::new(disable_txpool_gossip, !discovery_v4)) + .consensus(OpConsensusBuilder::default()) + } + + /// Returns [`OpAddOnsBuilder`] with configured arguments. + pub fn add_ons_builder(&self) -> OpAddOnsBuilder { + OpAddOnsBuilder::default() + .with_sequencer(self.args.sequencer.clone()) + .with_sequencer_headers(self.args.sequencer_headers.clone()) + .with_da_config(self.da_config.clone()) + .with_gas_limit_config(self.gas_limit_config.clone()) + .with_enable_tx_conditional(self.args.enable_tx_conditional) + .with_min_suggested_priority_fee(self.args.min_suggested_priority_fee) + .with_historical_rpc(self.args.historical_rpc.clone()) + .with_flashblocks(self.args.flashblocks_url.clone()) + .with_flashblock_consensus(self.args.flashblock_consensus) + } + + /// Instantiates the [`ProviderFactoryBuilder`] for an opstack node. + /// + /// # Open a Providerfactory in read-only mode from a datadir + /// + /// See also: [`ProviderFactoryBuilder`] and + /// [`ReadOnlyConfig`](reth_provider::providers::ReadOnlyConfig). 
+ /// + /// ```no_run + /// use reth_optimism_chainspec::BASE_MAINNET; + /// use reth_optimism_node::OpNode; + /// + /// let factory = + /// OpNode::provider_factory_builder().open_read_only(BASE_MAINNET.clone(), "datadir").unwrap(); + /// ``` + /// + /// # Open a Providerfactory manually with all required components + /// + /// ```no_run + /// use reth_db::open_db_read_only; + /// use reth_optimism_chainspec::OpChainSpecBuilder; + /// use reth_optimism_node::OpNode; + /// use reth_provider::providers::{RocksDBProvider, StaticFileProvider}; + /// use std::sync::Arc; + /// + /// let factory = OpNode::provider_factory_builder() + /// .db(Arc::new(open_db_read_only("db", Default::default()).unwrap())) + /// .chainspec(OpChainSpecBuilder::base_mainnet().build().into()) + /// .static_file(StaticFileProvider::read_only("db/static_files", false).unwrap()) + /// .rocksdb_provider(RocksDBProvider::builder("db/rocksdb").build().unwrap()) + /// .build_provider_factory(); + /// ``` + pub fn provider_factory_builder() -> ProviderFactoryBuilder { + ProviderFactoryBuilder::default() + } +} + +impl Node for OpNode +where + N: FullNodeTypes, +{ + type ComponentsBuilder = ComponentsBuilder< + N, + OpPoolBuilder, + BasicPayloadServiceBuilder, + OpNetworkBuilder, + OpExecutorBuilder, + OpConsensusBuilder, + >; + + type AddOns = OpAddOns< + NodeAdapter>::Components>, + OpEthApiBuilder, + OpEngineValidatorBuilder, + OpEngineApiBuilder, + BasicEngineValidatorBuilder, + >; + + fn components_builder(&self) -> Self::ComponentsBuilder { + Self::components(self) + } + + fn add_ons(&self) -> Self::AddOns { + self.add_ons_builder().build() + } +} + +impl DebugNode for OpNode +where + N: FullNodeComponents, +{ + type RpcBlock = alloy_rpc_types_eth::Block; + + fn rpc_to_primitive_block(rpc_block: Self::RpcBlock) -> reth_node_api::BlockTy { + rpc_block.into_consensus() + } + + fn local_payload_attributes_builder( + chain_spec: &Self::ChainSpec, + ) -> impl 
PayloadAttributesBuilder<::PayloadAttributes> { + LocalPayloadAttributesBuilder::new(Arc::new(chain_spec.clone())) + } +} + +impl NodeTypes for OpNode { + type Primitives = OpPrimitives; + type ChainSpec = OpChainSpec; + type Storage = OpStorage; + type Payload = OpEngineTypes; +} + +/// Add-ons w.r.t. optimism. +/// +/// This type provides optimism-specific addons to the node and exposes the RPC server and engine +/// API. +#[derive(Debug)] +pub struct OpAddOns< + N: FullNodeComponents, + EthB: EthApiBuilder, + PVB, + EB = OpEngineApiBuilder, + EVB = BasicEngineValidatorBuilder, + RpcMiddleware = Identity, +> { + /// Rpc add-ons responsible for launching the RPC servers and instantiating the RPC handlers + /// and eth-api. + pub rpc_add_ons: RpcAddOns, + /// Data availability configuration for the OP builder. + pub da_config: OpDAConfig, + /// Gas limit configuration for the OP builder. + pub gas_limit_config: OpGasLimitConfig, + /// Sequencer client, configured to forward submitted transactions to sequencer of given OP + /// network. + pub sequencer_url: Option, + /// Headers to use for the sequencer client requests. + pub sequencer_headers: Vec, + /// RPC endpoint for historical data. + /// + /// This can be used to forward pre-bedrock rpc requests (op-mainnet). + pub historical_rpc: Option, + /// Enable transaction conditionals. + enable_tx_conditional: bool, + min_suggested_priority_fee: u64, +} + +impl OpAddOns +where + N: FullNodeComponents, + EthB: EthApiBuilder, +{ + /// Creates a new instance from components. 
+ #[allow(clippy::too_many_arguments)] + pub const fn new( + rpc_add_ons: RpcAddOns, + da_config: OpDAConfig, + gas_limit_config: OpGasLimitConfig, + sequencer_url: Option, + sequencer_headers: Vec, + historical_rpc: Option, + enable_tx_conditional: bool, + min_suggested_priority_fee: u64, + ) -> Self { + Self { + rpc_add_ons, + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + } + } +} + +impl Default for OpAddOns +where + N: FullNodeComponents, + OpEthApiBuilder: EthApiBuilder, +{ + fn default() -> Self { + Self::builder().build() + } +} + +impl + OpAddOns< + N, + OpEthApiBuilder, + OpEngineValidatorBuilder, + OpEngineApiBuilder, + RpcMiddleware, + > +where + N: FullNodeComponents, + OpEthApiBuilder: EthApiBuilder, +{ + /// Build a [`OpAddOns`] using [`OpAddOnsBuilder`]. + pub fn builder() -> OpAddOnsBuilder { + OpAddOnsBuilder::default() + } +} + +impl OpAddOns +where + N: FullNodeComponents, + EthB: EthApiBuilder, +{ + /// Maps the [`reth_node_builder::rpc::EngineApiBuilder`] builder type. + pub fn with_engine_api( + self, + engine_api_builder: T, + ) -> OpAddOns { + let Self { + rpc_add_ons, + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + .. + } = self; + OpAddOns::new( + rpc_add_ons.with_engine_api(engine_api_builder), + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + ) + } + + /// Maps the [`PayloadValidatorBuilder`] builder type. + pub fn with_payload_validator( + self, + payload_validator_builder: T, + ) -> OpAddOns { + let Self { + rpc_add_ons, + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + enable_tx_conditional, + min_suggested_priority_fee, + historical_rpc, + .. 
+ } = self; + OpAddOns::new( + rpc_add_ons.with_payload_validator(payload_validator_builder), + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + ) + } + + /// Sets the RPC middleware stack for processing RPC requests. + /// + /// This method configures a custom middleware stack that will be applied to all RPC requests + /// across HTTP, `WebSocket`, and IPC transports. The middleware is applied to the RPC service + /// layer, allowing you to intercept, modify, or enhance RPC request processing. + /// + /// See also [`RpcAddOns::with_rpc_middleware`]. + pub fn with_rpc_middleware(self, rpc_middleware: T) -> OpAddOns { + let Self { + rpc_add_ons, + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + enable_tx_conditional, + min_suggested_priority_fee, + historical_rpc, + .. + } = self; + OpAddOns::new( + rpc_add_ons.with_rpc_middleware(rpc_middleware), + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + ) + } + + /// Sets the hook that is run once the rpc server is started. + pub fn on_rpc_started(mut self, hook: F) -> Self + where + F: FnOnce(RpcContext<'_, N, EthB::EthApi>, RethRpcServerHandles) -> eyre::Result<()> + + Send + + 'static, + { + self.rpc_add_ons = self.rpc_add_ons.on_rpc_started(hook); + self + } + + /// Sets the hook that is run to configure the rpc modules. 
+ pub fn extend_rpc_modules(mut self, hook: F) -> Self + where + F: FnOnce(RpcContext<'_, N, EthB::EthApi>) -> eyre::Result<()> + Send + 'static, + { + self.rpc_add_ons = self.rpc_add_ons.extend_rpc_modules(hook); + self + } +} + +impl NodeAddOns + for OpAddOns +where + N: FullNodeComponents< + Types: NodeTypes< + ChainSpec: OpHardforks, + Primitives: OpPayloadPrimitives, + Payload: PayloadTypes, + >, + Evm: ConfigureEvm< + NextBlockEnvCtx: BuildNextEnv< + Attrs, + HeaderTy, + ::ChainSpec, + >, + >, + Pool: TransactionPool, + >, + EthB: EthApiBuilder, + PVB: Send, + EB: EngineApiBuilder, + EVB: EngineValidatorBuilder, + RpcMiddleware: RethRpcMiddleware, + Attrs: OpAttributes, RpcPayloadAttributes: DeserializeOwned>, +{ + type Handle = RpcHandle; + + async fn launch_add_ons( + self, + ctx: reth_node_api::AddOnsContext<'_, N>, + ) -> eyre::Result { + let Self { + rpc_add_ons, + da_config, + gas_limit_config, + sequencer_url, + sequencer_headers, + enable_tx_conditional, + historical_rpc, + .. + } = self; + + let maybe_pre_bedrock_historical_rpc = historical_rpc + .and_then(|historical_rpc| { + ctx.node + .provider() + .chain_spec() + .op_fork_activation(OpHardfork::Bedrock) + .block_number() + .filter(|activation| *activation > 0) + .map(|bedrock_block| (historical_rpc, bedrock_block)) + }) + .map(|(historical_rpc, bedrock_block)| -> eyre::Result<_> { + info!(target: "reth::cli", %bedrock_block, ?historical_rpc, "Using historical RPC endpoint pre bedrock"); + let provider = ctx.node.provider().clone(); + let client = HistoricalRpcClient::new(&historical_rpc)?; + let layer = HistoricalRpc::new(provider, client, bedrock_block); + Ok(layer) + }) + .transpose()? 
+ ; + + let rpc_add_ons = rpc_add_ons.option_layer_rpc_middleware(maybe_pre_bedrock_historical_rpc); + + let builder = reth_optimism_payload_builder::OpPayloadBuilder::new( + ctx.node.pool().clone(), + ctx.node.provider().clone(), + ctx.node.evm_config().clone(), + ); + // install additional OP specific rpc methods + let debug_ext = OpDebugWitnessApi::<_, _, _, Attrs>::new( + ctx.node.provider().clone(), + Box::new(ctx.node.task_executor().clone()), + builder, + ); + let miner_ext = OpMinerExtApi::new(da_config, gas_limit_config); + + let sequencer_client = if let Some(url) = sequencer_url { + Some(SequencerClient::new_with_headers(url, sequencer_headers).await?) + } else { + None + }; + + let tx_conditional_ext: OpEthExtApi = OpEthExtApi::new( + sequencer_client, + ctx.node.pool().clone(), + ctx.node.provider().clone(), + ); + + rpc_add_ons + .launch_add_ons_with(ctx, move |container| { + let reth_node_builder::rpc::RpcModuleContainer { modules, auth_module, registry } = + container; + + debug!(target: "reth::cli", "Installing debug payload witness rpc endpoint"); + modules.merge_if_module_configured(RethRpcModule::Debug, debug_ext.into_rpc())?; + + // extend the miner namespace if configured in the regular http server + modules.add_or_replace_if_module_configured( + RethRpcModule::Miner, + miner_ext.clone().into_rpc(), + )?; + + // install the miner extension in the authenticated if configured + if modules.module_config().contains_any(&RethRpcModule::Miner) { + debug!(target: "reth::cli", "Installing miner DA rpc endpoint"); + auth_module.merge_auth_methods(miner_ext.into_rpc())?; + } + + // install the debug namespace in the authenticated if configured + if modules.module_config().contains_any(&RethRpcModule::Debug) { + debug!(target: "reth::cli", "Installing debug rpc endpoint"); + auth_module.merge_auth_methods(registry.debug_api().into_rpc())?; + } + + if enable_tx_conditional { + // extend the eth namespace if configured in the regular http server + 
modules.merge_if_module_configured( + RethRpcModule::Eth, + tx_conditional_ext.into_rpc(), + )?; + } + + Ok(()) + }) + .await + } +} + +impl RethRpcAddOns + for OpAddOns +where + N: FullNodeComponents< + Types: NodeTypes< + ChainSpec: OpHardforks, + Primitives: OpPayloadPrimitives, + Payload: PayloadTypes, + >, + Evm: ConfigureEvm< + NextBlockEnvCtx: BuildNextEnv< + Attrs, + HeaderTy, + ::ChainSpec, + >, + >, + >, + <::Pool as TransactionPool>::Transaction: OpPooledTx, + EthB: EthApiBuilder, + PVB: PayloadValidatorBuilder, + EB: EngineApiBuilder, + EVB: EngineValidatorBuilder, + RpcMiddleware: RethRpcMiddleware, + Attrs: OpAttributes, RpcPayloadAttributes: DeserializeOwned>, +{ + type EthApi = EthB::EthApi; + + fn hooks_mut(&mut self) -> &mut reth_node_builder::rpc::RpcHooks { + self.rpc_add_ons.hooks_mut() + } +} + +impl EngineValidatorAddOn + for OpAddOns +where + N: FullNodeComponents, + EthB: EthApiBuilder, + PVB: Send, + EB: EngineApiBuilder, + EVB: EngineValidatorBuilder, + RpcMiddleware: Send, +{ + type ValidatorBuilder = EVB; + + fn engine_validator_builder(&self) -> Self::ValidatorBuilder { + EngineValidatorAddOn::engine_validator_builder(&self.rpc_add_ons) + } +} + +/// A regular optimism evm and executor builder. +#[derive(Debug, Clone)] +#[non_exhaustive] +pub struct OpAddOnsBuilder { + /// Sequencer client, configured to forward submitted transactions to sequencer of given OP + /// network. + sequencer_url: Option, + /// Headers to use for the sequencer client requests. + sequencer_headers: Vec, + /// RPC endpoint for historical data. + historical_rpc: Option, + /// Data availability configuration for the OP builder. + da_config: Option, + /// Gas limit configuration for the OP builder. + gas_limit_config: Option, + /// Enable transaction conditionals. + enable_tx_conditional: bool, + /// Marker for network types. 
+ _nt: PhantomData, + /// Minimum suggested priority fee (tip) + min_suggested_priority_fee: u64, + /// RPC middleware to use + rpc_middleware: RpcMiddleware, + /// Optional tokio runtime to use for the RPC server. + tokio_runtime: Option, + /// A URL pointing to a secure websocket service that streams out flashblocks. + flashblocks_url: Option, + /// Enable flashblock consensus client to drive chain forward. + flashblock_consensus: bool, +} + +impl Default for OpAddOnsBuilder { + fn default() -> Self { + Self { + sequencer_url: None, + sequencer_headers: Vec::new(), + historical_rpc: None, + da_config: None, + gas_limit_config: None, + enable_tx_conditional: false, + min_suggested_priority_fee: 1_000_000, + _nt: PhantomData, + rpc_middleware: Identity::new(), + tokio_runtime: None, + flashblocks_url: None, + flashblock_consensus: false, + } + } +} + +impl OpAddOnsBuilder { + /// With a [`SequencerClient`]. + pub fn with_sequencer(mut self, sequencer_client: Option) -> Self { + self.sequencer_url = sequencer_client; + self + } + + /// With headers to use for the sequencer client requests. + pub fn with_sequencer_headers(mut self, sequencer_headers: Vec) -> Self { + self.sequencer_headers = sequencer_headers; + self + } + + /// Configure the data availability configuration for the OP builder. + pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { + self.da_config = Some(da_config); + self + } + + /// Configure the gas limit configuration for the OP payload builder. + pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { + self.gas_limit_config = Some(gas_limit_config); + self + } + + /// Configure if transaction conditional should be enabled. 
+ pub const fn with_enable_tx_conditional(mut self, enable_tx_conditional: bool) -> Self { + self.enable_tx_conditional = enable_tx_conditional; + self + } + + /// Configure the minimum priority fee (tip) + pub const fn with_min_suggested_priority_fee(mut self, min: u64) -> Self { + self.min_suggested_priority_fee = min; + self + } + + /// Configures the endpoint for historical RPC forwarding. + pub fn with_historical_rpc(mut self, historical_rpc: Option) -> Self { + self.historical_rpc = historical_rpc; + self + } + + /// Configures a custom tokio runtime for the RPC server. + /// + /// Caution: This runtime must not be created from within asynchronous context. + pub fn with_tokio_runtime(mut self, tokio_runtime: Option) -> Self { + self.tokio_runtime = tokio_runtime; + self + } + + /// Configure the RPC middleware to use + pub fn with_rpc_middleware(self, rpc_middleware: T) -> OpAddOnsBuilder { + let Self { + sequencer_url, + sequencer_headers, + historical_rpc, + da_config, + gas_limit_config, + enable_tx_conditional, + min_suggested_priority_fee, + tokio_runtime, + _nt, + flashblocks_url, + flashblock_consensus, + .. + } = self; + OpAddOnsBuilder { + sequencer_url, + sequencer_headers, + historical_rpc, + da_config, + gas_limit_config, + enable_tx_conditional, + min_suggested_priority_fee, + _nt, + rpc_middleware, + tokio_runtime, + flashblocks_url, + flashblock_consensus, + } + } + + /// With a URL pointing to a flashblocks secure websocket subscription. + pub fn with_flashblocks(mut self, flashblocks_url: Option) -> Self { + self.flashblocks_url = flashblocks_url; + self + } + + /// With a flashblock consensus client to drive chain forward. + pub const fn with_flashblock_consensus(mut self, flashblock_consensus: bool) -> Self { + self.flashblock_consensus = flashblock_consensus; + self + } +} + +impl OpAddOnsBuilder { + /// Builds an instance of [`OpAddOns`]. 
+ pub fn build( + self, + ) -> OpAddOns, PVB, EB, EVB, RpcMiddleware> + where + N: FullNodeComponents, + OpEthApiBuilder: EthApiBuilder, + PVB: PayloadValidatorBuilder + Default, + EB: Default, + EVB: Default, + { + let Self { + sequencer_url, + sequencer_headers, + da_config, + gas_limit_config, + enable_tx_conditional, + min_suggested_priority_fee, + historical_rpc, + rpc_middleware, + tokio_runtime, + flashblocks_url, + flashblock_consensus, + .. + } = self; + + OpAddOns::new( + RpcAddOns::new( + OpEthApiBuilder::default() + .with_sequencer(sequencer_url.clone()) + .with_sequencer_headers(sequencer_headers.clone()) + .with_min_suggested_priority_fee(min_suggested_priority_fee) + .with_flashblocks(flashblocks_url) + .with_flashblock_consensus(flashblock_consensus), + PVB::default(), + EB::default(), + EVB::default(), + rpc_middleware, + ) + .with_tokio_runtime(tokio_runtime), + da_config.unwrap_or_default(), + gas_limit_config.unwrap_or_default(), + sequencer_url, + sequencer_headers, + historical_rpc, + enable_tx_conditional, + min_suggested_priority_fee, + ) + } +} + +/// A regular optimism evm and executor builder. +#[derive(Debug, Copy, Clone, Default)] +#[non_exhaustive] +pub struct OpExecutorBuilder; + +impl ExecutorBuilder for OpExecutorBuilder +where + Node: FullNodeTypes>, +{ + type EVM = + OpEvmConfig<::ChainSpec, ::Primitives>; + + async fn build_evm(self, ctx: &BuilderContext) -> eyre::Result { + let evm_config = OpEvmConfig::new(ctx.chain_spec(), OpRethReceiptBuilder::default()); + + Ok(evm_config) + } +} + +/// A basic optimism transaction pool. +/// +/// This contains various settings that can be configured and take precedence over the node's +/// config. +#[derive(Debug)] +pub struct OpPoolBuilder { + /// Enforced overrides that are applied to the pool config. + pub pool_config_overrides: PoolBuilderConfigOverrides, + /// Enable transaction conditionals. 
+ pub enable_tx_conditional: bool, + /// Supervisor client url + pub supervisor_http: String, + /// Supervisor safety level + pub supervisor_safety_level: SafetyLevel, + /// Marker for the pooled transaction type. + _pd: core::marker::PhantomData, +} + +impl Default for OpPoolBuilder { + fn default() -> Self { + Self { + pool_config_overrides: Default::default(), + enable_tx_conditional: false, + supervisor_http: DEFAULT_SUPERVISOR_URL.to_string(), + supervisor_safety_level: SafetyLevel::CrossUnsafe, + _pd: Default::default(), + } + } +} + +impl Clone for OpPoolBuilder { + fn clone(&self) -> Self { + Self { + pool_config_overrides: self.pool_config_overrides.clone(), + enable_tx_conditional: self.enable_tx_conditional, + supervisor_http: self.supervisor_http.clone(), + supervisor_safety_level: self.supervisor_safety_level, + _pd: core::marker::PhantomData, + } + } +} + +impl OpPoolBuilder { + /// Sets the `enable_tx_conditional` flag on the pool builder. + pub const fn with_enable_tx_conditional(mut self, enable_tx_conditional: bool) -> Self { + self.enable_tx_conditional = enable_tx_conditional; + self + } + + /// Sets the [`PoolBuilderConfigOverrides`] on the pool builder. + pub fn with_pool_config_overrides( + mut self, + pool_config_overrides: PoolBuilderConfigOverrides, + ) -> Self { + self.pool_config_overrides = pool_config_overrides; + self + } + + /// Sets the supervisor client + pub fn with_supervisor( + mut self, + supervisor_client: String, + supervisor_safety_level: SafetyLevel, + ) -> Self { + self.supervisor_http = supervisor_client; + self.supervisor_safety_level = supervisor_safety_level; + self + } +} + +impl PoolBuilder for OpPoolBuilder +where + Node: FullNodeTypes>, + T: EthPoolTransaction> + OpPooledTx, + Evm: ConfigureEvm> + Clone + 'static, +{ + type Pool = OpTransactionPool; + + async fn build_pool( + self, + ctx: &BuilderContext, + evm_config: Evm, + ) -> eyre::Result { + let Self { pool_config_overrides, .. 
} = self; + + // supervisor used for interop + if ctx.chain_spec().is_interop_active_at_timestamp(ctx.head().timestamp) && + self.supervisor_http == DEFAULT_SUPERVISOR_URL + { + info!(target: "reth::cli", + url=%DEFAULT_SUPERVISOR_URL, + "Default supervisor url is used, consider changing --rollup.supervisor-http." + ); + } + let supervisor_client = SupervisorClient::builder(self.supervisor_http.clone()) + .minimum_safety(self.supervisor_safety_level) + .build() + .await; + + let blob_store = reth_node_builder::components::create_blob_store(ctx)?; + let validator = + TransactionValidationTaskExecutor::eth_builder(ctx.provider().clone(), evm_config) + .no_eip4844() + .with_max_tx_input_bytes(ctx.config().txpool.max_tx_input_bytes) + .kzg_settings(ctx.kzg_settings()?) + .set_tx_fee_cap(ctx.config().rpc.rpc_tx_fee_cap) + .with_max_tx_gas_limit(ctx.config().txpool.max_tx_gas_limit) + .with_minimum_priority_fee(ctx.config().txpool.minimum_priority_fee) + .with_additional_tasks( + pool_config_overrides + .additional_validation_tasks + .unwrap_or_else(|| ctx.config().txpool.additional_validation_tasks), + ) + .build_with_tasks(ctx.task_executor().clone(), blob_store.clone()) + .map(|validator| { + OpTransactionValidator::new(validator) + // In --dev mode we can't require gas fees because we're unable to decode + // the L1 block info + .require_l1_data_gas_fee(!ctx.config().dev.dev) + .with_supervisor(supervisor_client.clone()) + }); + + let final_pool_config = pool_config_overrides.apply(ctx.pool_config()); + + let transaction_pool = TxPoolBuilder::new(ctx) + .with_validator(validator) + .build_and_spawn_maintenance_task(blob_store, final_pool_config)?; + + info!(target: "reth::cli", "Transaction pool initialized"); + debug!(target: "reth::cli", "Spawned txpool maintenance task"); + + // The Op txpool maintenance task is only spawned when interop is active + if ctx.chain_spec().is_interop_active_at_timestamp(ctx.head().timestamp) { + // spawn the Op txpool maintenance task 
+ let chain_events = ctx.provider().canonical_state_stream(); + ctx.task_executor().spawn_critical( + "Op txpool interop maintenance task", + reth_optimism_txpool::maintain::maintain_transaction_pool_interop_future( + transaction_pool.clone(), + chain_events, + supervisor_client, + ), + ); + debug!(target: "reth::cli", "Spawned Op interop txpool maintenance task"); + } + + if self.enable_tx_conditional { + // spawn the Op txpool maintenance task + let chain_events = ctx.provider().canonical_state_stream(); + ctx.task_executor().spawn_critical( + "Op txpool conditional maintenance task", + reth_optimism_txpool::maintain::maintain_transaction_pool_conditional_future( + transaction_pool.clone(), + chain_events, + ), + ); + debug!(target: "reth::cli", "Spawned Op conditional txpool maintenance task"); + } + + Ok(transaction_pool) + } +} + +/// A basic optimism payload service builder +#[derive(Debug, Default, Clone)] +pub struct OpPayloadBuilder { + /// By default the pending block equals the latest block + /// to save resources and not leak txs from the tx-pool, + /// this flag enables computing of the pending block + /// from the tx-pool instead. + /// + /// If `compute_pending_block` is not enabled, the payload builder + /// will use the payload attributes from the latest block. Note + /// that this flag is not yet functional. + pub compute_pending_block: bool, + /// The type responsible for yielding the best transactions for the payload if mempool + /// transactions are allowed. + pub best_transactions: Txs, + /// This data availability configuration specifies constraints for the payload builder + /// when assembling payloads + pub da_config: OpDAConfig, + /// Gas limit configuration for the OP builder. + /// This is used to configure gas limit related constraints for the payload builder. + pub gas_limit_config: OpGasLimitConfig, +} + +impl OpPayloadBuilder { + /// Create a new instance with the given `compute_pending_block` flag and data availability + /// config. 
+ pub fn new(compute_pending_block: bool) -> Self { + Self { + compute_pending_block, + best_transactions: (), + da_config: OpDAConfig::default(), + gas_limit_config: OpGasLimitConfig::default(), + } + } + + /// Configure the data availability configuration for the OP payload builder. + pub fn with_da_config(mut self, da_config: OpDAConfig) -> Self { + self.da_config = da_config; + self + } + + /// Configure the gas limit configuration for the OP payload builder. + pub fn with_gas_limit_config(mut self, gas_limit_config: OpGasLimitConfig) -> Self { + self.gas_limit_config = gas_limit_config; + self + } +} + +impl OpPayloadBuilder { + /// Configures the type responsible for yielding the transactions that should be included in the + /// payload. + pub fn with_transactions(self, best_transactions: T) -> OpPayloadBuilder { + let Self { compute_pending_block, da_config, gas_limit_config, .. } = self; + OpPayloadBuilder { compute_pending_block, best_transactions, da_config, gas_limit_config } + } +} + +impl PayloadBuilderBuilder for OpPayloadBuilder +where + Node: FullNodeTypes< + Provider: ChainSpecProvider, + Types: NodeTypes< + Primitives: OpPayloadPrimitives, + Payload: PayloadTypes< + BuiltPayload = OpBuiltPayload>, + PayloadBuilderAttributes = Attrs, + >, + >, + >, + Evm: ConfigureEvm< + Primitives = PrimitivesTy, + NextBlockEnvCtx: BuildNextEnv< + Attrs, + HeaderTy, + ::ChainSpec, + >, + > + 'static, + Pool: TransactionPool>> + Unpin + 'static, + Txs: OpPayloadTransactions, + Attrs: OpAttributes>, +{ + type PayloadBuilder = + reth_optimism_payload_builder::OpPayloadBuilder; + + async fn build_payload_builder( + self, + ctx: &BuilderContext, + pool: Pool, + evm_config: Evm, + ) -> eyre::Result { + let payload_builder = reth_optimism_payload_builder::OpPayloadBuilder::with_builder_config( + pool, + ctx.provider().clone(), + evm_config, + OpBuilderConfig { + da_config: self.da_config.clone(), + gas_limit_config: self.gas_limit_config.clone(), + }, + ) + 
.with_transactions(self.best_transactions.clone()) + .set_compute_pending_block(self.compute_pending_block); + Ok(payload_builder) + } +} + +/// A basic optimism network builder. +#[derive(Debug, Default)] +pub struct OpNetworkBuilder { + /// Disable transaction pool gossip + pub disable_txpool_gossip: bool, + /// Disable discovery v4 + pub disable_discovery_v4: bool, +} + +impl Clone for OpNetworkBuilder { + fn clone(&self) -> Self { + Self::new(self.disable_txpool_gossip, self.disable_discovery_v4) + } +} + +impl OpNetworkBuilder { + /// Creates a new `OpNetworkBuilder`. + pub const fn new(disable_txpool_gossip: bool, disable_discovery_v4: bool) -> Self { + Self { disable_txpool_gossip, disable_discovery_v4 } + } +} + +impl OpNetworkBuilder { + /// Returns the [`NetworkConfig`] that contains the settings to launch the p2p network. + /// + /// This applies the configured [`OpNetworkBuilder`] settings. + pub fn network_config( + &self, + ctx: &BuilderContext, + ) -> eyre::Result> + where + Node: FullNodeTypes>, + NetworkP: NetworkPrimitives, + { + let disable_txpool_gossip = self.disable_txpool_gossip; + let disable_discovery_v4 = self.disable_discovery_v4; + let args = &ctx.config().network; + let network_builder = ctx + .network_config_builder()? 
+ // apply discovery settings + .apply(|mut builder| { + let rlpx_socket = (args.addr, args.port).into(); + if disable_discovery_v4 || args.discovery.disable_discovery { + builder = builder.disable_discv4_discovery(); + } + if !args.discovery.disable_discovery { + builder = builder.discovery_v5( + args.discovery.discovery_v5_builder( + rlpx_socket, + ctx.config() + .network + .resolved_bootnodes() + .or_else(|| ctx.chain_spec().bootnodes()) + .unwrap_or_default(), + ), + ); + } + + builder + }); + + let mut network_config = ctx.build_network_config(network_builder); + + // When `sequencer_endpoint` is configured, the node will forward all transactions to a + // Sequencer node for execution and inclusion on L1, and disable its own txpool + // gossip to prevent other parties in the network from learning about them. + network_config.tx_gossip_disabled = disable_txpool_gossip; + + Ok(network_config) + } +} + +impl NetworkBuilder for OpNetworkBuilder +where + Node: FullNodeTypes>, + Pool: TransactionPool>> + + Unpin + + 'static, +{ + type Network = + NetworkHandle, PoolPooledTx>>; + + async fn build_network( + self, + ctx: &BuilderContext, + pool: Pool, + ) -> eyre::Result { + let network_config = self.network_config(ctx)?; + let network = NetworkManager::builder(network_config).await?; + let handle = ctx.start_network(network, pool); + info!(target: "reth::cli", enode=%handle.local_node_record(), "P2P networking initialized"); + + Ok(handle) + } +} + +/// A basic optimism consensus builder. +#[derive(Debug, Default, Clone)] +#[non_exhaustive] +pub struct OpConsensusBuilder; + +impl ConsensusBuilder for OpConsensusBuilder +where + Node: FullNodeTypes< + Types: NodeTypes< + ChainSpec: OpHardforks, + Primitives: NodePrimitives, + >, + >, +{ + type Consensus = Arc::ChainSpec>>; + + async fn build_consensus(self, ctx: &BuilderContext) -> eyre::Result { + Ok(Arc::new(OpBeaconConsensus::new(ctx.chain_spec()))) + } +} + +/// Builder for [`OpEngineValidator`]. 
+#[derive(Debug, Default, Clone)] +#[non_exhaustive] +pub struct OpEngineValidatorBuilder; + +impl PayloadValidatorBuilder for OpEngineValidatorBuilder +where + Node: FullNodeComponents< + Types: NodeTypes< + ChainSpec: OpHardforks, + Payload: PayloadTypes, + >, + >, +{ + type Validator = OpEngineValidator< + Node::Provider, + <::Primitives as NodePrimitives>::SignedTx, + ::ChainSpec, + >; + + async fn build(self, ctx: &AddOnsContext<'_, Node>) -> eyre::Result { + Ok(OpEngineValidator::new::( + ctx.config.chain.clone(), + ctx.node.provider().clone(), + )) + } +} + +/// Network primitive types used by Optimism networks. +pub type OpNetworkPrimitives = BasicNetworkPrimitives; diff --git a/rust/op-reth/crates/node/src/rpc.rs b/rust/op-reth/crates/node/src/rpc.rs new file mode 100644 index 00000000000..7268fd4f809 --- /dev/null +++ b/rust/op-reth/crates/node/src/rpc.rs @@ -0,0 +1,155 @@ +//! RPC component builder +//! +//! # Example +//! +//! Builds offline `TraceApi` with only EVM and database. This can be useful +//! for example when downloading a state snapshot (pre-synced node) from some mirror. +//! +//! ```rust +//! use alloy_rpc_types_eth::BlockId; +//! use op_alloy_network::Optimism; +//! use reth_db::test_utils::create_test_rw_db_with_path; +//! use reth_node_builder::{ +//! ConsensusEngineHandle, LaunchContext, NodeConfig, RethFullAdapter, +//! components::ComponentsBuilder, +//! hooks::OnComponentInitializedHook, +//! rpc::{EthApiBuilder, EthApiCtx}, +//! }; +//! use reth_optimism_chainspec::OP_SEPOLIA; +//! use reth_optimism_evm::OpEvmConfig; +//! use reth_optimism_node::{OpExecutorBuilder, OpNetworkPrimitives, OpNode}; +//! use reth_optimism_rpc::OpEthApiBuilder; +//! use reth_optimism_txpool::OpPooledTransaction; +//! use reth_provider::providers::BlockchainProvider; +//! use reth_rpc::TraceApi; +//! use reth_rpc_eth_types::{EthConfig, EthStateCache}; +//! use reth_tasks::{TaskManager, pool::BlockingTaskGuard}; +//! use reth_trie_db::ChangesetCache; +//! 
use std::sync::Arc; +//! +//! #[tokio::main] +//! async fn main() { +//! // build core node with all components disabled except EVM and state +//! let sepolia = NodeConfig::new(OP_SEPOLIA.clone()); +//! let db = create_test_rw_db_with_path(sepolia.datadir()); +//! let tasks = TaskManager::current(); +//! let launch_ctx = LaunchContext::new(tasks.executor(), sepolia.datadir()); +//! let node = launch_ctx +//! .with_loaded_toml_config(sepolia) +//! .unwrap() +//! .attach(Arc::new(db)) +//! .with_provider_factory::<_, OpEvmConfig>(ChangesetCache::new()) +//! .await +//! .unwrap() +//! .with_genesis() +//! .unwrap() +//! .with_metrics_task() // todo: shouldn't be req to set up blockchain db +//! .with_blockchain_db::, _>(move |provider_factory| { +//! Ok(BlockchainProvider::new(provider_factory).unwrap()) +//! }) +//! .unwrap() +//! .with_components( +//! ComponentsBuilder::default() +//! .node_types::>() +//! .noop_pool::() +//! .executor(OpExecutorBuilder::default()) +//! .noop_consensus() +//! .noop_network::() +//! .noop_payload(), +//! Box::new(()) as Box>, +//! ) +//! .await +//! .unwrap(); +//! +//! // build `eth` namespace API +//! let config = EthConfig::default(); +//! let cache = EthStateCache::spawn_with( +//! node.provider_factory().clone(), +//! config.cache, +//! node.task_executor().clone(), +//! ); +//! // Create a dummy beacon engine handle for offline mode +//! let (tx, _) = tokio::sync::mpsc::unbounded_channel(); +//! let ctx = EthApiCtx { +//! components: node.node_adapter(), +//! config, +//! cache, +//! engine_handle: ConsensusEngineHandle::new(tx), +//! }; +//! let eth_api = OpEthApiBuilder::::default().build_eth_api(ctx).await.unwrap(); +//! +//! // build `trace` namespace API +//! let trace_api = TraceApi::new(eth_api, BlockingTaskGuard::new(10), EthConfig::default()); +//! +//! // fetch traces for latest block +//! let traces = trace_api.trace_block(BlockId::latest()).await.unwrap(); +//! } +//! 
``` + +pub use reth_optimism_rpc::{OpEngineApi, OpEthApi, OpEthApiBuilder}; + +use crate::OP_NAME_CLIENT; +use alloy_rpc_types_engine::ClientVersionV1; +use op_alloy_rpc_types_engine::OpExecutionData; +use reth_chainspec::EthereumHardforks; +use reth_node_api::{ + AddOnsContext, EngineApiValidator, EngineTypes, FullNodeComponents, NodeTypes, +}; +use reth_node_builder::rpc::{EngineApiBuilder, PayloadValidatorBuilder}; +use reth_node_core::version::{CLIENT_CODE, version_metadata}; +use reth_optimism_rpc::engine::OP_ENGINE_CAPABILITIES; +use reth_payload_builder::PayloadStore; +use reth_rpc_engine_api::{EngineApi, EngineCapabilities}; + +/// Builder for basic [`OpEngineApi`] implementation. +#[derive(Debug, Default, Clone)] +pub struct OpEngineApiBuilder { + engine_validator_builder: EV, +} + +impl EngineApiBuilder for OpEngineApiBuilder +where + N: FullNodeComponents< + Types: NodeTypes< + ChainSpec: EthereumHardforks, + Payload: EngineTypes, + >, + >, + EV: PayloadValidatorBuilder, + EV::Validator: EngineApiValidator<::Payload>, +{ + type EngineApi = OpEngineApi< + N::Provider, + ::Payload, + N::Pool, + EV::Validator, + ::ChainSpec, + >; + + async fn build_engine_api(self, ctx: &AddOnsContext<'_, N>) -> eyre::Result { + let Self { engine_validator_builder } = self; + + let engine_validator = engine_validator_builder.build(ctx).await?; + let client = ClientVersionV1 { + code: CLIENT_CODE, + name: OP_NAME_CLIENT.to_string(), + version: version_metadata().cargo_pkg_version.to_string(), + commit: version_metadata().vergen_git_sha.to_string(), + }; + let inner = EngineApi::new( + ctx.node.provider().clone(), + ctx.config.chain.clone(), + ctx.beacon_engine_handle.clone(), + PayloadStore::new(ctx.node.payload_builder_handle().clone()), + ctx.node.pool().clone(), + Box::new(ctx.node.task_executor().clone()), + client, + EngineCapabilities::new(OP_ENGINE_CAPABILITIES.iter().copied()), + engine_validator, + ctx.config.engine.accept_execution_requests_hash, + 
ctx.node.network().clone(), + ); + + Ok(OpEngineApi::new(inner)) + } +} diff --git a/rust/op-reth/crates/node/src/utils.rs b/rust/op-reth/crates/node/src/utils.rs new file mode 100644 index 00000000000..fa29c8d00c7 --- /dev/null +++ b/rust/op-reth/crates/node/src/utils.rs @@ -0,0 +1,74 @@ +use crate::{OpBuiltPayload, OpNode as OtherOpNode, OpPayloadBuilderAttributes}; +use alloy_genesis::Genesis; +use alloy_primitives::{Address, B256}; +use alloy_rpc_types_engine::PayloadAttributes; +use reth_e2e_test_utils::{ + NodeHelperType, TmpDB, transaction::TransactionTestContext, wallet::Wallet, +}; +use reth_node_api::NodeTypesWithDBAdapter; +use reth_optimism_chainspec::OpChainSpecBuilder; +use reth_payload_builder::EthPayloadBuilderAttributes; +use reth_provider::providers::BlockchainProvider; +use reth_tasks::TaskManager; +use std::sync::Arc; +use tokio::sync::Mutex; + +/// Optimism Node Helper type +pub(crate) type OpNode = + NodeHelperType>>; + +/// Creates the initial setup with `num_nodes` of the node config, started and connected. +pub async fn setup(num_nodes: usize) -> eyre::Result<(Vec, TaskManager, Wallet)> { + let genesis: Genesis = + serde_json::from_str(include_str!("../tests/assets/genesis.json")).unwrap(); + reth_e2e_test_utils::setup_engine( + num_nodes, + Arc::new(OpChainSpecBuilder::base_mainnet().genesis(genesis).ecotone_activated().build()), + false, + Default::default(), + optimism_payload_attributes, + ) + .await +} + +/// Advance the chain with sequential payloads returning them in the end. 
+pub async fn advance_chain( + length: usize, + node: &mut OpNode, + wallet: Arc>, +) -> eyre::Result> { + node.advance(length as u64, |_| { + let wallet = wallet.clone(); + Box::pin(async move { + let mut wallet = wallet.lock().await; + let tx_fut = TransactionTestContext::optimism_l1_block_info_tx( + wallet.chain_id, + wallet.inner.clone(), + wallet.inner_nonce, + ); + wallet.inner_nonce += 1; + tx_fut.await + }) + }) + .await +} + +/// Helper function to create a new eth payload attributes +pub fn optimism_payload_attributes(timestamp: u64) -> OpPayloadBuilderAttributes { + let attributes = PayloadAttributes { + timestamp, + prev_randao: B256::ZERO, + suggested_fee_recipient: Address::ZERO, + withdrawals: Some(vec![]), + parent_beacon_block_root: Some(B256::ZERO), + }; + + OpPayloadBuilderAttributes { + payload_attributes: EthPayloadBuilderAttributes::new(B256::ZERO, attributes), + transactions: vec![], + no_tx_pool: false, + gas_limit: Some(30_000_000), + eip_1559_params: None, + min_base_fee: None, + } +} diff --git a/op-reth/crates/node/src/version.rs b/rust/op-reth/crates/node/src/version.rs similarity index 100% rename from op-reth/crates/node/src/version.rs rename to rust/op-reth/crates/node/src/version.rs diff --git a/op-reth/crates/node/tests/assets/genesis.json b/rust/op-reth/crates/node/tests/assets/genesis.json similarity index 100% rename from op-reth/crates/node/tests/assets/genesis.json rename to rust/op-reth/crates/node/tests/assets/genesis.json diff --git a/op-reth/crates/node/tests/e2e-testsuite/main.rs b/rust/op-reth/crates/node/tests/e2e-testsuite/main.rs similarity index 100% rename from op-reth/crates/node/tests/e2e-testsuite/main.rs rename to rust/op-reth/crates/node/tests/e2e-testsuite/main.rs diff --git a/op-reth/crates/node/tests/e2e-testsuite/p2p.rs b/rust/op-reth/crates/node/tests/e2e-testsuite/p2p.rs similarity index 100% rename from op-reth/crates/node/tests/e2e-testsuite/p2p.rs rename to 
rust/op-reth/crates/node/tests/e2e-testsuite/p2p.rs diff --git a/op-reth/crates/node/tests/e2e-testsuite/testsuite.rs b/rust/op-reth/crates/node/tests/e2e-testsuite/testsuite.rs similarity index 96% rename from op-reth/crates/node/tests/e2e-testsuite/testsuite.rs rename to rust/op-reth/crates/node/tests/e2e-testsuite/testsuite.rs index b031b3a8266..a95034778cd 100644 --- a/op-reth/crates/node/tests/e2e-testsuite/testsuite.rs +++ b/rust/op-reth/crates/node/tests/e2e-testsuite/testsuite.rs @@ -1,12 +1,12 @@ -use alloy_primitives::{Address, B256, B64}; +use alloy_primitives::{Address, B64, B256}; use eyre::Result; use op_alloy_rpc_types_engine::OpPayloadAttributes; use reth_e2e_test_utils::testsuite::{ + TestBuilder, actions::AssertMineBlock, setup::{NetworkSetup, Setup}, - TestBuilder, }; -use reth_optimism_chainspec::{OpChainSpecBuilder, OP_MAINNET}; +use reth_optimism_chainspec::{OP_MAINNET, OpChainSpecBuilder}; use reth_optimism_node::{OpEngineTypes, OpNode}; use std::sync::Arc; diff --git a/rust/op-reth/crates/node/tests/it/builder.rs b/rust/op-reth/crates/node/tests/it/builder.rs new file mode 100644 index 00000000000..8a30faaa6b7 --- /dev/null +++ b/rust/op-reth/crates/node/tests/it/builder.rs @@ -0,0 +1,169 @@ +//! Node builder setup tests. 
+ +use alloy_primitives::{Bytes, address}; +use core::marker::PhantomData; +use op_revm::{ + OpContext, OpHaltReason, OpSpecId, OpTransaction, OpTransactionError, + precompiles::OpPrecompiles, +}; +use reth_db::test_utils::create_test_rw_db; +use reth_evm::{Database, Evm, EvmEnv, EvmFactory, precompiles::PrecompilesMap}; +use reth_node_api::{FullNodeComponents, NodeTypesWithDBAdapter}; +use reth_node_builder::{ + BuilderContext, FullNodeTypes, Node, NodeBuilder, NodeConfig, NodeTypes, + components::ExecutorBuilder, +}; +use reth_optimism_chainspec::{BASE_MAINNET, OP_SEPOLIA, OpChainSpec}; +use reth_optimism_evm::{OpBlockExecutorFactory, OpEvm, OpEvmFactory, OpRethReceiptBuilder}; +use reth_optimism_node::{OpEvmConfig, OpExecutorBuilder, OpNode, args::RollupArgs}; +use reth_optimism_primitives::OpPrimitives; +use reth_provider::providers::BlockchainProvider; +use revm::{ + Inspector, + context::{BlockEnv, ContextTr, TxEnv}, + context_interface::result::EVMError, + inspector::NoOpInspector, + interpreter::interpreter::EthInterpreter, + precompile::{Precompile, PrecompileId, PrecompileOutput, PrecompileResult, Precompiles}, +}; +use std::sync::OnceLock; + +#[test] +fn test_basic_setup() { + // parse CLI -> config + let config = NodeConfig::new(BASE_MAINNET.clone()); + let db = create_test_rw_db(); + let args = RollupArgs::default(); + let op_node = OpNode::new(args); + let _builder = NodeBuilder::new(config) + .with_database(db) + .with_types_and_provider::>>() + .with_components(op_node.components()) + .with_add_ons(op_node.add_ons()) + .on_component_initialized(move |ctx| { + let _provider = ctx.provider(); + Ok(()) + }) + .on_node_started(|_full_node| Ok(())) + .on_rpc_started(|_ctx, handles| { + let _client = handles.rpc.http_client(); + Ok(()) + }) + .extend_rpc_modules(|ctx| { + let _ = ctx.config(); + let _ = ctx.node().provider(); + + Ok(()) + }) + .check_launch(); +} + +#[test] +fn test_setup_custom_precompiles() { + /// Unichain custom precompiles. 
+ struct UniPrecompiles; + + impl UniPrecompiles { + /// Returns map of precompiles for Unichain. + fn precompiles(spec_id: OpSpecId) -> PrecompilesMap { + static INSTANCE: OnceLock = OnceLock::new(); + + PrecompilesMap::from_static(INSTANCE.get_or_init(|| { + let mut precompiles = OpPrecompiles::new_with_spec(spec_id).precompiles().clone(); + // Custom precompile. + let precompile = Precompile::new( + PrecompileId::custom("custom"), + address!("0x0000000000000000000000000000000000756e69"), + |_, _| PrecompileResult::Ok(PrecompileOutput::new(0, Bytes::new())), + ); + precompiles.extend([precompile]); + precompiles + })) + } + } + + /// Builds Unichain EVM configuration. + #[derive(Clone, Debug)] + struct UniEvmFactory; + + impl EvmFactory for UniEvmFactory { + type Evm>> = OpEvm; + type Context = OpContext; + type Tx = OpTransaction; + type Error = + EVMError; + type HaltReason = OpHaltReason; + type Spec = OpSpecId; + type BlockEnv = BlockEnv; + type Precompiles = PrecompilesMap; + + fn create_evm( + &self, + db: DB, + input: EvmEnv, + ) -> Self::Evm { + let mut op_evm = OpEvmFactory::default().create_evm(db, input); + *op_evm.components_mut().2 = UniPrecompiles::precompiles(*op_evm.ctx().cfg().spec()); + + op_evm + } + + fn create_evm_with_inspector< + DB: Database, + I: Inspector, EthInterpreter>, + >( + &self, + db: DB, + input: EvmEnv, + inspector: I, + ) -> Self::Evm { + let mut op_evm = + OpEvmFactory::default().create_evm_with_inspector(db, input, inspector); + *op_evm.components_mut().2 = UniPrecompiles::precompiles(*op_evm.ctx().cfg().spec()); + + op_evm + } + } + + /// Unichain executor builder. 
+ struct UniExecutorBuilder; + + impl ExecutorBuilder for UniExecutorBuilder + where + Node: FullNodeTypes>, + { + type EVM = OpEvmConfig< + OpChainSpec, + ::Primitives, + OpRethReceiptBuilder, + UniEvmFactory, + >; + + async fn build_evm(self, ctx: &BuilderContext) -> eyre::Result { + let OpEvmConfig { executor_factory, block_assembler, _pd: _ } = + OpExecutorBuilder::default().build_evm(ctx).await?; + let uni_executor_factory = OpBlockExecutorFactory::new( + *executor_factory.receipt_builder(), + ctx.chain_spec(), + UniEvmFactory, + ); + let uni_evm_config = OpEvmConfig { + executor_factory: uni_executor_factory, + block_assembler, + _pd: PhantomData, + }; + Ok(uni_evm_config) + } + } + + NodeBuilder::new(NodeConfig::new(OP_SEPOLIA.clone())) + .with_database(create_test_rw_db()) + .with_types::() + .with_components( + OpNode::default() + .components() + // Custom EVM configuration + .executor(UniExecutorBuilder), + ) + .check_launch(); +} diff --git a/op-reth/crates/node/tests/it/custom_genesis.rs b/rust/op-reth/crates/node/tests/it/custom_genesis.rs similarity index 82% rename from op-reth/crates/node/tests/it/custom_genesis.rs rename to rust/op-reth/crates/node/tests/it/custom_genesis.rs index da194566507..635153746a4 100644 --- a/op-reth/crates/node/tests/it/custom_genesis.rs +++ b/rust/op-reth/crates/node/tests/it/custom_genesis.rs @@ -11,8 +11,8 @@ use reth_e2e_test_utils::{ use reth_node_builder::{EngineNodeLauncher, Node, NodeBuilder, NodeConfig}; use reth_node_core::args::DatadirArgs; use reth_optimism_chainspec::OpChainSpecBuilder; -use reth_optimism_node::{utils::optimism_payload_attributes, OpNode}; -use reth_provider::{providers::BlockchainProvider, HeaderProvider, StageCheckpointReader}; +use reth_optimism_node::{OpNode, utils::optimism_payload_attributes}; +use reth_provider::{HeaderProvider, StageCheckpointReader, providers::BlockchainProvider}; use reth_stages_types::StageId; use std::sync::Arc; use tokio::sync::Mutex; @@ -36,10 +36,13 @@ async fn 
test_op_node_custom_genesis_number() { let wallet = Arc::new(Mutex::new(Wallet::default().with_chain_id(chain_spec.chain().into()))); // Configure and launch the node - let config = NodeConfig::new(chain_spec.clone()).with_datadir_args(DatadirArgs { - datadir: reth_db::test_utils::tempdir_path().into(), - ..Default::default() - }); + let mut config = + NodeConfig::new(chain_spec.clone()).with_unused_ports().with_datadir_args(DatadirArgs { + datadir: reth_db::test_utils::tempdir_path().into(), + ..Default::default() + }); + config.network.discovery.discv5_port = 0; + config.network.discovery.discv5_port_ipv6 = 0; let db = create_test_rw_db_with_path( config .datadir @@ -70,23 +73,22 @@ async fn test_op_node_custom_genesis_number() { // Verify stage checkpoints are initialized to genesis block number (1000) for stage in StageId::ALL { let checkpoint = node.inner.provider.get_stage_checkpoint(stage).unwrap(); - assert!(checkpoint.is_some(), "Stage {:?} checkpoint should exist", stage); + assert!(checkpoint.is_some(), "Stage {stage:?} checkpoint should exist"); assert_eq!( checkpoint.unwrap().block_number, 1000, - "Stage {:?} checkpoint should be at genesis block 1000", - stage + "Stage {stage:?} checkpoint should be at genesis block 1000" ); } // Query genesis block should succeed let genesis_header = node.inner.provider.header_by_number(genesis_number).unwrap(); - assert!(genesis_header.is_some(), "Genesis block at {} should exist", genesis_number); + assert!(genesis_header.is_some(), "Genesis block at {genesis_number} should exist"); // Query blocks before genesis should return None for block_num in [0, 1, genesis_number - 1] { let header = node.inner.provider.header_by_number(block_num).unwrap(); - assert!(header.is_none(), "Block {} before genesis should not exist", block_num); + assert!(header.is_none(), "Block {block_num} before genesis should not exist"); } // Advance the chain with a single block diff --git a/op-reth/crates/node/tests/it/main.rs 
b/rust/op-reth/crates/node/tests/it/main.rs similarity index 100% rename from op-reth/crates/node/tests/it/main.rs rename to rust/op-reth/crates/node/tests/it/main.rs diff --git a/op-reth/crates/node/tests/it/priority.rs b/rust/op-reth/crates/node/tests/it/priority.rs similarity index 94% rename from op-reth/crates/node/tests/it/priority.rs rename to rust/op-reth/crates/node/tests/it/priority.rs index f831c65ca93..f6ff043966f 100644 --- a/op-reth/crates/node/tests/it/priority.rs +++ b/rust/op-reth/crates/node/tests/it/priority.rs @@ -1,6 +1,6 @@ //! Node builder test that customizes priority of transactions in the block. -use alloy_consensus::{transaction::Recovered, SignableTransaction, Transaction, TxEip1559}; +use alloy_consensus::{SignableTransaction, Transaction, TxEip1559, transaction::Recovered}; use alloy_genesis::Genesis; use alloy_network::TxSignerSync; use alloy_primitives::{Address, ChainId, TxKind}; @@ -11,12 +11,13 @@ use reth_e2e_test_utils::{ }; use reth_node_api::FullNodeTypes; use reth_node_builder::{ - components::{BasicPayloadServiceBuilder, ComponentsBuilder}, EngineNodeLauncher, Node, NodeBuilder, NodeConfig, + components::{BasicPayloadServiceBuilder, ComponentsBuilder}, }; use reth_node_core::args::DatadirArgs; use reth_optimism_chainspec::OpChainSpecBuilder; use reth_optimism_node::{ + OpNode, args::RollupArgs, node::{ OpConsensusBuilder, OpExecutorBuilder, OpNetworkBuilder, OpNodeComponentBuilder, @@ -24,7 +25,6 @@ use reth_optimism_node::{ }, txpool::OpPooledTransaction, utils::optimism_payload_attributes, - OpNode, }; use reth_optimism_payload_builder::builder::OpPayloadTransactions; use reth_payload_util::{ @@ -120,10 +120,13 @@ async fn test_custom_block_priority_config() { let wallet = Arc::new(Mutex::new(Wallet::default().with_chain_id(chain_spec.chain().into()))); // Configure and launch the node. 
- let config = NodeConfig::new(chain_spec).with_datadir_args(DatadirArgs { - datadir: reth_db::test_utils::tempdir_path().into(), - ..Default::default() - }); + let mut config = + NodeConfig::new(chain_spec).with_unused_ports().with_datadir_args(DatadirArgs { + datadir: reth_db::test_utils::tempdir_path().into(), + ..Default::default() + }); + config.network.discovery.discv5_port = 0; + config.network.discovery.discv5_port_ipv6 = 0; let db = create_test_rw_db_with_path( config .datadir diff --git a/rust/op-reth/crates/node/tests/it/rpc.rs b/rust/op-reth/crates/node/tests/it/rpc.rs new file mode 100644 index 00000000000..72aff692e32 --- /dev/null +++ b/rust/op-reth/crates/node/tests/it/rpc.rs @@ -0,0 +1,44 @@ +//! RPC integration tests. + +use reth_network::types::NatResolver; +use reth_node_builder::{NodeBuilder, NodeHandle}; +use reth_node_core::{ + args::{NetworkArgs, RpcServerArgs}, + node_config::NodeConfig, +}; +use reth_optimism_chainspec::BASE_MAINNET; +use reth_optimism_node::OpNode; +use reth_rpc_api::servers::AdminApiServer; +use reth_tasks::TaskManager; + +// +#[tokio::test] +async fn test_admin_external_ip() -> eyre::Result<()> { + reth_tracing::init_test_tracing(); + + let exec = TaskManager::current(); + let exec = exec.executor(); + + let external_ip = "10.64.128.71".parse().unwrap(); + // Node setup + let mut network_args = NetworkArgs::default() + .with_unused_ports() + .with_nat_resolver(NatResolver::ExternalIp(external_ip)); + network_args.discovery.discv5_port = 0; + network_args.discovery.discv5_port_ipv6 = 0; + let node_config = NodeConfig::test() + .map_chain(BASE_MAINNET.clone()) + .with_network(network_args) + .with_rpc(RpcServerArgs::default().with_unused_ports().with_http()); + + let NodeHandle { node, node_exit_future: _ } = + NodeBuilder::new(node_config).testing_node(exec).node(OpNode::default()).launch().await?; + + let api = node.add_ons_handle.admin_api(); + + let info = api.node_info().await.unwrap(); + + assert_eq!(info.ip, 
external_ip); + + Ok(()) +} diff --git a/rust/op-reth/crates/payload/Cargo.toml b/rust/op-reth/crates/payload/Cargo.toml new file mode 100644 index 00000000000..15d75620e19 --- /dev/null +++ b/rust/op-reth/crates/payload/Cargo.toml @@ -0,0 +1,54 @@ +[package] +name = "reth-optimism-payload-builder" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "A payload builder for op-reth that builds optimistic payloads." + +[lints] +workspace = true + +[dependencies] +# reth +reth-chainspec.workspace = true +reth-primitives-traits.workspace = true +reth-revm = { workspace = true, features = ["witness"] } +reth-transaction-pool.workspace = true +reth-storage-api.workspace = true +reth-evm.workspace = true +reth-execution-types.workspace = true +reth-payload-builder.workspace = true +reth-payload-builder-primitives.workspace = true +reth-payload-util.workspace = true +reth-payload-primitives = { workspace = true, features = ["op"] } +reth-basic-payload-builder.workspace = true +reth-payload-validator.workspace = true + +# op-reth +reth-optimism-evm.workspace = true +reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true +reth-optimism-txpool.workspace = true + +# ethereum +revm.workspace = true +alloy-eips.workspace = true +alloy-primitives.workspace = true +alloy-rlp.workspace = true +op-alloy-rpc-types-engine = { workspace = true, features = ["serde"] } +op-alloy-consensus.workspace = true +alloy-rpc-types-engine.workspace = true +alloy-rpc-types-debug.workspace = true +alloy-consensus.workspace = true +alloy-evm.workspace = true + +# misc +derive_more.workspace = true +tracing.workspace = true +thiserror.workspace = true +sha2.workspace = true +serde.workspace = true +either.workspace = true diff --git a/rust/op-reth/crates/payload/src/builder.rs 
b/rust/op-reth/crates/payload/src/builder.rs new file mode 100644 index 00000000000..2477a8bf9a9 --- /dev/null +++ b/rust/op-reth/crates/payload/src/builder.rs @@ -0,0 +1,775 @@ +//! Optimism payload builder implementation. +use crate::{ + OpAttributes, OpPayloadBuilderAttributes, OpPayloadPrimitives, config::OpBuilderConfig, + error::OpPayloadBuilderError, payload::OpBuiltPayload, +}; +use alloy_consensus::{BlockHeader, Transaction, Typed2718}; +use alloy_evm::Evm as AlloyEvm; +use alloy_primitives::{B256, U256}; +use alloy_rpc_types_debug::ExecutionWitness; +use alloy_rpc_types_engine::PayloadId; +use reth_basic_payload_builder::*; +use reth_chainspec::{ChainSpecProvider, EthChainSpec}; +use reth_evm::{ + ConfigureEvm, Database, + block::BlockExecutorFor, + execute::{ + BlockBuilder, BlockBuilderOutcome, BlockExecutionError, BlockExecutor, BlockValidationError, + }, + op_revm::{L1BlockInfo, constants::L1_BLOCK_CONTRACT}, +}; +use reth_execution_types::BlockExecutionOutput; +use reth_optimism_forks::OpHardforks; +use reth_optimism_primitives::{L2_TO_L1_MESSAGE_PASSER_ADDRESS, transaction::OpTransaction}; +use reth_optimism_txpool::{ + OpPooledTx, + estimated_da_size::DataAvailabilitySized, + interop::{MaybeInteropTransaction, is_valid_interop}, +}; +use reth_payload_builder_primitives::PayloadBuilderError; +use reth_payload_primitives::{BuildNextEnv, BuiltPayloadExecutedBlock, PayloadBuilderAttributes}; +use reth_payload_util::{BestPayloadTransactions, NoopPayloadTransactions, PayloadTransactions}; +use reth_primitives_traits::{ + HeaderTy, NodePrimitives, SealedHeader, SealedHeaderFor, SignedTransaction, TxTy, +}; +use reth_revm::{ + cancelled::CancelOnDrop, database::StateProviderDatabase, db::State, + witness::ExecutionWitnessRecord, +}; +use reth_storage_api::{StateProvider, StateProviderFactory, errors::ProviderError}; +use reth_transaction_pool::{BestTransactionsAttributes, PoolTransaction, TransactionPool}; +use revm::context::{Block, BlockEnv}; +use 
std::{marker::PhantomData, sync::Arc}; +use tracing::{debug, trace, warn}; + +/// Optimism's payload builder +#[derive(Debug)] +pub struct OpPayloadBuilder< + Pool, + Client, + Evm, + Txs = (), + Attrs = OpPayloadBuilderAttributes::Primitives>>, +> { + /// The rollup's compute pending block configuration option. + pub compute_pending_block: bool, + /// The type responsible for creating the evm. + pub evm_config: Evm, + /// Transaction pool. + pub pool: Pool, + /// Node client. + pub client: Client, + /// Settings for the builder, e.g. DA settings. + pub config: OpBuilderConfig, + /// The type responsible for yielding the best transactions for the payload if mempool + /// transactions are allowed. + pub best_transactions: Txs, + /// Marker for the payload attributes type. + _pd: PhantomData, +} + +impl Clone for OpPayloadBuilder +where + Pool: Clone, + Client: Clone, + Evm: ConfigureEvm, + Txs: Clone, +{ + fn clone(&self) -> Self { + Self { + evm_config: self.evm_config.clone(), + pool: self.pool.clone(), + client: self.client.clone(), + config: self.config.clone(), + best_transactions: self.best_transactions.clone(), + compute_pending_block: self.compute_pending_block, + _pd: PhantomData, + } + } +} + +impl OpPayloadBuilder { + /// `OpPayloadBuilder` constructor. + /// + /// Configures the builder with the default settings. + pub fn new(pool: Pool, client: Client, evm_config: Evm) -> Self { + Self::with_builder_config(pool, client, evm_config, Default::default()) + } + + /// Configures the builder with the given [`OpBuilderConfig`]. + pub const fn with_builder_config( + pool: Pool, + client: Client, + evm_config: Evm, + config: OpBuilderConfig, + ) -> Self { + Self { + pool, + client, + compute_pending_block: true, + evm_config, + config, + best_transactions: (), + _pd: PhantomData, + } + } +} + +impl OpPayloadBuilder { + /// Sets the rollup's compute pending block configuration option. 
+ pub const fn set_compute_pending_block(mut self, compute_pending_block: bool) -> Self { + self.compute_pending_block = compute_pending_block; + self + } + + /// Configures the type responsible for yielding the transactions that should be included in the + /// payload. + pub fn with_transactions( + self, + best_transactions: T, + ) -> OpPayloadBuilder { + let Self { pool, client, compute_pending_block, evm_config, config, .. } = self; + OpPayloadBuilder { + pool, + client, + compute_pending_block, + evm_config, + best_transactions, + config, + _pd: PhantomData, + } + } + + /// Enables the rollup's compute pending block configuration option. + pub const fn compute_pending_block(self) -> Self { + self.set_compute_pending_block(true) + } + + /// Returns the rollup's compute pending block configuration option. + pub const fn is_compute_pending_block(&self) -> bool { + self.compute_pending_block + } +} + +impl OpPayloadBuilder +where + Pool: TransactionPool>, + Client: StateProviderFactory + ChainSpecProvider, + N: OpPayloadPrimitives, + Evm: ConfigureEvm< + Primitives = N, + NextBlockEnvCtx: BuildNextEnv, + >, + Attrs: OpAttributes>, +{ + /// Constructs an Optimism payload from the transactions sent via the + /// Payload attributes by the sequencer. If the `no_tx_pool` argument is passed in + /// the payload attributes, the transaction pool will be ignored and the only transactions + /// included in the payload will be those sent through the attributes. + /// + /// Given build arguments including an Optimism client, transaction pool, + /// and configuration, this function creates a transaction payload. Returns + /// a result indicating success with the payload or an error in case of failure. 
+ fn build_payload<'a, Txs>( + &self, + args: BuildArguments>, + best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a, + ) -> Result>, PayloadBuilderError> + where + Txs: + PayloadTransactions + OpPooledTx>, + { + let BuildArguments { mut cached_reads, config, cancel, best_payload } = args; + + let ctx = OpPayloadBuilderCtx { + evm_config: self.evm_config.clone(), + builder_config: self.config.clone(), + chain_spec: self.client.chain_spec(), + config, + cancel, + best_payload, + }; + + let builder = OpBuilder::new(best); + + let state_provider = self.client.state_by_block_hash(ctx.parent().hash())?; + let state = StateProviderDatabase::new(&state_provider); + + if ctx.attributes().no_tx_pool() { + builder.build(state, &state_provider, ctx) + } else { + // sequencer mode we can reuse cachedreads from previous runs + builder.build(cached_reads.as_db_mut(state), &state_provider, ctx) + } + .map(|out| out.with_cached_reads(cached_reads)) + } + + /// Computes the witness for the payload. + pub fn payload_witness( + &self, + parent: SealedHeader, + attributes: Attrs::RpcPayloadAttributes, + ) -> Result + where + Attrs: PayloadBuilderAttributes, + { + let attributes = + Attrs::try_new(parent.hash(), attributes, 3).map_err(PayloadBuilderError::other)?; + + let config = PayloadConfig { parent_header: Arc::new(parent), attributes }; + let ctx = OpPayloadBuilderCtx { + evm_config: self.evm_config.clone(), + builder_config: self.config.clone(), + chain_spec: self.client.chain_spec(), + config, + cancel: Default::default(), + best_payload: Default::default(), + }; + + let state_provider = self.client.state_by_block_hash(ctx.parent().hash())?; + + let builder = OpBuilder::new(|_| NoopPayloadTransactions::::default()); + builder.witness(state_provider, &ctx) + } +} + +/// Implementation of the [`PayloadBuilder`] trait for [`OpPayloadBuilder`]. 
+impl PayloadBuilder + for OpPayloadBuilder +where + N: OpPayloadPrimitives, + Client: StateProviderFactory + ChainSpecProvider + Clone, + Pool: TransactionPool>, + Evm: ConfigureEvm< + Primitives = N, + NextBlockEnvCtx: BuildNextEnv, + >, + Txs: OpPayloadTransactions, + Attrs: OpAttributes, +{ + type Attributes = Attrs; + type BuiltPayload = OpBuiltPayload; + + fn try_build( + &self, + args: BuildArguments, + ) -> Result, PayloadBuilderError> { + let pool = self.pool.clone(); + self.build_payload(args, |attrs| self.best_transactions.best_transactions(pool, attrs)) + } + + fn on_missing_payload( + &self, + _args: BuildArguments, + ) -> MissingPayloadBehaviour { + // we want to await the job that's already in progress because that should be returned as + // is, there's no benefit in racing another job + MissingPayloadBehaviour::AwaitInProgress + } + + // NOTE: this should only be used for testing purposes because this doesn't have access to L1 + // system txs, hence on_missing_payload we return [MissingPayloadBehaviour::AwaitInProgress]. + fn build_empty_payload( + &self, + config: PayloadConfig, + ) -> Result { + let args = BuildArguments { + config, + cached_reads: Default::default(), + cancel: Default::default(), + best_payload: None, + }; + self.build_payload(args, |_| NoopPayloadTransactions::::default())? + .into_payload() + .ok_or_else(|| PayloadBuilderError::MissingPayload) + } +} + +/// The type that builds the payload. +/// +/// Payload building for optimism is composed of several steps. +/// The first steps are mandatory and defined by the protocol. +/// +/// 1. first all System calls are applied. +/// 2. After canyon the forced deployed `create2deployer` must be loaded +/// 3. all sequencer transactions are executed (part of the payload attributes) +/// +/// Depending on whether the node acts as a sequencer and is allowed to include additional +/// transactions (`no_tx_pool == false`): +/// 4. include additional transactions +/// +/// And finally +/// 5. 
build the block: compute all roots (txs, state) +#[derive(derive_more::Debug)] +pub struct OpBuilder<'a, Txs> { + /// Yields the best transaction to include if transactions from the mempool are allowed. + #[debug(skip)] + best: Box Txs + 'a>, +} + +impl<'a, Txs> OpBuilder<'a, Txs> { + /// Creates a new [`OpBuilder`]. + pub fn new(best: impl FnOnce(BestTransactionsAttributes) -> Txs + Send + Sync + 'a) -> Self { + Self { best: Box::new(best) } + } +} + +impl OpBuilder<'_, Txs> { + /// Builds the payload on top of the state. + pub fn build( + self, + db: impl Database, + state_provider: impl StateProvider, + ctx: OpPayloadBuilderCtx, + ) -> Result>, PayloadBuilderError> + where + Evm: ConfigureEvm< + Primitives = N, + NextBlockEnvCtx: BuildNextEnv, + >, + ChainSpec: EthChainSpec + OpHardforks, + N: OpPayloadPrimitives, + Txs: + PayloadTransactions + OpPooledTx>, + Attrs: OpAttributes, + { + let Self { best } = self; + debug!(target: "payload_builder", id=%ctx.payload_id(), parent_header = ?ctx.parent().hash(), parent_number = ctx.parent().number(), "building new payload"); + + let mut db = State::builder().with_database(db).with_bundle_update().build(); + + // Load the L1 block contract into the database cache. If the L1 block contract is not + // pre-loaded the database will panic when trying to fetch the DA footprint gas + // scalar. + db.load_cache_account(L1_BLOCK_CONTRACT).map_err(BlockExecutionError::other)?; + + let mut builder = ctx.block_builder(&mut db)?; + + // 1. apply pre-execution changes + builder.apply_pre_execution_changes().map_err(|err| { + warn!(target: "payload_builder", %err, "failed to apply pre-execution changes"); + PayloadBuilderError::Internal(err.into()) + })?; + + // 2. execute sequencer transactions + let mut info = ctx.execute_sequencer_transactions(&mut builder)?; + + // 3. 
if mem pool transactions are requested we execute them + if !ctx.attributes().no_tx_pool() { + let best_txs = best(ctx.best_transaction_attributes(builder.evm_mut().block())); + if ctx.execute_best_transactions(&mut info, &mut builder, best_txs)?.is_some() { + return Ok(BuildOutcomeKind::Cancelled); + } + + // check if the new payload is even more valuable + if !ctx.is_better_payload(info.total_fees) { + // can skip building the block + return Ok(BuildOutcomeKind::Aborted { fees: info.total_fees }); + } + } + + let BlockBuilderOutcome { execution_result, hashed_state, trie_updates, block } = + builder.finish(state_provider)?; + + let sealed_block = Arc::new(block.sealed_block().clone()); + debug!(target: "payload_builder", id=%ctx.attributes().payload_id(), sealed_block_header = ?sealed_block.header(), "sealed built block"); + + let execution_outcome = + BlockExecutionOutput { state: db.take_bundle(), result: execution_result }; + + // create the executed block data + let executed: BuiltPayloadExecutedBlock = BuiltPayloadExecutedBlock { + recovered_block: Arc::new(block), + execution_output: Arc::new(execution_outcome), + // Keep unsorted; conversion to sorted happens when needed downstream + hashed_state: either::Either::Left(Arc::new(hashed_state)), + trie_updates: either::Either::Left(Arc::new(trie_updates)), + }; + + let no_tx_pool = ctx.attributes().no_tx_pool(); + + let payload = + OpBuiltPayload::new(ctx.payload_id(), sealed_block, info.total_fees, Some(executed)); + + if no_tx_pool { + // if `no_tx_pool` is set only transactions from the payload attributes will be included + // in the payload. In other words, the payload is deterministic and we can + // freeze it once we've successfully built it. + Ok(BuildOutcomeKind::Freeze(payload)) + } else { + Ok(BuildOutcomeKind::Better { payload }) + } + } + + /// Builds the payload and returns its [`ExecutionWitness`] based on the state after execution. 
+ pub fn witness( + self, + state_provider: impl StateProvider, + ctx: &OpPayloadBuilderCtx, + ) -> Result + where + Evm: ConfigureEvm< + Primitives = N, + NextBlockEnvCtx: BuildNextEnv, + >, + ChainSpec: EthChainSpec + OpHardforks, + N: OpPayloadPrimitives, + Txs: PayloadTransactions>, + Attrs: OpAttributes, + { + let mut db = State::builder() + .with_database(StateProviderDatabase::new(&state_provider)) + .with_bundle_update() + .build(); + let mut builder = ctx.block_builder(&mut db)?; + + builder.apply_pre_execution_changes()?; + ctx.execute_sequencer_transactions(&mut builder)?; + builder.into_executor().apply_post_execution_changes()?; + + if ctx.chain_spec.is_isthmus_active_at_timestamp(ctx.attributes().timestamp()) { + // force load `L2ToL1MessagePasser.sol` so l2 withdrawals root can be computed even if + // no l2 withdrawals in block + _ = db.load_cache_account(L2_TO_L1_MESSAGE_PASSER_ADDRESS)?; + } + + let ExecutionWitnessRecord { hashed_state, codes, keys, lowest_block_number: _ } = + ExecutionWitnessRecord::from_executed_state(&db); + let state = state_provider.witness(Default::default(), hashed_state)?; + Ok(ExecutionWitness { + state: state.into_iter().collect(), + codes, + keys, + ..Default::default() + }) + } +} + +/// A type that returns a the [`PayloadTransactions`] that should be included in the pool. +pub trait OpPayloadTransactions: Clone + Send + Sync + Unpin + 'static { + /// Returns an iterator that yields the transaction in the order they should get included in the + /// new payload. 
+ fn best_transactions>( + &self, + pool: Pool, + attr: BestTransactionsAttributes, + ) -> impl PayloadTransactions; +} + +impl OpPayloadTransactions for () { + fn best_transactions>( + &self, + pool: Pool, + attr: BestTransactionsAttributes, + ) -> impl PayloadTransactions { + BestPayloadTransactions::new(pool.best_transactions_with_attributes(attr)) + } +} + +/// Holds the state after execution +#[derive(Debug)] +pub struct ExecutedPayload { + /// Tracked execution info + pub info: ExecutionInfo, + /// Withdrawal hash. + pub withdrawals_root: Option, + /// The transaction receipts. + pub receipts: Vec, + /// The block env used during execution. + pub block_env: BlockEnv, +} + +/// This acts as the container for executed transactions and its byproducts (receipts, gas used) +#[derive(Default, Debug)] +pub struct ExecutionInfo { + /// All gas used so far + pub cumulative_gas_used: u64, + /// Estimated DA size + pub cumulative_da_bytes_used: u64, + /// Tracks fees from executed mempool transactions + pub total_fees: U256, +} + +impl ExecutionInfo { + /// Create a new instance with allocated slots. + pub const fn new() -> Self { + Self { cumulative_gas_used: 0, cumulative_da_bytes_used: 0, total_fees: U256::ZERO } + } + + /// Returns true if the transaction would exceed the block limits: + /// - block gas limit: ensures the transaction still fits into the block. + /// - tx DA limit: if configured, ensures the tx does not exceed the maximum allowed DA limit + /// per tx. + /// - block DA limit: if configured, ensures the transaction's DA size does not exceed the + /// maximum allowed DA limit per block. 
+ pub fn is_tx_over_limits( + &self, + tx_da_size: u64, + block_gas_limit: u64, + tx_data_limit: Option, + block_data_limit: Option, + tx_gas_limit: u64, + da_footprint_gas_scalar: Option, + ) -> bool { + if tx_data_limit.is_some_and(|da_limit| tx_da_size > da_limit) { + return true; + } + + let total_da_bytes_used = self.cumulative_da_bytes_used.saturating_add(tx_da_size); + + if block_data_limit.is_some_and(|da_limit| total_da_bytes_used > da_limit) { + return true; + } + + // Post Jovian: the tx DA footprint must be less than the block gas limit + if let Some(da_footprint_gas_scalar) = da_footprint_gas_scalar { + let tx_da_footprint = + total_da_bytes_used.saturating_mul(da_footprint_gas_scalar as u64); + if tx_da_footprint > block_gas_limit { + return true; + } + } + + self.cumulative_gas_used + tx_gas_limit > block_gas_limit + } +} + +/// Container type that holds all necessities to build a new payload. +#[derive(derive_more::Debug)] +pub struct OpPayloadBuilderCtx< + Evm: ConfigureEvm, + ChainSpec, + Attrs = OpPayloadBuilderAttributes::Primitives>>, +> { + /// The type that knows how to perform system calls and configure the evm. + pub evm_config: Evm, + /// Additional config for the builder/sequencer, e.g. DA and gas limit + pub builder_config: OpBuilderConfig, + /// The chainspec + pub chain_spec: Arc, + /// How to build the payload. + pub config: PayloadConfig>, + /// Marker to check whether the job has been cancelled. + pub cancel: CancelOnDrop, + /// The currently best payload. + pub best_payload: Option>, +} + +impl OpPayloadBuilderCtx +where + Evm: ConfigureEvm< + Primitives: OpPayloadPrimitives, + NextBlockEnvCtx: BuildNextEnv, ChainSpec>, + >, + ChainSpec: EthChainSpec + OpHardforks, + Attrs: OpAttributes>, +{ + /// Returns the parent block the payload will be build on. + pub fn parent(&self) -> &SealedHeaderFor { + self.config.parent_header.as_ref() + } + + /// Returns the builder attributes. 
+ pub const fn attributes(&self) -> &Attrs { + &self.config.attributes + } + + /// Returns the current fee settings for transactions from the mempool + pub fn best_transaction_attributes(&self, block_env: impl Block) -> BestTransactionsAttributes { + BestTransactionsAttributes::new( + block_env.basefee(), + block_env.blob_gasprice().map(|p| p as u64), + ) + } + + /// Returns the unique id for this payload job. + pub fn payload_id(&self) -> PayloadId { + self.attributes().payload_id() + } + + /// Returns true if the fees are higher than the previous payload. + pub fn is_better_payload(&self, total_fees: U256) -> bool { + is_better_payload(self.best_payload.as_ref(), total_fees) + } + + /// Prepares a [`BlockBuilder`] for the next block. + pub fn block_builder<'a, DB: Database>( + &'a self, + db: &'a mut State, + ) -> Result< + impl BlockBuilder< + Primitives = Evm::Primitives, + Executor: BlockExecutorFor<'a, Evm::BlockExecutorFactory, DB>, + > + 'a, + PayloadBuilderError, + > { + self.evm_config + .builder_for_next_block( + db, + self.parent(), + Evm::NextBlockEnvCtx::build_next_env( + self.attributes(), + self.parent(), + self.chain_spec.as_ref(), + ) + .map_err(PayloadBuilderError::other)?, + ) + .map_err(PayloadBuilderError::other) + } + + /// Executes all sequencer transactions that are included in the payload attributes. + pub fn execute_sequencer_transactions( + &self, + builder: &mut impl BlockBuilder, + ) -> Result { + let mut info = ExecutionInfo::new(); + + for sequencer_tx in self.attributes().sequencer_transactions() { + // A sequencer's block should never contain blob transactions. + if sequencer_tx.value().is_eip4844() { + return Err(PayloadBuilderError::other( + OpPayloadBuilderError::BlobTransactionRejected, + )); + } + + // Convert the transaction to a [RecoveredTx]. This is + // purely for the purposes of utilizing the `evm_config.tx_env`` function. 
+ // Deposit transactions do not have signatures, so if the tx is a deposit, this + // will just pull in its `from` address. + let sequencer_tx = sequencer_tx.value().try_clone_into_recovered().map_err(|_| { + PayloadBuilderError::other(OpPayloadBuilderError::TransactionEcRecoverFailed) + })?; + + let gas_used = match builder.execute_transaction(sequencer_tx.clone()) { + Ok(gas_used) => gas_used, + Err(BlockExecutionError::Validation(BlockValidationError::InvalidTx { + error, + .. + })) => { + trace!(target: "payload_builder", %error, ?sequencer_tx, "Error in sequencer transaction, skipping."); + continue; + } + Err(err) => { + // this is an error that we should treat as fatal for this attempt + return Err(PayloadBuilderError::EvmExecutionError(Box::new(err))); + } + }; + + // add gas used by the transaction to cumulative gas used, before creating the receipt + info.cumulative_gas_used += gas_used; + } + + Ok(info) + } + + /// Executes the given best transactions and updates the execution info. + /// + /// Returns `Ok(Some(())` if the job was cancelled. + pub fn execute_best_transactions( + &self, + info: &mut ExecutionInfo, + builder: &mut Builder, + mut best_txs: impl PayloadTransactions< + Transaction: PoolTransaction> + OpPooledTx, + >, + ) -> Result, PayloadBuilderError> + where + Builder: BlockBuilder, + <::Evm as AlloyEvm>::DB: Database, + { + let mut block_gas_limit = builder.evm_mut().block().gas_limit(); + if let Some(gas_limit_config) = self.builder_config.gas_limit_config.gas_limit() { + // If a gas limit is configured, use that limit as target if it's smaller, otherwise use + // the block's actual gas limit. 
+ block_gas_limit = gas_limit_config.min(block_gas_limit); + }; + let block_da_limit = self.builder_config.da_config.max_da_block_size(); + let tx_da_limit = self.builder_config.da_config.max_da_tx_size(); + let base_fee = builder.evm_mut().block().basefee(); + + while let Some(tx) = best_txs.next(()) { + let interop = tx.interop_deadline(); + let tx_da_size = tx.estimated_da_size(); + let tx = tx.into_consensus(); + + let da_footprint_gas_scalar = self + .chain_spec + .is_jovian_active_at_timestamp(self.attributes().timestamp()) + .then_some( + L1BlockInfo::fetch_da_footprint_gas_scalar(builder.evm_mut().db_mut()).expect( + "DA footprint should always be available from the database post jovian", + ), + ); + + if info.is_tx_over_limits( + tx_da_size, + block_gas_limit, + tx_da_limit, + block_da_limit, + tx.gas_limit(), + da_footprint_gas_scalar, + ) { + // we can't fit this transaction into the block, so we need to mark it as + // invalid which also removes all dependent transaction from + // the iterator before we can continue + best_txs.mark_invalid(tx.signer(), tx.nonce()); + continue; + } + + // A sequencer's block should never contain blob or deposit transactions from the pool. + if tx.is_eip4844() || tx.is_deposit() { + best_txs.mark_invalid(tx.signer(), tx.nonce()); + continue; + } + + // We skip invalid cross chain txs, they would be removed on the next block update in + // the maintenance job + if let Some(interop) = interop && + !is_valid_interop(interop, self.config.attributes.timestamp()) + { + best_txs.mark_invalid(tx.signer(), tx.nonce()); + continue; + } + // check if the job was cancelled, if so we can exit early + if self.cancel.is_cancelled() { + return Ok(Some(())); + } + + let gas_used = match builder.execute_transaction(tx.clone()) { + Ok(gas_used) => gas_used, + Err(BlockExecutionError::Validation(BlockValidationError::InvalidTx { + error, + .. 
+ })) => { + if error.is_nonce_too_low() { + // if the nonce is too low, we can skip this transaction + trace!(target: "payload_builder", %error, ?tx, "skipping nonce too low transaction"); + } else { + // if the transaction is invalid, we can skip it and all of its + // descendants + trace!(target: "payload_builder", %error, ?tx, "skipping invalid transaction and its descendants"); + best_txs.mark_invalid(tx.signer(), tx.nonce()); + } + continue; + } + Err(err) => { + // this is an error that we should treat as fatal for this attempt + return Err(PayloadBuilderError::EvmExecutionError(Box::new(err))); + } + }; + + // add gas used by the transaction to cumulative gas used, before creating the + // receipt + info.cumulative_gas_used += gas_used; + info.cumulative_da_bytes_used += tx_da_size; + + // update and add to total fees + let miner_fee = tx + .effective_tip_per_gas(base_fee) + .expect("fee is always valid; execution succeeded"); + info.total_fees += U256::from(miner_fee) * U256::from(gas_used); + } + + Ok(None) + } +} diff --git a/rust/op-reth/crates/payload/src/config.rs b/rust/op-reth/crates/payload/src/config.rs new file mode 100644 index 00000000000..b5fb48e50b1 --- /dev/null +++ b/rust/op-reth/crates/payload/src/config.rs @@ -0,0 +1,155 @@ +//! Additional configuration for the OP builder + +use std::sync::{Arc, atomic::AtomicU64}; + +/// Settings for the OP builder. +#[derive(Debug, Clone, Default)] +pub struct OpBuilderConfig { + /// Data availability configuration for the OP builder. + pub da_config: OpDAConfig, + /// Gas limit configuration for the OP builder. + pub gas_limit_config: OpGasLimitConfig, +} + +impl OpBuilderConfig { + /// Creates a new OP builder configuration with the given data availability configuration. 
+ pub const fn new(da_config: OpDAConfig, gas_limit_config: OpGasLimitConfig) -> Self { + Self { da_config, gas_limit_config } + } + + /// Returns the Data Availability configuration for the OP builder, if it has configured + /// constraints. + pub fn constrained_da_config(&self) -> Option<&OpDAConfig> { + if self.da_config.is_empty() { None } else { Some(&self.da_config) } + } +} + +/// Contains the Data Availability configuration for the OP builder. +/// +/// This type is shareable and can be used to update the DA configuration for the OP payload +/// builder. +#[derive(Debug, Clone, Default)] +pub struct OpDAConfig { + inner: Arc, +} + +impl OpDAConfig { + /// Creates a new Data Availability configuration with the given maximum sizes. + pub fn new(max_da_tx_size: u64, max_da_block_size: u64) -> Self { + let this = Self::default(); + this.set_max_da_size(max_da_tx_size, max_da_block_size); + this + } + + /// Returns whether the configuration is empty. + pub fn is_empty(&self) -> bool { + self.max_da_tx_size().is_none() && self.max_da_block_size().is_none() + } + + /// Returns the max allowed data availability size per transactions, if any. + pub fn max_da_tx_size(&self) -> Option { + let val = self.inner.max_da_tx_size.load(std::sync::atomic::Ordering::Relaxed); + if val == 0 { None } else { Some(val) } + } + + /// Returns the max allowed data availability size per block, if any. + pub fn max_da_block_size(&self) -> Option { + let val = self.inner.max_da_block_size.load(std::sync::atomic::Ordering::Relaxed); + if val == 0 { None } else { Some(val) } + } + + /// Sets the maximum data availability size currently allowed for inclusion. 0 means no maximum. + pub fn set_max_da_size(&self, max_da_tx_size: u64, max_da_block_size: u64) { + self.set_max_tx_size(max_da_tx_size); + self.set_max_block_size(max_da_block_size); + } + + /// Sets the maximum data availability size per transaction currently allowed for inclusion. 0 + /// means no maximum. 
+ pub fn set_max_tx_size(&self, max_da_tx_size: u64) { + self.inner.max_da_tx_size.store(max_da_tx_size, std::sync::atomic::Ordering::Relaxed); + } + + /// Sets the maximum data availability size per block currently allowed for inclusion. 0 means + /// no maximum. + pub fn set_max_block_size(&self, max_da_block_size: u64) { + self.inner.max_da_block_size.store(max_da_block_size, std::sync::atomic::Ordering::Relaxed); + } +} + +#[derive(Debug, Default)] +struct OpDAConfigInner { + /// Don't include any transactions with data availability size larger than this in any built + /// block + /// + /// 0 means no limit. + max_da_tx_size: AtomicU64, + /// Maximum total data availability size for a block + /// + /// 0 means no limit. + max_da_block_size: AtomicU64, +} + +/// Contains the Gas Limit configuration for the OP builder. +/// +/// This type is shareable and can be used to update the Gas Limit configuration for the OP payload +/// builder. +#[derive(Debug, Clone, Default)] +pub struct OpGasLimitConfig { + /// Gas limit for a transaction + /// + /// 0 means use the default gas limit. + gas_limit: Arc, +} + +impl OpGasLimitConfig { + /// Creates a new Gas Limit configuration with the given maximum gas limit. + pub fn new(max_gas_limit: u64) -> Self { + let this = Self::default(); + this.set_gas_limit(max_gas_limit); + this + } + /// Returns the gas limit for a transaction, if any. + pub fn gas_limit(&self) -> Option { + let val = self.gas_limit.load(std::sync::atomic::Ordering::Relaxed); + if val == 0 { None } else { Some(val) } + } + /// Sets the gas limit for a transaction. 0 means use the default gas limit. 
+ pub fn set_gas_limit(&self, gas_limit: u64) { + self.gas_limit.store(gas_limit, std::sync::atomic::Ordering::Relaxed); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_da() { + let da = OpDAConfig::default(); + assert_eq!(da.max_da_tx_size(), None); + assert_eq!(da.max_da_block_size(), None); + da.set_max_da_size(100, 200); + assert_eq!(da.max_da_tx_size(), Some(100)); + assert_eq!(da.max_da_block_size(), Some(200)); + da.set_max_da_size(0, 0); + assert_eq!(da.max_da_tx_size(), None); + assert_eq!(da.max_da_block_size(), None); + } + + #[test] + fn test_da_constrained() { + let config = OpBuilderConfig::default(); + assert!(config.constrained_da_config().is_none()); + } + + #[test] + fn test_gas_limit() { + let gas_limit = OpGasLimitConfig::default(); + assert_eq!(gas_limit.gas_limit(), None); + gas_limit.set_gas_limit(50000); + assert_eq!(gas_limit.gas_limit(), Some(50000)); + gas_limit.set_gas_limit(0); + assert_eq!(gas_limit.gas_limit(), None); + } +} diff --git a/op-reth/crates/payload/src/error.rs b/rust/op-reth/crates/payload/src/error.rs similarity index 100% rename from op-reth/crates/payload/src/error.rs rename to rust/op-reth/crates/payload/src/error.rs diff --git a/rust/op-reth/crates/payload/src/lib.rs b/rust/op-reth/crates/payload/src/lib.rs new file mode 100644 index 00000000000..6ccd90ec373 --- /dev/null +++ b/rust/op-reth/crates/payload/src/lib.rs @@ -0,0 +1,56 @@ +//! Optimism's payload builder implementation. 
+ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] +#![allow(clippy::useless_let_if_seq)] + +extern crate alloc; + +pub mod builder; +pub use builder::OpPayloadBuilder; +pub mod error; +pub mod payload; +use op_alloy_rpc_types_engine::OpExecutionData; +pub use payload::{ + OpBuiltPayload, OpPayloadAttributes, OpPayloadBuilderAttributes, payload_id_optimism, +}; +mod traits; +use reth_optimism_primitives::OpPrimitives; +use reth_payload_primitives::{BuiltPayload, PayloadTypes}; +use reth_primitives_traits::{Block, NodePrimitives, SealedBlock}; +pub use traits::*; +pub mod validator; +pub use validator::OpExecutionPayloadValidator; + +pub mod config; + +/// ZST that aggregates Optimism [`PayloadTypes`]. +#[derive(Debug, Default, Clone, serde::Deserialize, serde::Serialize)] +#[non_exhaustive] +pub struct OpPayloadTypes(core::marker::PhantomData); + +impl PayloadTypes for OpPayloadTypes +where + OpBuiltPayload: BuiltPayload, +{ + type ExecutionData = OpExecutionData; + type BuiltPayload = OpBuiltPayload; + type PayloadAttributes = OpPayloadAttributes; + type PayloadBuilderAttributes = OpPayloadBuilderAttributes; + + fn block_to_payload( + block: SealedBlock< + <::Primitives as NodePrimitives>::Block, + >, + ) -> Self::ExecutionData { + OpExecutionData::from_block_unchecked( + block.hash(), + &block.into_block().into_ethereum_block(), + ) + } +} diff --git a/rust/op-reth/crates/payload/src/payload.rs b/rust/op-reth/crates/payload/src/payload.rs new file mode 100644 index 00000000000..516a598ab1e --- /dev/null +++ b/rust/op-reth/crates/payload/src/payload.rs @@ -0,0 +1,581 @@ +//! 
Payload related types + +use std::{fmt::Debug, sync::Arc}; + +use alloy_consensus::{Block, BlockHeader}; +use alloy_eips::{ + eip1559::BaseFeeParams, eip2718::Decodable2718, eip4895::Withdrawals, eip7685::Requests, +}; +use alloy_primitives::{Address, B64, B256, Bytes, U256, keccak256}; +use alloy_rlp::Encodable; +use alloy_rpc_types_engine::{ + BlobsBundleV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadFieldV2, ExecutionPayloadV1, + ExecutionPayloadV3, PayloadId, +}; +use op_alloy_consensus::{EIP1559ParamError, encode_holocene_extra_data, encode_jovian_extra_data}; +use op_alloy_rpc_types_engine::{ + OpExecutionPayloadEnvelopeV3, OpExecutionPayloadEnvelopeV4, OpExecutionPayloadV4, +}; +use reth_chainspec::EthChainSpec; +use reth_optimism_evm::OpNextBlockEnvAttributes; +use reth_optimism_forks::OpHardforks; +use reth_payload_builder::{EthPayloadBuilderAttributes, PayloadBuilderError}; +use reth_payload_primitives::{ + BuildNextEnv, BuiltPayload, BuiltPayloadExecutedBlock, PayloadBuilderAttributes, +}; +use reth_primitives_traits::{ + NodePrimitives, SealedBlock, SealedHeader, SignedTransaction, WithEncoded, +}; + +/// Re-export for use in downstream arguments. +pub use op_alloy_rpc_types_engine::OpPayloadAttributes; +use reth_optimism_primitives::OpPrimitives; + +/// Optimism Payload Builder Attributes +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct OpPayloadBuilderAttributes { + /// Inner ethereum payload builder attributes + pub payload_attributes: EthPayloadBuilderAttributes, + /// `NoTxPool` option for the generated payload + pub no_tx_pool: bool, + /// Decoded transactions and the original EIP-2718 encoded bytes as received in the payload + /// attributes. 
+ pub transactions: Vec>, + /// The gas limit for the generated payload + pub gas_limit: Option, + /// EIP-1559 parameters for the generated payload + pub eip_1559_params: Option, + /// Min base fee for the generated payload (only available post-Jovian) + pub min_base_fee: Option, +} + +impl Default for OpPayloadBuilderAttributes { + fn default() -> Self { + Self { + payload_attributes: Default::default(), + no_tx_pool: Default::default(), + gas_limit: Default::default(), + eip_1559_params: Default::default(), + transactions: Default::default(), + min_base_fee: Default::default(), + } + } +} + +impl OpPayloadBuilderAttributes { + /// Extracts the extra data parameters post-Holocene hardfork. + /// In Holocene, those parameters are the EIP-1559 base fee parameters. + pub fn get_holocene_extra_data( + &self, + default_base_fee_params: BaseFeeParams, + ) -> Result { + self.eip_1559_params + .map(|params| encode_holocene_extra_data(params, default_base_fee_params)) + .ok_or(EIP1559ParamError::NoEIP1559Params)? + } + + /// Extracts the extra data parameters post-Jovian hardfork. + /// Those parameters are the EIP-1559 parameters from Holocene and the minimum base fee. + pub fn get_jovian_extra_data( + &self, + default_base_fee_params: BaseFeeParams, + ) -> Result { + let min_base_fee = self.min_base_fee.ok_or(EIP1559ParamError::MinBaseFeeNotSet)?; + self.eip_1559_params + .map(|params| encode_jovian_extra_data(params, default_base_fee_params, min_base_fee)) + .ok_or(EIP1559ParamError::NoEIP1559Params)? + } +} + +impl PayloadBuilderAttributes + for OpPayloadBuilderAttributes +{ + type RpcPayloadAttributes = OpPayloadAttributes; + type Error = alloy_rlp::Error; + + /// Creates a new payload builder for the given parent block and the attributes. 
+ /// + /// Derives the unique [`PayloadId`] for the given parent and attributes + fn try_new( + parent: B256, + attributes: OpPayloadAttributes, + version: u8, + ) -> Result { + let id = payload_id_optimism(&parent, &attributes, version); + + let transactions = attributes + .transactions + .unwrap_or_default() + .into_iter() + .map(|data| { + Decodable2718::decode_2718_exact(data.as_ref()).map(|tx| WithEncoded::new(data, tx)) + }) + .collect::>()?; + + let payload_attributes = EthPayloadBuilderAttributes { + id, + parent, + timestamp: attributes.payload_attributes.timestamp, + suggested_fee_recipient: attributes.payload_attributes.suggested_fee_recipient, + prev_randao: attributes.payload_attributes.prev_randao, + withdrawals: attributes.payload_attributes.withdrawals.unwrap_or_default().into(), + parent_beacon_block_root: attributes.payload_attributes.parent_beacon_block_root, + }; + + Ok(Self { + payload_attributes, + no_tx_pool: attributes.no_tx_pool.unwrap_or_default(), + transactions, + gas_limit: attributes.gas_limit, + eip_1559_params: attributes.eip_1559_params, + min_base_fee: attributes.min_base_fee, + }) + } + + fn payload_id(&self) -> PayloadId { + self.payload_attributes.id + } + + fn parent(&self) -> B256 { + self.payload_attributes.parent + } + + fn timestamp(&self) -> u64 { + self.payload_attributes.timestamp + } + + fn parent_beacon_block_root(&self) -> Option { + self.payload_attributes.parent_beacon_block_root + } + + fn suggested_fee_recipient(&self) -> Address { + self.payload_attributes.suggested_fee_recipient + } + + fn prev_randao(&self) -> B256 { + self.payload_attributes.prev_randao + } + + fn withdrawals(&self) -> &Withdrawals { + &self.payload_attributes.withdrawals + } +} + +impl From + for OpPayloadBuilderAttributes +{ + fn from(value: EthPayloadBuilderAttributes) -> Self { + Self { payload_attributes: value, ..Default::default() } + } +} + +/// Contains the built payload. 
+#[derive(Debug, Clone)] +pub struct OpBuiltPayload { + /// Identifier of the payload + pub(crate) id: PayloadId, + /// Sealed block + pub(crate) block: Arc>, + /// Block execution data for the payload, if any. + pub(crate) executed_block: Option>, + /// The fees of the block + pub(crate) fees: U256, +} + +// === impl BuiltPayload === + +impl OpBuiltPayload { + /// Initializes the payload with the given initial block. + pub const fn new( + id: PayloadId, + block: Arc>, + fees: U256, + executed_block: Option>, + ) -> Self { + Self { id, block, fees, executed_block } + } + + /// Returns the identifier of the payload. + pub const fn id(&self) -> PayloadId { + self.id + } + + /// Returns the built block(sealed) + pub fn block(&self) -> &SealedBlock { + &self.block + } + + /// Fees of the block + pub const fn fees(&self) -> U256 { + self.fees + } + + /// Converts the value into [`SealedBlock`]. + pub fn into_sealed_block(self) -> SealedBlock { + Arc::unwrap_or_clone(self.block) + } +} + +impl BuiltPayload for OpBuiltPayload { + type Primitives = N; + + fn block(&self) -> &SealedBlock { + self.block() + } + + fn fees(&self) -> U256 { + self.fees + } + + fn executed_block(&self) -> Option> { + self.executed_block.clone() + } + + fn requests(&self) -> Option { + None + } +} + +// V1 engine_getPayloadV1 response +impl From> for ExecutionPayloadV1 +where + T: SignedTransaction, + N: NodePrimitives>, +{ + fn from(value: OpBuiltPayload) -> Self { + Self::from_block_unchecked( + value.block().hash(), + &Arc::unwrap_or_clone(value.block).into_block(), + ) + } +} + +// V2 engine_getPayloadV2 response +impl From> for ExecutionPayloadEnvelopeV2 +where + T: SignedTransaction, + N: NodePrimitives>, +{ + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, .. 
} = value; + + Self { + block_value: fees, + execution_payload: ExecutionPayloadFieldV2::from_block_unchecked( + block.hash(), + &Arc::unwrap_or_clone(block).into_block(), + ), + } + } +} + +impl From> for OpExecutionPayloadEnvelopeV3 +where + T: SignedTransaction, + N: NodePrimitives>, +{ + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, .. } = value; + + let parent_beacon_block_root = block.parent_beacon_block_root.unwrap_or_default(); + + Self { + execution_payload: ExecutionPayloadV3::from_block_unchecked( + block.hash(), + &Arc::unwrap_or_clone(block).into_block(), + ), + block_value: fees, + // From the engine API spec: + // + // > Client software **MAY** use any heuristics to decide whether to set + // `shouldOverrideBuilder` flag or not. If client software does not implement any + // heuristic this flag **SHOULD** be set to `false`. + // + // Spec: + // + should_override_builder: false, + // No blobs for OP. + blobs_bundle: BlobsBundleV1 { blobs: vec![], commitments: vec![], proofs: vec![] }, + parent_beacon_block_root, + } + } +} + +impl From> for OpExecutionPayloadEnvelopeV4 +where + T: SignedTransaction, + N: NodePrimitives>, +{ + fn from(value: OpBuiltPayload) -> Self { + let OpBuiltPayload { block, fees, .. } = value; + + let parent_beacon_block_root = block.parent_beacon_block_root.unwrap_or_default(); + + let l2_withdrawals_root = block.withdrawals_root.unwrap_or_default(); + let payload_v3 = ExecutionPayloadV3::from_block_unchecked( + block.hash(), + &Arc::unwrap_or_clone(block).into_block(), + ); + + Self { + execution_payload: OpExecutionPayloadV4::from_v3_with_withdrawals_root( + payload_v3, + l2_withdrawals_root, + ), + block_value: fees, + // From the engine API spec: + // + // > Client software **MAY** use any heuristics to decide whether to set + // `shouldOverrideBuilder` flag or not. If client software does not implement any + // heuristic this flag **SHOULD** be set to `false`. 
+ // + // Spec: + // + should_override_builder: false, + // No blobs for OP. + blobs_bundle: BlobsBundleV1 { blobs: vec![], commitments: vec![], proofs: vec![] }, + parent_beacon_block_root, + execution_requests: vec![], + } + } +} + +/// Generates the payload id for the configured payload from the [`OpPayloadAttributes`]. +/// +/// Returns an 8-byte identifier by hashing the payload components with sha256 hash. +/// +/// Note: This must be updated whenever the [`OpPayloadAttributes`] changes for a hardfork. +/// See also +pub fn payload_id_optimism( + parent: &B256, + attributes: &OpPayloadAttributes, + payload_version: u8, +) -> PayloadId { + use sha2::Digest; + let mut hasher = sha2::Sha256::new(); + hasher.update(parent.as_slice()); + hasher.update(&attributes.payload_attributes.timestamp.to_be_bytes()[..]); + hasher.update(attributes.payload_attributes.prev_randao.as_slice()); + hasher.update(attributes.payload_attributes.suggested_fee_recipient.as_slice()); + if let Some(withdrawals) = &attributes.payload_attributes.withdrawals { + let mut buf = Vec::new(); + withdrawals.encode(&mut buf); + hasher.update(buf); + } + + if let Some(parent_beacon_block) = attributes.payload_attributes.parent_beacon_block_root { + hasher.update(parent_beacon_block); + } + + let no_tx_pool = attributes.no_tx_pool.unwrap_or_default(); + if no_tx_pool || attributes.transactions.as_ref().is_some_and(|txs| !txs.is_empty()) { + hasher.update([no_tx_pool as u8]); + let txs_len = attributes.transactions.as_ref().map(|txs| txs.len()).unwrap_or_default(); + hasher.update(&txs_len.to_be_bytes()[..]); + if let Some(txs) = &attributes.transactions { + for tx in txs { + // we have to just hash the bytes here because otherwise we would need to decode + // the transactions here which really isn't ideal + let tx_hash = keccak256(tx); + // maybe we can try just taking the hash and not decoding + hasher.update(tx_hash) + } + } + } + + if let Some(gas_limit) = attributes.gas_limit { + 
hasher.update(gas_limit.to_be_bytes()); + } + + if let Some(eip_1559_params) = attributes.eip_1559_params { + hasher.update(eip_1559_params.as_slice()); + } + + if let Some(min_base_fee) = attributes.min_base_fee { + hasher.update(min_base_fee.to_be_bytes()); + } + + let mut out = hasher.finalize(); + out[0] = payload_version; + + #[allow(deprecated)] // generic-array 0.14 deprecated + PayloadId::new(out.as_slice()[..8].try_into().expect("sufficient length")) +} + +impl BuildNextEnv, H, ChainSpec> + for OpNextBlockEnvAttributes +where + H: BlockHeader, + T: SignedTransaction, + ChainSpec: EthChainSpec + OpHardforks, +{ + fn build_next_env( + attributes: &OpPayloadBuilderAttributes, + parent: &SealedHeader, + chain_spec: &ChainSpec, + ) -> Result { + let extra_data = if chain_spec.is_jovian_active_at_timestamp(attributes.timestamp()) { + attributes + .get_jovian_extra_data( + chain_spec.base_fee_params_at_timestamp(attributes.timestamp()), + ) + .map_err(PayloadBuilderError::other)? + } else if chain_spec.is_holocene_active_at_timestamp(attributes.timestamp()) { + attributes + .get_holocene_extra_data( + chain_spec.base_fee_params_at_timestamp(attributes.timestamp()), + ) + .map_err(PayloadBuilderError::other)? 
+ } else { + Default::default() + }; + + Ok(Self { + timestamp: attributes.timestamp(), + suggested_fee_recipient: attributes.suggested_fee_recipient(), + prev_randao: attributes.prev_randao(), + gas_limit: attributes.gas_limit.unwrap_or_else(|| parent.gas_limit()), + parent_beacon_block_root: attributes.parent_beacon_block_root(), + extra_data, + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::OpPayloadAttributes; + use alloy_primitives::{FixedBytes, address, b256, bytes}; + use alloy_rpc_types_engine::PayloadAttributes; + use reth_optimism_primitives::OpTransactionSigned; + use reth_payload_primitives::EngineApiMessageVersion; + use std::str::FromStr; + + #[test] + fn test_payload_id_parity_op_geth() { + // INFO rollup_boost::server:received fork_choice_updated_v3 from builder and l2_client + // payload_id_builder="0x6ef26ca02318dcf9" payload_id_l2="0x03d2dae446d2a86a" + let expected = + PayloadId::new(FixedBytes::<8>::from_str("0x03d2dae446d2a86a").unwrap().into()); + let attrs = OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: 1728933301, + prev_randao: b256!("0x9158595abbdab2c90635087619aa7042bbebe47642dfab3c9bfb934f6b082765"), + suggested_fee_recipient: address!("0x4200000000000000000000000000000000000011"), + withdrawals: Some([].into()), + parent_beacon_block_root: b256!("0x8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into(), + }, + transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), + no_tx_pool: None, + 
gas_limit: Some(30000000), + eip_1559_params: None, + min_base_fee: None, + }; + + // Reth's `PayloadId` should match op-geth's `PayloadId`. This fails + assert_eq!( + expected, + payload_id_optimism( + &b256!("0x3533bf30edaf9505d0810bf475cbe4e5f4b9889904b9845e83efdeab4e92eb1e"), + &attrs, + EngineApiMessageVersion::V3 as u8 + ) + ); + } + + #[test] + fn test_payload_id_parity_op_geth_jovian() { + // + let expected = + PayloadId::new(FixedBytes::<8>::from_str("0x046c65ffc4d659ec").unwrap().into()); + let attrs = OpPayloadAttributes { + payload_attributes: PayloadAttributes { + timestamp: 1728933301, + prev_randao: b256!("0x9158595abbdab2c90635087619aa7042bbebe47642dfab3c9bfb934f6b082765"), + suggested_fee_recipient: address!("0x4200000000000000000000000000000000000011"), + withdrawals: Some([].into()), + parent_beacon_block_root: b256!("0x8fe0193b9bf83cb7e5a08538e494fecc23046aab9a497af3704f4afdae3250ff").into(), + }, + transactions: Some([bytes!("7ef8f8a0dc19cfa777d90980e4875d0a548a881baaa3f83f14d1bc0d3038bc329350e54194deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000f424000000000000000000000000300000000670d6d890000000000000125000000000000000000000000000000000000000000000000000000000000000700000000000000000000000000000000000000000000000000000000000000014bf9181db6e381d4384bbf69c48b0ee0eed23c6ca26143c6d2544f9d39997a590000000000000000000000007f83d659683caf2767fd3c720981d51f5bc365bc")].into()), + no_tx_pool: None, + gas_limit: Some(30000000), + eip_1559_params: None, + min_base_fee: Some(100), + }; + + // Reth's `PayloadId` should match op-geth's `PayloadId`. 
This fails + assert_eq!( + expected, + payload_id_optimism( + &b256!("0x3533bf30edaf9505d0810bf475cbe4e5f4b9889904b9845e83efdeab4e92eb1e"), + &attrs, + EngineApiMessageVersion::V4 as u8 + ) + ); + } + + #[test] + fn test_get_extra_data_post_holocene() { + let attributes: OpPayloadBuilderAttributes = + OpPayloadBuilderAttributes { + eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), + ..Default::default() + }; + let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 8, 0, 0, 0, 8])); + } + + #[test] + fn test_get_extra_data_post_holocene_default() { + let attributes: OpPayloadBuilderAttributes = + OpPayloadBuilderAttributes { eip_1559_params: Some(B64::ZERO), ..Default::default() }; + let extra_data = attributes.get_holocene_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!(extra_data.unwrap(), Bytes::copy_from_slice(&[0, 0, 0, 0, 80, 0, 0, 0, 60])); + } + + #[test] + fn test_get_extra_data_post_jovian() { + let attributes: OpPayloadBuilderAttributes = + OpPayloadBuilderAttributes { + eip_1559_params: Some(B64::from_str("0x0000000800000008").unwrap()), + min_base_fee: Some(10), + ..Default::default() + }; + let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!( + extra_data.unwrap(), + // Version byte is 1 for Jovian, then holocene payload followed by 8 bytes for the + // minimum base fee + Bytes::copy_from_slice(&[1, 0, 0, 0, 8, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 10]) + ); + } + + #[test] + fn test_get_extra_data_post_jovian_default() { + let attributes: OpPayloadBuilderAttributes = + OpPayloadBuilderAttributes { + eip_1559_params: Some(B64::ZERO), + min_base_fee: Some(10), + ..Default::default() + }; + let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!( + extra_data.unwrap(), + // Version byte is 1 for Jovian, then holocene payload followed by 8 bytes for the + // minimum 
base fee + Bytes::copy_from_slice(&[1, 0, 0, 0, 80, 0, 0, 0, 60, 0, 0, 0, 0, 0, 0, 0, 10]) + ); + } + + #[test] + fn test_get_extra_data_post_jovian_no_base_fee() { + let attributes: OpPayloadBuilderAttributes = + OpPayloadBuilderAttributes { + eip_1559_params: Some(B64::ZERO), + min_base_fee: None, + ..Default::default() + }; + let extra_data = attributes.get_jovian_extra_data(BaseFeeParams::new(80, 60)); + assert_eq!(extra_data.unwrap_err(), EIP1559ParamError::MinBaseFeeNotSet); + } +} diff --git a/rust/op-reth/crates/payload/src/traits.rs b/rust/op-reth/crates/payload/src/traits.rs new file mode 100644 index 00000000000..bd371ee1a0f --- /dev/null +++ b/rust/op-reth/crates/payload/src/traits.rs @@ -0,0 +1,60 @@ +use alloy_consensus::BlockBody; +use reth_optimism_primitives::{DepositReceipt, transaction::OpTransaction}; +use reth_payload_primitives::PayloadBuilderAttributes; +use reth_primitives_traits::{FullBlockHeader, NodePrimitives, SignedTransaction, WithEncoded}; + +use crate::OpPayloadBuilderAttributes; + +/// Helper trait to encapsulate common bounds on [`NodePrimitives`] for OP payload builder. +pub trait OpPayloadPrimitives: + NodePrimitives< + Receipt: DepositReceipt, + SignedTx = Self::_TX, + BlockBody = BlockBody, + BlockHeader = Self::_Header, + > +{ + /// Helper AT to bound [`NodePrimitives::Block`] type without causing bound cycle. + type _TX: SignedTransaction + OpTransaction; + /// Helper AT to bound [`NodePrimitives::Block`] type without causing bound cycle. + type _Header: FullBlockHeader; +} + +impl OpPayloadPrimitives for T +where + Tx: SignedTransaction + OpTransaction, + T: NodePrimitives< + SignedTx = Tx, + Receipt: DepositReceipt, + BlockBody = BlockBody, + BlockHeader = Header, + >, + Header: FullBlockHeader, +{ + type _TX = Tx; + type _Header = Header; +} + +/// Attributes for the OP payload builder. +pub trait OpAttributes: PayloadBuilderAttributes { + /// Primitive transaction type. 
+ type Transaction: SignedTransaction; + + /// Whether to use the transaction pool for the payload. + fn no_tx_pool(&self) -> bool; + + /// Sequencer transactions to include in the payload. + fn sequencer_transactions(&self) -> &[WithEncoded]; +} + +impl OpAttributes for OpPayloadBuilderAttributes { + type Transaction = T; + + fn no_tx_pool(&self) -> bool { + self.no_tx_pool + } + + fn sequencer_transactions(&self) -> &[WithEncoded] { + &self.transactions + } +} diff --git a/rust/op-reth/crates/payload/src/validator.rs b/rust/op-reth/crates/payload/src/validator.rs new file mode 100644 index 00000000000..88b424eb6bd --- /dev/null +++ b/rust/op-reth/crates/payload/src/validator.rs @@ -0,0 +1,101 @@ +//! Validates execution payload wrt Optimism consensus rules + +use alloc::sync::Arc; +use alloy_consensus::Block; +use alloy_rpc_types_engine::PayloadError; +use derive_more::{Constructor, Deref}; +use op_alloy_rpc_types_engine::{OpExecutionData, OpPayloadError}; +use reth_optimism_forks::OpHardforks; +use reth_payload_validator::{cancun, prague, shanghai}; +use reth_primitives_traits::{Block as _, SealedBlock, SignedTransaction}; + +/// Execution payload validator. +#[derive(Clone, Debug, Deref, Constructor)] +pub struct OpExecutionPayloadValidator { + /// Chain spec to validate against. + #[deref] + inner: Arc, +} + +impl OpExecutionPayloadValidator +where + ChainSpec: OpHardforks, +{ + /// Returns reference to chain spec. + pub fn chain_spec(&self) -> &ChainSpec { + &self.inner + } + + /// Ensures that the given payload does not violate any consensus rules that concern the block's + /// layout. + /// + /// See also [`ensure_well_formed_payload`]. 
+ pub fn ensure_well_formed_payload( + &self, + payload: OpExecutionData, + ) -> Result>, OpPayloadError> { + ensure_well_formed_payload(self.chain_spec(), payload) + } +} + +/// Ensures that the given payload does not violate any consensus rules that concern the block's +/// layout, like: +/// - missing or invalid base fee +/// - invalid extra data +/// - invalid transactions +/// - incorrect hash +/// - block contains blob transactions or blob versioned hashes +/// - block contains l1 withdrawals +/// +/// The checks are done in the order that conforms with the engine-API specification. +/// +/// This is intended to be invoked after receiving the payload from the CLI. +/// The additional fields, starting with [`MaybeCancunPayloadFields`](alloy_rpc_types_engine::MaybeCancunPayloadFields), are not part of the payload, but are additional fields starting in the `engine_newPayloadV3` RPC call, See also +/// +/// If the cancun fields are provided this also validates that the versioned hashes in the block +/// are empty as well as those passed in the sidecar. If the payload fields are not provided. +/// +/// Validation according to specs . 
+pub fn ensure_well_formed_payload( + chain_spec: ChainSpec, + payload: OpExecutionData, +) -> Result>, OpPayloadError> +where + ChainSpec: OpHardforks, + T: SignedTransaction, +{ + let OpExecutionData { payload, sidecar } = payload; + + let expected_hash = payload.block_hash(); + + // First parse the block + let sealed_block = payload.try_into_block_with_sidecar(&sidecar)?.seal_slow(); + + // Ensure the hash included in the payload matches the block hash + if expected_hash != sealed_block.hash() { + return Err(PayloadError::BlockHash { + execution: sealed_block.hash(), + consensus: expected_hash, + } + .into()); + } + + shanghai::ensure_well_formed_fields( + sealed_block.body(), + chain_spec.is_shanghai_active_at_timestamp(sealed_block.timestamp), + )?; + + cancun::ensure_well_formed_header_and_sidecar_fields( + &sealed_block, + sidecar.ecotone(), + chain_spec.is_cancun_active_at_timestamp(sealed_block.timestamp), + )?; + + prague::ensure_well_formed_fields( + sealed_block.body(), + sidecar.isthmus(), + chain_spec.is_prague_active_at_timestamp(sealed_block.timestamp), + )?; + + Ok(sealed_block) +} diff --git a/rust/op-reth/crates/primitives/Cargo.toml b/rust/op-reth/crates/primitives/Cargo.toml new file mode 100644 index 00000000000..1a26b2c4a3a --- /dev/null +++ b/rust/op-reth/crates/primitives/Cargo.toml @@ -0,0 +1,96 @@ +[package] +name = "reth-optimism-primitives" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "OP primitive types" + +[lints] +workspace = true + +[dependencies] +# reth +reth-primitives-traits = { workspace = true, features = ["op"] } + +# ethereum +alloy-primitives.workspace = true +alloy-consensus.workspace = true +alloy-eips.workspace = true +alloy-rlp.workspace = true + +# op +op-alloy-consensus.workspace = true + +# codec +serde = { workspace = true, optional = true } 
+serde_with = { workspace = true, optional = true } + +[dev-dependencies] +reth-codecs = { workspace = true, features = ["test-utils", "op"] } + +bytes.workspace = true +modular-bitfield.workspace = true +reth-zstd-compressors.workspace = true +rand.workspace = true +arbitrary.workspace = true +rstest.workspace = true +serde_json.workspace = true +bincode.workspace = true + +proptest-arbitrary-interop.workspace = true +proptest.workspace = true +rand_08.workspace = true +secp256k1 = { workspace = true, features = ["rand", "std", "serde"] } + +[features] +default = ["std"] +std = [ + "reth-primitives-traits/std", + "alloy-consensus/std", + "alloy-primitives/std", + "serde?/std", + "alloy-rlp/std", + "op-alloy-consensus/std", + "serde_json/std", + "serde_with?/std", + "alloy-eips/std", + "bytes/std", + "reth-zstd-compressors/std", +] +alloy-compat = ["op-alloy-consensus/alloy-compat"] +reth-codec = [ + "std", + "reth-primitives-traits/reth-codec", +] +serde = [ + "dep:serde", + "reth-primitives-traits/serde", + "alloy-primitives/serde", + "alloy-consensus/serde", + "op-alloy-consensus/serde", + "alloy-eips/serde", + "rand/serde", + "rand_08/serde", + "bytes/serde", + "reth-codecs/serde", +] +serde-bincode-compat = [ + "serde", + "serde_with", + "alloy-consensus/serde-bincode-compat", + "op-alloy-consensus/serde-bincode-compat", + "reth-primitives-traits/serde-bincode-compat", + "alloy-eips/serde-bincode-compat", +] +arbitrary = [ + "std", + "reth-primitives-traits/arbitrary", + "op-alloy-consensus/arbitrary", + "alloy-consensus/arbitrary", + "alloy-primitives/arbitrary", + "alloy-eips/arbitrary", + "reth-codecs/arbitrary", +] diff --git a/op-reth/crates/primitives/src/bedrock.rs b/rust/op-reth/crates/primitives/src/bedrock.rs similarity index 95% rename from op-reth/crates/primitives/src/bedrock.rs rename to rust/op-reth/crates/primitives/src/bedrock.rs index 5ab72cf0d7d..2ea1d4dfd5e 100644 --- a/op-reth/crates/primitives/src/bedrock.rs +++ 
b/rust/op-reth/crates/primitives/src/bedrock.rs @@ -1,7 +1,7 @@ //! OP mainnet bedrock related data. -use alloy_consensus::{Header, EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH}; -use alloy_primitives::{address, b256, bloom, bytes, B256, B64, U256}; +use alloy_consensus::{EMPTY_OMMER_ROOT_HASH, EMPTY_ROOT_HASH, Header}; +use alloy_primitives::{B64, B256, U256, address, b256, bloom, bytes}; /// Transaction 0x9ed8f713b2cc6439657db52dcd2fdb9cc944915428f3c6e2a7703e242b259cb9 in block 985, /// replayed in blocks: @@ -43,12 +43,12 @@ pub const BLOCK_NUMS_REPLAYED_TX: [u64; 6] = [ /// with replayed transaction happen to only contain the single transaction. pub fn is_dup_tx(block_number: u64) -> bool { if block_number > BLOCK_NUMS_REPLAYED_TX[5] { - return false + return false; } // these blocks just have one transaction! if BLOCK_NUMS_REPLAYED_TX.contains(&block_number) { - return true + return true; } false diff --git a/op-reth/crates/primitives/src/lib.rs b/rust/op-reth/crates/primitives/src/lib.rs similarity index 100% rename from op-reth/crates/primitives/src/lib.rs rename to rust/op-reth/crates/primitives/src/lib.rs diff --git a/rust/op-reth/crates/primitives/src/receipt.rs b/rust/op-reth/crates/primitives/src/receipt.rs new file mode 100644 index 00000000000..9d8f0294f7f --- /dev/null +++ b/rust/op-reth/crates/primitives/src/receipt.rs @@ -0,0 +1,351 @@ +use alloc::vec::Vec; +use alloy_consensus::{ + Eip658Value, Eip2718EncodableReceipt, Receipt, ReceiptWithBloom, RlpDecodableReceipt, + RlpEncodableReceipt, TxReceipt, Typed2718, +}; +use alloy_eips::{ + Decodable2718, Encodable2718, + eip2718::{Eip2718Result, IsTyped2718}, +}; +use alloy_primitives::{Bloom, Log}; +use alloy_rlp::{BufMut, Decodable, Encodable, Header}; +use op_alloy_consensus::{OpDepositReceipt, OpReceipt, OpTxType}; +use reth_primitives_traits::InMemorySize; + +/// Trait for deposit receipt. 
+pub trait DepositReceipt: reth_primitives_traits::Receipt { + /// Converts a `Receipt` into a mutable Optimism deposit receipt. + fn as_deposit_receipt_mut(&mut self) -> Option<&mut OpDepositReceipt>; + + /// Extracts an Optimism deposit receipt from `Receipt`. + fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt>; +} + +impl DepositReceipt for OpReceipt { + fn as_deposit_receipt_mut(&mut self) -> Option<&mut OpDepositReceipt> { + match self { + Self::Deposit(receipt) => Some(receipt), + _ => None, + } + } + + fn as_deposit_receipt(&self) -> Option<&OpDepositReceipt> { + match self { + Self::Deposit(receipt) => Some(receipt), + _ => None, + } + } +} + +#[cfg(all(feature = "serde", feature = "serde-bincode-compat"))] +pub(super) mod serde_bincode_compat { + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use serde_with::{DeserializeAs, SerializeAs}; + + /// Bincode-compatible [`super::OpReceipt`] serde implementation. + /// + /// Intended to use with the [`serde_with::serde_as`] macro in the following way: + /// ```rust + /// use reth_optimism_primitives::OpReceipt; + /// use reth_primitives_traits::serde_bincode_compat::SerdeBincodeCompat; + /// use serde::{Deserialize, Serialize, de::DeserializeOwned}; + /// use serde_with::serde_as; + /// + /// #[serde_as] + /// #[derive(Serialize, Deserialize)] + /// struct Data { + /// #[serde_as( + /// as = "reth_primitives_traits::serde_bincode_compat::BincodeReprFor<'_, OpReceipt>" + /// )] + /// receipt: OpReceipt, + /// } + /// ``` + #[allow(rustdoc::private_doc_tests)] + #[derive(Debug, Serialize, Deserialize)] + pub enum OpReceipt<'a> { + /// Legacy receipt + Legacy(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-2930 receipt + Eip2930(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-1559 receipt + Eip1559(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// EIP-7702 receipt + 
Eip7702(alloy_consensus::serde_bincode_compat::Receipt<'a, alloy_primitives::Log>), + /// Deposit receipt + Deposit( + op_alloy_consensus::serde_bincode_compat::OpDepositReceipt<'a, alloy_primitives::Log>, + ), + } + + impl<'a> From<&'a super::OpReceipt> for OpReceipt<'a> { + fn from(value: &'a super::OpReceipt) -> Self { + match value { + super::OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), + super::OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), + super::OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), + super::OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), + super::OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), + } + } + } + + impl<'a> From> for super::OpReceipt { + fn from(value: OpReceipt<'a>) -> Self { + match value { + OpReceipt::Legacy(receipt) => Self::Legacy(receipt.into()), + OpReceipt::Eip2930(receipt) => Self::Eip2930(receipt.into()), + OpReceipt::Eip1559(receipt) => Self::Eip1559(receipt.into()), + OpReceipt::Eip7702(receipt) => Self::Eip7702(receipt.into()), + OpReceipt::Deposit(receipt) => Self::Deposit(receipt.into()), + } + } + } + + impl SerializeAs for OpReceipt<'_> { + fn serialize_as(source: &super::OpReceipt, serializer: S) -> Result + where + S: Serializer, + { + OpReceipt::<'_>::from(source).serialize(serializer) + } + } + + impl<'de> DeserializeAs<'de, super::OpReceipt> for OpReceipt<'de> { + fn deserialize_as(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + OpReceipt::<'_>::deserialize(deserializer).map(Into::into) + } + } + + #[cfg(test)] + mod tests { + use crate::{OpReceipt, receipt::serde_bincode_compat}; + use arbitrary::Arbitrary; + use rand::Rng; + use serde::{Deserialize, Serialize}; + use serde_with::serde_as; + + #[test] + fn test_tx_bincode_roundtrip() { + #[serde_as] + #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)] + struct Data { + #[serde_as(as = "serde_bincode_compat::OpReceipt<'_>")] + receipt: OpReceipt, + } + + let mut bytes = 
[0u8; 1024]; + rand::rng().fill(bytes.as_mut_slice()); + let mut data = Data { + receipt: OpReceipt::arbitrary(&mut arbitrary::Unstructured::new(&bytes)).unwrap(), + }; + let success = data.receipt.as_receipt_mut().status.coerce_status(); + // // ensure we don't have an invalid poststate variant + data.receipt.as_receipt_mut().status = success.into(); + + let encoded = + bincode::serde::encode_to_vec(&data, bincode::config::standard()).unwrap(); + let (decoded, _): (Data, _) = + bincode::serde::decode_from_slice(&encoded, bincode::config::standard()).unwrap(); + assert_eq!(decoded, data); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_eips::eip2718::Encodable2718; + use alloy_primitives::{Bytes, address, b256, bytes, hex_literal::hex}; + use alloy_rlp::Encodable; + use reth_codecs::Compact; + + #[test] + fn test_decode_receipt() { + reth_codecs::test_utils::test_decode::(&hex!( + "c30328b52ffd23fc426961a00105007eb0042307705a97e503562eacf2b95060cce9de6de68386b6c155b73a9650021a49e2f8baad17f30faff5899d785c4c0873e45bc268bcf07560106424570d11f9a59e8f3db1efa4ceec680123712275f10d92c3411e1caaa11c7c5d591bc11487168e09934a9986848136da1b583babf3a7188e3aed007a1520f1cf4c1ca7d3482c6c28d37c298613c70a76940008816c4c95644579fd08471dc34732fd0f24" + )); + } + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn encode_legacy_receipt() { + let expected = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + let mut data = Vec::with_capacity(expected.length()); + let receipt = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt:: { + status: Eip658Value::Eip658(false), + cumulative_gas_used: 0x1, + logs: vec![Log::new_unchecked( + address!("0x0000000000000000000000000000000000000011"), + vec![ + b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), + b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), + ], + bytes!("0100ff"), + )], + }), + logs_bloom: [0; 256].into(), + }; + + receipt.encode(&mut data); + + // check that the rlp length equals the length of the expected rlp + assert_eq!(receipt.length(), expected.len()); + assert_eq!(data, expected); + } + + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] + fn decode_legacy_receipt() { + let data = hex!( + 
"f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff" + ); + + // EIP658Receipt + let expected = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt:: { + status: Eip658Value::Eip658(false), + cumulative_gas_used: 0x1, + logs: vec![Log::new_unchecked( + address!("0x0000000000000000000000000000000000000011"), + vec![ + b256!("0x000000000000000000000000000000000000000000000000000000000000dead"), + b256!("0x000000000000000000000000000000000000000000000000000000000000beef"), + ], + bytes!("0100ff"), + )], + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + } + + #[test] + fn decode_deposit_receipt_regolith_roundtrip() { + let data = hex!( + "b901107ef9010c0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf" + ); + + // Deposit Receipt 
(post-regolith) + let expected = ReceiptWithBloom { + receipt: OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt:: { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 46913, + logs: vec![], + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: None, + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::with_capacity(data.len()); + receipt.encode(&mut buf); + assert_eq!(buf, &data[..]); + } + + #[test] + fn decode_deposit_receipt_canyon_roundtrip() { + let data = hex!( + "b901117ef9010d0182b741b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000c0833d3bbf01" + ); + + // Deposit Receipt (post-canyon) + let expected = ReceiptWithBloom { + receipt: OpReceipt::Deposit(OpDepositReceipt { + inner: Receipt:: { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 46913, + logs: vec![], + }, + deposit_nonce: Some(4012991), + deposit_receipt_version: Some(1), + }), + logs_bloom: [0; 256].into(), + }; + + let receipt = ReceiptWithBloom::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + + let mut buf = Vec::with_capacity(data.len()); + expected.encode(&mut buf); + assert_eq!(buf, &data[..]); + } + + #[test] + fn gigantic_receipt() { + let receipt = OpReceipt::Legacy(Receipt:: { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 16747627, + logs: vec![ + Log::new_unchecked( + address!("0x4bf56695415f725e43c3e04354b604bcfb6dfb6e"), + vec![b256!( + 
"0xc69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" + )], + Bytes::from(vec![1; 0xffffff]), + ), + Log::new_unchecked( + address!("0xfaca325c86bf9c2d5b413cd7b90b209be92229c2"), + vec![b256!( + "0x8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" + )], + Bytes::from(vec![1; 0xffffff]), + ), + ], + }); + + let mut data = vec![]; + receipt.to_compact(&mut data); + let (decoded, _) = OpReceipt::from_compact(&data[..], data.len()); + assert_eq!(decoded, receipt); + } + + #[test] + fn test_encode_2718_length() { + let receipt = ReceiptWithBloom { + receipt: OpReceipt::Eip1559(Receipt:: { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 21000, + logs: vec![], + }), + logs_bloom: Bloom::default(), + }; + + let encoded = receipt.encoded_2718(); + assert_eq!( + encoded.len(), + receipt.encode_2718_len(), + "Encoded length should match the actual encoded data length" + ); + + // Test for legacy receipt as well + let legacy_receipt = ReceiptWithBloom { + receipt: OpReceipt::Legacy(Receipt:: { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 21000, + logs: vec![], + }), + logs_bloom: Bloom::default(), + }; + + let legacy_encoded = legacy_receipt.encoded_2718(); + assert_eq!( + legacy_encoded.len(), + legacy_receipt.encode_2718_len(), + "Encoded length for legacy receipt should match the actual encoded data length" + ); + } +} diff --git a/op-reth/crates/primitives/src/transaction/mod.rs b/rust/op-reth/crates/primitives/src/transaction/mod.rs similarity index 100% rename from op-reth/crates/primitives/src/transaction/mod.rs rename to rust/op-reth/crates/primitives/src/transaction/mod.rs diff --git a/op-reth/crates/primitives/src/transaction/signed.rs b/rust/op-reth/crates/primitives/src/transaction/signed.rs similarity index 95% rename from op-reth/crates/primitives/src/transaction/signed.rs rename to rust/op-reth/crates/primitives/src/transaction/signed.rs index 896e62b3045..e33cef2945d 100644 --- 
a/op-reth/crates/primitives/src/transaction/signed.rs +++ b/rust/op-reth/crates/primitives/src/transaction/signed.rs @@ -4,16 +4,16 @@ use crate::transaction::OpTransaction; use alloc::vec::Vec; use alloy_consensus::{ - transaction::{RlpEcdsaDecodableTx, RlpEcdsaEncodableTx, SignerRecoverable, TxHashRef}, Sealed, SignableTransaction, Signed, Transaction, TxEip1559, TxEip2930, TxEip7702, TxLegacy, Typed2718, + transaction::{RlpEcdsaDecodableTx, RlpEcdsaEncodableTx, SignerRecoverable, TxHashRef}, }; use alloy_eips::{ eip2718::{Decodable2718, Eip2718Error, Eip2718Result, Encodable2718, IsTyped2718}, eip2930::AccessList, eip7702::SignedAuthorization, }; -use alloy_primitives::{keccak256, Address, Bytes, Signature, TxHash, TxKind, Uint, B256}; +use alloy_primitives::{Address, B256, Bytes, Signature, TxHash, TxKind, Uint, keccak256}; use alloy_rlp::Header; use core::{ hash::{Hash, Hasher}, @@ -23,17 +23,17 @@ use core::{ use op_alloy_consensus::{OpPooledTransaction, OpTxEnvelope, OpTypedTransaction, TxDeposit}; #[cfg(any(test, feature = "reth-codec"))] use reth_primitives_traits::{ + InMemorySize, SignedTransaction, crypto::secp256k1::{recover_signer, recover_signer_unchecked}, sync::OnceLock, transaction::{error::TransactionConversionError, signed::RecoveryError}, - InMemorySize, SignedTransaction, }; /// Signed transaction. #[cfg_attr(any(test, feature = "reth-codec"), reth_codecs::add_arbitrary_tests(rlp))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[derive(Debug, Clone, Eq)] -pub struct OpTransactionSigned { +pub(super) struct OpTransactionSigned { /// Transaction hash #[cfg_attr(feature = "serde", serde(skip))] hash: OnceLock, @@ -52,7 +52,7 @@ impl Deref for OpTransactionSigned { impl OpTransactionSigned { /// Creates a new signed transaction from the given transaction, signature and hash. 
- pub fn new(transaction: OpTypedTransaction, signature: Signature, hash: B256) -> Self { + pub(super) fn new(transaction: OpTypedTransaction, signature: Signature, hash: B256) -> Self { Self { hash: hash.into(), signature, transaction } } @@ -69,35 +69,35 @@ impl OpTransactionSigned { /// Consumes the type and returns the transaction. #[inline] - pub fn into_transaction(self) -> OpTypedTransaction { + pub(super) fn into_transaction(self) -> OpTypedTransaction { self.transaction } /// Returns the transaction. #[inline] - pub const fn transaction(&self) -> &OpTypedTransaction { + pub(super) const fn transaction(&self) -> &OpTypedTransaction { &self.transaction } /// Splits the `OpTransactionSigned` into its transaction and signature. - pub fn split(self) -> (OpTypedTransaction, Signature) { + pub(super) fn split(self) -> (OpTypedTransaction, Signature) { (self.transaction, self.signature) } /// Creates a new signed transaction from the given transaction and signature without the hash. /// /// Note: this only calculates the hash on the first [`OpTransactionSigned::hash`] call. - pub fn new_unhashed(transaction: OpTypedTransaction, signature: Signature) -> Self { + pub(super) fn new_unhashed(transaction: OpTypedTransaction, signature: Signature) -> Self { Self { hash: Default::default(), signature, transaction } } /// Returns whether this transaction is a deposit. - pub const fn is_deposit(&self) -> bool { + pub(super) const fn is_deposit(&self) -> bool { matches!(self.transaction, OpTypedTransaction::Deposit(_)) } /// Splits the transaction into parts. - pub fn into_parts(self) -> (OpTypedTransaction, Signature, B256) { + pub(super) fn into_parts(self) -> (OpTypedTransaction, Signature, B256) { let hash = *self.hash.get_or_init(|| self.recalculate_hash()); (self.transaction, self.signature, hash) } @@ -108,7 +108,7 @@ impl SignerRecoverable for OpTransactionSigned { // Optimism's Deposit transaction does not have a signature. Directly return the // `from` address. 
if let OpTypedTransaction::Deposit(TxDeposit { from, .. }) = self.transaction { - return Ok(from) + return Ok(from); } let Self { transaction, signature, .. } = self; @@ -120,7 +120,7 @@ impl SignerRecoverable for OpTransactionSigned { // Optimism's Deposit transaction does not have a signature. Directly return the // `from` address. if let OpTypedTransaction::Deposit(TxDeposit { from, .. }) = &self.transaction { - return Ok(*from) + return Ok(*from); } let Self { transaction, signature, .. } = self; @@ -241,11 +241,7 @@ impl alloy_rlp::Decodable for OpTransactionSigned { impl Encodable2718 for OpTransactionSigned { fn type_flag(&self) -> Option { - if Typed2718::is_legacy(self) { - None - } else { - Some(self.ty()) - } + if Typed2718::is_legacy(self) { None } else { Some(self.ty()) } } fn encode_2718_len(&self) -> usize { @@ -507,7 +503,7 @@ impl<'a> arbitrary::Arbitrary<'a> for OpTransactionSigned { let mut transaction = OpTypedTransaction::arbitrary(u)?; let secp = secp256k1::Secp256k1::new(); - let key_pair = secp256k1::Keypair::new(&secp, &mut rand_08::thread_rng()); + let key_pair = secp256k1::Keypair::new(&secp, &mut rand::rng()); let signature = reth_primitives_traits::crypto::secp256k1::sign_message( B256::from_slice(&key_pair.secret_bytes()[..]), signature_hash(&transaction), diff --git a/rust/op-reth/crates/primitives/src/transaction/tx_type.rs b/rust/op-reth/crates/primitives/src/transaction/tx_type.rs new file mode 100644 index 00000000000..bea52f338ed --- /dev/null +++ b/rust/op-reth/crates/primitives/src/transaction/tx_type.rs @@ -0,0 +1,47 @@ +//! Optimism transaction type. 
+ +#[cfg(test)] +mod tests { + use alloy_consensus::constants::EIP7702_TX_TYPE_ID; + use op_alloy_consensus::{DEPOSIT_TX_TYPE_ID, OpTxType}; + use reth_codecs::{Compact, txtype::*}; + use rstest::rstest; + + #[rstest] + #[case(OpTxType::Legacy, COMPACT_IDENTIFIER_LEGACY, vec![])] + #[case(OpTxType::Eip2930, COMPACT_IDENTIFIER_EIP2930, vec![])] + #[case(OpTxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] + #[case(OpTxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] + #[case(OpTxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID])] + fn test_txtype_to_compact( + #[case] tx_type: OpTxType, + #[case] expected_identifier: usize, + #[case] expected_buf: Vec, + ) { + let mut buf = vec![]; + let identifier = tx_type.to_compact(&mut buf); + + assert_eq!( + identifier, expected_identifier, + "Unexpected identifier for OpTxType {tx_type:?}", + ); + assert_eq!(buf, expected_buf, "Unexpected buffer for OpTxType {tx_type:?}",); + } + + #[rstest] + #[case(OpTxType::Legacy, COMPACT_IDENTIFIER_LEGACY, vec![])] + #[case(OpTxType::Eip2930, COMPACT_IDENTIFIER_EIP2930, vec![])] + #[case(OpTxType::Eip1559, COMPACT_IDENTIFIER_EIP1559, vec![])] + #[case(OpTxType::Eip7702, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![EIP7702_TX_TYPE_ID])] + #[case(OpTxType::Deposit, COMPACT_EXTENDED_IDENTIFIER_FLAG, vec![DEPOSIT_TX_TYPE_ID])] + fn test_txtype_from_compact( + #[case] expected_type: OpTxType, + #[case] identifier: usize, + #[case] buf: Vec, + ) { + let (actual_type, remaining_buf) = OpTxType::from_compact(&buf, identifier); + + assert_eq!(actual_type, expected_type, "Unexpected TxType for identifier {identifier}"); + assert!(remaining_buf.is_empty(), "Buffer not fully consumed for identifier {identifier}"); + } +} diff --git a/rust/op-reth/crates/reth/Cargo.toml b/rust/op-reth/crates/reth/Cargo.toml new file mode 100644 index 00000000000..f8486459355 --- /dev/null +++ b/rust/op-reth/crates/reth/Cargo.toml @@ -0,0 +1,162 @@ +[package] +name = 
"reth-op" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +# reth +reth-primitives-traits = { workspace = true, features = ["op"] } +reth-chainspec.workspace = true +reth-network = { workspace = true, optional = true } +reth-network-api = { workspace = true, optional = true } +reth-eth-wire = { workspace = true, optional = true } +reth-provider = { workspace = true, optional = true } +reth-db = { workspace = true, optional = true, features = ["mdbx", "op"] } +reth-codecs = { workspace = true, optional = true } +reth-storage-api = { workspace = true, optional = true } +reth-node-api = { workspace = true, optional = true } +reth-node-core = { workspace = true, optional = true } +reth-consensus = { workspace = true, optional = true } +reth-consensus-common = { workspace = true, optional = true } +reth-evm = { workspace = true, optional = true } +reth-revm = { workspace = true, optional = true } +reth-rpc = { workspace = true, optional = true } +reth-rpc-api = { workspace = true, optional = true } +reth-rpc-eth-types = { workspace = true, optional = true } +reth-rpc-builder = { workspace = true, optional = true } +reth-exex = { workspace = true, optional = true } +reth-transaction-pool = { workspace = true, optional = true } +reth-trie = { workspace = true, optional = true } +reth-trie-db = { workspace = true, optional = true } +reth-node-builder = { workspace = true, optional = true } +reth-tasks = { workspace = true, optional = true } +reth-cli-util = { workspace = true, optional = true } +reth-engine-local = { workspace = true, optional = true } + +# reth-op +alloy-primitives.workspace = true +reth-optimism-primitives.workspace = true +reth-optimism-chainspec.workspace = true +reth-optimism-consensus = { workspace = true, optional = true } +reth-optimism-evm = { 
workspace = true, optional = true } +reth-optimism-node = { workspace = true, optional = true } +reth-optimism-rpc = { workspace = true, optional = true } +reth-optimism-cli = { workspace = true, optional = true } + +[features] +default = ["std"] +std = [ + "reth-chainspec/std", + "reth-consensus?/std", + "reth-consensus-common?/std", + "reth-optimism-chainspec/std", + "reth-optimism-consensus?/std", + "reth-optimism-evm?/std", + "reth-optimism-primitives/std", + "reth-primitives-traits/std", + "reth-storage-api?/std", + "reth-evm?/std", + "reth-revm?/std", + "alloy-primitives/std" +] +arbitrary = [ + "std", + "reth-chainspec/arbitrary", + "reth-optimism-primitives/arbitrary", + "reth-primitives-traits/arbitrary", + "reth-db?/arbitrary", + "reth-transaction-pool?/arbitrary", + "reth-eth-wire?/arbitrary", + "reth-codecs?/arbitrary", + "alloy-primitives/arbitrary" +] +keccak-cache-global = [ + "reth-optimism-node?/keccak-cache-global", + "reth-node-core?/keccak-cache-global", + "reth-optimism-cli?/keccak-cache-global", + "alloy-primitives/keccak-cache-global" +] +test-utils = [ + "reth-chainspec/test-utils", + "reth-consensus?/test-utils", + "reth-db?/test-utils", + "reth-evm?/test-utils", + "reth-revm?/test-utils", + "reth-network?/test-utils", + "reth-optimism-node?/test-utils", + "reth-primitives-traits/test-utils", + "reth-provider?/test-utils", + "reth-trie?/test-utils", + "reth-transaction-pool?/test-utils", + "reth-node-builder?/test-utils", + "reth-trie-db?/test-utils", + "reth-codecs?/test-utils", +] + +full = ["consensus", "evm", "node", "provider", "rpc", "trie", "pool", "network"] + +alloy-compat = ["reth-optimism-primitives/alloy-compat"] +cli = ["dep:reth-optimism-cli", "dep:reth-cli-util"] +consensus = [ + "dep:reth-consensus", + "dep:reth-consensus-common", + "dep:reth-optimism-consensus", +] +evm = ["dep:reth-evm", "dep:reth-optimism-evm", "dep:reth-revm"] +exex = ["provider", "dep:reth-exex"] +node-api = ["dep:reth-node-api", "dep:reth-node-core"] 
+node = [ + "provider", + "consensus", + "evm", + "network", + "node-api", + "dep:reth-optimism-node", + "dep:reth-node-builder", + "dep:reth-engine-local", + "rpc", + "trie-db", + "pool", +] +rpc = [ + "tasks", + "dep:reth-rpc", + "dep:reth-rpc-builder", + "dep:reth-rpc-api", + "dep:reth-rpc-eth-types", + "dep:reth-optimism-rpc", +] +tasks = ["dep:reth-tasks"] +jemalloc = [ + "reth-cli-util?/jemalloc", + "reth-node-core?/jemalloc", + "reth-optimism-cli?/jemalloc", +] +js-tracer = [ + "rpc", + "reth-rpc/js-tracer", + "reth-node-builder?/js-tracer", + "reth-optimism-node?/js-tracer", + "reth-rpc-eth-types?/js-tracer", +] +network = ["dep:reth-network", "tasks", "dep:reth-network-api", "dep:reth-eth-wire"] +otlp = [ + "reth-node-core?/otlp", + "reth-optimism-cli?/otlp", +] +portable = [ + "reth-optimism-evm?/portable", + "reth-revm?/portable", +] +provider = ["storage-api", "tasks", "dep:reth-provider", "dep:reth-db", "dep:reth-codecs"] +pool = ["dep:reth-transaction-pool"] +storage-api = ["dep:reth-storage-api"] +trie = ["dep:reth-trie", "alloy-primitives/rayon"] +trie-db = ["trie", "dep:reth-trie-db"] diff --git a/op-reth/crates/reth/src/lib.rs b/rust/op-reth/crates/reth/src/lib.rs similarity index 100% rename from op-reth/crates/reth/src/lib.rs rename to rust/op-reth/crates/reth/src/lib.rs diff --git a/rust/op-reth/crates/rpc/Cargo.toml b/rust/op-reth/crates/rpc/Cargo.toml new file mode 100644 index 00000000000..01061ae3667 --- /dev/null +++ b/rust/op-reth/crates/rpc/Cargo.toml @@ -0,0 +1,92 @@ +[package] +name = "reth-optimism-rpc" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "Ethereum RPC implementation for optimism." 
+ +[lints] +workspace = true + +[dependencies] +# reth +reth-evm.workspace = true +reth-primitives-traits = { workspace = true, features = ["op"] } +reth-storage-api.workspace = true +reth-rpc-eth-api = { workspace = true, features = ["op"] } +reth-rpc-eth-types.workspace = true +reth-rpc-server-types.workspace = true +reth-tasks = { workspace = true, features = ["rayon"] } +reth-transaction-pool.workspace = true +reth-rpc.workspace = true +reth-rpc-api.workspace = true +reth-node-api.workspace = true +reth-node-builder.workspace = true +reth-chainspec.workspace = true +reth-chain-state.workspace = true +reth-rpc-engine-api.workspace = true + +# op-reth +reth-optimism-evm.workspace = true +reth-optimism-flashblocks.workspace = true +reth-optimism-payload-builder.workspace = true +reth-optimism-txpool.workspace = true +# TODO remove node-builder import +reth-optimism-primitives = { workspace = true, features = ["reth-codec", "serde-bincode-compat", "serde"] } +reth-optimism-forks.workspace = true + +# ethereum +alloy-eips.workspace = true +alloy-json-rpc.workspace = true +alloy-primitives.workspace = true +alloy-rpc-client.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-rpc-types-debug.workspace = true +alloy-transport.workspace = true +alloy-transport-http.workspace = true +alloy-consensus.workspace = true +alloy-rpc-types-engine.workspace = true +op-alloy-network.workspace = true +op-alloy-rpc-types.workspace = true +op-alloy-rpc-types-engine.workspace = true +op-alloy-rpc-jsonrpsee.workspace = true +op-alloy-consensus.workspace = true +revm.workspace = true +op-revm.workspace = true + +# async +tokio.workspace = true +futures.workspace = true +tokio-stream.workspace = true +async-trait.workspace = true +tower.workspace = true + +# rpc +jsonrpsee-core.workspace = true +jsonrpsee-types.workspace = true +jsonrpsee.workspace = true +serde_json.workspace = true + +# misc +eyre.workspace = true +thiserror.workspace = true +tracing.workspace = true 
+derive_more = { workspace = true, features = ["constructor"] } + +# metrics +reth-metrics.workspace = true +metrics.workspace = true + +[dev-dependencies] +reth-optimism-chainspec.workspace = true +alloy-op-hardforks.workspace = true + +[features] +client = [ + "jsonrpsee/client", + "jsonrpsee/async-client", + "reth-rpc-eth-api/client", +] diff --git a/rust/op-reth/crates/rpc/src/engine.rs b/rust/op-reth/crates/rpc/src/engine.rs new file mode 100644 index 00000000000..f9e67fdfd91 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/engine.rs @@ -0,0 +1,412 @@ +//! Implements the Optimism engine API RPC methods. + +use alloy_eips::eip7685::Requests; +use alloy_primitives::{B64, B256, BlockHash, U64}; +use alloy_rpc_types_engine::{ + ClientVersionV1, ExecutionPayloadBodiesV1, ExecutionPayloadInputV2, ExecutionPayloadV3, + ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, +}; +use derive_more::Constructor; +use jsonrpsee::proc_macros::rpc; +use jsonrpsee_core::{RpcResult, server::RpcModule}; +use op_alloy_rpc_types_engine::{ + OpExecutionData, OpExecutionPayloadV4, ProtocolVersion, ProtocolVersionFormatV0, + SuperchainSignal, +}; +use reth_chainspec::EthereumHardforks; +use reth_node_api::{EngineApiValidator, EngineTypes}; +use reth_rpc_api::IntoEngineApiRpcModule; +use reth_rpc_engine_api::EngineApi; +use reth_storage_api::{BlockReader, HeaderProvider, StateProviderFactory}; +use reth_transaction_pool::TransactionPool; +use tracing::{debug, info, trace}; + +/// The list of all supported Engine capabilities available over the engine endpoint. 
+/// +/// Spec: +pub const OP_ENGINE_CAPABILITIES: &[&str] = &[ + "engine_forkchoiceUpdatedV1", + "engine_forkchoiceUpdatedV2", + "engine_forkchoiceUpdatedV3", + "engine_getClientVersionV1", + "engine_getPayloadV2", + "engine_getPayloadV3", + "engine_getPayloadV4", + "engine_newPayloadV2", + "engine_newPayloadV3", + "engine_newPayloadV4", + "engine_getPayloadBodiesByHashV1", + "engine_getPayloadBodiesByRangeV1", + "engine_signalSuperchainV1", +]; + +/// OP Stack protocol version +/// See also: +pub const OP_STACK_SUPPORT: ProtocolVersion = ProtocolVersion::V0(ProtocolVersionFormatV0 { + build: B64::ZERO, + major: 9, + minor: 0, + patch: 0, + pre_release: 0, +}); + +/// Extension trait that gives access to Optimism engine API RPC methods. +/// +/// Note: +/// > The provider should use a JWT authentication layer. +/// +/// This follows the Optimism specs that can be found at: +/// +#[cfg_attr(not(feature = "client"), rpc(server, namespace = "engine"), server_bounds(Engine::PayloadAttributes: jsonrpsee::core::DeserializeOwned))] +#[cfg_attr(feature = "client", rpc(server, client, namespace = "engine", client_bounds(Engine::PayloadAttributes: jsonrpsee::core::Serialize + Clone), server_bounds(Engine::PayloadAttributes: jsonrpsee::core::DeserializeOwned)))] +pub trait OpEngineApi { + /// Sends the given payload to the execution layer client, as specified for the Shanghai fork. + /// + /// See also + /// + /// No modifications needed for OP compatibility. + #[method(name = "newPayloadV2")] + async fn new_payload_v2(&self, payload: ExecutionPayloadInputV2) -> RpcResult; + + /// Sends the given payload to the execution layer client, as specified for the Cancun fork. + /// + /// See also + /// + /// OP modifications: + /// - expected versioned hashes MUST be an empty array: therefore the `versioned_hashes` + /// parameter is removed. + /// - parent beacon block root MUST be the parent beacon block root from the L1 origin block of + /// the L2 block. 
+ /// - blob versioned hashes MUST be empty list. + #[method(name = "newPayloadV3")] + async fn new_payload_v3( + &self, + payload: ExecutionPayloadV3, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + ) -> RpcResult; + + /// Sends the given payload to the execution layer client, as specified for the Prague fork. + /// + /// See also + /// + /// - blob versioned hashes MUST be empty list. + /// - execution layer requests MUST be empty list. + #[method(name = "newPayloadV4")] + async fn new_payload_v4( + &self, + payload: OpExecutionPayloadV4, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + execution_requests: Requests, + ) -> RpcResult; + + /// See also + /// + /// This exists because it is used by op-node: + /// + /// Caution: This should not accept the `withdrawals` field in the payload attributes. + #[method(name = "forkchoiceUpdatedV1")] + async fn fork_choice_updated_v1( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult; + + /// Updates the execution layer client with the given fork choice, as specified for the Shanghai + /// fork. + /// + /// Caution: This should not accept the `parentBeaconBlockRoot` field in the payload attributes. + /// + /// See also + /// + /// OP modifications: + /// - The `payload_attributes` parameter is extended with the [`EngineTypes::PayloadAttributes`](EngineTypes) type as described in + #[method(name = "forkchoiceUpdatedV2")] + async fn fork_choice_updated_v2( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult; + + /// Updates the execution layer client with the given fork choice, as specified for the Cancun + /// fork. 
+ /// + /// See also + /// + /// OP modifications: + /// - Must be called with an Ecotone payload + /// - Attributes must contain the parent beacon block root field + /// - The `payload_attributes` parameter is extended with the [`EngineTypes::PayloadAttributes`](EngineTypes) type as described in + #[method(name = "forkchoiceUpdatedV3")] + async fn fork_choice_updated_v3( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult; + + /// Retrieves an execution payload from a previously started build process, as specified for the + /// Shanghai fork. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// No modifications needed for OP compatibility. + #[method(name = "getPayloadV2")] + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> RpcResult; + + /// Retrieves an execution payload from a previously started build process, as specified for the + /// Cancun fork. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// OP modifications: + /// - the response type is extended to [`EngineTypes::ExecutionPayloadEnvelopeV3`]. + #[method(name = "getPayloadV3")] + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> RpcResult; + + /// Returns the most recent version of the payload that is available in the corresponding + /// payload build process at the time of receiving this call. + /// + /// See also + /// + /// Note: + /// > Provider software MAY stop the corresponding build process after serving this call. + /// + /// OP modifications: + /// - the response type is extended to [`EngineTypes::ExecutionPayloadEnvelopeV4`]. + #[method(name = "getPayloadV4")] + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> RpcResult; + + /// Returns the execution payload bodies by the given hash. 
+ /// + /// See also + #[method(name = "getPayloadBodiesByHashV1")] + async fn get_payload_bodies_by_hash_v1( + &self, + block_hashes: Vec, + ) -> RpcResult; + + /// Returns the execution payload bodies by the range starting at `start`, containing `count` + /// blocks. + /// + /// WARNING: This method is associated with the `BeaconBlocksByRange` message in the consensus + /// layer p2p specification, meaning the input should be treated as untrusted or potentially + /// adversarial. + /// + /// Implementers should take care when acting on the input to this method, specifically + /// ensuring that the range is limited properly, and that the range boundaries are computed + /// correctly and without panics. + /// + /// See also + #[method(name = "getPayloadBodiesByRangeV1")] + async fn get_payload_bodies_by_range_v1( + &self, + start: U64, + count: U64, + ) -> RpcResult; + + /// Signals superchain information to the Engine. + /// Returns the latest supported OP-Stack protocol version of the execution engine. + /// See also + #[method(name = "engine_signalSuperchainV1")] + async fn signal_superchain_v1(&self, _signal: SuperchainSignal) -> RpcResult; + + /// Returns the execution client version information. + /// + /// Note: + /// > The `client_version` parameter identifies the consensus client. + /// + /// See also + #[method(name = "getClientVersionV1")] + async fn get_client_version_v1( + &self, + client_version: ClientVersionV1, + ) -> RpcResult>; + + /// Returns the list of Engine API methods supported by the execution layer client software. + /// + /// See also + #[method(name = "exchangeCapabilities")] + async fn exchange_capabilities(&self, capabilities: Vec) -> RpcResult>; +} + +/// The Engine API implementation that grants the Consensus layer access to data and +/// functions in the Execution layer that are crucial for the consensus process. 
+#[derive(Debug, Constructor)] +pub struct OpEngineApi { + inner: EngineApi, +} + +impl Clone + for OpEngineApi +where + PayloadT: EngineTypes, +{ + fn clone(&self) -> Self { + Self { inner: self.inner.clone() } + } +} + +#[async_trait::async_trait] +impl OpEngineApiServer + for OpEngineApi +where + Provider: HeaderProvider + BlockReader + StateProviderFactory + 'static, + EngineT: EngineTypes, + Pool: TransactionPool + 'static, + Validator: EngineApiValidator, + ChainSpec: EthereumHardforks + Send + Sync + 'static, +{ + async fn new_payload_v2(&self, payload: ExecutionPayloadInputV2) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_newPayloadV2"); + let payload = OpExecutionData::v2(payload); + Ok(self.inner.new_payload_v2_metered(payload).await?) + } + + async fn new_payload_v3( + &self, + payload: ExecutionPayloadV3, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_newPayloadV3"); + let payload = OpExecutionData::v3(payload, versioned_hashes, parent_beacon_block_root); + + Ok(self.inner.new_payload_v3_metered(payload).await?) + } + + async fn new_payload_v4( + &self, + payload: OpExecutionPayloadV4, + versioned_hashes: Vec, + parent_beacon_block_root: B256, + execution_requests: Requests, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_newPayloadV4"); + let payload = OpExecutionData::v4( + payload, + versioned_hashes, + parent_beacon_block_root, + execution_requests, + ); + + Ok(self.inner.new_payload_v4_metered(payload).await?) + } + + async fn fork_choice_updated_v1( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult { + Ok(self.inner.fork_choice_updated_v1_metered(fork_choice_state, payload_attributes).await?) 
+ } + + async fn fork_choice_updated_v2( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_forkchoiceUpdatedV2"); + Ok(self.inner.fork_choice_updated_v2_metered(fork_choice_state, payload_attributes).await?) + } + + async fn fork_choice_updated_v3( + &self, + fork_choice_state: ForkchoiceState, + payload_attributes: Option, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_forkchoiceUpdatedV3"); + Ok(self.inner.fork_choice_updated_v3_metered(fork_choice_state, payload_attributes).await?) + } + + async fn get_payload_v2( + &self, + payload_id: PayloadId, + ) -> RpcResult { + debug!(target: "rpc::engine", id = %payload_id, "Serving engine_getPayloadV2"); + Ok(self.inner.get_payload_v2_metered(payload_id).await?) + } + + async fn get_payload_v3( + &self, + payload_id: PayloadId, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_getPayloadV3"); + Ok(self.inner.get_payload_v3_metered(payload_id).await?) + } + + async fn get_payload_v4( + &self, + payload_id: PayloadId, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_getPayloadV4"); + Ok(self.inner.get_payload_v4_metered(payload_id).await?) + } + + async fn get_payload_bodies_by_hash_v1( + &self, + block_hashes: Vec, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByHashV1"); + Ok(self.inner.get_payload_bodies_by_hash_v1_metered(block_hashes).await?) + } + + async fn get_payload_bodies_by_range_v1( + &self, + start: U64, + count: U64, + ) -> RpcResult { + trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByRangeV1"); + Ok(self.inner.get_payload_bodies_by_range_v1_metered(start.to(), count.to()).await?) 
+ } + + async fn signal_superchain_v1(&self, signal: SuperchainSignal) -> RpcResult { + trace!(target: "rpc::engine", "Serving signal_superchain_v1"); + info!( + target: "rpc::engine", + "Received superchain version signal local={:?} required={:?} recommended={:?}", + OP_STACK_SUPPORT, + signal.required, + signal.recommended + ); + Ok(OP_STACK_SUPPORT) + } + + async fn get_client_version_v1( + &self, + client: ClientVersionV1, + ) -> RpcResult> { + trace!(target: "rpc::engine", "Serving engine_getClientVersionV1"); + Ok(self.inner.get_client_version_v1(client)?) + } + + async fn exchange_capabilities(&self, _capabilities: Vec) -> RpcResult> { + Ok(self.inner.capabilities().list()) + } +} + +impl IntoEngineApiRpcModule + for OpEngineApi +where + EngineT: EngineTypes, + Self: OpEngineApiServer, +{ + fn into_rpc_module(self) -> RpcModule<()> { + self.into_rpc().remove_context() + } +} diff --git a/rust/op-reth/crates/rpc/src/error.rs b/rust/op-reth/crates/rpc/src/error.rs new file mode 100644 index 00000000000..a9b02314235 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/error.rs @@ -0,0 +1,235 @@ +//! RPC errors specific to OP. + +use alloy_json_rpc::ErrorPayload; +use alloy_primitives::Bytes; +use alloy_rpc_types_eth::{BlockError, error::EthRpcErrorCode}; +use alloy_transport::{RpcError, TransportErrorKind}; +use jsonrpsee_types::error::{INTERNAL_ERROR_CODE, INVALID_PARAMS_CODE}; +use op_revm::{OpHaltReason, OpTransactionError}; +use reth_evm::execute::ProviderError; +use reth_optimism_evm::OpBlockExecutionError; +use reth_rpc_eth_api::{AsEthApiError, EthTxEnvError, TransactionConversionError}; +use reth_rpc_eth_types::{ + EthApiError, + error::api::{FromEvmHalt, FromRevert}, +}; +use reth_rpc_server_types::result::{internal_rpc_err, rpc_err}; +use revm::context_interface::result::{EVMError, InvalidTransaction}; +use std::{convert::Infallible, fmt::Display}; + +/// Optimism specific errors, that extend [`EthApiError`]. 
+#[derive(Debug, thiserror::Error)] +pub enum OpEthApiError { + /// L1 ethereum error. + #[error(transparent)] + Eth(#[from] EthApiError), + /// EVM error originating from invalid optimism data. + #[error(transparent)] + Evm(#[from] OpBlockExecutionError), + /// Thrown when calculating L1 gas fee. + #[error("failed to calculate l1 gas fee")] + L1BlockFeeError, + /// Thrown when calculating L1 gas used + #[error("failed to calculate l1 gas used")] + L1BlockGasError, + /// Wrapper for [`revm_primitives::InvalidTransaction`](InvalidTransaction). + #[error(transparent)] + InvalidTransaction(#[from] OpInvalidTransactionError), + /// Sequencer client error. + #[error(transparent)] + Sequencer(#[from] SequencerClientError), +} + +impl AsEthApiError for OpEthApiError { + fn as_err(&self) -> Option<&EthApiError> { + match self { + Self::Eth(err) => Some(err), + _ => None, + } + } +} + +impl From for jsonrpsee_types::error::ErrorObject<'static> { + fn from(err: OpEthApiError) -> Self { + match err { + OpEthApiError::Eth(err) => err.into(), + OpEthApiError::InvalidTransaction(err) => err.into(), + OpEthApiError::Evm(_) | + OpEthApiError::L1BlockFeeError | + OpEthApiError::L1BlockGasError => internal_rpc_err(err.to_string()), + OpEthApiError::Sequencer(err) => err.into(), + } + } +} + +/// Optimism specific invalid transaction errors +#[derive(thiserror::Error, Debug)] +pub enum OpInvalidTransactionError { + /// A deposit transaction was submitted as a system transaction post-regolith. + #[error("no system transactions allowed after regolith")] + DepositSystemTxPostRegolith, + /// A deposit transaction halted post-regolith + #[error("deposit transaction halted after regolith")] + HaltedDepositPostRegolith, + /// The encoded transaction was missing during evm execution. + #[error("missing enveloped transaction bytes")] + MissingEnvelopedTx, + /// Transaction conditional errors. 
+ #[error(transparent)] + TxConditionalErr(#[from] TxConditionalErr), +} + +impl From for jsonrpsee_types::error::ErrorObject<'static> { + fn from(err: OpInvalidTransactionError) -> Self { + match err { + OpInvalidTransactionError::DepositSystemTxPostRegolith | + OpInvalidTransactionError::HaltedDepositPostRegolith | + OpInvalidTransactionError::MissingEnvelopedTx => { + rpc_err(EthRpcErrorCode::TransactionRejected.code(), err.to_string(), None) + } + OpInvalidTransactionError::TxConditionalErr(_) => err.into(), + } + } +} + +impl TryFrom for OpInvalidTransactionError { + type Error = InvalidTransaction; + + fn try_from(err: OpTransactionError) -> Result { + match err { + OpTransactionError::DepositSystemTxPostRegolith => { + Ok(Self::DepositSystemTxPostRegolith) + } + OpTransactionError::HaltedDepositPostRegolith => Ok(Self::HaltedDepositPostRegolith), + OpTransactionError::MissingEnvelopedTx => Ok(Self::MissingEnvelopedTx), + OpTransactionError::Base(err) => Err(err), + } + } +} + +/// Transaction conditional related errors. +#[derive(Debug, thiserror::Error)] +pub enum TxConditionalErr { + /// Transaction conditional cost exceeded maximum allowed + #[error("conditional cost exceeded maximum allowed")] + ConditionalCostExceeded, + /// Invalid conditional parameters + #[error("invalid conditional parameters")] + InvalidCondition, + /// Internal error + #[error("internal error: {0}")] + Internal(String), + /// Thrown if the conditional's storage value doesn't match the latest state's. + #[error("storage value mismatch")] + StorageValueMismatch, + /// Thrown when the conditional's storage root doesn't match the latest state's root. 
+ #[error("storage root mismatch")] + StorageRootMismatch, +} + +impl TxConditionalErr { + /// Creates an internal error variant + pub fn internal(err: E) -> Self { + Self::Internal(err.to_string()) + } +} + +impl From for jsonrpsee_types::error::ErrorObject<'static> { + fn from(err: TxConditionalErr) -> Self { + let code = match &err { + TxConditionalErr::Internal(_) => INTERNAL_ERROR_CODE, + _ => INVALID_PARAMS_CODE, + }; + + jsonrpsee_types::error::ErrorObject::owned(code, err.to_string(), None::) + } +} + +/// Error type when interacting with the Sequencer +#[derive(Debug, thiserror::Error)] +pub enum SequencerClientError { + /// Wrapper around an [`RpcError`]. + #[error(transparent)] + HttpError(#[from] RpcError), +} + +impl From for jsonrpsee_types::error::ErrorObject<'static> { + fn from(err: SequencerClientError) -> Self { + match err { + SequencerClientError::HttpError(RpcError::ErrorResp(ErrorPayload { + code, + message, + data, + })) => jsonrpsee_types::error::ErrorObject::owned(code as i32, message, data), + err => jsonrpsee_types::error::ErrorObject::owned( + INTERNAL_ERROR_CODE, + err.to_string(), + None::, + ), + } + } +} + +impl From> for OpEthApiError +where + T: Into, +{ + fn from(error: EVMError) -> Self { + match error { + EVMError::Transaction(err) => match err.try_into() { + Ok(err) => Self::InvalidTransaction(err), + Err(err) => Self::Eth(EthApiError::InvalidTransaction(err.into())), + }, + EVMError::Database(err) => Self::Eth(err.into()), + EVMError::Header(err) => Self::Eth(err.into()), + EVMError::Custom(err) => Self::Eth(EthApiError::EvmCustom(err)), + } + } +} + +impl FromEvmHalt for OpEthApiError { + fn from_evm_halt(halt: OpHaltReason, gas_limit: u64) -> Self { + match halt { + OpHaltReason::FailedDeposit => { + OpInvalidTransactionError::HaltedDepositPostRegolith.into() + } + OpHaltReason::Base(halt) => EthApiError::from_evm_halt(halt, gas_limit).into(), + } + } +} + +impl FromRevert for OpEthApiError { + fn from_revert(output: Bytes) 
-> Self { + Self::Eth(EthApiError::from_revert(output)) + } +} + +impl From for OpEthApiError { + fn from(value: TransactionConversionError) -> Self { + Self::Eth(EthApiError::from(value)) + } +} + +impl From for OpEthApiError { + fn from(value: EthTxEnvError) -> Self { + Self::Eth(EthApiError::from(value)) + } +} + +impl From for OpEthApiError { + fn from(value: ProviderError) -> Self { + Self::Eth(EthApiError::from(value)) + } +} + +impl From for OpEthApiError { + fn from(value: BlockError) -> Self { + Self::Eth(EthApiError::from(value)) + } +} + +impl From for OpEthApiError { + fn from(value: Infallible) -> Self { + match value {} + } +} diff --git a/rust/op-reth/crates/rpc/src/eth/block.rs b/rust/op-reth/crates/rpc/src/eth/block.rs new file mode 100644 index 00000000000..2c89cfc0ec6 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/eth/block.rs @@ -0,0 +1,23 @@ +//! Loads and formats OP block RPC response. + +use crate::{OpEthApi, OpEthApiError, eth::RpcNodeCore}; +use reth_rpc_eth_api::{ + FromEvmError, RpcConvert, + helpers::{EthBlocks, LoadBlock}, +}; + +impl EthBlocks for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ +} + +impl LoadBlock for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ +} diff --git a/op-reth/crates/rpc/src/eth/call.rs b/rust/op-reth/crates/rpc/src/eth/call.rs similarity index 89% rename from op-reth/crates/rpc/src/eth/call.rs rename to rust/op-reth/crates/rpc/src/eth/call.rs index db96bda83f3..af97668659d 100644 --- a/op-reth/crates/rpc/src/eth/call.rs +++ b/rust/op-reth/crates/rpc/src/eth/call.rs @@ -1,7 +1,7 @@ -use crate::{eth::RpcNodeCore, OpEthApi, OpEthApiError}; +use crate::{OpEthApi, OpEthApiError, eth::RpcNodeCore}; use reth_rpc_eth_api::{ - helpers::{estimate::EstimateCall, Call, EthCall}, FromEvmError, RpcConvert, + helpers::{Call, EthCall, estimate::EstimateCall}, }; impl EthCall for OpEthApi diff --git a/op-reth/crates/rpc/src/eth/ext.rs 
b/rust/op-reth/crates/rpc/src/eth/ext.rs similarity index 98% rename from op-reth/crates/rpc/src/eth/ext.rs rename to rust/op-reth/crates/rpc/src/eth/ext.rs index 6c4e1bc7cf1..f4ed1923793 100644 --- a/op-reth/crates/rpc/src/eth/ext.rs +++ b/rust/op-reth/crates/rpc/src/eth/ext.rs @@ -1,9 +1,9 @@ //! Eth API extension. -use crate::{error::TxConditionalErr, OpEthApiError, SequencerClient}; +use crate::{OpEthApiError, SequencerClient, error::TxConditionalErr}; use alloy_consensus::BlockHeader; use alloy_eips::BlockNumberOrTag; -use alloy_primitives::{Bytes, StorageKey, B256, U256}; +use alloy_primitives::{B256, Bytes, StorageKey, U256}; use alloy_rpc_types_eth::erc4337::{AccountStorage, TransactionConditional}; use jsonrpsee_core::RpcResult; use reth_optimism_txpool::conditional::MaybeConditionalTransaction; diff --git a/rust/op-reth/crates/rpc/src/eth/mod.rs b/rust/op-reth/crates/rpc/src/eth/mod.rs new file mode 100644 index 00000000000..52eff8d26ad --- /dev/null +++ b/rust/op-reth/crates/rpc/src/eth/mod.rs @@ -0,0 +1,623 @@ +//! OP-Reth `eth_` endpoint implementation. 
+ +pub mod ext; +pub mod receipt; +pub mod transaction; + +mod block; +mod call; +mod pending_block; + +use crate::{ + OpEthApiError, SequencerClient, + eth::{receipt::OpReceiptConverter, transaction::OpTxInfoMapper}, +}; +use alloy_consensus::BlockHeader; +use alloy_eips::BlockNumHash; +use alloy_primitives::{B256, U256}; +use alloy_rpc_types_eth::{Filter, Log}; +use alloy_transport_http::reqwest::Url; +use eyre::WrapErr; +use futures::StreamExt; +use op_alloy_network::Optimism; +use op_alloy_rpc_types_engine::OpFlashblockPayloadBase; +pub use receipt::{OpReceiptBuilder, OpReceiptFieldsBuilder}; +use reth_chainspec::{EthereumHardforks, Hardforks}; +use reth_evm::ConfigureEvm; +use reth_node_api::{FullNodeComponents, FullNodeTypes, HeaderTy, NodeTypes}; +use reth_node_builder::rpc::{EthApiBuilder, EthApiCtx}; +use reth_optimism_flashblocks::{ + FlashBlockBuildInfo, FlashBlockCompleteSequence, FlashBlockCompleteSequenceRx, + FlashBlockConsensusClient, FlashBlockRx, FlashBlockService, FlashblocksListeners, + PendingBlockRx, PendingFlashBlock, WsFlashBlockStream, +}; +use reth_rpc::eth::core::EthApiInner; +use reth_rpc_eth_api::{ + EthApiTypes, FromEvmError, FullEthApiServer, RpcConvert, RpcConverter, RpcNodeCore, + RpcNodeCoreExt, RpcTypes, + helpers::{ + EthApiSpec, EthFees, EthState, LoadFee, LoadPendingBlock, LoadState, SpawnBlocking, Trace, + pending_block::BuildPendingEnv, + }, +}; +use reth_rpc_eth_types::{ + EthStateCache, FeeHistoryCache, GasPriceOracle, PendingBlock, + logs_utils::matching_block_logs_with_tx_hashes, +}; +use reth_storage_api::{BlockReaderIdExt, ProviderHeader}; +use reth_tasks::{ + TaskSpawner, + pool::{BlockingTaskGuard, BlockingTaskPool}, +}; +use std::{ + fmt::{self, Formatter}, + marker::PhantomData, + sync::Arc, + time::Duration, +}; +use tokio::{sync::watch, time}; +use tokio_stream::{Stream, wrappers::BroadcastStream}; +use tracing::info; + +/// Maximum duration to wait for a fresh flashblock when one is being built. 
+const MAX_FLASHBLOCK_WAIT_DURATION: Duration = Duration::from_millis(50); + +/// Adapter for [`EthApiInner`], which holds all the data required to serve core `eth_` API. +pub type EthApiNodeBackend = EthApiInner; + +/// OP-Reth `Eth` API implementation. +/// +/// This type provides the functionality for handling `eth_` related requests. +/// +/// This wraps a default `Eth` implementation, and provides additional functionality where the +/// optimism spec deviates from the default (ethereum) spec, e.g. transaction forwarding to the +/// sequencer, receipts, additional RPC fields for transaction receipts. +/// +/// This type implements the [`FullEthApi`](reth_rpc_eth_api::helpers::FullEthApi) by implemented +/// all the `Eth` helper traits and prerequisite traits. +pub struct OpEthApi { + /// Gateway to node's core components. + inner: Arc>, +} + +impl Clone for OpEthApi { + fn clone(&self) -> Self { + Self { inner: self.inner.clone() } + } +} + +impl OpEthApi { + /// Creates a new `OpEthApi`. + pub fn new( + eth_api: EthApiNodeBackend, + sequencer_client: Option, + min_suggested_priority_fee: U256, + flashblocks: Option>, + ) -> Self { + let inner = Arc::new(OpEthApiInner { + eth_api, + sequencer_client, + min_suggested_priority_fee, + flashblocks, + }); + Self { inner } + } + + /// Build a [`OpEthApi`] using [`OpEthApiBuilder`]. + pub const fn builder() -> OpEthApiBuilder { + OpEthApiBuilder::new() + } + + /// Returns a reference to the [`EthApiNodeBackend`]. + pub fn eth_api(&self) -> &EthApiNodeBackend { + self.inner.eth_api() + } + /// Returns the configured sequencer client, if any. + pub fn sequencer_client(&self) -> Option<&SequencerClient> { + self.inner.sequencer_client() + } + + /// Returns a cloned pending block receiver, if any. + pub fn pending_block_rx(&self) -> Option> { + self.inner.flashblocks.as_ref().map(|f| f.pending_block_rx.clone()) + } + + /// Returns a new subscription to received flashblocks. 
+ pub fn subscribe_received_flashblocks(&self) -> Option { + self.inner.flashblocks.as_ref().map(|f| f.received_flashblocks.subscribe()) + } + + /// Returns a new subscription to flashblock sequences. + pub fn subscribe_flashblock_sequence(&self) -> Option { + self.inner.flashblocks.as_ref().map(|f| f.flashblocks_sequence.subscribe()) + } + + /// Returns a stream of matching flashblock receipts, if any. + /// + /// This will yield all new matching receipts received from _new_ flashblocks. + pub fn flashblock_receipts_stream( + &self, + filter: Filter, + ) -> Option + Send + Unpin> { + self.subscribe_received_flashblocks().map(|rx| { + BroadcastStream::new(rx) + .scan( + None::<(u64, u64)>, // state buffers base block number and timestamp + move |state, result| { + let fb = match result.ok() { + Some(fb) => fb, + None => return futures::future::ready(None), + }; + + // Update state from base flashblock for block level meta data. + if let Some(base) = &fb.base { + *state = Some((base.block_number, base.timestamp)); + } + + let Some((block_number, timestamp)) = *state else { + // we haven't received a new flashblock sequence yet, so we can skip + // until we receive the first index 0 (base) + return futures::future::ready(Some(Vec::new())); + }; + + let receipts = + fb.metadata.receipts.iter().map(|(tx, receipt)| (*tx, receipt)); + + let all_logs = matching_block_logs_with_tx_hashes( + &filter, + BlockNumHash::new(block_number, fb.diff.block_hash), + timestamp, + receipts, + false, + ); + + futures::future::ready(Some(all_logs)) + }, + ) + .flat_map(futures::stream::iter) + }) + } + + /// Returns information about the flashblock currently being built, if any. + fn flashblock_build_info(&self) -> Option { + self.inner.flashblocks.as_ref().and_then(|f| *f.in_progress_rx.borrow()) + } + + /// Extracts pending block if it matches the expected parent hash. 
+ fn extract_matching_block( + &self, + block: Option<&PendingFlashBlock>, + parent_hash: B256, + ) -> Option> { + block.filter(|b| b.block().parent_hash() == parent_hash).map(|b| b.pending.clone()) + } + + /// Awaits a fresh flashblock if one is being built, otherwise returns current. + async fn flashblock( + &self, + parent_hash: B256, + ) -> eyre::Result>> { + let Some(rx) = self.inner.flashblocks.as_ref().map(|f| &f.pending_block_rx) else { + return Ok(None); + }; + + // Check if a flashblock is being built + if let Some(build_info) = self.flashblock_build_info() { + let current_index = rx.borrow().as_ref().map(|b| b.last_flashblock_index); + + // Check if this is the first flashblock or the next consecutive index + let is_next_index = current_index.is_none_or(|idx| build_info.index == idx + 1); + + // Wait only for relevant flashblocks: matching parent and next in sequence + if build_info.parent_hash == parent_hash && is_next_index { + let mut rx_clone = rx.clone(); + // Wait up to MAX_FLASHBLOCK_WAIT_DURATION for a new flashblock to arrive + let _ = time::timeout(MAX_FLASHBLOCK_WAIT_DURATION, rx_clone.changed()).await; + } + } + + // Fall back to current block + Ok(self.extract_matching_block(rx.borrow().as_ref(), parent_hash)) + } + + /// Returns a [`PendingBlock`] that is built out of flashblocks. + /// + /// If flashblocks receiver is not set, then it always returns `None`. + /// + /// It may wait up to 50ms for a fresh flashblock if one is currently being built. + pub async fn pending_flashblock(&self) -> eyre::Result>> + where + OpEthApiError: FromEvmError, + Rpc: RpcConvert, + { + let Some(latest) = self.provider().latest_header()? 
else { + return Ok(None); + }; + + self.flashblock(latest.hash()).await + } +} + +impl EthApiTypes for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + type Error = OpEthApiError; + type NetworkTypes = Rpc::Network; + type RpcConvert = Rpc; + + fn converter(&self) -> &Self::RpcConvert { + self.inner.eth_api.converter() + } +} + +impl RpcNodeCore for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + type Primitives = N::Primitives; + type Provider = N::Provider; + type Pool = N::Pool; + type Evm = N::Evm; + type Network = N::Network; + + #[inline] + fn pool(&self) -> &Self::Pool { + self.inner.eth_api.pool() + } + + #[inline] + fn evm_config(&self) -> &Self::Evm { + self.inner.eth_api.evm_config() + } + + #[inline] + fn network(&self) -> &Self::Network { + self.inner.eth_api.network() + } + + #[inline] + fn provider(&self) -> &Self::Provider { + self.inner.eth_api.provider() + } +} + +impl RpcNodeCoreExt for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + #[inline] + fn cache(&self) -> &EthStateCache { + self.inner.eth_api.cache() + } +} + +impl EthApiSpec for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + #[inline] + fn starting_block(&self) -> U256 { + self.inner.eth_api.starting_block() + } +} + +impl SpawnBlocking for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + #[inline] + fn io_task_spawner(&self) -> impl TaskSpawner { + self.inner.eth_api.task_spawner() + } + + #[inline] + fn tracing_task_pool(&self) -> &BlockingTaskPool { + self.inner.eth_api.blocking_task_pool() + } + + #[inline] + fn tracing_task_guard(&self) -> &BlockingTaskGuard { + self.inner.eth_api.blocking_task_guard() + } + + #[inline] + fn blocking_io_task_guard(&self) -> &Arc { + self.inner.eth_api.blocking_io_request_semaphore() + } +} + +impl LoadFee for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ + #[inline] + fn gas_oracle(&self) -> &GasPriceOracle { + self.inner.eth_api.gas_oracle() + } + + #[inline] 
+ fn fee_history_cache(&self) -> &FeeHistoryCache> { + self.inner.eth_api.fee_history_cache() + } + + async fn suggested_priority_fee(&self) -> Result { + self.inner + .eth_api + .gas_oracle() + .op_suggest_tip_cap(self.inner.min_suggested_priority_fee) + .await + .map_err(Into::into) + } +} + +impl LoadState for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, + Self: LoadPendingBlock, +{ +} + +impl EthState for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, + Self: LoadPendingBlock, +{ + #[inline] + fn max_proof_window(&self) -> u64 { + self.inner.eth_api.eth_proof_window() + } +} + +impl EthFees for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ +} + +impl Trace for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ +} + +impl fmt::Debug for OpEthApi { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("OpEthApi").finish_non_exhaustive() + } +} + +/// Container type `OpEthApi` +pub struct OpEthApiInner { + /// Gateway to node's core components. + eth_api: EthApiNodeBackend, + /// Sequencer client, configured to forward submitted transactions to sequencer of given OP + /// network. + sequencer_client: Option, + /// Minimum priority fee enforced by OP-specific logic. + /// + /// See also + min_suggested_priority_fee: U256, + /// Flashblocks listeners. + /// + /// If set, provides receivers for pending blocks, flashblock sequences, and build status. + flashblocks: Option>, +} + +impl fmt::Debug for OpEthApiInner { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("OpEthApiInner").finish() + } +} + +impl OpEthApiInner { + /// Returns a reference to the [`EthApiNodeBackend`]. + const fn eth_api(&self) -> &EthApiNodeBackend { + &self.eth_api + } + + /// Returns the configured sequencer client, if any. 
+ const fn sequencer_client(&self) -> Option<&SequencerClient> { + self.sequencer_client.as_ref() + } +} + +/// Converter for OP RPC types. +pub type OpRpcConvert = RpcConverter< + NetworkT, + ::Evm, + OpReceiptConverter<::Provider>, + (), + OpTxInfoMapper<::Provider>, +>; + +/// Builds [`OpEthApi`] for Optimism. +#[derive(Debug)] +pub struct OpEthApiBuilder { + /// Sequencer client, configured to forward submitted transactions to sequencer of given OP + /// network. + sequencer_url: Option, + /// Headers to use for the sequencer client requests. + sequencer_headers: Vec, + /// Minimum suggested priority fee (tip) + min_suggested_priority_fee: u64, + /// A URL pointing to a secure websocket connection (wss) that streams out [flashblocks]. + /// + /// [flashblocks]: reth_optimism_flashblocks + flashblocks_url: Option, + /// Enable flashblock consensus client to drive the chain forward. + /// + /// When enabled, flashblock sequences are submitted to the engine API via + /// `newPayload` and `forkchoiceUpdated` calls, advancing the canonical chain state. + /// Requires `flashblocks_url` to be set. + flashblock_consensus: bool, + /// Marker for network types. + _nt: PhantomData, +} + +impl Default for OpEthApiBuilder { + fn default() -> Self { + Self { + sequencer_url: None, + sequencer_headers: Vec::new(), + min_suggested_priority_fee: 1_000_000, + flashblocks_url: None, + flashblock_consensus: false, + _nt: PhantomData, + } + } +} + +impl OpEthApiBuilder { + /// Creates a [`OpEthApiBuilder`] instance from core components. + pub const fn new() -> Self { + Self { + sequencer_url: None, + sequencer_headers: Vec::new(), + min_suggested_priority_fee: 1_000_000, + flashblocks_url: None, + flashblock_consensus: false, + _nt: PhantomData, + } + } + + /// With a [`SequencerClient`]. + pub fn with_sequencer(mut self, sequencer_url: Option) -> Self { + self.sequencer_url = sequencer_url; + self + } + + /// With headers to use for the sequencer client requests. 
+ pub fn with_sequencer_headers(mut self, sequencer_headers: Vec) -> Self { + self.sequencer_headers = sequencer_headers; + self + } + + /// With minimum suggested priority fee (tip). + pub const fn with_min_suggested_priority_fee(mut self, min: u64) -> Self { + self.min_suggested_priority_fee = min; + self + } + + /// With a subscription to flashblocks secure websocket connection. + pub fn with_flashblocks(mut self, flashblocks_url: Option) -> Self { + self.flashblocks_url = flashblocks_url; + self + } + + /// With flashblock consensus client enabled to drive chain forward + pub const fn with_flashblock_consensus(mut self, flashblock_consensus: bool) -> Self { + self.flashblock_consensus = flashblock_consensus; + self + } +} + +impl EthApiBuilder for OpEthApiBuilder +where + N: FullNodeComponents< + Evm: ConfigureEvm< + NextBlockEnvCtx: BuildPendingEnv> + + From + + Unpin, + >, + Types: NodeTypes< + ChainSpec: Hardforks + EthereumHardforks, + Payload: reth_node_api::PayloadTypes< + ExecutionData: for<'a> TryFrom< + &'a FlashBlockCompleteSequence, + Error: std::fmt::Display, + >, + >, + >, + >, + NetworkT: RpcTypes, + OpRpcConvert: RpcConvert, + OpEthApi>: + FullEthApiServer, +{ + type EthApi = OpEthApi>; + + async fn build_eth_api(self, ctx: EthApiCtx<'_, N>) -> eyre::Result { + let Self { + sequencer_url, + sequencer_headers, + min_suggested_priority_fee, + flashblocks_url, + flashblock_consensus, + .. 
+ } = self; + let rpc_converter = + RpcConverter::new(OpReceiptConverter::new(ctx.components.provider().clone())) + .with_mapper(OpTxInfoMapper::new(ctx.components.provider().clone())); + + let sequencer_client = if let Some(url) = sequencer_url { + Some( + SequencerClient::new_with_headers(&url, sequencer_headers) + .await + .wrap_err_with(|| format!("Failed to init sequencer client with: {url}"))?, + ) + } else { + None + }; + + let flashblocks = if let Some(ws_url) = flashblocks_url { + info!(target: "reth:cli", %ws_url, "Launching flashblocks service"); + + let (tx, pending_rx) = watch::channel(None); + let stream = WsFlashBlockStream::new(ws_url); + let service = FlashBlockService::new( + stream, + ctx.components.evm_config().clone(), + ctx.components.provider().clone(), + ctx.components.task_executor().clone(), + // enable state root calculation if flashblock_consensus is enabled. + flashblock_consensus, + ); + + let flashblocks_sequence = service.block_sequence_broadcaster().clone(); + let received_flashblocks = service.flashblocks_broadcaster().clone(); + let in_progress_rx = service.subscribe_in_progress(); + ctx.components.task_executor().spawn(Box::pin(service.run(tx))); + + if flashblock_consensus { + info!(target: "reth::cli", "Launching FlashBlockConsensusClient"); + let flashblock_client = FlashBlockConsensusClient::new( + ctx.engine_handle.clone(), + flashblocks_sequence.subscribe(), + )?; + ctx.components.task_executor().spawn(Box::pin(flashblock_client.run())); + } + + Some(FlashblocksListeners::new( + pending_rx, + flashblocks_sequence, + in_progress_rx, + received_flashblocks, + )) + } else { + None + }; + + let eth_api = ctx.eth_api_builder().with_rpc_converter(rpc_converter).build_inner(); + + Ok(OpEthApi::new( + eth_api, + sequencer_client, + U256::from(min_suggested_priority_fee), + flashblocks, + )) + } +} diff --git a/op-reth/crates/rpc/src/eth/pending_block.rs b/rust/op-reth/crates/rpc/src/eth/pending_block.rs similarity index 92% rename 
from op-reth/crates/rpc/src/eth/pending_block.rs rename to rust/op-reth/crates/rpc/src/eth/pending_block.rs index bf351d7de11..587693e8573 100644 --- a/op-reth/crates/rpc/src/eth/pending_block.rs +++ b/rust/op-reth/crates/rpc/src/eth/pending_block.rs @@ -5,12 +5,12 @@ use alloy_consensus::BlockHeader; use alloy_eips::BlockNumberOrTag; use reth_chain_state::BlockState; use reth_rpc_eth_api::{ - helpers::{pending_block::PendingEnvBuilder, LoadPendingBlock, SpawnBlocking}, FromEvmError, RpcConvert, RpcNodeCore, RpcNodeCoreExt, + helpers::{LoadPendingBlock, SpawnBlocking, pending_block::PendingEnvBuilder}, }; use reth_rpc_eth_types::{ - block::BlockAndReceipts, builder::config::PendingBlockKind, error::FromEthApiError, - EthApiError, PendingBlock, + EthApiError, PendingBlock, block::BlockAndReceipts, builder::config::PendingBlockKind, + error::FromEthApiError, }; use reth_storage_api::{BlockReaderIdExt, StateProviderBox, StateProviderFactory}; diff --git a/rust/op-reth/crates/rpc/src/eth/receipt.rs b/rust/op-reth/crates/rpc/src/eth/receipt.rs new file mode 100644 index 00000000000..77243039e97 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/eth/receipt.rs @@ -0,0 +1,723 @@ +//! Loads and formats OP receipt RPC response. 
+ +use crate::{OpEthApi, OpEthApiError, eth::RpcNodeCore}; +use alloy_consensus::{BlockHeader, Receipt, ReceiptWithBloom, TxReceipt}; +use alloy_eips::eip2718::Encodable2718; +use alloy_rpc_types_eth::{Log, TransactionReceipt}; +use op_alloy_consensus::{OpReceipt, OpTransaction}; +use op_alloy_rpc_types::{L1BlockInfo, OpTransactionReceipt, OpTransactionReceiptFields}; +use op_revm::estimate_tx_compressed_size; +use reth_chainspec::{ChainSpecProvider, EthChainSpec}; +use reth_node_api::NodePrimitives; +use reth_optimism_evm::RethL1BlockInfo; +use reth_optimism_forks::OpHardforks; +use reth_primitives_traits::SealedBlock; +use reth_rpc_eth_api::{ + RpcConvert, + helpers::LoadReceipt, + transaction::{ConvertReceiptInput, ReceiptConverter}, +}; +use reth_rpc_eth_types::{EthApiError, receipt::build_receipt}; +use reth_storage_api::BlockReader; +use std::fmt::Debug; + +impl LoadReceipt for OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ +} + +/// Converter for OP receipts. +#[derive(Debug, Clone)] +pub struct OpReceiptConverter { + provider: Provider, +} + +impl OpReceiptConverter { + /// Creates a new [`OpReceiptConverter`]. + pub const fn new(provider: Provider) -> Self { + Self { provider } + } +} + +impl ReceiptConverter for OpReceiptConverter +where + N: NodePrimitives, + Provider: + BlockReader + ChainSpecProvider + Debug + 'static, +{ + type RpcReceipt = OpTransactionReceipt; + type Error = OpEthApiError; + + fn convert_receipts( + &self, + inputs: Vec>, + ) -> Result, Self::Error> { + let Some(block_number) = inputs.first().map(|r| r.meta.block_number) else { + return Ok(Vec::new()); + }; + + let block = self + .provider + .block_by_number(block_number)? 
+ .ok_or(EthApiError::HeaderNotFound(block_number.into()))?; + + self.convert_receipts_with_block(inputs, &SealedBlock::new_unhashed(block)) + } + + fn convert_receipts_with_block( + &self, + inputs: Vec>, + block: &SealedBlock, + ) -> Result, Self::Error> { + let mut l1_block_info = match reth_optimism_evm::extract_l1_info(block.body()) { + Ok(l1_block_info) => l1_block_info, + Err(err) => { + let genesis_number = + self.provider.chain_spec().genesis().number.unwrap_or_default(); + // If it is the genesis block (i.e. block number is 0), there is no L1 info, so + // we return an empty l1_block_info. + if block.header().number() == genesis_number { + return Ok(vec![]); + } + return Err(err.into()); + } + }; + + let mut receipts = Vec::with_capacity(inputs.len()); + + for input in inputs { + // We must clear this cache as different L2 transactions can have different + // L1 costs. A potential improvement here is to only clear the cache if the + // new transaction input has changed, since otherwise the L1 cost wouldn't. + l1_block_info.clear_tx_l1_cost(); + + receipts.push( + OpReceiptBuilder::new(&self.provider.chain_spec(), input, &mut l1_block_info)? + .build(), + ); + } + + Ok(receipts) + } +} + +/// L1 fee and data gas for a non-deposit transaction, or deposit nonce and receipt version for a +/// deposit transaction. +#[derive(Debug, Clone)] +pub struct OpReceiptFieldsBuilder { + /// Block number. + pub block_number: u64, + /// Block timestamp. + pub block_timestamp: u64, + /// The L1 fee for transaction. + pub l1_fee: Option, + /// L1 gas used by transaction. + pub l1_data_gas: Option, + /// L1 fee scalar. + pub l1_fee_scalar: Option, + /* ---------------------------------------- Bedrock ---------------------------------------- */ + /// The base fee of the L1 origin block. + pub l1_base_fee: Option, + /* --------------------------------------- Regolith ---------------------------------------- */ + /// Deposit nonce, if this is a deposit transaction. 
+ pub deposit_nonce: Option, + /* ---------------------------------------- Canyon ----------------------------------------- */ + /// Deposit receipt version, if this is a deposit transaction. + pub deposit_receipt_version: Option, + /* ---------------------------------------- Ecotone ---------------------------------------- */ + /// The current L1 fee scalar. + pub l1_base_fee_scalar: Option, + /// The current L1 blob base fee. + pub l1_blob_base_fee: Option, + /// The current L1 blob base fee scalar. + pub l1_blob_base_fee_scalar: Option, + /* ---------------------------------------- Isthmus ---------------------------------------- */ + /// The current operator fee scalar. + pub operator_fee_scalar: Option, + /// The current L1 blob base fee scalar. + pub operator_fee_constant: Option, + /* ---------------------------------------- Jovian ----------------------------------------- */ + /// The current DA footprint gas scalar. + pub da_footprint_gas_scalar: Option, +} + +impl OpReceiptFieldsBuilder { + /// Returns a new builder. + pub const fn new(block_timestamp: u64, block_number: u64) -> Self { + Self { + block_number, + block_timestamp, + l1_fee: None, + l1_data_gas: None, + l1_fee_scalar: None, + l1_base_fee: None, + deposit_nonce: None, + deposit_receipt_version: None, + l1_base_fee_scalar: None, + l1_blob_base_fee: None, + l1_blob_base_fee_scalar: None, + operator_fee_scalar: None, + operator_fee_constant: None, + da_footprint_gas_scalar: None, + } + } + + /// Applies [`L1BlockInfo`](op_revm::L1BlockInfo). + pub fn l1_block_info( + mut self, + chain_spec: &impl OpHardforks, + tx: &T, + l1_block_info: &mut op_revm::L1BlockInfo, + ) -> Result { + let raw_tx = tx.encoded_2718(); + let timestamp = self.block_timestamp; + + self.l1_fee = Some( + l1_block_info + .l1_tx_data_fee(chain_spec, timestamp, &raw_tx, tx.is_deposit()) + .map_err(|_| OpEthApiError::L1BlockFeeError)? 
+ .saturating_to(), + ); + + self.l1_data_gas = Some( + l1_block_info + .l1_data_gas(chain_spec, timestamp, &raw_tx) + .map_err(|_| OpEthApiError::L1BlockGasError)? + .saturating_add(l1_block_info.l1_fee_overhead.unwrap_or_default()) + .saturating_to(), + ); + + self.l1_fee_scalar = (!chain_spec.is_ecotone_active_at_timestamp(timestamp)) + .then_some(f64::from(l1_block_info.l1_base_fee_scalar) / 1_000_000.0); + + self.l1_base_fee = Some(l1_block_info.l1_base_fee.saturating_to()); + self.l1_base_fee_scalar = Some(l1_block_info.l1_base_fee_scalar.saturating_to()); + self.l1_blob_base_fee = l1_block_info.l1_blob_base_fee.map(|fee| fee.saturating_to()); + self.l1_blob_base_fee_scalar = + l1_block_info.l1_blob_base_fee_scalar.map(|scalar| scalar.saturating_to()); + + // If the operator fee params are both set to 0, we don't add them to the receipt. + let operator_fee_scalar_has_non_zero_value: bool = + l1_block_info.operator_fee_scalar.is_some_and(|scalar| !scalar.is_zero()); + + let operator_fee_constant_has_non_zero_value = + l1_block_info.operator_fee_constant.is_some_and(|constant| !constant.is_zero()); + + if operator_fee_scalar_has_non_zero_value || operator_fee_constant_has_non_zero_value { + self.operator_fee_scalar = + l1_block_info.operator_fee_scalar.map(|scalar| scalar.saturating_to()); + self.operator_fee_constant = + l1_block_info.operator_fee_constant.map(|constant| constant.saturating_to()); + } + + self.da_footprint_gas_scalar = l1_block_info.da_footprint_gas_scalar; + + Ok(self) + } + + /// Applies deposit transaction metadata: deposit nonce. + pub const fn deposit_nonce(mut self, nonce: Option) -> Self { + self.deposit_nonce = nonce; + self + } + + /// Applies deposit transaction metadata: deposit receipt version. + pub const fn deposit_version(mut self, version: Option) -> Self { + self.deposit_receipt_version = version; + self + } + + /// Builds the [`OpTransactionReceiptFields`] object. 
+ pub const fn build(self) -> OpTransactionReceiptFields { + let Self { + block_number: _, // used to compute other fields + block_timestamp: _, // used to compute other fields + l1_fee, + l1_data_gas: l1_gas_used, + l1_fee_scalar, + l1_base_fee: l1_gas_price, + deposit_nonce, + deposit_receipt_version, + l1_base_fee_scalar, + l1_blob_base_fee, + l1_blob_base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + da_footprint_gas_scalar, + } = self; + + OpTransactionReceiptFields { + l1_block_info: L1BlockInfo { + l1_gas_price, + l1_gas_used, + l1_fee, + l1_fee_scalar, + l1_base_fee_scalar, + l1_blob_base_fee, + l1_blob_base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + da_footprint_gas_scalar, + }, + deposit_nonce, + deposit_receipt_version, + } + } +} + +/// Builds an [`OpTransactionReceipt`]. +#[derive(Debug)] +pub struct OpReceiptBuilder { + /// Core receipt, has all the fields of an L1 receipt and is the basis for the OP receipt. + pub core_receipt: TransactionReceipt>>, + /// Additional OP receipt fields. + pub op_receipt_fields: OpTransactionReceiptFields, +} + +impl OpReceiptBuilder { + /// Returns a new builder. 
+ pub fn new( + chain_spec: &impl OpHardforks, + input: ConvertReceiptInput<'_, N>, + l1_block_info: &mut op_revm::L1BlockInfo, + ) -> Result + where + N: NodePrimitives, + { + let timestamp = input.meta.timestamp; + let block_number = input.meta.block_number; + let tx_signed = *input.tx.inner(); + let mut core_receipt = build_receipt(input, None, |receipt, next_log_index, meta| { + let map_logs = move |receipt: alloy_consensus::Receipt| { + let Receipt { status, cumulative_gas_used, logs } = receipt; + let logs = Log::collect_for_receipt(next_log_index, meta, logs); + Receipt { status, cumulative_gas_used, logs } + }; + let mapped_receipt: OpReceipt = match receipt { + OpReceipt::Legacy(receipt) => OpReceipt::Legacy(map_logs(receipt)), + OpReceipt::Eip2930(receipt) => OpReceipt::Eip2930(map_logs(receipt)), + OpReceipt::Eip1559(receipt) => OpReceipt::Eip1559(map_logs(receipt)), + OpReceipt::Eip7702(receipt) => OpReceipt::Eip7702(map_logs(receipt)), + OpReceipt::Deposit(receipt) => OpReceipt::Deposit(receipt.map_inner(map_logs)), + }; + mapped_receipt.into_with_bloom() + }); + + // In jovian, we're using the blob gas used field to store the current da + // footprint's value. + // We're computing the jovian blob gas used before building the receipt since the inputs get + // consumed by the `build_receipt` function. + chain_spec.is_jovian_active_at_timestamp(timestamp).then(|| { + // Estimate the size of the transaction in bytes and multiply by the DA + // footprint gas scalar. 
+ // Jovian specs: `https://github.com/ethereum-optimism/specs/blob/main/specs/protocol/jovian/exec-engine.md#da-footprint-block-limit` + let da_size = estimate_tx_compressed_size(tx_signed.encoded_2718().as_slice()) + .saturating_div(1_000_000) + .saturating_mul(l1_block_info.da_footprint_gas_scalar.unwrap_or_default().into()); + + core_receipt.blob_gas_used = Some(da_size); + }); + + let op_receipt_fields = OpReceiptFieldsBuilder::new(timestamp, block_number) + .l1_block_info(chain_spec, tx_signed, l1_block_info)? + .build(); + + Ok(Self { core_receipt, op_receipt_fields }) + } + + /// Builds [`OpTransactionReceipt`] by combining core (l1) receipt fields and additional OP + /// receipt fields. + pub fn build(self) -> OpTransactionReceipt { + let Self { core_receipt: inner, op_receipt_fields } = self; + + let OpTransactionReceiptFields { l1_block_info, .. } = op_receipt_fields; + + OpTransactionReceipt { inner, l1_block_info } + } +} + +#[cfg(test)] +mod test { + use super::*; + use alloy_consensus::{Block, BlockBody, Eip658Value, TxEip7702, transaction::TransactionMeta}; + use alloy_op_hardforks::{ + OP_MAINNET_ISTHMUS_TIMESTAMP, OP_MAINNET_JOVIAN_TIMESTAMP, OpChainHardforks, + }; + use alloy_primitives::{Address, Bytes, Signature, U256, hex}; + use op_alloy_consensus::OpTypedTransaction; + use op_alloy_network::eip2718::Decodable2718; + use reth_optimism_chainspec::{BASE_MAINNET, OP_MAINNET}; + use reth_optimism_primitives::{OpPrimitives, OpTransactionSigned}; + use reth_primitives_traits::Recovered; + + /// OP Mainnet transaction at index 0 in block 124665056. 
+ /// + /// + const TX_SET_L1_BLOCK_OP_MAINNET_BLOCK_124665056: [u8; 251] = hex!( + "7ef8f8a0683079df94aa5b9cf86687d739a60a9b4f0835e520ec4d664e2e415dca17a6df94deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e200000146b000f79c500000000000000040000000066d052e700000000013ad8a3000000000000000000000000000000000000000000000000000000003ef1278700000000000000000000000000000000000000000000000000000000000000012fdf87b89884a61e74b322bbcf60386f543bfae7827725efaaf0ab1de2294a590000000000000000000000006887246668a3b87f54deb3b94ba47a6f63f32985" + ); + + /// OP Mainnet transaction at index 1 in block 124665056. + /// + /// + const TX_1_OP_MAINNET_BLOCK_124665056: [u8; 1176] = hex!( + "02f904940a8303fba78401d6d2798401db2b6d830493e0943e6f4f7866654c18f536170780344aa8772950b680b904246a761202000000000000000000000000087000a300de7200382b55d40045000000e5d60e0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000014000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000003a0000000000000000000000000000000000000000000000000000000000000022482ad56cb0000000000000000000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000120000000000000000000000000dc6ff44d5d932cbd77b52e5612ba0529dc6226f10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000006000000
00000000000000000000000000000000000000000000000000000000044095ea7b300000000000000000000000021c4928109acb0659a88ae5329b5374a3024694c0000000000000000000000000000000000000000000000049b9ca9a6943400000000000000000000000000000000000000000000000000000000000000000000000000000000000021c4928109acb0659a88ae5329b5374a3024694c000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000024b6b55f250000000000000000000000000000000000000000000000049b9ca9a694340000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000415ec214a3950bea839a7e6fbb0ba1540ac2076acd50820e2d5ef83d0902cdffb24a47aff7de5190290769c4f0a9c6fabf63012986a0d590b1b571547a8c7050ea1b00000000000000000000000000000000000000000000000000000000000000c080a06db770e6e25a617fe9652f0958bd9bd6e49281a53036906386ed39ec48eadf63a07f47cf51a4a40b4494cf26efc686709a9b03939e20ee27e59682f5faa536667e" + ); + + /// Timestamp of OP mainnet block 124665056. + /// + /// + const BLOCK_124665056_TIMESTAMP: u64 = 1724928889; + + /// L1 block info for transaction at index 1 in block 124665056. 
+ /// + /// + const TX_META_TX_1_OP_MAINNET_BLOCK_124665056: OpTransactionReceiptFields = + OpTransactionReceiptFields { + l1_block_info: L1BlockInfo { + l1_gas_price: Some(1055991687), // since bedrock l1 base fee + l1_gas_used: Some(4471), + l1_fee: Some(24681034813), + l1_fee_scalar: None, + l1_base_fee_scalar: Some(5227), + l1_blob_base_fee: Some(1), + l1_blob_base_fee_scalar: Some(1014213), + operator_fee_scalar: None, + operator_fee_constant: None, + da_footprint_gas_scalar: None, + }, + deposit_nonce: None, + deposit_receipt_version: None, + }; + + #[test] + fn op_receipt_fields_from_block_and_tx() { + // rig + let tx_0 = OpTransactionSigned::decode_2718( + &mut TX_SET_L1_BLOCK_OP_MAINNET_BLOCK_124665056.as_slice(), + ) + .unwrap(); + + let tx_1 = + OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) + .unwrap(); + + let block: Block = Block { + body: BlockBody { transactions: [tx_0, tx_1.clone()].to_vec(), ..Default::default() }, + ..Default::default() + }; + + let mut l1_block_info = + reth_optimism_evm::extract_l1_info(&block.body).expect("should extract l1 info"); + + // test + assert!(OP_MAINNET.is_fjord_active_at_timestamp(BLOCK_124665056_TIMESTAMP)); + + let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) + .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) + .expect("should parse revm l1 info") + .build(); + + let L1BlockInfo { + l1_gas_price, + l1_gas_used, + l1_fee, + l1_fee_scalar, + l1_base_fee_scalar, + l1_blob_base_fee, + l1_blob_base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + da_footprint_gas_scalar, + } = receipt_meta.l1_block_info; + + assert_eq!( + l1_gas_price, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_gas_price, + "incorrect l1 base fee (former gas price)" + ); + assert_eq!( + l1_gas_used, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_gas_used, + "incorrect l1 gas used" + ); + assert_eq!( + l1_fee, 
TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_fee, + "incorrect l1 fee" + ); + assert_eq!( + l1_fee_scalar, TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_fee_scalar, + "incorrect l1 fee scalar" + ); + assert_eq!( + l1_base_fee_scalar, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_base_fee_scalar, + "incorrect l1 base fee scalar" + ); + assert_eq!( + l1_blob_base_fee, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_blob_base_fee, + "incorrect l1 blob base fee" + ); + assert_eq!( + l1_blob_base_fee_scalar, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.l1_blob_base_fee_scalar, + "incorrect l1 blob base fee scalar" + ); + assert_eq!( + operator_fee_scalar, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.operator_fee_scalar, + "incorrect operator fee scalar" + ); + assert_eq!( + operator_fee_constant, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.operator_fee_constant, + "incorrect operator fee constant" + ); + assert_eq!( + da_footprint_gas_scalar, + TX_META_TX_1_OP_MAINNET_BLOCK_124665056.l1_block_info.da_footprint_gas_scalar, + "incorrect da footprint gas scalar" + ); + } + + #[test] + fn op_non_zero_operator_fee_params_included_in_receipt() { + let tx_1 = + OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) + .unwrap(); + + let mut l1_block_info = op_revm::L1BlockInfo { + operator_fee_scalar: Some(U256::ZERO), + operator_fee_constant: Some(U256::from(2)), + ..Default::default() + }; + + let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) + .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) + .expect("should parse revm l1 info") + .build(); + + let L1BlockInfo { operator_fee_scalar, operator_fee_constant, .. 
} = + receipt_meta.l1_block_info; + + assert_eq!(operator_fee_scalar, Some(0), "incorrect operator fee scalar"); + assert_eq!(operator_fee_constant, Some(2), "incorrect operator fee constant"); + } + + #[test] + fn op_zero_operator_fee_params_not_included_in_receipt() { + let tx_1 = + OpTransactionSigned::decode_2718(&mut TX_1_OP_MAINNET_BLOCK_124665056.as_slice()) + .unwrap(); + + let mut l1_block_info = op_revm::L1BlockInfo { + operator_fee_scalar: Some(U256::ZERO), + operator_fee_constant: Some(U256::ZERO), + ..Default::default() + }; + + let receipt_meta = OpReceiptFieldsBuilder::new(BLOCK_124665056_TIMESTAMP, 124665056) + .l1_block_info(&*OP_MAINNET, &tx_1, &mut l1_block_info) + .expect("should parse revm l1 info") + .build(); + + let L1BlockInfo { operator_fee_scalar, operator_fee_constant, .. } = + receipt_meta.l1_block_info; + + assert_eq!(operator_fee_scalar, None, "incorrect operator fee scalar"); + assert_eq!(operator_fee_constant, None, "incorrect operator fee constant"); + } + + // + #[test] + fn base_receipt_gas_fields() { + // https://basescan.org/tx/0x510fd4c47d78ba9f97c91b0f2ace954d5384c169c9545a77a373cf3ef8254e6e + let system = hex!( + "7ef8f8a0389e292420bcbf9330741f72074e39562a09ff5a00fd22e4e9eee7e34b81bca494deaddeaddeaddeaddeaddeaddeaddeaddead00019442000000000000000000000000000000000000158080830f424080b8a4440a5e20000008dd00101c120000000000000004000000006721035b00000000014189960000000000000000000000000000000000000000000000000000000349b4dcdc000000000000000000000000000000000000000000000000000000004ef9325cc5991ce750960f636ca2ffbb6e209bb3ba91412f21dd78c14ff154d1930f1f9a0000000000000000000000005050f69a9786f081509234f1a7f4684b5e5b76c9" + ); + let tx_0 = OpTransactionSigned::decode_2718(&mut &system[..]).unwrap(); + + let block: alloy_consensus::Block = Block { + body: BlockBody { transactions: vec![tx_0], ..Default::default() }, + ..Default::default() + }; + let mut l1_block_info = + reth_optimism_evm::extract_l1_info(&block.body).expect("should 
extract l1 info"); + + // https://basescan.org/tx/0xf9420cbaf66a2dda75a015488d37262cbfd4abd0aad7bb2be8a63e14b1fa7a94 + let tx = hex!( + "02f86c8221058034839a4ae283021528942f16386bb37709016023232523ff6d9daf444be380841249c58bc080a001b927eda2af9b00b52a57be0885e0303c39dd2831732e14051c2336470fd468a0681bf120baf562915841a48601c2b54a6742511e535cf8f71c95115af7ff63bd" + ); + let tx_1 = OpTransactionSigned::decode_2718(&mut &tx[..]).unwrap(); + + let receipt_meta = OpReceiptFieldsBuilder::new(1730216981, 21713817) + .l1_block_info(&*BASE_MAINNET, &tx_1, &mut l1_block_info) + .expect("should parse revm l1 info") + .build(); + + let L1BlockInfo { + l1_gas_price, + l1_gas_used, + l1_fee, + l1_fee_scalar, + l1_base_fee_scalar, + l1_blob_base_fee, + l1_blob_base_fee_scalar, + operator_fee_scalar, + operator_fee_constant, + da_footprint_gas_scalar, + } = receipt_meta.l1_block_info; + + assert_eq!(l1_gas_price, Some(14121491676), "incorrect l1 base fee (former gas price)"); + assert_eq!(l1_gas_used, Some(1600), "incorrect l1 gas used"); + assert_eq!(l1_fee, Some(191150293412), "incorrect l1 fee"); + assert!(l1_fee_scalar.is_none(), "incorrect l1 fee scalar"); + assert_eq!(l1_base_fee_scalar, Some(2269), "incorrect l1 base fee scalar"); + assert_eq!(l1_blob_base_fee, Some(1324954204), "incorrect l1 blob base fee"); + assert_eq!(l1_blob_base_fee_scalar, Some(1055762), "incorrect l1 blob base fee scalar"); + assert_eq!(operator_fee_scalar, None, "incorrect operator fee scalar"); + assert_eq!(operator_fee_constant, None, "incorrect operator fee constant"); + assert_eq!(da_footprint_gas_scalar, None, "incorrect da footprint gas scalar"); + } + + #[test] + fn da_footprint_gas_scalar_included_in_receipt_post_jovian() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 10; + + let tx = TxEip7702 { + chain_id: 1u64, + nonce: 0, + max_fee_per_gas: 0x28f000fff, + max_priority_fee_per_gas: 0x28f000fff, + gas_limit: 10, + to: Address::default(), + value: U256::from(3_u64), + input: Bytes::from(vec![1, 2]), 
+ access_list: Default::default(), + authorization_list: Default::default(), + }; + + let signature = Signature::new(U256::default(), U256::default(), true); + + let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); + + let mut l1_block_info = op_revm::L1BlockInfo { + da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), + ..Default::default() + }; + + let op_hardforks = OpChainHardforks::op_mainnet(); + + let receipt = OpReceiptFieldsBuilder::new(OP_MAINNET_JOVIAN_TIMESTAMP, u64::MAX) + .l1_block_info(&op_hardforks, &tx, &mut l1_block_info) + .expect("should parse revm l1 info") + .build(); + + assert_eq!(receipt.l1_block_info.da_footprint_gas_scalar, Some(DA_FOOTPRINT_GAS_SCALAR)); + } + + #[test] + fn blob_gas_used_included_in_receipt_post_jovian() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 100; + let tx = TxEip7702 { + chain_id: 1u64, + nonce: 0, + max_fee_per_gas: 0x28f000fff, + max_priority_fee_per_gas: 0x28f000fff, + gas_limit: 10, + to: Address::default(), + value: U256::from(3_u64), + access_list: Default::default(), + authorization_list: Default::default(), + input: Bytes::from(vec![0; 1_000_000]), + }; + + let signature = Signature::new(U256::default(), U256::default(), true); + + let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); + + let mut l1_block_info = op_revm::L1BlockInfo { + da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), + ..Default::default() + }; + + let op_hardforks = OpChainHardforks::op_mainnet(); + + let op_receipt = OpReceiptBuilder::new( + &op_hardforks, + ConvertReceiptInput:: { + tx: Recovered::new_unchecked(&tx, Address::default()), + receipt: OpReceipt::Eip7702(Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 100, + logs: vec![], + }), + gas_used: 100, + next_log_index: 0, + meta: TransactionMeta { + timestamp: OP_MAINNET_JOVIAN_TIMESTAMP, + ..Default::default() + }, + }, + &mut l1_block_info, + ) + .unwrap(); + + let 
expected_blob_gas_used = estimate_tx_compressed_size(tx.encoded_2718().as_slice()) + .saturating_div(1_000_000) + .saturating_mul(DA_FOOTPRINT_GAS_SCALAR.into()); + + assert_eq!(op_receipt.core_receipt.blob_gas_used, Some(expected_blob_gas_used)); + } + + #[test] + fn blob_gas_used_not_included_in_receipt_post_isthmus() { + const DA_FOOTPRINT_GAS_SCALAR: u16 = 100; + let tx = TxEip7702 { + chain_id: 1u64, + nonce: 0, + max_fee_per_gas: 0x28f000fff, + max_priority_fee_per_gas: 0x28f000fff, + gas_limit: 10, + to: Address::default(), + value: U256::from(3_u64), + access_list: Default::default(), + authorization_list: Default::default(), + input: Bytes::from(vec![0; 1_000_000]), + }; + + let signature = Signature::new(U256::default(), U256::default(), true); + + let tx = OpTransactionSigned::new_unhashed(OpTypedTransaction::Eip7702(tx), signature); + + let mut l1_block_info = op_revm::L1BlockInfo { + da_footprint_gas_scalar: Some(DA_FOOTPRINT_GAS_SCALAR), + ..Default::default() + }; + + let op_hardforks = OpChainHardforks::op_mainnet(); + + let op_receipt = OpReceiptBuilder::new( + &op_hardforks, + ConvertReceiptInput:: { + tx: Recovered::new_unchecked(&tx, Address::default()), + receipt: OpReceipt::Eip7702(Receipt { + status: Eip658Value::Eip658(true), + cumulative_gas_used: 100, + logs: vec![], + }), + gas_used: 100, + next_log_index: 0, + meta: TransactionMeta { + timestamp: OP_MAINNET_ISTHMUS_TIMESTAMP, + ..Default::default() + }, + }, + &mut l1_block_info, + ) + .unwrap(); + + assert_eq!(op_receipt.core_receipt.blob_gas_used, None); + } +} diff --git a/rust/op-reth/crates/rpc/src/eth/transaction.rs b/rust/op-reth/crates/rpc/src/eth/transaction.rs new file mode 100644 index 00000000000..bfd17ce662d --- /dev/null +++ b/rust/op-reth/crates/rpc/src/eth/transaction.rs @@ -0,0 +1,302 @@ +//! Loads and formats OP transaction RPC response. 
+ +use crate::{OpEthApi, OpEthApiError, SequencerClient}; +use alloy_primitives::{B256, Bytes}; +use alloy_rpc_types_eth::TransactionInfo; +use futures::StreamExt; +use op_alloy_consensus::{ + OpTransaction, + transaction::{OpDepositInfo, OpTransactionInfo}, +}; +use reth_chain_state::CanonStateSubscriptions; +use reth_optimism_primitives::DepositReceipt; +use reth_primitives_traits::{Recovered, SignedTransaction, SignerRecoverable, WithEncoded}; +use reth_rpc_eth_api::{ + EthApiTypes as _, FromEthApiError, FromEvmError, RpcConvert, RpcNodeCore, RpcReceipt, + TxInfoMapper, + helpers::{EthTransactions, LoadReceipt, LoadTransaction, SpawnBlocking, spec::SignersForRpc}, +}; +use reth_rpc_eth_types::{EthApiError, TransactionSource, block::convert_transaction_receipt}; +use reth_storage_api::{ProviderTx, ReceiptProvider, TransactionsProvider, errors::ProviderError}; +use reth_transaction_pool::{ + AddedTransactionOutcome, PoolPooledTx, PoolTransaction, TransactionOrigin, TransactionPool, +}; +use std::{ + fmt::{Debug, Formatter}, + future::Future, + time::Duration, +}; +use tokio_stream::wrappers::WatchStream; + +impl EthTransactions for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ + fn signers(&self) -> &SignersForRpc { + self.inner.eth_api.signers() + } + + fn send_raw_transaction_sync_timeout(&self) -> Duration { + self.inner.eth_api.send_raw_transaction_sync_timeout() + } + + async fn send_transaction( + &self, + tx: WithEncoded>>, + ) -> Result { + let (tx, recovered) = tx.split(); + + // broadcast raw transaction to subscribers if there is any. + self.eth_api().broadcast_raw_transaction(tx.clone()); + + let pool_transaction = ::Transaction::from_pooled(recovered); + + // On optimism, transactions are forwarded directly to the sequencer to be included in + // blocks that it builds. 
+ if let Some(client) = self.raw_tx_forwarder().as_ref() { + tracing::debug!(target: "rpc::eth", hash = %pool_transaction.hash(), "forwarding raw transaction to sequencer"); + let hash = client.forward_raw_transaction(&tx).await.inspect_err(|err| { + tracing::debug!(target: "rpc::eth", %err, hash=% *pool_transaction.hash(), "failed to forward raw transaction"); + })?; + + // Retain tx in local tx pool after forwarding, for local RPC usage. + let _ = self.inner.eth_api.add_pool_transaction(pool_transaction).await.inspect_err(|err| { + tracing::warn!(target: "rpc::eth", %err, %hash, "successfully sent tx to sequencer, but failed to persist in local tx pool"); + }); + + return Ok(hash); + } + + // submit the transaction to the pool with a `Local` origin + let AddedTransactionOutcome { hash, .. } = self + .pool() + .add_transaction(TransactionOrigin::Local, pool_transaction) + .await + .map_err(Self::Error::from_eth_err)?; + + Ok(hash) + } + + /// Decodes and recovers the transaction and submits it to the pool. + /// + /// And awaits the receipt, checking both canonical blocks and flashblocks for faster + /// confirmation. + fn send_raw_transaction_sync( + &self, + tx: Bytes, + ) -> impl Future, Self::Error>> + Send { + let this = self.clone(); + let timeout_duration = self.send_raw_transaction_sync_timeout(); + async move { + let mut canonical_stream = this.provider().canonical_state_stream(); + let hash = EthTransactions::send_raw_transaction(&this, tx).await?; + let mut flashblock_stream = this.pending_block_rx().map(WatchStream::new); + + tokio::time::timeout(timeout_duration, async { + loop { + tokio::select! 
{ + biased; + // check if the tx was preconfirmed in a new flashblock + flashblock = async { + if let Some(stream) = &mut flashblock_stream { + stream.next().await + } else { + futures::future::pending().await + } + } => { + if let Some(flashblock) = flashblock.flatten() { + // if flashblocks are supported, attempt to find id from the pending block + if let Some(receipt) = flashblock + .find_and_convert_transaction_receipt(hash, this.converter()) + { + return receipt; + } + } + } + // Listen for regular canonical block updates for inclusion + canonical_notification = canonical_stream.next() => { + if let Some(notification) = canonical_notification { + let chain = notification.committed(); + if let Some((block, tx, receipt, all_receipts)) = + chain.find_transaction_and_receipt_by_hash(hash) && + let Some(receipt) = convert_transaction_receipt( + block, + all_receipts, + tx, + receipt, + this.converter(), + ) + .transpose()? + { + return Ok(receipt); + } + } else { + // Canonical stream ended + break; + } + } + } + } + Err(Self::Error::from_eth_err(EthApiError::TransactionConfirmationTimeout { + hash, + duration: timeout_duration, + })) + }) + .await + .unwrap_or_else(|_elapsed| { + Err(Self::Error::from_eth_err(EthApiError::TransactionConfirmationTimeout { + hash, + duration: timeout_duration, + })) + }) + } + } + + /// Returns the transaction receipt for the given hash. + /// + /// With flashblocks, we should also lookup the pending block for the transaction + /// because this is considered confirmed/mined. 
+ fn transaction_receipt( + &self, + hash: B256, + ) -> impl Future>, Self::Error>> + Send + { + let this = self.clone(); + async move { + // first attempt to fetch the mined transaction receipt data + let tx_receipt = this.load_transaction_and_receipt(hash).await?; + + if tx_receipt.is_none() { + // if flashblocks are supported, attempt to find id from the pending block + if let Ok(Some(pending_block)) = this.pending_flashblock().await && + let Some(Ok(receipt)) = pending_block + .find_and_convert_transaction_receipt(hash, this.converter()) + { + return Ok(Some(receipt)); + } + } + let Some((tx, meta, receipt)) = tx_receipt else { return Ok(None) }; + self.build_transaction_receipt(tx, meta, receipt).await.map(Some) + } + } +} + +impl LoadTransaction for OpEthApi +where + N: RpcNodeCore, + OpEthApiError: FromEvmError, + Rpc: RpcConvert, +{ + async fn transaction_by_hash( + &self, + hash: B256, + ) -> Result>>, Self::Error> { + // 1. Try to find the transaction on disk (historical blocks) + if let Some((tx, meta)) = self + .spawn_blocking_io(move |this| { + this.provider() + .transaction_by_hash_with_meta(hash) + .map_err(Self::Error::from_eth_err) + }) + .await? + { + let transaction = tx + .try_into_recovered_unchecked() + .map_err(|_| EthApiError::InvalidTransactionSignature)?; + + return Ok(Some(TransactionSource::Block { + transaction, + index: meta.index, + block_hash: meta.block_hash, + block_number: meta.block_number, + base_fee: meta.base_fee, + })); + } + + // 2. check flashblocks (sequencer preconfirmations) + if let Ok(Some(pending_block)) = self.pending_flashblock().await && + let Some(indexed_tx) = pending_block.block().find_indexed(hash) + { + let meta = indexed_tx.meta(); + return Ok(Some(TransactionSource::Block { + transaction: indexed_tx.recovered_tx().cloned(), + index: meta.index, + block_hash: meta.block_hash, + block_number: meta.block_number, + base_fee: meta.base_fee, + })); + } + + // 3. 
check local pool + if let Some(tx) = self.pool().get(&hash).map(|tx| tx.transaction.clone_into_consensus()) { + return Ok(Some(TransactionSource::Pool(tx))); + } + + Ok(None) + } +} + +impl OpEthApi +where + N: RpcNodeCore, + Rpc: RpcConvert, +{ + /// Returns the [`SequencerClient`] if one is set. + pub fn raw_tx_forwarder(&self) -> Option { + self.inner.sequencer_client.clone() + } +} + +/// Optimism implementation of [`TxInfoMapper`]. +/// +/// For deposits, receipt is fetched to extract `deposit_nonce` and `deposit_receipt_version`. +/// Otherwise, it works like regular Ethereum implementation, i.e. uses [`TransactionInfo`]. +pub struct OpTxInfoMapper { + provider: Provider, +} + +impl Clone for OpTxInfoMapper { + fn clone(&self) -> Self { + Self { provider: self.provider.clone() } + } +} + +impl Debug for OpTxInfoMapper { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("OpTxInfoMapper").finish() + } +} + +impl OpTxInfoMapper { + /// Creates [`OpTxInfoMapper`] that uses [`ReceiptProvider`] borrowed from given `eth_api`. 
+ pub const fn new(provider: Provider) -> Self { + Self { provider } + } +} + +impl TxInfoMapper for OpTxInfoMapper +where + T: OpTransaction + SignedTransaction, + Provider: ReceiptProvider, +{ + type Out = OpTransactionInfo; + type Err = ProviderError; + + fn try_map(&self, tx: &T, tx_info: TransactionInfo) -> Result { + let deposit_meta = if tx.is_deposit() { + self.provider.receipt_by_hash(*tx.tx_hash())?.and_then(|receipt| { + receipt.as_deposit_receipt().map(|receipt| OpDepositInfo { + deposit_receipt_version: receipt.deposit_receipt_version, + deposit_nonce: receipt.deposit_nonce, + }) + }) + } else { + None + } + .unwrap_or_default(); + + Ok(OpTransactionInfo::new(tx_info, deposit_meta)) + } +} diff --git a/op-reth/crates/rpc/src/historical.rs b/rust/op-reth/crates/rpc/src/historical.rs similarity index 99% rename from op-reth/crates/rpc/src/historical.rs rename to rust/op-reth/crates/rpc/src/historical.rs index 6037da4fe71..5c9d26f96f3 100644 --- a/op-reth/crates/rpc/src/historical.rs +++ b/rust/op-reth/crates/rpc/src/historical.rs @@ -3,8 +3,9 @@ use crate::sequencer::Error; use alloy_eips::BlockId; use alloy_json_rpc::{RpcRecv, RpcSend}; -use alloy_primitives::{BlockNumber, B256}; +use alloy_primitives::{B256, BlockNumber}; use alloy_rpc_client::RpcClient; +use alloy_transport_http::reqwest; use jsonrpsee::BatchResponseBuilder; use jsonrpsee_core::{ middleware::{Batch, BatchEntry, Notification, RpcServiceT}, @@ -144,7 +145,7 @@ where Box::pin(async move { // Check if request should be forwarded to historical endpoint if let Some(response) = historical.maybe_forward_request(&req).await { - return response + return response; } // Handle the request with the inner service @@ -249,7 +250,7 @@ where /// the response if it was forwarded. 
async fn maybe_forward_request(&self, req: &Request<'_>) -> Option { if self.should_forward_request(req) { - return self.forward_to_historical(req).await + return self.forward_to_historical(req).await; } None } diff --git a/rust/op-reth/crates/rpc/src/lib.rs b/rust/op-reth/crates/rpc/src/lib.rs new file mode 100644 index 00000000000..d1f8e8dbdd0 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/lib.rs @@ -0,0 +1,26 @@ +//! OP-Reth RPC support. + +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![cfg_attr(not(test), warn(unused_crate_dependencies))] +#![cfg_attr(docsrs, feature(doc_cfg))] + +pub mod engine; +pub mod error; +pub mod eth; +pub mod historical; +pub mod metrics; +pub mod miner; +pub mod sequencer; +pub mod witness; + +#[cfg(feature = "client")] +pub use engine::OpEngineApiClient; +pub use engine::{OP_ENGINE_CAPABILITIES, OpEngineApi, OpEngineApiServer}; +pub use error::{OpEthApiError, OpInvalidTransactionError, SequencerClientError}; +pub use eth::{OpEthApi, OpEthApiBuilder, OpReceiptBuilder}; +pub use metrics::SequencerMetrics; +pub use sequencer::SequencerClient; diff --git a/op-reth/crates/rpc/src/metrics.rs b/rust/op-reth/crates/rpc/src/metrics.rs similarity index 100% rename from op-reth/crates/rpc/src/metrics.rs rename to rust/op-reth/crates/rpc/src/metrics.rs diff --git a/op-reth/crates/rpc/src/miner.rs b/rust/op-reth/crates/rpc/src/miner.rs similarity index 96% rename from op-reth/crates/rpc/src/miner.rs rename to rust/op-reth/crates/rpc/src/miner.rs index f8780f37e82..bfb77fd344e 100644 --- a/op-reth/crates/rpc/src/miner.rs +++ b/rust/op-reth/crates/rpc/src/miner.rs @@ -1,9 +1,9 @@ //! Miner API extension for OP. 
use alloy_primitives::U64; -use jsonrpsee_core::{async_trait, RpcResult}; +use jsonrpsee_core::{RpcResult, async_trait}; pub use op_alloy_rpc_jsonrpsee::traits::MinerApiExtServer; -use reth_metrics::{metrics::Gauge, Metrics}; +use reth_metrics::{Metrics, metrics::Gauge}; use reth_optimism_payload_builder::config::{OpDAConfig, OpGasLimitConfig}; use tracing::debug; diff --git a/rust/op-reth/crates/rpc/src/sequencer.rs b/rust/op-reth/crates/rpc/src/sequencer.rs new file mode 100644 index 00000000000..11bc7477322 --- /dev/null +++ b/rust/op-reth/crates/rpc/src/sequencer.rs @@ -0,0 +1,282 @@ +//! Helpers for optimism specific RPC implementations. + +use crate::{SequencerClientError, SequencerMetrics}; +use alloy_json_rpc::{RpcRecv, RpcSend}; +use alloy_primitives::{B256, hex}; +use alloy_rpc_client::{BuiltInConnectionString, ClientBuilder, RpcClient as Client}; +use alloy_rpc_types_eth::erc4337::TransactionConditional; +use alloy_transport_http::{Http, reqwest}; +use std::{str::FromStr, sync::Arc, time::Instant}; +use thiserror::Error; +use tracing::warn; + +/// Sequencer client error +#[derive(Error, Debug)] +pub enum Error { + /// Invalid scheme + #[error("Invalid scheme of sequencer url: {0}")] + InvalidScheme(String), + /// Invalid header or value provided. + #[error("Invalid header: {0}")] + InvalidHeader(String), + /// Invalid url + #[error("Invalid sequencer url: {0}")] + InvalidUrl(String), + /// Establishing a connection to the sequencer endpoint resulted in an error. 
+ #[error("Failed to connect to sequencer: {0}")] + TransportError( + #[from] + #[source] + alloy_transport::TransportError, + ), + /// Reqwest failed to init client + #[error("Failed to init reqwest client for sequencer: {0}")] + ReqwestError( + #[from] + #[source] + reqwest::Error, + ), +} + +/// A client to interact with a Sequencer +#[derive(Debug, Clone)] +pub struct SequencerClient { + inner: Arc, +} + +impl SequencerClientInner { + /// Creates a new instance with the given endpoint and client. + pub(crate) fn new(sequencer_endpoint: String, client: Client) -> Self { + let metrics = SequencerMetrics::default(); + Self { sequencer_endpoint, client, metrics } + } +} + +impl SequencerClient { + /// Creates a new [`SequencerClient`] for the given URL. + /// + /// If the URL is a websocket endpoint we connect a websocket instance. + pub async fn new(sequencer_endpoint: impl Into) -> Result { + Self::new_with_headers(sequencer_endpoint, Default::default()).await + } + + /// Creates a new `SequencerClient` for the given URL with the given headers + /// + /// This expects headers in the form: `header=value` + pub async fn new_with_headers( + sequencer_endpoint: impl Into, + headers: Vec, + ) -> Result { + let sequencer_endpoint = sequencer_endpoint.into(); + let endpoint = BuiltInConnectionString::from_str(&sequencer_endpoint)?; + if let BuiltInConnectionString::Http(url) = endpoint { + let mut builder = reqwest::Client::builder() + // we force use tls to prevent native issues + .use_rustls_tls(); + + if !headers.is_empty() { + let mut header_map = reqwest::header::HeaderMap::new(); + for header in headers { + if let Some((key, value)) = header.split_once('=') { + header_map.insert( + key.trim() + .parse::() + .map_err(|err| Error::InvalidHeader(err.to_string()))?, + value + .trim() + .parse::() + .map_err(|err| Error::InvalidHeader(err.to_string()))?, + ); + } + } + builder = builder.default_headers(header_map); + } + + let client = builder.build()?; + 
Self::with_http_client(url, client) + } else { + let client = ClientBuilder::default().connect_with(endpoint).await?; + let inner = SequencerClientInner::new(sequencer_endpoint, client); + Ok(Self { inner: Arc::new(inner) }) + } + } + + /// Creates a new [`SequencerClient`] with http transport with the given http client. + pub fn with_http_client( + sequencer_endpoint: impl Into, + client: reqwest::Client, + ) -> Result { + let sequencer_endpoint: String = sequencer_endpoint.into(); + let url = sequencer_endpoint + .parse() + .map_err(|_| Error::InvalidUrl(sequencer_endpoint.clone()))?; + + let http_client = Http::with_client(client, url); + let is_local = http_client.guess_local(); + let client = ClientBuilder::default().transport(http_client, is_local); + + let inner = SequencerClientInner::new(sequencer_endpoint, client); + Ok(Self { inner: Arc::new(inner) }) + } + + /// Returns the network of the client + pub fn endpoint(&self) -> &str { + &self.inner.sequencer_endpoint + } + + /// Returns the client + pub fn client(&self) -> &Client { + &self.inner.client + } + + /// Returns a reference to the [`SequencerMetrics`] for tracking client metrics. + fn metrics(&self) -> &SequencerMetrics { + &self.inner.metrics + } + + /// Sends a [`alloy_rpc_client::RpcCall`] request to the sequencer endpoint. + pub async fn request( + &self, + method: &str, + params: Params, + ) -> Result { + let resp = + self.client().request::(method.to_string(), params).await.inspect_err( + |err| { + warn!( + target: "rpc::sequencer", + %err, + "HTTP request to sequencer failed", + ); + }, + )?; + Ok(resp) + } + + /// Forwards a transaction to the sequencer endpoint. 
+ pub async fn forward_raw_transaction(&self, tx: &[u8]) -> Result { + let start = Instant::now(); + let rlp_hex = hex::encode_prefixed(tx); + let tx_hash = + self.request("eth_sendRawTransaction", (rlp_hex,)).await.inspect_err(|err| { + warn!( + target: "rpc::eth", + %err, + "Failed to forward transaction to sequencer", + ); + })?; + self.metrics().record_forward_latency(start.elapsed()); + Ok(tx_hash) + } + + /// Forwards a transaction conditional to the sequencer endpoint. + pub async fn forward_raw_transaction_conditional( + &self, + tx: &[u8], + condition: TransactionConditional, + ) -> Result { + let start = Instant::now(); + let rlp_hex = hex::encode_prefixed(tx); + let tx_hash = self + .request("eth_sendRawTransactionConditional", (rlp_hex, condition)) + .await + .inspect_err(|err| { + warn!( + target: "rpc::eth", + %err, + "Failed to forward transaction conditional for sequencer", + ); + })?; + self.metrics().record_forward_latency(start.elapsed()); + Ok(tx_hash) + } +} + +#[derive(Debug)] +struct SequencerClientInner { + /// The endpoint of the sequencer + sequencer_endpoint: String, + /// The client + client: Client, + // Metrics for tracking sequencer forwarding + metrics: SequencerMetrics, +} + +#[cfg(test)] +mod tests { + use super::*; + use alloy_primitives::U64; + + #[tokio::test] + async fn test_http_body_str() { + let client = SequencerClient::new("http://localhost:8545").await.unwrap(); + + let request = client + .client() + .make_request("eth_getBlockByNumber", (U64::from(10),)) + .serialize() + .unwrap() + .take_request(); + let body = request.get(); + + assert_eq!( + body, + r#"{"method":"eth_getBlockByNumber","params":["0xa"],"id":0,"jsonrpc":"2.0"}"# + ); + + let condition = TransactionConditional::default(); + + let request = client + .client() + .make_request( + "eth_sendRawTransactionConditional", + (format!("0x{}", hex::encode("abcd")), condition), + ) + .serialize() + .unwrap() + .take_request(); + let body = request.get(); + + 
assert_eq!( + body, + r#"{"method":"eth_sendRawTransactionConditional","params":["0x61626364",{"knownAccounts":{}}],"id":1,"jsonrpc":"2.0"}"# + ); + } + + #[tokio::test] + #[ignore = "Start if WS is reachable at ws://localhost:8546"] + async fn test_ws_body_str() { + let client = SequencerClient::new("ws://localhost:8546").await.unwrap(); + + let request = client + .client() + .make_request("eth_getBlockByNumber", (U64::from(10),)) + .serialize() + .unwrap() + .take_request(); + let body = request.get(); + + assert_eq!( + body, + r#"{"method":"eth_getBlockByNumber","params":["0xa"],"id":0,"jsonrpc":"2.0"}"# + ); + + let condition = TransactionConditional::default(); + + let request = client + .client() + .make_request( + "eth_sendRawTransactionConditional", + (format!("0x{}", hex::encode("abcd")), condition), + ) + .serialize() + .unwrap() + .take_request(); + let body = request.get(); + + assert_eq!( + body, + r#"{"method":"eth_sendRawTransactionConditional","params":["0x61626364",{"knownAccounts":{}}],"id":1,"jsonrpc":"2.0"}"# + ); + } +} diff --git a/op-reth/crates/rpc/src/witness.rs b/rust/op-reth/crates/rpc/src/witness.rs similarity index 96% rename from op-reth/crates/rpc/src/witness.rs rename to rust/op-reth/crates/rpc/src/witness.rs index 1858b4fd2f1..f1095b7d962 100644 --- a/op-reth/crates/rpc/src/witness.rs +++ b/rust/op-reth/crates/rpc/src/witness.rs @@ -2,7 +2,7 @@ use alloy_primitives::B256; use alloy_rpc_types_debug::ExecutionWitness; -use jsonrpsee_core::{async_trait, RpcResult}; +use jsonrpsee_core::{RpcResult, async_trait}; use reth_chainspec::ChainSpecProvider; use reth_evm::ConfigureEvm; use reth_node_api::{BuildNextEnv, NodePrimitives}; @@ -11,15 +11,15 @@ use reth_optimism_payload_builder::{OpAttributes, OpPayloadBuilder, OpPayloadPri use reth_optimism_txpool::OpPooledTx; use reth_primitives_traits::{SealedHeader, TxTy}; pub use reth_rpc_api::DebugExecutionWitnessApiServer; -use reth_rpc_server_types::{result::internal_rpc_err, ToRpcResult}; 
+use reth_rpc_server_types::{ToRpcResult, result::internal_rpc_err}; use reth_storage_api::{ - errors::{ProviderError, ProviderResult}, BlockReaderIdExt, NodePrimitivesProvider, StateProviderFactory, + errors::{ProviderError, ProviderResult}, }; use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; use std::{fmt::Debug, sync::Arc}; -use tokio::sync::{oneshot, Semaphore}; +use tokio::sync::{Semaphore, oneshot}; /// An extension to the `debug_` namespace of the RPC API. pub struct OpDebugWitnessApi { diff --git a/rust/op-reth/crates/storage/Cargo.toml b/rust/op-reth/crates/storage/Cargo.toml new file mode 100644 index 00000000000..94529f8e249 --- /dev/null +++ b/rust/op-reth/crates/storage/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "reth-optimism-storage" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" + +[lints] +workspace = true + +[dependencies] +# reth +reth-optimism-primitives = { workspace = true, features = ["serde", "reth-codec"] } +reth-storage-api = { workspace = true, features = ["db-api"] } + +# ethereum +alloy-consensus.workspace = true + +[dev-dependencies] +reth-codecs = { workspace = true, features = ["test-utils"] } +reth-prune-types.workspace = true +reth-stages-types.workspace = true + +[features] +default = ["std"] +std = [ + "reth-storage-api/std", + "reth-prune-types/std", + "reth-stages-types/std", + "alloy-consensus/std", + "reth-optimism-primitives/std", +] diff --git a/op-reth/crates/storage/src/chain.rs b/rust/op-reth/crates/storage/src/chain.rs similarity index 100% rename from op-reth/crates/storage/src/chain.rs rename to rust/op-reth/crates/storage/src/chain.rs diff --git a/op-reth/crates/storage/src/lib.rs b/rust/op-reth/crates/storage/src/lib.rs similarity index 100% rename from op-reth/crates/storage/src/lib.rs rename to rust/op-reth/crates/storage/src/lib.rs 
diff --git a/rust/op-reth/crates/txpool/Cargo.toml b/rust/op-reth/crates/txpool/Cargo.toml new file mode 100644 index 00000000000..9636d56efb2 --- /dev/null +++ b/rust/op-reth/crates/txpool/Cargo.toml @@ -0,0 +1,59 @@ +[package] +name = "reth-optimism-txpool" +version = "1.10.2" +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage = "https://paradigmxyz.github.io/reth" +repository = "https://github.com/paradigmxyz/reth" +description = "OP-Reth Transaction Pool" + +[lints] +workspace = true + +[dependencies] +# ethereum +alloy-consensus.workspace = true +alloy-eips.workspace = true +alloy-primitives.workspace = true +alloy-rpc-types-eth.workspace = true +alloy-rpc-client = { workspace = true, features = ["reqwest", "default"] } +alloy-json-rpc.workspace = true +alloy-serde.workspace = true + +# reth +reth-chainspec.workspace = true +reth-evm.workspace = true +reth-primitives-traits.workspace = true +reth-chain-state.workspace = true +reth-storage-api.workspace = true +reth-transaction-pool.workspace = true + +# revm +op-revm.workspace = true + +# optimism +op-alloy-consensus.workspace = true +op-alloy-flz.workspace = true +op-alloy-rpc-types.workspace = true +reth-optimism-evm.workspace = true +reth-optimism-forks.workspace = true +reth-optimism-primitives.workspace = true + +# metrics +reth-metrics.workspace = true +metrics.workspace = true + +# misc +c-kzg.workspace = true +derive_more.workspace = true +futures-util.workspace = true +parking_lot.workspace = true +serde.workspace = true +tracing.workspace = true +thiserror.workspace = true +tokio = { workspace = true, features = ["time"] } + +[dev-dependencies] +reth-optimism-chainspec.workspace = true +reth-provider = { workspace = true, features = ["test-utils"] } diff --git a/op-reth/crates/txpool/src/conditional.rs b/rust/op-reth/crates/txpool/src/conditional.rs similarity index 100% rename from op-reth/crates/txpool/src/conditional.rs rename to 
rust/op-reth/crates/txpool/src/conditional.rs diff --git a/op-reth/crates/txpool/src/error.rs b/rust/op-reth/crates/txpool/src/error.rs similarity index 100% rename from op-reth/crates/txpool/src/error.rs rename to rust/op-reth/crates/txpool/src/error.rs diff --git a/op-reth/crates/txpool/src/estimated_da_size.rs b/rust/op-reth/crates/txpool/src/estimated_da_size.rs similarity index 100% rename from op-reth/crates/txpool/src/estimated_da_size.rs rename to rust/op-reth/crates/txpool/src/estimated_da_size.rs diff --git a/op-reth/crates/txpool/src/interop.rs b/rust/op-reth/crates/txpool/src/interop.rs similarity index 100% rename from op-reth/crates/txpool/src/interop.rs rename to rust/op-reth/crates/txpool/src/interop.rs diff --git a/op-reth/crates/txpool/src/lib.rs b/rust/op-reth/crates/txpool/src/lib.rs similarity index 100% rename from op-reth/crates/txpool/src/lib.rs rename to rust/op-reth/crates/txpool/src/lib.rs diff --git a/op-reth/crates/txpool/src/maintain.rs b/rust/op-reth/crates/txpool/src/maintain.rs similarity index 95% rename from op-reth/crates/txpool/src/maintain.rs rename to rust/op-reth/crates/txpool/src/maintain.rs index c071bf708e4..8ac5bf4842d 100644 --- a/op-reth/crates/txpool/src/maintain.rs +++ b/rust/op-reth/crates/txpool/src/maintain.rs @@ -9,16 +9,16 @@ const MAX_SUPERVISOR_QUERIES: usize = 10; use crate::{ conditional::MaybeConditionalTransaction, - interop::{is_stale_interop, is_valid_interop, MaybeInteropTransaction}, + interop::{MaybeInteropTransaction, is_stale_interop, is_valid_interop}, supervisor::SupervisorClient, }; -use alloy_consensus::{conditional::BlockConditionalAttributes, BlockHeader}; -use futures_util::{future::BoxFuture, FutureExt, Stream, StreamExt}; +use alloy_consensus::{BlockHeader, conditional::BlockConditionalAttributes}; +use futures_util::{FutureExt, Stream, StreamExt, future::BoxFuture}; use metrics::{Gauge, Histogram}; use reth_chain_state::CanonStateNotification; -use reth_metrics::{metrics::Counter, Metrics}; 
+use reth_metrics::{Metrics, metrics::Counter}; use reth_primitives_traits::NodePrimitives; -use reth_transaction_pool::{error::PoolTransactionError, PoolTransaction, TransactionPool}; +use reth_transaction_pool::{PoolTransaction, TransactionPool, error::PoolTransactionError}; use std::time::Instant; use tracing::warn; diff --git a/rust/op-reth/crates/txpool/src/supervisor/access_list.rs b/rust/op-reth/crates/txpool/src/supervisor/access_list.rs new file mode 100644 index 00000000000..114bb362af2 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/supervisor/access_list.rs @@ -0,0 +1,41 @@ +// Source: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +// Copyright © 2023 kona contributors Copyright © 2024 Optimism +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files (the “Software”), to deal in the Software without restriction, +// including without limitation the rights to use, copy, modify, merge, publish, distribute, +// sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all copies or +// substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT +// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +use crate::supervisor::CROSS_L2_INBOX_ADDRESS; +use alloy_eips::eip2930::AccessListItem; +use alloy_primitives::B256; + +/// Parses [`AccessListItem`]s to inbox entries. 
+/// +/// Return flattened iterator with all inbox entries. +pub fn parse_access_list_items_to_inbox_entries<'a>( + access_list_items: impl Iterator, +) -> impl Iterator { + access_list_items.filter_map(parse_access_list_item_to_inbox_entries).flatten() +} + +/// Parse [`AccessListItem`] to inbox entries, if any. +/// Max 3 inbox entries can exist per [`AccessListItem`] that points to [`CROSS_L2_INBOX_ADDRESS`]. +/// +/// Returns `Vec::new()` if [`AccessListItem`] address doesn't point to [`CROSS_L2_INBOX_ADDRESS`]. +// Access-list spec: +fn parse_access_list_item_to_inbox_entries( + access_list_item: &AccessListItem, +) -> Option> { + (access_list_item.address == CROSS_L2_INBOX_ADDRESS) + .then(|| access_list_item.storage_keys.iter()) +} diff --git a/rust/op-reth/crates/txpool/src/supervisor/client.rs b/rust/op-reth/crates/txpool/src/supervisor/client.rs new file mode 100644 index 00000000000..e5cd8f42dcf --- /dev/null +++ b/rust/op-reth/crates/txpool/src/supervisor/client.rs @@ -0,0 +1,282 @@ +//! 
This is our custom implementation of validator struct + +use crate::{ + InvalidCrossTx, + supervisor::{ + ExecutingDescriptor, InteropTxValidatorError, metrics::SupervisorMetrics, + parse_access_list_items_to_inbox_entries, + }, +}; +use alloy_consensus::Transaction; +use alloy_eips::eip2930::AccessList; +use alloy_primitives::{B256, TxHash}; +use alloy_rpc_client::ReqwestClient; +use futures_util::{ + Stream, + future::BoxFuture, + stream::{self, StreamExt}, +}; +use op_alloy_consensus::interop::SafetyLevel; +use reth_transaction_pool::PoolTransaction; +use std::{ + borrow::Cow, + future::IntoFuture, + sync::Arc, + time::{Duration, Instant}, +}; +use tracing::trace; + +/// Supervisor hosted by op-labs +// TODO: This should be changed to actual supervisor url +pub const DEFAULT_SUPERVISOR_URL: &str = "http://localhost:1337/"; + +/// The default request timeout to use +pub const DEFAULT_REQUEST_TIMEOUT: Duration = Duration::from_millis(100); + +/// Implementation of the supervisor trait for the interop. +#[derive(Debug, Clone)] +pub struct SupervisorClient { + /// Stores type's data. + inner: Arc, +} + +impl SupervisorClient { + /// Returns a new [`SupervisorClientBuilder`]. + pub fn builder(supervisor_endpoint: impl Into) -> SupervisorClientBuilder { + SupervisorClientBuilder::new(supervisor_endpoint) + } + + /// Returns configured timeout. See [`SupervisorClientInner`]. + pub fn timeout(&self) -> Duration { + self.inner.timeout + } + + /// Returns configured minimum safety level. See [`SupervisorClient`]. + pub fn safety(&self) -> SafetyLevel { + self.inner.safety + } + + /// Executes a `supervisor_checkAccessList` with the configured safety level. 
+ pub fn check_access_list<'a>( + &self, + inbox_entries: &'a [B256], + executing_descriptor: ExecutingDescriptor, + ) -> CheckAccessListRequest<'a> { + CheckAccessListRequest { + client: self.inner.client.clone(), + inbox_entries: Cow::Borrowed(inbox_entries), + executing_descriptor, + timeout: self.inner.timeout, + safety: self.inner.safety, + metrics: self.inner.metrics.clone(), + } + } + + /// Extracts commitment from access list entries, pointing to 0x420..022 and validates them + /// against supervisor. + /// + /// If commitment present pre-interop tx rejected. + /// + /// Returns: + /// None - if tx is not cross chain, + /// Some(Ok(()) - if tx is valid cross chain, + /// Some(Err(e)) - if tx is not valid or interop is not active + pub async fn is_valid_cross_tx( + &self, + access_list: Option<&AccessList>, + hash: &TxHash, + timestamp: u64, + timeout: Option, + is_interop_active: bool, + ) -> Option> { + // We don't need to check for deposit transaction in here, because they won't come from + // txpool + let access_list = access_list?; + let inbox_entries = parse_access_list_items_to_inbox_entries(access_list.iter()) + .copied() + .collect::>(); + if inbox_entries.is_empty() { + return None; + } + + // Interop check + if !is_interop_active { + // No cross chain tx allowed before interop + return Some(Err(InvalidCrossTx::CrossChainTxPreInterop)); + } + + if let Err(err) = self + .check_access_list( + inbox_entries.as_slice(), + ExecutingDescriptor::new(timestamp, timeout), + ) + .await + { + self.inner.metrics.increment_metrics_for_error(&err); + trace!(target: "txpool", hash=%hash, err=%err, "Cross chain transaction invalid"); + return Some(Err(InvalidCrossTx::ValidationError(err))); + } + Some(Ok(())) + } + + /// Creates a stream that revalidates interop transactions against the supervisor. + /// Returns + /// An implementation of `Stream` that is `Send`-able and tied to the lifetime `'a` of `self`. 
+ /// Each item yielded by the stream is a tuple `(TItem, Option>)`. + /// - The first element is the original `TItem` that was revalidated. + /// - The second element is the `Option>` describes the outcome + /// - `None`: Transaction was not identified as a cross-chain candidate by initial checks. + /// - `Some(Ok(()))`: Supervisor confirmed the transaction is valid. + /// - `Some(Err(InvalidCrossTx))`: Supervisor indicated the transaction is invalid. + pub fn revalidate_interop_txs_stream<'a, TItem, InputIter>( + &'a self, + txs_to_revalidate: InputIter, + current_timestamp: u64, + revalidation_window: u64, + max_concurrent_queries: usize, + ) -> impl Stream>)> + Send + 'a + where + InputIter: IntoIterator + Send + 'a, + InputIter::IntoIter: Send + 'a, + TItem: PoolTransaction + Transaction + Send, + { + stream::iter(txs_to_revalidate.into_iter().map(move |tx_item| { + let client_for_async_task = self.clone(); + + async move { + let validation_result = client_for_async_task + .is_valid_cross_tx( + tx_item.access_list(), + tx_item.hash(), + current_timestamp, + Some(revalidation_window), + true, + ) + .await; + + // return the original transaction paired with its validation result. + (tx_item, validation_result) + } + })) + .buffered(max_concurrent_queries) + } +} + +/// Holds supervisor data. Inner type of [`SupervisorClient`]. +#[derive(Debug, Clone)] +pub struct SupervisorClientInner { + client: ReqwestClient, + /// The default + safety: SafetyLevel, + /// The default request timeout + timeout: Duration, + /// Metrics for tracking supervisor operations + metrics: SupervisorMetrics, +} + +/// Builds [`SupervisorClient`]. +#[derive(Debug)] +pub struct SupervisorClientBuilder { + /// Supervisor server's socket. + endpoint: String, + /// Timeout for requests. + /// + /// NOTE: this timeout is only effective if it's shorter than the timeout configured for the + /// underlying [`ReqwestClient`]. 
+ timeout: Duration, + /// Minimum [`SafetyLevel`] of cross-chain transactions accepted by this client. + safety: SafetyLevel, +} + +impl SupervisorClientBuilder { + /// Creates a new builder. + pub fn new(supervisor_endpoint: impl Into) -> Self { + Self { + endpoint: supervisor_endpoint.into(), + timeout: DEFAULT_REQUEST_TIMEOUT, + safety: SafetyLevel::CrossUnsafe, + } + } + + /// Configures a custom timeout + pub const fn timeout(mut self, timeout: Duration) -> Self { + self.timeout = timeout; + self + } + + /// Sets minimum safety level to accept for cross chain transactions. + pub const fn minimum_safety(mut self, min_safety: SafetyLevel) -> Self { + self.safety = min_safety; + self + } + + /// Creates a new supervisor validator. + pub async fn build(self) -> SupervisorClient { + let Self { endpoint, timeout, safety } = self; + + let client = ReqwestClient::builder() + .connect(endpoint.as_str()) + .await + .expect("building supervisor client"); + + SupervisorClient { + inner: Arc::new(SupervisorClientInner { + client, + safety, + timeout, + metrics: SupervisorMetrics::default(), + }), + } + } +} + +/// A Request future that issues a `supervisor_checkAccessList` request. +#[derive(Debug, Clone)] +pub struct CheckAccessListRequest<'a> { + client: ReqwestClient, + inbox_entries: Cow<'a, [B256]>, + executing_descriptor: ExecutingDescriptor, + timeout: Duration, + safety: SafetyLevel, + metrics: SupervisorMetrics, +} + +impl<'a> CheckAccessListRequest<'a> { + /// Configures the timeout to use for the request if any. 
+ pub const fn with_timeout(mut self, timeout: Duration) -> Self { + self.timeout = timeout; + self + } + + /// Configures the [`SafetyLevel`] for this request + pub const fn with_safety(mut self, safety: SafetyLevel) -> Self { + self.safety = safety; + self + } +} + +impl<'a> IntoFuture for CheckAccessListRequest<'a> { + type Output = Result<(), InteropTxValidatorError>; + type IntoFuture = BoxFuture<'a, Self::Output>; + + fn into_future(self) -> Self::IntoFuture { + let Self { client, inbox_entries, executing_descriptor, timeout, safety, metrics } = self; + Box::pin(async move { + let start = Instant::now(); + + let result = tokio::time::timeout( + timeout, + client.request( + "supervisor_checkAccessList", + (inbox_entries, safety, executing_descriptor), + ), + ) + .await; + metrics.record_supervisor_query(start.elapsed()); + + result + .map_err(|_| InteropTxValidatorError::Timeout(timeout.as_secs()))? + .map_err(InteropTxValidatorError::from_json_rpc) + }) + } +} diff --git a/op-reth/crates/txpool/src/supervisor/errors.rs b/rust/op-reth/crates/txpool/src/supervisor/errors.rs similarity index 100% rename from op-reth/crates/txpool/src/supervisor/errors.rs rename to rust/op-reth/crates/txpool/src/supervisor/errors.rs diff --git a/rust/op-reth/crates/txpool/src/supervisor/message.rs b/rust/op-reth/crates/txpool/src/supervisor/message.rs new file mode 100644 index 00000000000..4a7461b49c8 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/supervisor/message.rs @@ -0,0 +1,37 @@ +//! Interop message primitives. 
+// Source: https://github.com/ethereum-optimism/optimism/tree/develop/rust/kona +// Copyright © 2023 kona contributors Copyright © 2024 Optimism +// +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and +// associated documentation files (the “Software”), to deal in the Software without restriction, +// including without limitation the rights to use, copy, modify, merge, publish, distribute, +// sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all copies or +// substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT +// NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +/// An [`ExecutingDescriptor`] is a part of the payload to `supervisor_checkAccessList` +/// Spec: +#[derive(Default, Debug, PartialEq, Eq, Clone, serde::Serialize, serde::Deserialize)] +pub struct ExecutingDescriptor { + /// The timestamp used to enforce timestamp [invariant](https://github.com/ethereum-optimism/specs/blob/main/specs/interop/derivation.md#invariants) + #[serde(with = "alloy_serde::quantity")] + timestamp: u64, + /// The timeout that requests verification to still hold at `timestamp+timeout` + /// (message expiry may drop previously valid messages). 
+ #[serde(skip_serializing_if = "Option::is_none", with = "alloy_serde::quantity::opt")] + timeout: Option, +} + +impl ExecutingDescriptor { + /// Create a new [`ExecutingDescriptor`] from the timestamp and timeout + pub const fn new(timestamp: u64, timeout: Option) -> Self { + Self { timestamp, timeout } + } +} diff --git a/rust/op-reth/crates/txpool/src/supervisor/metrics.rs b/rust/op-reth/crates/txpool/src/supervisor/metrics.rs new file mode 100644 index 00000000000..626733abc61 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/supervisor/metrics.rs @@ -0,0 +1,72 @@ +//! Optimism supervisor metrics + +use crate::supervisor::InteropTxValidatorError; +use op_alloy_rpc_types::SuperchainDAError; +use reth_metrics::{ + Metrics, + metrics::{Counter, Histogram}, +}; +use std::time::Duration; + +/// Optimism supervisor metrics +#[derive(Metrics, Clone)] +#[metrics(scope = "optimism_transaction_pool.supervisor")] +pub struct SupervisorMetrics { + /// How long it takes to query the supervisor in the Optimism transaction pool + pub(crate) supervisor_query_latency: Histogram, + + /// Counter for the number of times data was skipped + pub(crate) skipped_data_count: Counter, + /// Counter for the number of times an unknown chain was encountered + pub(crate) unknown_chain_count: Counter, + /// Counter for the number of times conflicting data was encountered + pub(crate) conflicting_data_count: Counter, + /// Counter for the number of times ineffective data was encountered + pub(crate) ineffective_data_count: Counter, + /// Counter for the number of times data was out of order + pub(crate) out_of_order_count: Counter, + /// Counter for the number of times data was awaiting replacement + pub(crate) awaiting_replacement_count: Counter, + /// Counter for the number of times data was out of scope + pub(crate) out_of_scope_count: Counter, + /// Counter for the number of times there was no parent for the first block + pub(crate) no_parent_for_first_block_count: Counter, + /// 
Counter for the number of times future data was encountered + pub(crate) future_data_count: Counter, + /// Counter for the number of times data was missed + pub(crate) missed_data_count: Counter, + /// Counter for the number of times data corruption was encountered + pub(crate) data_corruption_count: Counter, +} + +impl SupervisorMetrics { + /// Records the duration of supervisor queries + #[inline] + pub fn record_supervisor_query(&self, duration: Duration) { + self.supervisor_query_latency.record(duration.as_secs_f64()); + } + + /// Increments the metrics for the given error + pub fn increment_metrics_for_error(&self, error: &InteropTxValidatorError) { + if let InteropTxValidatorError::InvalidEntry(inner) = error { + match inner { + SuperchainDAError::SkippedData => self.skipped_data_count.increment(1), + SuperchainDAError::UnknownChain => self.unknown_chain_count.increment(1), + SuperchainDAError::ConflictingData => self.conflicting_data_count.increment(1), + SuperchainDAError::IneffectiveData => self.ineffective_data_count.increment(1), + SuperchainDAError::OutOfOrder => self.out_of_order_count.increment(1), + SuperchainDAError::AwaitingReplacement => { + self.awaiting_replacement_count.increment(1) + } + SuperchainDAError::OutOfScope => self.out_of_scope_count.increment(1), + SuperchainDAError::NoParentForFirstBlock => { + self.no_parent_for_first_block_count.increment(1) + } + SuperchainDAError::FutureData => self.future_data_count.increment(1), + SuperchainDAError::MissedData => self.missed_data_count.increment(1), + SuperchainDAError::DataCorruption => self.data_corruption_count.increment(1), + _ => {} + } + } + } +} diff --git a/rust/op-reth/crates/txpool/src/supervisor/mod.rs b/rust/op-reth/crates/txpool/src/supervisor/mod.rs new file mode 100644 index 00000000000..289b3cb7277 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/supervisor/mod.rs @@ -0,0 +1,12 @@ +//! 
Supervisor support for interop +mod access_list; +pub use access_list::parse_access_list_items_to_inbox_entries; +pub use op_alloy_consensus::interop::*; + +pub mod client; +pub use client::{DEFAULT_SUPERVISOR_URL, SupervisorClient, SupervisorClientBuilder}; +mod errors; +pub use errors::InteropTxValidatorError; +mod message; +pub use message::ExecutingDescriptor; +pub mod metrics; diff --git a/rust/op-reth/crates/txpool/src/transaction.rs b/rust/op-reth/crates/txpool/src/transaction.rs new file mode 100644 index 00000000000..1ed83561047 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/transaction.rs @@ -0,0 +1,362 @@ +use crate::{ + conditional::MaybeConditionalTransaction, estimated_da_size::DataAvailabilitySized, + interop::MaybeInteropTransaction, +}; +use alloy_consensus::{BlobTransactionValidationError, Typed2718, transaction::Recovered}; +use alloy_eips::{ + eip2718::{Encodable2718, WithEncoded}, + eip2930::AccessList, + eip7594::BlobTransactionSidecarVariant, + eip7702::SignedAuthorization, +}; +use alloy_primitives::{Address, B256, Bytes, TxHash, TxKind, U256}; +use alloy_rpc_types_eth::erc4337::TransactionConditional; +use c_kzg::KzgSettings; +use core::fmt::Debug; +use reth_optimism_primitives::OpTransactionSigned; +use reth_primitives_traits::{InMemorySize, SignedTransaction}; +use reth_transaction_pool::{ + EthBlobTransactionSidecar, EthPoolTransaction, EthPooledTransaction, PoolTransaction, +}; +use std::{ + borrow::Cow, + sync::{ + Arc, OnceLock, + atomic::{AtomicU64, Ordering}, + }, +}; + +/// Marker for no-interop transactions +pub(crate) const NO_INTEROP_TX: u64 = 0; + +/// Pool transaction for OP. +/// +/// This type wraps the actual transaction and caches values that are frequently used by the pool. 
+/// For payload building this lazily tracks values that are required during payload building: +/// - Estimated compressed size of this transaction +#[derive(Debug, Clone, derive_more::Deref)] +pub struct OpPooledTransaction< + Cons = OpTransactionSigned, + Pooled = op_alloy_consensus::OpPooledTransaction, +> { + #[deref] + inner: EthPooledTransaction, + /// The estimated size of this transaction, lazily computed. + estimated_tx_compressed_size: OnceLock, + /// The pooled transaction type. + _pd: core::marker::PhantomData, + + /// Optional conditional attached to this transaction. + conditional: Option>, + + /// Optional interop deadline attached to this transaction. + interop: Arc, + + /// Cached EIP-2718 encoded bytes of the transaction, lazily computed. + encoded_2718: OnceLock, +} + +impl OpPooledTransaction { + /// Create new instance of [Self]. + pub fn new(transaction: Recovered, encoded_length: usize) -> Self { + Self { + inner: EthPooledTransaction::new(transaction, encoded_length), + estimated_tx_compressed_size: Default::default(), + conditional: None, + interop: Arc::new(AtomicU64::new(NO_INTEROP_TX)), + _pd: core::marker::PhantomData, + encoded_2718: Default::default(), + } + } + + /// Returns the estimated compressed size of a transaction in bytes. + /// This value is computed based on the following formula: + /// `max(minTransactionSize, intercept + fastlzCoef*fastlzSize) / 1e6` + /// Uses cached EIP-2718 encoded bytes to avoid recomputing the encoding for each estimation. + pub fn estimated_compressed_size(&self) -> u64 { + *self + .estimated_tx_compressed_size + .get_or_init(|| op_alloy_flz::tx_estimated_size_fjord_bytes(self.encoded_2718())) + } + + /// Returns lazily computed EIP-2718 encoded bytes of the transaction. + pub fn encoded_2718(&self) -> &Bytes { + self.encoded_2718.get_or_init(|| self.inner.transaction().encoded_2718().into()) + } + + /// Conditional setter. 
+ pub fn with_conditional(mut self, conditional: TransactionConditional) -> Self { + self.conditional = Some(Box::new(conditional)); + self + } +} + +impl MaybeConditionalTransaction for OpPooledTransaction { + fn set_conditional(&mut self, conditional: TransactionConditional) { + self.conditional = Some(Box::new(conditional)) + } + + fn conditional(&self) -> Option<&TransactionConditional> { + self.conditional.as_deref() + } +} + +impl MaybeInteropTransaction for OpPooledTransaction { + fn set_interop_deadline(&self, deadline: u64) { + self.interop.store(deadline, Ordering::Relaxed); + } + + fn interop_deadline(&self) -> Option { + let interop = self.interop.load(Ordering::Relaxed); + if interop > NO_INTEROP_TX { + return Some(interop); + } + None + } +} + +impl DataAvailabilitySized for OpPooledTransaction { + fn estimated_da_size(&self) -> u64 { + self.estimated_compressed_size() + } +} + +impl PoolTransaction for OpPooledTransaction +where + Cons: SignedTransaction + From, + Pooled: SignedTransaction + TryFrom, +{ + type TryFromConsensusError = >::Error; + type Consensus = Cons; + type Pooled = Pooled; + + fn clone_into_consensus(&self) -> Recovered { + self.inner.transaction().clone() + } + + fn into_consensus(self) -> Recovered { + self.inner.transaction + } + + fn into_consensus_with2718(self) -> WithEncoded> { + let encoding = self.encoded_2718().clone(); + self.inner.transaction.into_encoded_with(encoding) + } + + fn from_pooled(tx: Recovered) -> Self { + let encoded_len = tx.encode_2718_len(); + Self::new(tx.convert(), encoded_len) + } + + fn hash(&self) -> &TxHash { + self.inner.transaction.tx_hash() + } + + fn sender(&self) -> Address { + self.inner.transaction.signer() + } + + fn sender_ref(&self) -> &Address { + self.inner.transaction.signer_ref() + } + + fn cost(&self) -> &U256 { + &self.inner.cost + } + + fn encoded_length(&self) -> usize { + self.inner.encoded_length + } +} + +impl Typed2718 for OpPooledTransaction { + fn ty(&self) -> u8 { + 
self.inner.ty() + } +} + +impl InMemorySize for OpPooledTransaction { + fn size(&self) -> usize { + self.inner.size() + } +} + +impl alloy_consensus::Transaction for OpPooledTransaction +where + Cons: alloy_consensus::Transaction, + Pooled: Debug + Send + Sync + 'static, +{ + fn chain_id(&self) -> Option { + self.inner.chain_id() + } + + fn nonce(&self) -> u64 { + self.inner.nonce() + } + + fn gas_limit(&self) -> u64 { + self.inner.gas_limit() + } + + fn gas_price(&self) -> Option { + self.inner.gas_price() + } + + fn max_fee_per_gas(&self) -> u128 { + self.inner.max_fee_per_gas() + } + + fn max_priority_fee_per_gas(&self) -> Option { + self.inner.max_priority_fee_per_gas() + } + + fn max_fee_per_blob_gas(&self) -> Option { + self.inner.max_fee_per_blob_gas() + } + + fn priority_fee_or_price(&self) -> u128 { + self.inner.priority_fee_or_price() + } + + fn effective_gas_price(&self, base_fee: Option) -> u128 { + self.inner.effective_gas_price(base_fee) + } + + fn is_dynamic_fee(&self) -> bool { + self.inner.is_dynamic_fee() + } + + fn kind(&self) -> TxKind { + self.inner.kind() + } + + fn is_create(&self) -> bool { + self.inner.is_create() + } + + fn value(&self) -> U256 { + self.inner.value() + } + + fn input(&self) -> &Bytes { + self.inner.input() + } + + fn access_list(&self) -> Option<&AccessList> { + self.inner.access_list() + } + + fn blob_versioned_hashes(&self) -> Option<&[B256]> { + self.inner.blob_versioned_hashes() + } + + fn authorization_list(&self) -> Option<&[SignedAuthorization]> { + self.inner.authorization_list() + } +} + +impl EthPoolTransaction for OpPooledTransaction +where + Cons: SignedTransaction + From, + Pooled: SignedTransaction + TryFrom, + >::Error: core::error::Error, +{ + fn take_blob(&mut self) -> EthBlobTransactionSidecar { + EthBlobTransactionSidecar::None + } + + fn try_into_pooled_eip4844( + self, + _sidecar: Arc, + ) -> Option> { + None + } + + fn try_from_eip4844( + _tx: Recovered, + _sidecar: BlobTransactionSidecarVariant, + ) 
-> Option { + None + } + + fn validate_blob( + &self, + _sidecar: &BlobTransactionSidecarVariant, + _settings: &KzgSettings, + ) -> Result<(), BlobTransactionValidationError> { + Err(BlobTransactionValidationError::NotBlobTransaction(self.ty())) + } +} + +/// Helper trait to provide payload builder with access to conditionals and encoded bytes of +/// transaction. +pub trait OpPooledTx: + MaybeConditionalTransaction + MaybeInteropTransaction + PoolTransaction + DataAvailabilitySized +{ + /// Returns the EIP-2718 encoded bytes of the transaction. + fn encoded_2718(&self) -> Cow<'_, Bytes>; +} + +impl OpPooledTx for OpPooledTransaction +where + Cons: SignedTransaction + From, + Pooled: SignedTransaction + TryFrom, + >::Error: core::error::Error, +{ + fn encoded_2718(&self) -> Cow<'_, Bytes> { + Cow::Borrowed(self.encoded_2718()) + } +} + +#[cfg(test)] +mod tests { + use crate::{OpPooledTransaction, OpTransactionValidator}; + use alloy_consensus::transaction::Recovered; + use alloy_eips::eip2718::Encodable2718; + use alloy_primitives::{TxKind, U256}; + use op_alloy_consensus::TxDeposit; + use reth_optimism_chainspec::OP_MAINNET; + use reth_optimism_evm::OpEvmConfig; + use reth_optimism_primitives::{OpPrimitives, OpTransactionSigned}; + use reth_provider::test_utils::MockEthProvider; + use reth_transaction_pool::{ + TransactionOrigin, TransactionValidationOutcome, blobstore::InMemoryBlobStore, + validate::EthTransactionValidatorBuilder, + }; + #[tokio::test] + async fn validate_optimism_transaction() { + let client = MockEthProvider::::new() + .with_chain_spec(OP_MAINNET.clone()) + .with_genesis_block(); + let evm_config = OpEvmConfig::optimism(OP_MAINNET.clone()); + let validator = EthTransactionValidatorBuilder::new(client, evm_config) + .no_shanghai() + .no_cancun() + .build(InMemoryBlobStore::default()); + let validator = OpTransactionValidator::new(validator); + + let origin = TransactionOrigin::External; + let signer = Default::default(); + let deposit_tx = 
TxDeposit { + source_hash: Default::default(), + from: signer, + to: TxKind::Create, + mint: 0, + value: U256::ZERO, + gas_limit: 0, + is_system_transaction: false, + input: Default::default(), + }; + let signed_tx: OpTransactionSigned = deposit_tx.into(); + let signed_recovered = Recovered::new_unchecked(signed_tx, signer); + let len = signed_recovered.encode_2718_len(); + let pooled_tx: OpPooledTransaction = OpPooledTransaction::new(signed_recovered, len); + let outcome = validator.validate_one(origin, pooled_tx).await; + + let err = match outcome { + TransactionValidationOutcome::Invalid(_, err) => err, + _ => panic!("Expected invalid transaction"), + }; + assert_eq!(err.to_string(), "transaction type not supported"); + } +} diff --git a/rust/op-reth/crates/txpool/src/validator.rs b/rust/op-reth/crates/txpool/src/validator.rs new file mode 100644 index 00000000000..cb48ae71d16 --- /dev/null +++ b/rust/op-reth/crates/txpool/src/validator.rs @@ -0,0 +1,354 @@ +use crate::{InvalidCrossTx, OpPooledTx, supervisor::SupervisorClient}; +use alloy_consensus::{BlockHeader, Transaction}; +use op_revm::L1BlockInfo; +use parking_lot::RwLock; +use reth_chainspec::ChainSpecProvider; +use reth_evm::ConfigureEvm; +use reth_optimism_evm::RethL1BlockInfo; +use reth_optimism_forks::OpHardforks; +use reth_primitives_traits::{ + Block, BlockBody, BlockTy, GotExpected, SealedBlock, + transaction::error::InvalidTransactionError, +}; +use reth_storage_api::{AccountInfoReader, BlockReaderIdExt, StateProviderFactory}; +use reth_transaction_pool::{ + EthPoolTransaction, EthTransactionValidator, TransactionOrigin, TransactionValidationOutcome, + TransactionValidator, error::InvalidPoolTransactionError, +}; +use std::sync::{ + Arc, + atomic::{AtomicBool, AtomicU64, Ordering}, +}; + +/// The interval for which we check transaction against supervisor, 1 hour. +const TRANSACTION_VALIDITY_WINDOW_SECS: u64 = 3600; + +/// Tracks additional infos for the current block. 
+#[derive(Debug, Default)] +pub struct OpL1BlockInfo { + /// The current L1 block info. + l1_block_info: RwLock, + /// Current block timestamp. + timestamp: AtomicU64, +} + +impl OpL1BlockInfo { + /// Returns the most recent timestamp + pub fn timestamp(&self) -> u64 { + self.timestamp.load(Ordering::Relaxed) + } +} + +/// Validator for Optimism transactions. +#[derive(Debug, Clone)] +pub struct OpTransactionValidator { + /// The type that performs the actual validation. + inner: Arc>, + /// Additional block info required for validation. + block_info: Arc, + /// If true, ensure that the transaction's sender has enough balance to cover the L1 gas fee + /// derived from the tracked L1 block info that is extracted from the first transaction in the + /// L2 block. + require_l1_data_gas_fee: bool, + /// Client used to check transaction validity with op-supervisor + supervisor_client: Option, + /// tracks activated forks relevant for transaction validation + fork_tracker: Arc, +} + +impl OpTransactionValidator { + /// Returns the configured chain spec + pub fn chain_spec(&self) -> Arc + where + Client: ChainSpecProvider, + { + self.inner.chain_spec() + } + + /// Returns the configured client + pub fn client(&self) -> &Client { + self.inner.client() + } + + /// Returns the current block timestamp. + fn block_timestamp(&self) -> u64 { + self.block_info.timestamp.load(Ordering::Relaxed) + } + + /// Whether to ensure that the transaction's sender has enough balance to also cover the L1 gas + /// fee. + pub fn require_l1_data_gas_fee(self, require_l1_data_gas_fee: bool) -> Self { + Self { require_l1_data_gas_fee, ..self } + } + + /// Returns whether this validator also requires the transaction's sender to have enough balance + /// to cover the L1 gas fee. 
+ pub const fn requires_l1_data_gas_fee(&self) -> bool { + self.require_l1_data_gas_fee + } +} + +impl OpTransactionValidator +where + Client: + ChainSpecProvider + StateProviderFactory + BlockReaderIdExt + Sync, + Tx: EthPoolTransaction + OpPooledTx, + Evm: ConfigureEvm, +{ + /// Create a new [`OpTransactionValidator`]. + pub fn new(inner: EthTransactionValidator) -> Self { + let this = Self::with_block_info(inner, OpL1BlockInfo::default()); + if let Ok(Some(block)) = + this.inner.client().block_by_number_or_tag(alloy_eips::BlockNumberOrTag::Latest) + { + // genesis block has no txs, so we can't extract L1 info, we set the block info to empty + // so that we will accept txs into the pool before the first block + if block.header().number() == 0 { + this.block_info.timestamp.store(block.header().timestamp(), Ordering::Relaxed); + } else { + this.update_l1_block_info(block.header(), block.body().transactions().first()); + } + } + + this + } + + /// Create a new [`OpTransactionValidator`] with the given [`OpL1BlockInfo`]. + pub fn with_block_info( + inner: EthTransactionValidator, + block_info: OpL1BlockInfo, + ) -> Self { + Self { + inner: Arc::new(inner), + block_info: Arc::new(block_info), + require_l1_data_gas_fee: true, + supervisor_client: None, + fork_tracker: Arc::new(OpForkTracker { interop: AtomicBool::from(false) }), + } + } + + /// Set the supervisor client and safety level + pub fn with_supervisor(mut self, supervisor_client: SupervisorClient) -> Self { + self.supervisor_client = Some(supervisor_client); + self + } + + /// Update the L1 block info for the given header and system transaction, if any. 
+ /// + /// Note: this supports optional system transaction, in case this is used in a dev setup + pub fn update_l1_block_info(&self, header: &H, tx: Option<&T>) + where + H: BlockHeader, + T: Transaction, + { + self.block_info.timestamp.store(header.timestamp(), Ordering::Relaxed); + + if let Some(Ok(l1_block_info)) = tx.map(reth_optimism_evm::extract_l1_info_from_tx) { + *self.block_info.l1_block_info.write() = l1_block_info; + } + + if self.chain_spec().is_interop_active_at_timestamp(header.timestamp()) { + self.fork_tracker.interop.store(true, Ordering::Relaxed); + } + } + + /// Validates a single transaction. + /// + /// See also [`TransactionValidator::validate_transaction`] + /// + /// This behaves the same as [`OpTransactionValidator::validate_one_with_state`], but creates + /// a new state provider internally. + pub async fn validate_one( + &self, + origin: TransactionOrigin, + transaction: Tx, + ) -> TransactionValidationOutcome { + self.validate_one_with_state(origin, transaction, &mut None).await + } + + /// Validates a single transaction with a provided state provider. + /// + /// This allows reusing the same state provider across multiple transaction validations. 
+ /// + /// See also [`TransactionValidator::validate_transaction`] + /// + /// This behaves the same as [`EthTransactionValidator::validate_one_with_state`], but in + /// addition applies OP validity checks: + /// - ensures tx is not eip4844 + /// - ensures cross chain transactions are valid wrt locally configured safety level + /// - ensures that the account has enough balance to cover the L1 gas cost + pub async fn validate_one_with_state( + &self, + origin: TransactionOrigin, + transaction: Tx, + state: &mut Option>, + ) -> TransactionValidationOutcome { + if transaction.is_eip4844() { + return TransactionValidationOutcome::Invalid( + transaction, + InvalidTransactionError::TxTypeNotSupported.into(), + ); + } + + // Interop cross tx validation + match self.is_valid_cross_tx(&transaction).await { + Some(Err(err)) => { + let err = match err { + InvalidCrossTx::CrossChainTxPreInterop => { + InvalidTransactionError::TxTypeNotSupported.into() + } + err => InvalidPoolTransactionError::Other(Box::new(err)), + }; + return TransactionValidationOutcome::Invalid(transaction, err); + } + Some(Ok(_)) => { + // valid interop tx + transaction.set_interop_deadline( + self.block_timestamp() + TRANSACTION_VALIDITY_WINDOW_SECS, + ); + } + _ => {} + } + + let outcome = self.inner.validate_one_with_state(origin, transaction, state); + + self.apply_op_checks(outcome) + } + + /// Performs the necessary opstack specific checks based on top of the regular eth outcome. 
+ fn apply_op_checks( + &self, + outcome: TransactionValidationOutcome, + ) -> TransactionValidationOutcome { + if !self.requires_l1_data_gas_fee() { + // no need to check L1 gas fee + return outcome; + } + // ensure that the account has enough balance to cover the L1 gas cost + if let TransactionValidationOutcome::Valid { + balance, + state_nonce, + transaction: valid_tx, + propagate, + bytecode_hash, + authorities, + } = outcome + { + let mut l1_block_info = self.block_info.l1_block_info.read().clone(); + + let encoded = valid_tx.transaction().encoded_2718(); + + let cost_addition = match l1_block_info.l1_tx_data_fee( + self.chain_spec(), + self.block_timestamp(), + &encoded, + false, + ) { + Ok(cost) => cost, + Err(err) => { + return TransactionValidationOutcome::Error(*valid_tx.hash(), Box::new(err)); + } + }; + let cost = valid_tx.transaction().cost().saturating_add(cost_addition); + + // Checks for max cost + if cost > balance { + return TransactionValidationOutcome::Invalid( + valid_tx.into_transaction(), + InvalidTransactionError::InsufficientFunds( + GotExpected { got: balance, expected: cost }.into(), + ) + .into(), + ); + } + + return TransactionValidationOutcome::Valid { + balance, + state_nonce, + transaction: valid_tx, + propagate, + bytecode_hash, + authorities, + }; + } + outcome + } + + /// Wrapper for is valid cross tx + pub async fn is_valid_cross_tx(&self, tx: &Tx) -> Option> { + // We don't need to check for deposit transaction in here, because they won't come from + // txpool + self.supervisor_client + .as_ref()? 
+ .is_valid_cross_tx( + tx.access_list(), + tx.hash(), + self.block_info.timestamp.load(Ordering::Relaxed), + Some(TRANSACTION_VALIDITY_WINDOW_SECS), + self.fork_tracker.is_interop_activated(), + ) + .await + } +} + +impl TransactionValidator for OpTransactionValidator +where + Client: + ChainSpecProvider + StateProviderFactory + BlockReaderIdExt + Sync, + Tx: EthPoolTransaction + OpPooledTx, + Evm: ConfigureEvm, +{ + type Transaction = Tx; + type Block = BlockTy; + + async fn validate_transaction( + &self, + origin: TransactionOrigin, + transaction: Self::Transaction, + ) -> TransactionValidationOutcome { + self.validate_one(origin, transaction).await + } + + async fn validate_transactions( + &self, + transactions: Vec<(TransactionOrigin, Self::Transaction)>, + ) -> Vec> { + futures_util::future::join_all( + transactions.into_iter().map(|(origin, tx)| self.validate_one(origin, tx)), + ) + .await + } + + async fn validate_transactions_with_origin( + &self, + origin: TransactionOrigin, + transactions: impl IntoIterator + Send, + ) -> Vec> { + futures_util::future::join_all( + transactions.into_iter().map(|tx| self.validate_one(origin, tx)), + ) + .await + } + + fn on_new_head_block(&self, new_tip_block: &SealedBlock) { + self.inner.on_new_head_block(new_tip_block); + self.update_l1_block_info( + new_tip_block.header(), + new_tip_block.body().transactions().first(), + ); + } +} + +/// Keeps track of whether certain forks are activated +#[derive(Debug)] +pub(crate) struct OpForkTracker { + /// Tracks if interop is activated at the block's timestamp. + interop: AtomicBool, +} + +impl OpForkTracker { + /// Returns `true` if Interop fork is activated. 
+ pub(crate) fn is_interop_activated(&self) -> bool { + self.interop.load(Ordering::Relaxed) + } +} diff --git a/op-reth/dprint.json b/rust/op-reth/dprint.json similarity index 100% rename from op-reth/dprint.json rename to rust/op-reth/dprint.json diff --git a/op-reth/examples/custom-node/Cargo.toml b/rust/op-reth/examples/custom-node/Cargo.toml similarity index 100% rename from op-reth/examples/custom-node/Cargo.toml rename to rust/op-reth/examples/custom-node/Cargo.toml diff --git a/op-reth/examples/custom-node/src/chainspec.rs b/rust/op-reth/examples/custom-node/src/chainspec.rs similarity index 100% rename from op-reth/examples/custom-node/src/chainspec.rs rename to rust/op-reth/examples/custom-node/src/chainspec.rs diff --git a/rust/op-reth/examples/custom-node/src/engine.rs b/rust/op-reth/examples/custom-node/src/engine.rs new file mode 100644 index 00000000000..821e890a2a9 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/engine.rs @@ -0,0 +1,335 @@ +use crate::{ + CustomNode, + chainspec::CustomChainSpec, + evm::CustomEvmConfig, + primitives::{CustomHeader, CustomNodePrimitives, CustomTransaction}, +}; +use alloy_eips::eip2718::WithEncoded; +use alloy_primitives::Bytes; +use op_alloy_rpc_types_engine::{OpExecutionData, OpExecutionPayload}; +use reth_engine_primitives::EngineApiValidator; +use reth_ethereum::{ + node::api::{ + AddOnsContext, BuiltPayload, BuiltPayloadExecutedBlock, EngineApiMessageVersion, + EngineObjectValidationError, ExecutionPayload, FullNodeComponents, NewPayloadError, + NodePrimitives, PayloadAttributes, PayloadBuilderAttributes, PayloadOrAttributes, + PayloadTypes, PayloadValidator, validate_version_specific_fields, + }, + primitives::SealedBlock, + storage::StateProviderFactory, + trie::{KeccakKeyHasher, KeyHasher}, +}; +use reth_node_builder::{InvalidPayloadAttributesError, rpc::PayloadValidatorBuilder}; +use reth_op::node::{ + OpBuiltPayload, OpEngineTypes, OpPayloadAttributes, OpPayloadBuilderAttributes, + 
engine::OpEngineValidator, payload::OpAttributes, +}; +use revm_primitives::U256; +use serde::{Deserialize, Serialize}; +use std::sync::Arc; +use thiserror::Error; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +pub struct CustomPayloadTypes; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CustomExecutionData { + pub inner: OpExecutionData, + pub extension: u64, +} + +impl ExecutionPayload for CustomExecutionData { + fn parent_hash(&self) -> revm_primitives::B256 { + self.inner.parent_hash() + } + + fn block_hash(&self) -> revm_primitives::B256 { + self.inner.block_hash() + } + + fn block_number(&self) -> u64 { + self.inner.block_number() + } + + fn withdrawals(&self) -> Option<&Vec> { + None + } + + fn block_access_list(&self) -> Option<&Bytes> { + None + } + + fn parent_beacon_block_root(&self) -> Option { + self.inner.parent_beacon_block_root() + } + + fn timestamp(&self) -> u64 { + self.inner.timestamp() + } + + fn gas_used(&self) -> u64 { + self.inner.gas_used() + } + + fn transaction_count(&self) -> usize { + self.inner.payload.as_v1().transactions.len() + } +} + +impl TryFrom<&reth_optimism_flashblocks::FlashBlockCompleteSequence> for CustomExecutionData { + type Error = &'static str; + + fn try_from( + sequence: &reth_optimism_flashblocks::FlashBlockCompleteSequence, + ) -> Result { + let inner = OpExecutionData::try_from(sequence)?; + Ok(Self { inner, extension: sequence.last().diff.gas_used }) + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct CustomPayloadAttributes { + #[serde(flatten)] + inner: OpPayloadAttributes, + extension: u64, +} + +impl PayloadAttributes for CustomPayloadAttributes { + fn timestamp(&self) -> u64 { + self.inner.timestamp() + } + + fn withdrawals(&self) -> Option<&Vec> { + self.inner.withdrawals() + } + + fn parent_beacon_block_root(&self) -> Option { + self.inner.parent_beacon_block_root() + } +} + +#[derive(Debug, Clone)] +pub struct CustomPayloadBuilderAttributes { + pub inner: 
OpPayloadBuilderAttributes, + pub extension: u64, +} + +impl PayloadBuilderAttributes for CustomPayloadBuilderAttributes { + type RpcPayloadAttributes = CustomPayloadAttributes; + type Error = alloy_rlp::Error; + + fn try_new( + parent: revm_primitives::B256, + rpc_payload_attributes: Self::RpcPayloadAttributes, + version: u8, + ) -> Result + where + Self: Sized, + { + let CustomPayloadAttributes { inner, extension } = rpc_payload_attributes; + + Ok(Self { inner: OpPayloadBuilderAttributes::try_new(parent, inner, version)?, extension }) + } + + fn payload_id(&self) -> alloy_rpc_types_engine::PayloadId { + self.inner.payload_id() + } + + fn parent(&self) -> revm_primitives::B256 { + self.inner.parent() + } + + fn timestamp(&self) -> u64 { + self.inner.timestamp() + } + + fn parent_beacon_block_root(&self) -> Option { + self.inner.parent_beacon_block_root() + } + + fn suggested_fee_recipient(&self) -> revm_primitives::Address { + self.inner.suggested_fee_recipient() + } + + fn prev_randao(&self) -> revm_primitives::B256 { + self.inner.prev_randao() + } + + fn withdrawals(&self) -> &alloy_eips::eip4895::Withdrawals { + self.inner.withdrawals() + } +} + +impl OpAttributes for CustomPayloadBuilderAttributes { + type Transaction = CustomTransaction; + + fn no_tx_pool(&self) -> bool { + self.inner.no_tx_pool + } + + fn sequencer_transactions(&self) -> &[WithEncoded] { + &self.inner.transactions + } +} + +#[derive(Debug, Clone)] +pub struct CustomBuiltPayload(pub OpBuiltPayload); + +impl BuiltPayload for CustomBuiltPayload { + type Primitives = CustomNodePrimitives; + + fn block(&self) -> &SealedBlock<::Block> { + self.0.block() + } + + fn fees(&self) -> U256 { + self.0.fees() + } + + fn executed_block(&self) -> Option> { + self.0.executed_block() + } + + fn requests(&self) -> Option { + self.0.requests() + } +} + +impl From + for alloy_consensus::Block<::SignedTx> +{ + fn from(value: CustomBuiltPayload) -> Self { + 
value.0.into_sealed_block().into_block().map_header(|header| header.inner) + } +} + +impl PayloadTypes for CustomPayloadTypes { + type ExecutionData = CustomExecutionData; + type BuiltPayload = OpBuiltPayload; + type PayloadAttributes = CustomPayloadAttributes; + type PayloadBuilderAttributes = CustomPayloadBuilderAttributes; + + fn block_to_payload( + block: SealedBlock< + <::Primitives as NodePrimitives>::Block, + >, + ) -> Self::ExecutionData { + let extension = block.header().extension; + let block_hash = block.hash(); + let block = block.into_block().map_header(|header| header.inner); + let (payload, sidecar) = OpExecutionPayload::from_block_unchecked(block_hash, &block); + CustomExecutionData { inner: OpExecutionData { payload, sidecar }, extension } + } +} + +/// Custom engine validator +#[derive(Debug, Clone)] +pub struct CustomEngineValidator

{ + inner: OpEngineValidator, +} + +impl

CustomEngineValidator

+where + P: Send + Sync + Unpin + 'static, +{ + /// Instantiates a new validator. + pub fn new(chain_spec: Arc, provider: P) -> Self { + Self { inner: OpEngineValidator::new::(chain_spec, provider) } + } + + /// Returns the chain spec used by the validator. + #[inline] + fn chain_spec(&self) -> &CustomChainSpec { + self.inner.chain_spec() + } +} + +impl

PayloadValidator for CustomEngineValidator

+where + P: StateProviderFactory + Send + Sync + Unpin + 'static, +{ + type Block = crate::primitives::block::Block; + + fn validate_payload_attributes_against_header( + &self, + _attr: &CustomPayloadAttributes, + _header: &::Header, + ) -> Result<(), InvalidPayloadAttributesError> { + // skip default timestamp validation + Ok(()) + } + + fn convert_payload_to_block( + &self, + payload: CustomExecutionData, + ) -> Result, NewPayloadError> { + let sealed_block = PayloadValidator::::convert_payload_to_block( + &self.inner, + payload.inner, + )?; + let (header, body) = sealed_block.split_sealed_header_body(); + let header = CustomHeader { inner: header.into_header(), extension: payload.extension }; + let body = body.map_ommers(|_| CustomHeader::default()); + Ok(SealedBlock::::from_parts_unhashed(header, body)) + } +} + +impl

EngineApiValidator for CustomEngineValidator

+where + P: StateProviderFactory + Send + Sync + Unpin + 'static, +{ + fn validate_version_specific_fields( + &self, + version: EngineApiMessageVersion, + payload_or_attrs: PayloadOrAttributes<'_, CustomExecutionData, CustomPayloadAttributes>, + ) -> Result<(), EngineObjectValidationError> { + validate_version_specific_fields(self.chain_spec(), version, payload_or_attrs) + } + + fn ensure_well_formed_attributes( + &self, + version: EngineApiMessageVersion, + attributes: &CustomPayloadAttributes, + ) -> Result<(), EngineObjectValidationError> { + validate_version_specific_fields( + self.chain_spec(), + version, + PayloadOrAttributes::::PayloadAttributes(attributes), + )?; + + // custom validation logic - ensure that the custom field is not zero + // if attributes.extension == 0 { + // return Err(EngineObjectValidationError::invalid_params( + // CustomError::CustomFieldIsNotZero, + // )) + // } + + Ok(()) + } +} + +/// Custom error type used in payload attributes validation +#[derive(Debug, Error)] +pub enum CustomError { + #[error("Custom field is not zero")] + CustomFieldIsNotZero, +} + +/// Custom engine validator builder +#[derive(Debug, Default, Clone, Copy)] +#[non_exhaustive] +pub struct CustomEngineValidatorBuilder; + +impl PayloadValidatorBuilder for CustomEngineValidatorBuilder +where + N: FullNodeComponents, +{ + type Validator = CustomEngineValidator; + + async fn build(self, ctx: &AddOnsContext<'_, N>) -> eyre::Result { + Ok(CustomEngineValidator::new::( + ctx.config.chain.clone(), + ctx.node.provider().clone(), + )) + } +} diff --git a/op-reth/examples/custom-node/src/engine_api.rs b/rust/op-reth/examples/custom-node/src/engine_api.rs similarity index 97% rename from op-reth/examples/custom-node/src/engine_api.rs rename to rust/op-reth/examples/custom-node/src/engine_api.rs index 1a947d8ec5d..0aec72ecd01 100644 --- a/op-reth/examples/custom-node/src/engine_api.rs +++ b/rust/op-reth/examples/custom-node/src/engine_api.rs @@ -1,13 +1,13 @@ use crate::{ + 
CustomNode, engine::{CustomExecutionData, CustomPayloadAttributes, CustomPayloadTypes}, primitives::CustomNodePrimitives, - CustomNode, }; use alloy_rpc_types_engine::{ ExecutionPayloadV3, ForkchoiceState, ForkchoiceUpdated, PayloadId, PayloadStatus, }; use async_trait::async_trait; -use jsonrpsee::{core::RpcResult, proc_macros::rpc, RpcModule}; +use jsonrpsee::{RpcModule, core::RpcResult, proc_macros::rpc}; use reth_ethereum::node::api::{ AddOnsContext, ConsensusEngineHandle, EngineApiMessageVersion, FullNodeComponents, }; @@ -55,7 +55,7 @@ pub trait CustomEngineApi { #[method(name = "getPayload")] async fn get_payload(&self, payload_id: PayloadId) - -> RpcResult; + -> RpcResult; } pub struct CustomEngineApi { diff --git a/op-reth/examples/custom-node/src/evm/alloy.rs b/rust/op-reth/examples/custom-node/src/evm/alloy.rs similarity index 93% rename from op-reth/examples/custom-node/src/evm/alloy.rs rename to rust/op-reth/examples/custom-node/src/evm/alloy.rs index d8df842cfc5..c500143cae9 100644 --- a/op-reth/examples/custom-node/src/evm/alloy.rs +++ b/rust/op-reth/examples/custom-node/src/evm/alloy.rs @@ -1,16 +1,16 @@ use crate::evm::{CustomTxEnv, PaymentTxEnv}; -use alloy_evm::{precompiles::PrecompilesMap, Database, Evm, EvmEnv, EvmFactory}; +use alloy_evm::{Database, Evm, EvmEnv, EvmFactory, precompiles::PrecompilesMap}; use alloy_op_evm::{OpEvm, OpEvmFactory}; use alloy_primitives::{Address, Bytes}; use op_revm::{ - precompiles::OpPrecompiles, L1BlockInfo, OpContext, OpHaltReason, OpSpecId, OpTransaction, - OpTransactionError, + L1BlockInfo, OpContext, OpHaltReason, OpSpecId, OpTransaction, OpTransactionError, + precompiles::OpPrecompiles, }; use reth_ethereum::evm::revm::{ - context::{result::ResultAndState, BlockEnv, CfgEnv}, + Context, Inspector, Journal, + context::{BlockEnv, CfgEnv, result::ResultAndState}, handler::PrecompileProvider, interpreter::InterpreterResult, - Context, Inspector, Journal, }; use revm::{context_interface::result::EVMError, 
inspector::NoOpInspector}; use std::error::Error; diff --git a/op-reth/examples/custom-node/src/evm/assembler.rs b/rust/op-reth/examples/custom-node/src/evm/assembler.rs similarity index 82% rename from op-reth/examples/custom-node/src/evm/assembler.rs rename to rust/op-reth/examples/custom-node/src/evm/assembler.rs index dd1cfd3cb46..2fb06eb8dc9 100644 --- a/op-reth/examples/custom-node/src/evm/assembler.rs +++ b/rust/op-reth/examples/custom-node/src/evm/assembler.rs @@ -8,7 +8,7 @@ use reth_ethereum::{ evm::primitives::execute::{BlockAssembler, BlockAssemblerInput}, primitives::Receipt, }; -use reth_op::{node::OpBlockAssembler, DepositReceipt}; +use reth_op::{DepositReceipt, node::OpBlockAssembler}; use std::sync::Arc; #[derive(Clone, Debug)] @@ -25,10 +25,10 @@ impl CustomBlockAssembler { impl BlockAssembler for CustomBlockAssembler where F: for<'a> BlockExecutorFactory< - ExecutionCtx<'a> = CustomBlockExecutionCtx, - Transaction = CustomTransaction, - Receipt: Receipt + DepositReceipt, - >, + ExecutionCtx<'a> = CustomBlockExecutionCtx, + Transaction = CustomTransaction, + Receipt: Receipt + DepositReceipt, + >, { type Block = Block; diff --git a/rust/op-reth/examples/custom-node/src/evm/builder.rs b/rust/op-reth/examples/custom-node/src/evm/builder.rs new file mode 100644 index 00000000000..4be3253b9d5 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/evm/builder.rs @@ -0,0 +1,22 @@ +use crate::{chainspec::CustomChainSpec, evm::CustomEvmConfig, primitives::CustomNodePrimitives}; +use reth_ethereum::node::api::FullNodeTypes; +use reth_node_builder::{BuilderContext, NodeTypes, components::ExecutorBuilder}; +use std::{future, future::Future}; + +#[derive(Debug, Clone, Default)] +#[non_exhaustive] +pub struct CustomExecutorBuilder; + +impl ExecutorBuilder for CustomExecutorBuilder +where + Node::Types: NodeTypes, +{ + type EVM = CustomEvmConfig; + + fn build_evm( + self, + ctx: &BuilderContext, + ) -> impl Future> + Send { + 
future::ready(Ok(CustomEvmConfig::new(ctx.chain_spec()))) + } +} diff --git a/rust/op-reth/examples/custom-node/src/evm/config.rs b/rust/op-reth/examples/custom-node/src/evm/config.rs new file mode 100644 index 00000000000..094e0b48de5 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/evm/config.rs @@ -0,0 +1,180 @@ +use crate::{ + chainspec::CustomChainSpec, + engine::{CustomExecutionData, CustomPayloadBuilderAttributes}, + evm::{CustomBlockAssembler, alloy::CustomEvmFactory, executor::CustomBlockExecutionCtx}, + primitives::{Block, CustomHeader, CustomNodePrimitives, CustomTransaction}, +}; +use alloy_consensus::BlockHeader; +use alloy_eips::{Decodable2718, eip2718::WithEncoded}; +use alloy_evm::EvmEnv; +use alloy_op_evm::OpBlockExecutionCtx; +use alloy_rpc_types_engine::PayloadError; +use op_alloy_rpc_types_engine::flashblock::OpFlashblockPayloadBase; +use op_revm::OpSpecId; +use reth_engine_primitives::ExecutableTxIterator; +use reth_ethereum::{ + chainspec::EthChainSpec, + node::api::{BuildNextEnv, ConfigureEvm, PayloadBuilderError}, + primitives::{SealedBlock, SealedHeader}, +}; +use reth_node_builder::{ConfigureEngineEvm, NewPayloadError}; +use reth_op::{ + chainspec::OpHardforks, + evm::primitives::{EvmEnvFor, ExecutionCtxFor}, + node::{OpEvmConfig, OpNextBlockEnvAttributes, OpRethReceiptBuilder}, + primitives::SignedTransaction, +}; +use reth_rpc_api::eth::helpers::pending_block::BuildPendingEnv; +use revm_primitives::Bytes; +use std::sync::Arc; + +#[derive(Debug, Clone)] +pub struct CustomEvmConfig { + pub(super) inner: OpEvmConfig, + pub(super) block_assembler: CustomBlockAssembler, + pub(super) custom_evm_factory: CustomEvmFactory, +} + +impl CustomEvmConfig { + pub fn new(chain_spec: Arc) -> Self { + Self { + inner: OpEvmConfig::new( + Arc::new(chain_spec.inner().clone()), + OpRethReceiptBuilder::default(), + ), + block_assembler: CustomBlockAssembler::new(chain_spec), + custom_evm_factory: CustomEvmFactory::new(), + } + } +} + +impl 
ConfigureEvm for CustomEvmConfig { + type Primitives = CustomNodePrimitives; + type Error = ::Error; + type NextBlockEnvCtx = CustomNextBlockEnvAttributes; + type BlockExecutorFactory = Self; + type BlockAssembler = CustomBlockAssembler; + + fn block_executor_factory(&self) -> &Self::BlockExecutorFactory { + self + } + + fn block_assembler(&self) -> &Self::BlockAssembler { + &self.block_assembler + } + + fn evm_env(&self, header: &CustomHeader) -> Result, Self::Error> { + self.inner.evm_env(header) + } + + fn next_evm_env( + &self, + parent: &CustomHeader, + attributes: &CustomNextBlockEnvAttributes, + ) -> Result, Self::Error> { + self.inner.next_evm_env(parent, &attributes.inner) + } + + fn context_for_block( + &self, + block: &SealedBlock, + ) -> Result { + Ok(CustomBlockExecutionCtx { + inner: OpBlockExecutionCtx { + parent_hash: block.header().parent_hash(), + parent_beacon_block_root: block.header().parent_beacon_block_root(), + extra_data: block.header().extra_data().clone(), + }, + extension: block.extension, + }) + } + + fn context_for_next_block( + &self, + parent: &SealedHeader, + attributes: Self::NextBlockEnvCtx, + ) -> Result { + Ok(CustomBlockExecutionCtx { + inner: OpBlockExecutionCtx { + parent_hash: parent.hash(), + parent_beacon_block_root: attributes.inner.parent_beacon_block_root, + extra_data: attributes.inner.extra_data, + }, + extension: attributes.extension, + }) + } +} + +impl ConfigureEngineEvm for CustomEvmConfig { + fn evm_env_for_payload( + &self, + payload: &CustomExecutionData, + ) -> Result, Self::Error> { + self.inner.evm_env_for_payload(&payload.inner) + } + + fn context_for_payload<'a>( + &self, + payload: &'a CustomExecutionData, + ) -> Result, Self::Error> { + Ok(CustomBlockExecutionCtx { + inner: self.inner.context_for_payload(&payload.inner)?, + extension: payload.extension, + }) + } + + fn tx_iterator_for_payload( + &self, + payload: &CustomExecutionData, + ) -> Result, Self::Error> { + let transactions = 
payload.inner.payload.transactions().clone(); + let convert = |encoded: Bytes| { + let tx = CustomTransaction::decode_2718_exact(encoded.as_ref()) + .map_err(Into::into) + .map_err(PayloadError::Decode)?; + let signer = tx.try_recover().map_err(NewPayloadError::other)?; + Ok::<_, NewPayloadError>(WithEncoded::new(encoded, tx.with_signer(signer))) + }; + Ok((transactions, convert)) + } +} + +/// Additional parameters required for executing next block of custom transactions. +#[derive(Debug, Clone)] +pub struct CustomNextBlockEnvAttributes { + inner: OpNextBlockEnvAttributes, + extension: u64, +} + +impl From for CustomNextBlockEnvAttributes { + fn from(value: OpFlashblockPayloadBase) -> Self { + Self { inner: value.into(), extension: 0 } + } +} + +impl BuildPendingEnv for CustomNextBlockEnvAttributes { + fn build_pending_env(parent: &SealedHeader) -> Self { + Self { + inner: OpNextBlockEnvAttributes::build_pending_env(parent), + extension: parent.extension, + } + } +} + +impl BuildNextEnv + for CustomNextBlockEnvAttributes +where + H: BlockHeader, + ChainSpec: EthChainSpec + OpHardforks, +{ + fn build_next_env( + attributes: &CustomPayloadBuilderAttributes, + parent: &SealedHeader, + chain_spec: &ChainSpec, + ) -> Result { + let inner = + OpNextBlockEnvAttributes::build_next_env(&attributes.inner, parent, chain_spec)?; + + Ok(CustomNextBlockEnvAttributes { inner, extension: attributes.extension }) + } +} diff --git a/rust/op-reth/examples/custom-node/src/evm/env.rs b/rust/op-reth/examples/custom-node/src/evm/env.rs new file mode 100644 index 00000000000..1ad5d140251 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/evm/env.rs @@ -0,0 +1,340 @@ +use crate::primitives::{CustomTransaction, TxPayment}; +use alloy_eips::{Typed2718, eip2930::AccessList}; +use alloy_evm::{FromRecoveredTx, FromTxWithEncoded, IntoTxEnv}; +use alloy_op_evm::block::OpTxEnv; +use alloy_primitives::{Address, B256, Bytes, TxKind, U256}; +use op_alloy_consensus::OpTxEnvelope; +use 
op_revm::OpTransaction; +use reth_ethereum::evm::{primitives::TransactionEnv, revm::context::TxEnv}; + +/// An Optimism transaction extended by [`PaymentTxEnv`] that can be fed to [`Evm`]. +/// +/// [`Evm`]: alloy_evm::Evm +#[derive(Clone, Debug)] +pub enum CustomTxEnv { + Op(OpTransaction), + Payment(PaymentTxEnv), +} + +/// A transaction environment is a set of information related to an Ethereum transaction that can be +/// fed to [`Evm`] for execution. +/// +/// [`Evm`]: alloy_evm::Evm +#[derive(Clone, Debug, Default)] +pub struct PaymentTxEnv(pub TxEnv); + +impl revm::context::Transaction for CustomTxEnv { + type AccessListItem<'a> + = ::AccessListItem<'a> + where + Self: 'a; + type Authorization<'a> + = ::Authorization<'a> + where + Self: 'a; + + fn tx_type(&self) -> u8 { + match self { + Self::Op(tx) => tx.tx_type(), + Self::Payment(tx) => tx.tx_type(), + } + } + + fn caller(&self) -> Address { + match self { + Self::Op(tx) => tx.caller(), + Self::Payment(tx) => tx.caller(), + } + } + + fn gas_limit(&self) -> u64 { + match self { + Self::Op(tx) => tx.gas_limit(), + Self::Payment(tx) => tx.gas_limit(), + } + } + + fn value(&self) -> U256 { + match self { + Self::Op(tx) => tx.value(), + Self::Payment(tx) => tx.value(), + } + } + + fn input(&self) -> &Bytes { + match self { + Self::Op(tx) => tx.input(), + Self::Payment(tx) => tx.input(), + } + } + + fn nonce(&self) -> u64 { + match self { + Self::Op(tx) => revm::context::Transaction::nonce(tx), + Self::Payment(tx) => revm::context::Transaction::nonce(tx), + } + } + + fn kind(&self) -> TxKind { + match self { + Self::Op(tx) => tx.kind(), + Self::Payment(tx) => tx.kind(), + } + } + + fn chain_id(&self) -> Option { + match self { + Self::Op(tx) => tx.chain_id(), + Self::Payment(tx) => tx.chain_id(), + } + } + + fn gas_price(&self) -> u128 { + match self { + Self::Op(tx) => tx.gas_price(), + Self::Payment(tx) => tx.gas_price(), + } + } + + fn access_list(&self) -> Option>> { + Some(match self { + Self::Op(tx) => 
tx.base.access_list.iter(), + Self::Payment(tx) => tx.0.access_list.iter(), + }) + } + + fn blob_versioned_hashes(&self) -> &[B256] { + match self { + Self::Op(tx) => tx.blob_versioned_hashes(), + Self::Payment(tx) => tx.blob_versioned_hashes(), + } + } + + fn max_fee_per_blob_gas(&self) -> u128 { + match self { + Self::Op(tx) => tx.max_fee_per_blob_gas(), + Self::Payment(tx) => tx.max_fee_per_blob_gas(), + } + } + + fn authorization_list_len(&self) -> usize { + match self { + Self::Op(tx) => tx.authorization_list_len(), + Self::Payment(tx) => tx.authorization_list_len(), + } + } + + fn authorization_list(&self) -> impl Iterator> { + match self { + Self::Op(tx) => tx.base.authorization_list.iter(), + Self::Payment(tx) => tx.0.authorization_list.iter(), + } + } + + fn max_priority_fee_per_gas(&self) -> Option { + match self { + Self::Op(tx) => tx.max_priority_fee_per_gas(), + Self::Payment(tx) => tx.max_priority_fee_per_gas(), + } + } +} + +impl revm::context::Transaction for PaymentTxEnv { + type AccessListItem<'a> + = ::AccessListItem<'a> + where + Self: 'a; + type Authorization<'a> + = ::Authorization<'a> + where + Self: 'a; + + fn tx_type(&self) -> u8 { + self.0.tx_type() + } + + fn caller(&self) -> Address { + self.0.caller() + } + + fn gas_limit(&self) -> u64 { + self.0.gas_limit() + } + + fn value(&self) -> U256 { + self.0.value() + } + + fn input(&self) -> &Bytes { + self.0.input() + } + + fn nonce(&self) -> u64 { + revm::context::Transaction::nonce(&self.0) + } + + fn kind(&self) -> TxKind { + self.0.kind() + } + + fn chain_id(&self) -> Option { + self.0.chain_id() + } + + fn gas_price(&self) -> u128 { + self.0.gas_price() + } + + fn access_list(&self) -> Option>> { + self.0.access_list() + } + + fn blob_versioned_hashes(&self) -> &[B256] { + self.0.blob_versioned_hashes() + } + + fn max_fee_per_blob_gas(&self) -> u128 { + self.0.max_fee_per_blob_gas() + } + + fn authorization_list_len(&self) -> usize { + self.0.authorization_list_len() + } + + fn 
authorization_list(&self) -> impl Iterator> { + self.0.authorization_list() + } + + fn max_priority_fee_per_gas(&self) -> Option { + self.0.max_priority_fee_per_gas() + } +} + +impl TransactionEnv for PaymentTxEnv { + fn set_gas_limit(&mut self, gas_limit: u64) { + self.0.set_gas_limit(gas_limit); + } + + fn nonce(&self) -> u64 { + self.0.nonce() + } + + fn set_nonce(&mut self, nonce: u64) { + self.0.set_nonce(nonce); + } + + fn set_access_list(&mut self, access_list: AccessList) { + self.0.set_access_list(access_list); + } +} + +impl TransactionEnv for CustomTxEnv { + fn set_gas_limit(&mut self, gas_limit: u64) { + match self { + Self::Op(tx) => tx.set_gas_limit(gas_limit), + Self::Payment(tx) => tx.set_gas_limit(gas_limit), + } + } + + fn nonce(&self) -> u64 { + match self { + Self::Op(tx) => tx.nonce(), + Self::Payment(tx) => tx.nonce(), + } + } + + fn set_nonce(&mut self, nonce: u64) { + match self { + Self::Op(tx) => tx.set_nonce(nonce), + Self::Payment(tx) => tx.set_nonce(nonce), + } + } + + fn set_access_list(&mut self, access_list: AccessList) { + match self { + Self::Op(tx) => tx.set_access_list(access_list), + Self::Payment(tx) => tx.set_access_list(access_list), + } + } +} + +impl FromRecoveredTx for TxEnv { + fn from_recovered_tx(tx: &TxPayment, caller: Address) -> Self { + let TxPayment { + chain_id, + nonce, + gas_limit, + max_fee_per_gas, + max_priority_fee_per_gas, + to, + value, + } = tx; + Self { + tx_type: tx.ty(), + caller, + gas_limit: *gas_limit, + gas_price: *max_fee_per_gas, + gas_priority_fee: Some(*max_priority_fee_per_gas), + kind: TxKind::Call(*to), + value: *value, + nonce: *nonce, + chain_id: Some(*chain_id), + ..Default::default() + } + } +} + +impl FromTxWithEncoded for TxEnv { + fn from_encoded_tx(tx: &TxPayment, sender: Address, _encoded: Bytes) -> Self { + Self::from_recovered_tx(tx, sender) + } +} + +impl FromRecoveredTx for CustomTxEnv { + fn from_recovered_tx(tx: &OpTxEnvelope, sender: Address) -> Self { + 
Self::Op(OpTransaction::from_recovered_tx(tx, sender)) + } +} + +impl FromTxWithEncoded for CustomTxEnv { + fn from_encoded_tx(tx: &OpTxEnvelope, sender: Address, encoded: Bytes) -> Self { + Self::Op(OpTransaction::from_encoded_tx(tx, sender, encoded)) + } +} + +impl FromRecoveredTx for CustomTxEnv { + fn from_recovered_tx(tx: &CustomTransaction, sender: Address) -> Self { + match tx { + CustomTransaction::Op(tx) => Self::from_recovered_tx(tx, sender), + CustomTransaction::Payment(tx) => { + Self::Payment(PaymentTxEnv(TxEnv::from_recovered_tx(tx.tx(), sender))) + } + } + } +} + +impl FromTxWithEncoded for CustomTxEnv { + fn from_encoded_tx(tx: &CustomTransaction, sender: Address, encoded: Bytes) -> Self { + match tx { + CustomTransaction::Op(tx) => Self::from_encoded_tx(tx, sender, encoded), + CustomTransaction::Payment(tx) => { + Self::Payment(PaymentTxEnv(TxEnv::from_encoded_tx(tx.tx(), sender, encoded))) + } + } + } +} + +impl IntoTxEnv for CustomTxEnv { + fn into_tx_env(self) -> Self { + self + } +} + +impl OpTxEnv for CustomTxEnv { + fn encoded_bytes(&self) -> Option<&Bytes> { + match self { + Self::Op(tx) => tx.encoded_bytes(), + Self::Payment(_) => None, + } + } +} diff --git a/op-reth/examples/custom-node/src/evm/executor.rs b/rust/op-reth/examples/custom-node/src/evm/executor.rs similarity index 95% rename from op-reth/examples/custom-node/src/evm/executor.rs rename to rust/op-reth/examples/custom-node/src/evm/executor.rs index 3b935b4c64e..e396f47fc4e 100644 --- a/op-reth/examples/custom-node/src/evm/executor.rs +++ b/rust/op-reth/examples/custom-node/src/evm/executor.rs @@ -1,22 +1,22 @@ use crate::{ evm::{ - alloy::{CustomEvm, CustomEvmFactory}, CustomEvmConfig, CustomTxEnv, + alloy::{CustomEvm, CustomEvmFactory}, }, primitives::CustomTransaction, }; use alloy_consensus::transaction::Recovered; use alloy_evm::{ + Database, Evm, RecoveredTx, block::{ BlockExecutionError, BlockExecutionResult, BlockExecutor, BlockExecutorFactory, BlockExecutorFor, 
ExecutableTx, OnStateHook, }, precompiles::PrecompilesMap, - Database, Evm, RecoveredTx, }; -use alloy_op_evm::{block::OpTxResult, OpBlockExecutionCtx, OpBlockExecutor}; +use alloy_op_evm::{OpBlockExecutionCtx, OpBlockExecutor, block::OpTxResult}; use reth_ethereum::evm::primitives::InspectorFor; -use reth_op::{chainspec::OpChainSpec, node::OpRethReceiptBuilder, OpReceipt, OpTxType}; +use reth_op::{OpReceipt, OpTxType, chainspec::OpChainSpec, node::OpRethReceiptBuilder}; use revm::database::State; use std::sync::Arc; diff --git a/op-reth/examples/custom-node/src/evm/mod.rs b/rust/op-reth/examples/custom-node/src/evm/mod.rs similarity index 100% rename from op-reth/examples/custom-node/src/evm/mod.rs rename to rust/op-reth/examples/custom-node/src/evm/mod.rs diff --git a/rust/op-reth/examples/custom-node/src/lib.rs b/rust/op-reth/examples/custom-node/src/lib.rs new file mode 100644 index 00000000000..9892f2af529 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/lib.rs @@ -0,0 +1,87 @@ +//! This example shows how to implement a custom node. +//! +//! A node consists of: +//! - primitives: block,header,transactions +//! - components: network,pool,evm +//! 
- engine: advances the node + +#![cfg_attr(not(test), warn(unused_crate_dependencies))] + +use crate::{ + engine::{CustomEngineValidatorBuilder, CustomPayloadTypes}, + engine_api::CustomEngineApiBuilder, + evm::CustomExecutorBuilder, + pool::CustomPooledTransaction, + primitives::CustomTransaction, + rpc::CustomRpcTypes, +}; +use chainspec::CustomChainSpec; +use primitives::CustomNodePrimitives; +use reth_ethereum::node::api::{FullNodeTypes, NodeTypes}; +use reth_node_builder::{ + Node, NodeAdapter, + components::{BasicPayloadServiceBuilder, ComponentsBuilder}, +}; +use reth_op::{ + node::{ + OpAddOns, OpNode, + node::{OpConsensusBuilder, OpNetworkBuilder, OpPayloadBuilder, OpPoolBuilder}, + txpool, + }, + rpc::OpEthApiBuilder, +}; + +pub mod chainspec; +pub mod engine; +pub mod engine_api; +pub mod evm; +pub mod pool; +pub mod primitives; +pub mod rpc; + +#[derive(Debug, Clone)] +pub struct CustomNode { + inner: OpNode, +} + +impl NodeTypes for CustomNode { + type Primitives = CustomNodePrimitives; + type ChainSpec = CustomChainSpec; + type Storage = ::Storage; + type Payload = CustomPayloadTypes; +} + +impl Node for CustomNode +where + N: FullNodeTypes, +{ + type ComponentsBuilder = ComponentsBuilder< + N, + OpPoolBuilder>, + BasicPayloadServiceBuilder, + OpNetworkBuilder, + CustomExecutorBuilder, + OpConsensusBuilder, + >; + + type AddOns = OpAddOns< + NodeAdapter, + OpEthApiBuilder, + CustomEngineValidatorBuilder, + CustomEngineApiBuilder, + >; + + fn components_builder(&self) -> Self::ComponentsBuilder { + ComponentsBuilder::default() + .node_types::() + .pool(OpPoolBuilder::default()) + .executor(CustomExecutorBuilder::default()) + .payload(BasicPayloadServiceBuilder::new(OpPayloadBuilder::new(false))) + .network(OpNetworkBuilder::new(false, false)) + .consensus(OpConsensusBuilder::default()) + } + + fn add_ons(&self) -> Self::AddOns { + self.inner.add_ons_builder().build() + } +} diff --git a/op-reth/examples/custom-node/src/pool.rs 
b/rust/op-reth/examples/custom-node/src/pool.rs similarity index 96% rename from op-reth/examples/custom-node/src/pool.rs rename to rust/op-reth/examples/custom-node/src/pool.rs index 8828803a0f3..fad72be0d77 100644 --- a/op-reth/examples/custom-node/src/pool.rs +++ b/rust/op-reth/examples/custom-node/src/pool.rs @@ -1,14 +1,14 @@ use crate::primitives::{CustomTransaction, TxPayment}; use alloy_consensus::{ + Signed, TransactionEnvelope, crypto::RecoveryError, error::ValueError, transaction::{SignerRecoverable, TxHashRef}, - Signed, TransactionEnvelope, }; -use alloy_primitives::{Address, Sealed, B256}; +use alloy_primitives::{Address, B256, Sealed}; use op_alloy_consensus::{OpPooledTransaction, OpTransaction, TxDeposit}; use reth_ethereum::primitives::{ - serde_bincode_compat::RlpBincode, InMemorySize, SignedTransaction, + InMemorySize, SignedTransaction, serde_bincode_compat::RlpBincode, }; #[derive(Clone, Debug, TransactionEnvelope)] diff --git a/op-reth/examples/custom-node/src/primitives/block.rs b/rust/op-reth/examples/custom-node/src/primitives/block.rs similarity index 100% rename from op-reth/examples/custom-node/src/primitives/block.rs rename to rust/op-reth/examples/custom-node/src/primitives/block.rs diff --git a/op-reth/examples/custom-node/src/primitives/header.rs b/rust/op-reth/examples/custom-node/src/primitives/header.rs similarity index 95% rename from op-reth/examples/custom-node/src/primitives/header.rs rename to rust/op-reth/examples/custom-node/src/primitives/header.rs index 946bad51894..beaa7626f00 100644 --- a/op-reth/examples/custom-node/src/primitives/header.rs +++ b/rust/op-reth/examples/custom-node/src/primitives/header.rs @@ -1,8 +1,8 @@ use alloy_consensus::Header; -use alloy_primitives::{Address, BlockNumber, Bloom, Bytes, Sealable, B256, B64, U256}; +use alloy_primitives::{Address, B64, B256, BlockNumber, Bloom, Bytes, Sealable, U256}; use alloy_rlp::{Encodable, RlpDecodable, RlpEncodable}; use reth_codecs::Compact; -use 
reth_ethereum::primitives::{serde_bincode_compat::RlpBincode, BlockHeader, InMemorySize}; +use reth_ethereum::primitives::{BlockHeader, InMemorySize, serde_bincode_compat::RlpBincode}; use revm_primitives::keccak256; use serde::{Deserialize, Serialize}; diff --git a/op-reth/examples/custom-node/src/primitives/mod.rs b/rust/op-reth/examples/custom-node/src/primitives/mod.rs similarity index 100% rename from op-reth/examples/custom-node/src/primitives/mod.rs rename to rust/op-reth/examples/custom-node/src/primitives/mod.rs diff --git a/rust/op-reth/examples/custom-node/src/primitives/tx.rs b/rust/op-reth/examples/custom-node/src/primitives/tx.rs new file mode 100644 index 00000000000..803d5d238b6 --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/primitives/tx.rs @@ -0,0 +1,144 @@ +use super::TxPayment; +use alloy_consensus::{ + Signed, TransactionEnvelope, + crypto::RecoveryError, + transaction::{SignerRecoverable, TxHashRef}, +}; +use alloy_eips::Encodable2718; +use alloy_primitives::{B256, Sealed, Signature}; +use alloy_rlp::BufMut; +use op_alloy_consensus::{OpTxEnvelope, TxDeposit}; +use reth_codecs::{ + Compact, + alloy::transaction::{CompactEnvelope, FromTxCompact, ToTxCompact}, +}; +use reth_ethereum::primitives::{InMemorySize, serde_bincode_compat::RlpBincode}; +use reth_op::{OpTransaction, primitives::SignedTransaction}; +use revm_primitives::Address; + +/// Either [`OpTxEnvelope`] or [`TxPayment`]. +#[derive(Debug, Clone, TransactionEnvelope)] +#[envelope(tx_type_name = TxTypeCustom)] +pub enum CustomTransaction { + /// A regular Optimism transaction as defined by [`OpTxEnvelope`]. + #[envelope(flatten)] + Op(OpTxEnvelope), + /// A [`TxPayment`] tagged with type 0x2A (decimal 42). 
+ #[envelope(ty = 42)] + Payment(Signed), +} + +impl RlpBincode for CustomTransaction {} + +impl reth_codecs::alloy::transaction::Envelope for CustomTransaction { + fn signature(&self) -> &Signature { + match self { + CustomTransaction::Op(tx) => reth_codecs::alloy::transaction::Envelope::signature(tx), + CustomTransaction::Payment(tx) => tx.signature(), + } + } + + fn tx_type(&self) -> Self::TxType { + match self { + CustomTransaction::Op(tx) => TxTypeCustom::Op(tx.tx_type()), + CustomTransaction::Payment(_) => TxTypeCustom::Payment, + } + } +} + +impl FromTxCompact for CustomTransaction { + type TxType = TxTypeCustom; + + fn from_tx_compact(buf: &[u8], tx_type: Self::TxType, signature: Signature) -> (Self, &[u8]) + where + Self: Sized, + { + match tx_type { + TxTypeCustom::Op(tx_type) => { + let (tx, buf) = OpTxEnvelope::from_tx_compact(buf, tx_type, signature); + (Self::Op(tx), buf) + } + TxTypeCustom::Payment => { + let (tx, buf) = TxPayment::from_compact(buf, buf.len()); + let tx = Signed::new_unhashed(tx, signature); + (Self::Payment(tx), buf) + } + } + } +} + +impl ToTxCompact for CustomTransaction { + fn to_tx_compact(&self, buf: &mut (impl BufMut + AsMut<[u8]>)) { + match self { + CustomTransaction::Op(tx) => tx.to_tx_compact(buf), + CustomTransaction::Payment(tx) => { + tx.tx().to_compact(buf); + } + } + } +} + +impl Compact for CustomTransaction { + fn to_compact(&self, buf: &mut B) -> usize + where + B: BufMut + AsMut<[u8]>, + { + ::to_compact(self, buf) + } + + fn from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { + ::from_compact(buf, len) + } +} + +impl OpTransaction for CustomTransaction { + fn is_deposit(&self) -> bool { + match self { + CustomTransaction::Op(op) => op.is_deposit(), + CustomTransaction::Payment(_) => false, + } + } + + fn as_deposit(&self) -> Option<&Sealed> { + match self { + CustomTransaction::Op(op) => op.as_deposit(), + CustomTransaction::Payment(_) => None, + } + } +} + +impl SignerRecoverable for CustomTransaction { + fn 
recover_signer(&self) -> Result { + match self { + CustomTransaction::Op(tx) => SignerRecoverable::recover_signer(tx), + CustomTransaction::Payment(tx) => SignerRecoverable::recover_signer(tx), + } + } + + fn recover_signer_unchecked(&self) -> Result { + match self { + CustomTransaction::Op(tx) => SignerRecoverable::recover_signer_unchecked(tx), + CustomTransaction::Payment(tx) => SignerRecoverable::recover_signer_unchecked(tx), + } + } +} + +impl TxHashRef for CustomTransaction { + fn tx_hash(&self) -> &B256 { + match self { + CustomTransaction::Op(tx) => TxHashRef::tx_hash(tx), + CustomTransaction::Payment(tx) => tx.hash(), + } + } +} + +impl SignedTransaction for CustomTransaction {} + +impl InMemorySize for CustomTransaction { + fn size(&self) -> usize { + match self { + CustomTransaction::Op(tx) => InMemorySize::size(tx), + CustomTransaction::Payment(tx) => InMemorySize::size(tx), + } + } +} diff --git a/op-reth/examples/custom-node/src/primitives/tx_custom.rs b/rust/op-reth/examples/custom-node/src/primitives/tx_custom.rs similarity index 96% rename from op-reth/examples/custom-node/src/primitives/tx_custom.rs rename to rust/op-reth/examples/custom-node/src/primitives/tx_custom.rs index 210696f49c6..d65012df171 100644 --- a/op-reth/examples/custom-node/src/primitives/tx_custom.rs +++ b/rust/op-reth/examples/custom-node/src/primitives/tx_custom.rs @@ -1,12 +1,12 @@ use crate::primitives::PAYMENT_TX_TYPE_ID; use alloy_consensus::{ - transaction::{RlpEcdsaDecodableTx, RlpEcdsaEncodableTx}, SignableTransaction, Transaction, + transaction::{RlpEcdsaDecodableTx, RlpEcdsaEncodableTx}, }; -use alloy_eips::{eip2930::AccessList, eip7702::SignedAuthorization, Typed2718}; -use alloy_primitives::{Address, Bytes, ChainId, Signature, TxKind, B256, U256}; +use alloy_eips::{Typed2718, eip2930::AccessList, eip7702::SignedAuthorization}; +use alloy_primitives::{Address, B256, Bytes, ChainId, Signature, TxKind, U256}; use alloy_rlp::{BufMut, Decodable, Encodable}; -use 
reth_ethereum::primitives::{serde_bincode_compat::RlpBincode, InMemorySize}; +use reth_ethereum::primitives::{InMemorySize, serde_bincode_compat::RlpBincode}; /// A transaction with a priority fee ([EIP-1559](https://eips.ethereum.org/EIPS/eip-1559)). #[derive( diff --git a/rust/op-reth/examples/custom-node/src/primitives/tx_type.rs b/rust/op-reth/examples/custom-node/src/primitives/tx_type.rs new file mode 100644 index 00000000000..338c49819fb --- /dev/null +++ b/rust/op-reth/examples/custom-node/src/primitives/tx_type.rs @@ -0,0 +1,39 @@ +use crate::primitives::TxTypeCustom; +use alloy_primitives::bytes::{Buf, BufMut}; +use reth_codecs::{Compact, txtype::COMPACT_EXTENDED_IDENTIFIER_FLAG}; + +pub const PAYMENT_TX_TYPE_ID: u8 = 42; + +impl Compact for TxTypeCustom { + fn to_compact(&self, buf: &mut B) -> usize + where + B: BufMut + AsMut<[u8]>, + { + match self { + Self::Op(ty) => ty.to_compact(buf), + Self::Payment => { + buf.put_u8(PAYMENT_TX_TYPE_ID); + COMPACT_EXTENDED_IDENTIFIER_FLAG + } + } + } + + fn from_compact(mut buf: &[u8], identifier: usize) -> (Self, &[u8]) { + match identifier { + COMPACT_EXTENDED_IDENTIFIER_FLAG => ( + { + let extended_identifier = buf.get_u8(); + match extended_identifier { + PAYMENT_TX_TYPE_ID => Self::Payment, + _ => panic!("Unsupported TxType identifier: {extended_identifier}"), + } + }, + buf, + ), + v => { + let (inner, buf) = TxTypeCustom::from_compact(buf, v); + (inner, buf) + } + } + } +} diff --git a/op-reth/examples/custom-node/src/rpc.rs b/rust/op-reth/examples/custom-node/src/rpc.rs similarity index 100% rename from op-reth/examples/custom-node/src/rpc.rs rename to rust/op-reth/examples/custom-node/src/rpc.rs diff --git a/op-reth/examples/engine-api-access/Cargo.toml b/rust/op-reth/examples/engine-api-access/Cargo.toml similarity index 100% rename from op-reth/examples/engine-api-access/Cargo.toml rename to rust/op-reth/examples/engine-api-access/Cargo.toml diff --git 
a/rust/op-reth/examples/engine-api-access/src/main.rs b/rust/op-reth/examples/engine-api-access/src/main.rs new file mode 100644 index 00000000000..87023011177 --- /dev/null +++ b/rust/op-reth/examples/engine-api-access/src/main.rs @@ -0,0 +1,47 @@ +//! Example demonstrating how to access the Engine API instance during construction. +//! +//! Run with +//! +//! ```sh +//! cargo run -p example-engine-api-access +//! ``` + +use reth_db::test_utils::create_test_rw_db; +use reth_node_builder::{EngineApiExt, FullNodeComponents, NodeBuilder, NodeConfig}; +use reth_optimism_chainspec::BASE_MAINNET; +use reth_optimism_node::{ + OpAddOns, OpEngineApiBuilder, OpNode, args::RollupArgs, node::OpEngineValidatorBuilder, +}; +use tokio::sync::oneshot; + +#[tokio::main] +async fn main() { + // Op node configuration and setup + let config = NodeConfig::new(BASE_MAINNET.clone()); + let db = create_test_rw_db(); + let args = RollupArgs::default(); + let op_node = OpNode::new(args); + + let (engine_api_tx, _engine_api_rx) = oneshot::channel(); + + let engine_api = + EngineApiExt::new(OpEngineApiBuilder::::default(), move |api| { + let _ = engine_api_tx.send(api); + }); + + let _builder = NodeBuilder::new(config) + .with_database(db) + .with_types::() + .with_components(op_node.components()) + .with_add_ons(OpAddOns::default().with_engine_api(engine_api)) + .on_component_initialized(move |ctx| { + let _provider = ctx.provider(); + Ok(()) + }) + .on_node_started(|_full_node| Ok(())) + .on_rpc_started(|_ctx, handles| { + let _client = handles.rpc.http_client(); + Ok(()) + }) + .check_launch(); +} diff --git a/op-reth/examples/exex-hello-world/Cargo.toml b/rust/op-reth/examples/exex-hello-world/Cargo.toml similarity index 100% rename from op-reth/examples/exex-hello-world/Cargo.toml rename to rust/op-reth/examples/exex-hello-world/Cargo.toml diff --git a/rust/op-reth/examples/exex-hello-world/src/main.rs b/rust/op-reth/examples/exex-hello-world/src/main.rs new file mode 100644 index 
00000000000..68b6afc00d4 --- /dev/null +++ b/rust/op-reth/examples/exex-hello-world/src/main.rs @@ -0,0 +1,145 @@ +//! Example for a simple Execution Extension +//! +//! Run with +//! +//! ```sh +//! cargo run -p example-exex-hello-world -- node --dev --dev.block-time 5s +//! ``` + +use clap::Parser; +use futures::TryStreamExt; +use reth_ethereum::{ + chainspec::EthereumHardforks, + exex::{ExExContext, ExExEvent, ExExNotification}, + node::{ + EthereumNode, + api::{FullNodeComponents, NodeTypes}, + builder::rpc::RpcHandle, + }, + rpc::api::eth::helpers::FullEthApi, +}; +use reth_tracing::tracing::info; +use tokio::sync::oneshot; + +/// Additional CLI arguments +#[derive(Parser)] +struct ExExArgs { + /// whether to launch an op-reth node + #[arg(long)] + optimism: bool, +} + +/// A basic subscription loop of new blocks. +async fn my_exex(mut ctx: ExExContext) -> eyre::Result<()> { + while let Some(notification) = ctx.notifications.try_next().await? { + match ¬ification { + ExExNotification::ChainCommitted { new } => { + info!(committed_chain = ?new.range(), "Received commit"); + } + ExExNotification::ChainReorged { old, new } => { + info!(from_chain = ?old.range(), to_chain = ?new.range(), "Received reorg"); + } + ExExNotification::ChainReverted { old } => { + info!(reverted_chain = ?old.range(), "Received revert"); + } + }; + + if let Some(committed_chain) = notification.committed_chain() { + ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + } + } + + Ok(()) +} + +/// This is an example of how to access the [`RpcHandle`] inside an ExEx. It receives the +/// [`RpcHandle`] once the node is launched fully. +/// +/// This function supports both Opstack Eth API and ethereum Eth API. +/// +/// The received handle gives access to the `EthApi` has full access to all eth api functionality +/// [`FullEthApi`]. And also gives access to additional eth-related rpc method handlers, such as eth +/// filter. 
+async fn ethapi_exex<Node, EthApi>( + mut ctx: ExExContext<Node>, + rpc_handle: oneshot::Receiver<RpcHandle<Node, EthApi>>, +) -> eyre::Result<()> +where + Node: FullNodeComponents<Types: NodeTypes<ChainSpec: EthereumHardforks>>, + EthApi: FullEthApi, +{ + // Wait for the ethapi to be sent from the main function + let rpc_handle = rpc_handle.await?; + info!("Received rpc handle inside exex"); + + // obtain the ethapi from the rpc handle + let ethapi = rpc_handle.eth_api(); + + // EthFilter type that provides all eth_getlogs related logic + let _eth_filter = rpc_handle.eth_handlers().filter.clone(); + // EthPubSub type that provides all eth_subscribe logic + let _eth_pubsub = rpc_handle.eth_handlers().pubsub.clone(); + // The TraceApi type that provides all the trace_ handlers + let _trace_api = rpc_handle.trace_api(); + // The DebugApi type that provides all the debug_ handlers + let _debug_api = rpc_handle.debug_api(); + + while let Some(notification) = ctx.notifications.try_next().await? { + if let Some(committed_chain) = notification.committed_chain() { + ctx.events.send(ExExEvent::FinishedHeight(committed_chain.tip().num_hash()))?; + + // can use the eth api to interact with the node + let _rpc_block = ethapi.rpc_block(committed_chain.tip().hash().into(), true).await?; + } + } + + Ok(()) +} + +fn main() -> eyre::Result<()> { + let args = ExExArgs::parse(); + + if args.optimism { + reth_op::cli::Cli::parse_args().run(|builder, _| { + let (rpc_handle_tx, rpc_handle_rx) = oneshot::channel(); + Box::pin(async move { + let handle = builder + .node(reth_op::node::OpNode::default()) + .install_exex("my-exex", async move |ctx| Ok(my_exex(ctx))) + .install_exex("ethapi-exex", async move |ctx| { + Ok(ethapi_exex(ctx, rpc_handle_rx)) + }) + .launch() + .await?; + + // Retrieve the rpc handle from the node and send it to the exex + rpc_handle_tx + .send(handle.node.add_ons_handle.clone()) + .expect("Failed to send ethapi to ExEx"); + + handle.wait_for_node_exit().await + }) + }) + } else { + reth_ethereum::cli::Cli::parse_args().run(|builder, _| { 
Box::pin(async move { + let (rpc_handle_tx, rpc_handle_rx) = oneshot::channel(); + let handle = builder + .node(EthereumNode::default()) + .install_exex("my-exex", async move |ctx| Ok(my_exex(ctx))) + .install_exex("ethapi-exex", async move |ctx| { + Ok(ethapi_exex(ctx, rpc_handle_rx)) + }) + .launch() + .await?; + + // Retrieve the rpc handle from the node and send it to the exex + rpc_handle_tx + .send(handle.node.add_ons_handle.clone()) + .expect("Failed to send ethapi to ExEx"); + + handle.wait_for_node_exit().await + }) + }) + } +} diff --git a/op-reth/examples/op-db-access/Cargo.toml b/rust/op-reth/examples/op-db-access/Cargo.toml similarity index 100% rename from op-reth/examples/op-db-access/Cargo.toml rename to rust/op-reth/examples/op-db-access/Cargo.toml diff --git a/op-reth/examples/op-db-access/src/main.rs b/rust/op-reth/examples/op-db-access/src/main.rs similarity index 100% rename from op-reth/examples/op-db-access/src/main.rs rename to rust/op-reth/examples/op-db-access/src/main.rs diff --git a/op-reth/flake.lock b/rust/op-reth/flake.lock similarity index 100% rename from op-reth/flake.lock rename to rust/op-reth/flake.lock diff --git a/op-reth/flake.nix b/rust/op-reth/flake.nix similarity index 100% rename from op-reth/flake.nix rename to rust/op-reth/flake.nix diff --git a/op-reth/funding.json b/rust/op-reth/funding.json similarity index 100% rename from op-reth/funding.json rename to rust/op-reth/funding.json diff --git a/rust/op-reth/justfile b/rust/op-reth/justfile new file mode 100644 index 00000000000..497347fe8c3 --- /dev/null +++ b/rust/op-reth/justfile @@ -0,0 +1,31 @@ +# default recipe to display help information +default: + @just --list + +# Check for unused dependencies in the crate graph. 
+check-udeps: + cargo +nightly udeps --workspace --lib --examples --tests --benches --all-features --locked + +# Run unit tests with optional edge storage feature +test edge='': + #!/usr/bin/env bash + set -euo pipefail + RUST_BACKTRACE=1 cargo nextest run \ + --features "asm-keccak {{edge}}" --locked \ + --workspace \ + --no-tests=warn \ + -E "!kind(test) and not binary(e2e_testsuite) and not test(test_online)" + +# Run integration tests for reth-optimism-node +test-integration: + RUST_BACKTRACE=1 cargo nextest run --locked -p reth-optimism-node + +# Check Windows cross-compilation +check-windows: + rustup target add x86_64-pc-windows-gnu + cargo check -p op-reth --target x86_64-pc-windows-gnu + +# Build all examples +examples: + cargo build --examples --locked + diff --git a/rust/package-lock.json b/rust/package-lock.json new file mode 100644 index 00000000000..ab15834c689 --- /dev/null +++ b/rust/package-lock.json @@ -0,0 +1,6 @@ +{ + "name": "rust", + "lockfileVersion": 3, + "requires": true, + "packages": {} +} diff --git a/kona/rust-toolchain.toml b/rust/rust-toolchain.toml similarity index 100% rename from kona/rust-toolchain.toml rename to rust/rust-toolchain.toml diff --git a/rust/rustfmt.toml b/rust/rustfmt.toml new file mode 100644 index 00000000000..94a9b0349de --- /dev/null +++ b/rust/rustfmt.toml @@ -0,0 +1,9 @@ +style_edition = "2024" +imports_granularity = "Crate" +use_small_heuristics = "Max" +comment_width = 100 +wrap_comments = true +binop_separator = "Back" +format_code_in_doc_comments = true +doc_comment_code_block_width = 100 +format_macro_matchers = true diff --git a/rust/typos.toml b/rust/typos.toml new file mode 100644 index 00000000000..b3f237d9a9f --- /dev/null +++ b/rust/typos.toml @@ -0,0 +1,113 @@ +[files] +extend-exclude = [ + ".git", + "target", + "Cargo.lock", + "kona/docker/recipes/kona-node-dev/kona-node/bootstores", + "docs/docs/dist/", + "*.min.js", + "op-alloy/CHANGELOG.md", + 
"op-reth/crates/storage/libmdbx-rs/mdbx-sys/libmdbx", + "op-reth/testing/ef-tests", +] + +[default] +extend-ignore-re = [ + # Hex strings of various lengths + "(?i)0x[0-9a-f]{8,}", + "(?i)[0-9a-f]{32,}", + "(?i)[0-9a-f]{8}", + "(?i)[0-9a-f]{40}", + "(?i)[0-9a-f]{64}", + # Ordinals in identifiers + "[0-9]+nd", + "[0-9]+th", + "[0-9]+st", + "[0-9]+rd", + # Base64 encoded strings (common in tests and configs) + "[A-Za-z0-9+/]{20,}={0,2}", +] + +[default.extend-words] +# Valid Rust/Cargo terms +crate = "crate" +crates = "crates" + +# Blockchain/Ethereum specific terms +alloy = "alloy" +anvil = "anvil" +asm = "asm" +asterisc = "asterisc" +batcher = "batcher" +bedrock = "bedrock" +bootnode = "bootnode" +cannon = "cannon" +chainid = "chainid" +codegen = "codegen" +derivation = "derivation" +enr = "enr" +ethereum = "ethereum" +fpvm = "fpvm" +geth = "geth" +hel = "hel" +interop = "interop" +kona = "kona" +libmdbx = "libmdbx" +merkle = "merkle" +mips = "mips" +mpsc = "mpsc" +optimism = "optimism" +preimage = "preimage" +revm = "revm" +risc = "risc" +rollup = "rollup" +rpc = "rpc" +sequencer = "sequencer" +ser = "ser" +serde = "serde" +supervisor = "supervisor" +superchain = "superchain" +trie = "trie" +txs = "txs" +udeps = "udeps" +usize = "usize" +workspaces = "workspaces" + +# Technical abbreviations and acronyms +api = "api" +cli = "cli" +cfg = "cfg" +const = "const" +env = "env" +impl = "impl" +io = "io" +lru = "lru" +mpt = "mpt" +msg = "msg" +mut = "mut" +nums = "nums" +num = "num" +ok = "ok" +std = "std" +structs = "structs" +ty = "ty" +typ = "typ" +vec = "vec" + +# Additional project-specific terms +flate = "flate" +ratatui = "ratatui" +superseed = "superseed" +jsonrpsee = "jsonrpsee" +tokio = "tokio" +async = "async" +await = "await" + +# op-reth specific +seeked = "seeked" +Seeked = "Seeked" +Whe = "Whe" +ONL = "ONL" +Iy = "Iy" +Pn = "Pn" +BA = "BA"